diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index ad2fafc6c962..33040f7eb7ad 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -141,12 +141,17 @@ jobs:
- name: Start Dubbo Backend
if: matrix.os_name == 'linux_openresty' && (steps.test_env.outputs.type == 'plugin' || steps.test_env.outputs.type == 'last')
run: |
+ cur_dir=$(pwd)
sudo apt update
sudo apt install -y maven
cd t/lib/dubbo-backend
mvn package
cd dubbo-backend-provider/target
java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log &
+ cd "$cur_dir/t/lib/dubbo-serialization-backend"
+ mvn package
+ cd dubbo-serialization-backend-provider/target
+ java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log &
- name: Build xDS library
if: steps.test_env.outputs.type == 'last'
diff --git a/.github/workflows/centos7-ci.yml b/.github/workflows/centos7-ci.yml
index 319b188497a0..177e9c8fb8d9 100644
--- a/.github/workflows/centos7-ci.yml
+++ b/.github/workflows/centos7-ci.yml
@@ -99,12 +99,18 @@ jobs:
- name: Start Dubbo Backend
run: |
+ cur_dir=$(pwd)
sudo apt update
sudo apt install -y maven
cd t/lib/dubbo-backend
mvn package
cd dubbo-backend-provider/target
java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log &
+ cd "$cur_dir/t/lib/dubbo-serialization-backend"
+ mvn package
+ cd dubbo-serialization-backend-provider/target
+ java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log &
+
- name: Build xDS library
if: steps.test_env.outputs.type == 'last'
diff --git a/.github/workflows/gm-cron.yaml b/.github/workflows/gm-cron.yaml
index cac652e961de..b4c8f7e438ef 100644
--- a/.github/workflows/gm-cron.yaml
+++ b/.github/workflows/gm-cron.yaml
@@ -124,12 +124,17 @@ jobs:
- name: Start Dubbo Backend
if: steps.test_env.outputs.type == 'plugin'
run: |
+ cur_dir=$(pwd)
sudo apt update
sudo apt install -y maven
cd t/lib/dubbo-backend
mvn package
cd dubbo-backend-provider/target
java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log &
+ cd "$cur_dir/t/lib/dubbo-serialization-backend"
+ mvn package
+ cd dubbo-serialization-backend-provider/target
+ java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log &
- name: Build xDS library
if: steps.test_env.outputs.type == 'last'
diff --git a/.github/workflows/redhat-ci.yaml b/.github/workflows/redhat-ci.yaml
index 62ca5b662f95..29b907302af5 100644
--- a/.github/workflows/redhat-ci.yaml
+++ b/.github/workflows/redhat-ci.yaml
@@ -95,12 +95,17 @@ jobs:
- name: Start Dubbo Backend
run: |
+ cur_dir=$(pwd)
sudo apt update
sudo apt install -y maven
cd t/lib/dubbo-backend
mvn package
cd dubbo-backend-provider/target
java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log &
+ cd "$cur_dir/t/lib/dubbo-serialization-backend"
+ mvn package
+ cd dubbo-serialization-backend-provider/target
+ java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log &
- name: Build xDS library
if: steps.test_env.outputs.type == 'last'
diff --git a/.gitignore b/.gitignore
index 49dbbfe5759e..94669c34502a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -59,8 +59,7 @@ client_body_temp
utils/lj-releng
utils/reindex
*.etcd/
-t/lib/dubbo-backend/dubbo-backend-interface/target/
-t/lib/dubbo-backend/dubbo-backend-provider/target/
+t/lib/dubbo*/**/target/
.idea/
*.iml
\.*
diff --git a/NOTICE b/NOTICE
index c663509f2aee..abdbd3d3ff0f 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,5 +1,5 @@
Apache APISIX
-Copyright 2019-2023 The Apache Software Foundation
+Copyright 2019-2024 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
diff --git a/apisix/plugins/elasticsearch-logger.lua b/apisix/plugins/elasticsearch-logger.lua
index de29da2e0184..38bc884ed53c 100644
--- a/apisix/plugins/elasticsearch-logger.lua
+++ b/apisix/plugins/elasticsearch-logger.lua
@@ -75,7 +75,23 @@ local schema = {
ssl_verify = {
type = "boolean",
default = true
- }
+ },
+ include_req_body = {type = "boolean", default = false},
+ include_req_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
+ include_resp_body = {type = "boolean", default = false},
+ include_resp_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
},
encrypt_fields = {"auth.password"},
oneOf = {
@@ -138,7 +154,10 @@ local function send_to_elasticsearch(conf, entries)
end
local uri = selected_endpoint_addr .. "/_bulk"
local body = core.table.concat(entries, "")
- local headers = {["Content-Type"] = "application/x-ndjson"}
+ local headers = {
+ ["Content-Type"] = "application/x-ndjson;compatible-with=7",
+ ["Accept"] = "application/vnd.elasticsearch+json;compatible-with=7"
+ }
if conf.auth then
local authorization = "Basic " .. ngx.encode_base64(
conf.auth.username .. ":" .. conf.auth.password
@@ -168,6 +187,11 @@ local function send_to_elasticsearch(conf, entries)
end
+function _M.body_filter(conf, ctx)
+ log_util.collect_body(conf, ctx)
+end
+
+
function _M.log(conf, ctx)
local entry = get_logger_entry(conf, ctx)
diff --git a/apisix/plugins/forward-auth.lua b/apisix/plugins/forward-auth.lua
index 6d4454a581e8..69dc48de6bf0 100644
--- a/apisix/plugins/forward-auth.lua
+++ b/apisix/plugins/forward-auth.lua
@@ -24,6 +24,7 @@ local schema = {
properties = {
uri = {type = "string"},
allow_degradation = {type = "boolean", default = false},
+ status_on_error = {type = "integer", minimum = 200, maximum = 599, default = 403},
ssl_verify = {
type = "boolean",
default = true,
@@ -131,8 +132,8 @@ function _M.access(conf, ctx)
if not res and conf.allow_degradation then
return
elseif not res then
- core.log.error("failed to process forward auth, err: ", err)
- return 403
+ core.log.warn("failed to process forward auth, err: ", err)
+ return conf.status_on_error
end
if res.status >= 300 then
diff --git a/apisix/plugins/http-dubbo.lua b/apisix/plugins/http-dubbo.lua
new file mode 100644
index 000000000000..f068654babc4
--- /dev/null
+++ b/apisix/plugins/http-dubbo.lua
@@ -0,0 +1,262 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements. See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+local require = require
+local core = require("apisix.core")
+local pairs = pairs
+local str_format = string.format
+local bit = require("bit")
+local rshift = bit.rshift
+local band = bit.band
+local char = string.char
+local tostring = tostring
+local ngx = ngx
+local type = type
+local plugin_name = "http-dubbo"
+
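+-- A minimal usage sketch (the upstream address, service and method names
+-- are illustrative): proxy an HTTP request to a Dubbo provider exposing
+-- org.apache.dubbo.sample.DemoService#sayHello(String):
+--
+--   curl http://127.0.0.1:9180/apisix/admin/routes/1 \
+--   -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+--   {
+--       "uri": "/demo",
+--       "plugins": {
+--           "http-dubbo": {
+--               "service_name": "org.apache.dubbo.sample.DemoService",
+--               "method": "sayHello",
+--               "params_type_desc": "Ljava/lang/String;"
+--           }
+--       },
+--       "upstream": {
+--           "type": "roundrobin",
+--           "nodes": {"127.0.0.1:20880": 1}
+--       }
+--   }'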
+
+local schema = {
+ type = "object",
+ properties = {
+ service_name = {
+ type = "string",
+ minLength = 1,
+ },
+ service_version = {
+ type = "string",
+ pattern = [[^\d+\.\d+\.\d+]],
+ default = "0.0.0"
+ },
+ method = {
+ type = "string",
+ minLength = 1,
+ },
+ params_type_desc = {
+ type = "string",
+ default = ""
+ },
+ serialization_header_key = {
+ type = "string"
+ },
+ serialized = {
+ type = "boolean",
+ default = false
+ },
+ connect_timeout = {
+ type = "number",
+ default = 6000
+ },
+ read_timeout = {
+ type = "number",
+ default = 6000
+ },
+ send_timeout = {
+ type = "number",
+ default = 6000
+ }
+ },
+ required = {"service_name", "method"},
+}
+
+local _M = {
+ version = 0.1,
+ priority = 504,
+ name = plugin_name,
+ schema = schema,
+}
+
+function _M.check_schema(conf)
+ return core.schema.check(schema, conf)
+end
+
+
+local function str_int32(int)
+ return char(band(rshift(int, 24), 0xff),
+ band(rshift(int, 16), 0xff),
+ band(rshift(int, 8), 0xff),
+ band(int, 0xff))
+end
+
+
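+-- A Dubbo frame starts with a fixed 16-byte header:
+--   bytes 1-2   magic number (0xdabb)
+--   byte  3     flags: bit 7 request, bit 6 two-way, bit 5 event,
+--               low 5 bits serialization id
+--   byte  4     status (responses only; 20 means OK)
+--   bytes 5-12  request id, unsigned 64-bit big-endian
+--   bytes 13-16 body length, unsigned 32-bit big-endian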
+local function parse_dubbo_header(header)
+ -- a complete Dubbo header is 16 bytes; bail out on a short read
+ if #header < 16 then
+ return nil
+ end
+
+ local magic_number = str_format("%04x", header:byte(1) * 256 + header:byte(2))
+ local message_flag = header:byte(3)
+ local status = header:byte(4)
+ local request_id = 0
+ for i = 5, 12 do
+ request_id = request_id * 256 + header:byte(i)
+ end
+
+ local data_length = header:byte(13) * 0x1000000 + header:byte(14) * 0x10000
+ + header:byte(15) * 0x100 + header:byte(16)
+
+ local is_request = band(rshift(message_flag, 7), 0x01) == 1 and 1 or 0
+ local is_two_way = band(rshift(message_flag, 6), 0x01) == 1 and 1 or 0
+ local is_event = band(rshift(message_flag, 5), 0x01) == 1 and 1 or 0
+
+ return {
+ magic_number = magic_number,
+ message_flag = message_flag,
+ is_request = is_request,
+ is_two_way = is_two_way,
+ is_event = is_event,
+ status = status,
+ request_id = request_id,
+ data_length = data_length
+ }
+end
+
+
+local json_escape_map = {
+ ["\\"] = "\\\\", ["\""] = "\\\"", ["\n"] = "\\n", ["\t"] = "\\t",
+ ["\r"] = "\\r", ["\b"] = "\\b", ["\f"] = "\\f",
+}
+
+
+local function string_to_json_string(str)
+ -- quote and escape a Lua string so it forms a valid JSON string literal;
+ -- a single gsub avoids quadratic concatenation in a byte-wise loop
+ return "\"" .. str:gsub("[\\\"\n\t\r\b\f]", json_escape_map) .. "\""
+end
+
+
+local function get_dubbo_request(conf, ctx)
+ -- use dubbo and fastjson
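+ -- 0xdabb is the protocol magic; 0xc6 sets the request and two-way flags
+ -- and selects serialization id 6 (fastjson); the status byte is 0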
+ local first_byte4 = "\xda\xbb\xc6\x00"
+
+ local requestId = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ local version = "\"2.0.2\"\n"
+ local service = "\"" .. conf.service_name .. "\"" .. "\n"
+
+ local service_version = "\"" .. conf.service_version .. "\"" .. "\n"
+ local method_name = "\"" .. conf.method .. "\"" .. "\n"
+
+ local params_desc = "\"" .. conf.params_type_desc .. "\"" .. "\n"
+ local params = ""
+ local serialized = conf.serialized
+ if conf.serialization_header_key then
+ local serialization_header = core.request.header(ctx, conf.serialization_header_key)
+ serialized = serialization_header == "true"
+ end
+ if serialized then
+ params = core.request.get_body()
+ if params then
+ local end_of_params = core.string.sub(params, -1)
+ if end_of_params ~= "\n" then
+ params = params .. "\n"
+ end
+ end
+ else
+ local body_data = core.request.get_body()
+ if body_data then
+ local lua_object, err = core.json.decode(body_data)
+ if not lua_object then
+ core.log.warn("failed to decode request body as JSON: ", err)
+ lua_object = {}
+ end
+ for _, v in pairs(lua_object) do
+ local pt = type(v)
+ if pt == "nil" then
+ params = params .. "null" .. "\n"
+ elseif pt == "string" then
+ params = params .. string_to_json_string(v) .. "\n"
+ elseif pt == "number" then
+ params = params .. tostring(v) .. "\n"
+ else
+ params = params .. core.json.encode(v) .. "\n"
+ end
+ end
+ end
+
+ end
+ local attachments = "{}\n"
+ if params == nil then
+ params = ""
+ end
+ local payload = #version + #service + #service_version
+ + #method_name + #params_desc + #params + #attachments
+ return {
+ first_byte4,
+ requestId,
+ str_int32(payload),
+ version,
+ service,
+ service_version,
+ method_name,
+ params_desc,
+ params,
+ attachments
+ }
+end
+
+
+function _M.before_proxy(conf, ctx)
+ local sock = ngx.socket.tcp()
+
+ sock:settimeouts(conf.connect_timeout, conf.send_timeout, conf.read_timeout)
+ local ok, err = sock:connect(ctx.picked_server.host, ctx.picked_server.port)
+ if not ok then
+ sock:close()
+ core.log.error("failed to connect to upstream ", err)
+ return 502
+ end
+ local request = get_dubbo_request(conf, ctx)
+ local bytes, _ = sock:send(request)
+ if bytes and bytes > 0 then
+ local header, _ = sock:receiveany(16)
+ if header then
+ local header_info = parse_dubbo_header(header)
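+ -- 20 is the OK status in the Dubbo protocol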
+ if header_info and header_info.status == 20 then
+ local readline = sock:receiveuntil("\n")
+ local body_status, _, _ = readline()
+ if body_status then
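+ -- the first byte of the payload is the response flag:
+ -- "1"/"4" mean a value follows, "2"/"5" mean a null value
+ -- (the +3 variants additionally carry attachments)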
+ local response_status = core.string.sub(body_status, 1, 1)
+ if response_status == "2" or response_status == "5" then
+ sock:close()
+ return 200
+ elseif response_status == "1" or response_status == "4" then
+ local body, _, _ = readline()
+ sock:close()
+ return 200, body
+ end
+ end
+ end
+ end
+ end
+ sock:close()
+ return 500
+end
+
+return _M
diff --git a/apisix/plugins/mocking.lua b/apisix/plugins/mocking.lua
index af5bc75edb2a..51c4bff78cbf 100644
--- a/apisix/plugins/mocking.lua
+++ b/apisix/plugins/mocking.lua
@@ -229,6 +229,7 @@ function _M.access(conf, ctx)
if conf.response_headers then
for key, value in pairs(conf.response_headers) do
+ value = core.utils.resolve_var(value, ctx.var)
core.response.add_header(key, value)
end
end
diff --git a/apisix/plugins/response-rewrite.lua b/apisix/plugins/response-rewrite.lua
index ee9746fa37fc..adf630fe52c9 100644
--- a/apisix/plugins/response-rewrite.lua
+++ b/apisix/plugins/response-rewrite.lua
@@ -27,9 +27,7 @@ local pairs = pairs
local ipairs = ipairs
local type = type
local pcall = pcall
-local zlib = require("ffi-zlib")
-local str_buffer = require("string.buffer")
-local is_br_libs_loaded, brotli = pcall(require, "brotli")
+local content_decode = require("apisix.utils.content-decode")
local lrucache = core.lrucache.new({
@@ -203,83 +201,6 @@ local function check_set_headers(headers)
end
-local function inflate_gzip(data)
- local inputs = str_buffer.new():set(data)
- local outputs = str_buffer.new()
-
- local read_inputs = function(size)
- local data = inputs:get(size)
- if data == "" then
- return nil
- end
- return data
- end
-
- local write_outputs = function(data)
- return outputs:put(data)
- end
-
- local ok, err = zlib.inflateGzip(read_inputs, write_outputs)
- if not ok then
- return nil, err
- end
-
- return outputs:get()
-end
-
-
-local function brotli_stream_decode(read_inputs, write_outputs)
- -- read 64k data per times
- local read_size = 64 * 1024
- local decompressor = brotli.decompressor:new()
-
- local chunk, ok, res
- repeat
- chunk = read_inputs(read_size)
- if chunk then
- ok, res = pcall(function()
- return decompressor:decompress(chunk)
- end)
- else
- ok, res = pcall(function()
- return decompressor:finish()
- end)
- end
- if not ok then
- return false, res
- end
- write_outputs(res)
- until not chunk
-
- return true, nil
-end
-
-
-local function brotli_decode(data)
- local inputs = str_buffer.new():set(data)
- local outputs = str_buffer.new()
-
- local read_inputs = function(size)
- local data = inputs:get(size)
- if data == "" then
- return nil
- end
- return data
- end
-
- local write_outputs = function(data)
- return outputs:put(data)
- end
-
- local ok, err = brotli_stream_decode(read_inputs, write_outputs)
- if not ok then
- return nil, err
- end
-
- return outputs:get()
-end
-
-
function _M.check_schema(conf)
local ok, err = core.schema.check(schema, conf)
if not ok then
@@ -341,23 +262,19 @@ function _M.body_filter(conf, ctx)
end
local err
- if ctx.response_encoding == "gzip" then
- body, err = inflate_gzip(body)
- if err ~= nil then
- core.log.error("filters may not work as expected, inflate gzip err: ", err)
+ if ctx.response_encoding ~= nil then
+ local decoder = content_decode.dispatch_decoder(ctx.response_encoding)
+ if not decoder then
+ core.log.error("filters may not work as expected ",
+ "due to unsupported compression encoding type: ",
+ ctx.response_encoding)
return
end
- elseif ctx.response_encoding == "br" and is_br_libs_loaded then
- body, err = brotli_decode(body)
+ body, err = decoder(body)
if err ~= nil then
- core.log.error("filters may not work as expected, brotli decode err: ", err)
+ core.log.error("filters may not work as expected: ", err)
return
end
- elseif ctx.response_encoding ~= nil then
- core.log.error("filters may not work as expected ",
- "due to unsupported compression encoding type: ",
- ctx.response_encoding)
- return
end
for _, filter in ipairs(conf.filters) do
diff --git a/apisix/plugins/skywalking-logger.lua b/apisix/plugins/skywalking-logger.lua
index 136b9dec1b34..94ba65534a4c 100644
--- a/apisix/plugins/skywalking-logger.lua
+++ b/apisix/plugins/skywalking-logger.lua
@@ -39,6 +39,21 @@ local schema = {
log_format = {type = "object"},
timeout = {type = "integer", minimum = 1, default = 3},
include_req_body = {type = "boolean", default = false},
+ include_req_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
+ include_resp_body = {type = "boolean", default = false},
+ include_resp_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
},
required = {"endpoint_addr"},
}
@@ -116,6 +131,11 @@ local function send_http_data(conf, log_message)
end
+function _M.body_filter(conf, ctx)
+ log_util.collect_body(conf, ctx)
+end
+
+
function _M.log(conf, ctx)
local log_body = log_util.get_log_entry(plugin_name, conf, ctx)
local trace_context
diff --git a/apisix/plugins/sls-logger.lua b/apisix/plugins/sls-logger.lua
index de2fbae67ffa..114a209690e5 100644
--- a/apisix/plugins/sls-logger.lua
+++ b/apisix/plugins/sls-logger.lua
@@ -33,6 +33,21 @@ local schema = {
type = "object",
properties = {
include_req_body = {type = "boolean", default = false},
+ include_req_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
+ include_resp_body = {type = "boolean", default = false},
+ include_resp_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
timeout = {type = "integer", minimum = 1, default= 5000},
log_format = {type = "object"},
host = {type = "string"},
@@ -129,6 +144,12 @@ local function handle_log(entries)
return send_tcp_data(entries[1].route_conf, data)
end
+
+function _M.body_filter(conf, ctx)
+ log_util.collect_body(conf, ctx)
+end
+
+
-- log phase in APISIX
function _M.log(conf, ctx)
local entry = log_util.get_log_entry(plugin_name, conf, ctx)
@@ -146,7 +167,7 @@ function _M.log(conf, ctx)
}
local rf5424_data = rf5424.encode("SYSLOG", "INFO", ctx.var.host, "apisix",
ctx.var.pid, json_str, structured_data)
-
+ core.log.info("collect_data:" .. rf5424_data)
local process_context = {
data = rf5424_data,
route_conf = conf
diff --git a/apisix/plugins/syslog.lua b/apisix/plugins/syslog.lua
index 155ea7b41bba..fa160feeb45d 100644
--- a/apisix/plugins/syslog.lua
+++ b/apisix/plugins/syslog.lua
@@ -34,7 +34,22 @@ local schema = {
pool_size = {type = "integer", minimum = 5, default = 5},
tls = {type = "boolean", default = false},
log_format = {type = "object"},
- include_req_body = {type = "boolean", default = false}
+ include_req_body = {type = "boolean", default = false},
+ include_req_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
+ include_resp_body = {type = "boolean", default = false},
+ include_resp_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
},
required = {"host", "port"}
}
@@ -69,6 +84,11 @@ function _M.check_schema(conf, schema_type)
end
+function _M.body_filter(conf, ctx)
+ log_util.collect_body(conf, ctx)
+end
+
+
function _M.log(conf, ctx)
local entry = log_util.get_log_entry(plugin_name, conf, ctx)
syslog.push_entry(conf, ctx, entry)
diff --git a/apisix/plugins/syslog/init.lua b/apisix/plugins/syslog/init.lua
index 0ab34f8054e9..8a3d90e38b08 100644
--- a/apisix/plugins/syslog/init.lua
+++ b/apisix/plugins/syslog/init.lua
@@ -88,7 +88,7 @@ function _M.push_entry(conf, ctx, entry)
local rfc5424_data = rfc5424.encode("SYSLOG", "INFO", ctx.var.host,
"apisix", ctx.var.pid, json_str)
-
+ core.log.info("collect_data:" .. rfc5424_data)
if batch_processor_manager:add_entry(conf, rfc5424_data) then
return
end
diff --git a/apisix/plugins/tcp-logger.lua b/apisix/plugins/tcp-logger.lua
index 444afe1d968a..e95ba9339e95 100644
--- a/apisix/plugins/tcp-logger.lua
+++ b/apisix/plugins/tcp-logger.lua
@@ -33,7 +33,22 @@ local schema = {
tls_options = {type = "string"},
timeout = {type = "integer", minimum = 1, default= 1000},
log_format = {type = "object"},
- include_req_body = {type = "boolean", default = false}
+ include_req_body = {type = "boolean", default = false},
+ include_req_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
+ include_resp_body = {type = "boolean", default = false},
+ include_resp_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
},
required = {"host", "port"}
}
@@ -77,6 +92,7 @@ local function send_tcp_data(conf, log_message)
sock:settimeout(conf.timeout)
core.log.info("sending a batch logs to ", conf.host, ":", conf.port)
+ core.log.info("sending log_message: ", log_message)
local ok, err = sock:connect(conf.host, conf.port)
if not ok then
@@ -109,6 +125,11 @@ local function send_tcp_data(conf, log_message)
end
+function _M.body_filter(conf, ctx)
+ log_util.collect_body(conf, ctx)
+end
+
+
function _M.log(conf, ctx)
local entry = log_util.get_log_entry(plugin_name, conf, ctx)
diff --git a/apisix/plugins/udp-logger.lua b/apisix/plugins/udp-logger.lua
index 7d76a4b02eae..75e8bba31740 100644
--- a/apisix/plugins/udp-logger.lua
+++ b/apisix/plugins/udp-logger.lua
@@ -31,7 +31,22 @@ local schema = {
port = {type = "integer", minimum = 0},
timeout = {type = "integer", minimum = 1, default = 3},
log_format = {type = "object"},
- include_req_body = {type = "boolean", default = false}
+ include_req_body = {type = "boolean", default = false},
+ include_req_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
+ include_resp_body = {type = "boolean", default = false},
+ include_resp_body_expr = {
+ type = "array",
+ minItems = 1,
+ items = {
+ type = "array"
+ }
+ },
},
required = {"host", "port"}
}
@@ -70,6 +85,7 @@ local function send_udp_data(conf, log_message)
sock:settimeout(conf.timeout * 1000)
core.log.info("sending a batch logs to ", conf.host, ":", conf.port)
+ core.log.info("sending log_message: ", log_message)
local ok, err = sock:setpeername(conf.host, conf.port)
@@ -95,6 +111,11 @@ local function send_udp_data(conf, log_message)
end
+function _M.body_filter(conf, ctx)
+ log_util.collect_body(conf, ctx)
+end
+
+
function _M.log(conf, ctx)
local entry = log_util.get_log_entry(plugin_name, conf, ctx)
diff --git a/apisix/utils/content-decode.lua b/apisix/utils/content-decode.lua
new file mode 100644
index 000000000000..c22c965fd865
--- /dev/null
+++ b/apisix/utils/content-decode.lua
@@ -0,0 +1,112 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements. See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local pcall = pcall
+local zlib = require("ffi-zlib")
+local str_buffer = require("string.buffer")
+local is_br_libs_loaded, brotli = pcall(require, "brotli")
+local content_decode_funcs = {}
+local _M = {}
+
+
+local function inflate_gzip(data)
+ local inputs = str_buffer.new():set(data)
+ local outputs = str_buffer.new()
+
+ local read_inputs = function(size)
+ local data = inputs:get(size)
+ if data == "" then
+ return nil
+ end
+ return data
+ end
+
+ local write_outputs = function(data)
+ return outputs:put(data)
+ end
+
+ local ok, err = zlib.inflateGzip(read_inputs, write_outputs)
+ if not ok then
+ return nil, "inflate gzip err: " .. err
+ end
+
+ return outputs:get()
+end
+content_decode_funcs.gzip = inflate_gzip
+
+
+local function brotli_stream_decode(read_inputs, write_outputs)
+ -- read 64k data per times
+ local read_size = 64 * 1024
+ local decompressor = brotli.decompressor:new()
+
+ local chunk, ok, res
+ repeat
+ chunk = read_inputs(read_size)
+ if chunk then
+ ok, res = pcall(function()
+ return decompressor:decompress(chunk)
+ end)
+ else
+ ok, res = pcall(function()
+ return decompressor:finish()
+ end)
+ end
+ if not ok then
+ return false, res
+ end
+ write_outputs(res)
+ until not chunk
+
+ return true, nil
+end
+
+
+local function brotli_decode(data)
+ local inputs = str_buffer.new():set(data)
+ local outputs = str_buffer.new()
+
+ local read_inputs = function(size)
+ local data = inputs:get(size)
+ if data == "" then
+ return nil
+ end
+ return data
+ end
+
+ local write_outputs = function(data)
+ return outputs:put(data)
+ end
+
+ local ok, err = brotli_stream_decode(read_inputs, write_outputs)
+ if not ok then
+ return nil, "brotli decode err: " .. err
+ end
+
+ return outputs:get()
+end
+
+if is_br_libs_loaded then
+ content_decode_funcs.br = brotli_decode
+end
+
+
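+-- return the decode function registered for the given Content-Encoding
+-- value ("gzip", plus "br" when the brotli library is available), or nil
+-- when the encoding is unsupported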
+function _M.dispatch_decoder(response_encoding)
+ return content_decode_funcs[response_encoding]
+end
+
+
+return _M
diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua
index f724c2c51099..a3ff834ee9f4 100644
--- a/apisix/utils/log-util.lua
+++ b/apisix/utils/log-util.lua
@@ -17,9 +17,11 @@
local core = require("apisix.core")
local plugin = require("apisix.plugin")
local expr = require("resty.expr.v1")
-local ngx = ngx
+local content_decode = require("apisix.utils.content-decode")
+local ngx = ngx
local pairs = pairs
local ngx_now = ngx.now
+local ngx_header = ngx.header
local os_date = os.date
local str_byte = string.byte
local math_floor = math.floor
@@ -47,6 +49,7 @@ local function gen_log_format(format)
return log_format
end
+
local function get_custom_format_log(ctx, format)
local log_format = lru_log_format(format or "", nil, gen_log_format, format)
local entry = core.table.new(0, core.table.nkeys(log_format))
@@ -311,7 +314,29 @@ function _M.collect_body(conf, ctx)
if not final_body then
return
end
- ctx.resp_body = final_body
+
+ local response_encoding = ngx_header["Content-Encoding"]
+ if not response_encoding then
+ ctx.resp_body = final_body
+ return
+ end
+
+ local decoder = content_decode.dispatch_decoder(response_encoding)
+ if not decoder then
+ core.log.warn("unsupported compression encoding type: ",
+ response_encoding)
+ ctx.resp_body = final_body
+ return
+ end
+
+ local decoded_body, err = decoder(final_body)
+ if err ~= nil then
+ core.log.warn("try decode compressed data err: ", err)
+ ctx.resp_body = final_body
+ return
+ end
+
+ ctx.resp_body = decoded_body
end
end
end
diff --git a/ci/pod/docker-compose.plugin.yml b/ci/pod/docker-compose.plugin.yml
index 13a34e3908f7..afdb3817554d 100644
--- a/ci/pod/docker-compose.plugin.yml
+++ b/ci/pod/docker-compose.plugin.yml
@@ -201,7 +201,7 @@ services:
# Elasticsearch Logger Service
elasticsearch-noauth:
- image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
+ image: docker.elastic.co/elasticsearch/elasticsearch:8.12.0
restart: unless-stopped
ports:
- "9200:9200"
@@ -212,18 +212,16 @@ services:
xpack.security.enabled: 'false'
elasticsearch-auth:
- image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
+ image: docker.elastic.co/elasticsearch/elasticsearch:8.12.0
restart: unless-stopped
ports:
- "9201:9201"
- - "9301:9301"
environment:
ES_JAVA_OPTS: -Xms512m -Xmx512m
discovery.type: single-node
ELASTIC_USERNAME: elastic
ELASTIC_PASSWORD: 123456
http.port: 9201
- transport.tcp.port: 9301
xpack.security.enabled: 'true'
diff --git a/conf/config-default.yaml b/conf/config-default.yaml
index 6636bf096a5c..fcee30871755 100755
--- a/conf/config-default.yaml
+++ b/conf/config-default.yaml
@@ -492,6 +492,7 @@ plugins: # plugin list (sorted by priority)
#- dubbo-proxy # priority: 507
- grpc-transcode # priority: 506
- grpc-web # priority: 505
+ - http-dubbo # priority: 504
- public-api # priority: 501
- prometheus # priority: 500
- datadog # priority: 495
diff --git a/docs/en/latest/plugins/elasticsearch-logger.md b/docs/en/latest/plugins/elasticsearch-logger.md
index 06f70354f844..098dc27d335b 100644
--- a/docs/en/latest/plugins/elasticsearch-logger.md
+++ b/docs/en/latest/plugins/elasticsearch-logger.md
@@ -48,6 +48,10 @@ When the Plugin is enabled, APISIX will serialize the request context informatio
| auth.password | string | True | | Elasticsearch [authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html) password. |
| ssl_verify | boolean | False | true | When set to `true` enables SSL verification as per [OpenResty docs](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake). |
| timeout | integer | False | 10 | Elasticsearch send data timeout in seconds. |
+| include_req_body | boolean | False | false | When set to `true` includes the request body in the log. If the request body is too big to be kept in memory, it can't be logged due to Nginx's limitations. |
+| include_req_body_expr | array | False | | Filter for when the `include_req_body` attribute is set to `true`. Request body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
+| include_resp_body | boolean | False | false | When set to `true` includes the response body in the log. |
+| include_resp_body_expr | array | False | | When the `include_resp_body` attribute is set to `true`, use this to filter based on [lua-resty-expr](https://github.com/api7/lua-resty-expr). If present, only logs the response if the expression evaluates to `true`. |
NOTE: `encrypt_fields = {"auth.password"}` is also defined in the schema, which means that the field will be stored encrypted in etcd. See [encrypted storage fields](../plugin-develop.md#encrypted-storage-fields).
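+
+As a minimal sketch (the upstream node and index name are illustrative), the following enables response body logging only for JSON responses:
+
+```shell
+curl http://127.0.0.1:9180/apisix/admin/routes/1 \
+-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "uri": "/hello",
+    "plugins": {
+        "elasticsearch-logger": {
+            "endpoint_addr": "http://127.0.0.1:9200",
+            "field": {"index": "services"},
+            "include_resp_body": true,
+            "include_resp_body_expr": [
+                ["sent_http_content_type", "==", "application/json"]
+            ]
+        }
+    },
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {"127.0.0.1:1980": 1}
+    }
+}'
+```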
diff --git a/docs/en/latest/plugins/forward-auth.md b/docs/en/latest/plugins/forward-auth.md
index 2fe89d39b097..4b8d138c93b8 100644
--- a/docs/en/latest/plugins/forward-auth.md
+++ b/docs/en/latest/plugins/forward-auth.md
@@ -49,6 +49,7 @@ This Plugin moves the authentication and authorization logic to a dedicated exte
| keepalive_timeout | integer | False | 60000ms | [1000, ...]ms | Idle time after which the connection is closed. |
| keepalive_pool | integer | False | 5 | [1, ...]ms | Connection pool limit. |
| allow_degradation | boolean | False | false | | When set to `true`, allows authentication to be skipped when authentication server is unavailable. |
+| status_on_error | integer | False | 403 | [200,...,599] | Sets the HTTP status returned to the client when a network error occurs while contacting the authorization service. Defaults to `403` (Forbidden). |
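+
+As a minimal sketch (the authorization service address is illustrative), the following returns `503` to the client instead of the default `403` when the authorization service cannot be reached:
+
+```shell
+curl http://127.0.0.1:9180/apisix/admin/routes/1 \
+-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "uri": "/hello",
+    "plugins": {
+        "forward-auth": {
+            "uri": "http://127.0.0.1:9081/auth",
+            "request_headers": ["Authorization"],
+            "status_on_error": 503
+        }
+    },
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {"127.0.0.1:1980": 1}
+    }
+}'
+```
+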
## Data definition
diff --git a/docs/en/latest/plugins/jwe-decrypt.md b/docs/en/latest/plugins/jwe-decrypt.md
index 155b793e476c..9969094af0a0 100644
--- a/docs/en/latest/plugins/jwe-decrypt.md
+++ b/docs/en/latest/plugins/jwe-decrypt.md
@@ -38,11 +38,11 @@ This Plugin adds an endpoint `/apisix/plugin/jwe/encrypt` for JWE encryption. Fo
For Consumer:
-| Name | Type | Required | Default | Valid values | Description |
-|---------------|---------|-------------------------------------------------------|---------|-----------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| key | string | True | | | Unique key for a Consumer. |
-| secret | string | True | | | The decryption key. The key could be saved in a secret manager using the [Secret](../terminology/secret.md) resource. |
-| is_base64_encoded | boolean | False | false | | Set to true if the secret is base64 encoded. |
+| Name | Type | Required | Default | Valid values | Description |
+|---------------|---------|-------------------------------------------------------|---------|-----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------|
+| key | string | True | | | Unique key for a Consumer. |
+| secret | string | True | | | The decryption key. Must be 32 characters. The key could be saved in a secret manager using the [Secret](../terminology/secret.md) resource. |
+| is_base64_encoded | boolean | False | false | | Set to true if the secret is base64 encoded. |
For Route:
@@ -63,7 +63,7 @@ curl http://127.0.0.1:9180/apisix/admin/consumers -H 'X-API-KEY: edd1c9f034335f1
"plugins": {
"jwe-decrypt": {
"key": "user-key",
- "secret": "key-length-must-be-at-least-32-chars"
+ "secret": "-secret-length-must-be-32-chars-"
}
}
}'
diff --git a/docs/en/latest/plugins/multi-auth.md b/docs/en/latest/plugins/multi-auth.md
index a55b69fb344e..e79914743957 100644
--- a/docs/en/latest/plugins/multi-auth.md
+++ b/docs/en/latest/plugins/multi-auth.md
@@ -75,8 +75,6 @@ curl http://127.0.0.1:9180/apisix/admin/consumers -H 'X-API-KEY: edd1c9f034335f1
}'
```
-You can also use the [APISIX Dashboard](/docs/dashboard/USER_GUIDE) to complete the operation through a web UI.
-
Once you have created Consumer objects, you can then configure a Route or a Service to authenticate requests:
```shell
@@ -113,16 +111,16 @@ curl http://127.0.0.1:9180/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f13
After you have configured the Plugin as mentioned above, you can make a request to the Route as shown below:
-request with basic-auth
+Send a request with `basic-auth` credentials:
```shell
curl -i -ufoo1:bar1 http://127.0.0.1:9080/hello
```
-request with key-auth
+Send a request with `key-auth` credentials:
```shell
-curl http://127.0.0.2:9080/hello -H 'apikey: auth-one' -i
+curl http://127.0.0.1:9080/hello -H 'apikey: auth-one' -i
```
```
@@ -131,11 +129,9 @@ HTTP/1.1 200 OK
hello, world
```
-If the request is not authorized, an error will be thrown:
+If the request is not authorized, a `401 Unauthorized` error will be thrown:
-```shell
-HTTP/1.1 401 Unauthorized
-...
+```json
{"message":"Authorization Failed"}
```
diff --git a/docs/en/latest/plugins/openid-connect.md b/docs/en/latest/plugins/openid-connect.md
index ed9d41a58792..1fcba8811d8f 100644
--- a/docs/en/latest/plugins/openid-connect.md
+++ b/docs/en/latest/plugins/openid-connect.md
@@ -245,7 +245,7 @@ You should also ensure that the `redirect_uri` include the scheme, such as `http
#### 2. Missing Session Secret
-If you deploy APISIX in the [standalone mode](/apisix/production/deployment-modes#standalone-mode), make sure that `session.secret` is configured.
+If you deploy APISIX in [standalone mode](../deployment-modes.md#standalone), make sure that `session.secret` is configured.
User sessions are stored in browser as cookies and encrypted with session secret. The secret is automatically generated and saved to etcd if no secret is configured through the `session.secret` attribute. However, in standalone mode, etcd is no longer the configuration center. Therefore, you should explicitly configure `session.secret` for this plugin in the YAML configuration center `apisix.yaml`.
diff --git a/docs/en/latest/plugins/skywalking-logger.md b/docs/en/latest/plugins/skywalking-logger.md
index b72ec5577e62..7150c0bb2f24 100644
--- a/docs/en/latest/plugins/skywalking-logger.md
+++ b/docs/en/latest/plugins/skywalking-logger.md
@@ -44,6 +44,9 @@ If there is an existing tracing context, it sets up the trace-log correlation au
| timeout | integer | False | 3 | [1,...] | Time to keep the connection alive for after sending a request. |
| name | string | False | "skywalking logger" | | Unique identifier to identify the logger. If you use Prometheus to monitor APISIX metrics, the name is exported in `apisix_batch_process_entries`. |
| include_req_body | boolean | False | false | [false, true] | When set to `true` includes the request body in the log. |
+| include_req_body_expr | array | False | | | Filter for when the `include_req_body` attribute is set to `true`. Request body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
+| include_resp_body | boolean | False | false | [false, true] | When set to `true` includes the response body in the log. |
+| include_resp_body_expr | array | False | | | When the `include_resp_body` attribute is set to `true`, use this to filter based on [lua-resty-expr](https://github.com/api7/lua-resty-expr). If present, only logs the response if the expression evaluates to `true`. |
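+
+As a minimal sketch, the following logs the request body only when the client sends a `log_body=yes` query argument:
+
+```shell
+curl http://127.0.0.1:9180/apisix/admin/routes/1 \
+-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "uri": "/hello",
+    "plugins": {
+        "skywalking-logger": {
+            "endpoint_addr": "http://127.0.0.1:12800",
+            "include_req_body": true,
+            "include_req_body_expr": [
+                ["arg_log_body", "==", "yes"]
+            ]
+        }
+    },
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {"127.0.0.1:1980": 1}
+    }
+}'
+```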
This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration.
diff --git a/docs/en/latest/plugins/sls-logger.md b/docs/en/latest/plugins/sls-logger.md
index 47dc9449bbcf..94199a856730 100644
--- a/docs/en/latest/plugins/sls-logger.md
+++ b/docs/en/latest/plugins/sls-logger.md
@@ -46,6 +46,9 @@ It might take some time to receive the log data. It will be automatically sent a
| access_key_id | True | AccessKey ID in Alibaba Cloud. See [Authorization](https://www.alibabacloud.com/help/en/log-service/latest/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service) for more details. |
| access_key_secret | True | AccessKey Secret in Alibaba Cloud. See [Authorization](https://www.alibabacloud.com/help/en/log-service/latest/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service) for more details. |
| include_req_body | True | When set to `true`, includes the request body in the log. |
+| include_req_body_expr | False | Filter for when the `include_req_body` attribute is set to `true`. Request body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
+| include_resp_body | False | When set to `true`, includes the response body in the log. |
+| include_resp_body_expr | False | Filter for when the `include_resp_body` attribute is set to `true`. Response body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
| name | False | Unique identifier for the batch processor. If you use Prometheus to monitor APISIX metrics, the name is exported in `apisix_batch_process_entries`. |
NOTE: `encrypt_fields = {"access_key_secret"}` is also defined in the schema, which means that the field will be stored encrypted in etcd. See [encrypted storage fields](../plugin-develop.md#encrypted-storage-fields).
diff --git a/docs/en/latest/plugins/syslog.md b/docs/en/latest/plugins/syslog.md
index 1a7e5e4a8f79..11142807dd03 100644
--- a/docs/en/latest/plugins/syslog.md
+++ b/docs/en/latest/plugins/syslog.md
@@ -46,7 +46,10 @@ Logs can be set as JSON objects.
| sock_type | string | False | "tcp" | ["tcp", "udp"] | Transport layer protocol to use. |
| pool_size | integer | False | 5 | [5, ...] | Keep-alive pool size used by `sock:keepalive`. |
| log_format | object | False | | | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
-| include_req_body | boolean | False | false | | When set to `true` includes the request body in the log. |
+| include_req_body | boolean | False | false | [false, true] | When set to `true` includes the request body in the log. |
+| include_req_body_expr | array | False | | | Filter for when the `include_req_body` attribute is set to `true`. Request body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
+| include_resp_body | boolean | False | false | [false, true] | When set to `true` includes the response body in the log. |
+| include_resp_body_expr | array | False | | | When the `include_resp_body` attribute is set to `true`, use this to filter based on [lua-resty-expr](https://github.com/api7/lua-resty-expr). If present, only logs the response if the expression evaluates to `true`. |
This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration.
diff --git a/docs/en/latest/plugins/tcp-logger.md b/docs/en/latest/plugins/tcp-logger.md
index e5bffac3500e..b163029cd143 100644
--- a/docs/en/latest/plugins/tcp-logger.md
+++ b/docs/en/latest/plugins/tcp-logger.md
@@ -46,7 +46,10 @@ This plugin also allows to push logs as a batch to your external TCP server. It
| log_format | object | False | | | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
| tls | boolean | False | false | | When set to `true` performs SSL verification. |
| tls_options | string | False | | | TLS options. |
-| include_req_body | boolean | False | false | | When set to `true` includes the request body in the log. |
+| include_req_body | boolean | False | false | [false, true] | When set to `true` includes the request body in the log. |
+| include_req_body_expr | array | False | | | Filter for when the `include_req_body` attribute is set to `true`. Request body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
+| include_resp_body | boolean | False | false | [false, true] | When set to `true` includes the response body in the log. |
+| include_resp_body_expr | array | False | | | Filter for when the `include_resp_body` attribute is set to `true`. Response body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration.
diff --git a/docs/en/latest/plugins/udp-logger.md b/docs/en/latest/plugins/udp-logger.md
index e3acd0030a00..9af8c98dece1 100644
--- a/docs/en/latest/plugins/udp-logger.md
+++ b/docs/en/latest/plugins/udp-logger.md
@@ -44,7 +44,10 @@ This plugin also allows to push logs as a batch to your external UDP server. It
| timeout | integer | False | 3 | [1,...] | Timeout for the upstream to send data. |
| log_format | object | False | | | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
| name | string | False | "udp logger" | | Unique identifier for the batch processor. If you use Prometheus to monitor APISIX metrics, the name is exported in `apisix_batch_process_entries`. processor. |
-| include_req_body | boolean | False | false | | When set to `true` includes the request body in the log. |
+| include_req_body | boolean | False | false | [false, true] | When set to `true` includes the request body in the log. |
+| include_req_body_expr | array | False | | | Filter for when the `include_req_body` attribute is set to `true`. Request body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
+| include_resp_body | boolean | False | false | [false, true] | When set to `true` includes the response body in the log. |
+| include_resp_body_expr | array | False | | | Filter for when the `include_resp_body` attribute is set to `true`. Response body is only logged when the expression set here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more. |
This Plugin supports using batch processors to aggregate and process entries (logs/data) in a batch. This avoids the need for frequently submitting the data. The batch processor submits data every `5` seconds or when the data in the queue reaches `1000`. See [Batch Processor](../batch-processor.md#configuration) for more information or setting your custom configuration.
diff --git a/docs/zh/latest/plugins/elasticsearch-logger.md b/docs/zh/latest/plugins/elasticsearch-logger.md
index d97311b17ffb..7b0022e29ef0 100644
--- a/docs/zh/latest/plugins/elasticsearch-logger.md
+++ b/docs/zh/latest/plugins/elasticsearch-logger.md
@@ -49,6 +49,10 @@ description: 本文介绍了 API 网关 Apache APISIX 的 elasticsearch-logger
| auth.password | string | 是 | | Elasticsearch [authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html) 密码。 |
| ssl_verify | boolean | 否 | true | 当设置为 `true` 时则启用 SSL 验证。更多信息请参考 [lua-nginx-module](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake)。 |
| timeout | integer | 否 | 10 | 发送给 Elasticsearch 请求超时时间。 |
+| include_req_body | boolean | 否 | false | 当设置为 `true` 时,包含请求体。**注意**:如果请求体无法完全存放在内存中,由于 NGINX 的限制,APISIX 无法将它记录下来。 |
+| include_req_body_expr | array | 否 | | 当 `include_req_body` 属性设置为 `true` 时的过滤器。只有当此处设置的表达式求值为 `true` 时,才会记录请求体。有关更多信息,请参阅 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 。 |
+| include_resp_body | boolean | 否 | false | 当设置为 `true` 时,包含响应体。 |
+| include_resp_body_expr | array | 否 | | 当 `include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
注意:schema 中还定义了 `encrypt_fields = {"auth.password"}`,这意味着该字段将会被加密存储在 etcd 中。具体参考 [加密存储字段](../plugin-develop.md#加密存储字段)。
diff --git a/docs/zh/latest/plugins/forward-auth.md b/docs/zh/latest/plugins/forward-auth.md
index a7babfe4601e..82aa5f4c12ce 100644
--- a/docs/zh/latest/plugins/forward-auth.md
+++ b/docs/zh/latest/plugins/forward-auth.md
@@ -48,6 +48,7 @@ description: 本文介绍了关于 Apache APISIX `forward-auth` 插件的基本
| keepalive_timeout | integer | 否 | 60000ms | [1000, ...]ms | 长连接超时时间。 |
| keepalive_pool | integer | 否 | 5 | [1, ...]ms | 长连接池大小。 |
| allow_degradation | boolean | 否 | false | | 当设置为 `true` 时,允许在身份验证服务器不可用时跳过身份验证。 |
+| status_on_error | integer | 否 | 403 | [200,...,599] | 设置授权服务出现网络错误时返回给客户端的 HTTP 状态码,默认为 `403`。 |
## 数据定义
diff --git a/docs/zh/latest/plugins/jwe-decrypt.md b/docs/zh/latest/plugins/jwe-decrypt.md
index 7ef1f295b678..ce5c98fd6a8c 100644
--- a/docs/zh/latest/plugins/jwe-decrypt.md
+++ b/docs/zh/latest/plugins/jwe-decrypt.md
@@ -38,11 +38,11 @@ description: 本文档包含了关于 APISIX jwe-decrypt 插件的相关信息
Consumer 配置:
-| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 |
-|---------------|---------|-------|-------|-----|----------------------------------------------------------------------|
-| key | string | True | | | Consumer 的唯一 key |
-| secret | string | True | | | 解密密钥。秘钥可以使用 [Secret](../terminology/secret.md) 资源保存在密钥管理服务中(最小 32 位) |
-| is_base64_encoded | boolean | False | false | | 如果密钥是 Base64 编码,则需要配置为 `true` |
+| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 |
+|---------------|---------|-------|-------|-----|-------------------------------------------------------------|
+| key | string | True | | | Consumer 的唯一 key |
+| secret | string | True | | | 解密密钥,必须为 32 位。秘钥可以使用 [Secret](../terminology/secret.md) 资源保存在密钥管理服务中 |
+| is_base64_encoded | boolean | False | false | | 如果密钥是 Base64 编码,则需要配置为 `true` |
Route 配置:
@@ -63,7 +63,7 @@ curl http://127.0.0.1:9180/apisix/admin/consumers -H 'X-API-KEY: edd1c9f034335f1
"plugins": {
"jwe-decrypt": {
"key": "user-key",
- "secret": "key-length-must-be-at-least-32-chars"
+ "secret": "-secret-length-must-be-32-chars-"
}
}
}'
diff --git a/docs/zh/latest/plugins/loggly.md b/docs/zh/latest/plugins/loggly.md
index 27d813c4a9bd..5bf92cf5a9ae 100644
--- a/docs/zh/latest/plugins/loggly.md
+++ b/docs/zh/latest/plugins/loggly.md
@@ -43,6 +43,7 @@ description: API 网关 Apache APISIX loggly 插件可用于将日志转发到 S
| tags | array | 否 | | 元数据将包含在任何事件日志中,以帮助进行分段和过滤。 |
| log_format | object | 否 | | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
| include_req_body | boolean | 否 | false | 当设置为 `true` 时,包含请求体。**注意**:如果请求体无法完全存放在内存中,由于 NGINX 的限制,APISIX 无法将它记录下来。 |
+| include_req_body_expr | array | 否 | | 当 `include_req_body` 属性设置为 `true` 时的过滤器。只有当此处设置的表达式求值为 `true` 时,才会记录请求体。有关更多信息,请参阅 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 。 |
| include_resp_body | boolean | 否 | false | 当设置为 `true` 时,包含响应体。 |
| include_resp_body_expr | array | 否 | | 当 `include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
diff --git a/docs/zh/latest/plugins/multi-auth.md b/docs/zh/latest/plugins/multi-auth.md
index d06c2b2346f5..852911d7e295 100644
--- a/docs/zh/latest/plugins/multi-auth.md
+++ b/docs/zh/latest/plugins/multi-auth.md
@@ -75,8 +75,6 @@ curl http://127.0.0.1:9180/apisix/admin/consumers -H 'X-API-KEY: edd1c9f034335f1
}'
```
-您也可以使用 [APISIX Dashboard](/docs/dashboard/USER_GUIDE) 通过 web UI 来完成操作。
-
创建 Consumer 之后,您可以配置一个路由或服务来验证请求:
```shell
@@ -122,7 +120,7 @@ curl -i -ufoo1:bar1 http://127.0.0.1:9080/hello
请求开启 key-auth 插件的 API
```shell
-curl http://127.0.0.2:9080/hello -H 'apikey: auth-one' -i
+curl http://127.0.0.1:9080/hello -H 'apikey: auth-one' -i
```
```
@@ -131,11 +129,9 @@ HTTP/1.1 200 OK
hello, world
```
-如果请求未授权,将会返回如下错误:
+如果请求未授权,将会返回 `401 Unauthorized` 错误:
-```shell
-HTTP/1.1 401 Unauthorized
-...
+```json
{"message":"Authorization Failed"}
```
diff --git a/docs/zh/latest/plugins/openid-connect.md b/docs/zh/latest/plugins/openid-connect.md
index 723c5eb78014..bdcd22530c09 100644
--- a/docs/zh/latest/plugins/openid-connect.md
+++ b/docs/zh/latest/plugins/openid-connect.md
@@ -244,7 +244,7 @@ the error request to the redirect_uri path, but there's no session state found
#### 2. 缺少 Session Secret
-如果您在[standalone 模式](/apisix/product/deployment-modes#standalone-mode)下部署 APISIX,请确保配置了 `session.secret`。
+如果您在[standalone 模式](../../../en/latest/deployment-modes.md#standalone)下部署 APISIX,请确保配置了 `session.secret`。
用户 session 作为 cookie 存储在浏览器中,并使用 session 密钥进行加密。如果没有通过 `session.secret` 属性配置机密,则会自动生成机密并将其保存到 etcd。然而,在独立模式下,etcd 不再是配置中心。因此,您应该在 YAML 配置中心 `apisix.yaml` 中为此插件显式配置 `session.secret`。
diff --git a/docs/zh/latest/plugins/skywalking-logger.md b/docs/zh/latest/plugins/skywalking-logger.md
index 87cabb3b450a..8ef7417582c4 100644
--- a/docs/zh/latest/plugins/skywalking-logger.md
+++ b/docs/zh/latest/plugins/skywalking-logger.md
@@ -46,6 +46,9 @@ description: 本文将介绍 API 网关 Apache APISIX 如何通过 skywalking-lo
| timeout | integer | 否 | 3 | [1,...] | 发送请求后保持连接活动的时间。 |
| name | string | 否 | "skywalking logger" | | 标识 logger 的唯一标识符。如果您使用 Prometheus 监视 APISIX 指标,名称将以 `apisix_batch_process_entries` 导出。 |
| include_req_body | boolean | 否 | false | [false, true] | 当设置为 `true` 时,将请求正文包含在日志中。 |
+| include_req_body_expr | array | 否 | | | 当 `include_req_body` 属性设置为 `true` 时的过滤器。只有当此处设置的表达式求值为 `true` 时,才会记录请求体。有关更多信息,请参阅 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 。 |
+| include_resp_body | boolean | 否 | false | [false, true] | 当设置为 `true` 时,包含响应体。 |
+| include_resp_body_expr | array | 否 | | | 当 `include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
该插件支持使用批处理器来聚合并批量处理条目(日志/数据)。这样可以避免插件频繁地提交数据,默认设置情况下批处理器会每 `5` 秒钟或队列中的数据达到 `1000` 条时提交数据,如需了解批处理器相关参数设置,请参考 [Batch-Processor](../batch-processor.md#配置)。
diff --git a/docs/zh/latest/plugins/sls-logger.md b/docs/zh/latest/plugins/sls-logger.md
index 7bd85c81beaf..64613a34a66c 100644
--- a/docs/zh/latest/plugins/sls-logger.md
+++ b/docs/zh/latest/plugins/sls-logger.md
@@ -43,6 +43,9 @@ title: sls-logger
| access_key_id | 必须的 | AccessKey ID。建议使用阿里云子账号 AK,详情请参见 [授权](https://help.aliyun.com/document_detail/47664.html?spm=a2c4g.11186623.2.15.49301b47lfvxXP#task-xsk-ttc-ry)。|
| access_key_secret | 必须的 | AccessKey Secret。建议使用阿里云子账号 AK,详情请参见 [授权](https://help.aliyun.com/document_detail/47664.html?spm=a2c4g.11186623.2.15.49301b47lfvxXP#task-xsk-ttc-ry)。|
| include_req_body | 可选的 | 是否包含请求体。|
+| include_req_body_expr | 可选的 | 当 `include_req_body` 属性设置为 `true` 时的过滤器。只有当此处设置的表达式求值为 `true` 时,才会记录请求体。有关更多信息,请参阅 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 。 |
+| include_resp_body | 可选的 | 当设置为 `true` 时,日志中将包含响应体。 |
+| include_resp_body_expr | 可选的 | 当 `include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
|name| 可选的 | 批处理名字。如果您使用 Prometheus 监视 APISIX 指标,名称将以 `apisix_batch_process_entries` 导出。|
注意:schema 中还定义了 `encrypt_fields = {"access_key_secret"}`,这意味着该字段将会被加密存储在 etcd 中。具体参考 [加密存储字段](../plugin-develop.md#加密存储字段)。
diff --git a/docs/zh/latest/plugins/syslog.md b/docs/zh/latest/plugins/syslog.md
index 4707bba8b6fa..fd847f12211f 100644
--- a/docs/zh/latest/plugins/syslog.md
+++ b/docs/zh/latest/plugins/syslog.md
@@ -49,7 +49,10 @@ description: API 网关 Apache APISIX syslog 插件可用于将日志推送到 S
| retry_delay | integer | 否 | | [0, ...] | 重试连接到日志服务器或重试向日志服务器发送日志消息之前的时间延迟(以毫秒为单位)。 |
| pool_size | integer | 否 | 5 | [5, ...] | `sock:keepalive` 使用的 Keepalive 池大小。 |
| log_format | object | 否 | | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
-| include_req_body | boolean | 否 | false | | 当设置为 `true` 时包括请求体。 |
+| include_req_body | boolean | 否 | false | [false, true] | 当设置为 `true` 时包括请求体。 |
+| include_req_body_expr | array | 否 | | | 当 `include_req_body` 属性设置为 `true` 时的过滤器。只有当此处设置的表达式求值为 `true` 时,才会记录请求体。有关更多信息,请参阅 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 。 |
+| include_resp_body | boolean | 否 | false | [false, true] | 当设置为 `true` 时,包含响应体。 |
+| include_resp_body_expr | array | 否 | | | 当 `include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
该插件支持使用批处理器来聚合并批量处理条目(日志/数据)。这样可以避免插件频繁地提交数据,默认情况下批处理器每 `5` 秒钟或队列中的数据达到 `1000` 条时提交数据,如需了解批处理器相关参数设置,请参考 [Batch-Processor](../batch-processor.md#配置)。
diff --git a/docs/zh/latest/plugins/tcp-logger.md b/docs/zh/latest/plugins/tcp-logger.md
index 3984fb1d407a..17203a3e5ec0 100644
--- a/docs/zh/latest/plugins/tcp-logger.md
+++ b/docs/zh/latest/plugins/tcp-logger.md
@@ -43,7 +43,10 @@ description: 本文介绍了 API 网关 Apache APISIX 如何使用 tcp-logger
| log_format | object | 否 | | | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
| tls | boolean | 否 | false | | 用于控制是否执行 SSL 验证。 |
| tls_options | string | 否 | | | TLS 选项。 |
-| include_req_body | boolean | 否 | | | 当设置为 `true` 时,日志中将包含请求体。 |
+| include_req_body | boolean | 否 | | [false, true] | 当设置为 `true` 时,日志中将包含请求体。 |
+| include_req_body_expr | array | 否 | | | 当 `include_req_body` 属性设置为 `true` 时的过滤器。只有当此处设置的表达式求值为 `true` 时,才会记录请求体。有关更多信息,请参阅 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 。 |
+| include_resp_body | boolean | 否 | false | [false, true] | 当设置为 `true` 时,日志中将包含响应体。 |
+| include_resp_body_expr | array | 否 | | | 当 `include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
该插件支持使用批处理器来聚合并批量处理条目(日志/数据)。这样可以避免插件频繁地提交数据,默认情况下批处理器每 `5` 秒钟或队列中的数据达到 `1000` 条时提交数据,如需了解批处理器相关参数设置,请参考 [Batch-Processor](../batch-processor.md#配置)。
diff --git a/docs/zh/latest/plugins/tencent-cloud-cls.md b/docs/zh/latest/plugins/tencent-cloud-cls.md
index 88bff5b06619..0e45141d3d3f 100644
--- a/docs/zh/latest/plugins/tencent-cloud-cls.md
+++ b/docs/zh/latest/plugins/tencent-cloud-cls.md
@@ -42,7 +42,9 @@ description: The API gateway Apache APISIX tencent-cloud-cls plugin can be used to push logs
| secret_key | string | Yes | | | Key of the cloud API credential. |
| sample_ratio | number | No | 1 | [0.00001, 1] | Sampling ratio. When set to `1`, all requests are sampled. |
| include_req_body | boolean | No | false | [false, true] | When set to `true`, the request body is included in the log. |
+| include_req_body_expr | array | No | | | Filter applied when the `include_req_body` attribute is set to `true`. The request body is logged only when the expression configured here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more information. |
| include_resp_body | boolean | No | false | [false, true] | When set to `true`, the response body is included in the log. |
+| include_resp_body_expr | array | No | | | Filter applied when the `include_resp_body` attribute is set to `true`. The response body is logged only when the expression configured here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more information. |
| global_tag | object | No | | | JSON key-value pairs that can be written into every log entry for easier searching in CLS. |
| log_format | object | No | | | Log format declared as key-value pairs in JSON. Only string values are supported. A value prefixed with `$` refers to an [APISIX variable](../apisix-variable.md) or an [NGINX built-in variable](http://nginx.org/en/docs/varindex.html). |
diff --git a/docs/zh/latest/plugins/udp-logger.md b/docs/zh/latest/plugins/udp-logger.md
index 00f00d641703..a8ca60778551 100644
--- a/docs/zh/latest/plugins/udp-logger.md
+++ b/docs/zh/latest/plugins/udp-logger.md
@@ -42,7 +42,10 @@ description: This document describes how the API gateway Apache APISIX uses the udp-logger
| timeout | integer | No | 1000 | [1,...] | Timeout for sending data. |
| log_format | object | No | | | Log format declared as key-value pairs in JSON. Only string values are supported. A value prefixed with `$` refers to an [APISIX variable](../apisix-variable.md) or an [NGINX built-in variable](http://nginx.org/en/docs/varindex.html). |
| name | string | No | "udp logger" | | Unique identifier for the logger. If you use Prometheus to monitor APISIX metrics, the name is exported in `apisix_batch_process_entries`. |
-| include_req_body | boolean | No | | | When set to `true`, the request body is included in the log. |
+| include_req_body | boolean | No | | [false, true] | When set to `true`, the request body is included in the log. |
+| include_req_body_expr | array | No | | | Filter applied when the `include_req_body` attribute is set to `true`. The request body is logged only when the expression configured here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more information. |
+| include_resp_body | boolean | No | false | [false, true] | When set to `true`, the response body is included in the log. |
+| include_resp_body_expr | array | No | | | Filter applied when the `include_resp_body` attribute is set to `true`. The response body is logged only when the expression configured here evaluates to `true`. See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more information. |
The plugin supports a batch processor for aggregating and submitting entries (logs and data) in batches. This avoids frequent submissions by the plugin; by default the batch processor flushes every `5` seconds or once the queue holds `1000` entries. For the batch processor parameters, see [Batch-Processor](../batch-processor.md#配置).
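All of the loggers touched above hand their entries to this shared batch processor. A hedged sketch of where those knobs sit in a route's plugin configuration; the `1000`-entry and `5`-second values are the documented defaults, the rest are illustrative:

```lua
-- Illustrative plugin configuration showing the batch-processor keys
-- that sit alongside a logger's own attributes (udp-logger here).
local udp_logger_conf = {
    host = "127.0.0.1",
    port = 8127,
    batch_max_size = 1000,   -- flush once the queue holds 1000 entries
    inactive_timeout = 5,    -- ...or after 5 seconds without new entries
    buffer_duration = 60,    -- max age (seconds) of the oldest entry in a batch
    max_retry_count = 0,     -- retries before a failed batch is dropped
    include_req_body = true,
}
```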
diff --git a/t/admin/plugins.t b/t/admin/plugins.t
index c174d5f29b52..911205f48cb4 100644
--- a/t/admin/plugins.t
+++ b/t/admin/plugins.t
@@ -109,6 +109,7 @@ degraphql
kafka-proxy
grpc-transcode
grpc-web
+http-dubbo
public-api
prometheus
datadog
diff --git a/t/cli/test_tls_over_tcp.sh b/t/cli/test_tls_over_tcp.sh
index 5d378ce6a9ad..5f95f29db740 100755
--- a/t/cli/test_tls_over_tcp.sh
+++ b/t/cli/test_tls_over_tcp.sh
@@ -39,7 +39,13 @@ nginx_config:
make run
sleep 0.1
- ./utils/create-ssl.py t/certs/mtls_server.crt t/certs/mtls_server.key test.com
+curl http://127.0.0.1:9180/apisix/admin/ssls/1 \
+-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+ "cert" : "'"$(cat t/certs/mtls_server.crt)"'",
+ "key": "'"$(cat t/certs/mtls_server.key)"'",
+ "snis": ["test.com"]
+}'
curl -k -i http://127.0.0.1:9180/apisix/admin/stream_routes/1 \
-H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d \
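The deleted `utils/create-ssl.py` helper is replaced by the inline `curl` above. For completeness, the same SSL object can also be seeded from a test's Lua block; a sketch under the assumption that `lib.test_admin`'s `read_file`/`test` helpers and the `toolkit.json` module behave as they do elsewhere in `t/`:

```lua
-- Sketch: creating the same SSL object as the inline curl above,
-- using helpers that appear elsewhere in this repo's test suite.
local test_admin = require("lib.test_admin")
local json = require("toolkit.json")

local code, body = test_admin.test('/apisix/admin/ssls/1', ngx.HTTP_PUT,
    json.encode({
        cert = test_admin.read_file("t/certs/mtls_server.crt"),
        key  = test_admin.read_file("t/certs/mtls_server.key"),
        snis = {"test.com"},
    })
)
```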
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/pom.xml b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/pom.xml
new file mode 100644
index 000000000000..883ff366aa1e
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/pom.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.dubbo.backend</groupId>
+        <artifactId>dubbo-serialization-backend</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <groupId>org.apache.dubbo.backend</groupId>
+    <artifactId>dubbo-serialization-backend-interface</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <packaging>jar</packaging>
+    <name>${project.artifactId}</name>
+
+    <properties>
+        <skip_maven_deploy>true</skip_maven_deploy>
+    </properties>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/src/main/java/org/apache/dubbo/backend/DubboSerializationTestService.java b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/src/main/java/org/apache/dubbo/backend/DubboSerializationTestService.java
new file mode 100644
index 000000000000..fcc2a7143a13
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/src/main/java/org/apache/dubbo/backend/DubboSerializationTestService.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dubbo.backend;
+
+public interface DubboSerializationTestService {
+
+ PoJo testPoJo(PoJo input);
+
+ PoJo[] testPoJos(PoJo[] input);
+
+ void testVoid();
+
+ void testFailure();
+
+ void testTimeout();
+}
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/src/main/java/org/apache/dubbo/backend/PoJo.java b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/src/main/java/org/apache/dubbo/backend/PoJo.java
new file mode 100644
index 000000000000..150d035a1f6e
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-interface/src/main/java/org/apache/dubbo/backend/PoJo.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dubbo.backend;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class PoJo {
+ private String aString;
+ private Boolean aBoolean;
+ private Byte aByte;
+ private Character acharacter;
+ private Integer aInt;
+ private Float aFloat;
+ private Double aDouble;
+ private Long aLong;
+ private Short aShort;
+ private String[] strings;
+ private Map stringMap;
+
+ public String getaString() {
+ return aString;
+ }
+
+ public void setaString(String aString) {
+ this.aString = aString;
+ }
+
+ public Boolean getaBoolean() {
+ return aBoolean;
+ }
+
+ public void setaBoolean(Boolean aBoolean) {
+ this.aBoolean = aBoolean;
+ }
+
+ public Byte getaByte() {
+ return aByte;
+ }
+
+ public void setaByte(Byte aByte) {
+ this.aByte = aByte;
+ }
+
+ public Character getAcharacter() {
+ return acharacter;
+ }
+
+ public void setAcharacter(Character acharacter) {
+ this.acharacter = acharacter;
+ }
+
+ public Integer getaInt() {
+ return aInt;
+ }
+
+ public void setaInt(Integer aInt) {
+ this.aInt = aInt;
+ }
+
+ public Float getaFloat() {
+ return aFloat;
+ }
+
+ public void setaFloat(Float aFloat) {
+ this.aFloat = aFloat;
+ }
+
+ public Double getaDouble() {
+ return aDouble;
+ }
+
+ public void setaDouble(Double aDouble) {
+ this.aDouble = aDouble;
+ }
+
+ public Long getaLong() {
+ return aLong;
+ }
+
+ public void setaLong(Long aLong) {
+ this.aLong = aLong;
+ }
+
+ public Short getaShort() {
+ return aShort;
+ }
+
+ public void setaShort(Short aShort) {
+ this.aShort = aShort;
+ }
+
+ public Map getStringMap() {
+ return stringMap;
+ }
+
+ public void setStringMap(Map stringMap) {
+ this.stringMap = stringMap;
+ }
+
+ public String[] getStrings() {
+ return strings;
+ }
+
+ public void setStrings(String[] strings) {
+ this.strings = strings;
+ }
+
+ public static PoJo getTestInstance() {
+ PoJo poJo = new PoJo();
+ poJo.aBoolean = true;
+ poJo.aByte = 1;
+ poJo.acharacter = 'a';
+ poJo.aInt = 2;
+ poJo.aDouble = 1.1;
+ poJo.aFloat = 1.2f;
+ poJo.aLong = 3L;
+ poJo.aShort = 4;
+ poJo.aString = "aa";
+ HashMap poJoMap = new HashMap<>();
+ poJoMap.put("key", "value");
+ poJo.stringMap = poJoMap;
+ poJo.strings = new String[]{"aa", "bb"};
+ return poJo;
+ }
+}
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/pom.xml b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/pom.xml
new file mode 100644
index 000000000000..b5b762f92f52
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/pom.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.dubbo.backend</groupId>
+        <artifactId>dubbo-serialization-backend</artifactId>
+        <version>1.0.0-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <groupId>org.apache.dubbo.backend</groupId>
+    <artifactId>dubbo-serialization-backend-provider</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <packaging>jar</packaging>
+    <name>${project.artifactId}</name>
+
+    <properties>
+        <skip_maven_deploy>true</skip_maven_deploy>
+        <!-- version property names assumed -->
+        <slf4j.version>1.7.25</slf4j.version>
+        <log4j2.version>2.12.0</log4j2.version>
+        <dubbo.version>2.7.21</dubbo.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.dubbo.backend</groupId>
+            <artifactId>dubbo-serialization-backend-interface</artifactId>
+            <version>1.0.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.dubbo</groupId>
+            <artifactId>dubbo</artifactId>
+            <version>${dubbo.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.5.13</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <finalName>dubbo-demo-provider</finalName>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <mainClass>org.apache.dubbo.backend.provider.Provider</mainClass>
+                        </manifest>
+                    </archive>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>com.jolira</groupId>
+                <artifactId>onejar-maven-plugin</artifactId>
+                <version>1.4.4</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>one-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/java/org/apache/dubbo/backend/provider/DubboSerializationTestServiceImpl.java b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/java/org/apache/dubbo/backend/provider/DubboSerializationTestServiceImpl.java
new file mode 100644
index 000000000000..41e927391a36
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/java/org/apache/dubbo/backend/provider/DubboSerializationTestServiceImpl.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dubbo.backend.provider;
+
+import org.apache.dubbo.backend.DubboSerializationTestService;
+import org.apache.dubbo.backend.PoJo;
+
+import java.util.concurrent.TimeUnit;
+
+public class DubboSerializationTestServiceImpl implements DubboSerializationTestService {
+
+ @Override
+ public PoJo testPoJo(PoJo input) {
+ return input;
+ }
+
+ @Override
+ public PoJo[] testPoJos(PoJo[] input) {
+ return input;
+ }
+
+ @Override
+ public void testVoid() {
+ }
+
+ @Override
+ public void testFailure() {
+ throw new RuntimeException("testFailure");
+ }
+
+ @Override
+ public void testTimeout() {
+ try {
+ TimeUnit.SECONDS.sleep(10);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/java/org/apache/dubbo/backend/provider/Provider.java b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/java/org/apache/dubbo/backend/provider/Provider.java
new file mode 100644
index 000000000000..dde4580d570a
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/java/org/apache/dubbo/backend/provider/Provider.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dubbo.backend.provider;
+
+import com.alibaba.fastjson.JSONObject;
+import org.apache.dubbo.backend.PoJo;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+import java.util.concurrent.TimeUnit;
+
+public class Provider {
+
+ /**
+ * To get ipv6 address to work, add
+ * System.setProperty("java.net.preferIPv6Addresses", "true");
+ * before running your application.
+ */
+ public static void main(String[] args) throws Exception {
+
+ ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(new String[]{"META-INF/spring/dubbo-demo-provider.xml"});
+ String jsonString = JSONObject.toJSONString(PoJo.getTestInstance());
+ System.out.println(jsonString);
+ context.start();
+ while (true) {
+ try {
+ TimeUnit.MINUTES.sleep(1);
+ } catch (InterruptedException ex) {}
+ }
+ }
+}
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/META-INF/spring/dubbo-demo-provider.xml b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/META-INF/spring/dubbo-demo-provider.xml
new file mode 100644
index 000000000000..8dae775e4e49
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/META-INF/spring/dubbo-demo-provider.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+       xmlns:dubbo="http://dubbo.apache.org/schema/dubbo"
+       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+       http://dubbo.apache.org/schema/dubbo http://dubbo.apache.org/schema/dubbo/dubbo.xsd">
+
+    <dubbo:application name="demo-provider"/>
+    <dubbo:registry address="N/A"/>
+    <!-- port 30880 is the upstream address used by t/plugin/http-dubbo.t -->
+    <dubbo:protocol name="dubbo" port="30880"/>
+
+    <bean id="dubboSerializationTestService"
+          class="org.apache.dubbo.backend.provider.DubboSerializationTestServiceImpl"/>
+    <dubbo:service interface="org.apache.dubbo.backend.DubboSerializationTestService"
+                   ref="dubboSerializationTestService" version="1.0.0"/>
+</beans>
diff --git a/utils/create-ssl.py b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/dubbo.properties
old mode 100755
new mode 100644
similarity index 57%
rename from utils/create-ssl.py
rename to t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/dubbo.properties
index e8a3daa33b73..258fd3bf2628
--- a/utils/create-ssl.py
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/dubbo.properties
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-# coding: utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
@@ -16,26 +14,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-import sys
-# sudo pip install requests
-import requests
-
-# Usage: ./create-ssl.py t.crt t.key test.com
-if len(sys.argv) <= 3:
- print("bad argument")
- sys.exit(1)
-with open(sys.argv[1]) as f:
- cert = f.read()
-with open(sys.argv[2]) as f:
- key = f.read()
-sni = sys.argv[3]
-api_key = "edd1c9f034335f136f87ad84b625c8f1"
-resp = requests.put("http://127.0.0.1:9180/apisix/admin/ssls/1", json={
- "cert": cert,
- "key": key,
- "snis": [sni],
-}, headers={
- "X-API-KEY": api_key,
-})
-print(resp.status_code)
-print(resp.text)
+dubbo.application.qos.enable=false
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/log4j.properties b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/log4j.properties
new file mode 100644
index 000000000000..2f4f4addf137
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/src/main/resources/log4j.properties
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+###set log levels###
+log4j.rootLogger=info, stdout
+###output to the console###
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d{dd/MM/yy HH:mm:ss:SSS z}] %t %5p %c{2}: %m%n
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/META-INF/spring/dubbo-demo-provider.xml b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/META-INF/spring/dubbo-demo-provider.xml
new file mode 100644
index 000000000000..8dae775e4e49
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/META-INF/spring/dubbo-demo-provider.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+       xmlns:dubbo="http://dubbo.apache.org/schema/dubbo"
+       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+       http://dubbo.apache.org/schema/dubbo http://dubbo.apache.org/schema/dubbo/dubbo.xsd">
+
+    <dubbo:application name="demo-provider"/>
+    <dubbo:registry address="N/A"/>
+    <!-- port 30880 is the upstream address used by t/plugin/http-dubbo.t -->
+    <dubbo:protocol name="dubbo" port="30880"/>
+
+    <bean id="dubboSerializationTestService"
+          class="org.apache.dubbo.backend.provider.DubboSerializationTestServiceImpl"/>
+    <dubbo:service interface="org.apache.dubbo.backend.DubboSerializationTestService"
+                   ref="dubboSerializationTestService" version="1.0.0"/>
+</beans>
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/dubbo.properties b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/dubbo.properties
new file mode 100644
index 000000000000..258fd3bf2628
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/dubbo.properties
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+dubbo.application.qos.enable=false
diff --git a/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/log4j.properties b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/log4j.properties
new file mode 100644
index 000000000000..2f4f4addf137
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/dubbo-serialization-backend-provider/target/classes/log4j.properties
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+###set log levels###
+log4j.rootLogger=info, stdout
+###output to the console###
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d{dd/MM/yy HH:mm:ss:SSS z}] %t %5p %c{2}: %m%n
diff --git a/t/lib/dubbo-serialization-backend/pom.xml b/t/lib/dubbo-serialization-backend/pom.xml
new file mode 100644
index 000000000000..fe9f04229975
--- /dev/null
+++ b/t/lib/dubbo-serialization-backend/pom.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>org.apache.dubbo.backend</groupId>
+    <artifactId>dubbo-serialization-backend</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <packaging>pom</packaging>
+    <name>${project.artifactId}</name>
+    <description>A dubbo backend for test based on dubbo-samples-tengine</description>
+
+    <properties>
+        <skip_maven_deploy>true</skip_maven_deploy>
+        <dubbo.version>2.7.21</dubbo.version>
+    </properties>
+
+    <modules>
+        <module>dubbo-serialization-backend-interface</module>
+        <module>dubbo-serialization-backend-provider</module>
+    </modules>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-dependencies</artifactId>
+                <version>2.1.5.RELEASE</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.dubbo</groupId>
+                <artifactId>dubbo-dependencies-bom</artifactId>
+                <version>${dubbo.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.dubbo</groupId>
+                <artifactId>dubbo</artifactId>
+                <version>${dubbo.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.springframework</groupId>
+                        <artifactId>spring</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>javax.servlet</groupId>
+                        <artifactId>servlet-api</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>log4j</groupId>
+                        <artifactId>log4j</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter</artifactId>
+            <version>2.1.5.RELEASE</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.dubbo</groupId>
+            <artifactId>dubbo-spring-boot-starter</artifactId>
+            <version>2.7.1</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/t/plugin/elasticsearch-logger.t b/t/plugin/elasticsearch-logger.t
index 1706f5c289cd..83960fbacd34 100644
--- a/t/plugin/elasticsearch-logger.t
+++ b/t/plugin/elasticsearch-logger.t
@@ -668,3 +668,136 @@ hello world
--- wait: 2
--- error_log
check elasticsearch custom body success
+
+
+
+=== TEST 17: using unsupported field (type) for elasticsearch v8 should work normally
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+ uri = "/hello",
+ upstream = {
+ type = "roundrobin",
+ nodes = {
+ ["127.0.0.1:1980"] = 1
+ }
+ },
+ plugins = {
+ ["elasticsearch-logger"] = {
+ endpoint_addr = "http://127.0.0.1:9201",
+ field = {
+ index = "services",
+ type = "collector"
+ },
+ auth = {
+ username = "elastic",
+ password = "123456"
+ },
+ batch_max_size = 1,
+ inactive_timeout = 1
+ }
+ }
+ })
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 18: test route (auth success)
+--- request
+GET /hello
+--- wait: 2
+--- response_body
+hello world
+--- no_error_log
+Action/metadata line [1] contains an unknown parameter [_type]
+
+
+
+=== TEST 19: add plugin with 'include_req_body' setting, collect request log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/elasticsearch-logger', ngx.HTTP_DELETE)
+
+ local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+ uri = "/hello",
+ upstream = {
+ type = "roundrobin",
+ nodes = {
+ ["127.0.0.1:1980"] = 1
+ }
+ },
+ plugins = {
+ ["elasticsearch-logger"] = {
+ endpoint_addr = "http://127.0.0.1:9201",
+ field = {
+ index = "services"
+ },
+ batch_max_size = 1,
+ inactive_timeout = 1,
+ include_req_body = true
+ }
+ }
+ })
+
+ if code >= 300 then
+ ngx.status = code
+ end
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- error_log
+"body":"{\"sample_payload\":\"hello\"}"
+
+
+
+=== TEST 20: add plugin with 'include_resp_body' setting, collect response log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/elasticsearch-logger', ngx.HTTP_DELETE)
+
+ local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+ uri = "/hello",
+ upstream = {
+ type = "roundrobin",
+ nodes = {
+ ["127.0.0.1:1980"] = 1
+ }
+ },
+ plugins = {
+ ["elasticsearch-logger"] = {
+ endpoint_addr = "http://127.0.0.1:9201",
+ field = {
+ index = "services"
+ },
+ batch_max_size = 1,
+ inactive_timeout = 1,
+ include_req_body = true,
+ include_resp_body = true
+ }
+ }
+ })
+
+ if code >= 300 then
+ ngx.status = code
+ end
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- error_log
+"body":"hello world\n"
diff --git a/t/plugin/forward-auth.t b/t/plugin/forward-auth.t
index 25e4fe652a4c..b22260c5f5fb 100644
--- a/t/plugin/forward-auth.t
+++ b/t/plugin/forward-auth.t
@@ -285,6 +285,26 @@ property "request_method" validation failed: matches none of the enum values
"upstream_id": "u1",
"uri": "/large-body"
}]],
+ },
+ {
+ url = "/apisix/admin/routes/8",
+ data = [[{
+ "plugins": {
+ "forward-auth": {
+ "uri": "http://127.39.40.1:9999/auth",
+ "request_headers": ["Authorization"],
+ "upstream_headers": ["X-User-ID"],
+ "client_headers": ["Location"],
+ "status_on_error": 503,
+ "allow_degradation": false
+ },
+ "proxy-rewrite": {
+ "uri": "/echo"
+ }
+ },
+ "upstream_id": "u1",
+ "uri": "/onerror"
+ }]],
}
}
@@ -297,7 +317,7 @@ property "request_method" validation failed: matches none of the enum values
}
}
--- response_body eval
-"passed\n" x 10
+"passed\n" x 11
@@ -408,7 +428,16 @@ Authorization: 111
-=== TEST 13: test large body
+=== TEST 13: Verify status_on_error
+--- request
+GET /onerror
+--- more_headers
+Authorization: 333
+--- error_code: 503
+
+
+
+=== TEST 14: test large body
--- config
location /t {
content_by_lua_block {
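Route 8 above points `forward-auth` at an unreachable authorization server (`127.39.40.1:9999`) so that TEST 13 can verify `status_on_error`. A sketch of the decision being tested, under the assumption that the real plugin's control flow differs in detail:

```lua
-- Hedged sketch of the degradation decision exercised by TEST 13
-- (illustrative only, not the plugin's implementation).
local function on_auth_request_failed(conf)
    if conf.allow_degradation then
        return nil  -- let the request through unauthenticated
    end
    -- reject with the configured status (503 for the /onerror route)
    return conf.status_on_error or 403
end

print(on_auth_request_failed({allow_degradation = false, status_on_error = 503}))
-- 503
```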
diff --git a/t/plugin/http-dubbo.t b/t/plugin/http-dubbo.t
new file mode 100644
index 000000000000..8006f07ab824
--- /dev/null
+++ b/t/plugin/http-dubbo.t
@@ -0,0 +1,179 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+use t::APISIX 'no_plan';
+
+repeat_each(1);
+no_long_string();
+no_shuffle();
+no_root_location();
+add_block_preprocessor(sub {
+ my ($block) = @_;
+ my $yaml_config = $block->yaml_config // <<_EOC_;
+apisix:
+ node_listen: 1984
+ enable_admin: false
+deployment:
+ role: data_plane
+ role_data_plane:
+ config_provider: yaml
+_EOC_
+
+ $block->set_value("yaml_config", $yaml_config);
+});
+
+run_tests;
+
+__DATA__
+
+=== TEST 1: test_pojo
+--- apisix_yaml
+upstreams:
+ - nodes:
+ - host: 127.0.0.1
+ port: 30880
+ weight: 1
+ type: roundrobin
+ id: 1
+routes:
+ -
+ uri: /t
+ plugins:
+ http-dubbo:
+ service_name: org.apache.dubbo.backend.DubboSerializationTestService
+ params_type_desc: Lorg/apache/dubbo/backend/PoJo;
+ serialized: true
+ method: testPoJo
+ service_version: 1.0.0
+ upstream_id: 1
+#END
+--- request
+POST /t
+{"aBoolean":true,"aByte":1,"aDouble":1.1,"aFloat":1.2,"aInt":2,"aLong":3,"aShort":4,"aString":"aa","acharacter":"a","stringMap":{"key":"value"},"strings":["aa","bb"]}
+--- response_body chomp
+{"aBoolean":true,"aByte":1,"aDouble":1.1,"aFloat":1.2,"aInt":2,"aLong":3,"aShort":4,"aString":"aa","acharacter":"a","stringMap":{"key":"value"},"strings":["aa","bb"]}
+
+
+
+=== TEST 2: test_pojos
+--- apisix_yaml
+upstreams:
+ - nodes:
+ - host: 127.0.0.1
+ port: 30880
+ weight: 1
+ type: roundrobin
+ id: 1
+routes:
+ -
+ uri: /t
+ plugins:
+ http-dubbo:
+ service_name: org.apache.dubbo.backend.DubboSerializationTestService
+ params_type_desc: "[Lorg/apache/dubbo/backend/PoJo;"
+ serialized: true
+ method: testPoJos
+ service_version: 1.0.0
+ upstream_id: 1
+#END
+--- request
+POST /t
+[{"aBoolean":true,"aByte":1,"aDouble":1.1,"aFloat":1.2,"aInt":2,"aLong":3,"aShort":4,"aString":"aa","acharacter":"a","stringMap":{"key":"value"},"strings":["aa","bb"]}]
+--- response_body chomp
+[{"aBoolean":true,"aByte":1,"aDouble":1.1,"aFloat":1.2,"aInt":2,"aLong":3,"aShort":4,"aString":"aa","acharacter":"a","stringMap":{"key":"value"},"strings":["aa","bb"]}]
+
+
+
+=== TEST 3: test_timeout
+--- apisix_yaml
+upstreams:
+ - nodes:
+ - host: 127.0.0.1
+ port: 30881
+ weight: 1
+ type: roundrobin
+ id: 1
+routes:
+ -
+ uri: /t
+ plugins:
+ http-dubbo:
+ service_name: org.apache.dubbo.backend.DubboSerializationTestService
+ params_type_desc: "[Lorg/apache/dubbo/backend/PoJo;"
+ serialized: true
+ method: testPoJos
+ service_version: 1.0.0
+ connect_timeout: 100
+ read_timeout: 100
+ send_timeout: 100
+ upstream_id: 1
+#END
+--- request
+GET /t
+--- error_code: 502
+--- error_log
+failed to connect to upstream
+
+
+
+=== TEST 4: test_void
+--- apisix_yaml
+upstreams:
+ - nodes:
+ - host: 127.0.0.1
+ port: 30880
+ weight: 1
+ type: roundrobin
+ id: 1
+routes:
+ -
+ uri: /t
+ plugins:
+ http-dubbo:
+ service_name: org.apache.dubbo.backend.DubboSerializationTestService
+ serialized: true
+ method: testVoid
+ service_version: 1.0.0
+ upstream_id: 1
+#END
+--- request
+GET /t
+
+
+
+=== TEST 5: test_fail
+--- apisix_yaml
+upstreams:
+ - nodes:
+ - host: 127.0.0.1
+ port: 30880
+ weight: 1
+ type: roundrobin
+ id: 1
+routes:
+ -
+ uri: /t
+ plugins:
+ http-dubbo:
+ service_name: org.apache.dubbo.backend.DubboSerializationTestService
+ serialized: true
+ method: testFailure
+ service_version: 1.0.0
+ upstream_id: 1
+#END
+--- request
+GET /t
+--- error_code: 500
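The `params_type_desc` values in these tests are standard JVM type descriptors: `Lorg/apache/dubbo/backend/PoJo;` describes a single `PoJo` argument, and a leading `[` marks an array of them. A small illustrative helper (hypothetical, not part of the plugin) that derives such descriptors from Java-style type names:

```lua
-- Hypothetical helper: Java-style type name -> JVM type descriptor.
local function jvm_descriptor(java_type)
    local array_dims
    java_type, array_dims = java_type:gsub("%[%]", "")
    local prefix = ("["):rep(array_dims)
    local primitives = {
        boolean = "Z", byte = "B", char = "C", short = "S",
        int = "I", long = "J", float = "F", double = "D", void = "V",
    }
    local d = primitives[java_type]
        or ("L" .. java_type:gsub("%.", "/") .. ";")
    return prefix .. d
end

print(jvm_descriptor("org.apache.dubbo.backend.PoJo"))    -- Lorg/apache/dubbo/backend/PoJo;
print(jvm_descriptor("org.apache.dubbo.backend.PoJo[]"))  -- [Lorg/apache/dubbo/backend/PoJo;
```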
diff --git a/t/plugin/http-logger2.t b/t/plugin/http-logger2.t
index 12ee8b437d78..e8cee411f1e1 100644
--- a/t/plugin/http-logger2.t
+++ b/t/plugin/http-logger2.t
@@ -95,6 +95,60 @@ add_block_preprocessor(sub {
}
}
}
+
+ server {
+ listen 11451;
+ gzip on;
+ gzip_types *;
+ gzip_min_length 1;
+ location /gzip_hello {
+ content_by_lua_block {
+ ngx.req.read_body()
+ local s = "gzip hello world"
+ ngx.header['Content-Length'] = #s + 1
+ ngx.say(s)
+ }
+ }
+ }
+
+ server {
+ listen 11452;
+ location /brotli_hello {
+ content_by_lua_block {
+ ngx.req.read_body()
+ local s = "brotli hello world"
+ ngx.header['Content-Length'] = #s + 1
+ ngx.say(s)
+ }
+ header_filter_by_lua_block {
+ local conf = {
+ comp_level = 6,
+ http_version = 1.1,
+ lgblock = 0,
+ lgwin = 19,
+ min_length = 1,
+ mode = 0,
+ types = "*",
+ }
+ local brotli = require("apisix.plugins.brotli")
+ brotli.header_filter(conf, ngx.ctx)
+ }
+ body_filter_by_lua_block {
+ local conf = {
+ comp_level = 6,
+ http_version = 1.1,
+ lgblock = 0,
+ lgwin = 19,
+ min_length = 1,
+ mode = 0,
+ types = "*",
+ }
+ local brotli = require("apisix.plugins.brotli")
+ brotli.body_filter(conf, ngx.ctx)
+ }
+ }
+ }
+
_EOC_
$block->set_value("http_config", $http_config);
@@ -265,7 +319,109 @@ response.body:test-http-logger-response
-=== TEST 8: test default Authorization header sent to the log server
+=== TEST 8: set fetch request body and response body route - gzip
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "methods": ["GET"],
+ "plugins": {
+ "http-logger": {
+ "uri": "http://127.0.0.1:12001/http-logger/center?query[]=response.body",
+ "batch_max_size": 1,
+ "max_retry_count": 1,
+ "retry_delay": 2,
+ "buffer_duration": 2,
+ "inactive_timeout": 2,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:11451": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/gzip_hello"
+ }]])
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 9: test fetch request body and response body route
+--- request
+GET /gzip_hello
+--- more_headers
+Accept-Encoding: gzip
+--- error_log
+response.body:gzip hello world
+--- wait: 1.5
+
+
+
+=== TEST 10: set fetch request body and response body route - brotli
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "methods": ["GET"],
+ "plugins": {
+ "http-logger": {
+ "uri": "http://127.0.0.1:12001/http-logger/center?query[]=response.body",
+ "batch_max_size": 1,
+ "max_retry_count": 1,
+ "retry_delay": 2,
+ "buffer_duration": 2,
+ "inactive_timeout": 2,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:11452": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/brotli_hello"
+ }]])
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 11: test fetch request body and response body route
+--- request
+GET /brotli_hello
+--- more_headers
+Accept-Encoding: br
+--- error_log
+response.body:brotli hello world
+--- wait: 1.5
+
+
+
+=== TEST 12: test default Authorization header sent to the log server
--- config
location /t {
content_by_lua_block {
@@ -304,7 +460,7 @@ passed
-=== TEST 9: hit
+=== TEST 13: hit
--- request
POST /http-logger/test
test-http-logger-request
@@ -314,7 +470,7 @@ received Authorization header: [nil]
-=== TEST 10: add default path
+=== TEST 14: add default path
--- config
location /t {
content_by_lua_block {
@@ -352,7 +508,7 @@ passed
-=== TEST 11: hit
+=== TEST 15: hit
--- request
GET /http-logger/test
--- error_log
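The tests added above assert that the logged `response.body` is readable plain text even though the mock upstreams compress it with gzip or brotli, i.e. the captured body is decompressed before logging when `include_resp_body` is set. A minimal sketch of the gzip half, assuming the lua-zlib binding (APISIX's internals may differ):

```lua
-- Hedged sketch: inflate a gzip-compressed upstream body back to
-- plain text before logging. Uses lua-zlib, an assumption.
local zlib = require("zlib")

local function readable_body(body, content_encoding)
    if content_encoding == "gzip" then
        -- windowBits 31 = 15 + 16, telling zlib to expect a gzip header
        local inflate = zlib.inflate(31)
        local ok, plain = pcall(inflate, body)
        if ok then
            return plain
        end
    end
    return body  -- unknown encoding: log the body as received
end
```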
diff --git a/t/plugin/kafka-logger2.t b/t/plugin/kafka-logger2.t
index 7319a2db3624..84b6f90cd4cb 100644
--- a/t/plugin/kafka-logger2.t
+++ b/t/plugin/kafka-logger2.t
@@ -633,7 +633,166 @@ done
-=== TEST 12: set route(id: 1,include_resp_body = true,include_resp_body_expr = array)
+=== TEST 12: set route include_resp_body - gzip
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [=[{
+ "plugins": {
+ "kafka-logger": {
+ "broker_list" :
+ {
+ "127.0.0.1":9092
+ },
+ "kafka_topic" : "test2",
+ "key" : "key1",
+ "timeout" : 1,
+ "include_resp_body": true,
+ "batch_max_size": 1
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:11451": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/gzip_hello"
+ }]=]
+ )
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+
+--- response_body
+passed
+
+
+
+=== TEST 13: hit
+--- http_config
+server {
+ listen 11451;
+ gzip on;
+ gzip_types *;
+ gzip_min_length 1;
+ location /gzip_hello {
+ content_by_lua_block {
+ ngx.req.read_body()
+ local s = "gzip hello world"
+ ngx.header['Content-Length'] = #s + 1
+ ngx.say(s)
+ }
+ }
+}
+--- request
+GET /gzip_hello
+--- more_headers
+Accept-Encoding: gzip
+--- error_log eval
+qr/send data to kafka: \{.*"body":"gzip hello world\\n"/
+--- wait: 2
+
+
+
+=== TEST 14: set route include_resp_body - brotli
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [=[{
+ "plugins": {
+ "kafka-logger": {
+ "broker_list" :
+ {
+ "127.0.0.1":9092
+ },
+ "kafka_topic" : "test2",
+ "key" : "key1",
+ "timeout" : 1,
+ "include_resp_body": true,
+ "batch_max_size": 1
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:11452": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/brotli_hello"
+ }]=]
+ )
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+
+--- response_body
+passed
+
+
+
+=== TEST 15: hit
+--- http_config
+server {
+ listen 11452;
+ location /brotli_hello {
+ content_by_lua_block {
+ ngx.req.read_body()
+ local s = "brotli hello world"
+ ngx.header['Content-Length'] = #s + 1
+ ngx.say(s)
+ }
+ header_filter_by_lua_block {
+ local conf = {
+ comp_level = 6,
+ http_version = 1.1,
+ lgblock = 0,
+ lgwin = 19,
+ min_length = 1,
+ mode = 0,
+ types = "*",
+ }
+ local brotli = require("apisix.plugins.brotli")
+ brotli.header_filter(conf, ngx.ctx)
+ }
+ body_filter_by_lua_block {
+ local conf = {
+ comp_level = 6,
+ http_version = 1.1,
+ lgblock = 0,
+ lgwin = 19,
+ min_length = 1,
+ mode = 0,
+ types = "*",
+ }
+ local brotli = require("apisix.plugins.brotli")
+ brotli.body_filter(conf, ngx.ctx)
+ }
+ }
+}
+--- request
+GET /brotli_hello
+--- more_headers
+Accept-Encoding: br
+--- error_log eval
+qr/send data to kafka: \{.*"body":"brotli hello world\\n"/
+--- wait: 2
+
+
+
+=== TEST 16: set route(id: 1,include_resp_body = true,include_resp_body_expr = array)
--- config
location /t {
content_by_lua_block {
@@ -682,7 +841,7 @@ passed
-=== TEST 13: hit route, expr eval success
+=== TEST 17: hit route, expr eval success
--- request
POST /hello?name=qwerty
abcdef
@@ -694,7 +853,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/
-=== TEST 14: hit route,expr eval fail
+=== TEST 18: hit route,expr eval fail
--- request
POST /hello?name=zcxv
abcdef
@@ -706,7 +865,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/
-=== TEST 15: multi level nested expr conditions
+=== TEST 19: multi level nested expr conditions
--- config
location /t {
content_by_lua_block {
@@ -758,7 +917,7 @@ passed
-=== TEST 16: hit route, req_body_expr and resp_body_expr both eval success
+=== TEST 20: hit route, req_body_expr and resp_body_expr both eval success
--- request
POST /hello?name=qwerty
abcdef
@@ -771,7 +930,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/]
-=== TEST 17: hit route, req_body_expr eval success, resp_body_expr both eval failed
+=== TEST 21: hit route, req_body_expr eval success, resp_body_expr both eval failed
--- request
POST /hello?name=asdfgh
abcdef
@@ -785,7 +944,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/
-=== TEST 18: hit route, req_body_expr eval failed, resp_body_expr both eval success
+=== TEST 22: hit route, req_body_expr eval failed, resp_body_expr both eval success
--- request
POST /hello?name=zxcvbn
abcdef
@@ -799,7 +958,7 @@ qr/send data to kafka: \{.*"body":"abcdef"/
-=== TEST 19: hit route, req_body_expr eval success, resp_body_expr both eval failed
+=== TEST 23: hit route, req_body_expr eval success, resp_body_expr both eval failed
--- request
POST /hello?name=xxxxxx
abcdef
@@ -812,7 +971,7 @@ qr/send data to kafka: \{.*"body":"hello world\\n"/]
-=== TEST 20: update route(id: 1,include_req_body = true,include_req_body_expr = array)
+=== TEST 24: update route(id: 1,include_req_body = true,include_req_body_expr = array)
--- config
location /t {
content_by_lua_block {
@@ -862,7 +1021,7 @@ passed
-=== TEST 21: hit route, expr eval success
+=== TEST 25: hit route, expr eval success
--- request
POST /hello?name=qwerty
abcdef
@@ -874,7 +1033,7 @@ qr/send data to kafka: \{.*"body":"abcdef"/
-=== TEST 22: setup route with meta_refresh_interval
+=== TEST 26: setup route with meta_refresh_interval
--- config
location /t {
content_by_lua_block {
@@ -918,7 +1077,7 @@ passed
-=== TEST 23: hit route, send data to kafka successfully
+=== TEST 27: hit route, send data to kafka successfully
--- request
POST /hello
abcdef
diff --git a/t/plugin/mocking.t b/t/plugin/mocking.t
index 46d82ef80a8f..d5415918abb0 100644
--- a/t/plugin/mocking.t
+++ b/t/plugin/mocking.t
@@ -465,3 +465,42 @@ GET /hello
--- response_headers
X-Apisix: is, cool
X-Really: yes
+
+
+
+=== TEST 21: set route (return headers support built-in variables)
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "mocking": {
+ "response_example": "hello world",
+ "response_headers": {
+ "X-Route-Id": "$route_id"
+ }
+ }
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 22: hit route
+--- request
+GET /hello
+--- response_headers
+X-Route-Id: 1
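TEST 21/22 above show that `mocking`'s `response_headers` values can now reference built-in variables such as `$route_id`. A hedged sketch of the substitution being exercised (illustrative only):

```lua
-- Sketch: resolve "$var" references in mocked response headers
-- against the request context before the response is sent.
local function resolve_headers(response_headers, vars)
    local out = {}
    for name, value in pairs(response_headers) do
        out[name] = value:gsub("%$([%w_]+)", function(v)
            return vars[v] or ""
        end)
    end
    return out
end

local headers = resolve_headers({["X-Route-Id"] = "$route_id"}, {route_id = "1"})
print(headers["X-Route-Id"])  -- 1
```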
diff --git a/t/plugin/rocketmq-logger2.t b/t/plugin/rocketmq-logger2.t
index 7f3d054fa6ed..3b6087c16f3f 100644
--- a/t/plugin/rocketmq-logger2.t
+++ b/t/plugin/rocketmq-logger2.t
@@ -391,7 +391,6 @@ POST /hello?name=qwerty
abcdef
--- response_body
hello world
-
--- error_log eval
qr/send data to rocketmq: \{.*"body":"hello world\\n"/
--- wait: 2
@@ -410,7 +409,160 @@ qr/send data to rocketmq: \{.*"body":"hello world\\n"/
-=== TEST 13: multi level nested expr conditions
+=== TEST 13: set route include_resp_body = true - gzip
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [=[{
+ "plugins": {
+ "rocketmq-logger": {
+ "nameserver_list" : [ "127.0.0.1:9876" ],
+ "topic" : "test2",
+ "key" : "key1",
+ "timeout" : 1,
+ "include_resp_body": true,
+ "batch_max_size": 1
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:11451": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/gzip_hello"
+ }]=]
+ )
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+
+--- response_body
+passed
+
+
+
+=== TEST 14: hit
+--- http_config
+server {
+ listen 11451;
+ gzip on;
+ gzip_types *;
+ gzip_min_length 1;
+ location /gzip_hello {
+ content_by_lua_block {
+ ngx.req.read_body()
+ local s = "gzip hello world"
+ ngx.header['Content-Length'] = #s + 1
+ ngx.say(s)
+ }
+ }
+}
+--- request
+GET /gzip_hello
+--- more_headers
+Accept-Encoding: gzip
+--- error_log eval
+qr/send data to rocketmq: \{.*"body":"gzip hello world\\n"/
+--- wait: 2
+
+
+
+=== TEST 15: set route include_resp_body - brotli
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [=[{
+ "plugins": {
+ "rocketmq-logger": {
+ "nameserver_list" : [ "127.0.0.1:9876" ],
+ "topic" : "test2",
+ "key" : "key1",
+ "timeout" : 1,
+ "include_resp_body": true,
+ "batch_max_size": 1
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:11452": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/brotli_hello"
+ }]=]
+ )
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+
+--- response_body
+passed
+
+
+
+=== TEST 16: hit
+--- http_config
+server {
+ listen 11452;
+ location /brotli_hello {
+ content_by_lua_block {
+ ngx.req.read_body()
+ local s = "brotli hello world"
+ ngx.header['Content-Length'] = #s + 1
+ ngx.say(s)
+ }
+ header_filter_by_lua_block {
+ local conf = {
+ comp_level = 6,
+ http_version = 1.1,
+ lgblock = 0,
+ lgwin = 19,
+ min_length = 1,
+ mode = 0,
+ types = "*",
+ }
+ local brotli = require("apisix.plugins.brotli")
+ brotli.header_filter(conf, ngx.ctx)
+ }
+ body_filter_by_lua_block {
+ local conf = {
+ comp_level = 6,
+ http_version = 1.1,
+ lgblock = 0,
+ lgwin = 19,
+ min_length = 1,
+ mode = 0,
+ types = "*",
+ }
+ local brotli = require("apisix.plugins.brotli")
+ brotli.body_filter(conf, ngx.ctx)
+ }
+ }
+}
+--- request
+GET /brotli_hello
+--- more_headers
+Accept-Encoding: br
+--- error_log eval
+qr/send data to rocketmq: \{.*"body":"brotli hello world\\n"/
+--- wait: 2
+
+
+
+=== TEST 17: multi level nested expr conditions
--- config
location /t {
content_by_lua_block {
@@ -443,7 +595,7 @@ done
-=== TEST 14: data encryption for secret_key
+=== TEST 18: data encryption for secret_key
--- yaml_config
apisix:
data_encryption:
diff --git a/t/plugin/skywalking-logger.t b/t/plugin/skywalking-logger.t
index 900c5a07aa39..6ab87be26947 100644
--- a/t/plugin/skywalking-logger.t
+++ b/t/plugin/skywalking-logger.t
@@ -33,7 +33,10 @@ add_block_preprocessor(sub {
location /v3/logs {
content_by_lua_block {
local core = require("apisix.core")
-
+ ngx.req.read_body()
+ local data = ngx.req.get_body_data()
+ ngx.log(ngx.WARN, "skywalking-logger body: ", data)
core.log.warn(core.json.encode(core.request.get_body(), true))
}
}
@@ -294,3 +297,90 @@ opentracing
qr/\\\"serviceInstance\\\":\\\"\$hostname\\\"/
qr/\\\"serviceInstance\\\":\\\"\\\"/
--- wait: 0.5
+
+
+
+=== TEST 13: add plugin with 'include_req_body' setting, collect request log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/skywalking-logger', ngx.HTTP_DELETE)
+
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "skywalking-logger": {
+ "endpoint_addr": "http://127.0.0.1:1986",
+ "batch_max_size": 1,
+ "max_retry_count": 1,
+ "retry_delay": 2,
+ "buffer_duration": 2,
+ "inactive_timeout": 2,
+ "include_req_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1982": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/opentracing"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+
+ local code, _, body = t("/opentracing", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- error_log
+\"body\":\"{\\\"sample_payload\\\":\\\"hello\\\"}\"
+
+
+
+=== TEST 14: add plugin with 'include_resp_body' setting, collect response log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/skywalking-logger', ngx.HTTP_DELETE)
+
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "skywalking-logger": {
+ "endpoint_addr": "http://127.0.0.1:1986",
+ "batch_max_size": 1,
+ "max_retry_count": 1,
+ "retry_delay": 2,
+ "buffer_duration": 2,
+ "inactive_timeout": 2,
+ "include_req_body": true,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1982": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/opentracing"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+
+ local code, _, body = t("/opentracing", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- error_log
+\"body\":\"opentracing\\n\"
diff --git a/t/plugin/sls-logger.t b/t/plugin/sls-logger.t
index 940ddf6a2be8..af6ae667c34f 100644
--- a/t/plugin/sls-logger.t
+++ b/t/plugin/sls-logger.t
@@ -385,3 +385,90 @@ passed
GET /hello
--- response_body
hello world
+
+
+
+=== TEST 14: add plugin with 'include_req_body' setting, collect request log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/sls-logger', ngx.HTTP_DELETE)
+
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "sls-logger": {
+ "host": "127.0.0.1",
+ "port": 10009,
+ "project": "your_project",
+ "logstore": "your_logstore",
+ "access_key_id": "your_access_key_id",
+ "access_key_secret": "your_access_key_secret",
+ "timeout": 30000,
+ "include_req_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1980": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- error_log
+"body":"{\"sample_payload\":\"hello\"}
+
+
+
+=== TEST 15: add plugin with 'include_resp_body' setting, collect response log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/sls-logger', ngx.HTTP_DELETE)
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "sls-logger": {
+ "host": "127.0.0.1",
+ "port": 10009,
+ "project": "your_project",
+ "logstore": "your_logstore",
+ "access_key_id": "your_access_key_id",
+ "access_key_secret": "your_access_key_secret",
+ "timeout": 30000,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1980": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- error_log
+"body":"hello world\n"
diff --git a/t/plugin/syslog.t b/t/plugin/syslog.t
index 5e13aa301c56..9dbb03af83c0 100644
--- a/t/plugin/syslog.t
+++ b/t/plugin/syslog.t
@@ -559,3 +559,95 @@ GET /hello
tail -n 1 ci/pod/vector/syslog-udp.log
--- response_body eval
qr/.*upstream.*/
+
+
+
+=== TEST 20: add plugin with 'include_req_body' setting, collect request log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/syslog', ngx.HTTP_DELETE)
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "syslog": {
+ "batch_max_size": 1,
+ "flush_limit": 1,
+ "host" : "127.0.0.1",
+ "port" : 5140,
+ "include_req_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1980": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ ngx.say(body)
+ return
+ end
+
+ ngx.say(body)
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- request
+GET /t
+--- error_log
+"body":"{\"sample_payload\":\"hello\"}"
+
+
+
+=== TEST 21: add plugin with 'include_resp_body' setting, collect response log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/syslog', ngx.HTTP_DELETE)
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "syslog": {
+ "batch_max_size": 1,
+ "flush_limit": 1,
+ "host" : "127.0.0.1",
+ "port" : 5140,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1980": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ ngx.say(body)
+ return
+ end
+
+ ngx.say(body)
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- request
+GET /t
+--- error_log
+"body":"hello world\n"
diff --git a/t/plugin/tcp-logger.t b/t/plugin/tcp-logger.t
index 3ef774f813d1..b3c29ee5116c 100644
--- a/t/plugin/tcp-logger.t
+++ b/t/plugin/tcp-logger.t
@@ -518,3 +518,91 @@ opentracing
tail -n 1 ci/pod/vector/tls-datas.log
--- response_body eval
qr/.*route_id.*1.*/
+
+
+
+=== TEST 16: add plugin with 'include_req_body' setting, collect request log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/tcp-logger', ngx.HTTP_DELETE)
+ local code, body1 = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "tcp-logger": {
+ "host": "127.0.0.1",
+ "port": 43000,
+ "tls": true,
+ "batch_max_size": 1,
+ "include_req_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1982": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/opentracing"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ ngx.say("fail")
+ return
+ end
+
+ local code, _, body = t("/opentracing", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- request
+GET /t
+--- error_log
+"body":"{\"sample_payload\":\"hello\"}"
+
+
+
+=== TEST 17: add plugin with 'include_resp_body' setting, collect response log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/tcp-logger', ngx.HTTP_DELETE)
+ local code, body1 = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "tcp-logger": {
+ "host": "127.0.0.1",
+ "port": 43000,
+ "tls": true,
+ "batch_max_size": 1,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1982": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/opentracing"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ ngx.say("fail")
+ return
+ end
+
+ local code, _, body = t("/opentracing", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- request
+GET /t
+--- error_log
+"body":"opentracing\n"
diff --git a/t/plugin/udp-logger.t b/t/plugin/udp-logger.t
index 44744823634b..b20248b191c5 100644
--- a/t/plugin/udp-logger.t
+++ b/t/plugin/udp-logger.t
@@ -445,3 +445,95 @@ passed
tail -n 1 ci/pod/vector/udp.log
--- response_body eval
qr/.*logger format in plugin.*/
+
+
+
+=== TEST 13: add plugin with 'include_req_body' setting, collect request log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/udp-logger', ngx.HTTP_DELETE)
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "udp-logger": {
+ "host": "127.0.0.1",
+ "port": 8127,
+ "tls": false,
+ "batch_max_size": 1,
+ "inactive_timeout": 1,
+ "include_req_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1980": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ ngx.say(body)
+ return
+ end
+
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- request
+GET /t
+--- error_log
+"body":"{\"sample_payload\":\"hello\"}"
+
+
+
+=== TEST 14: add plugin with 'include_resp_body' setting, collect response log
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ t('/apisix/admin/plugin_metadata/udp-logger', ngx.HTTP_DELETE)
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "plugins": {
+ "udp-logger": {
+ "host": "127.0.0.1",
+ "port": 8127,
+ "tls": false,
+ "batch_max_size": 1,
+ "inactive_timeout": 1,
+ "include_resp_body": true
+ }
+ },
+ "upstream": {
+ "nodes": {
+ "127.0.0.1:1980": 1
+ },
+ "type": "roundrobin"
+ },
+ "uri": "/hello"
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ ngx.say(body)
+ return
+ end
+
+
+ local code, _, body = t("/hello", "POST", "{\"sample_payload\":\"hello\"}")
+ }
+ }
+--- request
+GET /t
+--- error_log
+"body":"hello world\n"