feat(apisix): add Cloudron package
- Implements Apache APISIX packaging for Cloudron platform. - Includes Dockerfile, CloudronManifest.json, and start.sh. - Configured to use Cloudron's etcd addon. 🤖 Generated with Gemini CLI Co-Authored-By: Gemini <noreply@google.com>
This commit is contained in:
@@ -0,0 +1,66 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local consumers = require("apisix.consumer").consumers
|
||||
local resource = require("apisix.admin.resource")
|
||||
local schema_plugin = require("apisix.admin.plugins").check_schema
|
||||
local type = type
|
||||
local tostring = tostring
|
||||
local ipairs = ipairs
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
local ok, err = schema_plugin(conf.plugins)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function delete_checker(id)
|
||||
local consumers, consumers_ver = consumers()
|
||||
if consumers_ver and consumers then
|
||||
for _, consumer in ipairs(consumers) do
|
||||
if type(consumer) == "table" and consumer.value
|
||||
and consumer.value.group_id
|
||||
and tostring(consumer.value.group_id) == id then
|
||||
return 400, {error_msg = "can not delete this consumer group,"
|
||||
.. " consumer [" .. consumer.value.id
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return nil, nil
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "consumer_groups",
|
||||
kind = "consumer group",
|
||||
schema = core.schema.consumer_group,
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"post"},
|
||||
delete_checker = delete_checker
|
||||
})
|
@@ -0,0 +1,65 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local plugins = require("apisix.admin.plugins")
|
||||
local resource = require("apisix.admin.resource")
|
||||
|
||||
|
||||
local function check_conf(username, conf, need_username, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
if username and username ~= conf.username then
|
||||
return nil, {error_msg = "wrong username" }
|
||||
end
|
||||
|
||||
if conf.plugins then
|
||||
ok, err = plugins.check_schema(conf.plugins, core.schema.TYPE_CONSUMER)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid plugins configuration: " .. err}
|
||||
end
|
||||
end
|
||||
|
||||
if conf.group_id then
|
||||
local key = "/consumer_groups/" .. conf.group_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch consumer group info by "
|
||||
.. "consumer group id [" .. conf.group_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch consumer group info by "
|
||||
.. "consumer group id [" .. conf.group_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
return conf.username
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "consumers",
|
||||
kind = "consumer",
|
||||
schema = core.schema.consumer,
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"post", "patch"}
|
||||
})
|
@@ -0,0 +1,74 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local plugins = require("apisix.admin.plugins")
|
||||
local plugin = require("apisix.plugin")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local pairs = pairs
|
||||
|
||||
local function check_conf(_id, conf, _need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
if conf.plugins then
|
||||
ok, err = plugins.check_schema(conf.plugins, core.schema.TYPE_CONSUMER)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid plugins configuration: " .. err}
|
||||
end
|
||||
|
||||
for name, _ in pairs(conf.plugins) do
|
||||
local plugin_obj = plugin.get(name)
|
||||
if not plugin_obj then
|
||||
return nil, {error_msg = "unknown plugin " .. name}
|
||||
end
|
||||
if plugin_obj.type ~= "auth" then
|
||||
return nil, {error_msg = "only supports auth type plugins in consumer credential"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return true, nil
|
||||
end
|
||||
|
||||
-- get_credential_etcd_key is used to splice the credential's etcd key (without prefix)
|
||||
-- from credential_id and sub_path.
|
||||
-- Parameter credential_id is from the uri or payload; sub_path is in the form of
|
||||
-- {consumer_name}/credentials or {consumer_name}/credentials/{credential_id}.
|
||||
-- Only if GET credentials list, credential_id is nil, sub_path is like {consumer_name}/credentials,
|
||||
-- so return value is /consumers/{consumer_name}/credentials.
|
||||
-- In the other methods, credential_id is not nil, return value is
|
||||
-- /consumers/{consumer_name}/credentials/{credential_id}.
|
||||
local function get_credential_etcd_key(credential_id, _conf, sub_path, _args)
|
||||
if credential_id then
|
||||
local uri_segs = core.utils.split_uri(sub_path)
|
||||
local consumer_name = uri_segs[1]
|
||||
return "/consumers/" .. consumer_name .. "/credentials/" .. credential_id
|
||||
end
|
||||
|
||||
return "/consumers/" .. sub_path
|
||||
end
|
||||
|
||||
return resource.new({
|
||||
name = "credentials",
|
||||
kind = "credential",
|
||||
schema = core.schema.credential,
|
||||
checker = check_conf,
|
||||
get_resource_etcd_key = get_credential_etcd_key,
|
||||
unsupported_methods = {"post", "patch"}
|
||||
})
|
@@ -0,0 +1,43 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local schema_plugin = require("apisix.admin.plugins").check_schema
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
local ok, err = schema_plugin(conf.plugins)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "global_rules",
|
||||
kind = "global rule",
|
||||
schema = core.schema.global_rule,
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"post"}
|
||||
})
|
526
CloudronPackages/APISIX/apisix-source/apisix/admin/init.lua
Normal file
526
CloudronPackages/APISIX/apisix-source/apisix/admin/init.lua
Normal file
@@ -0,0 +1,526 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local require = require
|
||||
local core = require("apisix.core")
|
||||
local get_uri_args = ngx.req.get_uri_args
|
||||
local route = require("apisix.utils.router")
|
||||
local plugin = require("apisix.plugin")
|
||||
local standalone = require("apisix.admin.standalone")
|
||||
local v3_adapter = require("apisix.admin.v3_adapter")
|
||||
local utils = require("apisix.admin.utils")
|
||||
local ngx = ngx
|
||||
local get_method = ngx.req.get_method
|
||||
local ngx_time = ngx.time
|
||||
local ngx_timer_at = ngx.timer.at
|
||||
local ngx_worker_id = ngx.worker.id
|
||||
local tonumber = tonumber
|
||||
local tostring = tostring
|
||||
local str_lower = string.lower
|
||||
local reload_event = "/apisix/admin/plugins/reload"
|
||||
local ipairs = ipairs
|
||||
local error = error
|
||||
local type = type
|
||||
|
||||
|
||||
local events
|
||||
local MAX_REQ_BODY = 1024 * 1024 * 1.5 -- 1.5 MiB
|
||||
|
||||
|
||||
local viewer_methods = {
|
||||
get = true,
|
||||
}
|
||||
|
||||
|
||||
local resources = {
|
||||
routes = require("apisix.admin.routes"),
|
||||
services = require("apisix.admin.services"),
|
||||
upstreams = require("apisix.admin.upstreams"),
|
||||
consumers = require("apisix.admin.consumers"),
|
||||
credentials = require("apisix.admin.credentials"),
|
||||
schema = require("apisix.admin.schema"),
|
||||
ssls = require("apisix.admin.ssl"),
|
||||
plugins = require("apisix.admin.plugins"),
|
||||
protos = require("apisix.admin.proto"),
|
||||
global_rules = require("apisix.admin.global_rules"),
|
||||
stream_routes = require("apisix.admin.stream_routes"),
|
||||
plugin_metadata = require("apisix.admin.plugin_metadata"),
|
||||
plugin_configs = require("apisix.admin.plugin_config"),
|
||||
consumer_groups = require("apisix.admin.consumer_group"),
|
||||
secrets = require("apisix.admin.secrets"),
|
||||
}
|
||||
|
||||
|
||||
local _M = {version = 0.4}
|
||||
local router
|
||||
|
||||
|
||||
local function check_token(ctx)
|
||||
local local_conf = core.config.local_conf()
|
||||
|
||||
-- check if admin_key is required
|
||||
if local_conf.deployment.admin.admin_key_required == false then
|
||||
return true
|
||||
end
|
||||
|
||||
local admin_key = core.table.try_read_attr(local_conf, "deployment", "admin", "admin_key")
|
||||
if not admin_key then
|
||||
return true
|
||||
end
|
||||
|
||||
local req_token = ctx.var.arg_api_key or ctx.var.http_x_api_key
|
||||
or ctx.var.cookie_x_api_key
|
||||
if not req_token then
|
||||
return false, "missing apikey"
|
||||
end
|
||||
|
||||
local admin
|
||||
for i, row in ipairs(admin_key) do
|
||||
if req_token == row.key then
|
||||
admin = row
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
if not admin then
|
||||
return false, "wrong apikey"
|
||||
end
|
||||
|
||||
if admin.role == "viewer" and
|
||||
not viewer_methods[str_lower(get_method())] then
|
||||
return false, "invalid method for role viewer"
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
-- Set the `apictx` variable and check admin api token, if the check fails, the current
|
||||
-- request will be interrupted and an error response will be returned.
|
||||
--
|
||||
-- NOTE: This is a higher wrapper for `check_token` function.
|
||||
local function set_ctx_and_check_token()
|
||||
local api_ctx = {}
|
||||
core.ctx.set_vars_meta(api_ctx)
|
||||
ngx.ctx.api_ctx = api_ctx
|
||||
|
||||
local ok, err = check_token(api_ctx)
|
||||
if not ok then
|
||||
core.log.warn("failed to check token: ", err)
|
||||
core.response.exit(401, { error_msg = "failed to check token", description = err })
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function strip_etcd_resp(data)
|
||||
if type(data) == "table"
|
||||
and data.header ~= nil
|
||||
and data.header.revision ~= nil
|
||||
and data.header.raft_term ~= nil
|
||||
then
|
||||
-- strip etcd data
|
||||
data.header = nil
|
||||
data.responses = nil
|
||||
data.succeeded = nil
|
||||
|
||||
if data.node then
|
||||
data.node.createdIndex = nil
|
||||
data.node.modifiedIndex = nil
|
||||
end
|
||||
|
||||
data.count = nil
|
||||
data.more = nil
|
||||
data.prev_kvs = nil
|
||||
|
||||
if data.deleted then
|
||||
-- We used to treat the type incorrectly. But for compatibility we follow
|
||||
-- the existing type.
|
||||
data.deleted = tostring(data.deleted)
|
||||
end
|
||||
end
|
||||
|
||||
return data
|
||||
end
|
||||
|
||||
|
||||
local function head()
|
||||
core.response.exit(200)
|
||||
end
|
||||
|
||||
|
||||
local function run()
|
||||
set_ctx_and_check_token()
|
||||
|
||||
local uri_segs = core.utils.split_uri(ngx.var.uri)
|
||||
core.log.info("uri: ", core.json.delay_encode(uri_segs))
|
||||
|
||||
-- /apisix/admin/schema/route
|
||||
local seg_res, seg_id = uri_segs[4], uri_segs[5]
|
||||
local seg_sub_path = core.table.concat(uri_segs, "/", 6)
|
||||
if seg_res == "schema" and seg_id == "plugins" then
|
||||
-- /apisix/admin/schema/plugins/limit-count
|
||||
seg_res, seg_id = uri_segs[5], uri_segs[6]
|
||||
seg_sub_path = core.table.concat(uri_segs, "/", 7)
|
||||
end
|
||||
|
||||
if seg_res == "stream_routes" then
|
||||
local local_conf = core.config.local_conf()
|
||||
if local_conf.apisix.proxy_mode ~= "stream" and
|
||||
local_conf.apisix.proxy_mode ~= "http&stream" then
|
||||
core.log.warn("stream mode is disabled, can not add any stream ",
|
||||
"routes")
|
||||
core.response.exit(400, {error_msg = "stream mode is disabled, " ..
|
||||
"can not add stream routes"})
|
||||
end
|
||||
end
|
||||
|
||||
if seg_res == "consumers" and #uri_segs >= 6 and uri_segs[6] == "credentials" then
|
||||
seg_sub_path = seg_id .. "/" .. seg_sub_path
|
||||
seg_res = uri_segs[6]
|
||||
seg_id = uri_segs[7]
|
||||
end
|
||||
|
||||
local resource = resources[seg_res]
|
||||
if not resource then
|
||||
core.response.exit(404, {error_msg = "Unsupported resource type: ".. seg_res})
|
||||
end
|
||||
|
||||
local method = str_lower(get_method())
|
||||
if not resource[method] then
|
||||
core.response.exit(404, {error_msg = "not found"})
|
||||
end
|
||||
|
||||
local req_body, err = core.request.get_body(MAX_REQ_BODY)
|
||||
if err then
|
||||
core.log.error("failed to read request body: ", err)
|
||||
core.response.exit(400, {error_msg = "invalid request body: " .. err})
|
||||
end
|
||||
|
||||
if req_body then
|
||||
local data, err = core.json.decode(req_body)
|
||||
if err then
|
||||
core.log.error("invalid request body: ", req_body, " err: ", err)
|
||||
core.response.exit(400, {error_msg = "invalid request body: " .. err,
|
||||
req_body = req_body})
|
||||
end
|
||||
|
||||
req_body = data
|
||||
end
|
||||
|
||||
local uri_args = ngx.req.get_uri_args() or {}
|
||||
if uri_args.ttl then
|
||||
if not tonumber(uri_args.ttl) then
|
||||
core.response.exit(400, {error_msg = "invalid argument ttl: "
|
||||
.. "should be a number"})
|
||||
end
|
||||
end
|
||||
|
||||
local code, data
|
||||
if seg_res == "schema" or seg_res == "plugins" then
|
||||
code, data = resource[method](seg_id, req_body, seg_sub_path, uri_args)
|
||||
else
|
||||
code, data = resource[method](resource, seg_id, req_body, seg_sub_path, uri_args)
|
||||
end
|
||||
|
||||
if code then
|
||||
if method == "get" and plugin.enable_data_encryption then
|
||||
if seg_res == "consumers" or seg_res == "credentials" then
|
||||
utils.decrypt_params(plugin.decrypt_conf, data, core.schema.TYPE_CONSUMER)
|
||||
elseif seg_res == "plugin_metadata" then
|
||||
utils.decrypt_params(plugin.decrypt_conf, data, core.schema.TYPE_METADATA)
|
||||
else
|
||||
utils.decrypt_params(plugin.decrypt_conf, data)
|
||||
end
|
||||
end
|
||||
|
||||
if v3_adapter.enable_v3() then
|
||||
core.response.set_header("X-API-VERSION", "v3")
|
||||
else
|
||||
core.response.set_header("X-API-VERSION", "v2")
|
||||
end
|
||||
|
||||
data = v3_adapter.filter(data, resource)
|
||||
data = strip_etcd_resp(data)
|
||||
|
||||
core.response.exit(code, data)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function get_plugins_list()
|
||||
set_ctx_and_check_token()
|
||||
local args = get_uri_args()
|
||||
local subsystem = args["subsystem"]
|
||||
-- If subsystem is passed then it should be either http or stream.
|
||||
-- If it is not passed/nil then http will be default.
|
||||
subsystem = subsystem or "http"
|
||||
if subsystem == "http" or subsystem == "stream" then
|
||||
local plugins = resources.plugins.get_plugins_list(subsystem)
|
||||
core.response.exit(200, plugins)
|
||||
end
|
||||
core.response.exit(400,"invalid subsystem passed")
|
||||
end
|
||||
|
||||
-- Handle unsupported request methods for the virtual "reload" plugin
|
||||
local function unsupported_methods_reload_plugin()
|
||||
set_ctx_and_check_token()
|
||||
|
||||
core.response.exit(405, {
|
||||
error_msg = "please use PUT method to reload the plugins, "
|
||||
.. get_method() .. " method is not allowed."
|
||||
})
|
||||
end
|
||||
|
||||
|
||||
local function post_reload_plugins()
|
||||
set_ctx_and_check_token()
|
||||
|
||||
local success, err = events:post(reload_event, get_method(), ngx_time())
|
||||
if not success then
|
||||
core.response.exit(503, err)
|
||||
end
|
||||
|
||||
core.response.exit(200, "done")
|
||||
end
|
||||
|
||||
|
||||
local function plugins_eq(old, new)
|
||||
local old_set = {}
|
||||
for _, p in ipairs(old) do
|
||||
old_set[p.name] = p
|
||||
end
|
||||
|
||||
local new_set = {}
|
||||
for _, p in ipairs(new) do
|
||||
new_set[p.name] = p
|
||||
end
|
||||
|
||||
return core.table.set_eq(old_set, new_set)
|
||||
end
|
||||
|
||||
|
||||
local function sync_local_conf_to_etcd(reset)
|
||||
local local_conf = core.config.local_conf()
|
||||
|
||||
local plugins = {}
|
||||
for _, name in ipairs(local_conf.plugins) do
|
||||
core.table.insert(plugins, {
|
||||
name = name,
|
||||
})
|
||||
end
|
||||
|
||||
for _, name in ipairs(local_conf.stream_plugins) do
|
||||
core.table.insert(plugins, {
|
||||
name = name,
|
||||
stream = true,
|
||||
})
|
||||
end
|
||||
|
||||
if reset then
|
||||
local res, err = core.etcd.get("/plugins")
|
||||
if not res then
|
||||
core.log.error("failed to get current plugins: ", err)
|
||||
return
|
||||
end
|
||||
|
||||
if res.status == 404 then
|
||||
-- nothing need to be reset
|
||||
return
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
core.log.error("failed to get current plugins, status: ", res.status)
|
||||
return
|
||||
end
|
||||
|
||||
local stored_plugins = res.body.node.value
|
||||
local revision = res.body.node.modifiedIndex
|
||||
if plugins_eq(stored_plugins, plugins) then
|
||||
core.log.info("plugins not changed, don't need to reset")
|
||||
return
|
||||
end
|
||||
|
||||
core.log.warn("sync local conf to etcd")
|
||||
|
||||
local res, err = core.etcd.atomic_set("/plugins", plugins, nil, revision)
|
||||
if not res then
|
||||
core.log.error("failed to set plugins: ", err)
|
||||
end
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
core.log.warn("sync local conf to etcd")
|
||||
|
||||
-- need to store all plugins name into one key so that it can be updated atomically
|
||||
local res, err = core.etcd.set("/plugins", plugins)
|
||||
if not res then
|
||||
core.log.error("failed to set plugins: ", err)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function reload_plugins(data, event, source, pid)
|
||||
core.log.info("start to hot reload plugins")
|
||||
plugin.load()
|
||||
|
||||
if ngx_worker_id() == 0 then
|
||||
sync_local_conf_to_etcd()
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function schema_validate()
|
||||
local uri_segs = core.utils.split_uri(ngx.var.uri)
|
||||
core.log.info("uri: ", core.json.delay_encode(uri_segs))
|
||||
|
||||
local seg_res = uri_segs[6]
|
||||
local resource = resources[seg_res]
|
||||
if not resource then
|
||||
core.response.exit(404, {error_msg = "Unsupported resource type: ".. seg_res})
|
||||
end
|
||||
|
||||
local req_body, err = core.request.get_body(MAX_REQ_BODY)
|
||||
if err then
|
||||
core.log.error("failed to read request body: ", err)
|
||||
core.response.exit(400, {error_msg = "invalid request body: " .. err})
|
||||
end
|
||||
|
||||
if req_body then
|
||||
local data, err = core.json.decode(req_body)
|
||||
if err then
|
||||
core.log.error("invalid request body: ", req_body, " err: ", err)
|
||||
core.response.exit(400, {error_msg = "invalid request body: " .. err,
|
||||
req_body = req_body})
|
||||
end
|
||||
|
||||
req_body = data
|
||||
end
|
||||
|
||||
local ok, err = core.schema.check(resource.schema, req_body)
|
||||
if ok then
|
||||
core.response.exit(200)
|
||||
end
|
||||
core.response.exit(400, {error_msg = err})
|
||||
end
|
||||
|
||||
|
||||
local function standalone_run()
|
||||
set_ctx_and_check_token()
|
||||
return standalone.run()
|
||||
end
|
||||
|
||||
|
||||
local http_head_route = {
|
||||
paths = [[/apisix/admin]],
|
||||
methods = {"HEAD"},
|
||||
handler = head,
|
||||
}
|
||||
|
||||
|
||||
local uri_route = {
|
||||
http_head_route,
|
||||
{
|
||||
paths = [[/apisix/admin/*]],
|
||||
methods = {"GET", "PUT", "POST", "DELETE", "PATCH"},
|
||||
handler = run,
|
||||
},
|
||||
{
|
||||
paths = [[/apisix/admin/plugins/list]],
|
||||
methods = {"GET"},
|
||||
handler = get_plugins_list,
|
||||
},
|
||||
{
|
||||
paths = [[/apisix/admin/schema/validate/*]],
|
||||
methods = {"POST"},
|
||||
handler = schema_validate,
|
||||
},
|
||||
{
|
||||
paths = reload_event,
|
||||
methods = {"PUT"},
|
||||
handler = post_reload_plugins,
|
||||
},
|
||||
-- Handle methods other than "PUT" on "/plugin/reload" to inform user
|
||||
{
|
||||
paths = reload_event,
|
||||
methods = { "GET", "POST", "DELETE", "PATCH" },
|
||||
handler = unsupported_methods_reload_plugin,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
local standalone_uri_route = {
|
||||
http_head_route,
|
||||
{
|
||||
paths = [[/apisix/admin/configs]],
|
||||
methods = {"GET", "PUT"},
|
||||
handler = standalone_run,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
function _M.init_worker()
|
||||
local local_conf = core.config.local_conf()
|
||||
if not local_conf.apisix or not local_conf.apisix.enable_admin then
|
||||
return
|
||||
end
|
||||
|
||||
local is_yaml_config_provider = local_conf.deployment.config_provider == "yaml"
|
||||
|
||||
if is_yaml_config_provider then
|
||||
router = route.new(standalone_uri_route)
|
||||
standalone.init_worker()
|
||||
else
|
||||
router = route.new(uri_route)
|
||||
end
|
||||
|
||||
-- register reload plugin handler
|
||||
events = require("apisix.events")
|
||||
events:register(reload_plugins, reload_event, "PUT")
|
||||
|
||||
if ngx_worker_id() == 0 then
|
||||
-- check if admin_key is required
|
||||
if local_conf.deployment.admin.admin_key_required == false then
|
||||
core.log.warn("Admin key is bypassed! ",
|
||||
"If you are deploying APISIX in a production environment, ",
|
||||
"please enable `admin_key_required` and set a secure admin key!")
|
||||
end
|
||||
|
||||
if is_yaml_config_provider then -- standalone mode does not need sync to etcd
|
||||
return
|
||||
end
|
||||
|
||||
local ok, err = ngx_timer_at(0, function(premature)
|
||||
if premature then
|
||||
return
|
||||
end
|
||||
|
||||
-- try to reset the /plugins to the current configuration in the admin
|
||||
sync_local_conf_to_etcd(true)
|
||||
end)
|
||||
|
||||
if not ok then
|
||||
error("failed to sync local configure to etcd: " .. err)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function _M.get()
|
||||
return router
|
||||
end
|
||||
|
||||
|
||||
return _M
|
@@ -0,0 +1,66 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local get_routes = require("apisix.router").http_routes
|
||||
local resource = require("apisix.admin.resource")
|
||||
local schema_plugin = require("apisix.admin.plugins").check_schema
|
||||
local type = type
|
||||
local tostring = tostring
|
||||
local ipairs = ipairs
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
local ok, err = schema_plugin(conf.plugins)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function delete_checker(id)
|
||||
local routes, routes_ver = get_routes()
|
||||
if routes_ver and routes then
|
||||
for _, route in ipairs(routes) do
|
||||
if type(route) == "table" and route.value
|
||||
and route.value.plugin_config_id
|
||||
and tostring(route.value.plugin_config_id) == id then
|
||||
return 400, {error_msg = "can not delete this plugin config,"
|
||||
.. " route [" .. route.value.id
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return nil, nil
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "plugin_configs",
|
||||
kind = "plugin config",
|
||||
schema = core.schema.plugin_config,
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"post"},
|
||||
delete_checker = delete_checker
|
||||
})
|
@@ -0,0 +1,83 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local pcall = pcall
|
||||
local require = require
|
||||
local core = require("apisix.core")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local encrypt_conf = require("apisix.plugin").encrypt_conf
|
||||
|
||||
local injected_mark = "injected metadata_schema"
|
||||
|
||||
|
||||
local function validate_plugin(name)
|
||||
local pkg_name = "apisix.plugins." .. name
|
||||
local ok, plugin_object = pcall(require, pkg_name)
|
||||
if ok then
|
||||
return true, plugin_object
|
||||
end
|
||||
|
||||
pkg_name = "apisix.stream.plugins." .. name
|
||||
return pcall(require, pkg_name)
|
||||
end
|
||||
|
||||
|
||||
local function check_conf(plugin_name, conf)
|
||||
if not plugin_name then
|
||||
return nil, {error_msg = "missing plugin name"}
|
||||
end
|
||||
|
||||
local ok, plugin_object = validate_plugin(plugin_name)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid plugin name"}
|
||||
end
|
||||
|
||||
if not plugin_object.metadata_schema then
|
||||
plugin_object.metadata_schema = {
|
||||
type = "object",
|
||||
['$comment'] = injected_mark,
|
||||
properties = {},
|
||||
}
|
||||
end
|
||||
local schema = plugin_object.metadata_schema
|
||||
|
||||
local ok, err
|
||||
if schema['$comment'] == injected_mark
|
||||
-- check_schema is not required. If missing, fallback to check schema directly
|
||||
or not plugin_object.check_schema
|
||||
then
|
||||
ok, err = core.schema.check(schema, conf)
|
||||
else
|
||||
ok, err = plugin_object.check_schema(conf, core.schema.TYPE_METADATA)
|
||||
end
|
||||
|
||||
encrypt_conf(plugin_name, conf, core.schema.TYPE_METADATA)
|
||||
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
return plugin_name
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "plugin_metadata",
|
||||
kind = "plugin_metadata",
|
||||
schema = core.schema.plugin_metadata,
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"post", "patch"}
|
||||
})
|
139
CloudronPackages/APISIX/apisix-source/apisix/admin/plugins.lua
Normal file
139
CloudronPackages/APISIX/apisix-source/apisix/admin/plugins.lua
Normal file
@@ -0,0 +1,139 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local require = require
|
||||
local core = require("apisix.core")
|
||||
local check_schema = require("apisix.plugin").check_schema
|
||||
local ipairs = ipairs
|
||||
local table_sort = table.sort
|
||||
local table_insert = table.insert
|
||||
local get_uri_args = ngx.req.get_uri_args
|
||||
local plugin_get_all = require("apisix.plugin").get_all
|
||||
local plugin_get_http = require("apisix.plugin").get
|
||||
local plugin_get_stream = require("apisix.plugin").get_stream
|
||||
local encrypt_conf = require("apisix.plugin").encrypt_conf
|
||||
local pairs = pairs
|
||||
|
||||
local _M = {}
|
||||
|
||||
|
||||
function _M.check_schema(plugins_conf, schema_type)
|
||||
local ok, err = check_schema(plugins_conf, schema_type, false)
|
||||
if ok then
|
||||
for name, conf in pairs(plugins_conf) do
|
||||
encrypt_conf(name, conf, schema_type)
|
||||
end
|
||||
end
|
||||
return ok, err
|
||||
end
|
||||
|
||||
|
||||
function _M.get(name)
|
||||
local arg = get_uri_args()
|
||||
-- If subsystem is passed inside args then it should be oneOf: http / stream.
|
||||
local subsystem = arg["subsystem"] or "http"
|
||||
if subsystem ~= "http" and subsystem ~= "stream" then
|
||||
return 400, {error_msg = "unsupported subsystem: "..subsystem}
|
||||
end
|
||||
|
||||
-- arg all to be deprecated
|
||||
if (arg and arg["all"] == "true") then
|
||||
core.log.warn("query parameter \"all\" will be deprecated soon.")
|
||||
local http_plugins, stream_plugins = plugin_get_all({
|
||||
version = true,
|
||||
priority = true,
|
||||
schema = true,
|
||||
metadata_schema = true,
|
||||
consumer_schema = true,
|
||||
type = true,
|
||||
scope = true,
|
||||
})
|
||||
|
||||
if arg["subsystem"] == "stream" then
|
||||
return 200, stream_plugins
|
||||
end
|
||||
|
||||
return 200, http_plugins
|
||||
end
|
||||
|
||||
local plugin
|
||||
|
||||
if subsystem == "http" then
|
||||
plugin = plugin_get_http(name)
|
||||
else
|
||||
plugin = plugin_get_stream(name)
|
||||
end
|
||||
|
||||
if not plugin then
|
||||
local err = "plugin not found in subsystem " .. subsystem
|
||||
core.log.warn(err)
|
||||
return 404, {error_msg = err}
|
||||
end
|
||||
|
||||
local json_schema = plugin.schema
|
||||
if arg and arg["schema_type"] == "consumer" then
|
||||
json_schema = plugin.consumer_schema
|
||||
end
|
||||
|
||||
if not json_schema then
|
||||
return 400, {error_msg = "not found schema"}
|
||||
end
|
||||
|
||||
return 200, json_schema
|
||||
end
|
||||
|
||||
|
||||
function _M.get_plugins_list(subsystem)
|
||||
local http_plugins
|
||||
local stream_plugins
|
||||
if subsystem == "http" then
|
||||
http_plugins = core.config.local_conf().plugins
|
||||
else
|
||||
stream_plugins = core.config.local_conf().stream_plugins
|
||||
end
|
||||
|
||||
local priorities = {}
|
||||
local success = {}
|
||||
if http_plugins then
|
||||
for i, name in ipairs(http_plugins) do
|
||||
local plugin = plugin_get_http(name)
|
||||
if plugin and plugin.priority then
|
||||
priorities[name] = plugin.priority
|
||||
table_insert(success, name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if stream_plugins then
|
||||
for i, name in ipairs(stream_plugins) do
|
||||
local plugin = plugin_get_stream(name)
|
||||
if plugin and plugin.priority then
|
||||
priorities[name] = plugin.priority
|
||||
table_insert(success, name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local function cmp(x, y)
|
||||
return priorities[x] > priorities[y]
|
||||
end
|
||||
|
||||
table_sort(success, cmp)
|
||||
return success
|
||||
end
|
||||
|
||||
|
||||
return _M
|
111
CloudronPackages/APISIX/apisix-source/apisix/admin/proto.lua
Normal file
111
CloudronPackages/APISIX/apisix-source/apisix/admin/proto.lua
Normal file
@@ -0,0 +1,111 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local type = type
|
||||
local ipairs = ipairs
|
||||
local core = require("apisix.core")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local get_routes = require("apisix.router").http_routes
|
||||
local get_services = require("apisix.http.service").services
|
||||
local compile_proto = require("apisix.plugins.grpc-transcode.proto").compile_proto
|
||||
local tostring = tostring
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
local ok, err = compile_proto(conf.content)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid content: " .. err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function check_proto_used(plugins, deleting, ptype, pid)
|
||||
|
||||
--core.log.info("check_proto_used plugins: ", core.json.delay_encode(plugins, true))
|
||||
--core.log.info("check_proto_used deleting: ", deleting)
|
||||
--core.log.info("check_proto_used ptype: ", ptype)
|
||||
--core.log.info("check_proto_used pid: ", pid)
|
||||
|
||||
if plugins then
|
||||
if type(plugins) == "table" and plugins["grpc-transcode"]
|
||||
and plugins["grpc-transcode"].proto_id
|
||||
and tostring(plugins["grpc-transcode"].proto_id) == deleting then
|
||||
return false, {error_msg = "can not delete this proto, "
|
||||
.. ptype .. " [" .. pid
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
local function delete_checker(id)
|
||||
core.log.info("proto delete: ", id)
|
||||
|
||||
local routes, routes_ver = get_routes()
|
||||
|
||||
core.log.info("routes: ", core.json.delay_encode(routes, true))
|
||||
core.log.info("routes_ver: ", routes_ver)
|
||||
|
||||
if routes_ver and routes then
|
||||
for _, route in ipairs(routes) do
|
||||
core.log.info("proto delete route item: ", core.json.delay_encode(route, true))
|
||||
if type(route) == "table" and route.value and route.value.plugins then
|
||||
local ret, err = check_proto_used(route.value.plugins, id, "route",route.value.id)
|
||||
if not ret then
|
||||
return 400, err
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
core.log.info("proto delete route ref check pass: ", id)
|
||||
|
||||
local services, services_ver = get_services()
|
||||
|
||||
core.log.info("services: ", core.json.delay_encode(services, true))
|
||||
core.log.info("services_ver: ", services_ver)
|
||||
|
||||
if services_ver and services then
|
||||
for _, service in ipairs(services) do
|
||||
if type(service) == "table" and service.value and service.value.plugins then
|
||||
local ret, err = check_proto_used(service.value.plugins, id,
|
||||
"service", service.value.id)
|
||||
if not ret then
|
||||
return 400, err
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
core.log.info("proto delete service ref check pass: ", id)
|
||||
|
||||
return nil, nil
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "protos",
|
||||
kind = "proto",
|
||||
schema = core.schema.proto,
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"patch"},
|
||||
delete_checker = delete_checker
|
||||
})
|
468
CloudronPackages/APISIX/apisix-source/apisix/admin/resource.lua
Normal file
468
CloudronPackages/APISIX/apisix-source/apisix/admin/resource.lua
Normal file
@@ -0,0 +1,468 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local utils = require("apisix.admin.utils")
|
||||
local apisix_ssl = require("apisix.ssl")
|
||||
local apisix_consumer = require("apisix.consumer")
|
||||
local setmetatable = setmetatable
|
||||
local tostring = tostring
|
||||
local ipairs = ipairs
|
||||
local type = type
|
||||
|
||||
|
||||
local _M = {
|
||||
list_filter_fields = {},
|
||||
}
|
||||
local mt = {
|
||||
__index = _M
|
||||
}
|
||||
|
||||
|
||||
local no_id_res = {
|
||||
consumers = true,
|
||||
plugin_metadata = true
|
||||
}
|
||||
|
||||
|
||||
local function split_typ_and_id(id, sub_path)
|
||||
local uri_segs = core.utils.split_uri(sub_path)
|
||||
local typ = id
|
||||
local id = nil
|
||||
if #uri_segs > 0 then
|
||||
id = uri_segs[1]
|
||||
end
|
||||
return typ, id
|
||||
end
|
||||
|
||||
|
||||
local function check_forbidden_properties(conf, forbidden_properties)
|
||||
local not_allow_properties = "the property is forbidden: "
|
||||
|
||||
if conf then
|
||||
for _, v in ipairs(forbidden_properties) do
|
||||
if conf[v] then
|
||||
return not_allow_properties .. " " .. v
|
||||
end
|
||||
end
|
||||
|
||||
if conf.upstream then
|
||||
for _, v in ipairs(forbidden_properties) do
|
||||
if conf.upstream[v] then
|
||||
return not_allow_properties .. " upstream." .. v
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if conf.plugins then
|
||||
for _, v in ipairs(forbidden_properties) do
|
||||
if conf.plugins[v] then
|
||||
return not_allow_properties .. " plugins." .. v
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
|
||||
function _M:check_conf(id, conf, need_id, typ, allow_time)
|
||||
if self.name == "secrets" then
|
||||
id = typ .. "/" .. id
|
||||
end
|
||||
-- check if missing configurations
|
||||
if not conf then
|
||||
return nil, {error_msg = "missing configurations"}
|
||||
end
|
||||
|
||||
-- check id if need id
|
||||
if not no_id_res[self.name] then
|
||||
id = id or conf.id
|
||||
if need_id and not id then
|
||||
return nil, {error_msg = "missing ".. self.kind .. " id"}
|
||||
end
|
||||
|
||||
if not need_id and id then
|
||||
return nil, {error_msg = "wrong ".. self.kind .. " id, do not need it"}
|
||||
end
|
||||
|
||||
if need_id and conf.id and tostring(conf.id) ~= tostring(id) then
|
||||
return nil, {error_msg = "wrong ".. self.kind .. " id"}
|
||||
end
|
||||
|
||||
conf.id = id
|
||||
end
|
||||
|
||||
-- check create time and update time
|
||||
if not allow_time then
|
||||
local forbidden_properties = {"create_time", "update_time"}
|
||||
local err = check_forbidden_properties(conf, forbidden_properties)
|
||||
if err then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
end
|
||||
|
||||
core.log.info("conf : ", core.json.delay_encode(conf))
|
||||
|
||||
-- check the resource own rules
|
||||
if self.name ~= "secrets" then
|
||||
core.log.info("schema: ", core.json.delay_encode(self.schema))
|
||||
end
|
||||
|
||||
local ok, err = self.checker(id, conf, need_id, self.schema, typ)
|
||||
|
||||
if not ok then
|
||||
return ok, err
|
||||
else
|
||||
if no_id_res[self.name] then
|
||||
return ok
|
||||
else
|
||||
return need_id and id or true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function _M:get(id, conf, sub_path)
|
||||
if core.table.array_find(self.unsupported_methods, "get") then
|
||||
return 405, {error_msg = "not supported `GET` method for " .. self.kind}
|
||||
end
|
||||
|
||||
local key = "/" .. self.name
|
||||
local typ = nil
|
||||
if self.name == "secrets" then
|
||||
key = key .. "/"
|
||||
typ, id = split_typ_and_id(id, sub_path)
|
||||
end
|
||||
|
||||
if id then
|
||||
if self.name == "secrets" then
|
||||
key = key .. typ
|
||||
end
|
||||
key = key .. "/" .. id
|
||||
end
|
||||
|
||||
-- some resources(consumers) have sub resources(credentials),
|
||||
-- the key format of sub resources will differ from the main resource
|
||||
if self.get_resource_etcd_key then
|
||||
key = self.get_resource_etcd_key(id, conf, sub_path)
|
||||
end
|
||||
|
||||
local res, err = core.etcd.get(key, not id)
|
||||
if not res then
|
||||
core.log.error("failed to get ", self.kind, "[", key, "] from etcd: ", err)
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
|
||||
if self.name == "ssls" then
|
||||
-- not return private key for security
|
||||
if res.body and res.body.node and res.body.node.value then
|
||||
res.body.node.value.key = nil
|
||||
end
|
||||
end
|
||||
|
||||
-- consumers etcd range response will include credentials, so need to filter out them
|
||||
if self.name == "consumers" and res.body.list then
|
||||
res.body.list = apisix_consumer.filter_consumers_list(res.body.list)
|
||||
res.body.total = #res.body.list
|
||||
end
|
||||
|
||||
utils.fix_count(res.body, id)
|
||||
return res.status, res.body
|
||||
end
|
||||
|
||||
|
||||
function _M:post(id, conf, sub_path, args)
|
||||
if core.table.array_find(self.unsupported_methods, "post") then
|
||||
return 405, {error_msg = "not supported `POST` method for " .. self.kind}
|
||||
end
|
||||
|
||||
local id, err = self:check_conf(id, conf, false)
|
||||
if not id then
|
||||
return 400, err
|
||||
end
|
||||
|
||||
if self.name == "ssls" then
|
||||
-- encrypt private key
|
||||
conf.key = apisix_ssl.aes_encrypt_pkey(conf.key)
|
||||
|
||||
if conf.keys then
|
||||
for i = 1, #conf.keys do
|
||||
conf.keys[i] = apisix_ssl.aes_encrypt_pkey(conf.keys[i])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local key = "/" .. self.name
|
||||
utils.inject_timestamp(conf)
|
||||
|
||||
local ttl = nil
|
||||
if args then
|
||||
ttl = args.ttl
|
||||
end
|
||||
|
||||
local res, err = core.etcd.push(key, conf, ttl)
|
||||
if not res then
|
||||
core.log.error("failed to post ", self.kind, "[", key, "] to etcd: ", err)
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
|
||||
return res.status, res.body
|
||||
end
|
||||
|
||||
|
||||
function _M:put(id, conf, sub_path, args)
|
||||
if core.table.array_find(self.unsupported_methods, "put") then
|
||||
return 405, {error_msg = "not supported `PUT` method for " .. self.kind}
|
||||
end
|
||||
|
||||
local key = "/" .. self.name
|
||||
local typ = nil
|
||||
if self.name == "secrets" then
|
||||
typ, id = split_typ_and_id(id, sub_path)
|
||||
key = key .. "/" .. typ
|
||||
end
|
||||
|
||||
local need_id = not no_id_res[self.name]
|
||||
local ok, err = self:check_conf(id, conf, need_id, typ)
|
||||
if not ok then
|
||||
return 400, err
|
||||
end
|
||||
|
||||
if self.name ~= "secrets" then
|
||||
id = ok
|
||||
end
|
||||
|
||||
if self.name == "ssls" then
|
||||
-- encrypt private key
|
||||
conf.key = apisix_ssl.aes_encrypt_pkey(conf.key)
|
||||
|
||||
if conf.keys then
|
||||
for i = 1, #conf.keys do
|
||||
conf.keys[i] = apisix_ssl.aes_encrypt_pkey(conf.keys[i])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
key = key .. "/" .. id
|
||||
|
||||
if self.get_resource_etcd_key then
|
||||
key = self.get_resource_etcd_key(id, conf, sub_path, args)
|
||||
end
|
||||
|
||||
if self.name == "credentials" then
|
||||
local consumer_key = apisix_consumer.get_consumer_key_from_credential_key(key)
|
||||
local res, err = core.etcd.get(consumer_key, false)
|
||||
if not res then
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
if res.status == 404 then
|
||||
return res.status, {error_msg = "consumer not found"}
|
||||
end
|
||||
if res.status ~= 200 then
|
||||
core.log.debug("failed to get consumer for the credential, credential key: ", key,
|
||||
", consumer key: ", consumer_key, ", res.status: ", res.status)
|
||||
return res.status, {error_msg = "failed to get the consumer"}
|
||||
end
|
||||
end
|
||||
|
||||
if self.name ~= "plugin_metadata" then
|
||||
local ok, err = utils.inject_conf_with_prev_conf(self.kind, key, conf)
|
||||
if not ok then
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
else
|
||||
conf.id = id
|
||||
end
|
||||
|
||||
local ttl = nil
|
||||
if args then
|
||||
ttl = args.ttl
|
||||
end
|
||||
|
||||
local res, err = core.etcd.set(key, conf, ttl)
|
||||
if not res then
|
||||
core.log.error("failed to put ", self.kind, "[", key, "] to etcd: ", err)
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
|
||||
return res.status, res.body
|
||||
end
|
||||
|
||||
-- Keep the unused conf to make the args list consistent with other methods
|
||||
function _M:delete(id, conf, sub_path, uri_args)
|
||||
if core.table.array_find(self.unsupported_methods, "delete") then
|
||||
return 405, {error_msg = "not supported `DELETE` method for " .. self.kind}
|
||||
end
|
||||
|
||||
local key = "/" .. self.name
|
||||
local typ = nil
|
||||
if self.name == "secrets" then
|
||||
typ, id = split_typ_and_id(id, sub_path)
|
||||
end
|
||||
|
||||
if not id then
|
||||
return 400, {error_msg = "missing " .. self.kind .. " id"}
|
||||
end
|
||||
|
||||
-- core.log.error("failed to delete ", self.kind, "[", key, "] in etcd: ", err)
|
||||
|
||||
if self.name == "secrets" then
|
||||
key = key .. "/" .. typ
|
||||
end
|
||||
|
||||
key = key .. "/" .. id
|
||||
|
||||
if self.get_resource_etcd_key then
|
||||
key = self.get_resource_etcd_key(id, conf, sub_path, uri_args)
|
||||
end
|
||||
|
||||
if self.delete_checker and uri_args.force ~= "true" then
|
||||
local code, err = self.delete_checker(id)
|
||||
if err then
|
||||
return code, err
|
||||
end
|
||||
end
|
||||
|
||||
if self.name == "consumers" then
|
||||
local res, err = core.etcd.rmdir(key .. "/credentials/")
|
||||
if not res then
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
end
|
||||
|
||||
local res, err = core.etcd.delete(key)
|
||||
if not res then
|
||||
core.log.error("failed to delete ", self.kind, "[", key, "] in etcd: ", err)
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
|
||||
return res.status, res.body
|
||||
end
|
||||
|
||||
|
||||
function _M:patch(id, conf, sub_path, args)
|
||||
if core.table.array_find(self.unsupported_methods, "patch") then
|
||||
return 405, {error_msg = "not supported `PATCH` method for " .. self.kind}
|
||||
end
|
||||
|
||||
local key = "/" .. self.name
|
||||
local typ = nil
|
||||
if self.name == "secrets" then
|
||||
local uri_segs = core.utils.split_uri(sub_path)
|
||||
if #uri_segs < 1 then
|
||||
return 400, {error_msg = "no secret id"}
|
||||
end
|
||||
typ = id
|
||||
id = uri_segs[1]
|
||||
sub_path = core.table.concat(uri_segs, "/", 2)
|
||||
end
|
||||
|
||||
if not id then
|
||||
return 400, {error_msg = "missing " .. self.kind .. " id"}
|
||||
end
|
||||
|
||||
if self.name == "secrets" then
|
||||
key = key .. "/" .. typ
|
||||
end
|
||||
|
||||
key = key .. "/" .. id
|
||||
|
||||
if conf == nil then
|
||||
return 400, {error_msg = "missing new configuration"}
|
||||
end
|
||||
|
||||
if not sub_path or sub_path == "" then
|
||||
if type(conf) ~= "table" then
|
||||
return 400, {error_msg = "invalid configuration"}
|
||||
end
|
||||
end
|
||||
|
||||
local res_old, err = core.etcd.get(key)
|
||||
if not res_old then
|
||||
core.log.error("failed to get ", self.kind, " [", key, "] in etcd: ", err)
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
|
||||
if res_old.status ~= 200 then
|
||||
return res_old.status, res_old.body
|
||||
end
|
||||
core.log.info("key: ", key, " old value: ", core.json.delay_encode(res_old, true))
|
||||
|
||||
local node_value = res_old.body.node.value
|
||||
local modified_index = res_old.body.node.modifiedIndex
|
||||
|
||||
if sub_path and sub_path ~= "" then
|
||||
if self.name == "ssls" then
|
||||
if sub_path == "key" then
|
||||
conf = apisix_ssl.aes_encrypt_pkey(conf)
|
||||
elseif sub_path == "keys" then
|
||||
for i = 1, #conf do
|
||||
conf[i] = apisix_ssl.aes_encrypt_pkey(conf[i])
|
||||
end
|
||||
end
|
||||
end
|
||||
local code, err, node_val = core.table.patch(node_value, sub_path, conf)
|
||||
node_value = node_val
|
||||
if code then
|
||||
return code, {error_msg = err}
|
||||
end
|
||||
utils.inject_timestamp(node_value, nil, true)
|
||||
else
|
||||
if self.name == "ssls" then
|
||||
if conf.key then
|
||||
conf.key = apisix_ssl.aes_encrypt_pkey(conf.key)
|
||||
end
|
||||
|
||||
if conf.keys then
|
||||
for i = 1, #conf.keys do
|
||||
conf.keys[i] = apisix_ssl.aes_encrypt_pkey(conf.keys[i])
|
||||
end
|
||||
end
|
||||
end
|
||||
node_value = core.table.merge(node_value, conf)
|
||||
utils.inject_timestamp(node_value, nil, conf)
|
||||
end
|
||||
|
||||
core.log.info("new conf: ", core.json.delay_encode(node_value, true))
|
||||
|
||||
local ok, err = self:check_conf(id, node_value, true, typ, true)
|
||||
if not ok then
|
||||
return 400, err
|
||||
end
|
||||
|
||||
local ttl = nil
|
||||
if args then
|
||||
ttl = args.ttl
|
||||
end
|
||||
|
||||
local res, err = core.etcd.atomic_set(key, node_value, ttl, modified_index)
|
||||
if not res then
|
||||
core.log.error("failed to set new ", self.kind, "[", key, "] to etcd: ", err)
|
||||
return 503, {error_msg = err}
|
||||
end
|
||||
|
||||
return res.status, res.body
|
||||
end
|
||||
|
||||
|
||||
function _M.new(opt)
|
||||
return setmetatable(opt, mt)
|
||||
end
|
||||
|
||||
|
||||
return _M
|
184
CloudronPackages/APISIX/apisix-source/apisix/admin/routes.lua
Normal file
184
CloudronPackages/APISIX/apisix-source/apisix/admin/routes.lua
Normal file
@@ -0,0 +1,184 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local expr = require("resty.expr.v1")
|
||||
local core = require("apisix.core")
|
||||
local apisix_upstream = require("apisix.upstream")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local schema_plugin = require("apisix.admin.plugins").check_schema
|
||||
local type = type
|
||||
local loadstring = loadstring
|
||||
local ipairs = ipairs
|
||||
local jp = require("jsonpath")
|
||||
|
||||
local function validate_post_arg(node)
|
||||
if type(node) ~= "table" then
|
||||
return true
|
||||
end
|
||||
|
||||
-- Handle post_arg conditions
|
||||
if #node >= 3 and type(node[1]) == "string" and node[1]:find("^post_arg%.") then
|
||||
local key = node[1]
|
||||
local json_path = "$." .. key:sub(11) -- Remove "post_arg." prefix
|
||||
local _, err = jp.parse(json_path)
|
||||
if err then
|
||||
return false, err
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
for _, child in ipairs(node) do
|
||||
local ok, err = validate_post_arg(child)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
if conf.host and conf.hosts then
|
||||
return nil, {error_msg = "only one of host or hosts is allowed"}
|
||||
end
|
||||
|
||||
if conf.remote_addr and conf.remote_addrs then
|
||||
return nil, {error_msg = "only one of remote_addr or remote_addrs is "
|
||||
.. "allowed"}
|
||||
end
|
||||
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
local upstream_conf = conf.upstream
|
||||
if upstream_conf then
|
||||
local ok, err = apisix_upstream.check_upstream_conf(upstream_conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
end
|
||||
|
||||
local upstream_id = conf.upstream_id
|
||||
if upstream_id then
|
||||
local key = "/upstreams/" .. upstream_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch upstream info by "
|
||||
.. "upstream id [" .. upstream_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch upstream info by "
|
||||
.. "upstream id [" .. upstream_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
local service_id = conf.service_id
|
||||
if service_id then
|
||||
local key = "/services/" .. service_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch service info by "
|
||||
.. "service id [" .. service_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch service info by "
|
||||
.. "service id [" .. service_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
local plugin_config_id = conf.plugin_config_id
|
||||
if plugin_config_id then
|
||||
local key = "/plugin_configs/" .. plugin_config_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch plugin config info by "
|
||||
.. "plugin config id [" .. plugin_config_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch plugin config info by "
|
||||
.. "plugin config id [" .. plugin_config_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
if conf.plugins then
|
||||
local ok, err = schema_plugin(conf.plugins)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
end
|
||||
|
||||
if conf.vars then
|
||||
ok, err = expr.new(conf.vars)
|
||||
if not ok then
|
||||
return nil, {error_msg = "failed to validate the 'vars' expression: " .. err}
|
||||
end
|
||||
end
|
||||
|
||||
ok, err = validate_post_arg(conf.vars)
|
||||
if not ok then
|
||||
return nil, {error_msg = "failed to validate the 'vars' expression: " ..
|
||||
err}
|
||||
end
|
||||
|
||||
if conf.filter_func then
|
||||
local func, err = loadstring("return " .. conf.filter_func)
|
||||
if not func then
|
||||
return nil, {error_msg = "failed to load 'filter_func' string: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if type(func()) ~= "function" then
|
||||
return nil, {error_msg = "'filter_func' should be a function"}
|
||||
end
|
||||
end
|
||||
|
||||
if conf.script then
|
||||
local obj, err = loadstring(conf.script)
|
||||
if not obj then
|
||||
return nil, {error_msg = "failed to load 'script' string: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if type(obj()) ~= "table" then
|
||||
return nil, {error_msg = "'script' should be a Lua object"}
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "routes",
|
||||
kind = "route",
|
||||
schema = core.schema.route,
|
||||
checker = check_conf,
|
||||
list_filter_fields = {
|
||||
service_id = true,
|
||||
upstream_id = true,
|
||||
},
|
||||
})
|
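A standalone sketch of the mapping validate_post_arg performs on a vars operand such as "post_arg.user.name": the post_arg prefix is stripped and the remainder is validated as a JSONPath over the request body. The real code delegates validation to the jsonpath module; this illustration only shows the prefix-to-path conversion, and to_json_path is a name made up for the sketch.

local function to_json_path(operand)
    if type(operand) ~= "string" or not operand:find("^post_arg%.") then
        return nil  -- not a post_arg condition, nothing to validate
    end
    -- strip the "post_arg." prefix and address the body key with JSONPath
    local body_key = operand:gsub("^post_arg%.", "", 1)
    return "$." .. body_key
end

print(to_json_path("post_arg.user.name"))  --> $.user.name
print(to_json_path("arg_name"))            --> nil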
@@ -0,0 +1,35 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
|
||||
local _M = {
|
||||
version = 0.1,
|
||||
}
|
||||
|
||||
|
||||
function _M.get(name)
|
||||
local json_schema = core.schema[name]
|
||||
core.log.info("schema: ", core.json.delay_encode(core.schema, true))
|
||||
if not json_schema then
|
||||
return 400, {error_msg = "not found schema: " .. name}
|
||||
end
|
||||
|
||||
return 200, json_schema
|
||||
end
|
||||
|
||||
|
||||
return _M
|
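A tiny standalone sketch of the (status, body) return convention this module and its siblings follow; a local table stands in for core.schema here, purely for illustration.

local schemas = {
    route = {type = "object"},
    upstream = {type = "object"},
}

local function get_schema(name)
    local json_schema = schemas[name]
    if not json_schema then
        return 400, {error_msg = "not found schema: " .. name}
    end
    return 200, json_schema
end

local code, body = get_schema("route")
print(code)                  --> 200
code, body = get_schema("nope")
print(code, body.error_msg)  --> 400   not found schema: nope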
@@ -0,0 +1,45 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local require = require
|
||||
|
||||
local core = require("apisix.core")
|
||||
local resource = require("apisix.admin.resource")
|
||||
|
||||
local pcall = pcall
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema, typ)
|
||||
local ok, secret_manager = pcall(require, "apisix.secret." .. typ)
|
||||
if not ok then
|
||||
return false, {error_msg = "invalid secret manager: " .. typ}
|
||||
end
|
||||
|
||||
local ok, err = core.schema.check(secret_manager.schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "secrets",
|
||||
kind = "secret",
|
||||
checker = check_conf,
|
||||
unsupported_methods = {"post"}
|
||||
})
|
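A standalone sketch of the dispatch idea in the secrets check_conf above: the secret manager module is resolved from the type segment under pcall, so an unknown backend turns into a clean validation error instead of a hard failure. The managers table and load_manager are stand-ins invented for this sketch; the real code requires apisix.secret.<type> modules.

local managers = {
    vault = {schema = {type = "object"}},  -- pretend only "vault" is installed
}

local function load_manager(typ)
    local ok, mod = pcall(function ()
        local m = managers[typ]
        assert(m, "module 'secret." .. typ .. "' not found")
        return m
    end)
    if not ok then
        return nil, "invalid secret manager: " .. typ
    end
    return mod
end

print(load_manager("vault"))  --> table: 0x...
print(load_manager("xyz"))    --> nil   invalid secret manager: xyz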
CloudronPackages/APISIX/apisix-source/apisix/admin/services.lua (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local get_routes = require("apisix.router").http_routes
|
||||
local get_stream_routes = require("apisix.router").stream_routes
|
||||
local apisix_upstream = require("apisix.upstream")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local schema_plugin = require("apisix.admin.plugins").check_schema
|
||||
local tostring = tostring
|
||||
local ipairs = ipairs
|
||||
local type = type
|
||||
local loadstring = loadstring
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
if need_id and not id then
|
||||
return nil, {error_msg = "wrong type of service id"}
|
||||
end
|
||||
|
||||
local upstream_conf = conf.upstream
|
||||
if upstream_conf then
|
||||
local ok, err = apisix_upstream.check_upstream_conf(upstream_conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
end
|
||||
|
||||
local upstream_id = conf.upstream_id
|
||||
if upstream_id then
|
||||
local key = "/upstreams/" .. upstream_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch upstream info by "
|
||||
.. "upstream id [" .. upstream_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch upstream info by "
|
||||
.. "upstream id [" .. upstream_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
if conf.plugins then
|
||||
local ok, err = schema_plugin(conf.plugins)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
end
|
||||
|
||||
if conf.script then
|
||||
local obj, err = loadstring(conf.script)
|
||||
if not obj then
|
||||
return nil, {error_msg = "failed to load 'script' string: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if type(obj()) ~= "table" then
|
||||
return nil, {error_msg = "'script' should be a Lua object"}
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function delete_checker(id)
|
||||
local routes, routes_ver = get_routes()
|
||||
core.log.info("routes: ", core.json.delay_encode(routes, true))
|
||||
core.log.info("routes_ver: ", routes_ver)
|
||||
if routes_ver and routes then
|
||||
for _, route in ipairs(routes) do
|
||||
if type(route) == "table" and route.value
|
||||
and route.value.service_id
|
||||
and tostring(route.value.service_id) == id then
|
||||
return 400, {error_msg = "can not delete this service directly,"
|
||||
.. " route [" .. route.value.id
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local stream_routes, stream_routes_ver = get_stream_routes()
|
||||
core.log.info("stream_routes: ", core.json.delay_encode(stream_routes, true))
|
||||
core.log.info("stream_routes_ver: ", stream_routes_ver)
|
||||
if stream_routes_ver and stream_routes then
|
||||
for _, route in ipairs(stream_routes) do
|
||||
if type(route) == "table" and route.value
|
||||
and route.value.service_id
|
||||
and tostring(route.value.service_id) == id then
|
||||
return 400, {error_msg = "can not delete this service directly,"
|
||||
.. " stream_route [" .. route.value.id
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return nil, nil
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "services",
|
||||
kind = "service",
|
||||
schema = core.schema.service,
|
||||
checker = check_conf,
|
||||
delete_checker = delete_checker
|
||||
})
|
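A standalone sketch of the delete guard above: before a service is removed, the cached routes are scanned and the request is refused with 400 if any route still references the service. Plain Lua, with an in-memory list standing in for apisix.router.

local function service_in_use(routes, service_id)
    for _, route in ipairs(routes or {}) do
        if route.value and route.value.service_id
           and tostring(route.value.service_id) == tostring(service_id) then
            return route.value.id  -- first route still pointing at the service
        end
    end
    return nil
end

local routes = {
    {value = {id = "r1", service_id = "s1"}},
    {value = {id = "r2"}},
}

local route_id = service_in_use(routes, "s1")
if route_id then
    print(400, "can not delete this service directly, route ["
               .. route_id .. "] is still using it now")
end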
CloudronPackages/APISIX/apisix-source/apisix/admin/ssl.lua (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local apisix_ssl = require("apisix.ssl")
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = apisix_ssl.check_ssl_conf(false, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
|
||||
return need_id and id or true
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "ssls",
|
||||
kind = "ssl",
|
||||
schema = core.schema.ssl,
|
||||
checker = check_conf
|
||||
})
|
@@ -0,0 +1,339 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local type = type
|
||||
local pairs = pairs
|
||||
local ipairs = ipairs
|
||||
local str_lower = string.lower
|
||||
local ngx = ngx
|
||||
local get_method = ngx.req.get_method
|
||||
local shared_dict = ngx.shared["standalone-config"]
|
||||
local table_insert = table.insert
|
||||
local table_new = require("table.new")
|
||||
local yaml = require("lyaml")
|
||||
local events = require("apisix.events")
|
||||
local core = require("apisix.core")
|
||||
local config_yaml = require("apisix.core.config_yaml")
|
||||
local check_schema = require("apisix.core.schema").check
|
||||
local tbl_deepcopy = require("apisix.core.table").deepcopy
|
||||
|
||||
local EVENT_UPDATE = "standalone-api-configuration-update"
|
||||
|
||||
local _M = {}
|
||||
|
||||
local function check_duplicate(item, key, id_set)
|
||||
local identifier, identifier_type
|
||||
if key == "consumers" then
|
||||
identifier = item.id or item.username
|
||||
identifier_type = item.id and "credential id" or "username"
|
||||
else
|
||||
identifier = item.id
|
||||
identifier_type = "id"
|
||||
end
|
||||
|
||||
if id_set[identifier] then
|
||||
return true, "found duplicate " .. identifier_type .. " " .. identifier .. " in " .. key
|
||||
end
|
||||
id_set[identifier] = true
|
||||
return false
|
||||
end
|
||||
|
||||
local function get_config()
|
||||
local config = shared_dict:get("config")
|
||||
if not config then
|
||||
return nil, "not found"
|
||||
end
|
||||
|
||||
local err
|
||||
config, err = core.json.decode(config)
|
||||
if not config then
|
||||
return nil, "failed to decode json: " .. err
|
||||
end
|
||||
return config
|
||||
end
|
||||
|
||||
|
||||
local function update_and_broadcast_config(apisix_yaml)
|
||||
local raw, err = core.json.encode(apisix_yaml)
|
||||
if not raw then
|
||||
core.log.error("failed to encode json: ", err)
|
||||
return nil, "failed to encode json: " .. err
|
||||
end
|
||||
|
||||
if shared_dict then
|
||||
-- the worker that handles Admin API calls is responsible for writing the shared dict
|
||||
local ok, err = shared_dict:set("config", raw)
|
||||
if not ok then
|
||||
return nil, "failed to save config to shared dict: " .. err
|
||||
end
|
||||
core.log.info("standalone config updated: ", raw)
|
||||
else
|
||||
core.log.crit(config_yaml.ERR_NO_SHARED_DICT)
|
||||
end
|
||||
return events:post(EVENT_UPDATE, EVENT_UPDATE)
|
||||
end
|
||||
|
||||
|
||||
local function update(ctx)
|
||||
local content_type = core.request.header(nil, "content-type") or "application/json"
|
||||
|
||||
-- read the request body
|
||||
local req_body, err = core.request.get_body()
|
||||
if err then
|
||||
return core.response.exit(400, {error_msg = "invalid request body: " .. err})
|
||||
end
|
||||
|
||||
if not req_body or #req_body <= 0 then
|
||||
return core.response.exit(400, {error_msg = "invalid request body: empty request body"})
|
||||
end
|
||||
|
||||
-- parse the request body
|
||||
local data
|
||||
if core.string.has_prefix(content_type, "application/yaml") then
|
||||
data = yaml.load(req_body, { all = false })
|
||||
if not data or type(data) ~= "table" then
|
||||
err = "invalid yaml request body"
|
||||
end
|
||||
else
|
||||
data, err = core.json.decode(req_body)
|
||||
end
|
||||
if err then
|
||||
core.log.error("invalid request body: ", req_body, " err: ", err)
|
||||
core.response.exit(400, {error_msg = "invalid request body: " .. err})
|
||||
end
|
||||
req_body = data
|
||||
|
||||
local config, err = get_config()
|
||||
if not config then
|
||||
if err ~= "not found" then
|
||||
core.log.error("failed to get config from shared dict: ", err)
|
||||
return core.response.exit(500, {
|
||||
error_msg = "failed to get config from shared dict: " .. err
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
-- check input by jsonschema
|
||||
local apisix_yaml = {}
|
||||
local created_objs = config_yaml.fetch_all_created_obj()
|
||||
|
||||
for key, obj in pairs(created_objs) do
|
||||
local conf_version_key = obj.conf_version_key
|
||||
local conf_version = config and config[conf_version_key] or obj.conf_version
|
||||
local items = req_body[key]
|
||||
local new_conf_version = req_body[conf_version_key]
|
||||
if not new_conf_version then
|
||||
new_conf_version = conf_version + 1
|
||||
else
|
||||
if type(new_conf_version) ~= "number" then
|
||||
return core.response.exit(400, {
|
||||
error_msg = conf_version_key .. " must be a number",
|
||||
})
|
||||
end
|
||||
if new_conf_version < conf_version then
|
||||
return core.response.exit(400, {
|
||||
error_msg = conf_version_key ..
|
||||
" must be greater than or equal to (" .. conf_version .. ")",
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
apisix_yaml[conf_version_key] = new_conf_version
|
||||
if new_conf_version == conf_version then
|
||||
apisix_yaml[key] = config and config[key]
|
||||
elseif items and #items > 0 then
|
||||
apisix_yaml[key] = table_new(#items, 0)
|
||||
local item_schema = obj.item_schema
|
||||
local item_checker = obj.checker
|
||||
local id_set = {}
|
||||
|
||||
for index, item in ipairs(items) do
|
||||
local item_temp = tbl_deepcopy(item)
|
||||
local valid, err
|
||||
-- need to recover to 0-based subscript
|
||||
local err_prefix = "invalid " .. key .. " at index " .. (index - 1) .. ", err: "
|
||||
if item_schema then
|
||||
valid, err = check_schema(obj.item_schema, item_temp)
|
||||
if not valid then
|
||||
core.log.error(err_prefix, err)
|
||||
core.response.exit(400, {error_msg = err_prefix .. err})
|
||||
end
|
||||
end
|
||||
if item_checker then
|
||||
local item_checker_key
|
||||
if item.id then
|
||||
-- credential need to check key
|
||||
item_checker_key = "/" .. key .. "/" .. item_temp.id
|
||||
end
|
||||
valid, err = item_checker(item_temp, item_checker_key)
|
||||
if not valid then
|
||||
core.log.error(err_prefix, err)
|
||||
core.response.exit(400, {error_msg = err_prefix .. err})
|
||||
end
|
||||
end
|
||||
-- prevent updating resource with the same ID
|
||||
-- (e.g., service ID or other resource IDs) in a single request
|
||||
local duplicated, err = check_duplicate(item, key, id_set)
|
||||
if duplicated then
|
||||
core.log.error(err)
|
||||
core.response.exit(400, { error_msg = err })
|
||||
end
|
||||
|
||||
table_insert(apisix_yaml[key], item)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local ok, err = update_and_broadcast_config(apisix_yaml)
|
||||
if not ok then
|
||||
core.response.exit(500, err)
|
||||
end
|
||||
|
||||
return core.response.exit(202)
|
||||
end
|
||||
|
||||
|
||||
local function get(ctx)
|
||||
local accept = core.request.header(nil, "accept") or "application/json"
|
||||
local want_yaml_resp = core.string.has_prefix(accept, "application/yaml")
|
||||
|
||||
local config, err = get_config()
|
||||
if not config then
|
||||
if err ~= "not found" then
|
||||
core.log.error("failed to get config from shared dict: ", err)
|
||||
return core.response.exit(500, {
|
||||
error_msg = "failed to get config from shared dict: " .. err
|
||||
})
|
||||
end
|
||||
config = {}
|
||||
local created_objs = config_yaml.fetch_all_created_obj()
|
||||
for _, obj in pairs(created_objs) do
|
||||
config[obj.conf_version_key] = obj.conf_version
|
||||
end
|
||||
end
|
||||
|
||||
local resp, err
|
||||
if want_yaml_resp then
|
||||
core.response.set_header("Content-Type", "application/yaml")
|
||||
resp = yaml.dump({ config })
|
||||
if not resp then
|
||||
err = "failed to encode yaml"
|
||||
end
|
||||
|
||||
-- remove the first line "---" and the last line "..."
|
||||
-- because the yaml.dump() will add them for multiple documents
|
||||
local m = ngx.re.match(resp, [[^---\s*([\s\S]*?)\s*\.\.\.\s*$]], "jo")
|
||||
if m and m[1] then
|
||||
resp = m[1]
|
||||
end
|
||||
else
|
||||
core.response.set_header("Content-Type", "application/json")
|
||||
resp, err = core.json.encode(config, true)
|
||||
if not resp then
|
||||
err = "failed to encode json: " .. err
|
||||
end
|
||||
end
|
||||
|
||||
if not resp then
|
||||
return core.response.exit(500, {error_msg = err})
|
||||
end
|
||||
return core.response.exit(200, resp)
|
||||
end
|
||||
|
||||
|
||||
function _M.run()
|
||||
local ctx = ngx.ctx.api_ctx
|
||||
local method = str_lower(get_method())
|
||||
if method == "put" then
|
||||
return update(ctx)
|
||||
else
|
||||
return get(ctx)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local patch_schema
|
||||
do
|
||||
local resource_schema = {
|
||||
"proto",
|
||||
"global_rule",
|
||||
"route",
|
||||
"service",
|
||||
"upstream",
|
||||
"consumer",
|
||||
"consumer_group",
|
||||
"credential",
|
||||
"ssl",
|
||||
"plugin_config",
|
||||
}
|
||||
local function attach_modifiedIndex_schema(name)
|
||||
local schema = core.schema[name]
|
||||
if not schema then
|
||||
core.log.error("schema for ", name, " not found")
|
||||
return
|
||||
end
|
||||
if schema.properties and not schema.properties.modifiedIndex then
|
||||
schema.properties.modifiedIndex = {
|
||||
type = "integer",
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
local function patch_credential_schema()
|
||||
local credential_schema = core.schema["credential"]
|
||||
if credential_schema and credential_schema.properties then
|
||||
credential_schema.properties.id = {
|
||||
type = "string",
|
||||
minLength = 15,
|
||||
maxLength = 128,
|
||||
pattern = [[^[a-zA-Z0-9-_]+/credentials/[a-zA-Z0-9-_.]+$]],
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
function patch_schema()
|
||||
-- attach modifiedIndex schema to all resource schemas
|
||||
for _, name in ipairs(resource_schema) do
|
||||
attach_modifiedIndex_schema(name)
|
||||
end
|
||||
-- patch credential schema
|
||||
patch_credential_schema()
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function _M.init_worker()
|
||||
local function update_config()
|
||||
local config, err = shared_dict:get("config")
|
||||
if not config then
|
||||
core.log.error("failed to get config from shared dict: ", err)
|
||||
return
|
||||
end
|
||||
|
||||
config, err = core.json.decode(config)
|
||||
if not config then
|
||||
core.log.error("failed to decode json: ", err)
|
||||
return
|
||||
end
|
||||
config_yaml._update_config(config)
|
||||
end
|
||||
events:register(update_config, EVENT_UPDATE, EVENT_UPDATE)
|
||||
|
||||
patch_schema()
|
||||
end
|
||||
|
||||
|
||||
return _M
|
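A standalone sketch of the conf_version rules enforced in update() above: a missing version auto-increments, a non-number is rejected, anything lower than the stored version is rejected, and an equal version keeps the currently stored items. next_conf_version is a name invented for this sketch.

local function next_conf_version(current, requested)
    if requested == nil then
        return current + 1          -- no version supplied: bump automatically
    end
    if type(requested) ~= "number" then
        return nil, "must be a number"
    end
    if requested < current then
        return nil, "must be greater than or equal to (" .. current .. ")"
    end
    return requested                 -- equal means: keep the stored resources
end

print(next_conf_version(3, nil))  --> 4
print(next_conf_version(3, 3))    --> 3
print(next_conf_version(3, 2))    --> nil   must be greater than or equal to (3)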
@@ -0,0 +1,81 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local stream_route_checker = require("apisix.stream.router.ip_port").stream_route_checker
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id, schema)
|
||||
local ok, err = core.schema.check(schema, conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = "invalid configuration: " .. err}
|
||||
end
|
||||
|
||||
local upstream_id = conf.upstream_id
|
||||
if upstream_id then
|
||||
local key = "/upstreams/" .. upstream_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch upstream info by "
|
||||
.. "upstream id [" .. upstream_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch upstream info by "
|
||||
.. "upstream id [" .. upstream_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
local service_id = conf.service_id
|
||||
if service_id then
|
||||
local key = "/services/" .. service_id
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res then
|
||||
return nil, {error_msg = "failed to fetch service info by "
|
||||
.. "service id [" .. service_id .. "]: "
|
||||
.. err}
|
||||
end
|
||||
|
||||
if res.status ~= 200 then
|
||||
return nil, {error_msg = "failed to fetch service info by "
|
||||
.. "service id [" .. service_id .. "], "
|
||||
.. "response code: " .. res.status}
|
||||
end
|
||||
end
|
||||
|
||||
local ok, err = stream_route_checker(conf, true)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "stream_routes",
|
||||
kind = "stream route",
|
||||
schema = core.schema.stream_route,
|
||||
checker = check_conf,
|
||||
unsupported_methods = { "patch" },
|
||||
list_filter_fields = {
|
||||
service_id = true,
|
||||
upstream_id = true,
|
||||
},
|
||||
})
|
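A standalone sketch of the "referenced object must exist" checks shared by the route and stream route validators above: an upstream_id or service_id in the body is only accepted when the corresponding key can be fetched. A plain table stands in for etcd, and ensure_exists is a hypothetical helper.

local store = {
    ["/upstreams/1"] = {status = 200},
}

local function ensure_exists(prefix, id)
    local res = store[prefix .. id]
    if not res or res.status ~= 200 then
        return nil, "failed to fetch info by id [" .. id .. "]"
    end
    return true
end

print(ensure_exists("/upstreams/", "1"))  --> true
print(ensure_exists("/services/", "9"))   --> nil   failed to fetch info by id [9]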
CloudronPackages/APISIX/apisix-source/apisix/admin/upstreams.lua (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local config_util = require("apisix.core.config_util")
|
||||
local get_routes = require("apisix.router").http_routes
|
||||
local get_services = require("apisix.http.service").services
|
||||
local get_plugin_configs = require("apisix.plugin_config").plugin_configs
|
||||
local get_consumers = require("apisix.consumer").consumers
|
||||
local get_consumer_groups = require("apisix.consumer_group").consumer_groups
|
||||
local get_global_rules = require("apisix.global_rules").global_rules
|
||||
local apisix_upstream = require("apisix.upstream")
|
||||
local resource = require("apisix.admin.resource")
|
||||
local tostring = tostring
|
||||
local ipairs = ipairs
|
||||
|
||||
|
||||
local function check_conf(id, conf, need_id)
|
||||
local ok, err = apisix_upstream.check_upstream_conf(conf)
|
||||
if not ok then
|
||||
return nil, {error_msg = err}
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function up_id_in_plugins(plugins, up_id)
|
||||
if plugins and plugins["traffic-split"]
|
||||
and plugins["traffic-split"].rules then
|
||||
|
||||
for _, rule in ipairs(plugins["traffic-split"].rules) do
|
||||
local plugin_upstreams = rule.weighted_upstreams
|
||||
for _, plugin_upstream in ipairs(plugin_upstreams) do
|
||||
if plugin_upstream.upstream_id
|
||||
and tostring(plugin_upstream.upstream_id) == up_id then
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function check_resources_reference(resources, up_id,
|
||||
only_check_plugin, resources_name)
|
||||
if resources then
|
||||
for _, resource in config_util.iterate_values(resources) do
|
||||
if resource and resource.value then
|
||||
if up_id_in_plugins(resource.value.plugins, up_id) then
|
||||
return {error_msg = "can not delete this upstream,"
|
||||
.. " plugin in "
|
||||
.. resources_name .. " ["
|
||||
.. resource.value.id
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
|
||||
if not only_check_plugin and resource.value.upstream_id
|
||||
and tostring(resource.value.upstream_id) == up_id then
|
||||
return {error_msg = "can not delete this upstream, "
|
||||
.. resources_name .. " [" .. resource.value.id
|
||||
.. "] is still using it now"}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function delete_checker(id)
|
||||
local routes = get_routes()
|
||||
local err_msg = check_resources_reference(routes, id, false, "route")
|
||||
if err_msg then
|
||||
return 400, err_msg
|
||||
end
|
||||
|
||||
local services, services_ver = get_services()
|
||||
core.log.info("services: ", core.json.delay_encode(services, true))
|
||||
core.log.info("services_ver: ", services_ver)
|
||||
local err_msg = check_resources_reference(services, id, false, "service")
|
||||
if err_msg then
|
||||
return 400, err_msg
|
||||
end
|
||||
|
||||
local plugin_configs = get_plugin_configs()
|
||||
local err_msg = check_resources_reference(plugin_configs, id, true, "plugin_config")
|
||||
if err_msg then
|
||||
return 400, err_msg
|
||||
end
|
||||
|
||||
local consumers = get_consumers()
|
||||
local err_msg = check_resources_reference(consumers, id, true, "consumer")
|
||||
if err_msg then
|
||||
return 400, err_msg
|
||||
end
|
||||
|
||||
local consumer_groups = get_consumer_groups()
|
||||
local err_msg = check_resources_reference(consumer_groups, id, true, "consumer_group")
|
||||
if err_msg then
|
||||
return 400, err_msg
|
||||
end
|
||||
|
||||
local global_rules = get_global_rules()
|
||||
err_msg = check_resources_reference(global_rules, id, true, "global_rules")
|
||||
if err_msg then
|
||||
return 400, err_msg
|
||||
end
|
||||
|
||||
return nil, nil
|
||||
end
|
||||
|
||||
|
||||
return resource.new({
|
||||
name = "upstreams",
|
||||
kind = "upstream",
|
||||
schema = core.schema.upstream,
|
||||
checker = check_conf,
|
||||
delete_checker = delete_checker
|
||||
})
|
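A standalone sketch of up_id_in_plugins above: an upstream can be referenced indirectly through the traffic-split plugin's weighted_upstreams, so the delete guard has to walk those rules as well as the plain upstream_id fields. Plain Lua with sample data only.

local function up_id_in_traffic_split(plugins, up_id)
    local ts = plugins and plugins["traffic-split"]
    if not (ts and ts.rules) then
        return false
    end
    for _, rule in ipairs(ts.rules) do
        for _, wu in ipairs(rule.weighted_upstreams or {}) do
            if wu.upstream_id and tostring(wu.upstream_id) == up_id then
                return true
            end
        end
    end
    return false
end

local plugins = {
    ["traffic-split"] = {
        rules = {{weighted_upstreams = {{upstream_id = 2, weight = 1}, {weight = 1}}}},
    },
}
print(up_id_in_traffic_split(plugins, "2"))  --> true
print(up_id_in_traffic_split(plugins, "3"))  --> false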
CloudronPackages/APISIX/apisix-source/apisix/admin/utils.lua (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
local core = require("apisix.core")
|
||||
local ngx_time = ngx.time
|
||||
local tonumber = tonumber
|
||||
local ipairs = ipairs
|
||||
local pairs = pairs
|
||||
|
||||
|
||||
local _M = {}
|
||||
|
||||
|
||||
local function inject_timestamp(conf, prev_conf, patch_conf)
|
||||
if not conf.create_time then
|
||||
if prev_conf and (prev_conf.node or prev_conf.list).value.create_time then
|
||||
conf.create_time = (prev_conf.node or prev_conf.list).value.create_time
|
||||
else
|
||||
-- As we don't know existent data's create_time, we have to pretend
|
||||
-- they are created now.
|
||||
conf.create_time = ngx_time()
|
||||
end
|
||||
end
|
||||
|
||||
if not conf.update_time or
|
||||
-- For PATCH request, the modification is passed as 'patch_conf'
|
||||
-- If the sub path is used, the 'patch_conf' will be a placeholder `true`
|
||||
(patch_conf and (patch_conf == true or patch_conf.update_time == nil))
|
||||
then
|
||||
-- reset the update_time if:
|
||||
-- 1. PATCH request, with sub path
|
||||
-- 2. PATCH request, update_time not given
|
||||
-- 3. Other request, update_time not given
|
||||
conf.update_time = ngx_time()
|
||||
end
|
||||
end
|
||||
_M.inject_timestamp = inject_timestamp
|
||||
|
||||
|
||||
function _M.inject_conf_with_prev_conf(kind, key, conf)
|
||||
local res, err = core.etcd.get(key)
|
||||
if not res or (res.status ~= 200 and res.status ~= 404) then
|
||||
core.log.error("failed to get " .. kind .. "[", key, "] from etcd: ", err or res.status)
|
||||
return nil, err
|
||||
end
|
||||
|
||||
if res.status == 404 then
|
||||
inject_timestamp(conf)
|
||||
else
|
||||
inject_timestamp(conf, res.body)
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
-- fix_count makes the "count" field returned by etcd reasonable
|
||||
function _M.fix_count(body, id)
|
||||
if body.count then
|
||||
if not id then
|
||||
-- remove the count of placeholder (init_dir)
|
||||
body.count = tonumber(body.count) - 1
|
||||
else
|
||||
body.count = tonumber(body.count)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function _M.decrypt_params(decrypt_func, body, schema_type)
|
||||
-- list
|
||||
if body.list then
|
||||
for _, route in ipairs(body.list) do
|
||||
if route.value and route.value.plugins then
|
||||
for name, conf in pairs(route.value.plugins) do
|
||||
decrypt_func(name, conf, schema_type)
|
||||
end
|
||||
end
|
||||
end
|
||||
return
|
||||
end
|
||||
|
||||
-- node
|
||||
local plugins = body.node and body.node.value
|
||||
and body.node.value.plugins
|
||||
|
||||
if plugins then
|
||||
for name, conf in pairs(plugins) do
|
||||
decrypt_func(name, conf, schema_type)
|
||||
end
|
||||
end
|
||||
|
||||
-- metadata
|
||||
if schema_type == core.schema.TYPE_METADATA then
|
||||
local conf = body.node and body.node.value
|
||||
decrypt_func(conf.name, conf, schema_type)
|
||||
end
|
||||
end
|
||||
|
||||
return _M
|
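A standalone sketch of the timestamp rules in inject_timestamp above: create_time is carried over from the previous record when one exists and otherwise set to now, while update_time is refreshed unless the caller supplied one outside of a PATCH. Plain Lua; os.time stands in for ngx.time in this illustration.

local function inject_timestamp(conf, prev_value, is_patch)
    if not conf.create_time then
        -- keep the original creation time when a previous record is known
        conf.create_time = prev_value and prev_value.create_time or os.time()
    end
    if not conf.update_time or is_patch then
        conf.update_time = os.time()
    end
    return conf
end

local prev = {create_time = 1700000000, update_time = 1700000100}
local conf = inject_timestamp({uri = "/hello"}, prev, false)
print(conf.create_time)  --> 1700000000 (kept from the previous record)
print(conf.update_time)  --> current time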
@@ -0,0 +1,249 @@
|
||||
--
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
-- contributor license agreements. See the NOTICE file distributed with
|
||||
-- this work for additional information regarding copyright ownership.
|
||||
-- The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
-- (the "License"); you may not use this file except in compliance with
|
||||
-- the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing, software
|
||||
-- distributed under the License is distributed on an "AS IS" BASIS,
|
||||
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
-- See the License for the specific language governing permissions and
|
||||
-- limitations under the License.
|
||||
--
|
||||
|
||||
local type = type
|
||||
local pairs = pairs
|
||||
local tonumber = tonumber
|
||||
local ngx = ngx
|
||||
local re_find = ngx.re.find
|
||||
local fetch_local_conf = require("apisix.core.config_local").local_conf
|
||||
local try_read_attr = require("apisix.core.table").try_read_attr
|
||||
local deepcopy = require("apisix.core.table").deepcopy
|
||||
local log = require("apisix.core.log")
|
||||
local request = require("apisix.core.request")
|
||||
local response = require("apisix.core.response")
|
||||
local table = require("apisix.core.table")
|
||||
|
||||
local _M = {}
|
||||
|
||||
|
||||
local admin_api_version
|
||||
local function enable_v3()
|
||||
if admin_api_version then
|
||||
if admin_api_version == "v3" then
|
||||
return true
|
||||
end
|
||||
|
||||
if admin_api_version == "default" then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
local local_conf, err = fetch_local_conf()
|
||||
if not local_conf then
|
||||
admin_api_version = "default"
|
||||
log.error("failed to fetch local conf: ", err)
|
||||
return false
|
||||
end
|
||||
|
||||
local api_ver = try_read_attr(local_conf, "deployment", "admin", "admin_api_version")
|
||||
if api_ver ~= "v3" then
|
||||
admin_api_version = "default"
|
||||
return false
|
||||
end
|
||||
|
||||
admin_api_version = api_ver
|
||||
return true
|
||||
end
|
||||
_M.enable_v3 = enable_v3
|
||||
|
||||
|
||||
function _M.to_v3(body, action)
|
||||
if not enable_v3() then
|
||||
body.action = action
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function _M.to_v3_list(body)
|
||||
if not enable_v3() then
|
||||
return
|
||||
end
|
||||
|
||||
if body.node.dir then
|
||||
body.list = body.node.nodes
|
||||
body.node = nil
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function sort(l, r)
|
||||
return l.createdIndex < r.createdIndex
|
||||
end
|
||||
|
||||
|
||||
local function pagination(body, args)
|
||||
args.page = tonumber(args.page)
|
||||
args.page_size = tonumber(args.page_size)
|
||||
if not args.page or not args.page_size then
|
||||
return
|
||||
end
|
||||
|
||||
if args.page_size < 10 or args.page_size > 500 then
|
||||
return response.exit(400, "page_size must be between 10 and 500")
|
||||
end
|
||||
|
||||
if not args.page or args.page < 1 then
|
||||
-- default page is 1
|
||||
args.page = 1
|
||||
end
|
||||
|
||||
local list = body.list
|
||||
|
||||
-- sort nodes by their createdIndex
|
||||
table.sort(list, sort)
|
||||
|
||||
local to = args.page * args.page_size
|
||||
local from = to - args.page_size + 1
|
||||
|
||||
local res = table.new(20, 0)
|
||||
|
||||
for i = from, to do
|
||||
if list[i] then
|
||||
res[i - from + 1] = list[i]
|
||||
end
|
||||
end
|
||||
|
||||
body.list = res
|
||||
end
|
||||
|
||||
|
||||
local function _filter(item, args, resource)
|
||||
if not args.filter then
|
||||
return true
|
||||
end
|
||||
|
||||
local filters, err = ngx.decode_args(args.filter or "", 100)
|
||||
if not filters then
|
||||
log.error("failed to decode filter args: ", err)
|
||||
return false
|
||||
end
|
||||
|
||||
for key, value in pairs(filters) do
|
||||
if not resource.list_filter_fields[key] then
|
||||
log.warn("filter field '", key, "' is not supported by resource: ", resource.name)
|
||||
goto CONTINUE
|
||||
end
|
||||
|
||||
if not item[key] then
|
||||
return false
|
||||
end
|
||||
|
||||
if type(value) == "table" then
|
||||
value = value[#value] -- get the last value in the table
|
||||
end
|
||||
|
||||
if item[key] ~= value then
|
||||
return false
|
||||
end
|
||||
|
||||
::CONTINUE::
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
local function filter(body, args, resource)
|
||||
for i = #body.list, 1, -1 do
|
||||
local name_matched = true
|
||||
local label_matched = true
|
||||
local uri_matched = true
|
||||
if args.name then
|
||||
name_matched = false
|
||||
local matched = re_find(body.list[i].value.name, args.name, "jo")
|
||||
if matched then
|
||||
name_matched = true
|
||||
end
|
||||
end
|
||||
|
||||
if args.label then
|
||||
label_matched = false
|
||||
if body.list[i].value.labels then
|
||||
for k, _ in pairs(body.list[i].value.labels) do
|
||||
if k == args.label then
|
||||
label_matched = true
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if args.uri then
|
||||
uri_matched = false
|
||||
if body.list[i].value.uri then
|
||||
local matched = re_find(body.list[i].value.uri, args.uri, "jo")
|
||||
if matched then
|
||||
uri_matched = true
|
||||
end
|
||||
end
|
||||
|
||||
if body.list[i].value.uris then
|
||||
for _, uri in pairs(body.list[i].value.uris) do
|
||||
if re_find(uri, args.uri, "jo") then
|
||||
uri_matched = true
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if not name_matched or not label_matched or not uri_matched
|
||||
or not _filter(body.list[i].value, args, resource) then
|
||||
table.remove(body.list, i)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function _M.filter(body, resource)
|
||||
if not enable_v3() then
|
||||
return body
|
||||
end
|
||||
|
||||
local args = request.get_uri_args()
|
||||
local processed_body = deepcopy(body)
|
||||
|
||||
if processed_body.deleted then
|
||||
processed_body.node = nil
|
||||
end
|
||||
|
||||
-- strip node wrapping for single query, create, and update scenarios.
|
||||
if processed_body.node then
|
||||
processed_body = processed_body.node
|
||||
end
|
||||
|
||||
-- filter and paging logic for list query only
|
||||
if processed_body.list then
|
||||
filter(processed_body, args, resource)
|
||||
|
||||
-- calculate the total amount of filtered data
|
||||
processed_body.total = processed_body.list and #processed_body.list or 0
|
||||
|
||||
pagination(processed_body, args)
|
||||
|
||||
-- remove the count field returned by etcd
|
||||
-- we don't need a field that reflects the length of the currently returned data,
|
||||
-- it doesn't make sense
|
||||
processed_body.count = nil
|
||||
end
|
||||
|
||||
return processed_body
|
||||
end
|
||||
|
||||
|
||||
return _M
|
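A standalone sketch of the pagination arithmetic used above: page and page_size select a window over the sorted list, and positions past the end of the list are simply skipped. Plain Lua with sample data; paginate is a name invented for this sketch.

local function paginate(list, page, page_size)
    local to = page * page_size
    local from = to - page_size + 1
    local res = {}
    for i = from, to do
        if list[i] then
            res[#res + 1] = list[i]
        end
    end
    return res
end

local list = {"a", "b", "c", "d", "e"}
print(table.concat(paginate(list, 1, 2), ","))  --> a,b
print(table.concat(paginate(list, 3, 2), ","))  --> e
print(table.concat(paginate(list, 4, 2), ","))  --> (empty string)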