From 54cc5f7308e4c54d2507e942e684c1b1d464744f Mon Sep 17 00:00:00 2001
From: ReachableCEO
Date: Thu, 4 Sep 2025 09:42:47 -0500
Subject: [PATCH] feat(apisix): add Cloudron package
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Implements Apache APISIX packaging for the Cloudron platform.
- Includes Dockerfile, CloudronManifest.json, and start.sh.
- Configured to use Cloudron's etcd addon (see the illustrative start.sh
  sketch after the diffstat below).

🤖 Generated with Gemini CLI

Co-Authored-By: Gemini
---
 CloudronPackages/APISIX/CloudronManifest.json | 33 +
 CloudronPackages/APISIX/Dockerfile | 58 +
 .../apisix-source/.devcontainer/Dockerfile | 38 +
 .../.devcontainer/devcontainer.json | 14 +
 .../.devcontainer/docker-compose.yml | 37 +
 .../.github/ISSUE_TEMPLATE/bug_report.yml | 68 +
 .../.github/ISSUE_TEMPLATE/config.yml | 5 +
 .../ISSUE_TEMPLATE/feature_request.yml | 23 +
 .../.github/ISSUE_TEMPLATE/improve_docs.yml | 33 +
 .../.github/ISSUE_TEMPLATE/request_help.yml | 36 +
 .../.github/PULL_REQUEST_TEMPLATE.md | 33 +
 .../apisix-source/.github/dependabot.yml | 10 +
 .../apisix-source/.github/workflows/build.yml | 177 +
 .../.github/workflows/check-changelog.yml | 27 +
 .../apisix-source/.github/workflows/cli.yml | 68 +
 .../.github/workflows/close-unresponded.yml | 39 +
 .../.github/workflows/code-lint.yml | 48 +
 .../.github/workflows/doc-lint.yml | 58 +
 .../.github/workflows/gm-cron.yaml.disabled | 182 +
 .../.github/workflows/gm.yml.disabled | 93 +
 .../.github/workflows/kubernetes-ci.yml | 79 +
 .../.github/workflows/license-checker.yml | 37 +
 .../.github/workflows/link-check.yml | 49 +
 .../apisix-source/.github/workflows/lint.yml | 46 +
 .../workflows/push-dev-image-on-commit.yml | 109 +
 .../.github/workflows/redhat-ci.yaml | 179 +
 .../.github/workflows/semantic.yml | 35 +
 .../.github/workflows/source-install.yml | 124 +
 .../apisix-source/.github/workflows/stale.yml | 52 +
 .../.github/workflows/tars-ci.yml | 55 +
 .../.github/workflows/update-labels.yml | 62 +
 .../APISIX/apisix-source/.gitmodules | 9 +
 .../APISIX/apisix-source/.ignore_words | 11 +
 .../APISIX/apisix-source/.licenserc.yaml | 60 +
 .../APISIX/apisix-source/.markdownlint.yml | 34 +
 .../APISIX/apisix-source/CHANGELOG.md | 1936 ++++++++++
 .../APISIX/apisix-source/CODE_OF_CONDUCT.md | 121 +
 .../APISIX/apisix-source/CODE_STYLE.md | 440 +++
 .../APISIX/apisix-source/CONTRIBUTING.md | 152 +
 CloudronPackages/APISIX/apisix-source/LICENSE | 219 ++
 .../APISIX/apisix-source/MAINTAIN.md | 62 +
 .../APISIX/apisix-source/Makefile | 523 +++
 CloudronPackages/APISIX/apisix-source/NOTICE | 5 +
 .../APISIX/apisix-source/README.md | 241 ++
 .../APISIX/apisix-source/THREAT_MODEL.md | 60 +
 .../apisix-source/Vision-and-Milestones.md | 40 +
 .../apisix-source/apisix-master-0.rockspec | 108 +
 .../apisix/admin/consumer_group.lua | 66 +
 .../apisix-source/apisix/admin/consumers.lua | 65 +
 .../apisix/admin/credentials.lua | 74 +
 .../apisix/admin/global_rules.lua | 43 +
 .../apisix-source/apisix/admin/init.lua | 526 +++
 .../apisix/admin/plugin_config.lua | 66 +
 .../apisix/admin/plugin_metadata.lua | 83 +
 .../apisix-source/apisix/admin/plugins.lua | 139 +
 .../apisix-source/apisix/admin/proto.lua | 111 +
 .../apisix-source/apisix/admin/resource.lua | 468 +++
 .../apisix-source/apisix/admin/routes.lua | 184 +
 .../apisix-source/apisix/admin/schema.lua | 35 +
 .../apisix-source/apisix/admin/secrets.lua | 45 +
 .../apisix-source/apisix/admin/services.lua | 128 +
 .../APISIX/apisix-source/apisix/admin/ssl.lua | 37 +
 .../apisix-source/apisix/admin/standalone.lua | 339 ++
 .../apisix/admin/stream_routes.lua | 81
+ .../apisix-source/apisix/admin/upstreams.lua | 134 + .../apisix-source/apisix/admin/utils.lua | 113 + .../apisix-source/apisix/admin/v3_adapter.lua | 249 ++ .../apisix-source/apisix/api_router.lua | 116 + .../APISIX/apisix-source/apisix/balancer.lua | 400 ++ .../apisix-source/apisix/balancer/chash.lua | 154 + .../apisix-source/apisix/balancer/ewma.lua | 243 ++ .../apisix/balancer/least_conn.lua | 113 + .../apisix/balancer/priority.lua | 81 + .../apisix/balancer/roundrobin.lua | 89 + .../apisix-source/apisix/cli/apisix.lua | 40 + .../apisix-source/apisix/cli/config.lua | 385 ++ .../APISIX/apisix-source/apisix/cli/env.lua | 115 + .../APISIX/apisix-source/apisix/cli/etcd.lua | 405 ++ .../APISIX/apisix-source/apisix/cli/file.lua | 343 ++ .../APISIX/apisix-source/apisix/cli/ip.lua | 66 + .../apisix-source/apisix/cli/ngx_tpl.lua | 998 +++++ .../APISIX/apisix-source/apisix/cli/ops.lua | 1013 +++++ .../apisix-source/apisix/cli/schema.lua | 450 +++ .../APISIX/apisix-source/apisix/cli/util.lua | 189 + .../APISIX/apisix-source/apisix/constants.lua | 46 + .../APISIX/apisix-source/apisix/consumer.lua | 334 ++ .../apisix-source/apisix/consumer_group.lua | 55 + .../apisix-source/apisix/control/router.lua | 212 ++ .../apisix-source/apisix/control/v1.lua | 506 +++ .../APISIX/apisix-source/apisix/core.lua | 68 + .../apisix-source/apisix/core/config_etcd.lua | 1168 ++++++ .../apisix/core/config_local.lua | 71 + .../apisix-source/apisix/core/config_util.lua | 219 ++ .../apisix-source/apisix/core/config_xds.lua | 378 ++ .../apisix-source/apisix/core/config_yaml.lua | 579 +++ .../APISIX/apisix-source/apisix/core/ctx.lua | 463 +++ .../apisix-source/apisix/core/dns/client.lua | 164 + .../APISIX/apisix-source/apisix/core/env.lua | 109 + .../APISIX/apisix-source/apisix/core/etcd.lua | 676 ++++ .../apisix-source/apisix/core/event.lua | 45 + .../APISIX/apisix-source/apisix/core/id.lua | 169 + .../APISIX/apisix-source/apisix/core/io.lua | 50 + .../APISIX/apisix-source/apisix/core/ip.lua | 80 + .../APISIX/apisix-source/apisix/core/json.lua | 132 + .../APISIX/apisix-source/apisix/core/log.lua | 173 + .../apisix-source/apisix/core/lrucache.lua | 193 + .../APISIX/apisix-source/apisix/core/math.lua | 41 + .../APISIX/apisix-source/apisix/core/os.lua | 118 + .../apisix-source/apisix/core/profile.lua | 67 + .../apisix-source/apisix/core/pubsub.lua | 238 ++ .../apisix-source/apisix/core/request.lua | 382 ++ .../apisix-source/apisix/core/resolver.lua | 96 + .../apisix-source/apisix/core/response.lua | 231 ++ .../apisix-source/apisix/core/schema.lua | 71 + .../apisix-source/apisix/core/string.lua | 136 + .../apisix-source/apisix/core/table.lua | 287 ++ .../apisix-source/apisix/core/timer.lua | 108 + .../apisix-source/apisix/core/utils.lua | 465 +++ .../apisix-source/apisix/core/version.lua | 24 + .../APISIX/apisix-source/apisix/debug.lua | 315 ++ .../apisix/discovery/consul/init.lua | 691 ++++ .../apisix/discovery/consul/schema.lua | 92 + .../apisix/discovery/consul_kv/init.lua | 439 +++ .../apisix/discovery/consul_kv/schema.lua | 88 + .../apisix/discovery/dns/init.lua | 89 + .../apisix/discovery/dns/schema.lua | 48 + .../apisix/discovery/eureka/init.lua | 223 ++ .../apisix/discovery/eureka/schema.lua | 40 + .../apisix-source/apisix/discovery/init.lua | 43 + .../discovery/kubernetes/informer_factory.lua | 377 ++ .../apisix/discovery/kubernetes/init.lua | 694 ++++ .../apisix/discovery/kubernetes/schema.lua | 217 ++ .../apisix/discovery/nacos/init.lua | 392 ++ .../apisix/discovery/nacos/schema.lua | 59 + 
.../apisix/discovery/tars/init.lua | 367 ++ .../apisix/discovery/tars/schema.lua | 45 + .../APISIX/apisix-source/apisix/events.lua | 139 + .../apisix-source/apisix/global_rules.lua | 56 + .../apisix-source/apisix/http/route.lua | 153 + .../apisix/http/router/radixtree_host_uri.lua | 193 + .../apisix/http/router/radixtree_uri.lua | 57 + .../router/radixtree_uri_with_parameter.lua | 57 + .../apisix-source/apisix/http/service.lua | 70 + .../apisix/include/apisix/model/pubsub.proto | 143 + .../APISIX/apisix-source/apisix/init.lua | 1253 +++++++ .../apisix-source/apisix/inspect/dbg.lua | 163 + .../apisix-source/apisix/inspect/init.lua | 128 + .../APISIX/apisix-source/apisix/patch.lua | 384 ++ .../APISIX/apisix-source/apisix/plugin.lua | 1285 +++++++ .../apisix-source/apisix/plugin_config.lua | 88 + .../plugins/ai-aws-content-moderation.lua | 161 + .../apisix/plugins/ai-drivers/aimlapi.lua | 24 + .../apisix/plugins/ai-drivers/deepseek.lua | 24 + .../apisix/plugins/ai-drivers/openai-base.lua | 255 ++ .../plugins/ai-drivers/openai-compatible.lua | 18 + .../apisix/plugins/ai-drivers/openai.lua | 24 + .../apisix/plugins/ai-drivers/schema.lua | 44 + .../apisix/plugins/ai-prompt-decorator.lua | 117 + .../apisix/plugins/ai-prompt-guard.lua | 153 + .../apisix/plugins/ai-prompt-template.lua | 146 + .../apisix/plugins/ai-proxy-multi.lua | 227 ++ .../apisix-source/apisix/plugins/ai-proxy.lua | 57 + .../apisix/plugins/ai-proxy/base.lua | 50 + .../apisix/plugins/ai-proxy/schema.lua | 219 ++ .../apisix-source/apisix/plugins/ai-rag.lua | 156 + .../ai-rag/embeddings/azure_openai.lua | 88 + .../ai-rag/vector-search/azure_ai_search.lua | 83 + .../apisix/plugins/ai-rate-limiting.lua | 234 ++ .../apisix/plugins/ai-request-rewrite.lua | 231 ++ .../apisix-source/apisix/plugins/ai.lua | 324 ++ .../apisix/plugins/api-breaker.lua | 267 ++ .../apisix/plugins/attach-consumer-label.lua | 68 + .../apisix/plugins/authz-casbin.lua | 135 + .../apisix/plugins/authz-casdoor.lua | 176 + .../apisix/plugins/authz-keycloak.lua | 790 ++++ .../apisix/plugins/aws-lambda.lua | 187 + .../apisix/plugins/azure-functions.lua | 61 + .../apisix/plugins/basic-auth.lua | 189 + .../apisix/plugins/batch-requests.lua | 309 ++ .../apisix/plugins/body-transformer.lua | 261 ++ .../apisix-source/apisix/plugins/brotli.lua | 248 ++ .../apisix-source/apisix/plugins/cas-auth.lua | 201 + .../apisix/plugins/chaitin-waf.lua | 421 +++ .../apisix/plugins/clickhouse-logger.lua | 208 ++ .../apisix/plugins/client-control.lua | 76 + .../apisix/plugins/consumer-restriction.lua | 164 + .../apisix-source/apisix/plugins/cors.lua | 402 ++ .../apisix-source/apisix/plugins/csrf.lua | 168 + .../apisix-source/apisix/plugins/datadog.lua | 251 ++ .../apisix/plugins/degraphql.lua | 160 + .../apisix/plugins/dubbo-proxy.lua | 69 + .../apisix-source/apisix/plugins/echo.lua | 121 + .../apisix/plugins/elasticsearch-logger.lua | 281 ++ .../apisix/plugins/error-log-logger.lua | 510 +++ .../apisix/plugins/example-plugin.lua | 152 + .../apisix/plugins/ext-plugin-post-req.lua | 40 + .../apisix/plugins/ext-plugin-post-resp.lua | 183 + .../apisix/plugins/ext-plugin-pre-req.lua | 40 + .../apisix/plugins/ext-plugin/helper.lua | 81 + .../apisix/plugins/ext-plugin/init.lua | 1025 +++++ .../apisix/plugins/fault-injection.lua | 175 + .../apisix/plugins/file-logger.lua | 184 + .../apisix/plugins/forward-auth.lua | 164 + .../apisix-source/apisix/plugins/gm.lua | 175 + .../apisix/plugins/google-cloud-logging.lua | 265 ++ .../apisix/plugins/grpc-transcode.lua | 211 ++ 
.../apisix/plugins/grpc-transcode/proto.lua | 279 ++ .../apisix/plugins/grpc-transcode/request.lua | 72 + .../plugins/grpc-transcode/response.lua | 144 + .../apisix/plugins/grpc-transcode/util.lua | 202 + .../apisix-source/apisix/plugins/grpc-web.lua | 228 ++ .../apisix-source/apisix/plugins/gzip.lua | 170 + .../apisix/plugins/hmac-auth.lua | 372 ++ .../apisix/plugins/http-dubbo.lua | 262 ++ .../apisix/plugins/http-logger.lua | 223 ++ .../apisix-source/apisix/plugins/inspect.lua | 61 + .../apisix/plugins/ip-restriction.lua | 26 + .../apisix/plugins/ip-restriction/init.lua | 122 + .../apisix/plugins/jwe-decrypt.lua | 279 ++ .../apisix-source/apisix/plugins/jwt-auth.lua | 331 ++ .../apisix/plugins/kafka-logger.lua | 327 ++ .../apisix/plugins/kafka-proxy.lua | 62 + .../apisix-source/apisix/plugins/key-auth.lua | 124 + .../apisix-source/apisix/plugins/lago.lua | 229 ++ .../apisix/plugins/ldap-auth.lua | 160 + .../apisix/plugins/limit-conn.lua | 94 + .../apisix/plugins/limit-conn/init.lua | 171 + .../limit-conn/limit-conn-redis-cluster.lua | 78 + .../plugins/limit-conn/limit-conn-redis.lua | 85 + .../apisix/plugins/limit-conn/util.lua | 81 + .../apisix/plugins/limit-count.lua | 51 + .../apisix/plugins/limit-count/init.lua | 332 ++ .../plugins/limit-count/limit-count-local.lua | 79 + .../limit-count/limit-count-redis-cluster.lua | 83 + .../plugins/limit-count/limit-count-redis.lua | 89 + .../apisix/plugins/limit-req.lua | 183 + .../limit-req/limit-req-redis-cluster.lua | 50 + .../plugins/limit-req/limit-req-redis.lua | 54 + .../apisix/plugins/limit-req/util.lua | 78 + .../apisix/plugins/log-rotate.lua | 327 ++ .../apisix-source/apisix/plugins/loggly.lua | 351 ++ .../apisix/plugins/loki-logger.lua | 251 ++ .../apisix/plugins/mcp-bridge.lua | 173 + .../apisix/plugins/mcp/broker/shared_dict.lua | 90 + .../apisix/plugins/mcp/broker/utils.lua | 21 + .../apisix/plugins/mcp/server.lua | 116 + .../apisix/plugins/mcp/server_wrapper.lua | 106 + .../apisix/plugins/mcp/transport/sse.lua | 44 + .../apisix-source/apisix/plugins/mocking.lua | 243 ++ .../apisix/plugins/multi-auth.lua | 105 + .../apisix/plugins/node-status.lua | 98 + .../apisix/plugins/ocsp-stapling.lua | 220 ++ .../apisix-source/apisix/plugins/opa.lua | 152 + .../apisix/plugins/opa/helper.lua | 117 + .../apisix/plugins/openfunction.lua | 35 + .../apisix/plugins/openid-connect.lua | 717 ++++ .../apisix/plugins/opentelemetry.lua | 426 +++ .../apisix/plugins/openwhisk.lua | 143 + .../apisix/plugins/prometheus.lua | 58 + .../apisix/plugins/prometheus/exporter.lua | 569 +++ .../plugins/proxy-cache/disk_handler.lua | 102 + .../apisix/plugins/proxy-cache/init.lua | 198 + .../apisix/plugins/proxy-cache/memory.lua | 84 + .../plugins/proxy-cache/memory_handler.lua | 332 ++ .../apisix/plugins/proxy-cache/util.lua | 102 + .../apisix/plugins/proxy-control.lua | 65 + .../apisix/plugins/proxy-mirror.lua | 133 + .../apisix/plugins/proxy-rewrite.lua | 398 ++ .../apisix/plugins/public-api.lua | 55 + .../apisix-source/apisix/plugins/real-ip.lua | 185 + .../apisix-source/apisix/plugins/redirect.lua | 264 ++ .../apisix/plugins/referer-restriction.lua | 141 + .../apisix/plugins/request-id.lua | 120 + .../apisix/plugins/request-validation.lua | 120 + .../apisix/plugins/response-rewrite.lua | 390 ++ .../apisix/plugins/rocketmq-logger.lua | 191 + .../apisix/plugins/server-info.lua | 316 ++ .../plugins/serverless-post-function.lua | 17 + .../plugins/serverless-pre-function.lua | 17 + .../plugins/serverless/generic-upstream.lua | 136 + 
.../apisix/plugins/serverless/init.lua | 124 + .../apisix/plugins/skywalking-logger.lua | 194 + .../apisix/plugins/skywalking.lua | 158 + .../apisix/plugins/sls-logger.lua | 197 + .../apisix/plugins/splunk-hec-logging.lua | 186 + .../apisix-source/apisix/plugins/syslog.lua | 99 + .../apisix/plugins/syslog/init.lua | 112 + .../apisix/plugins/tcp-logger.lua | 161 + .../apisix/plugins/tencent-cloud-cls.lua | 146 + .../plugins/tencent-cloud-cls/cls-sdk.lua | 329 ++ .../apisix/plugins/traffic-split.lua | 305 ++ .../apisix/plugins/ua-restriction.lua | 178 + .../apisix/plugins/udp-logger.lua | 145 + .../apisix/plugins/uri-blocker.lua | 108 + .../apisix/plugins/wolf-rbac.lua | 492 +++ .../apisix-source/apisix/plugins/workflow.lua | 161 + .../apisix-source/apisix/plugins/zipkin.lua | 318 ++ .../apisix/plugins/zipkin/codec.lua | 114 + .../apisix/plugins/zipkin/random_sampler.lua | 37 + .../apisix/plugins/zipkin/reporter.lua | 184 + .../apisix-source/apisix/pubsub/kafka.lua | 149 + .../APISIX/apisix-source/apisix/router.lua | 131 + .../apisix-source/apisix/schema_def.lua | 1094 ++++++ .../APISIX/apisix-source/apisix/script.lua | 59 + .../APISIX/apisix-source/apisix/secret.lua | 227 ++ .../apisix-source/apisix/secret/aws.lua | 140 + .../apisix-source/apisix/secret/gcp.lua | 202 + .../apisix-source/apisix/secret/vault.lua | 122 + .../APISIX/apisix-source/apisix/ssl.lua | 342 ++ .../apisix/ssl/router/radixtree_sni.lua | 332 ++ .../apisix/stream/plugins/ip-restriction.lua | 26 + .../apisix/stream/plugins/limit-conn.lua | 61 + .../apisix/stream/plugins/mqtt-proxy.lua | 186 + .../apisix/stream/plugins/prometheus.lua | 48 + .../apisix/stream/plugins/syslog.lua | 80 + .../apisix/stream/router/ip_port.lua | 249 ++ .../apisix-source/apisix/stream/xrpc.lua | 121 + .../apisix/stream/xrpc/metrics.lua | 50 + .../stream/xrpc/protocols/dubbo/init.lua | 231 ++ .../stream/xrpc/protocols/dubbo/schema.lua | 32 + .../stream/xrpc/protocols/redis/commands.lua | 222 ++ .../stream/xrpc/protocols/redis/init.lua | 499 +++ .../stream/xrpc/protocols/redis/metrics.lua | 33 + .../stream/xrpc/protocols/redis/schema.lua | 59 + .../apisix/stream/xrpc/runner.lua | 279 ++ .../apisix-source/apisix/stream/xrpc/sdk.lua | 202 + .../APISIX/apisix-source/apisix/timers.lua | 103 + .../APISIX/apisix-source/apisix/upstream.lua | 659 ++++ .../apisix-source/apisix/utils/auth.lua | 24 + .../apisix/utils/batch-processor-manager.lua | 158 + .../apisix/utils/batch-processor.lua | 235 ++ .../apisix/utils/content-decode.lua | 112 + .../apisix/utils/google-cloud-oauth.lua | 130 + .../apisix-source/apisix/utils/log-util.lua | 403 ++ .../apisix/utils/redis-schema.lua | 81 + .../apisix-source/apisix/utils/redis.lua | 74 + .../apisix/utils/rediscluster.lua | 60 + .../apisix-source/apisix/utils/rfc5424.lua | 114 + .../apisix-source/apisix/utils/router.lua | 34 + .../apisix-source/apisix/utils/upstream.lua | 133 + .../APISIX/apisix-source/apisix/wasm.lua | 203 + .../APISIX/apisix-source/autodocs/config.ld | 11 + .../APISIX/apisix-source/autodocs/generate.sh | 51 + .../APISIX/apisix-source/autodocs/ldoc.ltp | 98 + .../fake-apisix/conf/cert/apisix.crt | 27 + .../fake-apisix/conf/cert/apisix.key | 39 + .../fake-apisix/conf/cert/openssl.conf | 40 + .../benchmark/fake-apisix/conf/nginx.conf | 131 + .../benchmark/fake-apisix/lua/apisix.lua | 74 + .../APISIX/apisix-source/benchmark/run.sh | 155 + .../benchmark/server/conf/nginx.conf | 42 + .../APISIX/apisix-source/bin/apisix | 48 + .../apisix-source/ci/backup-docker-images.sh | 50 + 
.../apisix-source/ci/check_changelog_prs.ts | 238 ++ .../APISIX/apisix-source/ci/common.sh | 217 ++ .../apisix-source/ci/free_disk_space.sh | 48 + .../ci/init-common-test-service.sh | 27 + .../ci/init-last-test-service.sh | 45 + .../ci/init-plugin-test-service.sh | 73 + .../APISIX/apisix-source/ci/kubernetes-ci.sh | 33 + .../ci/linux-install-etcd-client.sh | 31 + .../ci/linux-install-openresty.sh | 62 + ...rrent_luarocks_in_customed_nginx_runner.sh | 21 + .../linux_apisix_current_luarocks_runner.sh | 89 + .../ci/linux_openresty_common_runner.sh | 127 + .../ci/linux_openresty_runner.sh | 21 + .../ci/linux_openresty_tongsuo_runner.sh | 53 + .../ci/pod/docker-compose.common.yml | 113 + .../ci/pod/docker-compose.first.yml | 304 ++ .../ci/pod/docker-compose.last.yml | 97 + .../ci/pod/docker-compose.plugin.yml | 400 ++ .../apisix-source/ci/pod/etcd/env/common.env | 1 + .../ci/pod/eureka/env/common.env | 7 + .../ci/pod/kafka/kafka-server/env/common.env | 3 + .../ci/pod/kafka/kafka-server/env/common2.env | 8 + .../ci/pod/kafka/kafka-server/env/last.env | 8 + .../ci/pod/kafka/kafka-server/kafka_jaas.conf | 23 + .../pod/kafka/zookeeper-server/env/common.env | 1 + .../ci/pod/keycloak/kcadm_configure_basic.sh | 85 + .../ci/pod/keycloak/kcadm_configure_cas.sh | 37 + .../keycloak/kcadm_configure_university.sh | 90 + .../ci/pod/keycloak/server.crt.pem | 21 + .../ci/pod/keycloak/server.key.pem | 28 + .../apisix-source/ci/pod/nacos/env/common.env | 6 + .../ci/pod/nacos/env/service.env | 2 + .../ci/pod/nacos/healthcheck/Dockerfile | 30 + .../healthcheck/nacos-server-healthcheck.sh | 27 + .../healthcheck/nacos-service-healthcheck.sh | 43 + .../ci/pod/nacos/service/Dockerfile | 32 + .../APISIX/apisix-source/ci/pod/opa/data.json | 30 + .../APISIX/apisix-source/ci/pod/opa/echo.rego | 20 + .../apisix-source/ci/pod/opa/example.rego | 55 + .../apisix-source/ci/pod/opa/with_route.rego | 24 + .../pod/openfunction/build-function-image.sh | 28 + .../function-example/test-body/go.mod | 31 + .../function-example/test-body/go.sum | 1760 +++++++++ .../function-example/test-body/hello.go | 37 + .../function-example/test-header/go.mod | 3 + .../function-example/test-header/hello.go | 30 + .../function-example/test-uri/go.mod | 32 + .../function-example/test-uri/go.sum | 2615 +++++++++++++ .../function-example/test-uri/hello.go | 38 + .../ci/pod/otelcol-contrib/config.yaml | 32 + .../apisix-source/ci/pod/vector/vector.toml | 111 + .../APISIX/apisix-source/ci/redhat-ci.sh | 116 + .../APISIX/apisix-source/ci/tars-ci.sh | 33 + .../conf/cert/ssl_PLACE_HOLDER.crt | 27 + .../conf/cert/ssl_PLACE_HOLDER.key | 39 + .../APISIX/apisix-source/conf/config.yaml | 63 + .../apisix-source/conf/config.yaml.example | 712 ++++ .../APISIX/apisix-source/conf/debug.yaml | 36 + .../APISIX/apisix-source/conf/mime.types | 98 + .../compose/apisix_conf/master/config.yaml | 36 + .../docker/compose/docker-compose-master.yaml | 52 + .../docker/debian-dev/Dockerfile | 80 + .../docker/debian-dev/docker-entrypoint.sh | 63 + .../docker/debian-dev/install-brotli.sh | 42 + .../docker/utils/check_standalone_config.sh | 35 + .../apisix-source/docs/assets/images/MA.jpeg | Bin 0 -> 45199 bytes .../apisix-source/docs/assets/images/OA.jpg | Bin 0 -> 27192 bytes .../docs/assets/images/apache.png | Bin 0 -> 8491 bytes .../images/apisix-multi-lang-support.png | Bin 0 -> 212677 bytes .../docs/assets/images/apisix-seed.svg | 3 + .../docs/assets/images/apisix.png | Bin 0 -> 273572 bytes .../images/aws-caddy-php-welcome-page.png | Bin 0 -> 402001 bytes 
.../docs/assets/images/aws-define-route.png | Bin 0 -> 123108 bytes .../docs/assets/images/aws-define-service.png | Bin 0 -> 79089 bytes .../docs/assets/images/aws-fargate-cdk.png | Bin 0 -> 382699 bytes .../docs/assets/images/aws-nlb-ip-addr.png | Bin 0 -> 125882 bytes .../docs/assets/images/benchmark-1.jpg | Bin 0 -> 56104 bytes .../docs/assets/images/benchmark-2.jpg | Bin 0 -> 55841 bytes .../build-devcontainers-vscode-command.png | Bin 0 -> 19999 bytes ...build-devcontainers-vscode-progressbar.png | Bin 0 -> 6033 bytes .../docs/assets/images/consumer-internal.png | Bin 0 -> 46289 bytes .../docs/assets/images/consumer-who.png | Bin 0 -> 28312 bytes .../assets/images/contributor-over-time.png | Bin 0 -> 49306 bytes .../control-plane-service-discovery.png | Bin 0 -> 94513 bytes .../docs/assets/images/create-a-route.png | Bin 0 -> 360565 bytes .../docs/assets/images/dashboard.jpeg | Bin 0 -> 290334 bytes .../assets/images/deployment-cp_and_dp.png | Bin 0 -> 59955 bytes .../assets/images/deployment-traditional.png | Bin 0 -> 48338 bytes .../docs/assets/images/discovery-cn.png | Bin 0 -> 42581 bytes .../docs/assets/images/discovery.png | Bin 0 -> 46310 bytes .../docs/assets/images/external-plugin.png | Bin 0 -> 301675 bytes .../docs/assets/images/flamegraph-1.jpg | Bin 0 -> 226090 bytes .../docs/assets/images/flamegraph-2.jpg | Bin 0 -> 245013 bytes .../docs/assets/images/flow-load-plugin.png | Bin 0 -> 509206 bytes .../assets/images/flow-plugin-internal.png | Bin 0 -> 100980 bytes .../images/flow-software-architecture.png | Bin 0 -> 334625 bytes .../health_check_node_state_diagram.png | Bin 0 -> 55197 bytes .../images/health_check_status_page.png | Bin 0 -> 23044 bytes .../docs/assets/images/latency-1.jpg | Bin 0 -> 51511 bytes .../docs/assets/images/latency-2.jpg | Bin 0 -> 52263 bytes .../docs/assets/images/list-of-routes.png | Bin 0 -> 407723 bytes .../assets/images/plugin/authz-keycloak.png | Bin 0 -> 51957 bytes .../assets/images/plugin/basic-auth-1.png | Bin 0 -> 69203 bytes .../assets/images/plugin/basic-auth-2.png | Bin 0 -> 5932 bytes .../docs/assets/images/plugin/grafana-1.png | Bin 0 -> 172664 bytes .../docs/assets/images/plugin/grafana-2.png | Bin 0 -> 166250 bytes .../docs/assets/images/plugin/grafana-3.png | Bin 0 -> 181975 bytes .../docs/assets/images/plugin/grafana-4.png | Bin 0 -> 54301 bytes .../docs/assets/images/plugin/hmac-auth-1.png | Bin 0 -> 68701 bytes .../docs/assets/images/plugin/hmac-auth-2.png | Bin 0 -> 88280 bytes .../docs/assets/images/plugin/inspect.png | Bin 0 -> 31490 bytes .../docs/assets/images/plugin/jaeger-1.png | Bin 0 -> 349947 bytes .../docs/assets/images/plugin/jaeger-2.png | Bin 0 -> 317479 bytes .../docs/assets/images/plugin/jwt-auth-1.png | Bin 0 -> 65449 bytes .../docs/assets/images/plugin/jwt-auth-2.png | Bin 0 -> 82630 bytes .../docs/assets/images/plugin/jwt-auth-3.png | Bin 0 -> 298562 bytes .../docs/assets/images/plugin/key-auth-1.png | Bin 0 -> 65554 bytes .../docs/assets/images/plugin/key-auth-2.png | Bin 0 -> 81728 bytes .../assets/images/plugin/limit-conn-1.png | Bin 0 -> 13027 bytes .../assets/images/plugin/limit-count-1.png | Bin 0 -> 13354 bytes .../docs/assets/images/plugin/limit-req-1.png | Bin 0 -> 11571 bytes .../assets/images/plugin/loggly-dashboard.png | Bin 0 -> 292312 bytes .../docs/assets/images/plugin/oauth-1.png | Bin 0 -> 39482 bytes .../assets/images/plugin/prometheus-1.png | Bin 0 -> 270640 bytes .../assets/images/plugin/prometheus-2.png | Bin 0 -> 287226 bytes .../assets/images/plugin/prometheus01.png | Bin 0 -> 153072 
bytes .../assets/images/plugin/prometheus02.png | Bin 0 -> 207617 bytes .../assets/images/plugin/skywalking-1.png | Bin 0 -> 4948 bytes .../assets/images/plugin/skywalking-3.png | Bin 0 -> 62575 bytes .../assets/images/plugin/skywalking-4.png | Bin 0 -> 30340 bytes .../assets/images/plugin/skywalking-5.png | Bin 0 -> 49605 bytes .../assets/images/plugin/sls-logger-1.png | Bin 0 -> 429438 bytes .../images/plugin/splunk-hec-admin-cn.png | Bin 0 -> 462846 bytes .../images/plugin/splunk-hec-admin-en.png | Bin 0 -> 444224 bytes .../docs/assets/images/plugin/wolf-rbac-1.png | Bin 0 -> 68988 bytes .../docs/assets/images/plugin/wolf-rbac-2.png | Bin 0 -> 89248 bytes .../docs/assets/images/plugin/zipkin-1.jpg | Bin 0 -> 247284 bytes .../docs/assets/images/plugin/zipkin-1.png | Bin 0 -> 7647 bytes .../docs/assets/images/plugin/zipkin-2.jpg | Bin 0 -> 117181 bytes .../assets/images/pubsub-architecture.svg | 4 + .../docs/assets/images/requesturl.jpg | Bin 0 -> 29728 bytes .../docs/assets/images/routes-example.png | Bin 0 -> 97069 bytes .../docs/assets/images/secret.png | Bin 0 -> 81803 bytes .../docs/assets/images/service-example.png | Bin 0 -> 94893 bytes .../docs/assets/images/skip-mtls.png | Bin 0 -> 26312 bytes .../update-docker-desktop-file-sharing.png | Bin 0 -> 166679 bytes .../docs/assets/images/upstream-example.png | Bin 0 -> 95051 bytes .../assets/other/apisix-plugin-design.graffle | Bin 0 -> 14912 bytes .../other/json/apisix-grafana-dashboard.json | 1984 ++++++++++ .../apisix-source/docs/en/latest/FAQ.md | 775 ++++ .../apisix-source/docs/en/latest/admin-api.md | 1783 +++++++++ .../docs/en/latest/apisix-variable.md | 54 + .../en/latest/architecture-design/apisix.md | 51 + .../apisix-source/docs/en/latest/aws.md | 276 ++ .../docs/en/latest/batch-processor.md | 149 + .../apisix-source/docs/en/latest/benchmark.md | 151 + ...ld-apisix-dev-environment-devcontainers.md | 119 + .../build-apisix-dev-environment-on-mac.md | 94 + .../docs/en/latest/building-apisix.md | 267 ++ .../docs/en/latest/certificate.md | 328 ++ .../apisix-source/docs/en/latest/config.json | 428 +++ .../docs/en/latest/control-api.md | 555 +++ .../latest/customize-nginx-configuration.md | 63 + .../docs/en/latest/debug-function.md | 162 + .../docs/en/latest/debug-mode.md | 140 + .../docs/en/latest/deployment-modes.md | 1032 +++++ .../apisix-source/docs/en/latest/discovery.md | 307 ++ .../docs/en/latest/discovery/consul.md | 344 ++ .../docs/en/latest/discovery/consul_kv.md | 314 ++ .../control-plane-service-discovery.md | 72 + .../docs/en/latest/discovery/dns.md | 155 + .../docs/en/latest/discovery/eureka.md | 25 + .../docs/en/latest/discovery/kubernetes.md | 406 ++ .../docs/en/latest/discovery/nacos.md | 280 ++ .../plugins-hmac-auth-generate-signature.md | 204 + .../docs/en/latest/external-plugin.md | 122 + .../docs/en/latest/getting-started/README.md | 71 + .../getting-started/configure-routes.md | 73 + .../getting-started/key-authentication.md | 184 + .../latest/getting-started/load-balancing.md | 99 + .../latest/getting-started/rate-limiting.md | 104 + .../docs/en/latest/grpc-proxy.md | 122 + .../apisix-source/docs/en/latest/http3.md | 186 + .../docs/en/latest/install-dependencies.md | 52 + .../docs/en/latest/installation-guide.md | 340 ++ .../docs/en/latest/internal/plugin-runner.md | 78 + .../en/latest/internal/testing-framework.md | 376 ++ .../apisix-source/docs/en/latest/mtls.md | 210 ++ .../docs/en/latest/plugin-develop.md | 503 +++ .../plugins/ai-aws-content-moderation.md | 247 ++ 
.../en/latest/plugins/ai-prompt-decorator.md | 109 + .../docs/en/latest/plugins/ai-prompt-guard.md | 89 + .../en/latest/plugins/ai-prompt-template.md | 102 + .../docs/en/latest/plugins/ai-proxy-multi.md | 1005 +++++ .../docs/en/latest/plugins/ai-proxy.md | 453 +++ .../docs/en/latest/plugins/ai-rag.md | 235 ++ .../en/latest/plugins/ai-rate-limiting.md | 873 +++++ .../en/latest/plugins/ai-request-rewrite.md | 177 + .../docs/en/latest/plugins/api-breaker.md | 136 + .../latest/plugins/attach-consumer-label.md | 180 + .../docs/en/latest/plugins/authz-casbin.md | 263 ++ .../docs/en/latest/plugins/authz-casdoor.md | 118 + .../docs/en/latest/plugins/authz-keycloak.md | 241 ++ .../docs/en/latest/plugins/aws-lambda.md | 217 ++ .../docs/en/latest/plugins/azure-functions.md | 199 + .../docs/en/latest/plugins/basic-auth.md | 514 +++ .../docs/en/latest/plugins/batch-requests.md | 225 ++ .../en/latest/plugins/body-transformer.md | 609 +++ .../docs/en/latest/plugins/brotli.md | 138 + .../docs/en/latest/plugins/cas-auth.md | 117 + .../docs/en/latest/plugins/chaitin-waf.md | 284 ++ .../en/latest/plugins/clickhouse-logger.md | 207 ++ .../docs/en/latest/plugins/client-control.md | 113 + .../en/latest/plugins/consumer-restriction.md | 347 ++ .../docs/en/latest/plugins/cors.md | 142 + .../docs/en/latest/plugins/csrf.md | 154 + .../docs/en/latest/plugins/datadog.md | 164 + .../docs/en/latest/plugins/degraphql.md | 337 ++ .../docs/en/latest/plugins/dubbo-proxy.md | 191 + .../docs/en/latest/plugins/echo.md | 116 + .../en/latest/plugins/elasticsearch-logger.md | 445 +++ .../en/latest/plugins/error-log-logger.md | 181 + .../en/latest/plugins/ext-plugin-post-req.md | 33 + .../en/latest/plugins/ext-plugin-post-resp.md | 111 + .../en/latest/plugins/ext-plugin-pre-req.md | 107 + .../docs/en/latest/plugins/fault-injection.md | 293 ++ .../docs/en/latest/plugins/file-logger.md | 226 ++ .../docs/en/latest/plugins/forward-auth.md | 186 + .../docs/en/latest/plugins/gm.md | 31 + .../en/latest/plugins/google-cloud-logging.md | 223 ++ .../docs/en/latest/plugins/grpc-transcode.md | 391 ++ .../docs/en/latest/plugins/grpc-web.md | 110 + .../docs/en/latest/plugins/gzip.md | 123 + .../docs/en/latest/plugins/hmac-auth.md | 760 ++++ .../docs/en/latest/plugins/http-dubbo.md | 128 + .../docs/en/latest/plugins/http-logger.md | 194 + .../docs/en/latest/plugins/inspect.md | 188 + .../docs/en/latest/plugins/ip-restriction.md | 154 + .../docs/en/latest/plugins/jwe-decrypt.md | 198 + .../docs/en/latest/plugins/jwt-auth.md | 911 +++++ .../docs/en/latest/plugins/kafka-logger.md | 249 ++ .../docs/en/latest/plugins/kafka-proxy.md | 83 + .../docs/en/latest/plugins/key-auth.md | 571 +++ .../docs/en/latest/plugins/lago.md | 255 ++ .../docs/en/latest/plugins/ldap-auth.md | 168 + .../docs/en/latest/plugins/limit-conn.md | 420 +++ .../docs/en/latest/plugins/limit-count.md | 507 +++ .../docs/en/latest/plugins/limit-req.md | 284 ++ .../docs/en/latest/plugins/log-rotate.md | 118 + .../docs/en/latest/plugins/loggly.md | 183 + .../docs/en/latest/plugins/loki-logger.md | 403 ++ .../docs/en/latest/plugins/mocking.md | 250 ++ .../docs/en/latest/plugins/mqtt-proxy.md | 169 + .../docs/en/latest/plugins/multi-auth.md | 164 + .../docs/en/latest/plugins/node-status.md | 128 + .../docs/en/latest/plugins/ocsp-stapling.md | 142 + .../docs/en/latest/plugins/opa.md | 327 ++ .../docs/en/latest/plugins/openfunction.md | 170 + .../docs/en/latest/plugins/openid-connect.md | 257 ++ .../docs/en/latest/plugins/opentelemetry.md | 220 ++ .../docs/en/latest/plugins/openwhisk.md | 
139 + .../docs/en/latest/plugins/prometheus.md | 476 +++ .../docs/en/latest/plugins/proxy-cache.md | 379 ++ .../docs/en/latest/plugins/proxy-control.md | 103 + .../docs/en/latest/plugins/proxy-mirror.md | 145 + .../docs/en/latest/plugins/proxy-rewrite.md | 509 +++ .../docs/en/latest/plugins/public-api.md | 245 ++ .../docs/en/latest/plugins/real-ip.md | 202 + .../docs/en/latest/plugins/redirect.md | 172 + .../en/latest/plugins/referer-restriction.md | 135 + .../docs/en/latest/plugins/request-id.md | 296 ++ .../en/latest/plugins/request-validation.md | 528 +++ .../en/latest/plugins/response-rewrite.md | 313 ++ .../docs/en/latest/plugins/rocketmq-logger.md | 280 ++ .../docs/en/latest/plugins/server-info.md | 118 + .../docs/en/latest/plugins/serverless.md | 144 + .../en/latest/plugins/skywalking-logger.md | 344 ++ .../docs/en/latest/plugins/skywalking.md | 176 + .../docs/en/latest/plugins/sls-logger.md | 184 + .../en/latest/plugins/splunk-hec-logging.md | 212 ++ .../docs/en/latest/plugins/syslog.md | 154 + .../docs/en/latest/plugins/tcp-logger.md | 189 + .../en/latest/plugins/tencent-cloud-cls.md | 196 + .../docs/en/latest/plugins/traffic-split.md | 637 ++++ .../docs/en/latest/plugins/ua-restriction.md | 159 + .../docs/en/latest/plugins/udp-logger.md | 186 + .../docs/en/latest/plugins/uri-blocker.md | 120 + .../docs/en/latest/plugins/wolf-rbac.md | 296 ++ .../docs/en/latest/plugins/workflow.md | 386 ++ .../docs/en/latest/plugins/zipkin.md | 265 ++ .../apisix-source/docs/en/latest/profile.md | 140 + .../apisix-source/docs/en/latest/pubsub.md | 148 + .../docs/en/latest/pubsub/kafka.md | 128 + .../docs/en/latest/router-radixtree.md | 415 +++ .../docs/en/latest/ssl-protocol.md | 352 ++ .../docs/en/latest/status-api.md | 78 + .../docs/en/latest/stream-proxy.md | 243 ++ .../docs/en/latest/support-fips-in-apisix.md | 60 + .../docs/en/latest/terminology/api-gateway.md | 44 + .../en/latest/terminology/consumer-group.md | 122 + .../docs/en/latest/terminology/consumer.md | 174 + .../docs/en/latest/terminology/credential.md | 151 + .../docs/en/latest/terminology/global-rule.md | 70 + .../en/latest/terminology/plugin-config.md | 171 + .../en/latest/terminology/plugin-metadata.md | 83 + .../docs/en/latest/terminology/plugin.md | 346 ++ .../docs/en/latest/terminology/route.md | 91 + .../docs/en/latest/terminology/router.md | 56 + .../docs/en/latest/terminology/script.md | 39 + .../docs/en/latest/terminology/secret.md | 349 ++ .../docs/en/latest/terminology/service.md | 118 + .../docs/en/latest/terminology/upstream.md | 249 ++ .../tutorials/add-multiple-api-versions.md | 245 ++ .../latest/tutorials/cache-api-responses.md | 231 ++ .../latest/tutorials/client-to-apisix-mtls.md | 334 ++ .../docs/en/latest/tutorials/expose-api.md | 123 + .../docs/en/latest/tutorials/health-check.md | 240 ++ .../docs/en/latest/tutorials/keycloak-oidc.md | 467 +++ .../latest/tutorials/manage-api-consumers.md | 264 ++ .../tutorials/monitor-api-health-check.md | 192 + .../en/latest/tutorials/observe-your-api.md | 267 ++ .../docs/en/latest/tutorials/protect-api.md | 132 + .../tutorials/websocket-authentication.md | 129 + .../upgrade-guide-from-2.15.x-to-3.0.0.md | 454 +++ .../apisix-source/docs/en/latest/wasm.md | 122 + .../apisix-source/docs/en/latest/xrpc.md | 211 ++ .../docs/en/latest/xrpc/redis.md | 132 + .../apisix-source/docs/zh/latest/CHANGELOG.md | 1593 ++++++++ .../docs/zh/latest/CODE_STYLE.md | 426 +++ .../apisix-source/docs/zh/latest/FAQ.md | 777 ++++ .../apisix-source/docs/zh/latest/README.md | 259 ++ 
.../apisix-source/docs/zh/latest/admin-api.md | 1717 +++++++++ .../docs/zh/latest/apisix-variable.md | 53 + .../zh/latest/architecture-design/apisix.md | 48 + .../docs/zh/latest/batch-processor.md | 147 + .../apisix-source/docs/zh/latest/benchmark.md | 150 + .../build-apisix-dev-environment-on-mac.md | 94 + .../docs/zh/latest/building-apisix.md | 265 ++ .../docs/zh/latest/certificate.md | 324 ++ .../apisix-source/docs/zh/latest/config.json | 357 ++ .../docs/zh/latest/control-api.md | 236 ++ .../latest/customize-nginx-configuration.md | 63 + .../docs/zh/latest/debug-function.md | 163 + .../docs/zh/latest/debug-mode.md | 110 + .../apisix-source/docs/zh/latest/discovery.md | 290 ++ .../control-plane-service-discovery.md | 72 + .../docs/zh/latest/discovery/dns.md | 146 + .../docs/zh/latest/discovery/eureka.md | 25 + .../docs/zh/latest/discovery/kubernetes.md | 403 ++ .../docs/zh/latest/discovery/nacos.md | 283 ++ .../docs/zh/latest/external-plugin.md | 111 + .../docs/zh/latest/getting-started/README.md | 71 + .../getting-started/configure-routes.md | 71 + .../getting-started/key-authentication.md | 184 + .../latest/getting-started/load-balancing.md | 99 + .../latest/getting-started/rate-limiting.md | 101 + .../docs/zh/latest/grpc-proxy.md | 124 + .../apisix-source/docs/zh/latest/http3.md | 186 + .../docs/zh/latest/install-dependencies.md | 52 + .../docs/zh/latest/installation-guide.md | 330 ++ .../apisix-source/docs/zh/latest/mtls.md | 205 + .../docs/zh/latest/plugin-develop.md | 480 +++ .../docs/zh/latest/plugins/api-breaker.md | 137 + .../latest/plugins/attach-consumer-label.md | 180 + .../docs/zh/latest/plugins/authz-casbin.md | 272 ++ .../docs/zh/latest/plugins/authz-casdoor.md | 115 + .../docs/zh/latest/plugins/authz-keycloak.md | 216 ++ .../docs/zh/latest/plugins/aws-lambda.md | 224 ++ .../docs/zh/latest/plugins/azure-functions.md | 215 ++ .../docs/zh/latest/plugins/basic-auth.md | 512 +++ .../docs/zh/latest/plugins/batch-requests.md | 234 ++ .../zh/latest/plugins/body-transformer.md | 609 +++ .../docs/zh/latest/plugins/brotli.md | 133 + .../docs/zh/latest/plugins/chaitin-waf.md | 263 ++ .../zh/latest/plugins/clickhouse-logger.md | 209 ++ .../docs/zh/latest/plugins/client-control.md | 116 + .../zh/latest/plugins/consumer-restriction.md | 353 ++ .../docs/zh/latest/plugins/cors.md | 127 + .../docs/zh/latest/plugins/csrf.md | 155 + .../docs/zh/latest/plugins/datadog.md | 210 ++ .../docs/zh/latest/plugins/dubbo-proxy.md | 156 + .../docs/zh/latest/plugins/echo.md | 133 + .../zh/latest/plugins/elasticsearch-logger.md | 446 +++ .../zh/latest/plugins/error-log-logger.md | 192 + .../zh/latest/plugins/ext-plugin-post-req.md | 33 + .../zh/latest/plugins/ext-plugin-post-resp.md | 119 + .../zh/latest/plugins/ext-plugin-pre-req.md | 110 + .../docs/zh/latest/plugins/fault-injection.md | 299 ++ .../docs/zh/latest/plugins/file-logger.md | 242 ++ .../docs/zh/latest/plugins/forward-auth.md | 189 + .../docs/zh/latest/plugins/gm.md | 191 + .../zh/latest/plugins/google-cloud-logging.md | 231 ++ .../docs/zh/latest/plugins/grpc-transcode.md | 396 ++ .../docs/zh/latest/plugins/grpc-web.md | 113 + .../docs/zh/latest/plugins/gzip.md | 126 + .../docs/zh/latest/plugins/hmac-auth.md | 760 ++++ .../docs/zh/latest/plugins/http-dubbo.md | 124 + .../docs/zh/latest/plugins/http-logger.md | 191 + .../docs/zh/latest/plugins/ip-restriction.md | 153 + .../docs/zh/latest/plugins/jwe-decrypt.md | 199 + .../docs/zh/latest/plugins/jwt-auth.md | 907 +++++ .../docs/zh/latest/plugins/kafka-logger.md | 247 ++ 
.../docs/zh/latest/plugins/key-auth.md | 570 +++ .../docs/zh/latest/plugins/ldap-auth.md | 167 + .../docs/zh/latest/plugins/limit-conn.md | 420 +++ .../docs/zh/latest/plugins/limit-count.md | 508 +++ .../docs/zh/latest/plugins/limit-req.md | 289 ++ .../docs/zh/latest/plugins/log-rotate.md | 113 + .../docs/zh/latest/plugins/loggly.md | 184 + .../docs/zh/latest/plugins/loki-logger.md | 407 ++ .../docs/zh/latest/plugins/mocking.md | 255 ++ .../docs/zh/latest/plugins/mqtt-proxy.md | 174 + .../docs/zh/latest/plugins/multi-auth.md | 175 + .../docs/zh/latest/plugins/node-status.md | 125 + .../docs/zh/latest/plugins/ocsp-stapling.md | 144 + .../docs/zh/latest/plugins/opa.md | 329 ++ .../docs/zh/latest/plugins/openfunction.md | 171 + .../docs/zh/latest/plugins/openid-connect.md | 257 ++ .../docs/zh/latest/plugins/opentelemetry.md | 217 ++ .../docs/zh/latest/plugins/openwhisk.md | 148 + .../docs/zh/latest/plugins/prometheus.md | 474 +++ .../docs/zh/latest/plugins/proxy-cache.md | 379 ++ .../docs/zh/latest/plugins/proxy-control.md | 104 + .../docs/zh/latest/plugins/proxy-mirror.md | 145 + .../docs/zh/latest/plugins/proxy-rewrite.md | 509 +++ .../docs/zh/latest/plugins/public-api.md | 242 ++ .../docs/zh/latest/plugins/real-ip.md | 202 + .../docs/zh/latest/plugins/redirect.md | 177 + .../zh/latest/plugins/referer-restriction.md | 142 + .../docs/zh/latest/plugins/request-id.md | 292 ++ .../zh/latest/plugins/request-validation.md | 528 +++ .../zh/latest/plugins/response-rewrite.md | 313 ++ .../docs/zh/latest/plugins/rocketmq-logger.md | 225 ++ .../docs/zh/latest/plugins/server-info.md | 118 + .../docs/zh/latest/plugins/serverless.md | 147 + .../zh/latest/plugins/skywalking-logger.md | 343 ++ .../docs/zh/latest/plugins/skywalking.md | 180 + .../docs/zh/latest/plugins/sls-logger.md | 180 + .../zh/latest/plugins/splunk-hec-logging.md | 218 ++ .../docs/zh/latest/plugins/syslog.md | 144 + .../docs/zh/latest/plugins/tcp-logger.md | 194 + .../zh/latest/plugins/tencent-cloud-cls.md | 202 + .../docs/zh/latest/plugins/traffic-split.md | 637 ++++ .../docs/zh/latest/plugins/ua-restriction.md | 159 + .../docs/zh/latest/plugins/udp-logger.md | 191 + .../docs/zh/latest/plugins/uri-blocker.md | 117 + .../docs/zh/latest/plugins/wolf-rbac.md | 301 ++ .../docs/zh/latest/plugins/workflow.md | 386 ++ .../docs/zh/latest/plugins/zipkin.md | 265 ++ .../apisix-source/docs/zh/latest/profile.md | 42 + .../docs/zh/latest/router-radixtree.md | 353 ++ .../docs/zh/latest/ssl-protocol.md | 353 ++ .../docs/zh/latest/status-api.md | 78 + .../docs/zh/latest/stream-proxy.md | 234 ++ .../docs/zh/latest/support-fips-in-apisix.md | 60 + .../docs/zh/latest/terminology/api-gateway.md | 42 + .../zh/latest/terminology/consumer-group.md | 124 + .../docs/zh/latest/terminology/consumer.md | 177 + .../docs/zh/latest/terminology/credential.md | 152 + .../docs/zh/latest/terminology/global-rule.md | 73 + .../zh/latest/terminology/plugin-config.md | 178 + .../zh/latest/terminology/plugin-metadata.md | 85 + .../docs/zh/latest/terminology/plugin.md | 348 ++ .../docs/zh/latest/terminology/route.md | 136 + .../docs/zh/latest/terminology/router.md | 56 + .../docs/zh/latest/terminology/script.md | 46 + .../docs/zh/latest/terminology/secret.md | 351 ++ .../docs/zh/latest/terminology/service.md | 124 + .../docs/zh/latest/terminology/upstream.md | 255 ++ .../latest/tutorials/client-to-apisix-mtls.md | 328 ++ .../docs/zh/latest/tutorials/expose-api.md | 126 + .../docs/zh/latest/tutorials/health-check.md | 240 ++ .../docs/zh/latest/tutorials/keycloak-oidc.md | 467 
+++ .../zh/latest/tutorials/observe-your-api.md | 246 ++ .../docs/zh/latest/tutorials/protect-api.md | 138 + .../upgrade-guide-from-2.15.x-to-3.0.0.md | 516 +++ .../apisix-source/docs/zh/latest/wasm.md | 84 + .../example/apisix/plugins/3rd-party.lua | 51 + .../apisix/stream/plugins/3rd-party.lua | 51 + .../example/build-dev-image.dockerfile | 32 + .../APISIX/apisix-source/example/my_hook.lua | 29 + .../apisix-source/logos/apache-apisix.png | Bin 0 -> 573324 bytes .../apisix-source/logos/apisix-white-bg.jpg | Bin 0 -> 97975 bytes .../logos/cncf-landscape-white-bg.jpg | Bin 0 -> 131627 bytes .../apisix-source/logos/cncf-white-bg.jpg | Bin 0 -> 47439 bytes .../APISIX/apisix-source/powered-by.md | 129 + .../APISIX/apisix-source/t/APISIX.pm | 1029 +++++ .../APISIX/apisix-source/t/admin/api.t | 246 ++ .../APISIX/apisix-source/t/admin/balancer.t | 243 ++ .../t/admin/consumer-group-force-delete.t | 163 + .../apisix-source/t/admin/consumer-group.t | 549 +++ .../APISIX/apisix-source/t/admin/consumers.t | 362 ++ .../APISIX/apisix-source/t/admin/consumers2.t | 176 + .../apisix-source/t/admin/credentials.t | 494 +++ .../APISIX/apisix-source/t/admin/filter.t | 1055 ++++++ .../apisix-source/t/admin/global-rules.t | 506 +++ .../apisix-source/t/admin/global-rules2.t | 146 + .../apisix-source/t/admin/health-check.t | 521 +++ .../apisix-source/t/admin/metadata.spec.ts | 170 + .../APISIX/apisix-source/t/admin/metadata.t | 36 + .../t/admin/plugin-configs-force-delete.t | 163 + .../apisix-source/t/admin/plugin-configs.t | 523 +++ .../apisix-source/t/admin/plugin-metadata.t | 335 ++ .../apisix-source/t/admin/plugin-metadata2.t | 61 + .../apisix-source/t/admin/plugins-reload.t | 429 +++ .../APISIX/apisix-source/t/admin/plugins.t | 480 +++ .../t/admin/protos-force-delete.t | 175 + .../APISIX/apisix-source/t/admin/protos.t | 216 ++ .../APISIX/apisix-source/t/admin/resources.t | 55 + .../t/admin/response_body_format.t | 255 ++ .../t/admin/routes-array-nodes.t | 115 + .../APISIX/apisix-source/t/admin/routes.t | 788 ++++ .../APISIX/apisix-source/t/admin/routes2.t | 653 ++++ .../APISIX/apisix-source/t/admin/routes3.t | 743 ++++ .../APISIX/apisix-source/t/admin/routes4.t | 795 ++++ .../t/admin/routes_request_body.t | 274 ++ .../apisix-source/t/admin/schema-validate.t | 441 +++ .../APISIX/apisix-source/t/admin/schema.t | 250 ++ .../APISIX/apisix-source/t/admin/secrets.t | 279 ++ .../t/admin/services-array-nodes.t | 105 + .../t/admin/services-force-delete.t | 156 + .../t/admin/services-string-id.t | 745 ++++ .../APISIX/apisix-source/t/admin/services.t | 1281 +++++++ .../APISIX/apisix-source/t/admin/services2.t | 300 ++ .../APISIX/apisix-source/t/admin/ssl.t | 802 ++++ .../APISIX/apisix-source/t/admin/ssl2.t | 496 +++ .../APISIX/apisix-source/t/admin/ssl3.t | 63 + .../APISIX/apisix-source/t/admin/ssl4.t | 510 +++ .../APISIX/apisix-source/t/admin/ssl5.t | 86 + .../APISIX/apisix-source/t/admin/ssls.t | 75 + .../t/admin/standalone-healthcheck.t | 128 + .../apisix-source/t/admin/standalone.spec.ts | 442 +++ .../APISIX/apisix-source/t/admin/standalone.t | 258 ++ .../t/admin/stream-routes-disable.t | 66 + .../apisix-source/t/admin/stream-routes.t | 653 ++++ .../APISIX/apisix-source/t/admin/token.t | 179 + .../t/admin/upstream-array-nodes.t | 435 +++ .../t/admin/upstream-force-delete.t | 154 + .../APISIX/apisix-source/t/admin/upstream.t | 725 ++++ .../APISIX/apisix-source/t/admin/upstream2.t | 295 ++ .../APISIX/apisix-source/t/admin/upstream3.t | 768 ++++ .../APISIX/apisix-source/t/admin/upstream4.t | 668 ++++ 
.../APISIX/apisix-source/t/admin/upstream5.t | 599 +++ .../APISIX/apisix-source/t/apisix.luacov | 38 + .../t/assets/ai-proxy-response.json | 15 + .../assets/content-moderation-responses.json | 224 ++ .../apisix-source/t/assets/embeddings.json | 25 + .../APISIX/apisix-source/t/certs/apisix.crt | 27 + .../APISIX/apisix-source/t/certs/apisix.key | 39 + .../t/certs/apisix_admin_ssl.crt | 33 + .../t/certs/apisix_admin_ssl.key | 51 + .../apisix-source/t/certs/apisix_ecc.crt | 13 + .../apisix-source/t/certs/apisix_ecc.key | 8 + .../apisix-source/t/certs/client_enc.crt | 12 + .../apisix-source/t/certs/client_enc.key | 5 + .../apisix-source/t/certs/client_sign.crt | 12 + .../apisix-source/t/certs/client_sign.key | 5 + .../APISIX/apisix-source/t/certs/etcd.key | 28 + .../APISIX/apisix-source/t/certs/etcd.pem | 22 + .../APISIX/apisix-source/t/certs/gm_ca.crt | 26 + .../apisix-source/t/certs/incorrect.crt | 12 + .../apisix-source/t/certs/incorrect.key | 12 + .../t/certs/localhost_slapd_cert.pem | 21 + .../t/certs/localhost_slapd_key.pem | 28 + .../APISIX/apisix-source/t/certs/mtls_ca.crt | 20 + .../APISIX/apisix-source/t/certs/mtls_ca.key | 27 + .../apisix-source/t/certs/mtls_client.crt | 20 + .../apisix-source/t/certs/mtls_client.key | 27 + .../apisix-source/t/certs/mtls_server.crt | 21 + .../apisix-source/t/certs/mtls_server.key | 27 + .../apisix-source/t/certs/ocsp/ecc_good.crt | 45 + .../apisix-source/t/certs/ocsp/ecc_good.key | 8 + .../apisix-source/t/certs/ocsp/index.txt | 4 + .../apisix-source/t/certs/ocsp/rsa_good.crt | 50 + .../apisix-source/t/certs/ocsp/rsa_good.key | 27 + .../t/certs/ocsp/rsa_revoked.crt | 50 + .../t/certs/ocsp/rsa_revoked.key | 27 + .../t/certs/ocsp/rsa_unknown.crt | 50 + .../t/certs/ocsp/rsa_unknown.key | 27 + .../apisix-source/t/certs/ocsp/signer.crt | 22 + .../apisix-source/t/certs/ocsp/signer.key | 27 + .../apisix-source/t/certs/openssl-test2.conf | 40 + .../APISIX/apisix-source/t/certs/openssl.conf | 40 + .../APISIX/apisix-source/t/certs/private.pem | 27 + .../APISIX/apisix-source/t/certs/public.pem | 9 + .../apisix-source/t/certs/server_1024.crt | 12 + .../apisix-source/t/certs/server_1024.key | 16 + .../apisix-source/t/certs/server_enc.crt | 12 + .../apisix-source/t/certs/server_enc.key | 5 + .../apisix-source/t/certs/server_sign.crt | 12 + .../apisix-source/t/certs/server_sign.key | 5 + .../APISIX/apisix-source/t/certs/test-dot.crt | 18 + .../APISIX/apisix-source/t/certs/test-dot.key | 28 + .../APISIX/apisix-source/t/certs/test2.crt | 28 + .../APISIX/apisix-source/t/certs/test2.key | 39 + .../apisix-source/t/certs/vector_logs_ca.crt | 21 + .../apisix-source/t/certs/vector_logs_ca.key | 27 + .../t/certs/vector_logs_server.crt | 19 + .../t/certs/vector_logs_server.key | 27 + .../t/chaos/delayetcd/delayetcd.go | 200 + .../APISIX/apisix-source/t/chaos/e2e.go | 25 + .../APISIX/apisix-source/t/chaos/e2e_test.go | 31 + .../APISIX/apisix-source/t/chaos/go.mod | 52 + .../APISIX/apisix-source/t/chaos/go.sum | 1120 ++++++ .../t/chaos/killetcd/killetcd.go | 164 + .../t/chaos/kubernetes/deployment.yaml | 115 + .../t/chaos/kubernetes/service.yaml | 43 + .../apisix-source/t/chaos/utils/Dockerfile | 76 + .../apisix-source/t/chaos/utils/kube_utils.go | 132 + .../t/chaos/utils/setup_chaos_utils.sh | 58 + .../apisix-source/t/chaos/utils/utils.go | 292 ++ .../APISIX/apisix-source/t/cli/cli.t | 60 + .../t/cli/cli_envsubst_confusion.t | 111 + .../APISIX/apisix-source/t/cli/common.sh | 43 + .../t/cli/docker-compose-etcd-cluster.yaml | 72 + .../apisix-source/t/cli/test_access_log.sh | 262 
++ .../APISIX/apisix-source/t/cli/test_admin.sh | 492 +++ .../apisix-source/t/cli/test_admin_mtls.sh | 55 + .../apisix-source/t/cli/test_admin_ui.sh | 148 + .../apisix-source/t/cli/test_apisix_mirror.sh | 63 + .../apisix-source/t/cli/test_ci_only.sh | 66 + .../APISIX/apisix-source/t/cli/test_cmd.sh | 224 ++ .../apisix-source/t/cli/test_control.sh | 181 + .../apisix-source/t/cli/test_core_config.sh | 73 + .../t/cli/test_deployment_control_plane.sh | 69 + .../t/cli/test_deployment_data_plane.sh | 83 + ...eployment_data_plane_with_readonly_etcd.sh | 178 + .../t/cli/test_deployment_traditional.sh | 124 + .../APISIX/apisix-source/t/cli/test_dns.sh | 175 + .../APISIX/apisix-source/t/cli/test_dubbo.sh | 55 + .../APISIX/apisix-source/t/cli/test_etcd.sh | 201 + .../t/cli/test_etcd_healthcheck.sh | 145 + .../apisix-source/t/cli/test_etcd_mtls.sh | 210 ++ .../t/cli/test_etcd_sync_event_handle.sh | 133 + .../apisix-source/t/cli/test_etcd_tls.sh | 74 + .../apisix-source/t/cli/test_http_config.sh | 66 + .../apisix-source/t/cli/test_kubernetes.sh | 113 + .../APISIX/apisix-source/t/cli/test_main.sh | 1002 +++++ .../apisix-source/t/cli/test_makefile.sh | 42 + .../t/cli/test_opentelemetry_set_ngx_var.sh | 48 + .../apisix-source/t/cli/test_prometheus.sh | 181 + .../t/cli/test_prometheus_reload.sh | 91 + .../cli/test_prometheus_run_in_privileged.sh | 113 + .../t/cli/test_prometheus_stream.sh | 96 + .../t/cli/test_proxy_mirror_timeout.sh | 43 + .../t/cli/test_route_match_with_graphql.sh | 98 + .../apisix-source/t/cli/test_serverless.sh | 107 + .../apisix-source/t/cli/test_snippet.sh | 131 + .../apisix-source/t/cli/test_standalone.sh | 157 + .../apisix-source/t/cli/test_status_api.sh | 78 + .../apisix-source/t/cli/test_stream_config.sh | 111 + .../apisix-source/t/cli/test_tls_over_tcp.sh | 67 + .../apisix-source/t/cli/test_upstream_mtls.sh | 211 ++ .../t/cli/test_validate_config.sh | 206 + .../APISIX/apisix-source/t/cli/test_wasm.sh | 66 + .../t/cli/test_zipkin_set_ngx_var.sh | 48 + .../t/config-center-json/consumer-group.t | 212 ++ .../t/config-center-json/consumer.t | 93 + .../t/config-center-json/global-rule.t | 160 + .../t/config-center-json/plugin-configs.t | 175 + .../t/config-center-json/plugin-metadata.t | 100 + .../t/config-center-json/plugin.t | 291 ++ .../t/config-center-json/route-service.t | 379 ++ .../t/config-center-json/route-upstream.t | 244 ++ .../t/config-center-json/route.t | 364 ++ .../t/config-center-json/secret.t | 458 +++ .../apisix-source/t/config-center-json/ssl.t | 191 + .../t/config-center-json/stream-route.t | 148 + .../t/config-center-yaml/consumer-group.t | 140 + .../t/config-center-yaml/consumer.t | 88 + .../t/config-center-yaml/global-rule.t | 136 + .../t/config-center-yaml/plugin-configs.t | 144 + .../t/config-center-yaml/plugin-metadata.t | 89 + .../t/config-center-yaml/plugin.t | 229 ++ .../t/config-center-yaml/route-service.t | 297 ++ .../t/config-center-yaml/route-upstream.t | 206 + .../t/config-center-yaml/route.t | 297 ++ .../t/config-center-yaml/secret.t | 390 ++ .../apisix-source/t/config-center-yaml/ssl.t | 315 ++ .../t/config-center-yaml/stream-route.t | 127 + .../t/control/control-healthcheck-bug-fix.t | 134 + .../apisix-source/t/control/discovery.t | 221 ++ .../APISIX/apisix-source/t/control/gc.t | 66 + .../apisix-source/t/control/healthcheck.t | 305 ++ .../apisix-source/t/control/plugin-api.t | 55 + .../apisix-source/t/control/plugin-metadata.t | 113 + .../apisix-source/t/control/plugins-reload.t | 341 ++ .../APISIX/apisix-source/t/control/routes.t | 142 + 
.../APISIX/apisix-source/t/control/schema.t | 149 + .../APISIX/apisix-source/t/control/services.t | 188 + .../apisix-source/t/control/upstreams.t | 146 + .../apisix-source/t/core/config-default.t | 140 + .../APISIX/apisix-source/t/core/config.t | 347 ++ .../APISIX/apisix-source/t/core/config_etcd.t | 516 +++ .../APISIX/apisix-source/t/core/config_util.t | 119 + .../APISIX/apisix-source/t/core/ctx.t | 917 +++++ .../APISIX/apisix-source/t/core/ctx2.t | 449 +++ .../APISIX/apisix-source/t/core/ctx3.t | 101 + .../apisix-source/t/core/ctx_with_params.t | 153 + .../APISIX/apisix-source/t/core/env.t | 181 + .../apisix-source/t/core/etcd-auth-fail.t | 95 + .../APISIX/apisix-source/t/core/etcd-auth.t | 97 + .../APISIX/apisix-source/t/core/etcd-mtls.t | 282 ++ .../APISIX/apisix-source/t/core/etcd-sync.t | 159 + .../APISIX/apisix-source/t/core/etcd-write.t | 1107 ++++++ .../APISIX/apisix-source/t/core/etcd.t | 429 +++ .../APISIX/apisix-source/t/core/json.t | 153 + .../APISIX/apisix-source/t/core/log.t | 200 + .../APISIX/apisix-source/t/core/lrucache.t | 271 ++ .../APISIX/apisix-source/t/core/os.t | 91 + .../APISIX/apisix-source/t/core/profile.t | 52 + .../APISIX/apisix-source/t/core/random.t | 73 + .../APISIX/apisix-source/t/core/request.t | 492 +++ .../APISIX/apisix-source/t/core/resolver.t | 151 + .../APISIX/apisix-source/t/core/response.t | 202 + .../APISIX/apisix-source/t/core/schema.t | 148 + .../APISIX/apisix-source/t/core/schema_def.t | 239 ++ .../APISIX/apisix-source/t/core/string.t | 139 + .../APISIX/apisix-source/t/core/table.t | 361 ++ .../APISIX/apisix-source/t/core/timer.t | 53 + .../APISIX/apisix-source/t/core/uid.t | 42 + .../APISIX/apisix-source/t/core/utils.t | 395 ++ .../APISIX/apisix-source/t/coredns/Corefile | 4 + .../apisix-source/t/coredns/db.test.local | 53 + .../APISIX/apisix-source/t/debug/debug-mode.t | 347 ++ .../apisix-source/t/debug/dynamic-hook.t | 454 +++ .../APISIX/apisix-source/t/debug/hook.t | 153 + .../APISIX/apisix-source/t/discovery/consul.t | 783 ++++ .../apisix-source/t/discovery/consul2.t | 334 ++ .../apisix-source/t/discovery/consul_dump.t | 511 +++ .../apisix-source/t/discovery/consul_kv.t | 698 ++++ .../t/discovery/consul_kv_dump.t | 390 ++ .../apisix-source/t/discovery/dns/mix.t | 131 + .../apisix-source/t/discovery/dns/sanity.t | 463 +++ .../APISIX/apisix-source/t/discovery/eureka.t | 117 + .../APISIX/apisix-source/t/discovery/nacos.t | 1068 ++++++ .../APISIX/apisix-source/t/discovery/nacos2.t | 342 ++ .../APISIX/apisix-source/t/discovery/nacos3.t | 638 ++++ .../t/discovery/reset-healthchecker.t | 169 + .../apisix-source/t/discovery/stream/consul.t | 278 ++ .../t/discovery/stream/consul_kv.t | 269 ++ .../apisix-source/t/discovery/stream/dns.t | 342 ++ .../apisix-source/t/discovery/stream/eureka.t | 95 + .../apisix-source/t/discovery/stream/nacos.t | 92 + .../apisix-source/t/error_page/error_page.t | 239 ++ .../apisix-source/t/fake-plugin-exit.lua | 46 + .../apisix-source/t/fuzzing/client_abort.py | 74 + .../apisix-source/t/fuzzing/http_upstream.py | 100 + .../APISIX/apisix-source/t/fuzzing/public.py | 138 + .../apisix-source/t/fuzzing/requirements.txt | 4 + .../t/fuzzing/serverless_route_test.py | 106 + .../apisix-source/t/fuzzing/simple_http.py | 134 + .../t/fuzzing/simpleroute_test.py | 87 + .../t/fuzzing/upstream/nginx.conf | 75 + .../t/fuzzing/vars_route_test.py | 88 + .../APISIX/apisix-source/t/gm/gm.t | 257 ++ .../t/grpc_server_example/echo.pb | Bin 0 -> 997 bytes .../t/grpc_server_example/go.mod | 10 + .../t/grpc_server_example/go.sum | 1117 ++++++ 
.../t/grpc_server_example/main.go | 346 ++ .../t/grpc_server_example/proto.pb | Bin 0 -> 298 bytes .../t/grpc_server_example/proto/echo.pb.go | 236 ++ .../t/grpc_server_example/proto/echo.proto | 35 + .../grpc_server_example/proto/echo_grpc.pb.go | 105 + .../proto/helloworld.pb.go | 851 +++++ .../proto/helloworld.proto | 92 + .../proto/helloworld_grpc.pb.go | 459 +++ .../t/grpc_server_example/proto/import.pb.go | 220 ++ .../t/grpc_server_example/proto/import.proto | 29 + .../t/grpc_server_example/proto/src.pb.go | 179 + .../t/grpc_server_example/proto/src.proto | 32 + .../grpc_server_example/proto/src_grpc.pb.go | 105 + .../apisix-source/t/http3/admin/basic.t | 108 + .../APISIX/apisix-source/t/jest.config.ts | 31 + .../t/kubernetes/configs/account.yaml | 44 + .../t/kubernetes/configs/endpoint.yaml | 58 + .../t/kubernetes/configs/kind.yaml | 22 + .../t/kubernetes/discovery/kubernetes.t | 423 +++ .../t/kubernetes/discovery/kubernetes2.t | 751 ++++ .../t/kubernetes/discovery/kubernetes3.t | 455 +++ .../kubernetes/discovery/stream/kubernetes.t | 344 ++ .../t/lib/apisix/plugins/jwt-auth.lua | 122 + .../apisix/plugins/prometheus/exporter.lua | 39 + .../t/lib/chaitin_waf_server.lua | 60 + .../dubbo-backend-interface/pom.xml | 45 + .../org/apache/dubbo/backend/DemoService.java | 50 + .../dubbo-backend-provider/pom.xml | 96 + .../backend/provider/DemoServiceImpl.java | 69 + .../dubbo/backend/provider/Provider.java | 40 + .../META-INF/spring/dubbo-demo-provider.xml | 39 + .../src/main/resources/dubbo.properties | 17 + .../src/main/resources/log4j.properties | 23 + .../apisix-source/t/lib/dubbo-backend/pom.xml | 97 + .../pom.xml | 46 + .../DubboSerializationTestService.java | 30 + .../java/org/apache/dubbo/backend/PoJo.java | 140 + .../pom.xml | 97 + .../DubboSerializationTestServiceImpl.java | 57 + .../dubbo/backend/provider/Provider.java | 48 + .../META-INF/spring/dubbo-demo-provider.xml | 38 + .../src/main/resources/dubbo.properties | 17 + .../src/main/resources/log4j.properties | 23 + .../t/lib/dubbo-serialization-backend/pom.xml | 97 + .../APISIX/apisix-source/t/lib/etcd.proto | 32 + .../APISIX/apisix-source/t/lib/ext-plugin.lua | 652 ++++ .../apisix-source/t/lib/grafana_loki.lua | 63 + .../APISIX/apisix-source/t/lib/keycloak.lua | 136 + .../apisix-source/t/lib/keycloak_cas.lua | 215 ++ .../apisix-source/t/lib/mock_layer4.lua | 78 + .../APISIX/apisix-source/t/lib/pubsub.lua | 128 + .../APISIX/apisix-source/t/lib/server.lua | 787 ++++ .../APISIX/apisix-source/t/lib/test_admin.lua | 272 ++ .../apisix-source/t/lib/test_inspect.lua | 62 + .../APISIX/apisix-source/t/misc/patch.t | 218 ++ .../apisix-source/t/misc/pre-function.t | 325 ++ .../APISIX/apisix-source/t/misc/timers.t | 54 + .../apisix-source/t/node/chash-balance.t | 656 ++++ .../apisix-source/t/node/chash-hashon.t | 742 ++++ .../t/node/client-mtls-openresty.t | 272 ++ .../APISIX/apisix-source/t/node/client-mtls.t | 655 ++++ .../apisix-source/t/node/consumer-group.t | 312 ++ .../apisix-source/t/node/consumer-plugin.t | 464 +++ .../apisix-source/t/node/consumer-plugin2.t | 470 +++ .../apisix-source/t/node/consumer-plugin3.t | 159 + .../t/node/credential-plugin-basic-auth.t | 137 + .../credential-plugin-incremental-effective.t | 125 + .../t/node/credential-plugin-jwt-auth.t | 137 + .../t/node/credential-plugin-key-auth.t | 137 + .../credential-plugin-multi-credentials.t | 236 ++ .../credential-plugin-set-request-header.t | 245 ++ ...credential-plugin-work-with-other-plugin.t | 171 + .../apisix-source/t/node/data_encrypt.t | 571 +++ 
.../apisix-source/t/node/data_encrypt2.t | 742 ++++ .../APISIX/apisix-source/t/node/ewma.t | 360 ++ .../APISIX/apisix-source/t/node/filter_func.t | 75 + .../APISIX/apisix-source/t/node/global-rule.t | 419 +++ .../apisix-source/t/node/grpc-proxy-mtls.t | 102 + .../apisix-source/t/node/grpc-proxy-stream.t | 134 + .../apisix-source/t/node/grpc-proxy-unary.t | 142 + .../APISIX/apisix-source/t/node/grpc-proxy.t | 287 ++ .../t/node/healthcheck-discovery.t | 201 + .../apisix-source/t/node/healthcheck-https.t | 341 ++ .../apisix-source/t/node/healthcheck-ipv6.t | 148 + .../t/node/healthcheck-leak-bugfix.t | 112 + .../t/node/healthcheck-multiple-worker.t | 141 + .../t/node/healthcheck-passive-resty-events.t | 382 ++ .../t/node/healthcheck-passive.t | 344 ++ .../t/node/healthcheck-stop-checker.t | 253 ++ .../APISIX/apisix-source/t/node/healthcheck.t | 916 +++++ .../apisix-source/t/node/healthcheck2.t | 362 ++ .../apisix-source/t/node/healthcheck3.t | 122 + .../APISIX/apisix-source/t/node/hosts.t | 97 + .../APISIX/apisix-source/t/node/http_host.t | 68 + .../APISIX/apisix-source/t/node/https-proxy.t | 163 + .../apisix-source/t/node/invalid-port.t | 105 + .../apisix-source/t/node/invalid-route.t | 160 + .../apisix-source/t/node/invalid-service.t | 115 + .../apisix-source/t/node/invalid-upstream.t | 132 + .../APISIX/apisix-source/t/node/least_conn.t | 151 + .../APISIX/apisix-source/t/node/least_conn2.t | 105 + .../APISIX/apisix-source/t/node/merge-route.t | 511 +++ .../apisix-source/t/node/not-exist-service.t | 103 + .../apisix-source/t/node/not-exist-upstream.t | 82 + .../apisix-source/t/node/plugin-configs.t | 410 ++ .../APISIX/apisix-source/t/node/plugin.t | 44 + .../APISIX/apisix-source/t/node/plugin1.t | 104 + .../t/node/priority-balancer/health-checker.t | 184 + .../t/node/priority-balancer/sanity.t | 322 ++ .../apisix-source/t/node/remote-addr-ipv6.t | 118 + .../APISIX/apisix-source/t/node/remote-addr.t | 154 + .../apisix-source/t/node/remote_addrs.t | 111 + .../apisix-source/t/node/route-delete.t | 142 + .../t/node/route-domain-with-local-dns.t | 86 + .../apisix-source/t/node/route-domain.t | 212 ++ .../apisix-source/t/node/route-filter-func.t | 74 + .../APISIX/apisix-source/t/node/route-host.t | 160 + .../apisix-source/t/node/route-status.t | 252 ++ .../APISIX/apisix-source/t/node/route-uris.t | 80 + .../APISIX/apisix-source/t/node/rr-balance.t | 316 ++ .../apisix-source/t/node/sanity-radixtree.t | 139 + .../apisix-source/t/node/service-empty.t | 89 + .../apisix-source/t/node/ssl-protocols.t | 298 ++ .../APISIX/apisix-source/t/node/ssl.t | 243 ++ .../apisix-source/t/node/timeout-upstream.t | 191 + .../t/node/upstream-array-nodes.t | 199 + .../t/node/upstream-discovery-dynamic.t | 133 + .../apisix-source/t/node/upstream-discovery.t | 510 +++ .../t/node/upstream-domain-with-special-dns.t | 230 ++ .../upstream-domain-with-special-ipv6-dns.t | 70 + .../apisix-source/t/node/upstream-domain.t | 415 +++ .../apisix-source/t/node/upstream-ipv6.t | 272 ++ .../t/node/upstream-keepalive-pool.t | 807 ++++ .../apisix-source/t/node/upstream-mtls.t | 684 ++++ .../apisix-source/t/node/upstream-node-dns.t | 558 +++ .../apisix-source/t/node/upstream-retries.t | 304 ++ .../t/node/upstream-status-5xx.t | 407 ++ .../t/node/upstream-status-all.t | 465 +++ .../apisix-source/t/node/upstream-websocket.t | 295 ++ .../APISIX/apisix-source/t/node/upstream.t | 630 ++++ .../APISIX/apisix-source/t/node/vars.t | 343 ++ .../apisix-source/t/node/wildcard-host.t | 102 + .../APISIX/apisix-source/t/package.json | 25 + 
.../ai-aws-content-moderation-secrets.t | 213 ++ .../t/plugin/ai-aws-content-moderation.t | 301 ++ .../t/plugin/ai-aws-content-moderation2.t | 92 + .../t/plugin/ai-prompt-decorator.t | 293 ++ .../apisix-source/t/plugin/ai-prompt-guard.t | 413 +++ .../t/plugin/ai-prompt-template.t | 403 ++ .../t/plugin/ai-proxy-multi.balancer.t | 360 ++ .../plugin/ai-proxy-multi.openai-compatible.t | 296 ++ .../apisix-source/t/plugin/ai-proxy-multi.t | 606 +++ .../apisix-source/t/plugin/ai-proxy-multi2.t | 347 ++ .../t/plugin/ai-proxy.openai-compatible.t | 321 ++ .../APISIX/apisix-source/t/plugin/ai-proxy.t | 673 ++++ .../APISIX/apisix-source/t/plugin/ai-proxy2.t | 315 ++ .../APISIX/apisix-source/t/plugin/ai-rag.t | 392 ++ .../apisix-source/t/plugin/ai-rate-limiting.t | 1047 ++++++ .../t/plugin/ai-request-rewrite.t | 739 ++++ .../t/plugin/ai-request-rewrite2.t | 287 ++ .../APISIX/apisix-source/t/plugin/ai.t | 908 +++++ .../APISIX/apisix-source/t/plugin/ai2.t | 428 +++ .../APISIX/apisix-source/t/plugin/ai3.t | 263 ++ .../APISIX/apisix-source/t/plugin/ai4.t | 473 +++ .../APISIX/apisix-source/t/plugin/ai5.t | 270 ++ .../apisix-source/t/plugin/api-breaker.t | 654 ++++ .../t/plugin/attach-consumer-label.t | 465 +++ .../apisix-source/t/plugin/authz-casbin.t | 446 +++ .../t/plugin/authz-casbin/model.conf | 14 + .../t/plugin/authz-casbin/policy.csv | 3 + .../apisix-source/t/plugin/authz-casdoor.t | 514 +++ .../apisix-source/t/plugin/authz-keycloak.t | 647 ++++ .../apisix-source/t/plugin/authz-keycloak2.t | 743 ++++ .../apisix-source/t/plugin/authz-keycloak3.t | 178 + .../apisix-source/t/plugin/authz-keycloak4.t | 245 ++ .../apisix-source/t/plugin/aws-lambda.t | 277 ++ .../apisix-source/t/plugin/azure-functions.t | 510 +++ .../t/plugin/basic-auth-anonymous-consumer.t | 224 ++ .../apisix-source/t/plugin/basic-auth.t | 622 ++++ .../t/plugin/batch-requests-grpc.t | 205 + .../apisix-source/t/plugin/batch-requests.t | 1021 +++++ .../apisix-source/t/plugin/batch-requests2.t | 446 +++ .../t/plugin/body-transformer-multipart.t | 269 ++ .../apisix-source/t/plugin/body-transformer.t | 1129 ++++++ .../t/plugin/body-transformer2.t | 134 + .../APISIX/apisix-source/t/plugin/brotli.t | 785 ++++ .../APISIX/apisix-source/t/plugin/cas-auth.t | 223 ++ .../t/plugin/chaitin-waf-reject.t | 212 ++ .../t/plugin/chaitin-waf-timeout.t | 139 + .../apisix-source/t/plugin/chaitin-waf.t | 407 ++ .../t/plugin/clickhouse-logger.t | 315 ++ .../t/plugin/clickhouse-logger2.t | 244 ++ .../apisix-source/t/plugin/client-control.t | 187 + .../apisix-source/t/plugin/consumer-bug-fix.t | 137 + .../t/plugin/consumer-restriction.t | 1408 +++++++ .../t/plugin/consumer-restriction2.t | 414 +++ .../APISIX/apisix-source/t/plugin/cors.t | 929 +++++ .../APISIX/apisix-source/t/plugin/cors2.t | 176 + .../APISIX/apisix-source/t/plugin/cors3.t | 422 +++ .../APISIX/apisix-source/t/plugin/cors4.t | 751 ++++ .../APISIX/apisix-source/t/plugin/csrf.t | 390 ++ .../t/plugin/custom_sort_plugins.t | 633 ++++ .../APISIX/apisix-source/t/plugin/datadog.t | 537 +++ .../APISIX/apisix-source/t/plugin/degraphql.t | 422 +++ .../t/plugin/dubbo-proxy/route.t | 321 ++ .../t/plugin/dubbo-proxy/upstream.t | 163 + .../APISIX/apisix-source/t/plugin/echo.t | 298 ++ .../t/plugin/elasticsearch-logger.t | 994 +++++ .../t/plugin/error-log-logger-clickhouse.t | 295 ++ .../t/plugin/error-log-logger-kafka.t | 203 + .../t/plugin/error-log-logger-skywalking.t | 229 ++ .../apisix-source/t/plugin/error-log-logger.t | 447 +++ .../APISIX/apisix-source/t/plugin/example.t | 341 ++ 
.../t/plugin/ext-plugin/conf_token.t | 141 + .../t/plugin/ext-plugin/extra-info.t | 355 ++ .../t/plugin/ext-plugin/http-req-call.t | 809 ++++ .../t/plugin/ext-plugin/request-body.t | 201 + .../t/plugin/ext-plugin/response.t | 432 +++ .../t/plugin/ext-plugin/runner.sh | 23 + .../ext-plugin/runner_can_not_terminated.sh | 23 + .../t/plugin/ext-plugin/sanity.t | 713 ++++ .../t/plugin/ext-plugin/sanity2.t | 65 + .../apisix-source/t/plugin/fault-injection.t | 1104 ++++++ .../apisix-source/t/plugin/fault-injection2.t | 186 + .../t/plugin/file-logger-reopen.t | 169 + .../apisix-source/t/plugin/file-logger.t | 340 ++ .../apisix-source/t/plugin/file-logger2.t | 516 +++ .../apisix-source/t/plugin/forward-auth.t | 405 ++ .../apisix-source/t/plugin/forward-auth2.t | 185 + .../t/plugin/google-cloud-logging.t | 832 +++++ .../config-https-domain.json | 9 + .../google-cloud-logging/config-https-ip.json | 9 + .../t/plugin/google-cloud-logging/config.json | 9 + .../t/plugin/google-cloud-logging2.t | 441 +++ .../t/plugin/grpc-transcode-reload-bugfix.t | 75 + .../apisix-source/t/plugin/grpc-transcode.t | 763 ++++ .../apisix-source/t/plugin/grpc-transcode2.t | 796 ++++ .../apisix-source/t/plugin/grpc-transcode3.t | 621 ++++ .../APISIX/apisix-source/t/plugin/grpc-web.t | 355 ++ .../t/plugin/grpc-web/a6/route.pb.go | 290 ++ .../t/plugin/grpc-web/a6/route.proto | 36 + .../grpc-web/a6/route_grpc_web_bin_pb.js | 194 + .../grpc-web/a6/route_grpc_web_text_pb.js | 194 + .../t/plugin/grpc-web/a6/route_pb.js | 356 ++ .../apisix-source/t/plugin/grpc-web/client.js | 100 + .../apisix-source/t/plugin/grpc-web/go.mod | 8 + .../apisix-source/t/plugin/grpc-web/go.sum | 1112 ++++++ .../t/plugin/grpc-web/package-lock.json | 52 + .../t/plugin/grpc-web/package.json | 8 + .../apisix-source/t/plugin/grpc-web/req.bin | Bin 0 -> 14 bytes .../apisix-source/t/plugin/grpc-web/server.go | 106 + .../apisix-source/t/plugin/grpc-web/setup.sh | 25 + .../APISIX/apisix-source/t/plugin/gzip.t | 542 +++ .../t/plugin/hmac-auth-anonymous-consumer.t | 189 + .../APISIX/apisix-source/t/plugin/hmac-auth.t | 1174 ++++++ .../apisix-source/t/plugin/hmac-auth2.t | 150 + .../apisix-source/t/plugin/hmac-auth3.t | 280 ++ .../apisix-source/t/plugin/hmac-auth4.t | 280 ++ .../apisix-source/t/plugin/http-dubbo.t | 179 + .../apisix-source/t/plugin/http-logger-json.t | 239 ++ .../t/plugin/http-logger-log-format.t | 569 +++ .../t/plugin/http-logger-new-line.t | 288 ++ .../apisix-source/t/plugin/http-logger.t | 717 ++++ .../apisix-source/t/plugin/http-logger2.t | 515 +++ .../APISIX/apisix-source/t/plugin/inspect.t | 557 +++ .../apisix-source/t/plugin/ip-restriction.t | 847 +++++ .../apisix-source/t/plugin/jwe-decrypt.t | 585 +++ .../t/plugin/jwt-auth-anonymous-consumer.t | 224 ++ .../APISIX/apisix-source/t/plugin/jwt-auth.t | 1224 ++++++ .../APISIX/apisix-source/t/plugin/jwt-auth2.t | 455 +++ .../APISIX/apisix-source/t/plugin/jwt-auth3.t | 608 +++ .../APISIX/apisix-source/t/plugin/jwt-auth4.t | 352 ++ .../t/plugin/kafka-logger-large-body.t | 946 +++++ .../t/plugin/kafka-logger-log-format.t | 163 + .../apisix-source/t/plugin/kafka-logger.t | 762 ++++ .../apisix-source/t/plugin/kafka-logger2.t | 1090 ++++++ .../apisix-source/t/plugin/kafka-logger3.t | 120 + .../apisix-source/t/plugin/kafka-proxy.t | 122 + .../t/plugin/key-auth-anonymous-consumer.t | 223 ++ .../t/plugin/key-auth-upstream-domain-node.t | 247 ++ .../APISIX/apisix-source/t/plugin/key-auth.t | 709 ++++ .../apisix-source/t/plugin/lago.spec.mts | 352 ++ .../APISIX/apisix-source/t/plugin/lago.t | 77 + 
.../APISIX/apisix-source/t/plugin/ldap-auth.t | 616 +++ .../t/plugin/limit-conn-redis-cluster.t | 339 ++ .../apisix-source/t/plugin/limit-conn-redis.t | 810 ++++ .../apisix-source/t/plugin/limit-conn.t | 1202 ++++++ .../apisix-source/t/plugin/limit-conn2.t | 475 +++ .../apisix-source/t/plugin/limit-conn3.t | 126 + .../t/plugin/limit-count-redis-cluster.t | 544 +++ .../t/plugin/limit-count-redis-cluster2.t | 139 + .../t/plugin/limit-count-redis-cluster3.t | 185 + .../t/plugin/limit-count-redis.t | 562 +++ .../t/plugin/limit-count-redis2.t | 372 ++ .../t/plugin/limit-count-redis3.t | 374 ++ .../t/plugin/limit-count-redis4.t | 136 + .../apisix-source/t/plugin/limit-count.t | 1192 ++++++ .../apisix-source/t/plugin/limit-count2.t | 805 ++++ .../apisix-source/t/plugin/limit-count3.t | 413 +++ .../apisix-source/t/plugin/limit-count4.t | 204 + .../apisix-source/t/plugin/limit-count5.t | 202 + .../t/plugin/limit-req-redis-cluster.t | 605 +++ .../apisix-source/t/plugin/limit-req-redis.t | 653 ++++ .../APISIX/apisix-source/t/plugin/limit-req.t | 561 +++ .../apisix-source/t/plugin/limit-req2.t | 317 ++ .../apisix-source/t/plugin/limit-req3.t | 114 + .../apisix-source/t/plugin/log-rotate.t | 218 ++ .../apisix-source/t/plugin/log-rotate2.t | 203 + .../apisix-source/t/plugin/log-rotate3.t | 207 ++ .../APISIX/apisix-source/t/plugin/loggly.t | 845 +++++ .../apisix-source/t/plugin/loki-logger.t | 425 +++ .../apisix-source/t/plugin/mcp-bridge.t | 100 + .../plugin/mcp/assets/bridge-list-tools.json | 232 ++ .../apisix-source/t/plugin/mcp/bridge.spec.ts | 84 + .../apisix-source/t/plugin/mcp/jest.config.ts | 28 + .../apisix-source/t/plugin/mcp/package.json | 15 + .../apisix-source/t/plugin/mcp/pnpm-lock.yaml | 3304 +++++++++++++++++ .../apisix-source/t/plugin/mcp/tsconfig.json | 9 + .../APISIX/apisix-source/t/plugin/mocking.t | 506 +++ .../apisix-source/t/plugin/multi-auth.t | 613 +++ .../apisix-source/t/plugin/multi-auth2.t | 368 ++ .../apisix-source/t/plugin/node-status.t | 138 + .../apisix-source/t/plugin/ocsp-stapling.t | 676 ++++ .../APISIX/apisix-source/t/plugin/opa.t | 225 ++ .../APISIX/apisix-source/t/plugin/opa2.t | 314 ++ .../apisix-source/t/plugin/openfunction.t | 331 ++ .../apisix-source/t/plugin/openid-connect.t | 1572 ++++++++ .../plugin/openid-connect/configuration.json | 75 + .../apisix-source/t/plugin/openid-connect2.t | 403 ++ .../apisix-source/t/plugin/openid-connect3.t | 111 + .../apisix-source/t/plugin/openid-connect4.t | 311 ++ .../apisix-source/t/plugin/openid-connect5.t | 233 ++ .../apisix-source/t/plugin/openid-connect6.t | 365 ++ .../apisix-source/t/plugin/openid-connect7.t | 473 +++ .../apisix-source/t/plugin/openid-connect8.t | 444 +++ .../apisix-source/t/plugin/opentelemetry.t | 436 +++ .../apisix-source/t/plugin/opentelemetry2.t | 136 + .../apisix-source/t/plugin/opentelemetry3.t | 203 + .../t/plugin/opentelemetry4-bugfix-pb-state.t | 195 + .../apisix-source/t/plugin/opentelemetry5.t | 209 ++ .../APISIX/apisix-source/t/plugin/openwhisk.t | 468 +++ .../APISIX/apisix-source/t/plugin/plugin.t | 767 ++++ .../t/plugin/prometheus-metric-expire.t | 132 + .../apisix-source/t/plugin/prometheus.t | 632 ++++ .../apisix-source/t/plugin/prometheus2.t | 923 +++++ .../apisix-source/t/plugin/prometheus3.t | 262 ++ .../apisix-source/t/plugin/prometheus4.t | 323 ++ .../apisix-source/t/plugin/proxy-cache/disk.t | 755 ++++ .../t/plugin/proxy-cache/memory.t | 706 ++++ .../apisix-source/t/plugin/proxy-control.t | 134 + .../apisix-source/t/plugin/proxy-mirror.t | 912 +++++ 
.../apisix-source/t/plugin/proxy-mirror2.t | 128 + .../apisix-source/t/plugin/proxy-mirror3.t | 76 + .../apisix-source/t/plugin/proxy-rewrite.t | 1233 ++++++ .../apisix-source/t/plugin/proxy-rewrite2.t | 232 ++ .../apisix-source/t/plugin/proxy-rewrite3.t | 1001 +++++ .../apisix-source/t/plugin/public-api.t | 200 + .../APISIX/apisix-source/t/plugin/real-ip.t | 472 +++ .../APISIX/apisix-source/t/plugin/redirect.t | 1078 ++++++ .../APISIX/apisix-source/t/plugin/redirect2.t | 106 + .../t/plugin/referer-restriction.t | 269 ++ .../apisix-source/t/plugin/request-id.t | 507 +++ .../apisix-source/t/plugin/request-id2.t | 188 + .../t/plugin/request-validation.t | 1785 +++++++++ .../t/plugin/request-validation2.t | 79 + .../apisix-source/t/plugin/response-rewrite.t | 735 ++++ .../t/plugin/response-rewrite2.t | 693 ++++ .../t/plugin/response-rewrite3.t | 677 ++++ .../t/plugin/rocketmq-logger-log-format.t | 160 + .../apisix-source/t/plugin/rocketmq-logger.t | 574 +++ .../apisix-source/t/plugin/rocketmq-logger2.t | 659 ++++ .../apisix-source/t/plugin/security-warning.t | 570 +++ .../t/plugin/security-warning2.t | 654 ++++ .../apisix-source/t/plugin/server-info.t | 104 + .../apisix-source/t/plugin/serverless.t | 606 +++ .../t/plugin/skywalking-logger.t | 386 ++ .../apisix-source/t/plugin/skywalking.t | 476 +++ .../apisix-source/t/plugin/sls-logger.t | 531 +++ .../t/plugin/splunk-hec-logging.t | 465 +++ .../APISIX/apisix-source/t/plugin/syslog.t | 653 ++++ .../apisix-source/t/plugin/tcp-logger.t | 608 +++ .../t/plugin/tencent-cloud-cls.t | 693 ++++ .../apisix-source/t/plugin/traffic-split.t | 790 ++++ .../apisix-source/t/plugin/traffic-split2.t | 864 +++++ .../apisix-source/t/plugin/traffic-split3.t | 784 ++++ .../apisix-source/t/plugin/traffic-split4.t | 744 ++++ .../apisix-source/t/plugin/traffic-split5.t | 636 ++++ .../apisix-source/t/plugin/ua-restriction.t | 761 ++++ .../apisix-source/t/plugin/udp-logger.t | 539 +++ .../apisix-source/t/plugin/uri-blocker.t | 470 +++ .../APISIX/apisix-source/t/plugin/wolf-rbac.t | 737 ++++ .../t/plugin/workflow-without-case.t | 85 + .../APISIX/apisix-source/t/plugin/workflow.t | 745 ++++ .../APISIX/apisix-source/t/plugin/workflow2.t | 318 ++ .../APISIX/apisix-source/t/plugin/zipkin.t | 484 +++ .../APISIX/apisix-source/t/plugin/zipkin2.t | 260 ++ .../APISIX/apisix-source/t/plugin/zipkin3.t | 131 + .../APISIX/apisix-source/t/pnpm-lock.yaml | 3016 +++++++++++++++ .../APISIX/apisix-source/t/pubsub/kafka.t | 372 ++ .../APISIX/apisix-source/t/pubsub/pubsub.t | 237 ++ .../APISIX/apisix-source/t/router/graphql.t | 374 ++ .../apisix-source/t/router/multi-ssl-certs.t | 342 ++ .../t/router/radixtree-host-uri-priority.t | 171 + .../t/router/radixtree-host-uri.t | 285 ++ .../t/router/radixtree-host-uri2.t | 388 ++ .../t/router/radixtree-host-uri3.t | 355 ++ .../apisix-source/t/router/radixtree-method.t | 113 + .../apisix-source/t/router/radixtree-sni.t | 826 +++++ .../apisix-source/t/router/radixtree-sni2.t | 822 ++++ .../apisix-source/t/router/radixtree-sni3.t | 283 ++ .../t/router/radixtree-uri-host.t | 561 +++ .../t/router/radixtree-uri-keep-end-slash.t | 87 + .../t/router/radixtree-uri-multiple.t | 210 ++ .../t/router/radixtree-uri-priority.t | 178 + .../t/router/radixtree-uri-sanity.t | 381 ++ .../t/router/radixtree-uri-vars.t | 439 +++ .../t/router/radixtree-uri-with-parameter.t | 272 ++ .../t/router/radixtree-uri-with-parameter2.t | 108 + .../APISIX/apisix-source/t/script/script.t | 99 + .../t/script/script_distribute.t | 150 + .../apisix-source/t/script/script_test.lua | 
43 + .../APISIX/apisix-source/t/secret/aws.t | 316 ++ .../apisix-source/t/secret/conf/error.json | 9 + .../apisix-source/t/secret/conf/success.json | 10 + .../APISIX/apisix-source/t/secret/gcp.t | 737 ++++ .../apisix-source/t/secret/secret_lru.t | 98 + .../APISIX/apisix-source/t/secret/vault.t | 295 ++ .../apisix-source/t/sse_server_example/go.mod | 3 + .../t/sse_server_example/main.go | 58 + .../t/stream-node/healthcheck-resty-events.t | 290 ++ .../healthcheck-resty-worker-events.t | 271 ++ .../APISIX/apisix-source/t/stream-node/mtls.t | 335 ++ .../t/stream-node/priority-balancer.t | 153 + .../apisix-source/t/stream-node/random.t | 79 + .../t/stream-node/sanity-repeat.t | 134 + .../t/stream-node/sanity-with-service.t | 294 ++ .../apisix-source/t/stream-node/sanity.t | 403 ++ .../APISIX/apisix-source/t/stream-node/sni.t | 341 ++ .../APISIX/apisix-source/t/stream-node/tls.t | 135 + .../t/stream-node/upstream-domain.t | 197 + .../t/stream-node/upstream-tls.t | 142 + .../t/stream-plugin/ip-restriction.t | 159 + .../t/stream-plugin/limit-conn.t | 336 ++ .../t/stream-plugin/limit-conn2.t | 134 + .../t/stream-plugin/mqtt-proxy.t | 395 ++ .../t/stream-plugin/mqtt-proxy2.t | 184 + .../apisix-source/t/stream-plugin/plugin.t | 78 + .../t/stream-plugin/prometheus.t | 158 + .../apisix-source/t/stream-plugin/syslog.t | 416 +++ .../APISIX/apisix-source/t/tars/conf/tars.sql | 539 +++ .../t/tars/discovery/stream/tars.t | 212 ++ .../apisix-source/t/tars/discovery/tars.t | 391 ++ .../APISIX/apisix-source/t/ts/admin_api.ts | 42 + .../APISIX/apisix-source/t/ts/utils.ts | 18 + .../APISIX/apisix-source/t/tsconfig.esm.json | 9 + .../APISIX/apisix-source/t/tsconfig.json | 11 + .../apisix-source/t/utils/batch-processor.t | 483 +++ .../APISIX/apisix-source/t/utils/rfc5424.t | 83 + .../apisix-source/t/wasm/fault-injection.t | 280 ++ .../t/wasm/fault-injection/main.go | 108 + .../apisix-source/t/wasm/forward-auth.go | 219 ++ .../apisix-source/t/wasm/forward-auth.t | 253 ++ .../APISIX/apisix-source/t/wasm/global-rule.t | 175 + .../APISIX/apisix-source/t/wasm/go.mod | 10 + .../APISIX/apisix-source/t/wasm/go.sum | 16 + .../APISIX/apisix-source/t/wasm/log/main.go | 81 + .../apisix-source/t/wasm/request-body.t | 251 ++ .../apisix-source/t/wasm/request-body/main.go | 99 + .../apisix-source/t/wasm/response-rewrite.t | 188 + .../t/wasm/response-rewrite/main.go | 114 + .../APISIX/apisix-source/t/wasm/route.t | 464 +++ .../apisix-source/t/xds-library/config_xds.t | 129 + .../t/xds-library/config_xds_2.t | 239 ++ .../apisix-source/t/xds-library/export.go | 26 + .../apisix-source/t/xds-library/main.go | 137 + .../APISIX/apisix-source/t/xds-library/xds.h | 55 + .../stream/xrpc/protocols/pingpong/init.lua | 287 ++ .../stream/xrpc/protocols/pingpong/schema.lua | 52 + .../APISIX/apisix-source/t/xrpc/dubbo.t | 168 + .../APISIX/apisix-source/t/xrpc/pingpong.t | 781 ++++ .../APISIX/apisix-source/t/xrpc/pingpong2.t | 753 ++++ .../APISIX/apisix-source/t/xrpc/pingpong3.t | 193 + .../APISIX/apisix-source/t/xrpc/prometheus.t | 273 ++ .../APISIX/apisix-source/t/xrpc/redis.t | 783 ++++ .../APISIX/apisix-source/t/xrpc/redis2.t | 202 + .../apisix-source/utils/check-category.py | 72 + .../utils/check-lua-code-style.sh | 32 + .../utils/check-merge-conflict.sh | 24 + .../apisix-source/utils/check-plugins-code.sh | 73 + .../utils/check-test-code-style.sh | 38 + .../apisix-source/utils/check-version.sh | 82 + .../apisix-source/utils/fix-zh-doc-segment.py | 62 + .../apisix-source/utils/gen-vote-contents.sh | 93 + .../utils/install-dependencies.sh | 
174 + .../utils/linux-install-luarocks.sh | 68 + CloudronPackages/APISIX/start.sh | 28 + 1608 files changed, 388342 insertions(+) create mode 100644 CloudronPackages/APISIX/CloudronManifest.json create mode 100644 CloudronPackages/APISIX/Dockerfile create mode 100644 CloudronPackages/APISIX/apisix-source/.devcontainer/Dockerfile create mode 100644 CloudronPackages/APISIX/apisix-source/.devcontainer/devcontainer.json create mode 100644 CloudronPackages/APISIX/apisix-source/.devcontainer/docker-compose.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/config.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/improve_docs.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/request_help.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 CloudronPackages/APISIX/apisix-source/.github/dependabot.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/build.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/check-changelog.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/cli.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/close-unresponded.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/code-lint.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/doc-lint.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/gm-cron.yaml.disabled create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/gm.yml.disabled create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/kubernetes-ci.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/license-checker.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/link-check.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/lint.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/push-dev-image-on-commit.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/redhat-ci.yaml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/semantic.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/source-install.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/stale.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/tars-ci.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.github/workflows/update-labels.yml create mode 100644 CloudronPackages/APISIX/apisix-source/.gitmodules create mode 100644 CloudronPackages/APISIX/apisix-source/.ignore_words create mode 100644 CloudronPackages/APISIX/apisix-source/.licenserc.yaml create mode 100644 CloudronPackages/APISIX/apisix-source/.markdownlint.yml create mode 100644 CloudronPackages/APISIX/apisix-source/CHANGELOG.md create mode 100644 CloudronPackages/APISIX/apisix-source/CODE_OF_CONDUCT.md create mode 100644 CloudronPackages/APISIX/apisix-source/CODE_STYLE.md create mode 100644 CloudronPackages/APISIX/apisix-source/CONTRIBUTING.md create mode 100644 CloudronPackages/APISIX/apisix-source/LICENSE create mode 100644 
CloudronPackages/APISIX/apisix-source/MAINTAIN.md create mode 100644 CloudronPackages/APISIX/apisix-source/Makefile create mode 100644 CloudronPackages/APISIX/apisix-source/NOTICE create mode 100644 CloudronPackages/APISIX/apisix-source/README.md create mode 100644 CloudronPackages/APISIX/apisix-source/THREAT_MODEL.md create mode 100644 CloudronPackages/APISIX/apisix-source/Vision-and-Milestones.md create mode 100644 CloudronPackages/APISIX/apisix-source/apisix-master-0.rockspec create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/consumer_group.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/consumers.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/credentials.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/global_rules.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_config.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_metadata.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/plugins.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/proto.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/resource.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/routes.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/secrets.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/services.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/ssl.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/standalone.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/stream_routes.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/upstreams.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/utils.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/admin/v3_adapter.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/api_router.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/balancer.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/balancer/chash.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/balancer/ewma.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/balancer/least_conn.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/balancer/priority.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/balancer/roundrobin.lua create mode 100755 CloudronPackages/APISIX/apisix-source/apisix/cli/apisix.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/config.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/env.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/etcd.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/file.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/ip.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/ngx_tpl.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/ops.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/cli/util.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/constants.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/consumer.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/consumer_group.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/control/router.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/control/v1.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/config_etcd.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/config_local.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/config_util.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/config_xds.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/config_yaml.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/ctx.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/dns/client.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/env.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/etcd.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/event.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/id.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/io.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/ip.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/json.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/log.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/lrucache.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/math.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/os.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/profile.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/pubsub.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/request.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/resolver.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/response.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/string.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/table.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/timer.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/utils.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/core/version.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/debug.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/init.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/informer_factory.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/events.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/global_rules.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/http/route.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_host_uri.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri_with_parameter.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/http/service.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/include/apisix/model/pubsub.proto create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/inspect/dbg.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/inspect/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/patch.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugin.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugin_config.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-aws-content-moderation.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/aimlapi.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/deepseek.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-base.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-compatible.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-decorator.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-guard.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-template.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy-multi.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/base.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/embeddings/azure_openai.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/vector-search/azure_ai_search.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rate-limiting.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-request-rewrite.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ai.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/api-breaker.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/attach-consumer-label.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casbin.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casdoor.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-keycloak.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/aws-lambda.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/azure-functions.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/basic-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/batch-requests.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/body-transformer.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/brotli.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/cas-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/chaitin-waf.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/clickhouse-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/client-control.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/consumer-restriction.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/cors.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/csrf.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/datadog.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/degraphql.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/dubbo-proxy.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/echo.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/elasticsearch-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/error-log-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/example-plugin.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-req.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-resp.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-pre-req.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/helper.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/fault-injection.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/file-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/forward-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/gm.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/google-cloud-logging.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/proto.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/request.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/response.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/util.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-web.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/gzip.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/hmac-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/http-dubbo.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/http-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/inspect.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/jwe-decrypt.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/jwt-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-proxy.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/key-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/lago.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ldap-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis-cluster.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/util.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-local.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis-cluster.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis-cluster.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/util.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/log-rotate.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/loggly.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/loki-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp-bridge.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/shared_dict.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/utils.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server_wrapper.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/transport/sse.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/mocking.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/multi-auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/node-status.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ocsp-stapling.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/opa.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/opa/helper.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/openfunction.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/openid-connect.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/opentelemetry.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/openwhisk.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus/exporter.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/disk_handler.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory_handler.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/util.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-control.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-mirror.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-rewrite.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/public-api.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/real-ip.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/redirect.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/referer-restriction.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/request-id.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/request-validation.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/response-rewrite.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/rocketmq-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/server-info.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-post-function.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-pre-function.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/generic-upstream.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/init.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/sls-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/splunk-hec-logging.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/tcp-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls/cls-sdk.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/traffic-split.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/ua-restriction.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/udp-logger.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/uri-blocker.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/wolf-rbac.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/workflow.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/codec.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/random_sampler.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/reporter.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/pubsub/kafka.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/router.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/schema_def.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/script.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/secret.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/secret/aws.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/secret/gcp.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/secret/vault.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/ssl.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/ssl/router/radixtree_sni.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/ip-restriction.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/limit-conn.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/mqtt-proxy.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/prometheus.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/syslog.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/router/ip_port.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/metrics.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/commands.lua create mode 100644 
CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/metrics.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/runner.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/sdk.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/timers.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/upstream.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/auth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor-manager.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/content-decode.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/google-cloud-oauth.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/log-util.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/redis-schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/redis.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/rediscluster.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/rfc5424.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/router.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/utils/upstream.lua create mode 100644 CloudronPackages/APISIX/apisix-source/apisix/wasm.lua create mode 100644 CloudronPackages/APISIX/apisix-source/autodocs/config.ld create mode 100755 CloudronPackages/APISIX/apisix-source/autodocs/generate.sh create mode 100644 CloudronPackages/APISIX/apisix-source/autodocs/ldoc.ltp create mode 100644 CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.crt create mode 100644 CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.key create mode 100644 CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/openssl.conf create mode 100644 CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/nginx.conf create mode 100644 CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/lua/apisix.lua create mode 100755 CloudronPackages/APISIX/apisix-source/benchmark/run.sh create mode 100644 CloudronPackages/APISIX/apisix-source/benchmark/server/conf/nginx.conf create mode 100755 CloudronPackages/APISIX/apisix-source/bin/apisix create mode 100644 CloudronPackages/APISIX/apisix-source/ci/backup-docker-images.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/check_changelog_prs.ts create mode 100644 CloudronPackages/APISIX/apisix-source/ci/common.sh create mode 100644 CloudronPackages/APISIX/apisix-source/ci/free_disk_space.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/init-common-test-service.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/init-last-test-service.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/init-plugin-test-service.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/kubernetes-ci.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/linux-install-etcd-client.sh create mode 100755 CloudronPackages/APISIX/apisix-source/ci/linux-install-openresty.sh create mode 100755 
[create mode 100644/100755 entries for the vendored upstream tree under CloudronPackages/APISIX/apisix-source/:
 ci/ runner scripts and ci/pod test fixtures (etcd, eureka, kafka, keycloak, nacos, opa, openfunction, otelcol-contrib, vector),
 conf/ defaults and placeholder certs, docker/ compose and debian-dev files, docs/assets images,
 the docs/en/latest and docs/zh/latest documentation trees (plugins, terminology, tutorials),
 example/ plugins and logos/, and the t/ test suite (admin, assets, certs, chaos, cli, config-center-json/yaml,
 control, core, coredns, debug, discovery, error_page, fuzzing, gm, grpc_server_example, http3, kubernetes, lib, ...)]
100644 CloudronPackages/APISIX/apisix-source/t/misc/patch.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/misc/pre-function.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/misc/timers.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/chash-balance.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/chash-hashon.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/client-mtls-openresty.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/client-mtls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/consumer-group.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/consumer-plugin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/consumer-plugin2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/consumer-plugin3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-basic-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-incremental-effective.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-jwt-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-key-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-multi-credentials.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-set-request-header.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/credential-plugin-work-with-other-plugin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/data_encrypt.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/data_encrypt2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/ewma.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/filter_func.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/global-rule.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/grpc-proxy-mtls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/grpc-proxy-stream.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/grpc-proxy-unary.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/grpc-proxy.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-discovery.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-https.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-ipv6.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-leak-bugfix.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-multiple-worker.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-passive-resty-events.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-passive.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck-stop-checker.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/healthcheck3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/hosts.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/http_host.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/https-proxy.t create mode 100755 CloudronPackages/APISIX/apisix-source/t/node/invalid-port.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/node/invalid-route.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/invalid-service.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/invalid-upstream.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/least_conn.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/least_conn2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/merge-route.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/not-exist-service.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/not-exist-upstream.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/plugin-configs.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/plugin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/plugin1.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/priority-balancer/health-checker.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/priority-balancer/sanity.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/remote-addr-ipv6.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/remote-addr.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/remote_addrs.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-delete.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-domain-with-local-dns.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-domain.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-filter-func.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-host.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-status.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/route-uris.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/rr-balance.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/sanity-radixtree.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/service-empty.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/ssl-protocols.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/ssl.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/timeout-upstream.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-array-nodes.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-discovery-dynamic.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-discovery.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-domain-with-special-dns.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-domain-with-special-ipv6-dns.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-domain.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-ipv6.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-keepalive-pool.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-mtls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-node-dns.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-retries.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-status-5xx.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-status-all.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream-websocket.t 
create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/upstream.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/vars.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/node/wildcard-host.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/package.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-aws-content-moderation-secrets.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-aws-content-moderation.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-aws-content-moderation2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-prompt-decorator.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-prompt-guard.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-prompt-template.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy-multi.balancer.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy-multi.openai-compatible.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy-multi.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy-multi2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy.openai-compatible.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-proxy2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-rag.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-rate-limiting.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-request-rewrite.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai-request-rewrite2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ai5.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/api-breaker.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/attach-consumer-label.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-casbin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-casbin/model.conf create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-casbin/policy.csv create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-casdoor.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-keycloak.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-keycloak2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-keycloak3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/authz-keycloak4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/aws-lambda.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/azure-functions.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/basic-auth-anonymous-consumer.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/basic-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/batch-requests-grpc.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/batch-requests.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/plugin/batch-requests2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/body-transformer-multipart.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/body-transformer.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/body-transformer2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/brotli.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/cas-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/chaitin-waf-reject.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/chaitin-waf-timeout.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/chaitin-waf.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/clickhouse-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/clickhouse-logger2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/client-control.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/consumer-bug-fix.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/consumer-restriction.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/consumer-restriction2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/cors.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/cors2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/cors3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/cors4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/csrf.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/custom_sort_plugins.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/datadog.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/degraphql.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/dubbo-proxy/route.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/dubbo-proxy/upstream.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/echo.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/elasticsearch-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/error-log-logger-clickhouse.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/error-log-logger-kafka.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/error-log-logger-skywalking.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/error-log-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/example.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/conf_token.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/extra-info.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/http-req-call.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/request-body.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/response.t create mode 100755 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/runner.sh create mode 100755 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/runner_can_not_terminated.sh create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/sanity.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ext-plugin/sanity2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/fault-injection.t 
create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/fault-injection2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/file-logger-reopen.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/file-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/file-logger2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/forward-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/forward-auth2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/google-cloud-logging.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/google-cloud-logging/config-https-domain.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/google-cloud-logging/config-https-ip.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/google-cloud-logging/config.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/google-cloud-logging2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-transcode-reload-bugfix.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-transcode.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-transcode2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-transcode3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/a6/route.pb.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/a6/route.proto create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/a6/route_grpc_web_bin_pb.js create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/a6/route_grpc_web_text_pb.js create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/a6/route_pb.js create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/client.js create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/go.mod create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/go.sum create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/package-lock.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/package.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/req.bin create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/server.go create mode 100755 CloudronPackages/APISIX/apisix-source/t/plugin/grpc-web/setup.sh create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/gzip.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/hmac-auth-anonymous-consumer.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/hmac-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/hmac-auth2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/hmac-auth3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/hmac-auth4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/http-dubbo.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/http-logger-json.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/http-logger-log-format.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/http-logger-new-line.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/http-logger.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/plugin/http-logger2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/inspect.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ip-restriction.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/jwe-decrypt.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/jwt-auth-anonymous-consumer.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/jwt-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/jwt-auth2.t create mode 100755 CloudronPackages/APISIX/apisix-source/t/plugin/jwt-auth3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/jwt-auth4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/kafka-logger-large-body.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/kafka-logger-log-format.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/kafka-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/kafka-logger2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/kafka-logger3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/kafka-proxy.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/key-auth-anonymous-consumer.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/key-auth-upstream-domain-node.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/key-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/lago.spec.mts create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/lago.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ldap-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-conn-redis-cluster.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-conn-redis.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-conn.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-conn2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-conn3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis-cluster.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis-cluster2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis-cluster3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count-redis4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-count5.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-req-redis-cluster.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-req-redis.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-req.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/limit-req2.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/plugin/limit-req3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/log-rotate.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/log-rotate2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/log-rotate3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/loggly.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/loki-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp-bridge.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp/assets/bridge-list-tools.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp/bridge.spec.ts create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp/jest.config.ts create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp/package.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp/pnpm-lock.yaml create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mcp/tsconfig.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/mocking.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/multi-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/multi-auth2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/node-status.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ocsp-stapling.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opa.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opa2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openfunction.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect/configuration.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect5.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect6.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect7.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openid-connect8.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opentelemetry.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opentelemetry2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opentelemetry3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opentelemetry4-bugfix-pb-state.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/opentelemetry5.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/openwhisk.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/plugin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/prometheus-metric-expire.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/prometheus.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/prometheus2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/prometheus3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/prometheus4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-cache/disk.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/plugin/proxy-cache/memory.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-control.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-mirror.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-mirror2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-mirror3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-rewrite.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-rewrite2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/proxy-rewrite3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/public-api.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/real-ip.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/redirect.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/redirect2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/referer-restriction.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/request-id.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/request-id2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/request-validation.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/request-validation2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/response-rewrite.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/response-rewrite2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/response-rewrite3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/rocketmq-logger-log-format.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/rocketmq-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/rocketmq-logger2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/security-warning.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/security-warning2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/server-info.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/serverless.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/skywalking-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/skywalking.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/sls-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/splunk-hec-logging.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/syslog.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/tcp-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/tencent-cloud-cls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/traffic-split.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/traffic-split2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/traffic-split3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/traffic-split4.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/traffic-split5.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/ua-restriction.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/udp-logger.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/uri-blocker.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/wolf-rbac.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/plugin/workflow-without-case.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/workflow.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/workflow2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/zipkin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/zipkin2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/plugin/zipkin3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/pnpm-lock.yaml create mode 100644 CloudronPackages/APISIX/apisix-source/t/pubsub/kafka.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/pubsub/pubsub.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/graphql.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/multi-ssl-certs.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-host-uri-priority.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-host-uri.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-host-uri2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-host-uri3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-method.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-sni.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-sni2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-sni3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-host.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-keep-end-slash.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-multiple.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-priority.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-sanity.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-vars.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-with-parameter.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/router/radixtree-uri-with-parameter2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/script/script.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/script/script_distribute.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/script/script_test.lua create mode 100644 CloudronPackages/APISIX/apisix-source/t/secret/aws.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/secret/conf/error.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/secret/conf/success.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/secret/gcp.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/secret/secret_lru.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/secret/vault.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/sse_server_example/go.mod create mode 100644 CloudronPackages/APISIX/apisix-source/t/sse_server_example/main.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/healthcheck-resty-events.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/healthcheck-resty-worker-events.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/mtls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/priority-balancer.t create mode 100644 
CloudronPackages/APISIX/apisix-source/t/stream-node/random.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/sanity-repeat.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/sanity-with-service.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/sanity.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/sni.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/tls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/upstream-domain.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-node/upstream-tls.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/ip-restriction.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/limit-conn.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/limit-conn2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/mqtt-proxy.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/mqtt-proxy2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/plugin.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/prometheus.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/stream-plugin/syslog.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/tars/conf/tars.sql create mode 100644 CloudronPackages/APISIX/apisix-source/t/tars/discovery/stream/tars.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/tars/discovery/tars.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/ts/admin_api.ts create mode 100644 CloudronPackages/APISIX/apisix-source/t/ts/utils.ts create mode 100644 CloudronPackages/APISIX/apisix-source/t/tsconfig.esm.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/tsconfig.json create mode 100644 CloudronPackages/APISIX/apisix-source/t/utils/batch-processor.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/utils/rfc5424.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/fault-injection.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/fault-injection/main.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/forward-auth.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/forward-auth.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/global-rule.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/go.mod create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/go.sum create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/log/main.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/request-body.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/request-body/main.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/response-rewrite.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/response-rewrite/main.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/wasm/route.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xds-library/config_xds.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xds-library/config_xds_2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xds-library/export.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/xds-library/main.go create mode 100644 CloudronPackages/APISIX/apisix-source/t/xds-library/xds.h create mode 100644 
CloudronPackages/APISIX/apisix-source/t/xrpc/apisix/stream/xrpc/protocols/pingpong/init.lua create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/apisix/stream/xrpc/protocols/pingpong/schema.lua create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/dubbo.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/pingpong.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/pingpong2.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/pingpong3.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/prometheus.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/redis.t create mode 100644 CloudronPackages/APISIX/apisix-source/t/xrpc/redis2.t create mode 100755 CloudronPackages/APISIX/apisix-source/utils/check-category.py create mode 100755 CloudronPackages/APISIX/apisix-source/utils/check-lua-code-style.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/check-merge-conflict.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/check-plugins-code.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/check-test-code-style.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/check-version.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/fix-zh-doc-segment.py create mode 100755 CloudronPackages/APISIX/apisix-source/utils/gen-vote-contents.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/install-dependencies.sh create mode 100755 CloudronPackages/APISIX/apisix-source/utils/linux-install-luarocks.sh create mode 100644 CloudronPackages/APISIX/start.sh
diff --git a/CloudronPackages/APISIX/CloudronManifest.json b/CloudronPackages/APISIX/CloudronManifest.json
new file mode 100644
index 0000000..c0a91fa
--- /dev/null
+++ b/CloudronPackages/APISIX/CloudronManifest.json
@@ -0,0 +1,33 @@
+{
+  "id": "apisix",
+  "title": "Apache APISIX",
+  "description": "Apache APISIX is a dynamic, real-time, high-performance API gateway.",
+  "tagline": "High-performance API Gateway",
+  "icon": "https://cdn.cloudron.io/icons/apisix.svg",
+  "main": {
+    "type": "docker",
+    "image": "cloudron/base:4.2.0",
+    "ports": {
+      "9080/tcp": "APISIX HTTP/HTTPS Port"
+    },
+    "healthCheck": {
+      "url": "/"
+    }
+  },
+  "manifestVersion": 2,
+  "addons": {
+    "etcd": {}
+  },
+  "environment": {
+    "APISIX_ETCD_HOST": {
+      "type": "string",
+      "description": "etcd host for APISIX",
+      "required": true
+    },
+    "APISIX_ETCD_PORT": {
+      "type": "string",
+      "description": "etcd port for APISIX",
+      "required": true
+    }
+  }
+}
\ No newline at end of file
diff --git a/CloudronPackages/APISIX/Dockerfile b/CloudronPackages/APISIX/Dockerfile
new file mode 100644
index 0000000..0776c2a
--- /dev/null
+++ b/CloudronPackages/APISIX/Dockerfile
@@ -0,0 +1,58 @@
+FROM cloudron/base:4.2.0 AS build
+
+ENV DEBIAN_FRONTEND=noninteractive
+ENV ENV_INST_LUADIR=/usr/local/apisix
+
+COPY apisix-source /apisix
+
+WORKDIR /apisix
+
+RUN set -x \
+    && apt-get -y update --fix-missing \
+    && apt-get install -y \
+        make \
+        git \
+        sudo \
+        libyaml-dev \
+        libldap2-dev \
+    && make deps \
+    && mkdir -p ${ENV_INST_LUADIR} \
+    && cp -r deps ${ENV_INST_LUADIR} \
+    && make install
+
+FROM cloudron/base:4.2.0
+
+# Install the runtime libyaml package
+RUN apt-get -y update --fix-missing \
+    && apt-get install -y libyaml-0-2 \
+    && apt-get remove --purge --auto-remove -y \
+    && mkdir -p /usr/local/apisix/ui
+
+COPY --from=build /usr/local/apisix /usr/local/apisix
+COPY --from=build /usr/local/openresty /usr/local/openresty
+COPY --from=build /usr/bin/apisix /usr/bin/apisix
+# Assuming UI files are in apisix-source/ui, adjust if needed
+COPY apisix-source/ui/ /usr/local/apisix/ui/
+
+# Install brotli (from upstream install-brotli.sh)
+RUN apt-get update && apt-get install -y \
+        libbrotli-dev \
+        --no-install-recommends && \
+    rm -rf /var/lib/apt/lists/*
+
+ENV PATH=$PATH:/usr/local/openresty/luajit/bin:/usr/local/openresty/nginx/sbin:/usr/local/openresty/bin
+
+WORKDIR /usr/local/apisix
+
+RUN ln -sf /dev/stdout /usr/local/apisix/logs/access.log \
+    && ln -sf /dev/stderr /usr/local/apisix/logs/error.log
+
+EXPOSE 9080 9443
+
+# Copy our custom start.sh
+COPY start.sh /usr/local/bin/start.sh
+RUN chmod +x /usr/local/bin/start.sh
+
+ENTRYPOINT ["/usr/local/bin/start.sh"]
+
+STOPSIGNAL SIGQUIT
diff --git a/CloudronPackages/APISIX/apisix-source/.devcontainer/Dockerfile b/CloudronPackages/APISIX/apisix-source/.devcontainer/Dockerfile new file mode 100644 index 0000000..4402db7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.devcontainer/Dockerfile @@ -0,0 +1,38 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +FROM ubuntu:24.04 + +RUN apt update && export DEBIAN_FRONTEND=noninteractive \ + && apt install -y sudo git make gcc tini + +COPY Makefile .requirements apisix-master-0.rockspec ./ +COPY utils/install-dependencies.sh utils/linux-install-luarocks.sh utils/ + +RUN make install-runtime + +RUN cpanm --notest Test::Nginx IPC::Run > build.log 2>&1 || (cat build.log && exit 1) + +ARG ETCD_VER=v3.5.17 +ARG BUILDARCH +RUN curl -L https://github.com/etcd-io/etcd/releases/download/${ETCD_VER}/etcd-${ETCD_VER}-linux-${BUILDARCH}.tar.gz -o /tmp/etcd-${ETCD_VER}-linux-${BUILDARCH}.tar.gz \ + && mkdir -p /tmp/etcd-download-test \ + && tar xzvf /tmp/etcd-${ETCD_VER}-linux-${BUILDARCH}.tar.gz -C /tmp/etcd-download-test --strip-components=1 \ + && mv /tmp/etcd-download-test/etcdctl /usr/bin \ + && rm -rf /tmp/* + +ENTRYPOINT [ "tini", "--" ] diff --git a/CloudronPackages/APISIX/apisix-source/.devcontainer/devcontainer.json b/CloudronPackages/APISIX/apisix-source/.devcontainer/devcontainer.json new file mode 100644 index 0000000..7151076 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.devcontainer/devcontainer.json @@ -0,0 +1,14 @@ +{ + "name": "APISIX", + "dockerComposeFile": ["docker-compose.yml"], + "service": "apisix", + "workspaceFolder": "/workspace", + "privileged": true, + "postCreateCommand": "bash -c 'cd /workspace && rm -rf test-nginx && git config --global --add safe.directory /workspace && git submodule update --init --recursive && git clone https://github.com/openresty/test-nginx.git --depth 1 --single-branch -b master && make deps'", + "customizations": { + "vscode": { + "extensions": ["ms-vscode.makefile-tools", "ms-azuretools.vscode-docker", "sumneko.lua"] + } + }, + "forwardPorts": [9080, 9180, 2379] +} diff --git a/CloudronPackages/APISIX/apisix-source/.devcontainer/docker-compose.yml b/CloudronPackages/APISIX/apisix-source/.devcontainer/docker-compose.yml new file mode 100644 index 0000000..f238f23 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.devcontainer/docker-compose.yml @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +services: + apisix: + build: + context: .. 
+ dockerfile: .devcontainer/Dockerfile + command: sleep infinity + volumes: + - ..:/workspace:cached + network_mode: service:etcd + etcd: + image: bitnami/etcd:3.5 + volumes: + - etcd_data:/bitnami/etcd + environment: + ALLOW_NONE_AUTHENTICATION: "yes" + ETCD_ADVERTISE_CLIENT_URLS: "http://127.0.0.1:2379" + ETCD_LISTEN_CLIENT_URLS: "http://0.0.0.0:2379" + +volumes: + etcd_data: diff --git a/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/bug_report.yml b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000..de8c29d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,68 @@ +name: "Bug Report" +description: Report a bug to help improve the project. +title: "bug: " +body: + - type: markdown + attributes: + value: | + Thank you for taking the time to report this bug! + + _The more information you share, the faster we can identify and fix the bug._ + + Prior to opening the issue, please make sure that you: + + - Use English to communicate. + - Search the [open issues](https://github.com/apache/apisix/issues) and [discussion forum](https://github.com/apache/apisix/discussions) to avoid duplicating the issue. + + - type: textarea + id: current-behavior + attributes: + label: Current Behavior + description: Describe the issue you are facing. + placeholder: | + What is the issue with the current behavior? + validations: + required: true + - type: textarea + id: expected-behavior + attributes: + label: Expected Behavior + description: Describe what you expected to happen. + placeholder: | + What did you expect to happen instead? + validations: + required: false + - type: textarea + id: error + attributes: + label: Error Logs + description: Paste the error logs if any. You can change the [log level](https://github.com/apache/apisix/blob/617c325628f33961be67f61f0fa8002afc370e42/docs/en/latest/FAQ.md#how-to-change-the-log-level) to get a verbose error log. + validations: + required: false + - type: textarea + id: steps + attributes: + label: Steps to Reproduce + description: Share the steps you took so that we can reproduce the issue. Reports without proper steps details will likely be closed. + placeholder: | + 1. Run APISIX via the Docker image. + 2. Create a Route with the Admin API. + 3. Try configuring ... + 4. ... + validations: + required: true + - type: textarea + id: environment + attributes: + label: Environment + description: Share your environment details. Reports without proper environment details will likely be closed. + value: | + - APISIX version (run `apisix version`): + - Operating system (run `uname -a`): + - OpenResty / Nginx version (run `openresty -V` or `nginx -V`): + - etcd version, if relevant (run `curl http://127.0.0.1:9090/v1/server_info`): + - APISIX Dashboard version, if relevant: + - Plugin runner version, for issues related to plugin runners: + - LuaRocks version, for installation issues (run `luarocks --version`): + validations: + required: true diff --git a/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/config.yml b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..f07f3eb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: APISIX Discussion Forum + url: https://github.com/apache/apisix/discussions + about: Please ask and answer questions here. 
diff --git a/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/feature_request.yml b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000..a2b5714 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,23 @@ +name: "Feature Request" +description: Suggest an enhancement to APISIX. +title: "feat: As a user, I want to ..., so that ..." +body: + - type: markdown + attributes: + value: | + _The more information you share, the faster we can help you._ + + Prior to opening the issue, please make sure that you: + + - Use English to communicate. + - Search the [open issues](https://github.com/apache/apisix/issues) and [discussion forum](https://github.com/apache/apisix/discussions) to avoid duplicating the issue. + + - type: textarea + id: description + attributes: + label: Description + description: Describe the feature you would like to see. + placeholder: | + As a user, I want to ..., so that... + validations: + required: true diff --git a/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/improve_docs.yml b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/improve_docs.yml new file mode 100644 index 0000000..ef737b0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/improve_docs.yml @@ -0,0 +1,33 @@ +name: "Documentation Issue" +description: Issues related to documentation. +title: "docs: " +labels: [doc] +body: + - type: markdown + attributes: + value: | + _The more information you share, the faster we can help you._ + + Prior to opening the issue, please make sure that you: + + - Use English to communicate. + - Search the [open issues](https://github.com/apache/apisix/issues) and [discussion forum](https://github.com/apache/apisix/discussions) to avoid duplicating the issue. + + - type: textarea + id: current-state + attributes: + label: Current State + description: Describe the current state of the documentation. + placeholder: | + The documentation for the API in this page (url) is missing ... + validations: + required: true + - type: textarea + id: desired-state + attributes: + label: Desired State + description: Describe the desired state the documentation should be in. + placeholder: | + There should be line mentioning how the API behaves when ... + validations: + required: true diff --git a/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/request_help.yml b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/request_help.yml new file mode 100644 index 0000000..dc0d6b9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/ISSUE_TEMPLATE/request_help.yml @@ -0,0 +1,36 @@ +name: "Request Help" +description: Stuck? Ask for help! +title: "help request: " +body: + - type: markdown + attributes: + value: | + _The more information you share, the faster we can help you._ + + Prior to opening the issue, please make sure that you: + + - Use English to communicate. + - Search the [open issues](https://github.com/apache/apisix/issues) and [discussion forum](https://github.com/apache/apisix/discussions) to avoid duplicating the issue. + + - type: textarea + id: description + attributes: + label: Description + description: Describe the issue you are facing and what you need help with. + validations: + required: true + - type: textarea + id: environment + attributes: + label: Environment + description: Share your environment details. 
Reports without proper environment details will likely be closed. + value: | + - APISIX version (run `apisix version`): + - Operating system (run `uname -a`): + - OpenResty / Nginx version (run `openresty -V` or `nginx -V`): + - etcd version, if relevant (run `curl http://127.0.0.1:9090/v1/server_info`): + - APISIX Dashboard version, if relevant: + - Plugin runner version, for issues related to plugin runners: + - LuaRocks version, for installation issues (run `luarocks --version`): + validations: + required: true diff --git a/CloudronPackages/APISIX/apisix-source/.github/PULL_REQUEST_TEMPLATE.md b/CloudronPackages/APISIX/apisix-source/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..5860ce4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,33 @@ +### Description + + + + +#### Which issue(s) this PR fixes: + +Fixes # + +### Checklist + +- [ ] I have explained the need for this PR and the problem it solves +- [ ] I have explained the changes or the new features added to this PR +- [ ] I have added tests corresponding to this change +- [ ] I have updated the documentation to reflect this change +- [ ] I have verified that this change is backward compatible (If not, please discuss on the [APISIX mailing list](https://github.com/apache/apisix/tree/master#community) first) + + diff --git a/CloudronPackages/APISIX/apisix-source/.github/dependabot.yml b/CloudronPackages/APISIX/apisix-source/.github/dependabot.yml new file mode 100644 index 0000000..5737055 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/dependabot.yml @@ -0,0 +1,10 @@ +# Set update schedule for GitHub Actions + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/build.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/build.yml new file mode 100644 index 0000000..185d919 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/build.yml @@ -0,0 +1,177 @@ +name: CI + +on: + push: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + os_name: + - linux_openresty + events_module: + - lua-resty-worker-events + - lua-resty-events + test_dir: + - t/plugin/[a-k]* + - t/plugin/[l-z]* + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/http3/admin t/misc + - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library t/xrpc + + runs-on: ${{ matrix.platform }} + timeout-minutes: 90 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: "1.17" + + - name: Cache deps + uses: actions/cache@v4 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('apisix-master-0.rockspec') }} + + - name: Extract test type + shell: bash + id: 
test_env + run: | + test_dir="${{ matrix.test_dir }}" + if [[ $test_dir =~ 't/plugin' ]]; then + echo "type=plugin" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/admin ' ]]; then + echo "type=first" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ ' t/xrpc' ]]; then + echo "type=last" >>$GITHUB_OUTPUT + fi + + - name: Free disk space + run: | + bash ./ci/free_disk_space.sh + + - name: Linux Before install + run: sudo ./ci/${{ matrix.os_name }}_runner.sh before_install + + - name: Linux Install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + sudo ./ci/init-common-test-service.sh + + - name: Cache images + id: cache-images + uses: actions/cache@v4 + env: + cache-name: cache-apisix-docker-images + with: + path: docker-images-backup + key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} + + - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} + name: Load saved docker images + run: | + if [[ -f docker-images-backup/apisix-images.tar ]]; then + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + docker load --input docker-images-backup/apisix-images.tar + echo "loaded docker images" + + # preserve storage space + rm docker-images-backup/apisix-images.tar + + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + if [[ ${{ steps.test_env.outputs.type }} != first ]]; then + sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + fi + fi + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Linux launch services + run: | + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + echo "Linux launch services, done." 
+ - name: Start Dubbo Backend + if: matrix.os_name == 'linux_openresty' && (steps.test_env.outputs.type == 'plugin' || steps.test_env.outputs.type == 'last') + run: | + cur_dir=$(pwd) + sudo apt update + sudo apt install -y maven openjdk-8-jdk + sudo update-java-alternatives --set java-1.8.0-openjdk-amd64 + export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 + export PATH=$JAVA_HOME/bin:$PATH + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java \ + -Djava.net.preferIPv4Stack=true \ + -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + cd $cur_dir/t/lib/dubbo-serialization-backend + mvn package + cd dubbo-serialization-backend-provider/target + java \ + -Djava.net.preferIPv4Stack=true \ + -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log & + + - name: Build xDS library + if: steps.test_env.outputs.type == 'last' + run: | + cd t/xds-library + go build -o libxds.so -buildmode=c-shared main.go export.go + + - name: Build wasm code + if: matrix.os_name == 'linux_openresty' && steps.test_env.outputs.type == 'last' + run: | + export TINYGO_VER=0.20.0 + wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VER}/tinygo_${TINYGO_VER}_amd64.deb 2>/dev/null + sudo dpkg -i tinygo_${TINYGO_VER}_amd64.deb + cd t/wasm && find . -type f -name "*.go" | xargs -Ip tinygo build -o p.wasm -scheduler=none -target=wasi p + + - name: Linux Script + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + TEST_EVENTS_MODULE: ${{ matrix.events_module }} + run: sudo -E ./ci/${{ matrix.os_name }}_runner.sh script + + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Save docker images + run: | + echo "start backing up, $(date)" + bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} + echo "backup done, $(date)" diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/check-changelog.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/check-changelog.yml new file mode 100644 index 0000000..0efef10 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/check-changelog.yml @@ -0,0 +1,27 @@ +name: Check Changelog + +on: + push: + paths: + - 'CHANGELOG.md' + - 'ci/check_changelog_prs.ts' + pull_request: + paths: + - 'CHANGELOG.md' + - 'ci/check_changelog_prs.ts' + +jobs: + check-changelog: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + + - name: Run check_changelog_prs script + working-directory: ci + run: | + curl -fsSL https://bun.sh/install | bash + export PATH="$HOME/.bun/bin:$PATH" + bun run check_changelog_prs.ts diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/cli.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/cli.yml new file mode 100644 index 0000000..1840f9a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/cli.yml @@ -0,0 +1,68 @@ +name: CLI Test + +on: + push: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + job_name: + - linux_apisix_current_luarocks + - linux_apisix_current_luarocks_in_customed_nginx + + runs-on: ${{ matrix.platform }} + timeout-minutes: 30 + env: + SERVER_NAME: ${{ 
matrix.job_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v4 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.job_name }}-${{ hashFiles('apisix-master-0.rockspec') }} + + - name: Linux launch common services + run: | + project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up + + - name: Linux Before install + run: sudo ./ci/${{ matrix.job_name }}_runner.sh before_install + + - name: Linux Install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.job_name }}_runner.sh do_install + + - name: Linux Script + run: | + sudo chmod +x /home/runner + sudo ./ci/${{ matrix.job_name }}_runner.sh script diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/close-unresponded.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/close-unresponded.yml new file mode 100644 index 0000000..9508af7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/close-unresponded.yml @@ -0,0 +1,39 @@ +name: Check Issues + +on: + workflow_dispatch: + schedule: + - cron: '0 10 * * *' + +permissions: + contents: read + +jobs: + prune_stale: + permissions: + issues: write # for actions/stale to close stale issues + name: Prune Unresponded + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Prune Stale + uses: actions/stale@v8 + with: + days-before-issue-stale: 60 + days-before-issue-close: 3 + stale-issue-message: > + Due to lack of the reporter's response this issue has been labeled with "no response". + It will be close in 3 days if no further activity occurs. If this issue is still + relevant, please simply write any comment. Even if closed, you can still revive the + issue at any time or discuss it on the dev@apisix.apache.org list. + Thank you for your contributions. + close-issue-message: > + This issue has been closed due to lack of activity. If you think that + is incorrect, or the issue requires additional review, you can revive the issue at + any time. + # Issues with these labels will never be considered stale. + only-labels: 'wait for update' + stale-issue-label: 'no response' + exempt-issue-labels: "don't close" + ascending: true diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/code-lint.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/code-lint.yml new file mode 100644 index 0000000..0fdbf28 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/code-lint.yml @@ -0,0 +1,48 @@ +name: Code Lint + +on: + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +permissions: + contents: read + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - name: Install + run: | + . ./ci/common.sh + export_or_prefix + export OPENRESTY_VERSION=default + + sudo -E ./ci/linux-install-openresty.sh + ./utils/linux-install-luarocks.sh + sudo -E luarocks install luacheck + + - name: Script + run: | + . 
./ci/common.sh + export_or_prefix + make lint + + sc-lint: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Shellcheck code + run: | + scversion="latest" + wget -qO- "https://github.com/koalaman/shellcheck/releases/download/${scversion?}/shellcheck-${scversion?}.linux.x86_64.tar.xz" | tar -xJv + cp -av "shellcheck-${scversion}/shellcheck" /usr/local/bin/ + shellcheck --version + git ls-files -- "*.sh" | xargs -t shellcheck diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/doc-lint.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/doc-lint.yml new file mode 100644 index 0000000..962671d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/doc-lint.yml @@ -0,0 +1,58 @@ +name: Doc Lint + +on: + push: + paths: + - "docs/**" + - "**/*.md" + - ".github/workflows/doc-lint.yml" + pull_request: + branches: [master, "release/**"] + paths: + - "docs/**" + - "**/*.md" + - ".github/workflows/doc-lint.yml" + +permissions: + contents: read + +jobs: + markdownlint: + name: 🍇 Markdown + runs-on: ubuntu-latest + timeout-minutes: 1 + steps: + - uses: actions/checkout@v4 + - name: 🚀 Use Node.js + uses: actions/setup-node@v4.4.0 + with: + node-version: "12.x" + - run: npm install -g markdownlint-cli@0.25.0 + - run: markdownlint '**/*.md' + - name: check category + run: | + ./utils/check-category.py + - name: check Chinese doc + run: | + sudo pip3 install zhon + ./utils/fix-zh-doc-segment.py > \ + /tmp/check.log 2>&1 || (cat /tmp/check.log && exit 1) + if grep "find broken newline in file: " /tmp/check.log; then + cat /tmp/check.log + echo "Newline can't appear in the middle of Chinese sentences." + echo "You need to run ./utils/fix-zh-doc-segment.py to fix them." + exit 1 + fi + + Chinse-Copywriting-lint: + name: Chinese Copywriting + runs-on: ubuntu-latest + timeout-minutes: 1 + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Check Chinese copywriting + uses: ./.github/actions/autocorrect + with: + args: autocorrect --lint --no-diff-bg-color ./docs/zh/latest/ diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/gm-cron.yaml.disabled b/CloudronPackages/APISIX/apisix-source/.github/workflows/gm-cron.yaml.disabled new file mode 100644 index 0000000..46447e4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/gm-cron.yaml.disabled @@ -0,0 +1,182 @@ +name: CI GM (cron) + +on: + schedule: + # UTC 7:30 every Friday + - cron: "30 7 * * 5" + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + os_name: + - linux_openresty_tongsuo + test_dir: + - t/plugin/[a-k]* + - t/plugin/[l-z]* + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc + - t/node t/pubsub t/router t/script t/stream-node t/utils t/wasm t/xds-library t/xrpc + + runs-on: ${{ matrix.platform }} + timeout-minutes: 90 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + # TODO: refactor the workflows to reduce duplicate parts. Maybe we can write them in shell + # scripts or a separate action? 
+ steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: "1.17" + + - name: Cache deps + uses: actions/cache@v4 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('apisix-master-0.rockspec') }} + + - name: Cache Tongsuo compilation + id: cache-tongsuo + uses: actions/cache@v4 + env: + cache-name: cache-tongsuo + with: + path: ./tongsuo + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-tongsuo-ver + + - name: Test SSL Env + id: test_ssl_env + shell: bash + if: steps.cache-tongsuo.outputs.cache-hit != 'true' + run: | + echo "compile_tongsuo=true" >>$GITHUB_OUTPUT + + - name: Extract test type + shell: bash + id: test_env + run: | + test_dir="${{ matrix.test_dir }}" + if [[ $test_dir =~ 't/plugin' ]]; then + echo "type=plugin" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/admin ' ]]; then + echo "type=first" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ ' t/xrpc' ]]; then + echo "type=last" >>$GITHUB_OUTPUT + fi + + - name: Free disk space + run: | + bash ./ci/free_disk_space.sh + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + sudo ./ci/init-common-test-service.sh + + - name: Cache images + id: cache-images + uses: actions/cache@v4 + env: + cache-name: cache-apisix-docker-images + with: + path: docker-images-backup + key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} + + - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} + name: Load saved docker images + run: | + if [[ -f docker-images-backup/apisix-images.tar ]]; then + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + docker load --input docker-images-backup/apisix-images.tar + rm docker-images-backup/apisix-images.tar + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + echo "loaded docker images" + if [[ ${{ steps.test_env.outputs.type }} != first ]]; then + sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + fi + fi + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Linux launch services + run: | + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + echo "Linux launch services, done." 
+ + - name: Start Dubbo Backend + if: steps.test_env.outputs.type == 'plugin' + run: | + cur_dir=$(pwd) + sudo apt update + sudo apt install -y maven openjdk-8-jdk + sudo update-java-alternatives --set java-1.8.0-openjdk-amd64 + export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 + export PATH=$JAVA_HOME/bin:$PATH + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java \ + -Djava.net.preferIPv4Stack=true \ + -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + cd $cur_dir/t/lib/dubbo-serialization-backend + mvn package + cd dubbo-serialization-backend-provider/target + java \ + -Djava.net.preferIPv4Stack=true \ + -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log & + + - name: Build xDS library + if: steps.test_env.outputs.type == 'last' + run: | + cd t/xds-library + go build -o libxds.so -buildmode=c-shared main.go export.go + + - name: Build wasm code + if: steps.test_env.outputs.type == 'last' + run: | + export TINYGO_VER=0.20.0 + wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VER}/tinygo_${TINYGO_VER}_amd64.deb 2>/dev/null + sudo dpkg -i tinygo_${TINYGO_VER}_amd64.deb + cd t/wasm && find . -type f -name "*.go" | xargs -Ip tinygo build -o p.wasm -scheduler=none -target=wasi p + + - name: Linux Before install + env: + COMPILE_TONGSUO: ${{ steps.test_ssl_env.outputs.compile_tongsuo }} + run: | + sudo --preserve-env=COMPILE_TONGSUO \ + ./ci/${{ matrix.os_name }}_runner.sh before_install + + - name: Linux Install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Linux Script + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + run: sudo -E ./ci/${{ matrix.os_name }}_runner.sh script + + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Save docker images + run: | + echo "start backing up, $(date)" + bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} + echo "backup done, $(date)" diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/gm.yml.disabled b/CloudronPackages/APISIX/apisix-source/.github/workflows/gm.yml.disabled new file mode 100644 index 0000000..44260e0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/gm.yml.disabled @@ -0,0 +1,93 @@ +name: CI GM + +on: + push: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + os_name: + - linux_openresty_tongsuo + test_dir: + - t/gm + + runs-on: ${{ matrix.platform }} + timeout-minutes: 90 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v4 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('apisix-master-0.rockspec') }} + + - name: Cache Tongsuo compilation + id: cache-tongsuo + uses: actions/cache@v4 + env: + cache-name: cache-tongsuo + with: + path: ./tongsuo + # TODO: use a fixed release once they have created one. 
+ # See https://github.com/Tongsuo-Project/Tongsuo/issues/318 + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-tongsuo-ver + + - name: Test SSL Env + id: test_ssl_env + shell: bash + if: steps.cache-tongsuo.outputs.cache-hit != 'true' + run: | + echo "compile_tongsuo=true" >>$GITHUB_OUTPUT + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + sudo ./ci/init-common-test-service.sh + + - name: Linux Before install + env: + COMPILE_TONGSUO: ${{ steps.test_ssl_env.outputs.compile_tongsuo }} + run: | + sudo --preserve-env=COMPILE_TONGSUO \ + ./ci/${{ matrix.os_name }}_runner.sh before_install + + - name: Linux Do install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Linux Script + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + run: | + sudo -E ./ci/${{ matrix.os_name }}_runner.sh script diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/kubernetes-ci.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/kubernetes-ci.yml new file mode 100644 index 0000000..16f3343 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/kubernetes-ci.yml @@ -0,0 +1,79 @@ +name: CI Kubernetes + +on: + push: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + kubernetes-discovery: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + os_name: + - linux_openresty + + runs-on: ${{ matrix.platform }} + timeout-minutes: 15 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup kubernetes cluster + run: | + KUBERNETES_VERSION="v1.22.7" + + kind create cluster --name apisix-test --config ./t/kubernetes/configs/kind.yaml --image kindest/node:${KUBERNETES_VERSION} + + kubectl wait --for=condition=Ready nodes --all --timeout=180s + + kubectl apply -f ./t/kubernetes/configs/account.yaml + + kubectl apply -f ./t/kubernetes/configs/endpoint.yaml + + KUBERNETES_CLIENT_TOKEN_CONTENT=$(kubectl get secrets | grep apisix-test | awk '{system("kubectl get secret -o jsonpath={.data.token} "$1" | base64 --decode")}') + + KUBERNETES_CLIENT_TOKEN_DIR="/tmp/var/run/secrets/kubernetes.io/serviceaccount" + + KUBERNETES_CLIENT_TOKEN_FILE=${KUBERNETES_CLIENT_TOKEN_DIR}/token + + mkdir -p ${KUBERNETES_CLIENT_TOKEN_DIR} + echo -n "$KUBERNETES_CLIENT_TOKEN_CONTENT" > ${KUBERNETES_CLIENT_TOKEN_FILE} + + echo 'KUBERNETES_SERVICE_HOST=127.0.0.1' + echo 'KUBERNETES_SERVICE_PORT=6443' + echo 'KUBERNETES_CLIENT_TOKEN='"${KUBERNETES_CLIENT_TOKEN_CONTENT}" + echo 'KUBERNETES_CLIENT_TOKEN_FILE='${KUBERNETES_CLIENT_TOKEN_FILE} + + kubectl proxy -p 6445 & + + - name: Linux Install + run: | + sudo ./ci/${{ matrix.os_name }}_runner.sh before_install + sudo --preserve-env=OPENRESTY_VERSION ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Run test cases + run: | + ./ci/kubernetes-ci.sh run_case diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/license-checker.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/license-checker.yml new file mode 100644 index 
0000000..bae5d6c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/license-checker.yml @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +name: License checker + +on: + push: + branches: [master, 'release/**'] + pull_request: + branches: [master, 'release/**'] + +jobs: + check-license: + runs-on: ubuntu-latest + timeout-minutes: 3 + + steps: + - uses: actions/checkout@v4 + - name: Check License Header + uses: apache/skywalking-eyes@v0.6.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/link-check.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/link-check.yml new file mode 100644 index 0000000..75758a9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/link-check.yml @@ -0,0 +1,49 @@ +name: 'Link Checker' + +# **What it does**: Renders the content of every page and check all internal links. +# **Why we have it**: To make sure all links connect correctly. +# **Who does it impact**: Docs content. + +on: + workflow_dispatch: + push: + # branches: [master, 'release/**'] + paths: + - '**/*.md' + - '**/link-check.yml' + pull_request: + branches: [master, "release/**"] + paths: + - '**/*.md' + - '**/link-check.yml' + +permissions: + contents: read + # Needed for the 'trilom/file-changes-action' action + pull-requests: read + +# This allows a subsequently queued workflow run to interrupt previous runs +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +jobs: + check-links: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Get script + run: | + wget https://raw.githubusercontent.com/xuruidong/markdown-link-checker/main/link_checker.py + + - name: Setup python + uses: actions/setup-python@v5 + with: + python-version: '3.9' + + - name: Link check (critical, all files) + run: | + # python link_checker.py ./ --enable-external --ignore "http://apisix.iresty.com" "https://www.upyun.com" "https://github.com/apache/apisix/actions/workflows/build.yml/badge.svg" "https://httpbin.org/" "https://en.wikipedia.org/wiki/Cache" + python link_checker.py ./ diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/lint.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/lint.yml new file mode 100644 index 0000000..a3244ca --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/lint.yml @@ -0,0 +1,46 @@ +name: ❄️ Lint + +on: [push, pull_request] + +permissions: + contents: read + +jobs: + misc: + name: misc checker + runs-on: ubuntu-latest + steps: + - name: Check out code. 
+ uses: actions/checkout@v4 + - name: spell check + run: | + pip install codespell==2.1.0 + # codespell considers some repo name in go.sum are misspelled + git grep --cached -l '' | grep -v go.sum | grep -v pnpm-lock.yaml |xargs codespell --ignore-words=.ignore_words --skip="*.ts,*.mts" + - name: Merge conflict + run: | + bash ./utils/check-merge-conflict.sh + - name: Plugin Code + run: | + bash ./utils/check-plugins-code.sh + + ci-eclint: + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - name: Check out code + uses: actions/checkout@v4 + + - name: Setup Nodejs env + uses: actions/setup-node@v4.4.0 + with: + node-version: '12' + + - name: Install eclint + run: | + sudo npm install -g eclint + + - name: Run eclint + run: | + eclint check diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/push-dev-image-on-commit.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/push-dev-image-on-commit.yml new file mode 100644 index 0000000..793347b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/push-dev-image-on-commit.yml @@ -0,0 +1,109 @@ +name: Build and Push `apisix:dev` to DockerHub on Commit + +on: + pull_request: + paths-ignore: + - "docs/**" + - "**/*.md" + push: + paths-ignore: + - "docs/**" + - "**/*.md" + workflow_dispatch: + +jobs: + build-test-push: + strategy: + matrix: + include: + - runner: ubuntu-24.04 + arch: amd64 + - runner: ubuntu-24.04-arm + arch: arm64 + + runs-on: ${{ matrix.runner }} + + env: + APISIX_DOCKER_TAG: master-debian-dev + ENV_OS_ARCH: ${{ matrix.arch }} + DOCKER_BUILDKIT: 1 + + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Build APISIX Dashboard + run: | + # install node.js and pnpm + sudo n lts + corepack enable pnpm + + # prepare apisix-dashboard source code + source .requirements + git clone --revision=${APISIX_DASHBOARD_COMMIT} --depth 1 https://github.com/apache/apisix-dashboard.git + pushd apisix-dashboard + + # compile + pnpm install --frozen-lockfile + pnpm run build + popd + + # copy the dist files to the ui directory + mkdir ui + cp -r apisix-dashboard/dist/* ui/ + rm -r apisix-dashboard + + - name: Build and run + run: | + make build-on-debian-dev + docker compose -f ./docker/compose/docker-compose-master.yaml up -d + sleep 30 + docker logs compose-apisix-1 + + - name: Test APISIX + run: | + curl http://127.0.0.1:9180/apisix/admin/routes/1 \ + -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' + { + "uri": "/get", + "upstream": { + "type": "roundrobin", + "nodes": { "httpbin.org:80": 1 } + } + }' + + result_code=$(curl -I -m 10 -o /dev/null -s -w %{http_code} http://127.0.0.1:9080/get) + if [[ $result_code -ne 200 ]]; then + printf "result_code: %s\n" "$result_code" + exit 125 + fi + + - name: Login to Docker Hub + if: github.ref == 'refs/heads/master' + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Push apisix image to Docker Hub + if: github.ref == 'refs/heads/master' + run: | + make push-on-debian-dev + + merge-tags: + needs: build-test-push + if: github.ref == 'refs/heads/master' + runs-on: ubuntu-latest + steps: + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Merge architecture-specific tags + run: | + make merge-dev-tags diff --git 
a/CloudronPackages/APISIX/apisix-source/.github/workflows/redhat-ci.yaml b/CloudronPackages/APISIX/apisix-source/.github/workflows/redhat-ci.yaml new file mode 100644 index 0000000..190919c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/redhat-ci.yaml @@ -0,0 +1,179 @@ +name: CI Redhat UBI - Daily + +on: + schedule: + - cron: "0 0 * * *" + pull_request: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + test_apisix: + name: run ci on redhat ubi + runs-on: ubuntu-latest + timeout-minutes: 90 + strategy: + fail-fast: false + matrix: + events_module: + - lua-resty-worker-events + - lua-resty-events + test_dir: + - t/plugin/[a-k]* + - t/plugin/[l-z]* + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/discovery t/error_page t/misc + - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/xds-library + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v4 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-ubi8.6-${{ hashFiles('apisix-master-0.rockspec') }} + + - name: Extract branch name + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + id: branch_env + shell: bash + run: | + echo "version=${GITHUB_REF##*/}" >>$GITHUB_OUTPUT + + - name: Extract test type + shell: bash + id: test_env + run: | + test_dir="${{ matrix.test_dir }}" + if [[ $test_dir =~ 't/plugin' ]]; then + echo "type=plugin" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/admin ' ]]; then + echo "type=first" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ ' t/xds-library' ]]; then + echo "type=last" >>$GITHUB_OUTPUT + fi + + - name: Free disk space + run: | + bash ./ci/free_disk_space.sh + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + sudo ./ci/init-common-test-service.sh + + - name: Build rpm package + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + run: | + export VERSION=${{ steps.branch_env.outputs.version }} + sudo gem install --no-document fpm + git clone --depth 1 https://github.com/api7/apisix-build-tools.git + + # move codes under build tool + mkdir ./apisix-build-tools/apisix + for dir in `ls|grep -v "^apisix-build-tools$"`;do cp -r $dir ./apisix-build-tools/apisix/;done + + cd apisix-build-tools + make package type=rpm app=apisix version=${VERSION} checkout=release/${VERSION} image_base=ubi image_tag=8.6 local_code_path=./apisix + cd .. 
+ rm -rf $(ls -1 -I apisix-build-tools -I t -I utils -I ci --ignore=Makefile -I "*.rockspec") + + - name: Start Dubbo Backend + run: | + cur_dir=$(pwd) + sudo apt update + sudo apt install -y maven + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java \ + -Djava.net.preferIPv4Stack=true \ + -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + cd $cur_dir/t/lib/dubbo-serialization-backend + mvn package + cd dubbo-serialization-backend-provider/target + java \ + -Djava.net.preferIPv4Stack=true \ + -jar dubbo-demo-provider.one-jar.jar > /tmp/java2.log & + + - name: Build xDS library + if: steps.test_env.outputs.type == 'last' + run: | + cd t/xds-library + go build -o libxds.so -buildmode=c-shared main.go export.go + + - name: Run redhat docker and mapping apisix into container + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + TEST_EVENTS_MODULE: ${{ matrix.events_module }} + run: | + docker run -itd -v ${{ github.workspace }}:/apisix --env TEST_FILE_SUB_DIR="$TEST_FILE_SUB_DIR" --env TEST_EVENTS_MODULE="$TEST_EVENTS_MODULE" --name ubiInstance --net="host" --dns 8.8.8.8 --dns-search apache.org registry.access.redhat.com/ubi8/ubi:8.6 /bin/bash + + - name: Cache images + id: cache-images + uses: actions/cache@v4 + env: + cache-name: cache-apisix-docker-images + with: + path: docker-images-backup + key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} + + - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} + name: Load saved docker images + run: | + if [[ -f docker-images-backup/apisix-images.tar ]]; then + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + docker load --input docker-images-backup/apisix-images.tar + rm docker-images-backup/apisix-images.tar + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + echo "loaded docker images" + if [[ ${{ steps.test_env.outputs.type }} != first ]]; then + sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + fi + fi + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Linux launch services + run: | + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + echo "Linux launch services, done." + + - name: Install dependencies + run: | + docker exec ubiInstance bash -c "cd apisix && chmod +x ./ci/redhat-ci.sh && ./ci/redhat-ci.sh install_dependencies" + + - name: Install rpm package + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + run: | + docker exec ubiInstance bash -c "cd apisix && rpm -iv --prefix=/apisix ./apisix-build-tools/output/apisix-${{ steps.branch_env.outputs.version }}-0.ubi8.6.x86_64.rpm" + # Dependencies are attached with rpm, so revert `make deps` + docker exec ubiInstance bash -c "cd apisix && rm -rf deps" + docker exec ubiInstance bash -c "cd apisix && mv usr/bin . && mv usr/local/apisix/* ." 
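Editor's note: the Red Hat job above keeps a single UBI 8 container alive for the whole run, bind-mounts the checkout at `/apisix`, and drives each phase through `docker exec`. Stripped of the GitHub-specific environment plumbing, the pattern looks roughly like this (a sketch; the `--env` variables and rpm branch from the real steps are omitted):

```bash
# Start one long-lived UBI container with the repository mounted inside it.
docker run -itd \
  -v "$(pwd)":/apisix \
  --name ubiInstance --net=host \
  registry.access.redhat.com/ubi8/ubi:8.6 /bin/bash

# Each workflow step then becomes a docker exec against that container.
docker exec ubiInstance bash -c "cd apisix && chmod +x ./ci/redhat-ci.sh && ./ci/redhat-ci.sh install_dependencies"
docker exec ubiInstance bash -c "cd apisix && ./ci/redhat-ci.sh run_case"
```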
+ + - name: Run test cases + run: | + docker exec ubiInstance bash -c "cd apisix && chmod +x ./ci/redhat-ci.sh && ./ci/redhat-ci.sh run_case" + + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Save docker images + run: | + echo "start backing up, $(date)" + bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} + echo "backup done, $(date)" diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/semantic.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/semantic.yml new file mode 100644 index 0000000..85df2c0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/semantic.yml @@ -0,0 +1,35 @@ +name: "PR Lint" + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +jobs: + main: + name: Validate PR title + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v4 + with: + submodules: recursive + - uses: ./.github/actions/action-semantic-pull-request + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: | + feat + fix + docs + style + refactor + perf + test + build + ci + chore + revert + change diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/source-install.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/source-install.yml new file mode 100644 index 0000000..c21435f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/source-install.yml @@ -0,0 +1,124 @@ +name: Source Code Install + +on: + push: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + install-on-multi-platform: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + os_platform: + - ubuntu + - redhat + services: + etcd: + image: bitnami/etcd:3.5.4 + ports: + - 2379:2379 + - 2380:2380 + env: + ALLOW_NONE_AUTHENTICATION: yes + ETCD_ADVERTISE_CLIENT_URLS: http://0.0.0.0:2379 + httpbin: + image: kennethreitz/httpbin + ports: + - 8088:80 + + runs-on: ${{ matrix.platform }} + timeout-minutes: 30 + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v4 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_platform }}-${{ hashFiles('apisix-master-0.rockspec') }} + + - name: Install and start apisix on ${{ matrix.os_platform }} + env: + INSTALL_PLATFORM: ${{ matrix.os_platform }} + run: | + if [[ $INSTALL_PLATFORM == "ubuntu" ]]; then + sudo apt-get update + sudo apt-get install -y git sudo make + make deps + sudo make install + apisix start + elif [[ $INSTALL_PLATFORM == "redhat" ]]; then + docker run -itd -v ${{ github.workspace }}:/apisix --name ubi8 --net="host" --dns 8.8.8.8 --dns-search apache.org registry.access.redhat.com/ubi8/ubi:8.6 /bin/bash + docker exec ubi8 bash -c "yum install -y git sudo make" + docker exec ubi8 bash -c "cd apisix && make deps" + docker exec ubi8 bash -c "cd apisix && make install" + docker exec ubi8 bash -c "cd apisix && apisix start" + elif [[ $INSTALL_PLATFORM == "centos7" ]]; then + docker run -itd -v ${{ github.workspace }}:/apisix --name centos7Instance --net="host" --dns 8.8.8.8 --dns-search apache.org 
docker.io/centos:7 /bin/bash + docker exec centos7Instance bash -c "yum install -y git sudo make" + docker exec centos7Instance bash -c "cd apisix && make deps" + docker exec centos7Instance bash -c "cd apisix && make install" + docker exec centos7Instance bash -c "cd apisix && apisix start" + fi + sleep 6 + + - name: Test apisix + run: | + wget https://github.com/mikefarah/yq/releases/download/3.4.1/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq + get_admin_key() { + local admin_key=$(yq '.deployment.admin.admin_key[0].key' conf/config.yaml) + echo "$admin_key" + } + export admin_key=$(get_admin_key); echo $admin_key + cat conf/config.yaml + curl -v http://127.0.0.1:9180/apisix/admin/routes/1 \ + -H "X-API-KEY: $admin_key" -X PUT -d ' + { + "uri": "/get", + "upstream": { + "type": "roundrobin", + "nodes": { + "127.0.0.1:8088": 1 + } + } + }' + result_code=`curl -I -m 10 -o /dev/null -s -w %{http_code} http://127.0.0.1:9080/get` + if [[ $result_code -ne 200 ]]; then + printf "result_code: %s\n" "$result_code" + echo "===============access.log===============" + cat logs/access.log + echo "===============error.log===============" + cat logs/error.log + exit 125 + fi + + - name: Check error log + run: | + if grep -q '\[error\]' logs/error.log; then + echo "=====found error log=====" + cat /usr/local/apisix/logs/error.log + exit 125 + fi diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/stale.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/stale.yml new file mode 100644 index 0000000..3bd686e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/stale.yml @@ -0,0 +1,52 @@ +name: Stable Test + +on: + workflow_dispatch: + schedule: + - cron: '0 10 * * *' + +permissions: + contents: read + +jobs: + prune_stale: + permissions: + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs + name: Prune Stale + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Prune Stale + uses: actions/stale@v8 + with: + days-before-issue-stale: 350 + days-before-issue-close: 14 + stale-issue-message: > + This issue has been marked as stale due to 350 days of inactivity. + It will be closed in 2 weeks if no further activity occurs. If this issue is still + relevant, please simply write any comment. Even if closed, you can still revive the + issue at any time or discuss it on the dev@apisix.apache.org list. + Thank you for your contributions. + close-issue-message: > + This issue has been closed due to lack of activity. If you think that + is incorrect, or the issue requires additional review, you can revive the issue at + any time. + days-before-pr-stale: 60 + days-before-pr-close: 28 + stale-pr-message: > + This pull request has been marked as stale due to 60 days of inactivity. + It will be closed in 4 weeks if no further activity occurs. If you think + that's incorrect or this pull request should instead be reviewed, please simply + write any comment. Even if closed, you can still revive the PR at any time or + discuss it on the dev@apisix.apache.org list. + Thank you for your contributions. + close-pr-message: > + This pull request/issue has been closed due to lack of activity. If you think that + is incorrect, or the pull request requires review, you can revive the PR at any time. + # Issues with these labels will never be considered stale. 
+ exempt-issue-labels: 'bug,enhancement,good first issue' + stale-issue-label: 'stale' + stale-pr-label: 'stale' + ascending: true diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/tars-ci.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/tars-ci.yml new file mode 100644 index 0000000..8e2dba4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/tars-ci.yml @@ -0,0 +1,55 @@ +name: CI Tars + +on: + push: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + tars: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-latest + os_name: + - linux_openresty + + runs-on: ${{ matrix.platform }} + timeout-minutes: 15 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup Tars MySql + run: | + docker run -d -p 3306:3306 -v $PWD/t/tars/conf/tars.sql:/docker-entrypoint-initdb.d/tars.sql -e MYSQL_ROOT_PASSWORD=tars2022 mysql:5.7 + + - name: Linux Install + run: | + sudo ./ci/${{ matrix.os_name }}_runner.sh before_install + sudo --preserve-env=OPENRESTY_VERSION ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Run test cases + run: | + ./ci/tars-ci.sh run_case diff --git a/CloudronPackages/APISIX/apisix-source/.github/workflows/update-labels.yml b/CloudronPackages/APISIX/apisix-source/.github/workflows/update-labels.yml new file mode 100644 index 0000000..bc974d9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.github/workflows/update-labels.yml @@ -0,0 +1,62 @@ +name: Update labels when user responds in issue and pr +permissions: + issues: write + pull-requests: write + +on: + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + +jobs: + issue_commented: + if: github.event.issue && !github.event.issue.pull_request && github.event.comment.user.login == github.event.issue.user.login && contains(github.event.issue.labels.*.name, 'wait for update') && !contains(github.event.issue.labels.*.name, 'user responded') + runs-on: ubuntu-latest + steps: + - name: update labels when user responds + uses: actions/github-script@v7 + with: + script: | + github.rest.issues.addLabels({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + labels: ["user responded"] + }) + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: "wait for update" + }) + + pr_commented: + if: github.event.issue && github.event.issue.pull_request && github.event.comment.user.login == github.event.issue.user.login && (contains(github.event.issue.labels.*.name, 'wait for update') || contains(github.event.issue.labels.*.name, 'discuss') || contains(github.event.issue.labels.*.name, 'need test cases')) && !contains(github.event.issue.labels.*.name, 'user responded') + runs-on: ubuntu-latest + steps: + - name: update label when user responds + uses: actions/github-script@v7 + with: + script: | + github.rest.issues.addLabels({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + labels: ["user responded"] + }) + + 
pr_review_commented: + if: github.event.pull_request && github.event.comment.user.login == github.event.pull_request.user.login && (contains(github.event.pull_request.labels.*.name, 'wait for update') || contains(github.event.pull_request.labels.*.name, 'discuss') || contains(github.event.issue.labels.*.name, 'need test cases')) && !contains(github.event.pull_request.labels.*.name, 'user responded') + runs-on: ubuntu-latest + steps: + - name: update label when user responds + uses: actions/github-script@v7 + with: + script: | + github.rest.issues.addLabels({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + labels: ["user responded"] + }) diff --git a/CloudronPackages/APISIX/apisix-source/.gitmodules b/CloudronPackages/APISIX/apisix-source/.gitmodules new file mode 100644 index 0000000..9819855 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.gitmodules @@ -0,0 +1,9 @@ +[submodule "t/toolkit"] + path = t/toolkit + url = https://github.com/api7/test-toolkit.git +[submodule ".github/actions/action-semantic-pull-request"] + path = .github/actions/action-semantic-pull-request + url = https://github.com/amannn/action-semantic-pull-request.git +[submodule ".github/actions/autocorrect"] + path = .github/actions/autocorrect + url = https://github.com/huacnlee/autocorrect.git diff --git a/CloudronPackages/APISIX/apisix-source/.ignore_words b/CloudronPackages/APISIX/apisix-source/.ignore_words new file mode 100644 index 0000000..86683d3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.ignore_words @@ -0,0 +1,11 @@ +iam +te +ba +ue +shttp +nd +hel +nulll +smove +aks +nin diff --git a/CloudronPackages/APISIX/apisix-source/.licenserc.yaml b/CloudronPackages/APISIX/apisix-source/.licenserc.yaml new file mode 100644 index 0000000..86edebf --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.licenserc.yaml @@ -0,0 +1,60 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +header: + license: + spdx-id: Apache-2.0 + copyright-owner: Apache Software Foundation + + license-location-threshold: 360 + + paths-ignore: + - '.gitignore' + - '.gitattributes' + - '.gitmodules' + - 'LICENSE' + - 'NOTICE' + - '**/*.json' + - '**/*.key' + - '**/*.crt' + - '**/*.pem' + - '**/*.pb.go' + - '**/pnpm-lock.yaml' + - '.github/' + - 'conf/mime.types' + - '**/*.svg' + # Exclude CI env_file + - 'ci/pod/**/*.env' + # eyes has some limitation to handle git pattern + - '**/*.log' + # Exclude test toolkit files + - 't/toolkit' + - 'go.mod' + - 'go.sum' + # Exclude non-Apache licensed files + - 'apisix/balancer/ewma.lua' + # Exclude plugin-specific configuration files + - 't/plugin/authz-casbin' + - 't/coredns' + - 't/fuzzing/requirements.txt' + - 'autodocs/' + - 'docs/**/*.md' + - '.ignore_words' + - '.luacheckrc' + # Exclude file contains certificate revocation information + - 't/certs/ocsp/index.txt' + + comment: on-failure diff --git a/CloudronPackages/APISIX/apisix-source/.markdownlint.yml b/CloudronPackages/APISIX/apisix-source/.markdownlint.yml new file mode 100644 index 0000000..36d2485 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/.markdownlint.yml @@ -0,0 +1,34 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +MD001: false +MD004: false +MD005: false +MD006: false +MD007: false +MD010: false +MD013: false +MD014: false +MD024: false +MD026: false +MD029: false +MD033: false +MD034: false +MD036: false +MD040: false +MD041: false +MD046: false diff --git a/CloudronPackages/APISIX/apisix-source/CHANGELOG.md b/CloudronPackages/APISIX/apisix-source/CHANGELOG.md new file mode 100644 index 0000000..41c430a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/CHANGELOG.md @@ -0,0 +1,1936 @@ +--- +title: Changelog +--- + + + +## Table of Contents + +- [3.13.0](#3130) +- [3.12.0](#3120) +- [3.11.0](#3110) +- [3.10.0](#3100) +- [3.9.0](#390) +- [3.8.0](#380) +- [3.7.0](#370) +- [3.6.0](#360) +- [3.5.0](#350) +- [3.4.0](#340) +- [3.3.0](#330) +- [3.2.1](#321) +- [3.2.0](#320) +- [3.1.0](#310) +- [3.0.0](#300) +- [3.0.0-beta](#300-beta) +- [2.15.3](#2153) +- [2.15.2](#2152) +- [2.15.1](#2151) +- [2.15.0](#2150) +- [2.14.1](#2141) +- [2.14.0](#2140) +- [2.13.3](#2133) +- [2.13.2](#2132) +- [2.13.1](#2131) +- [2.13.0](#2130) +- [2.12.1](#2121) +- [2.12.0](#2120) +- [2.11.0](#2110) +- [2.10.5](#2105) +- [2.10.4](#2104) +- [2.10.3](#2103) +- [2.10.2](#2102) +- [2.10.1](#2101) +- [2.10.0](#2100) +- [2.9.0](#290) +- [2.8.0](#280) +- [2.7.0](#270) +- [2.6.0](#260) +- [2.5.0](#250) +- [2.4.0](#240) +- [2.3.0](#230) +- [2.2.0](#220) +- [2.1.0](#210) +- [2.0.0](#200) +- [1.5.0](#150) +- [1.4.1](#141) +- [1.4.0](#140) +- [1.3.0](#130) +- [1.2.0](#120) +- [1.1.0](#110) +- [1.0.0](#100) +- [0.9.0](#090) +- [0.8.0](#080) +- [0.7.0](#070) +- [0.6.0](#060) + +## 3.13.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: mark server-info plugin as deprecated [#12244](https://github.com/apache/apisix/pull/12244) +- :warning: fill in the metadata of resource schema [#12224](https://github.com/apache/apisix/pull/12224). +This PR sets additionalProperties to false for consumer credentials. 
+ +### Bugfixes + +- fix: running stale healthchecker when new node count <= 1 [#12118](https://github.com/apache/apisix/pull/12118) +- fix: release healthchecker on 0 nodes [#12126](https://github.com/apache/apisix/pull/12126) +- fix: only parse and validate apisix.yaml in cli when startup [#12216](https://github.com/apache/apisix/pull/12216) +- fix(standalone): API-driven mode does not properly handle consumer schema [#12256](https://github.com/apache/apisix/pull/12256) +- fix: added restriction for TLSv1.3 cross-SNI session resumption [#12366](https://github.com/apache/apisix/pull/12366) +- fix: flaky t/admin/filter.t due to url encoding for query params [#12370](https://github.com/apache/apisix/pull/12370) +- fix(workflow/push-dev-image-on-commit): remove already defined uses [#12365](https://github.com/apache/apisix/pull/12365) +- fix(workflow): use runners with different architectures instead of QEMU [#12322](https://github.com/apache/apisix/pull/12322) +- fix: kubernetes service discovery single mode data dump [#12284](https://github.com/apache/apisix/pull/12284) +- fix: handle consul nil port cases by defaulting to port 80 [#12304](https://github.com/apache/apisix/pull/12304) +- fix: check if config contains duplicate resources in API-driven standalone mode [#12317](https://github.com/apache/apisix/pull/12317) +- fix: original key being modified causing cache inconsistency [#12299](https://github.com/apache/apisix/pull/12299) +- fix: access to the apisix dashboard in dev returns 404 [#12376](https://github.com/apache/apisix/pull/12376) + +### Core + +- feat(consumer): consumer username allows - in it [#12296](https://github.com/apache/apisix/pull/12296) +- chore: change log level to debug to avoid unnecessary logs [#12361](https://github.com/apache/apisix/pull/12361) +- chore: change log level from warn to info for stale batch processor removal [#12297](https://github.com/apache/apisix/pull/12297) +- feat(standalone): allow more characters in credential_id for API-driven mode [#12295](https://github.com/apache/apisix/pull/12295) +- feat: add standalone admin api [#12179](https://github.com/apache/apisix/pull/12179) +- feat: support health checker for stream subsystem [#12180](https://github.com/apache/apisix/pull/12180) +- feat(standalone): support revision in API-driven standalone mode like etcd [#12214](https://github.com/apache/apisix/pull/12214) +- feat: add healthcheck for sync configuration [#12200](https://github.com/apache/apisix/pull/12200) +- perf: compare service discovery nodes by address [#12258](https://github.com/apache/apisix/pull/12258) +- feat: fill in the metadata of resource schema [#12224](https://github.com/apache/apisix/pull/12224) +- feat: add embedded apisix dashboard ui [#12276](https://github.com/apache/apisix/pull/12276) +- feat: add apisix dashboard to dev image [#12369](https://github.com/apache/apisix/pull/12369) +- feat: add max pending entries option to batch-processor [#12338](https://github.com/apache/apisix/pull/12338) +- feat(standalone): support JSON format [#12333](https://github.com/apache/apisix/pull/12333) +- feat: enhance admin api filter [#12291](https://github.com/apache/apisix/pull/12291) +- feat: add warning for data plane writing to etcd [#12241](https://github.com/apache/apisix/pull/12241) +- chore: upgrade openresty version to v1.27.1.2 [#12307](https://github.com/apache/apisix/pull/12307) +- chore: upgrade luarocks version to 3.12.0 [#12305](https://github.com/apache/apisix/pull/12305) + +### Plugins + +- refactor(ai-proxy): move 
read_response into ai_driver.request function [#12101](https://github.com/apache/apisix/pull/12101) +- refactor: mcp server framework implementation [#12168](https://github.com/apache/apisix/pull/12168) +- feat: add mcp-bridge plugin [#12151](https://github.com/apache/apisix/pull/12151) +- feat: add lago plugin [#12196](https://github.com/apache/apisix/pull/12196) +- feat: add headers attribute for loki-logger [#12243](https://github.com/apache/apisix/pull/12243) +- feat: expose apisix version in prometheus node info metric [#12367](https://github.com/apache/apisix/pull/12367) + +## Doc improvements + +- docs: update stream proxy doc for proxy_mode and some formatting [#12108](https://github.com/apache/apisix/pull/12108) +- docs: improve loki-logger plugin docs [#11921](https://github.com/apache/apisix/pull/11921) +- docs: improve ua-restriction plugin docs [#11956](https://github.com/apache/apisix/pull/11956) +- docs: improve elasticsearch-logger plugin docs [#11922](https://github.com/apache/apisix/pull/11922) +- fix file logger example wrong data structure [#12125](https://github.com/apache/apisix/pull/12125) +- docs: improve limit-req plugin docs [#11873](https://github.com/apache/apisix/pull/11873) +- docs: improve body-transformer plugin docs [#11856](https://github.com/apache/apisix/pull/11856) +- docs: update ai-rate-limiting and ai-rag docs [#12107](https://github.com/apache/apisix/pull/12107) +- docs: improve basic-auth docs and update docs for anonymous consumer [#11859](https://github.com/apache/apisix/pull/11859) +- docs: improve key-auth docs and update docs for anonymous consumer [#11860](https://github.com/apache/apisix/pull/11860) +- docs: improve hmac-auth plugin docs and update docs for anonymous consumer [#11867](https://github.com/apache/apisix/pull/11867) +- docs: improve jwt-auth plugin docs and update docs for anonymous consumer [#11865](https://github.com/apache/apisix/pull/11865) +- docs: improve request-validation plugin docs [#11853](https://github.com/apache/apisix/pull/11853) +- docs: update variable in building apisix from source [#11640](https://github.com/apache/apisix/pull/11640) +- docs: update readme with APISIX AI Gateway product link and MCP feature [#12166](https://github.com/apache/apisix/pull/12166) +- docs: improve plugin-develop docs [#12242](https://github.com/apache/apisix/pull/12242) +- docs: fix typo in real-ip.md [#12236](https://github.com/apache/apisix/pull/12236) +- docs: the configuration type of the WASM plugin can be an object. 
[#12251](https://github.com/apache/apisix/pull/12251) + +## Developer productivity + +- feat: support devcontainer for containerized development of APISIX [#11765](https://github.com/apache/apisix/pull/11765) + +## 3.12.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: replace plugin attribute with plugin metadata in `opentelemetry` plugin [#11940](https://github.com/apache/apisix/pull/11940) +- :warning: refactor: ai-content-moderation to ai-aws-content-moderation [#12010](https://github.com/apache/apisix/pull/12010) +- add expiration time for all Prometheus metrics [#11838](https://github.com/apache/apisix/pull/11838) +- allow workflow config without case [#11787](https://github.com/apache/apisix/pull/11787) +- unify google-cloud-oauth.lua file [#11596](https://github.com/apache/apisix/pull/11596) +- :warning: ai-proxy remove passthrough [#12014](https://github.com/apache/apisix/pull/12014) +- :warning: remove model options' `stream` default value [#12013](https://github.com/apache/apisix/pull/12013) +- :warning: grpc-web response contains two trailer chunks [#11988](https://github.com/apache/apisix/pull/11988). +This PR returns `405 Method not allowed` instead of `400 Bad Request` when request HTTP method errors. +- :warning: disallow empty key configuration attributes [#11852](https://github.com/apache/apisix/pull/11852) +- :warning: set default value of ssl_trusted_certificate to system [#11993](https://github.com/apache/apisix/pull/11993) + +### Bugfixes + +- Fix: timeout risk in usages of lua-resty-aws [#12070](https://github.com/apache/apisix/pull/12070) +- Fix: ai-rate-limiting not allowed to limit to a single instance [#12061](https://github.com/apache/apisix/pull/12061) +- Fix: update watch_ctx.revision to avoid multiple resyncs [#12021](https://github.com/apache/apisix/pull/12021) +- Fix: ai-proxy remove passthrough [#12014](https://github.com/apache/apisix/pull/12014) +- Fix: ai-proxy dead loop when retrying [#12012](https://github.com/apache/apisix/pull/12012) +- Fix: error while trying to log table in ai-content-moderation plugin [#11994](https://github.com/apache/apisix/pull/11994) +- Fix: resync etcd when a lower revision is found [#12015](https://github.com/apache/apisix/pull/12015) +- Fix: remove model options' `stream` default value [#12013](https://github.com/apache/apisix/pull/12013) +- Fix: grpc-web response contains two trailer chunks [#11988](https://github.com/apache/apisix/pull/11988) +- Fix: event_id is nil in chaitin-waf [#11651](https://github.com/apache/apisix/pull/11651) +- Fix: race condition problem while update upstream.nodes [#11916](https://github.com/apache/apisix/pull/11916) +- Fix: `upstream_obj.upstream` should not be a string [#11932](https://github.com/apache/apisix/pull/11932) +- Fix: query params in override.endpoint not being sent to LLMs [#11863](https://github.com/apache/apisix/pull/11863) +- Fix: add support for ignoring "load" global variable [#11862](https://github.com/apache/apisix/pull/11862) +- Fix: corrupt data in routes() response due to healthchecker data [#11844](https://github.com/apache/apisix/pull/11844) +- Fix: deepcopy should copy same table exactly only once [#11861](https://github.com/apache/apisix/pull/11861) +- Fix: disallow empty key configuration attributes [#11852](https://github.com/apache/apisix/pull/11852) +- Fix: etcd watch restart when receive invalid revision [#11833](https://github.com/apache/apisix/pull/11833) +- Fix: missing parsed_url nil check 
[#11637](https://github.com/apache/apisix/pull/11637)
+- Fix: use `plugin.get` to fetch plugin configured in multi-auth plugin [#11794](https://github.com/apache/apisix/pull/11794)
+- Fix: allow special characters in uri params [#11788](https://github.com/apache/apisix/pull/11788)
+- Fix: add nil check to conf in body-transformer [#11768](https://github.com/apache/apisix/pull/11768)
+- Fix: use max_req_body_bytes field in custom_format [#11771](https://github.com/apache/apisix/pull/11771)
+- Fix: health checker can't be released due to health parent being released early [#11760](https://github.com/apache/apisix/pull/11760)
+- Fix: use right modifiedIndex for consumer when use credential [#11649](https://github.com/apache/apisix/pull/11649)
+
+### Core
+
+- set default value of ssl_trusted_certificate to system [#11993](https://github.com/apache/apisix/pull/11993)
+- upgrade openresty version to v1.27.11 [#11936](https://github.com/apache/apisix/pull/11936)
+- Support the use of system-provided CA certs in `ssl_trusted_certificate` [#11809](https://github.com/apache/apisix/pull/11809)
+- support _meta.pre_function to execute custom logic before execution of each phase [#11793](https://github.com/apache/apisix/pull/11793)
+- support anonymous consumer [#11917](https://github.com/apache/apisix/pull/11917)
+- accelerate the creation of the consumer cache [#11840](https://github.com/apache/apisix/pull/11840)
+- replace 'string.find' with 'core.string.find' [#11886](https://github.com/apache/apisix/pull/11886)
+- workflow plugin registration [#11832](https://github.com/apache/apisix/pull/11832)
+
+### Plugins
+
+- refactor ai-proxy and ai-proxy-multi [#12030](https://github.com/apache/apisix/pull/12030)
+- support embeddings API [#12062](https://github.com/apache/apisix/pull/12062)
+- implement rate limiting based fallback strategy [#12047](https://github.com/apache/apisix/pull/12047)
+- ai-rate-limiting plugin [#12037](https://github.com/apache/apisix/pull/12037)
+- add `valid_issuers` field in `openid-connect` plugin [#12002](https://github.com/apache/apisix/pull/12002)
+- add ai-prompt-guard plugin [#12008](https://github.com/apache/apisix/pull/12008)
+- add jwt audience validator [#11987](https://github.com/apache/apisix/pull/11987)
+- store JWT in the request context [#11675](https://github.com/apache/apisix/pull/11675)
+- support proxying openai compatible LLMs [#12004](https://github.com/apache/apisix/pull/12004)
+- add `ai-proxy-multi` plugin [#11986](https://github.com/apache/apisix/pull/11986) [#12030](https://github.com/apache/apisix/pull/12030)
+- make rate limiting response header names configurable [#11831](https://github.com/apache/apisix/pull/11831)
+- support multipart content-type in `body-transformer` [#11767](https://github.com/apache/apisix/pull/11767)
+- plugins in multi-auth return error instead of logging it [#11775](https://github.com/apache/apisix/pull/11775)
+- support configuring `key_claim_name` [#11772](https://github.com/apache/apisix/pull/11772)
+- add Total request per second panel in grafana dashboard [#11692](https://github.com/apache/apisix/pull/11692)
+- add ai-rag plugin [#11568](https://github.com/apache/apisix/pull/11568)
+- add ai-content-moderation plugin [#11541](https://github.com/apache/apisix/pull/11541)
+- use setmetatable to set hidden variables without affecting serialisation [#11770](https://github.com/apache/apisix/pull/11770)
+
+## 3.11.0
+
+**The changes marked with :warning: are not backward compatible.**
+
+### Change
+
+- :warning: remove JWT signing endpoint and no longer require a private key to be uploaded in the jwt-auth plugin. [#11597](https://github.com/apache/apisix/pull/11597)
+- :warning: rewrite hmac-auth plugin for usability [#11581](https://github.com/apache/apisix/pull/11581)
+
+### Plugins
+
+- allow configuring keepalive_timeout in splunk-logger [#11611](https://github.com/apache/apisix/pull/11611)
+- add plugin attach-consumer-label [#11604](https://github.com/apache/apisix/pull/11604)
+- ai-proxy plugin [#11499](https://github.com/apache/apisix/pull/11499)
+- ai-prompt-decorator plugin [#11515](https://github.com/apache/apisix/pull/11515)
+- ai-prompt-template plugin [#11517](https://github.com/apache/apisix/pull/11517)
+
+### Bugfixes
+
+- Fix: adjust the position of enums in pb_option_def [#11448](https://github.com/apache/apisix/pull/11448)
+- Fix: encryption/decryption for non-auth plugins in consumer [#11600](https://github.com/apache/apisix/pull/11600)
+- Fix: confusion when substituting ENV in config file [#11545](https://github.com/apache/apisix/pull/11545)
+
+### Core
+
+- support gcp secret manager [#11436](https://github.com/apache/apisix/pull/11436)
+- support aws secret manager [#11417](https://github.com/apache/apisix/pull/11417)
+- add credential resource and include `X-Consumer-Username`, `X-Credential-Identifier`, and `X-Consumer-Custom-ID` headers in requests to upstream services [#11601](https://github.com/apache/apisix/pull/11601)
+
+## 3.10.0
+
+**The changes marked with :warning: are not backward compatible.**
+
+### Change
+
+- :warning: remove `core.grpc` module [#11427](https://github.com/apache/apisix/pull/11427)
+- add max req/resp body size attributes [#11133](https://github.com/apache/apisix/pull/11133)
+- :warning: autogenerate admin api key if not passed [#11080](https://github.com/apache/apisix/pull/11080)
+- :warning: enable sensitive fields encryption by default [#11076](https://github.com/apache/apisix/pull/11076)
+- support more sensitive fields for encryption [#11095](https://github.com/apache/apisix/pull/11095)
+- :warning: avoid overwriting `Access-Control-Expose-Headers` response header [#11136](https://github.com/apache/apisix/pull/11136)
+This change removes the default `*` value for `expose_headers` and only sets the header when explicitly configured.
+- :warning: add a default limit of 100 for `get_headers()` [#11140](https://github.com/apache/apisix/pull/11140)
+- :warning: core.request.header return strings instead of table [#11127](https://github.com/apache/apisix/pull/11127)
+This function now always returns a string; previously it returned a table when duplicate headers existed.
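+
+A minimal illustrative sketch of what the `core.request.header` change means for custom plugin code (the `header-demo` plugin name and `X-Tag` header are hypothetical, and schema/registration boilerplate is omitted):
+
+```lua
+local core = require("apisix.core")
+
+local _M = {version = 0.1, priority = 1000, name = "header-demo"}
+
+function _M.access(conf, ctx)
+    -- With duplicate request headers such as "X-Tag: a" and "X-Tag: b",
+    -- this call could previously return a table of values; from 3.10.0
+    -- onwards it always returns a single string.
+    local tag = core.request.header(ctx, "X-Tag")
+    core.log.info("X-Tag resolved to: ", tag)
+end
+
+return _M
+```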
+
+### Plugins
+
+- allow set headers in introspection request [#11090](https://github.com/apache/apisix/pull/11090)
+
+### Bugfixes
+
+- Fix: add libyaml-dev dependency for apt [#11291](https://github.com/apache/apisix/pull/11291)
+- Fix: etcd sync data checker should work [#11457](https://github.com/apache/apisix/pull/11457)
+- Fix: plugin metadata add id value for etcd checker [#11452](https://github.com/apache/apisix/pull/11452)
+- Fix: allow trailing period in SNI and CN for SSL [#11414](https://github.com/apache/apisix/pull/11414)
+- Fix: filter out illegal INT(string) formats [#11367](https://github.com/apache/apisix/pull/11367)
+- Fix: make the message clearer when API key is missing [#11370](https://github.com/apache/apisix/pull/11370)
+- Fix: report consumer username tag in datadog [#11354](https://github.com/apache/apisix/pull/11354)
+- Fix: after updating the header, get the old value from the ctx.var [#11329](https://github.com/apache/apisix/pull/11329)
+- Fix: ssl key rotation caused request failure [#11305](https://github.com/apache/apisix/pull/11305)
+- Fix: validation fails causing etcd events not to be handled correctly [#11268](https://github.com/apache/apisix/pull/11268)
+- Fix: stream route matcher is nil after first match [#11269](https://github.com/apache/apisix/pull/11269)
+- Fix: rectify the way to fetch secret resource by id [#11164](https://github.com/apache/apisix/pull/11164)
+- Fix: multi-auth raise 500 error when use default conf [#11145](https://github.com/apache/apisix/pull/11145)
+- Fix: avoid overwriting `Access-Control-Expose-Headers` response header [#11136](https://github.com/apache/apisix/pull/11136)
+- Fix: close session in case of error to avoid blocked session [#11089](https://github.com/apache/apisix/pull/11089)
+- Fix: restore `pb.state` appropriately [#11135](https://github.com/apache/apisix/pull/11135)
+- Fix: add a default limit of 100 for `get_headers()` [#11140](https://github.com/apache/apisix/pull/11140)
+- Fix: disable features when prometheus plugin is turned off [#11117](https://github.com/apache/apisix/pull/11117)
+- Fix: add post request headers only if auth request method is POST [#11021](https://github.com/apache/apisix/pull/11021)
+- Fix: core.request.header return strings instead of table [#11127](https://github.com/apache/apisix/pull/11127)
+- Fix: brotli partial response [#11087](https://github.com/apache/apisix/pull/11087)
+- Fix: the port value greater than 65535 should not be allowed [#11043](https://github.com/apache/apisix/pull/11043)
+
+### Core
+
+- upgrade openresty version to 1.25.3.2 [#11419](https://github.com/apache/apisix/pull/11419)
+- move config-default.yaml to hardcoded lua file [#11343](https://github.com/apache/apisix/pull/11343)
+- warn log when sending requests to external services insecurely [#11403](https://github.com/apache/apisix/pull/11403)
+- update casbin to 1.41.9 [#11400](https://github.com/apache/apisix/pull/11400)
+- update lua-resty-t1k to 1.1.5 [#11391](https://github.com/apache/apisix/pull/11391)
+- support store ssl.keys ssl.certs in secrets manager [#11339](https://github.com/apache/apisix/pull/11339)
+- move tinyyaml to lyaml [#11312](https://github.com/apache/apisix/pull/11312)
+- support hcv namespace [#11277](https://github.com/apache/apisix/pull/11277)
+- add discovery k8s dump data interface [#11111](https://github.com/apache/apisix/pull/11111)
+- make fetch_secrets use cache for performance [#11201](https://github.com/apache/apisix/pull/11201)
+- replace 'string.len' with '#'
[#11078](https://github.com/apache/apisix/pull/11078) + +## 3.9.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: use apisix.enable_http2 to enable HTTP/2 in APISIX [#11032](https://github.com/apache/apisix/pull/11032) +- :warning: unify the keyring and key_encrypt_salt fields [#10771](https://github.com/apache/apisix/pull/10771) + +### Core + +- :sunrise: Support HTTP3/QUIC + - [#10989](https://github.com/apache/apisix/pull/10989) + - [#11010](https://github.com/apache/apisix/pull/11010) + - [#11027](https://github.com/apache/apisix/pull/11027) +- :sunrise: add plugins/reload to control api [#10905](https://github.com/apache/apisix/pull/10905) +- :sunrise: consul deduplicate and sort [#10941](https://github.com/apache/apisix/pull/10941) +- :sunrise: support uri_arg_ when use radixtree_uri_with_parameter [#10645](https://github.com/apache/apisix/pull/10645) + +### Plugins + +- :sunrise: add session.cookie configuration [#10919](https://github.com/apache/apisix/pull/10919) +- :sunrise: support endpointslices in kubernetes discovery [#10916](https://github.com/apache/apisix/pull/10916) +- :sunrise: add redis and redis-cluster in limit-req [#10874](https://github.com/apache/apisix/pull/10874) +- :sunrise: support expire prometheus metrics [#10869](https://github.com/apache/apisix/pull/10869) +- :sunrise: add redis and redis-cluster in limit-conn [#10866](https://github.com/apache/apisix/pull/10866) +- :sunrise: allow configuring allow-headers in grpc-web plugin [#10904](https://github.com/apache/apisix/pull/10904) +- :sunrise: Add forward-auth plugin exception configuration status_on_error [#10898](https://github.com/apache/apisix/pull/10898) +- :sunrise: add option to include request body and response body in log util [#10888](https://github.com/apache/apisix/pull/10888) +- :sunrise: support compressed responses in loggers [#10884](https://github.com/apache/apisix/pull/10884) +- :sunrise: add http-dubbo plugin [#10703](https://github.com/apache/apisix/pull/10703) +- :sunrise: support built-in variables in response_headers in mocking plugin [#10872](https://github.com/apache/apisix/pull/10872) +- :sunrise: support other data formats without warnings [#10862](https://github.com/apache/apisix/pull/10862) +- :sunrise: add ocsp-stapling plugin [#10817](https://github.com/apache/apisix/pull/10817) + +### Bug Fixes + +- Fix: keep different strategy response header consistency [#11048](https://github.com/apache/apisix/pull/11048) +- Fix: add apisix/plugin/limit-req to makefile [#10955](https://github.com/apache/apisix/pull/10959) +- Fix: wrong namespace related endpoint in k8s [#10917](https://github.com/apache/apisix/pull/10917) +- Fix: when delete the secret cause 500 error [#10902](https://github.com/apache/apisix/pull/10902) +- Fix: jwe-decrypt secret length restriction [#10928](https://github.com/apache/apisix/pull/10928) +- Fix: unnecessary YAML Config reloads [#9065](https://github.com/apache/apisix/pull/9065) +- Fix: real_payload was overridden by malicious payload [#10982](https://github.com/apache/apisix/pull/10982) +- Fix: all origins could pass when allow_origins_by_metadata is set [#10948](https://github.com/apache/apisix/pull/10948) +- Fix: add compatibility headers [#10828](https://github.com/apache/apisix/pull/10828) +- Fix: missing trailers issue [#10851](https://github.com/apache/apisix/pull/10851) +- Fix: decryption failure [#10843](https://github.com/apache/apisix/pull/10843) +- Fix: linux-install-luarocks are not compatible with 
the openresty environment [#10813](https://github.com/apache/apisix/pull/10813) +- Fix: server-side sessions locked by not calling explicit session:close() [#10788](https://github.com/apache/apisix/pull/10788) +- Fix: skip brotli compression for upstream compressed response [#10740](https://github.com/apache/apisix/pull/10740) +- Fix: use_jwks breaking authentication header [#10670](https://github.com/apache/apisix/pull/10670) +- Fix: authz_keycloak plugin giving 500 error [#10763](https://github.com/apache/apisix/pull/10763) + +## 3.8.0 + +### Core + +- :sunrise: Support the use of lua-resty-events module for better performance: + - [#10550](https://github.com/apache/apisix/pull/10550) + - [#10558](https://github.com/apache/apisix/pull/10558) +- :sunrise: Upgrade OpenSSL 1.1.1 to OpenSSL 3: [#10724](https://github.com/apache/apisix/pull/10724) + +### Plugins + +- :sunrise: Add jwe-decrypt plugin: [#10252](https://github.com/apache/apisix/pull/10252) +- :sunrise: Support brotli when use filters.regex option (response-rewrite): [#10733](https://github.com/apache/apisix/pull/10733) +- :sunrise: Add multi-auth plugin: [#10482](https://github.com/apache/apisix/pull/10482) +- :sunrise: Add `required scopes` configuration property to `openid-connect` plugin: [#10493](https://github.com/apache/apisix/pull/10493) +- :sunrise: Support for the Timing-Allow-Origin header (cors): [#9365](https://github.com/apache/apisix/pull/9365) +- :sunrise: Add brotli plugin: [#10515](https://github.com/apache/apisix/pull/10515) +- :sunrise: Body-transformer plugin enhancement(#10472): [#10496](https://github.com/apache/apisix/pull/10496) +- :sunrise: Set minLength of redis_cluster_nodes to 1 for limit-count plugin: [#10612](https://github.com/apache/apisix/pull/10612) +- :sunrise: Allow to use environment variables for limit-count plugin settings: [#10607](https://github.com/apache/apisix/pull/10607) + +### Bugfixes + +- Fix: When the upstream nodes are of array type, the port should be an optional field: [#10477](https://github.com/apache/apisix/pull/10477) +- Fix: Incorrect variable extraction in fault-injection plugin: [#10485](https://github.com/apache/apisix/pull/10485) +- Fix: All consumers should share the same counter (limit-count): [#10541](https://github.com/apache/apisix/pull/10541) +- Fix: Safely remove upstream when sending route to opa plugin: [#10552](https://github.com/apache/apisix/pull/10552) +- Fix: Missing etcd init_dir and unable to list resource: [#10569](https://github.com/apache/apisix/pull/10569) +- Fix: Forward-auth request body is too large: [#10589](https://github.com/apache/apisix/pull/10589) +- Fix: Memory leak caused by timer that never quit: [#10614](https://github.com/apache/apisix/pull/10614) +- Fix: Do not invoke add_header if value resolved as nil in proxy-rewrite plugin: [#10619](https://github.com/apache/apisix/pull/10619) +- Fix: Frequent traversal of all keys in etcd leads to high CPU usage: [#10671](https://github.com/apache/apisix/pull/10671) +- Fix: For prometheus upstream_status metrics, mostly_healthy is healthy: [#10639](https://github.com/apache/apisix/pull/10639) +- Fix: Avoid getting a nil value in log phase in zipkin: [#10666](https://github.com/apache/apisix/pull/10666) +- Fix: Enable openid-connect plugin without redirect_uri got 500 error: [#7690](https://github.com/apache/apisix/pull/7690) +- Fix: Add redirect_after_logout_uri for ODIC that do not have an end_session_endpoint: [#10653](https://github.com/apache/apisix/pull/10653) +- Fix: Response-rewrite 
filters.regex does not apply when content-encoding is gzip: [#10637](https://github.com/apache/apisix/pull/10637) +- Fix: The leak of prometheus metrics: [#10655](https://github.com/apache/apisix/pull/10655) +- Fix: Authz-keycloak add return detail err: [#10691](https://github.com/apache/apisix/pull/10691) +- Fix: upstream nodes was not updated correctly by service discover: [#10722](https://github.com/apache/apisix/pull/10722) +- Fix: apisix restart failed: [#10696](https://github.com/apache/apisix/pull/10696) + +## 3.7.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: Creating core resources does not allow passing in `create_time` and `update_time`: [#10232](https://github.com/apache/apisix/pull/10232) +- :warning: Remove self-contained info fields `exptime` and `validity_start` and `validity_end` from ssl schema: [10323](https://github.com/apache/apisix/pull/10323) +- :warning: Replace `route` with `apisix.route_name`, `service` with `apisix.service_name` in the attributes of opentelemetry plugin to follow the standards for span name and attributes: [#10393](https://github.com/apache/apisix/pull/10393) + +### Core + +- :sunrise: Added token to support access control for consul discovery: [#10278](https://github.com/apache/apisix/pull/10278) +- :sunrise: Support configuring `service_id` in stream_route to reference service resources: [#10298](https://github.com/apache/apisix/pull/10298) +- :sunrise: Using `apisix-runtime` as the apisix runtime: + - [#10415](https://github.com/apache/apisix/pull/10415) + - [#10427](https://github.com/apache/apisix/pull/10427) + +### Plugins + +- :sunrise: Add tests for authz-keycloak with apisix secrets: [#10353](https://github.com/apache/apisix/pull/10353) +- :sunrise: Add authorization params to openid-connect plugin: [#10058](https://github.com/apache/apisix/pull/10058) +- :sunrise: Support set variable in zipkin plugin: [#10361](https://github.com/apache/apisix/pull/10361) +- :sunrise: Support Nacos ak/sk authentication: [#10445](https://github.com/apache/apisix/pull/10445) + +### Bugfixes + +- Fix: Use warn log for get healthcheck target status failure: + - [#10156](https://github.com/apache/apisix/pull/10156) +- Fix: Keep healthcheck target state when upstream changes: + - [#10312](https://github.com/apache/apisix/pull/10312) + - [#10307](https://github.com/apache/apisix/pull/10307) +- Fix: Add name field in plugin_config schema for consistency: [#10315](https://github.com/apache/apisix/pull/10315) +- Fix: Optimize tls in upstream_schema and wrong variable: [#10269](https://github.com/apache/apisix/pull/10269) +- Fix(consul): Failed to exit normally: [#10342](https://github.com/apache/apisix/pull/10342) +- Fix: The request header with `Content-Type: application/x-www-form-urlencoded;charset=utf-8` will cause vars condition `post_arg_xxx` matching to failed: [#10372](https://github.com/apache/apisix/pull/10372) +- Fix: Make install failed on mac: [#10403](https://github.com/apache/apisix/pull/10403) +- Fix(log-rotate): Log compression timeout caused data loss: [#8620](https://github.com/apache/apisix/pull/8620) +- Fix(kafka-logger): Remove 0 from enum of required_acks: [#10469](https://github.com/apache/apisix/pull/10469) + +## 3.6.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: Remove gRPC support between APISIX and etcd and remove `etcd.use_grpc` configuration option: [#10015](https://github.com/apache/apisix/pull/10015) +- :warning: Remove conf 
server. The data plane no longer supports direct communication with the control plane, and the configuration should be adjusted from `config_provider: control_plane` to `config_provider: etcd`: [#10012](https://github.com/apache/apisix/pull/10012) +- :warning: Enforce strict schema validation on the properties of the core APISIX resources: [#10233](https://github.com/apache/apisix/pull/10233) + +### Core + +- :sunrise: Support configuring the buffer size of the access log: [#10225](https://github.com/apache/apisix/pull/10225) +- :sunrise: Support the use of local DNS resolvers in service discovery by configuring `resolv_conf`: [#9770](https://github.com/apache/apisix/pull/9770) +- :sunrise: Remove Rust dependency for installation: [#10121](https://github.com/apache/apisix/pull/10121) +- :sunrise: Support Dubbo protocol in xRPC [#9660](https://github.com/apache/apisix/pull/9660) + +### Plugins + +- :sunrise: Support https in traffic-split plugin: [#9115](https://github.com/apache/apisix/pull/9115) +- :sunrise: Support rewrite request body in external plugin:[#9990](https://github.com/apache/apisix/pull/9990) +- :sunrise: Support set nginx variables in opentelemetry plugin: [#8871](https://github.com/apache/apisix/pull/8871) +- :sunrise: Support unix sock host pattern in the chaitin-waf plugin: [#10161](https://github.com/apache/apisix/pull/10161) + +### Bugfixes + +- Fix GraphQL POST request route matching exception: [#10198](https://github.com/apache/apisix/pull/10198) +- Fix error on array of multiline string in `apisix.yaml`: [#10193](https://github.com/apache/apisix/pull/10193) +- Add error handlers for invalid `cache_zone` configuration in the `proxy-cache` plugin: [#10138](https://github.com/apache/apisix/pull/10138) + +## 3.5.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: remove snowflake algorithm in the request-id plugin: [#9715](https://github.com/apache/apisix/pull/9715) +- :warning: No longer compatible with OpenResty 1.19, it needs to be upgraded to 1.21+: [#9913](https://github.com/apache/apisix/pull/9913) +- :warning: Remove the configuration item `apisix.stream_proxy.only`, the L4/L7 proxy needs to be enabled through the configuration item `apisix.proxy_mode`: [#9607](https://github.com/apache/apisix/pull/9607) +- :warning: The admin-api `/apisix/admin/plugins?all=true` marked as deprecated: [#9580](https://github.com/apache/apisix/pull/9580) +- :warning: allowlist and denylist can't be enabled at the same time in ua-restriction plugin: [#9841](https://github.com/apache/apisix/pull/9841) + +### Core + +- :sunrise: Support host level dynamic setting of tls protocol version: [#9903](https://github.com/apache/apisix/pull/9903) +- :sunrise: Support force delete resource: [#9810](https://github.com/apache/apisix/pull/9810) +- :sunrise: Support pulling env vars from yaml keys: [#9855](https://github.com/apache/apisix/pull/9855) +- :sunrise: Add schema validate API in admin-api: [#10065](https://github.com/apache/apisix/pull/10065) + +### Plugins + +- :sunrise: Add chaitin-waf plugin: [#9838](https://github.com/apache/apisix/pull/9838) +- :sunrise: Support vars for file-logger plugin: [#9712](https://github.com/apache/apisix/pull/9712) +- :sunrise: Support adding response headers for mock plugin: [#9720](https://github.com/apache/apisix/pull/9720) +- :sunrise: Support regex_uri with unsafe_uri for proxy-rewrite plugin: [#9813](https://github.com/apache/apisix/pull/9813) +- :sunrise: Support set client_email field for 
google-cloud-logging plugin: [#9813](https://github.com/apache/apisix/pull/9813) +- :sunrise: Support sending headers upstream returned by OPA server for opa plugin: [#9710](https://github.com/apache/apisix/pull/9710) +- :sunrise: Support configuring proxy server for openid-connect plugin: [#9948](https://github.com/apache/apisix/pull/9948) + +### Bugfixes + +- Fix(log-rotate): the max_kept configuration doesn't work when using custom name: [#9749](https://github.com/apache/apisix/pull/9749) +- Fix(limit_conn): do not use the http variable in stream mode: [#9816](https://github.com/apache/apisix/pull/9816) +- Fix(loki-logger): getting an error with log_labels: [#9850](https://github.com/apache/apisix/pull/9850) +- Fix(limit-count): X-RateLimit-Reset shouldn't be set to 0 after request be rejected: [#9978](https://github.com/apache/apisix/pull/9978) +- Fix(nacos): attempt to index upvalue 'applications' (a nil value): [#9960](https://github.com/apache/apisix/pull/9960) +- Fix(etcd): can't sync etcd data if key has special character: [#9967](https://github.com/apache/apisix/pull/9967) +- Fix(tencent-cloud-cls): dns parsing failure: [#9843](https://github.com/apache/apisix/pull/9843) +- Fix(reload): worker not exited when executing quit or reload command [#9909](https://github.com/apache/apisix/pull/9909) +- Fix(traffic-split): upstream_id validity verification [#10008](https://github.com/apache/apisix/pull/10008) + +## 3.4.0 + +### Core + +- :sunrise: Support route-level MTLS [#9322](https://github.com/apache/apisix/pull/9322) +- :sunrise: Support id schema for global_rules [#9517](https://github.com/apache/apisix/pull/9517) +- :sunrise: Support use a single long http connection to watch all resources for etcd [#9456](https://github.com/apache/apisix/pull/9456) +- :sunrise: Support max len 256 for ssl label [#9301](https://github.com/apache/apisix/pull/9301) + +### Plugins + +- :sunrise: Support multiple regex pattern matching for proxy_rewrite plugin [#9194](https://github.com/apache/apisix/pull/9194) +- :sunrise: Add loki-logger plugin [#9399](https://github.com/apache/apisix/pull/9399) +- :sunrise: Allow user configure DEFAULT_BUCKETS for prometheus plugin [#9673](https://github.com/apache/apisix/pull/9673) + +### Bugfixes + +- Fix(body-transformer): xml2lua: replace empty table with empty string [#9669](https://github.com/apache/apisix/pull/9669) +- Fix: opentelemetry and grpc-transcode plugins cannot work together [#9606](https://github.com/apache/apisix/pull/9606) +- Fix(skywalking-logger, error-log-logger): support $hostname in skywalking service_instance_name [#9401](https://github.com/apache/apisix/pull/9401) +- Fix(admin): fix secrets do not support to update attributes by PATCH [#9510](https://github.com/apache/apisix/pull/9510) +- Fix(http-logger): default request path should be '/' [#9472](https://github.com/apache/apisix/pull/9472) +- Fix: syslog plugin doesn't work [#9425](https://github.com/apache/apisix/pull/9425) +- Fix: wrong log format for splunk-hec-logging [#9478](https://github.com/apache/apisix/pull/9478) +- Fix(etcd): reuse cli and enable keepalive [#9420](https://github.com/apache/apisix/pull/9420) +- Fix: upstream key config add mqtt_client_id support [#9450](https://github.com/apache/apisix/pull/9450) +- Fix: body-transformer plugin return raw body anytime [#9446](https://github.com/apache/apisix/pull/9446) +- Fix(wolf-rbac): other plugin in consumer not effective when consumer used wolf-rbac plugin [#9298](https://github.com/apache/apisix/pull/9298) +- Fix: 
always parse domain when host is domain name [#9332](https://github.com/apache/apisix/pull/9332) +- Fix: response-rewrite plugin can't add only one character [#9372](https://github.com/apache/apisix/pull/9372) +- Fix(consul): support to fetch only health endpoint [#9204](https://github.com/apache/apisix/pull/9204) + +## 3.3.0 + +**The changes marked with :warning: are not backward compatible.** + +### Change + +- :warning: Change the default router from `radixtree_uri` to `radixtree_host_uri`: [#9047](https://github.com/apache/apisix/pull/9047) +- :warning: CORS plugin will add `Vary: Origin` header when `allow_origin` is not `*`: [#9010](https://github.com/apache/apisix/pull/9010) + +### Core + +- :sunrise: Support store route's cert in secrets manager: [#9247](https://github.com/apache/apisix/pull/9247) +- :sunrise: Support bypassing Admin API Auth by configuration: [#9147](https://github.com/apache/apisix/pull/9147) + +### Plugins + +- :sunrise: Support header injection for `fault-injection` plugin: [#9039](https://github.com/apache/apisix/pull/9039) +- :sunrise: Support variable when rewrite header in `proxy-rewrite` plugin: [#9112](https://github.com/apache/apisix/pull/9112) +- :sunrise: `limit-count` plugin supports `username` and `ssl` for redis policy: [#9185](https://github.com/apache/apisix/pull/9185) + +### Bugfixes + +- Fix etcd data sync exception: [#8493](https://github.com/apache/apisix/pull/8493) +- Fix invalidate cache in `core.request.add_header` and fix some calls: [#8824](https://github.com/apache/apisix/pull/8824) +- Fix the high CPU and memory usage cause by healthcheck impl: [#9015](https://github.com/apache/apisix/pull/9015) +- Consider using `allow_origins_by_regex` only when it is not `nil`: [#9028](https://github.com/apache/apisix/pull/9028) +- Check upstream reference in `traffic-split` plugin when delete upstream: [#9044](https://github.com/apache/apisix/pull/9044) +- Fix failing to connect to etcd at startup: [#9077](https://github.com/apache/apisix/pull/9077) +- Fix health checker leak for domain nodes: [#9090](https://github.com/apache/apisix/pull/9090) +- Prevent non `127.0.0.0/24` to access admin api with empty admin_key: [#9146](https://github.com/apache/apisix/pull/9146) +- Ensure `hold_body_chunk` should use separate buffer for each plugin in case of pollution: [#9266](https://github.com/apache/apisix/pull/9266) +- Ensure `batch-requests` plugin read trailer headers if existed: [#9289](https://github.com/apache/apisix/pull/9289) +- Ensure `proxy-rewrite` should set `ngx.var.uri`: [#9309](https://github.com/apache/apisix/pull/9309) + +## 3.2.1 + +**This is an LTS maintenance release and you can see the CHANGELOG in `release/3.2` branch.** + +[https://github.com/apache/apisix/blob/release/3.2/CHANGELOG.md#321](https://github.com/apache/apisix/blob/release/3.2/CHANGELOG.md#321) + +## 3.2.0 + +### Change + +- Deprecated separate Vault configuration in jwt-auth. 
Users can use secret to achieve the same function: [#8660](https://github.com/apache/apisix/pull/8660) + +### Core + +- :sunrise: Support Vault token to configure secret through environment variables: [#8866](https://github.com/apache/apisix/pull/8866) +- :sunrise: Supports service discovery on stream subsystem: + - [#8583](https://github.com/apache/apisix/pull/8583) + - [#8593](https://github.com/apache/apisix/pull/8593) + - [#8584](https://github.com/apache/apisix/pull/8584) + - [#8640](https://github.com/apache/apisix/pull/8640) + - [#8633](https://github.com/apache/apisix/pull/8633) + - [#8696](https://github.com/apache/apisix/pull/8696) + - [#8826](https://github.com/apache/apisix/pull/8826) + +### Plugins + +- :sunrise: Add RESTful to graphQL conversion plugin: [#8959](https://github.com/apache/apisix/pull/8959) +- :sunrise: Supports setting the log format on each log plugin: + - [#8806](https://github.com/apache/apisix/pull/8806) + - [#8643](https://github.com/apache/apisix/pull/8643) +- :sunrise: Add request body/response body conversion plugin: [#8766](https://github.com/apache/apisix/pull/8766) +- :sunrise: Support sending error logs to Kafka: [#8693](https://github.com/apache/apisix/pull/8693) +- :sunrise: limit-count plugin supports X-RateLimit-Reset: [#8578](https://github.com/apache/apisix/pull/8578) +- :sunrise: limit-count plugin supports setting TLS to access Redis cluster: [#8558](https://github.com/apache/apisix/pull/8558) +- :sunrise: consumer-restriction plugin supports permission control via consumer_group_id: [#8567](https://github.com/apache/apisix/pull/8567) + +### Bugfixes + +- Fix mTLS protection when the host and SNI mismatch: [#8967](https://github.com/apache/apisix/pull/8967) +- The proxy-rewrite plugin should escape URI parameter parts if they do not come from user config: [#8888](https://github.com/apache/apisix/pull/8888) +- Admin API PATCH operation should return 200 status code after success: [#8855](https://github.com/apache/apisix/pull/8855) +- Under certain conditions, the reload after etcd synchronization failure does not take effect: [#8736](https://github.com/apache/apisix/pull/8736) +- Fix the problem that the nodes found by the Consul service discovery are incomplete: [#8651](https://github.com/apache/apisix/pull/8651) +- Fix grpc-transcode plugin's conversion of Map data: [#8731](https://github.com/apache/apisix/pull/8731) +- External plugins should be able to set the content-type response header: [#8588](https://github.com/apache/apisix/pull/8588) +- When hotloading plugins, redundant timers may be left behind if the request-id plugin initializes the snowflake generator incorrectly: [#8556](https://github.com/apache/apisix/pull/8556) +- Close previous proto synchronizer for grpc-transcode when hotloading plugins: [#8557](https://github.com/apache/apisix/pull/8557) + +## 3.1.0 + +### Core + +- :sunrise: Support for etcd configuration synchronization via gRPC: + - [#8485](https://github.com/apache/apisix/pull/8485) + - [#8450](https://github.com/apache/apisix/pull/8450) + - [#8411](https://github.com/apache/apisix/pull/8411) +- :sunrise: Support for configuring encrypted fields in plugins: + - [#8487](https://github.com/apache/apisix/pull/8487) + - [#8403](https://github.com/apache/apisix/pull/8403) +- :sunrise: Support for placing partial fields in Vault or environment variable using secret resources: + - [#8448](https://github.com/apache/apisix/pull/8448) + - [#8421](https://github.com/apache/apisix/pull/8421) + - 
[#8412](https://github.com/apache/apisix/pull/8412) + - [#8394](https://github.com/apache/apisix/pull/8394) + - [#8390](https://github.com/apache/apisix/pull/8390) +- :sunrise: Allows upstream configuration in the stream subsystem as a domain name: [#8500](https://github.com/apache/apisix/pull/8500) +- :sunrise: Support Consul service discovery: [#8380](https://github.com/apache/apisix/pull/8380) + +### Plugin + +- :sunrise: Optimize resource usage for prometheus collection: [#8434](https://github.com/apache/apisix/pull/8434) +- :sunrise: Add inspect plugin for easy debugging: [#8400](https://github.com/apache/apisix/pull/8400) +- :sunrise: jwt-auth plugin supports parameters to hide authentication token from upstream : [#8206](https://github.com/apache/apisix/pull/8206) +- :sunrise: proxy-rewrite plugin supports adding new request headers without overwriting existing request headers with the same name: [#8336](https://github.com/apache/apisix/pull/8336) +- :sunrise: grpc-transcode plugin supports setting the grpc-status-details-bin response header into the response body: [#7639](https://github.com/apache/apisix/pull/7639) +- :sunrise: proxy-mirror plugin supports setting the prefix: [#8261](https://github.com/apache/apisix/pull/8261) + +### Bugfix + +- Fix the problem that the plug-in configured under service object cannot take effect in time under some circumstances: [#8482](https://github.com/apache/apisix/pull/8482) +- Fix an occasional 502 problem when http and grpc share the same upstream connection due to connection pool reuse: [#8364](https://github.com/apache/apisix/pull/8364) +- file-logger should avoid buffer-induced log truncation when writing logs: [#7884](https://github.com/apache/apisix/pull/7884) +- max_kept parameter of log-rotate plugin should take effect on compressed files: [#8366](https://github.com/apache/apisix/pull/8366) +- Fix userinfo not being set when use_jwks is true in the openid-connect plugin: [#8347](https://github.com/apache/apisix/pull/8347) +- Fix an issue where x-forwarded-host cannot be changed in the proxy-rewrite plugin: [#8200](https://github.com/apache/apisix/pull/8200) +- Fix a bug where disabling the v3 admin API resulted in missing response bodies under certain circumstances: [#8349](https://github.com/apache/apisix/pull/8349) +- In zipkin plugin, pass trace ID even if there is a rejected sampling decision: [#8099](https://github.com/apache/apisix/pull/8099) +- Fix `_meta.filter` in plugin configuration not working with variables assigned after upstream response and custom variables in APISIX. 
+  - [#8162](https://github.com/apache/apisix/pull/8162)
+  - [#8256](https://github.com/apache/apisix/pull/8256)
+
+## 3.0.0
+
+### Change
+
+- `enable_cpu_affinity` is disabled by default to avoid this configuration affecting the behavior of APISIX deployed in the container: [#8074](https://github.com/apache/apisix/pull/8074)
+
+### Core
+
+- :sunrise: Added Consumer Group entity to manage multiple consumers: [#7980](https://github.com/apache/apisix/pull/7980)
+- :sunrise: Supports configuring the order in which DNS resolves domain name types: [#7935](https://github.com/apache/apisix/pull/7935)
+- :sunrise: Support configuring multiple `key_encrypt_salt` for rotation: [#7925](https://github.com/apache/apisix/pull/7925)
+
+### Plugin
+
+- :sunrise: Added ai plugin to dynamically optimize the execution path of APISIX according to the scene:
+  - [#8102](https://github.com/apache/apisix/pull/8102)
+  - [#8113](https://github.com/apache/apisix/pull/8113)
+  - [#8120](https://github.com/apache/apisix/pull/8120)
+  - [#8128](https://github.com/apache/apisix/pull/8128)
+  - [#8130](https://github.com/apache/apisix/pull/8130)
+  - [#8149](https://github.com/apache/apisix/pull/8149)
+  - [#8157](https://github.com/apache/apisix/pull/8157)
+- :sunrise: Support `session_secret` in openid-connect plugin to resolve the inconsistency of `session_secret` among multiple workers: [#8068](https://github.com/apache/apisix/pull/8068)
+- :sunrise: Support sasl config in kafka-logger plugin: [#8050](https://github.com/apache/apisix/pull/8050)
+- :sunrise: Support set resolve domain in proxy-mirror plugin: [#7861](https://github.com/apache/apisix/pull/7861)
+- :sunrise: Support `brokers` property in kafka-logger plugin, which supports different broker to set the same host: [#7999](https://github.com/apache/apisix/pull/7999)
+- :sunrise: Support get response body in ext-plugin-post-resp: [#7947](https://github.com/apache/apisix/pull/7947)
+- :sunrise: Added cas-auth plugin to support CAS authentication: [#7932](https://github.com/apache/apisix/pull/7932)
+
+### Bugfix
+
+- Conditional expressions of workflow plugin should support operators: [#8121](https://github.com/apache/apisix/pull/8121)
+- Fix loading problem of batch processor plugin when prometheus plugin is disabled: [#8079](https://github.com/apache/apisix/pull/8079)
+- When APISIX starts, delete the old conf server sock file if it exists: [#8022](https://github.com/apache/apisix/pull/8022)
+- Disable core.grpc when gRPC-client-nginx-module module is not compiled: [#8007](https://github.com/apache/apisix/pull/8007)
+
+## 3.0.0-beta
+
+Here we use 2.99.0 as the version number in the source code instead of the code name
+`3.0.0-beta` for two reasons:
+
+1. avoid unexpected errors when some programs try to compare the
+version, as `3.0.0-beta` contains `3.0.0` and is longer than it.
+2. some package systems might not allow a package which has a suffix
+after the version number.
+ +### Change + +#### Moves the config_center, etcd and Admin API configuration to the deployment + +We've adjusted the configuration in the static configuration file, so you need to update the configuration in `config.yaml` as well: + +- The `config_center` function is now implemented by `config_provider` under `deployment`: [#7901](https://github.com/apache/apisix/pull/7901) +- The `etcd` field is moved to `deployment`: [#7860](https://github.com/apache/apisix/pull/7860) +- The following Admin API configuration is moved to the `admin` field under `deployment`: [#7823](https://github.com/apache/apisix/pull/7823) + - admin_key + - enable_admin_cors + - allow_admin + - admin_listen + - https_admin + - admin_api_mtls + - admin_api_version + +You can refer to the latest `config-default.yaml` for details. + +#### Removing multiple deprecated configurations + +With the new 3.0 release, we took the opportunity to clean out many configurations that were previously marked as deprecated. + +In the static configuration, we removed several fields as follows: + +- Removed `enable_http2` and `listen_port` from `apisix.ssl`: [#7717](https://github.com/apache/apisix/pull/7717) +- Removed `apisix.port_admin`: [#7716](https://github.com/apache/apisix/pull/7716) +- Removed `etcd.health_check_retry`: [#7676](https://github.com/apache/apisix/pull/7676) +- Removed `nginx_config.http.lua_shared_dicts`: [#7677](https://github.com/apache/apisix/pull/7677) +- Removed `apisix.real_ip_header`: [#7696](https://github.com/apache/apisix/pull/7696) + +In the dynamic configuration, we made the following adjustments: + +- Moved `disable` of the plugin configuration under `_meta`: [#7707](https://github.com/apache/apisix/pull/7707) +- Removed `service_protocol` from the Route: [#7701](https://github.com/apache/apisix/pull/7701) + +There are also specific plugin level changes: + +- Removed `audience` field from authz-keycloak: [#7683](https://github.com/apache/apisix/pull/7683) +- Removed `upstream` field from mqtt-proxy: [#7694](https://github.com/apache/apisix/pull/7694) +- tcp-related configuration placed under the `tcp` field in error-log-logger: [#7700](https://github.com/apache/apisix/pull/7700) +- Removed `max_retry_times` and `retry_interval` fields from syslog: [#7699](https://github.com/apache/apisix/pull/7699) +- The `scheme` field has been removed from proxy-rewrite: [#7695](https://github.com/apache/apisix/pull/7695) + +#### New Admin API response format + +We have adjusted the response format of the Admin API in several PRs as follows: + +- [#7630](https://github.com/apache/apisix/pull/7630) +- [#7622](https://github.com/apache/apisix/pull/7622) + +The new response format is shown below: + +Returns a single configuration: + +```json +{ + "modifiedIndex": 2685183, + "value": { + "id": "1", + ... + }, + "key": "/apisix/routes/1", + "createdIndex": 2684956 +} +``` + +Returns multiple configurations: + +```json +{ + "list": [ + { + "modifiedIndex": 2685183, + "value": { + "id": "1", + ... + }, + "key": "/apisix/routes/1", + "createdIndex": 2684956 + }, + { + "modifiedIndex": 2685163, + "value": { + "id": "2", + ... 
+ },
+ "key": "/apisix/routes/2",
+ "createdIndex": 2685163
+ }
+ ],
+ "total": 2
+}
+```
+
+#### Other
+
+- Port of Admin API changed to 9180: [#7806](https://github.com/apache/apisix/pull/7806)
+- We only support OpenResty 1.19.3.2 and above: [#7625](https://github.com/apache/apisix/pull/7625)
+- Adjusted the priority of the Plugin Config object so that the priority of a plugin configuration with the same name changes from Consumer > Plugin Config > Route > Service to Consumer > Route > Plugin Config > Service: [#7614](https://github.com/apache/apisix/pull/7614)
+
+### Core
+
+- Integrating grpc-client-nginx-module to APISIX: [#7917](https://github.com/apache/apisix/pull/7917)
+- k8s service discovery support for configuring multiple clusters: [#7895](https://github.com/apache/apisix/pull/7895)
+
+### Plugin
+
+- Support for injecting header with specified prefix in opentelemetry plugin: [#7822](https://github.com/apache/apisix/pull/7822)
+- Added openfunction plugin: [#7634](https://github.com/apache/apisix/pull/7634)
+- Added elasticsearch-logger plugin: [#7643](https://github.com/apache/apisix/pull/7643)
+- response-rewrite plugin supports adding response bodies: [#7794](https://github.com/apache/apisix/pull/7794)
+- log-rotate supports specifying the maximum size to cut logs: [#7749](https://github.com/apache/apisix/pull/7749)
+- Added workflow plugin:
+  - [#7760](https://github.com/apache/apisix/pull/7760)
+  - [#7771](https://github.com/apache/apisix/pull/7771)
+- Added Tencent Cloud Log Service plugin: [#7593](https://github.com/apache/apisix/pull/7593)
+- jwt-auth supports ES256 algorithm: [#7627](https://github.com/apache/apisix/pull/7627)
+- ldap-auth internal implementation, switching from lualdap to lua-resty-ldap: [#7590](https://github.com/apache/apisix/pull/7590)
+- http request metrics within the prometheus plugin supports setting additional labels via variables: [#7549](https://github.com/apache/apisix/pull/7549)
+- The clickhouse-logger plugin supports specifying multiple clickhouse endpoints: [#7517](https://github.com/apache/apisix/pull/7517)
+
+### Bugfix
+
+- gRPC proxy sets :authority request header to configured upstream Host: [#7939](https://github.com/apache/apisix/pull/7939)
+- response-rewrite writing to an empty body may cause APISIX to fail to respond to the request: [#7836](https://github.com/apache/apisix/pull/7836)
+- Fix the problem that when using Plugin Config and Consumer at the same time, there is a certain probability that the plugin configuration is not updated: [#7965](https://github.com/apache/apisix/pull/7965)
+- Only reopen log files once when log cutting: [#7869](https://github.com/apache/apisix/pull/7869)
+- Passive health checks should not be enabled by default: [#7850](https://github.com/apache/apisix/pull/7850)
+- The zipkin plugin should pass trace IDs upstream even if it does not sample: [#7833](https://github.com/apache/apisix/pull/7833)
+- Correction of opentelemetry span kind to server: [#7830](https://github.com/apache/apisix/pull/7830)
+- in limit-count plugin, different routes with the same configuration should not share the same counter: [#7750](https://github.com/apache/apisix/pull/7750)
+- Fix occasional exceptions thrown when removing clean_handler: [#7648](https://github.com/apache/apisix/pull/7648)
+- Allow direct use of IPv6 literals when configuring upstream nodes: [#7594](https://github.com/apache/apisix/pull/7594)
+- The wolf-rbac plugin adjusts the way it responds to errors:
+  - [#7561](https://github.com/apache/apisix/pull/7561)
+  - [#7497](https://github.com/apache/apisix/pull/7497)
+- the phases after proxy didn't run when 500 error happens before proxy: [#7703](https://github.com/apache/apisix/pull/7703)
+- avoid error when multiple plugins associated with consumer and have rewrite phase: [#7531](https://github.com/apache/apisix/pull/7531)
+- upgrade lua-resty-etcd to 1.8.3 which fixes various issues: [#7565](https://github.com/apache/apisix/pull/7565)
+
+## 2.15.3
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.15` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.15/CHANGELOG.md#2153](https://github.com/apache/apisix/blob/release/2.15/CHANGELOG.md#2153)
+
+## 2.15.2
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.15` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.15/CHANGELOG.md#2152](https://github.com/apache/apisix/blob/release/2.15/CHANGELOG.md#2152)
+
+## 2.15.1
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.15` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.15/CHANGELOG.md#2151](https://github.com/apache/apisix/blob/release/2.15/CHANGELOG.md#2151)
+
+## 2.15.0
+
+### Change
+
+- We now map the grpc error code OUT_OF_RANGE to http code 400 in grpc-transcode plugin: [#7419](https://github.com/apache/apisix/pull/7419)
+- Rename health_check_retry configuration in etcd section of `config-default.yaml` to startup_retry: [#7304](https://github.com/apache/apisix/pull/7304)
+- Remove `upstream.enable_websocket` which is deprecated since 2020: [#7222](https://github.com/apache/apisix/pull/7222)
+
+### Core
+
+- Support running plugins conditionally: [#7453](https://github.com/apache/apisix/pull/7453)
+- Allow users to specify plugin execution priority: [#7273](https://github.com/apache/apisix/pull/7273)
+- Support getting upstream certificate from ssl object: [#7221](https://github.com/apache/apisix/pull/7221)
+- Allow customizing error response in the plugin: [#7128](https://github.com/apache/apisix/pull/7128)
+- Add metrics to xRPC Redis proxy: [#7183](https://github.com/apache/apisix/pull/7183)
+- Introduce deployment role to simplify the deployment of APISIX:
+  - [#7405](https://github.com/apache/apisix/pull/7405)
+  - [#7417](https://github.com/apache/apisix/pull/7417)
+  - [#7392](https://github.com/apache/apisix/pull/7392)
+  - [#7365](https://github.com/apache/apisix/pull/7365)
+  - [#7249](https://github.com/apache/apisix/pull/7249)
+
+### Plugin
+
+- Add ngx.shared.dict statistic in prometheus plugin: [#7412](https://github.com/apache/apisix/pull/7412)
+- Allow using unescaped raw URL in proxy-rewrite plugin: [#7401](https://github.com/apache/apisix/pull/7401)
+- Add PKCE support to the openid-connect plugin: [#7370](https://github.com/apache/apisix/pull/7370)
+- Support custom log format in sls-logger plugin: [#7328](https://github.com/apache/apisix/pull/7328)
+- Export some params for kafka-client in kafka-logger plugin: [#7266](https://github.com/apache/apisix/pull/7266)
+- Add support for capturing OIDC refresh tokens in openid-connect plugin: [#7220](https://github.com/apache/apisix/pull/7220)
+- Add prometheus plugin in stream subsystem: [#7174](https://github.com/apache/apisix/pull/7174)
+
+### Bugfix
+
+- clear remain state from the latest try before retrying in Kubernetes discovery: [#7506](https://github.com/apache/apisix/pull/7506)
+- the query string was repeated twice when enabling both http_to_https and append_query_string in the redirect plugin: [#7433](https://github.com/apache/apisix/pull/7433)
+- don't send empty Authorization header by default in http-logger: [#7444](https://github.com/apache/apisix/pull/7444)
+- ensure both `group` and `disable` configurations can be used in limit-count: [#7384](https://github.com/apache/apisix/pull/7384)
+- adjust the execution priority of request-id so the tracing plugins can use the request id: [#7281](https://github.com/apache/apisix/pull/7281)
+- correct the transcode of repeated Message in grpc-transcode: [#7231](https://github.com/apache/apisix/pull/7231)
+- var missing in proxy-cache cache key should be ignored: [#7168](https://github.com/apache/apisix/pull/7168)
+- reduce memory usage when abnormal weights are given in chash: [#7103](https://github.com/apache/apisix/pull/7103)
+- cache should be bypassed when the method mismatch in proxy-cache: [#7111](https://github.com/apache/apisix/pull/7111)
+- Upstream keepalive should consider TLS param:
+    - [#7054](https://github.com/apache/apisix/pull/7054)
+    - [#7466](https://github.com/apache/apisix/pull/7466)
+- The redirect plugin sets a correct port during redirecting HTTP to HTTPS:
+    - [#7065](https://github.com/apache/apisix/pull/7065)
+
+## 2.14.1
+
+### Bugfix
+
+- The "unix:" in the `real_ip_from` configuration should not break the batch-requests plugin: [#7106](https://github.com/apache/apisix/pull/7106)
+
+## 2.14.0
+
+### Change
+
+- To adapt the change of OpenTelemetry spec, the default port of OTLP/HTTP is changed to 4318: [#7007](https://github.com/apache/apisix/pull/7007)
+
+### Core
+
+- Introduce an experimental feature to allow subscribing Kafka message via APISIX. This feature is based on the pubsub framework running above websocket:
+  - [#7028](https://github.com/apache/apisix/pull/7028)
+  - [#7032](https://github.com/apache/apisix/pull/7032)
+- Introduce an experimental framework called xRPC to manage non-HTTP L7 traffic:
+  - [#6885](https://github.com/apache/apisix/pull/6885)
+  - [#6901](https://github.com/apache/apisix/pull/6901)
+  - [#6919](https://github.com/apache/apisix/pull/6919)
+  - [#6960](https://github.com/apache/apisix/pull/6960)
+  - [#6965](https://github.com/apache/apisix/pull/6965)
+  - [#7040](https://github.com/apache/apisix/pull/7040)
+- Now we support adding delay according to the command & key during proxying Redis traffic, which is built above xRPC:
+  - [#6999](https://github.com/apache/apisix/pull/6999)
+- Introduce an experimental support to configure APISIX via xDS:
+  - [#6614](https://github.com/apache/apisix/pull/6614)
+  - [#6759](https://github.com/apache/apisix/pull/6759)
+- Add `normalize_uri_like_servlet` option to normalize uri like servlet: [#6984](https://github.com/apache/apisix/pull/6984)
+- Zookeeper service discovery via apisix-seed: [#6751](https://github.com/apache/apisix/pull/6751)
+
+### Plugin
+
+- The real-ip plugin supports recursive IP search like `real_ip_recursive`: [#6988](https://github.com/apache/apisix/pull/6988)
+- The api-breaker plugin allows configuring response: [#6949](https://github.com/apache/apisix/pull/6949)
+- The response-rewrite plugin supports body filters: [#6750](https://github.com/apache/apisix/pull/6750)
+- The request-id plugin adds nanoid algorithm to generate ID: [#6779](https://github.com/apache/apisix/pull/6779)
+- The file-logger plugin can cache & reopen file handler: [#6721](https://github.com/apache/apisix/pull/6721)
+- Add casdoor plugin: [#6382](https://github.com/apache/apisix/pull/6382)
+- The authz-keycloak plugin supports password grant: [#6586](https://github.com/apache/apisix/pull/6586)
+
+### Bugfix
+
+- Upstream keepalive should consider TLS param: [#7054](https://github.com/apache/apisix/pull/7054)
+- Do not expose internal error message to the client:
+  - [#6982](https://github.com/apache/apisix/pull/6982)
+  - [#6859](https://github.com/apache/apisix/pull/6859)
+  - [#6854](https://github.com/apache/apisix/pull/6854)
+  - [#6853](https://github.com/apache/apisix/pull/6853)
+  - [#6846](https://github.com/apache/apisix/pull/6846)
+- DNS supports SRV record with port 0: [#6739](https://github.com/apache/apisix/pull/6739)
+- client mTLS was ignored sometimes in TLS session reuse: [#6906](https://github.com/apache/apisix/pull/6906)
+- The grpc-web plugin doesn't override Access-Control-Allow-Origin header in response: [#6842](https://github.com/apache/apisix/pull/6842)
+- The syslog plugin's default timeout is corrected: [#6807](https://github.com/apache/apisix/pull/6807)
+- The authz-keycloak plugin's `access_denied_redirect_uri` was bypassed sometimes: [#6794](https://github.com/apache/apisix/pull/6794)
+- Handle `USR2` signal properly: [#6758](https://github.com/apache/apisix/pull/6758)
+- The redirect plugin sets a correct port during redirecting HTTP to HTTPS:
+  - [#7065](https://github.com/apache/apisix/pull/7065)
+  - [#6686](https://github.com/apache/apisix/pull/6686)
+- Admin API rejects unknown stream plugin: [#6813](https://github.com/apache/apisix/pull/6813)
+
+## 2.13.3
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.13` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.13/CHANGELOG.md#2133](https://github.com/apache/apisix/blob/release/2.13/CHANGELOG.md#2133)
+
+## 2.13.2
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.13` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.13/CHANGELOG.md#2132](https://github.com/apache/apisix/blob/release/2.13/CHANGELOG.md#2132)
+
+## 2.13.1
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.13` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.13/CHANGELOG.md#2131](https://github.com/apache/apisix/blob/release/2.13/CHANGELOG.md#2131)
+
+## 2.13.0
+
+### Change
+
+- change(syslog): correct the configuration [#6551](https://github.com/apache/apisix/pull/6551)
+- change(server-info): use a new approach(keepalive) to report DP info [#6202](https://github.com/apache/apisix/pull/6202)
+- change(admin): empty nodes should be encoded as array [#6384](https://github.com/apache/apisix/pull/6384)
+- change(prometheus): replace wrong apisix_nginx_http_current_connections{state="total"} label [#6327](https://github.com/apache/apisix/pull/6327)
+- change: don't expose public API by default & remove plugin interceptor [#6196](https://github.com/apache/apisix/pull/6196)
+
+### Core
+
+- :sunrise: feat: add delayed_body_filter phase [#6605](https://github.com/apache/apisix/pull/6605)
+- :sunrise: feat: support for reading environment variables from yaml configuration files [#6505](https://github.com/apache/apisix/pull/6505)
+- :sunrise: feat: rerun rewrite phase for newly added plugins in consumer [#6502](https://github.com/apache/apisix/pull/6502)
+- :sunrise: feat: add config to control write all status to x-upstream-apisix-status [#6392](https://github.com/apache/apisix/pull/6392)
+- :sunrise: feat: add kubernetes discovery module
[#4880](https://github.com/apache/apisix/pull/4880) +- :sunrise: feat(graphql): support http get and post json request [#6343](https://github.com/apache/apisix/pull/6343) + +### Plugin + +- :sunrise: feat: jwt-auth support custom parameters [#6561](https://github.com/apache/apisix/pull/6561) +- :sunrise: feat: set cors allow origins by plugin metadata [#6546](https://github.com/apache/apisix/pull/6546) +- :sunrise: feat: support post_logout_redirect_uri config in openid-connect plugin [#6455](https://github.com/apache/apisix/pull/6455) +- :sunrise: feat: mocking plugin [#5940](https://github.com/apache/apisix/pull/5940) +- :sunrise: feat(error-log-logger): add clickhouse for error-log-logger [#6256](https://github.com/apache/apisix/pull/6256) +- :sunrise: feat: clickhouse logger [#6215](https://github.com/apache/apisix/pull/6215) +- :sunrise: feat(grpc-transcode): support .pb file [#6264](https://github.com/apache/apisix/pull/6264) +- :sunrise: feat: development of Loggly logging plugin [#6113](https://github.com/apache/apisix/pull/6113) +- :sunrise: feat: add opentelemetry plugin [#6119](https://github.com/apache/apisix/pull/6119) +- :sunrise: feat: add public api plugin [#6145](https://github.com/apache/apisix/pull/6145) +- :sunrise: feat: add CSRF plugin [#5727](https://github.com/apache/apisix/pull/5727) + +### Bugfix + +- fix(skywalking,opentelemetry): trace request rejected by auth [#6617](https://github.com/apache/apisix/pull/6617) +- fix(log-rotate): should rotate logs strictly hourly(or minutely) [#6521](https://github.com/apache/apisix/pull/6521) +- fix: deepcopy doesn't copy the metatable [#6623](https://github.com/apache/apisix/pull/6623) +- fix(request-validate): handle duplicate key in JSON [#6625](https://github.com/apache/apisix/pull/6625) +- fix(prometheus): conflict between global rule and route configure [#6579](https://github.com/apache/apisix/pull/6579) +- fix(proxy-rewrite): when conf.headers are missing,conf.method can make effect [#6300](https://github.com/apache/apisix/pull/6300) +- fix(traffic-split): failed to match rule when the first rule failed [#6292](https://github.com/apache/apisix/pull/6292) +- fix(config_etcd): skip resync_delay while etcd watch timeout [#6259](https://github.com/apache/apisix/pull/6259) +- fix(proto): avoid sharing state [#6199](https://github.com/apache/apisix/pull/6199) +- fix(limit-count): keep the counter if the plugin conf is the same [#6151](https://github.com/apache/apisix/pull/6151) +- fix(admin): correct the count field of plugin-metadata/global-rule [#6155](https://github.com/apache/apisix/pull/6155) +- fix: add missing labels after merging route and service [#6177](https://github.com/apache/apisix/pull/6177) + +## 2.12.1 + +**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.12` branch.** + +[https://github.com/apache/apisix/blob/release/2.12/CHANGELOG.md#2121](https://github.com/apache/apisix/blob/release/2.12/CHANGELOG.md#2121) + +## 2.12.0 + +### Change + +- change(serverless): rename "balancer" phase to "before_proxy" [#5992](https://github.com/apache/apisix/pull/5992) +- change: don't promise to support Tengine [#5961](https://github.com/apache/apisix/pull/5961) +- change: enable HTTP when stream proxy is set and enable_admin is true [#5867](https://github.com/apache/apisix/pull/5867) + +### Core + +- :sunrise: feat(L4): support TLS over TCP upstream [#6030](https://github.com/apache/apisix/pull/6030) +- :sunrise: feat: support registering custom variable 
[#5941](https://github.com/apache/apisix/pull/5941) +- :sunrise: feat(vault): vault lua module, integration with jwt-auth authentication plugin [#5745](https://github.com/apache/apisix/pull/5745) +- :sunrise: feat: enable L4 stream logging [#5768](https://github.com/apache/apisix/pull/5768) +- :sunrise: feat: add http_server_location_configuration_snippet configuration [#5740](https://github.com/apache/apisix/pull/5740) +- :sunrise: feat: support resolve default value when environment not set [#5675](https://github.com/apache/apisix/pull/5675) +- :sunrise: feat(wasm): run in http header_filter [#5544](https://github.com/apache/apisix/pull/5544) + +### Plugin + +- :sunrise: feat: support hide the authentication header in basic-auth with a config [#6039](https://github.com/apache/apisix/pull/6039) +- :sunrise: feat: set proxy_request_buffering dynamically [#6075](https://github.com/apache/apisix/pull/6075) +- :sunrise: feat(mqtt): balance by client id [#6079](https://github.com/apache/apisix/pull/6079) +- :sunrise: feat: add forward-auth plugin [#6037](https://github.com/apache/apisix/pull/6037) +- :sunrise: feat(grpc-web): support gRPC-Web Proxy [#5964](https://github.com/apache/apisix/pull/5964) +- :sunrise: feat(limit-count): add constant key type [#5984](https://github.com/apache/apisix/pull/5984) +- :sunrise: feat(limit-count): allow sharing counter [#5881](https://github.com/apache/apisix/pull/5881) +- :sunrise: feat(splunk): support splunk hec logging plugin [#5819](https://github.com/apache/apisix/pull/5819) +- :sunrise: feat: basic support OPA plugin [#5734](https://github.com/apache/apisix/pull/5734) +- :sunrise: feat: rocketmq logger [#5653](https://github.com/apache/apisix/pull/5653) +- :sunrise: feat(mqtt-proxy): support using route's upstream [#5666](https://github.com/apache/apisix/pull/5666) +- :sunrise: feat(ext-plugin): support to get request body [#5600](https://github.com/apache/apisix/pull/5600) +- :sunrise: feat(plugins): aws lambda serverless [#5594](https://github.com/apache/apisix/pull/5594) +- :sunrise: feat(http/kafka-logger): support to log response body [#5550](https://github.com/apache/apisix/pull/5550) +- :sunrise: feat: Apache OpenWhisk plugin [#5518](https://github.com/apache/apisix/pull/5518) +- :sunrise: feat(plugin): support google cloud logging service [#5538](https://github.com/apache/apisix/pull/5538) + +### Bugfix + +- fix: the prometheus labels are inconsistent when error-log-logger is enabled [#6055](https://github.com/apache/apisix/pull/6055) +- fix(ipv6): allow disabling IPv6 resolve [#6023](https://github.com/apache/apisix/pull/6023) +- fix(mqtt): handle properties for MQTT 5 [#5916](https://github.com/apache/apisix/pull/5916) +- fix(sls-logger): unable to get millisecond part of the timestamp [#5820](https://github.com/apache/apisix/pull/5820) +- fix(mqtt-proxy): client id can be empty [#5816](https://github.com/apache/apisix/pull/5816) +- fix(ext-plugin): don't use stale key [#5782](https://github.com/apache/apisix/pull/5782) +- fix(log-rotate): race between reopen log & compression [#5715](https://github.com/apache/apisix/pull/5715) +- fix(batch-processor): we didn't free stale object actually [#5700](https://github.com/apache/apisix/pull/5700) +- fix: data pollution after passive health check is changed [#5589](https://github.com/apache/apisix/pull/5589) + +## 2.11.0 + +### Change + +- change(wolf-rbac): change default port number and add `authType` parameter to documentation [#5477](https://github.com/apache/apisix/pull/5477) + +### Core + +- 
:sunrise: feat: support advanced matching based on post form [#5409](https://github.com/apache/apisix/pull/5409)
+- :sunrise: feat: initial wasm support [#5288](https://github.com/apache/apisix/pull/5288)
+- :sunrise: feat(control): expose services [#5271](https://github.com/apache/apisix/pull/5271)
+- :sunrise: feat(control): add dump upstream api [#5259](https://github.com/apache/apisix/pull/5259)
+- :sunrise: feat: APISIX can start even when a single etcd cluster node fails [#5158](https://github.com/apache/apisix/pull/5158)
+- :sunrise: feat: support specifying a custom sni in the etcd conf [#5206](https://github.com/apache/apisix/pull/5206)
+
+### Plugin
+
+- :sunrise: feat(plugin): azure serverless functions [#5479](https://github.com/apache/apisix/pull/5479)
+- :sunrise: feat(kafka-logger): supports logging request body [#5501](https://github.com/apache/apisix/pull/5501)
+- :sunrise: feat: provide skywalking logger plugin [#5478](https://github.com/apache/apisix/pull/5478)
+- :sunrise: feat(plugins): Datadog for metrics collection [#5372](https://github.com/apache/apisix/pull/5372)
+- :sunrise: feat(limit-* plugin): fall back to remote_addr when the key is missing [#5422](https://github.com/apache/apisix/pull/5422)
+- :sunrise: feat(limit-count): support multiple variables as key (see the sketch after this list) [#5378](https://github.com/apache/apisix/pull/5378)
+- :sunrise: feat(limit-conn): support multiple variables as key [#5354](https://github.com/apache/apisix/pull/5354)
+- :sunrise: feat(proxy-rewrite): rewrite method [#5292](https://github.com/apache/apisix/pull/5292)
+- :sunrise: feat(limit-req): support multiple variables as key [#5302](https://github.com/apache/apisix/pull/5302)
+- :sunrise: feat(proxy-cache): support memory-based strategy [#5028](https://github.com/apache/apisix/pull/5028)
+- :sunrise: feat(ext-plugin): avoid sending the conf request multiple times [#5183](https://github.com/apache/apisix/pull/5183)
+- :sunrise: feat: Add ldap-auth plugin [#3894](https://github.com/apache/apisix/pull/3894)
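+
+As a rough illustration of the multi-variable key support added to limit-count ([#5378](https://github.com/apache/apisix/pull/5378)), here is a minimal sketch of a route created through the Admin API. It assumes a local instance with the default Admin API address and key and a placeholder upstream at `127.0.0.1:1980`; adjust these values to your own deployment.
+
+```bash
+# Rate-limit on the combination of client address and consumer name.
+# The Admin API key below is the well-known default; replace it with yours.
+curl -s http://127.0.0.1:9080/apisix/admin/routes/1 \
+  -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+  "uri": "/hello",
+  "plugins": {
+    "limit-count": {
+      "count": 10,
+      "time_window": 60,
+      "rejected_code": 503,
+      "key_type": "var_combination",
+      "key": "$remote_addr $consumer_name"
+    }
+  },
+  "upstream": {
+    "type": "roundrobin",
+    "nodes": { "127.0.0.1:1980": 1 }
+  }
+}'
+```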
+
+## 2.10.5
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.10` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2105](https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2105)
+
+## 2.10.4
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.10` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2104](https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2104)
+
+## 2.10.3
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.10` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2103](https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2103)
+
+## 2.10.2
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.10` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2102](https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2102)
+
+## 2.10.1
+
+**This is an LTS maintenance release and you can see the CHANGELOG in `release/2.10` branch.**
+
+[https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2101](https://github.com/apache/apisix/blob/release/2.10/CHANGELOG.md#2101)
+
+## 2.10.0
+
+### Change
+
+- change(debug): move 'enable_debug' from config.yaml to debug.yaml [#5046](https://github.com/apache/apisix/pull/5046)
+- change: use a new name to customize lua_shared_dict in nginx.conf [#5030](https://github.com/apache/apisix/pull/5030)
+- change: drop the support of shell script installation [#4985](https://github.com/apache/apisix/pull/4985)
+
+### Core
+
+- :sunrise: feat(debug-mode): add dynamic debug mode [#5012](https://github.com/apache/apisix/pull/5012)
+- :sunrise: feat: allow injecting logic into APISIX's methods [#5068](https://github.com/apache/apisix/pull/5068)
+- :sunrise: feat: allow configuring fallback SNI [#5000](https://github.com/apache/apisix/pull/5000)
+- :sunrise: feat(stream_route): support CIDR in ip match [#4980](https://github.com/apache/apisix/pull/4980)
+- :sunrise: feat: allow route to inherit hosts from service [#4977](https://github.com/apache/apisix/pull/4977)
+- :sunrise: feat: support configuring the node listening address [#4856](https://github.com/apache/apisix/pull/4856)
+
+### Plugin
+
+- :sunrise: feat(hmac-auth): add request body validation for the hmac-auth plugin [#5038](https://github.com/apache/apisix/pull/5038)
+- :sunrise: feat(proxy-mirror): support mirror requests sample_ratio [#4965](https://github.com/apache/apisix/pull/4965)
+- :sunrise: feat(referer-restriction): add blacklist and message [#4916](https://github.com/apache/apisix/pull/4916)
+- :sunrise: feat(kafka-logger): add cluster name support [#4876](https://github.com/apache/apisix/pull/4876)
+- :sunrise: feat(kafka-logger): add required_acks option [#4878](https://github.com/apache/apisix/pull/4878)
+- :sunrise: feat(uri-blocker): add case insensitive switch [#4868](https://github.com/apache/apisix/pull/4868)
+
+### Bugfix
+
+- fix(radixtree_host_uri): correct matched host [#5124](https://github.com/apache/apisix/pull/5124)
+- fix(radixtree_host_uri): correct matched path [#5104](https://github.com/apache/apisix/pull/5104)
+- fix(nacos): distinguish services that have the same name but are in different groups or namespaces [#5083](https://github.com/apache/apisix/pull/5083)
+- fix(nacos): continue to process other services when a request failed [#5112](https://github.com/apache/apisix/pull/5112)
+- fix(ssl): match sni in a case-insensitive way [#5074](https://github.com/apache/apisix/pull/5074)
+- fix(upstream): should not override default keepalive value [#5054](https://github.com/apache/apisix/pull/5054)
+- fix(DNS): prefer SRV in service discovery [#4992](https://github.com/apache/apisix/pull/4992)
+- fix(consul): retry connecting after a delay [#4979](https://github.com/apache/apisix/pull/4979)
+- fix: avoid copying unwanted data when the domain's IP changed [#4952](https://github.com/apache/apisix/pull/4952)
+- fix(plugin_config): recover plugin when plugin_config changed [#4888](https://github.com/apache/apisix/pull/4888)
+
+## 2.9.0
+
+### Change
+
+- change: rename plugin's balancer method to before_proxy [#4697](https://github.com/apache/apisix/pull/4697)
+
+### Core
+
+- :sunrise: feat: increase timers limitation [#4843](https://github.com/apache/apisix/pull/4843)
+- :sunrise: feat: make it easier to A/B test APISIX by removing "additionalProperties = false" [#4797](https://github.com/apache/apisix/pull/4797)
+- :sunrise: feat: support dash in args (#4519) [#4676](https://github.com/apache/apisix/pull/4676)
+- :sunrise: feat(admin): reject invalid proto [#4750](https://github.com/apache/apisix/pull/4750)
+
+### Plugin
+
+- :sunrise: feat(ext-plugin): support ExtraInfo [#4835](https://github.com/apache/apisix/pull/4835)
+- :sunrise: feat(gzip): support special * to match any type [#4817](https://github.com/apache/apisix/pull/4817)
+- :sunrise: feat(real-ip): implement the first version
[#4813](https://github.com/apache/apisix/pull/4813) +- :sunrise: feat(limit-*): add custom reject-message for traffic control [#4808](https://github.com/apache/apisix/pull/4808) +- :sunrise: feat: Request-ID plugin add snowflake algorithm [#4559](https://github.com/apache/apisix/pull/4559) +- :sunrise: feat: Added authz-casbin plugin and doc and tests for it [#4710](https://github.com/apache/apisix/pull/4710) +- :sunrise: feat: add error log skywalking reporter [#4633](https://github.com/apache/apisix/pull/4633) +- :sunrise: feat(ext-plugin): send the idempotent key when preparing conf [#4736](https://github.com/apache/apisix/pull/4736) + +### Bugfix + +- fix: the issue that plugins in global rule may be cached to route [#4867](https://github.com/apache/apisix/pull/4867) +- fix(grpc-transcode): support converting nested message [#4859](https://github.com/apache/apisix/pull/4859) +- fix(authz-keycloak): set permissions as empty table when lazy_load_path is false [#4845](https://github.com/apache/apisix/pull/4845) +- fix(proxy-cache): keep cache_method same with nginx's proxy_cache_methods [#4814](https://github.com/apache/apisix/pull/4814) +- fix(admin): inject updatetime when the request is PATCH with sub path [#4765](https://github.com/apache/apisix/pull/4765) +- fix(admin): check username for updating consumer [#4756](https://github.com/apache/apisix/pull/4756) +- fix(error-log-logger): avoid sending stale error log [#4690](https://github.com/apache/apisix/pull/4690) +- fix(grpc-transcode): handle enum type [#4706](https://github.com/apache/apisix/pull/4706) +- fix: when a request caused a 500 error, the status was converted to 405 [#4696](https://github.com/apache/apisix/pull/4696) + +## 2.8.0 + +### Change + +- change: enable stream proxy only by default [#4580](https://github.com/apache/apisix/pull/4580) + +### Core + +- :sunrise: feat: allow user-defined balancer with metadata in node [#4605](https://github.com/apache/apisix/pull/4605) +- :sunrise: feat: Add option retry_timeout that like nginx's proxy_next_upstream_timeout [#4574](https://github.com/apache/apisix/pull/4574) +- :sunrise: feat: enable balancer phase for plugins [#4549](https://github.com/apache/apisix/pull/4549) +- :sunrise: feat: allow setting separate keepalive pool [#4506](https://github.com/apache/apisix/pull/4506) +- :sunrise: feat: enable etcd health-check [#4191](https://github.com/apache/apisix/pull/4191) + +### Plugin + +- :sunrise: feat: add gzip plugin [#4640](https://github.com/apache/apisix/pull/4640) +- :sunrise: feat(plugin): Add new plugin ua-restriction for bot spider restriction [#4587](https://github.com/apache/apisix/pull/4587) +- :sunrise: feat(stream): add ip-restriction [#4602](https://github.com/apache/apisix/pull/4602) +- :sunrise: feat(stream): add limit-conn [#4515](https://github.com/apache/apisix/pull/4515) +- :sunrise: feat: increase ext-plugin timeout to 60s [#4557](https://github.com/apache/apisix/pull/4557) +- :sunrise: feat(key-auth): supporting key-auth plugin to get key from query string [#4490](https://github.com/apache/apisix/pull/4490) +- :sunrise: feat(kafka-logger): support for specified the log formats via admin API. 
[#4483](https://github.com/apache/apisix/pull/4483) + +### Bugfix + +- fix(stream): sni router is broken when session reuses [#4607](https://github.com/apache/apisix/pull/4607) +- fix: the limit-conn plugin cannot effectively intercept requests in special scenarios [#4585](https://github.com/apache/apisix/pull/4585) +- fix: ref check while deleting proto via Admin API [#4575](https://github.com/apache/apisix/pull/4575) +- fix(skywalking): handle conflict between global rule and route [#4589](https://github.com/apache/apisix/pull/4589) +- fix: `ctx.var.cookie_*` cookie not found log [#4564](https://github.com/apache/apisix/pull/4564) +- fix(request-id): we can use different ids with the same request [#4479](https://github.com/apache/apisix/pull/4479) + +## 2.7.0 + +### Change + +- change: check metadata_schema with check_schema like the other schema [#4381](https://github.com/apache/apisix/pull/4381) +- change(echo): remove odd auth_value [#4055](https://github.com/apache/apisix/pull/4055) +- fix(admin): correct the resources' count field and change its type to integer [#4385](https://github.com/apache/apisix/pull/4385) + +### Core + +- :sunrise: feat(stream): support client certificate verification [#4445](https://github.com/apache/apisix/pull/4445) +- :sunrise: feat(stream): accept tls over tcp [#4409](https://github.com/apache/apisix/pull/4409) +- :sunrise: feat(stream): support domain in the upstream [#4386](https://github.com/apache/apisix/pull/4386) +- :sunrise: feat(cli): wrap nginx quit cmd [#4360](https://github.com/apache/apisix/pull/4360) +- :sunrise: feat: allow to set custom timeout for route [#4340](https://github.com/apache/apisix/pull/4340) +- :sunrise: feat: nacos discovery support group [#4325](https://github.com/apache/apisix/pull/4325) +- :sunrise: feat: nacos discovery support namespace [#4313](https://github.com/apache/apisix/pull/4313) + +### Plugin + +- :sunrise: feat(client-control): set client_max_body_size dynamically [#4423](https://github.com/apache/apisix/pull/4423) +- :sunrise: feat(ext-plugin): stop the runner with SIGTERM [#4367](https://github.com/apache/apisix/pull/4367) +- :sunrise: feat(limit-req) support nodelay [#4395](https://github.com/apache/apisix/pull/4395) +- :sunrise: feat(mqtt-proxy): support domain [#4391](https://github.com/apache/apisix/pull/4391) +- :sunrise: feat(redirect): support appending query string [#4298](https://github.com/apache/apisix/pull/4298) + +### Bugfix + +- fix: solve memory leak when the client aborts [#4405](https://github.com/apache/apisix/pull/4405) +- fix(etcd): check res.body.error before accessing the data [#4371](https://github.com/apache/apisix/pull/4371) +- fix(ext-plugin): when token is stale, refresh token and try again [#4345](https://github.com/apache/apisix/pull/4345) +- fix(ext-plugin): pass environment variables [#4349](https://github.com/apache/apisix/pull/4349) +- fix: ensure the plugin is always reloaded [#4319](https://github.com/apache/apisix/pull/4319) + +## 2.6.0 + +### Change + +- change(prometheus): redesign the latency metrics & update grafana [#3993](https://github.com/apache/apisix/pull/3993) +- change(prometheus): don't expose metrics to internet [#3994](https://github.com/apache/apisix/pull/3994) +- change(limit-count): ensure redis cluster name is set correctly [#3910](https://github.com/apache/apisix/pull/3910) +- change: drop support of OpenResty 1.15 [#3960](https://github.com/apache/apisix/pull/3960) + +### Core + +- :sunrise: feat: support passing different host headers in multiple 
nodes [#4208](https://github.com/apache/apisix/pull/4208) +- :sunrise: feat: add 50x html for error page [#4164](https://github.com/apache/apisix/pull/4164) +- :sunrise: feat: support to use upstream_id in stream_route [#4121](https://github.com/apache/apisix/pull/4121) +- :sunrise: feat: support client certificate verification [#4034](https://github.com/apache/apisix/pull/4034) +- :sunrise: feat: add nacos support [#3820](https://github.com/apache/apisix/pull/3820) +- :sunrise: feat: patch tcp.sock.connect to use our DNS resolver [#4114](https://github.com/apache/apisix/pull/4114) + +### Plugin + +- :sunrise: feat(redirect): support uri encoding [#4244](https://github.com/apache/apisix/pull/4244) +- :sunrise: feat(key-auth): allow customizing header [#4013](https://github.com/apache/apisix/pull/4013) +- :sunrise: feat(response-rewrite): allow using variable in the header [#4194](https://github.com/apache/apisix/pull/4194) +- :sunrise: feat(ext-plugin): APISIX can support Java, Go and other languages to implement custom plugin [#4183](https://github.com/apache/apisix/pull/4183) + +### Bugfix + +- fix(DNS): support IPv6 resolver [#4242](https://github.com/apache/apisix/pull/4242) +- fix(healthcheck): only one_loop is needed in the passive health check report [#4116](https://github.com/apache/apisix/pull/4116) +- fix(traffic-split): configure multiple "rules", the request will be confused between upstream [#4092](https://github.com/apache/apisix/pull/4092) +- fix: ensure upstream with domain is cached [#4061](https://github.com/apache/apisix/pull/4061) +- fix: be compatible with the router created before 2.5 [#4056](https://github.com/apache/apisix/pull/4056) +- fix(standalone): the conf should be available during start [#4027](https://github.com/apache/apisix/pull/4027) +- fix: ensure atomic operation in limit-count plugin [#3991](https://github.com/apache/apisix/pull/3991) + +## 2.5.0 + +**The changes marked with :warning: are not backward compatible.** +**Please upgrade your data accordingly before upgrading to this version.** +**[#3809](https://github.com/apache/apisix/pull/3809) Means that empty vars will make the route fail to match any requests.** + +### Change + +- :warning: change: remove unused consumer.id [#3868](https://github.com/apache/apisix/pull/3868) +- :warning: change: remove deprecated upstream.enable_websocket [#3854](https://github.com/apache/apisix/pull/3854) +- change(zipkin): rearrange the child span [#3877](https://github.com/apache/apisix/pull/3877) + +### Core + +- :sunrise: feat: support mTLS with etcd [#3905](https://github.com/apache/apisix/pull/3905) +- :warning: feat: upgrade lua-resty-expr/radixtree to support logical expression [#3809](https://github.com/apache/apisix/pull/3809) +- :sunrise: feat: load etcd configuration when apisix starts [#3799](https://github.com/apache/apisix/pull/3799) +- :sunrise: feat: let balancer support priority [#3755](https://github.com/apache/apisix/pull/3755) +- :sunrise: feat: add control api for discovery module [#3742](https://github.com/apache/apisix/pull/3742) + +### Plugin + +- :sunrise: feat(skywalking): allow destroy and configure report interval for reporter [#3925](https://github.com/apache/apisix/pull/3925) +- :sunrise: feat(traffic-split): the upstream pass_host needs to support IP mode [#3870](https://github.com/apache/apisix/pull/3870) +- :sunrise: feat: Add filter on HTTP methods for consumer-restriction plugin [#3691](https://github.com/apache/apisix/pull/3691) +- :sunrise: feat: add allow_origins_by_regex to 
cors plugin [#3839](https://github.com/apache/apisix/pull/3839) +- :sunrise: feat: support conditional response rewrite [#3577](https://github.com/apache/apisix/pull/3577) + +### Bugfix + +- fix(error-log-logger): the logger should be run in each process [#3912](https://github.com/apache/apisix/pull/3912) +- fix: use the builtin server by default [#3907](https://github.com/apache/apisix/pull/3907) +- fix(traffic-split): binding upstream via upstream_id is invalid [#3842](https://github.com/apache/apisix/pull/3842) +- fix: correct the validation for ssl_trusted_certificate [#3832](https://github.com/apache/apisix/pull/3832) +- fix: don't override cache relative headers [#3789](https://github.com/apache/apisix/pull/3789) +- fix: fail to run `make deps` on macOS [#3718](https://github.com/apache/apisix/pull/3718) + +## 2.4.0 + +### Change + +- change: global rules should not be executed on the internal api by default [#3396](https://github.com/apache/apisix/pull/3396) +- change: default to cache DNS record according to the TTL [#3530](https://github.com/apache/apisix/pull/3530) + +### Core + +- :sunrise: feat: support SRV record [#3686](https://github.com/apache/apisix/pull/3686) +- :sunrise: feat: add dns discovery [#3629](https://github.com/apache/apisix/pull/3629) +- :sunrise: feat: add consul kv discovery module [#3615](https://github.com/apache/apisix/pull/3615) +- :sunrise: feat: support to bind plugin config by `plugin_config_id` [#3567](https://github.com/apache/apisix/pull/3567) +- :sunrise: feat: support listen http2 with plaintext [#3547](https://github.com/apache/apisix/pull/3547) +- :sunrise: feat: support DNS AAAA record [#3484](https://github.com/apache/apisix/pull/3484) + +### Plugin + +- :sunrise: feat: the traffic-split plugin supports upstream_id [#3512](https://github.com/apache/apisix/pull/3512) +- :sunrise: feat(zipkin): support b3 req header [#3551](https://github.com/apache/apisix/pull/3551) + +### Bugfix + +- fix(chash): ensure retry can try every node [#3651](https://github.com/apache/apisix/pull/3651) +- fix: script does not work when the route is bound to a service [#3678](https://github.com/apache/apisix/pull/3678) +- fix: use openssl111 in openresty dir in precedence [#3603](https://github.com/apache/apisix/pull/3603) +- fix(zipkin): don't cache the per-req sample ratio [#3522](https://github.com/apache/apisix/pull/3522) + +For more changes, please refer to [Milestone](https://github.com/apache/apisix/milestone/13) + +## 2.3.0 + +### Change + +- fix: use luajit by default when run apisix [#3335](https://github.com/apache/apisix/pull/3335) +- feat: use luasocket instead of curl in etcd.lua [#2965](https://github.com/apache/apisix/pull/2965) + +### Core + +- :sunrise: feat: support to communicate with etcd by TLS without verification in command line [#3415](https://github.com/apache/apisix/pull/3415) +- :sunrise: feat: chaos test on route could still works when etcd is down [#3404](https://github.com/apache/apisix/pull/3404) +- :sunrise: feat: ewma use p2c to improve performance [#3300](https://github.com/apache/apisix/pull/3300) +- :sunrise: feat: support specifying https in upstream to talk with https backend [#3430](https://github.com/apache/apisix/pull/3430) +- :sunrise: feat: allow customizing lua_package_path & lua_package_cpath [#3417](https://github.com/apache/apisix/pull/3417) +- :sunrise: feat: allow to pass SNI in HTTPS proxy [#3420](https://github.com/apache/apisix/pull/3420) +- :sunrise: feat: support gRPCS 
[#3411](https://github.com/apache/apisix/pull/3411)
+- :sunrise: feat: allow getting upstream health check status via the control API [#3345](https://github.com/apache/apisix/pull/3345)
+- :sunrise: feat: support dubbo [#3224](https://github.com/apache/apisix/pull/3224)
+- :sunrise: feat: load balance by least connections [#3304](https://github.com/apache/apisix/pull/3304)
+
+### Plugin
+
+- :sunrise: feat: kafka-logger now reuses the kafka producer [#3429](https://github.com/apache/apisix/pull/3429)
+- :sunrise: feat(authz-keycloak): dynamic scope and resource mapping. [#3308](https://github.com/apache/apisix/pull/3308)
+- :sunrise: feat: proxy-rewrite's host supports a host with port [#3428](https://github.com/apache/apisix/pull/3428)
+- :sunrise: feat(fault-injection): support conditional fault injection using nginx variables [#3363](https://github.com/apache/apisix/pull/3363)
+
+### Bugfix
+
+- fix(standalone): require consumer's id to be the same as username [#3394](https://github.com/apache/apisix/pull/3394)
+- fix: support upstream_id & consumer with grpc [#3387](https://github.com/apache/apisix/pull/3387)
+- fix: set conf info when a global rule is hit without a matched rule [#3332](https://github.com/apache/apisix/pull/3332)
+- fix: avoid caching outdated discovery upstream nodes [#3295](https://github.com/apache/apisix/pull/3295)
+- fix: create the health checker in `access` phase [#3240](https://github.com/apache/apisix/pull/3240)
+- fix: make set_more_retries() work when upstream_type is chash [#2676](https://github.com/apache/apisix/pull/2676)
+
+For more changes, please refer to [Milestone](https://github.com/apache/apisix/milestone/12)
+
+## 2.2.0
+
+### Change
+
+- disable node-status plugin by default [#2968](https://github.com/apache/apisix/pull/2968)
+- k8s_deployment_info is no longer allowed in upstream [#3098](https://github.com/apache/apisix/pull/3098)
+- don't treat route segment with ':' as parameter by default [#3154](https://github.com/apache/apisix/pull/3154)
+
+### Core
+
+- :sunrise: allow creating consumers with multiple auth plugins [#2898](https://github.com/apache/apisix/pull/2898)
+- :sunrise: increase the delay before resyncing etcd [#2977](https://github.com/apache/apisix/pull/2977)
+- :sunrise: support enable/disable route [#2943](https://github.com/apache/apisix/pull/2943)
+- :sunrise: route according to the graphql attributes [#2964](https://github.com/apache/apisix/pull/2964)
+- :sunrise: share etcd auth token [#2932](https://github.com/apache/apisix/pull/2932)
+- :sunrise: add control API [#3048](https://github.com/apache/apisix/pull/3048)
+
+### Plugin
+
+- :sunrise: feat(limit-count): use 'remote_addr' as default key [#2927](https://github.com/apache/apisix/pull/2927)
+- :sunrise: feat(fault-injection): support Nginx variable in abort.body [#2986](https://github.com/apache/apisix/pull/2986)
+- :sunrise: feat: implement new plugin `server-info` [#2926](https://github.com/apache/apisix/pull/2926)
+- :sunrise: feat: add batch process metrics [#3070](https://github.com/apache/apisix/pull/3070)
+- :sunrise: feat: implement traffic splitting plugin [#2935](https://github.com/apache/apisix/pull/2935)
+- :sunrise: feat: the proxy-rewrite plugin supports passing nginx variables within headers (see the sketch after this list) [#3144](https://github.com/apache/apisix/pull/3144)
+- :sunrise: feat: make the headers added to the request by the openid-connect plugin configurable [#2903](https://github.com/apache/apisix/pull/2903)
+- :sunrise: feat: support vars in upstream_uri on the proxy-rewrite plugin [#3139](https://github.com/apache/apisix/pull/3139)
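+
+As a rough sketch of passing an Nginx variable through a rewritten header ([#3144](https://github.com/apache/apisix/pull/3144)), the snippet below assumes the default Admin API address and key, the 2.x flat `headers` map of the proxy-rewrite plugin, and a placeholder upstream at `127.0.0.1:1980`; treat it as illustrative rather than a verified configuration.
+
+```bash
+# Rewrite the path and forward the client address in a custom header.
+# The Admin API key below is the well-known default; replace it with yours.
+curl -s http://127.0.0.1:9080/apisix/admin/routes/2 \
+  -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+  "uri": "/old/index.html",
+  "plugins": {
+    "proxy-rewrite": {
+      "uri": "/new/index.html",
+      "headers": {
+        "X-Client-Addr": "$remote_addr"
+      }
+    }
+  },
+  "upstream": {
+    "type": "roundrobin",
+    "nodes": { "127.0.0.1:1980": 1 }
+  }
+}'
+```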
+
+### Bugfix
+
+- the basic-auth plugin should run in the rewrite phase. [#2905](https://github.com/apache/apisix/pull/2905)
+- fixed the ineffective config update in http/udp-logger [#2901](https://github.com/apache/apisix/pull/2901)
+- always save the limit concurrency data and release the statistical state in the log phase [#2465](https://github.com/apache/apisix/pull/2465)
+- avoid duplicate auto-generated id [#3003](https://github.com/apache/apisix/pull/3003)
+- fix: ctx being contaminated due to a new feature of openresty 1.19. **For openresty 1.19 users, it is recommended to upgrade the APISIX version as soon as possible.** [#3105](https://github.com/apache/apisix/pull/3105)
+- fix: correct the validation of route.vars [#3124](https://github.com/apache/apisix/pull/3124)
+
+For more changes, please refer to [Milestone](https://github.com/apache/apisix/milestone/10)
+
+## 2.1.0
+
+### Core
+
+- :sunrise: **support ENV variable in configuration.** [#2743](https://github.com/apache/apisix/pull/2743)
+- :sunrise: **support TLS connection with etcd.** [#2548](https://github.com/apache/apisix/pull/2548)
+- generate create/update_time automatically. [#2740](https://github.com/apache/apisix/pull/2740)
+- add a deprecation log for enable_websocket in upstream. [#2691](https://github.com/apache/apisix/pull/2691)
+- add a deprecation log for consumer id. [#2829](https://github.com/apache/apisix/pull/2829)
+- Added `X-APISIX-Upstream-Status` header to distinguish 5xx errors from the upstream or APISIX itself. [#2817](https://github.com/apache/apisix/pull/2817)
+- support Nginx configuration snippet. [#2803](https://github.com/apache/apisix/pull/2803)
+
+### Plugin
+
+- :sunrise: **Upgrade protocol to support Apache SkyWalking 8.0** [#2389](https://github.com/apache/apisix/pull/2389). This version only supports the SkyWalking 8.0 protocol. The plugin is disabled by default; you need to modify config.yaml to enable it, which is not backward compatible.
+- :sunrise: add aliyun sls logging plugin. [#2169](https://github.com/apache/apisix/issues/2169)
+- proxy-cache: the cache_zone field in the schema should be optional. [#2776](https://github.com/apache/apisix/pull/2776)
+- fix: validate plugin configuration in the DP [#2856](https://github.com/apache/apisix/pull/2856)
+
+### Bugfix
+
+- :bug: fix(etcd): handle etcd compaction. [#2687](https://github.com/apache/apisix/pull/2687)
+- fix: move `conf/cert` to `t/certs` and disable ssl by default, which is not backward compatible. [#2112](https://github.com/apache/apisix/pull/2112)
+- fix: check the decrypt key to prevent the lua thread from being aborted [#2815](https://github.com/apache/apisix/pull/2815)
+
+### Not downward compatible features in future versions
+
+- In the 2.3 release, the consumer will only support user names and discard the id. The consumer needs to manually clean up the id field in etcd, otherwise the schema verification will report an error during use.
+- In the 2.3 release, opening websocket on the upstream will no longer be supported.
+- In version 3.0, the data plane and control plane will be separated into two independent ports, that is, the current port 9080 will only process data plane requests and no longer process admin API requests.
+
+For more changes, please refer to [Milestone](https://github.com/apache/apisix/milestone/8)
+
+## 2.0.0
+
+This is a release candidate.
+
+### Core
+
+- :sunrise: **Migrate from etcd v2 to v3 protocol, which is not backward compatible.
Apache APISIX only supports etcd 3.4 and above versions.** [#2036](https://github.com/apache/apisix/pull/2036) +- add labels for upstream object.[#2279](https://github.com/apache/apisix/pull/2279) +- add managed fields in json schema for resources, such as create_time and update_time.[#2444](https://github.com/apache/apisix/pull/2444) +- use interceptors to protect plugin's route[#2416](https://github.com/apache/apisix/pull/2416) +- support multiple ports for http and https listen.[#2409](https://github.com/apache/apisix/pull/2409) +- implement `core.sleep`.[#2397](https://github.com/apache/apisix/pull/2397) + +### Plugin + +- :sunrise: **add AK/SK(HMAC) auth plugin.**[#2192](https://github.com/apache/apisix/pull/2192) +- :sunrise: add referer-restriction plugin.[#2352](https://github.com/apache/apisix/pull/2352) +- `limit-count` support to use `redis` cluster.[#2406](https://github.com/apache/apisix/pull/2406) +- feat(proxy-cache): store the temporary file under cache directory. [#2317](https://github.com/apache/apisix/pull/2317) +- feat(http-logger): support for specified the log formats via admin API [#2309](https://github.com/apache/apisix/pull/2309) + +### Bugfix + +- :bug: **`high priority`** When the data plane receives an instruction to delete a resource(router or upstream etc.), it does not properly clean up the cache, resulting in the existing resources cannot be found. This problem only occurs in the case of long and frequent deletion operations.[#2168](https://github.com/apache/apisix/pull/2168) +- fix routing priority does not take effect.[#2447](https://github.com/apache/apisix/pull/2447) +- set random seed for each worker process at `init_worker` phase, only `init` phase is not enough.[#2357](https://github.com/apache/apisix/pull/2357) +- remove unsupported algorithm in jwt plugin.[#2356](https://github.com/apache/apisix/pull/2356) +- return correct response code when `http_to_https` enabled in redirect plugin.[#2311](https://github.com/apache/apisix/pull/2311) + +For more changes, please refer to [Milestone](https://github.com/apache/apisix/milestone/7) + +### CVE + +- Fixed Admin API default access token vulnerability + +## 1.5.0 + +### Core + +- Admin API: support authentication with SSL certificates. [1747](https://github.com/apache/apisix/pull/1747) +- Admin API: support both standard `PATCH` and sub path `PATCH`. [1930](https://github.com/apache/apisix/pull/1930) +- HealthCheck: supports custom host port. [1914](https://github.com/apache/apisix/pull/1914) +- Upstream: supports turning off the default retry mechanism. [1919](https://github.com/apache/apisix/pull/1919) +- URI: supports delete the '/' at the end of the `URI`. [1766](https://github.com/apache/apisix/pull/1766) + +### New Plugin + +- :sunrise: **Request Validator** [1709](https://github.com/apache/apisix/pull/1709) + +### Improvements + +- change: nginx worker_shutdown_timeout is changed from 3s to recommended value 240s. [1883](https://github.com/apache/apisix/pull/1883) +- change: the `healthcheck` timeout time type changed from `integer` to `number`. [1892](https://github.com/apache/apisix/pull/1892) +- change: the `request-validation` plugin input parameter supports `Schema` validation. [1920](https://github.com/apache/apisix/pull/1920) +- change: add comments for Makefile `install` command. [1912](https://github.com/apache/apisix/pull/1912) +- change: update comment for config.yaml `etcd.timeout` configuration. [1929](https://github.com/apache/apisix/pull/1929) +- change: add more prometheus metrics. 
[1888](https://github.com/apache/apisix/pull/1888) +- change: add more configuration options for `cors` plugin. [1963](https://github.com/apache/apisix/pull/1963) + +### Bugfix + +- fixed: failed to get `host` in health check configuration. [1871](https://github.com/apache/apisix/pull/1871) +- fixed: should not save the runtime data of plugin into `etcd`. [1910](https://github.com/apache/apisix/pull/1910) +- fixed: run `apisix start` several times will start multi nginx processes. [1913](https://github.com/apache/apisix/pull/1913) +- fixed: read the request body from the temporary file if it was cached. [1863](https://github.com/apache/apisix/pull/1863) +- fixed: batch processor name and error return type. [1927](https://github.com/apache/apisix/pull/1927) +- fixed: failed to read redis.ttl in `limit-count` plugin. [1928](https://github.com/apache/apisix/pull/1928) +- fixed: passive health check seems never provide a healthy report. [1918](https://github.com/apache/apisix/pull/1918) +- fixed: avoid to modify the original plugin conf. [1958](https://github.com/apache/apisix/pull/1958) +- fixed: the test case of `invalid-upstream` is unstable and sometimes fails to run. [1925](https://github.com/apache/apisix/pull/1925) + +### Doc + +- doc: added APISIX Lua Coding Style Guide. [1874](https://github.com/apache/apisix/pull/1874) +- doc: fixed link syntax in README.md. [1894](https://github.com/apache/apisix/pull/1894) +- doc: fixed image links in zh-cn benchmark. [1896](https://github.com/apache/apisix/pull/1896) +- doc: fixed typos in `FAQ`、`admin-api`、`architecture-design`、`discovery`、`prometheus`、`proxy-rewrite`、`redirect`、`http-logger` documents. [1916](https://github.com/apache/apisix/pull/1916) +- doc: added improvements for OSx unit tests and request validation plugin. [1926](https://github.com/apache/apisix/pull/1926) +- doc: fixed typos in `architecture-design` document. [1938](https://github.com/apache/apisix/pull/1938) +- doc: added the default import path of `Nginx` for unit testing in `Linux` and `macOS` systems in the `how-to-build` document. [1936](https://github.com/apache/apisix/pull/1936) +- doc: add `request-validation` plugin chinese document. [1932](https://github.com/apache/apisix/pull/1932) +- doc: fixed file path of `gRPC transcoding` in `README`. [1945](https://github.com/apache/apisix/pull/1945) +- doc: fixed `uri-blocker` plugin path error in `README`. [1950](https://github.com/apache/apisix/pull/1950) +- doc: fixed `grpc-transcode` plugin path error in `README`. [1946](https://github.com/apache/apisix/pull/1946) +- doc: removed unnecessary configurations for `k8s` document. [1891](https://github.com/apache/apisix/pull/1891) + +## 1.4.1 + +### Bugfix + +- Fix: multiple SSL certificates are configured, but only one certificate working fine. 
[1818](https://github.com/apache/incubator-apisix/pull/1818) + +## 1.4.0 + +### Core + +- Admin API: Support unique names for routes [1655](https://github.com/apache/incubator-apisix/pull/1655) +- Optimization of log buffer size and flush time [1570](https://github.com/apache/incubator-apisix/pull/1570) + +### New plugins + +- :sunrise: **Apache Skywalking plugin** [1241](https://github.com/apache/incubator-apisix/pull/1241) +- :sunrise: **Keycloak Identity Server Plugin** [1701](https://github.com/apache/incubator-apisix/pull/1701) +- :sunrise: **Echo Plugin** [1632](https://github.com/apache/incubator-apisix/pull/1632) +- :sunrise: **Consume Restriction Plugin** [1437](https://github.com/apache/incubator-apisix/pull/1437) + +### Improvements + +- Batch Request : Copy all headers to every request [1697](https://github.com/apache/incubator-apisix/pull/1697) +- SSL private key encryption [1678](https://github.com/apache/incubator-apisix/pull/1678) +- Improvement of docs for multiple plugins + +## 1.3.0 + +The 1.3 version is mainly for security update. + +### Security + +- reject invalid header[#1462](https://github.com/apache/incubator-apisix/pull/1462) and uri safe encode[#1461](https://github.com/apache/incubator-apisix/pull/1461) +- only allow 127.0.0.1 access admin API and dashboard by default. [#1458](https://github.com/apache/incubator-apisix/pull/1458) + +### Plugin + +- :sunrise: **add batch request plugin**. [#1388](https://github.com/apache/incubator-apisix/pull/1388) +- implemented plugin `sys logger`. [#1414](https://github.com/apache/incubator-apisix/pull/1414) + +## 1.2.0 + +The 1.2 version brings many new features, including core and plugins. + +### Core + +- :sunrise: **support etcd cluster**. [#1283](https://github.com/apache/incubator-apisix/pull/1283) +- using the local DNS resolver by default, which is friendly for k8s. [#1387](https://github.com/apache/incubator-apisix/pull/1387) +- support to run `header_filter`, `body_filter` and `log` phases for global rules. [#1364](https://github.com/apache/incubator-apisix/pull/1364) +- changed the `lua/apisix` dir to `apisix`(**not backward compatible**). [#1351](https://github.com/apache/incubator-apisix/pull/1351) +- add dashboard as submodule. [#1360](https://github.com/apache/incubator-apisix/pull/1360) +- allow adding custom shared dict. [#1367](https://github.com/apache/incubator-apisix/pull/1367) + +### Plugin + +- :sunrise: **add Apache Kafka plugin**. [#1312](https://github.com/apache/incubator-apisix/pull/1312) +- :sunrise: **add CORS plugin**. [#1327](https://github.com/apache/incubator-apisix/pull/1327) +- :sunrise: **add TCP logger plugin**. [#1221](https://github.com/apache/incubator-apisix/pull/1221) +- :sunrise: **add UDP logger plugin**. [1070](https://github.com/apache/incubator-apisix/pull/1070) +- :sunrise: **add proxy mirror plugin**. [#1288](https://github.com/apache/incubator-apisix/pull/1288) +- :sunrise: **add proxy cache plugin**. [#1153](https://github.com/apache/incubator-apisix/pull/1153) +- drop websocket enable control in proxy-rewrite plugin(**not backward compatible**). [1332](https://github.com/apache/incubator-apisix/pull/1332) +- Adding support to public key based introspection for OAuth plugin. [#1266](https://github.com/apache/incubator-apisix/pull/1266) +- response-rewrite plugin support binary data to client by base64. [#1381](https://github.com/apache/incubator-apisix/pull/1381) +- plugin `grpc-transcode` supports grpc deadline. 
[#1149](https://github.com/apache/incubator-apisix/pull/1149) +- support password auth for limit-count-redis. [#1150](https://github.com/apache/incubator-apisix/pull/1150) +- Zipkin plugin add service name and report local server IP. [#1386](https://github.com/apache/incubator-apisix/pull/1386) +- add `change_pwd` and `user_info` for Wolf-Rbac plugin. [#1204](https://github.com/apache/incubator-apisix/pull/1204) + +### Admin API + +- :sunrise: support key-based authentication for Admin API(**not backward compatible**). [#1169](https://github.com/apache/incubator-apisix/pull/1169) +- hide SSL private key in admin API. [#1240](https://github.com/apache/incubator-apisix/pull/1240) + +### Bugfix + +- missing `clear` table before to reuse table (**will cause memory leak**). [#1134](https://github.com/apache/incubator-apisix/pull/1134) +- print warning error message if the yaml route file is invalid. [#1141](https://github.com/apache/incubator-apisix/pull/1141) +- the balancer IP may be nil, use an empty string instead. [#1166](https://github.com/apache/incubator-apisix/pull/1166) +- plugin node-status and heartbeat don't have schema. [#1249](https://github.com/apache/incubator-apisix/pull/1249) +- the plugin basic-auth needs required field. [#1251](https://github.com/apache/incubator-apisix/pull/1251) +- check the count of upstream valid node. [#1292](https://github.com/apache/incubator-apisix/pull/1292) + +## 1.1.0 + +This release is mainly to strengthen the stability of the code and add more documentation. + +### Core + +- always specify perl include path when running test cases. [#1097](https://github.com/apache/incubator-apisix/pull/1097) +- Feature: Add support for PROXY Protocol. [#1113](https://github.com/apache/incubator-apisix/pull/1113) +- enhancement: add verify command to verify apisix configuration(nginx.conf). [#1112](https://github.com/apache/incubator-apisix/pull/1112) +- feature: increase the default size of the core file. [#1105](https://github.com/apache/incubator-apisix/pull/1105) +- feature: make the number of file is as configurable as the connections. [#1098](https://github.com/apache/incubator-apisix/pull/1098) +- core: improve the core.log module. [#1093](https://github.com/apache/incubator-apisix/pull/1093) +- Modify bin/apisix to support the SO_REUSEPORT. [#1085](https://github.com/apache/incubator-apisix/pull/1085) + +### Doc + +- doc: add link to download grafana meta data. [#1119](https://github.com/apache/incubator-apisix/pull/1119) +- doc: Update README.md. [#1118](https://github.com/apache/incubator-apisix/pull/1118) +- doc: doc: add wolf-rbac plugin. [#1116](https://github.com/apache/incubator-apisix/pull/1116) +- doc: update the download link of rpm. [#1108](https://github.com/apache/incubator-apisix/pull/1108) +- doc: add more english article. [#1092](https://github.com/apache/incubator-apisix/pull/1092) +- Adding contribution guidelines for the documentation. [#1086](https://github.com/apache/incubator-apisix/pull/1086) +- doc: getting-started.md check. [#1084](https://github.com/apache/incubator-apisix/pull/1084) +- Added additional information and refactoring sentences. [#1078](https://github.com/apache/incubator-apisix/pull/1078) +- Update admin-api-cn.md. [#1067](https://github.com/apache/incubator-apisix/pull/1067) +- Update architecture-design-cn.md. [#1065](https://github.com/apache/incubator-apisix/pull/1065) + +### CI + +- ci: remove patch which is no longer necessary and removed in the upst. 
[#1090](https://github.com/apache/incubator-apisix/pull/1090) +- fix path error when install with luarocks. [#1068](https://github.com/apache/incubator-apisix/pull/1068) +- travis: run a apisix instance which intalled by luarocks. [#1063](https://github.com/apache/incubator-apisix/pull/1063) + +### Plugins + +- feature: Add wolf rbac plugin. [#1095](https://github.com/apache/incubator-apisix/pull/1095) +- Adding UDP logger plugin. [#1070](https://github.com/apache/incubator-apisix/pull/1070) +- enhancement: using internal request instead of external request in node-status plugin. [#1109](https://github.com/apache/incubator-apisix/pull/1109) + +## 1.0.0 + +This release is mainly to strengthen the stability of the code and add more documentation. + +### Core + +- :sunrise: Support routing priority. You can match different upstream services based on conditions such as header, args, priority, etc. under the same URI. [#998](https://github.com/apache/incubator-apisix/pull/998) +- When no route is matched, an error message is returned. To distinguish it from other 404 requests. [#1013](https://github.com/apache/incubator-apisix/pull/1013) +- The address of the dashboard `/apisix/admin` supports CORS. [#982](https://github.com/apache/incubator-apisix/pull/982) +- The jsonschema validator returns a clearer error message. [#1011](https://github.com/apache/incubator-apisix/pull/1011) +- Upgrade the `ngx_var` module to version 0.5. [#1005](https://github.com/apache/incubator-apisix/pull/1005) +- Upgrade the `lua-resty-etcd` module to version 0.8. [#980](https://github.com/apache/incubator-apisix/pull/980) +- In development mode, the number of workers is automatically adjusted to 1. [#926](https://github.com/apache/incubator-apisix/pull/926) +- Remove the nginx.conf file from the code repository. It is automatically generated every time and cannot be modified manually. [#974](https://github.com/apache/incubator-apisix/pull/974) + +### Doc + +- Added documentation on how to customize development plugins. [#909](https://github.com/apache/incubator-apisix/pull/909) +- fixed example's bugs in the serverless plugin documentation. [#1006](https://github.com/apache/incubator-apisix/pull/1006) +- Added documentation for using the Oauth plugin. [#987](https://github.com/apache/incubator-apisix/pull/987) +- Added dashboard compiled documentation. [#985](https://github.com/apache/incubator-apisix/pull/985) +- Added documentation on how to perform a/b testing. [#957](https://github.com/apache/incubator-apisix/pull/957) +- Added documentation on how to enable the MQTT plugin. [#916](https://github.com/apache/incubator-apisix/pull/916) + +### Test case + +- Add test cases for key-auth plugin under normal circumstances. [#964](https://github.com/apache/incubator-apisix/pull/964/) +- Added tests for gRPC transcode pb options. [#920](https://github.com/apache/incubator-apisix/pull/920) + +## 0.9.0 + +This release brings many new features, such as support for running APISIX with Tengine, +an advanced debugging mode that is more developer friendly, and a new URI redirection plugin. + +### Core + +- :sunrise: Supported to run APISIX with tengine. [#683](https://github.com/apache/incubator-apisix/pull/683) +- :sunrise: Enabled HTTP2 and supported to set ssl_protocols. [#663](https://github.com/apache/incubator-apisix/pull/663) +- :sunrise: Advanced Debug Mode, Target module function's input arguments or returned value would be printed once this option is enabled. 
[#614](https://github.com/apache/incubator-apisix/pull/641) +- Support to install APISIX without dashboard. [#686](https://github.com/apache/incubator-apisix/pull/686) +- Removed router R3 [#725](https://github.com/apache/incubator-apisix/pull/725) + +### Plugins + +- [Redirect URI](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/redirect.md): Redirect URI plugin. [#732](https://github.com/apache/incubator-apisix/pull/732) +- [Proxy Rewrite](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/proxy-rewrite.md): Supported remove `header` feature. [#658](https://github.com/apache/incubator-apisix/pull/658) +- [Limit Count](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/limit-count.md): Supported global limit count with `Redis Server`.[#624](https://github.com/apache/incubator-apisix/pull/624) + +### lua-resty-* + +- lua-resty-radixtree + - Support for `host + uri` as an index. +- lua-resty-jsonschema + - This extension is a JSON data validator that replaces the existing `lua-rapidjson` extension. + +### Bugfix + +- key-auth plugin cannot run accurately in the case of multiple consumers. [#826](https://github.com/apache/incubator-apisix/pull/826) +- Exported schema for plugin serverless. [#787](https://github.com/apache/incubator-apisix/pull/787) +- Discard args of uri when using proxy-write plugin [#642](https://github.com/apache/incubator-apisix/pull/642) +- Zipkin plugin not set tracing data to request header. [#715](https://github.com/apache/incubator-apisix/pull/715) +- Skipped check cjson for luajit environment in apisix CLI. [#652](https://github.com/apache/incubator-apisix/pull/652) +- Skipped to init etcd if use local file as config center. [#737](https://github.com/apache/incubator-apisix/pull/737) +- Support more built-in parameters when set chash balancer. [#775](https://github.com/apache/incubator-apisix/pull/775) + +### Dependencies + +- Replace the `lua-rapidjson` module with `lua-resty-jsonschema` global, `lua-resty-jsonschema` is faster and easier to compile. + +## 0.8.0 + +> Released on 2019/09/30 + +This release brings many new features, such as stream proxy, support MQTT protocol proxy, +and support for ARM platform, and proxy rewrite plugin. + +### Core + +- :sunrise: **[support standalone mode](https://github.com/apache/apisix/blob/master/docs/en/latest/deployment-modes.md#standalone)**: using yaml to update configurations of APISIX, more friendly to kubernetes. [#464](https://github.com/apache/incubator-apisix/pull/464) +- :sunrise: **[support stream proxy](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/stream-proxy.md)**. [#513](https://github.com/apache/incubator-apisix/pull/513) +- :sunrise: support consumer bind plugins. [#544](https://github.com/apache/incubator-apisix/pull/544) +- support domain name in upstream, not only IP. [#522](https://github.com/apache/incubator-apisix/pull/522) +- ignored upstream node when it's weight is 0. [#536](https://github.com/apache/incubator-apisix/pull/536) + +### Plugins + +- :sunrise: **[MQTT Proxy](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/mqtt-proxy.md)**: support to load balance MQTT by `client_id`, both support MQTT 3.1 and 5.0. [#513](https://github.com/apache/incubator-apisix/pull/513) +- [proxy-rewrite](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/proxy-rewrite.md): rewrite uri, + schema, host for upstream. 
[#594](https://github.com/apache/incubator-apisix/pull/594) + +### ARM + +- :sunrise: **APISIX can run normally under Ubuntu 18.04 of ARM64 architecture**, so you can use APISIX as IoT gateway with MQTT plugin. + +### lua-resty-* + +- lua-resty-ipmatcher + - support IPv6 + - IP white/black list, route. +- lua-resty-radixtree + - allow to specify multiple host, remote_addr and uri. + - allow to define user-function to filter request. + - use `lua-resty-ipmatcher` instead of `lua-resty-iputils`, `lua-resty-ipmatcher` matches fast and support IPv6. + +### Bugfix + +- healthcheck: the checker name is wrong if APISIX works under multiple processes. [#568](https://github.com/apache/incubator-apisix/issues/568) + +### Dependencies + +- removed `lua-tinyyaml` from source code base, and install through Luarocks. + +## 0.7.0 + +> Released on 2019/09/06 + +This release brings many new features, such as IP black and white list, gPRC protocol transcoding, IPv6, IdP (identity provider) services, serverless, Change the default route to radix tree (**not downward compatible**), and more. + +### Core + +- :sunrise: **[gRPC transcoding](https://github.com/apache/apisix/blob/master/docs/en/latest/plugins/grpc-transcode.md)**: supports protocol transcoding so that clients can access your gRPC API by using HTTP/JSON. [#395](https://github.com/apache/incubator-apisix/issues/395) +- :sunrise: **[radix tree router](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/router-radixtree.md)**: The radix tree is used as the default router implementation. It supports the uri, host, cookie, request header, request parameters, Nginx built-in variables, etc. as the routing conditions, and supports common operators such as equal, greater than, less than, etc., more powerful and flexible.**IMPORTANT: This change is not downward compatible. All users who use historical versions need to manually modify their routing to work properly.** [#414](https://github.com/apache/incubator-apisix/issues/414) +- Dynamic upstream supports more parameters, you can specify the upstream uri and host, and whether to enable websocket. [#451](https://github.com/apache/incubator-apisix/pull/451) +- Support for get values from cookies directly from `ctx.var`. [#449](https://github.com/apache/incubator-apisix/pull/449) +- Routing support IPv6. [#331](https://github.com/apache/incubator-apisix/issues/331) + +### Plugins + +- :sunrise: **[serverless](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/serverless.md)**: With serverless support, users can dynamically run any Lua function on a gateway node. Users can also use this feature as a lightweight plugin.[#86](https://github.com/apache/incubator-apisix/pull/86) +- :sunrise: **support IdP**: Support external authentication services, such as Auth0, okta, etc., users can use this to connect to Oauth2.0 and other authentication methods. [#447](https://github.com/apache/incubator-apisix/pull/447) +- [rate limit](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/limit-conn.md): Support for more restricted keys, such as `X-Forwarded-For` and `X-Real-IP`, and allows users to use Nginx variables, request headers, and request parameters as keys. [#228](https://github.com/apache/incubator-apisix/issues/228) +- [IP black and white list](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/ip-restriction.md) Support IP black and white list for security. 
[#398](https://github.com/apache/incubator-apisix/pull/398) + +### CLI + +- Add the `version` directive to get the version number of APISIX. [#420](https://github.com/apache/incubator-apisix/issues/420) + +### Admin + +- The `PATCH` API is supported and can be modified individually for a configuration without submitting the entire configuration. [#365](https://github.com/apache/incubator-apisix/pull/365) + +### Dashboard + +- :sunrise: **Add the online version of the dashboard**,users can [experience APISIX](http://apisix.iresty.com/) without install. [#374](https://github.com/apache/incubator-apisix/issues/374) + +[Back to TOC](#table-of-contents) + +## 0.6.0 + +> Released on 2019/08/05 + +This release brings many new features such as health check and circuit breaker, debug mode, opentracing and JWT auth. And add **built-in dashboard**. + +### Core + +- :sunrise: **[Health Check and Circuit Breaker](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/tutorials/health-check.md)**: Enable health check on the upstream node, and will automatically filter unhealthy nodes during load balancing to ensure system stability. [#249](https://github.com/apache/incubator-apisix/pull/249) +- Anti-ReDoS(Regular expression Denial of Service). [#252](https://github.com/apache/incubator-apisix/pull/250) +- supported debug mode. [#319](https://github.com/apache/incubator-apisix/pull/319) +- allowed to use different router. [#364](https://github.com/apache/incubator-apisix/pull/364) +- supported to match route by host + uri. [#325](https://github.com/apache/incubator-apisix/pull/325) +- allowed plugins to handler balance phase. [#299](https://github.com/apache/incubator-apisix/pull/299) +- added desc for upstream and service in schema. [#289](https://github.com/apache/incubator-apisix/pull/289) + +### Plugins + +- :sunrise: **[OpenTracing](https://github.com/apache/incubator-apisix/blob/master/docs/en/latest/plugins/zipkin.md)**: support Zipkin and Apache SkyWalking. [#304](https://github.com/apache/incubator-apisix/pull/304) +- [JWT auth](https://github.com/apache/apisix/blob/master/docs/en/latest/plugins/jwt-auth.md). [#303](https://github.com/apache/incubator-apisix/pull/303) + +### CLI + +- support multiple ips of `allow`. [#340](https://github.com/apache/incubator-apisix/pull/340) +- supported real_ip configure in nginx.conf and added functions to get ip and remote ip. [#236](https://github.com/apache/incubator-apisix/pull/236) + +### Dashboard + +- :sunrise: **add built-in dashboard**. [#327](https://github.com/apache/incubator-apisix/pull/327) + +### Test + +- support OSX in Travis CI. [#217](https://github.com/apache/incubator-apisix/pull/217) +- installed all of the dependencies to `deps` folder. [#248](https://github.com/apache/incubator-apisix/pull/248) + +[Back to TOC](#table-of-contents) diff --git a/CloudronPackages/APISIX/apisix-source/CODE_OF_CONDUCT.md b/CloudronPackages/APISIX/apisix-source/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..fe93188 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/CODE_OF_CONDUCT.md @@ -0,0 +1,121 @@ + + +*The following is copied for your convenience from . If there's a discrepancy between the two, let us know or submit a PR to fix it.* + +# Code of Conduct # + +## Introduction ## + +This code of conduct applies to all spaces managed by the Apache +Software Foundation, including IRC, all public and private mailing +lists, issue trackers, wikis, blogs, Twitter, and any other +communication channel used by our communities. 
A code of conduct which +is specific to in-person events (ie., conferences) is codified in the +published ASF anti-harassment policy. + +We expect this code of conduct to be honored by everyone who +participates in the Apache community formally or informally, or claims +any affiliation with the Foundation, in any Foundation-related +activities and especially when representing the ASF, in any role. + +This code __is not exhaustive or complete__. It serves to distill our +common understanding of a collaborative, shared environment and goals. +We expect it to be followed in spirit as much as in the letter, so that +it can enrich all of us and the technical communities in which we participate. + +## Specific Guidelines ## + +We strive to: + +1. __Be open.__ We invite anyone to participate in our community. We preferably use public methods of communication for project-related messages, unless discussing something sensitive. This applies to messages for help or project-related support, too; not only is a public support request much more likely to result in an answer to a question, it also makes sure that any inadvertent mistakes made by people answering will be more easily detected and corrected. + +2. __Be `empathetic`, welcoming, friendly, and patient.__ We work together to resolve conflict, assume good intentions, and do our best to act in an empathetic fashion. We may all experience some frustration from time to time, but we do not allow frustration to turn into a personal attack. A community where people feel uncomfortable or threatened is not a productive one. We should be respectful when dealing with other community members as well as with people outside our community. + +3. __Be collaborative.__ Our work will be used by other people, and in turn we will depend on the work of others. When we make something for the benefit of the project, we are willing to explain to others how it works, so that they can build on the work to make it even better. Any decision we make will affect users and colleagues, and we take those consequences seriously when making decisions. + +4. __Be inquisitive.__ Nobody knows everything! Asking questions early avoids many problems later, so questions are encouraged, though they may be directed to the appropriate forum. Those who are asked should be responsive and helpful, within the context of our shared goal of improving Apache project code. + +5. __Be careful in the words that we choose.__ Whether we are participating as professionals or volunteers, we value professionalism in all interactions, and take responsibility for our own speech. Be kind to others. Do not insult or put down other participants. Harassment and other exclusionary behaviour are not acceptable. This includes, but is not limited to: + + * Violent threats or language directed against another person. + * Sexist, racist, or otherwise discriminatory jokes and language. + * Posting sexually explicit or violent material. + * Posting (or threatening to post) other people's personally identifying information ("doxing"). + * Sharing private content, such as emails sent privately or non-publicly, or unlogged forums such as IRC channel history. + * Personal insults, especially those using racist or sexist terms. + * Unwelcome sexual attention. + * Excessive or unnecessary profanity. + * Repeated harassment of others. In general, if someone asks you to stop, then stop. + * Advocating for, or encouraging, any of the above behaviour. + +6. 
__Be concise.__ Keep in mind that what you write once will be read by hundreds of people. Writing a short email means people can understand the conversation as efficiently as possible. Short emails should always strive to be empathetic, welcoming, friendly and patient. When a long explanation is necessary, consider adding a summary.

+ + Try to bring new ideas to a conversation so that each mail adds something unique to the thread, keeping in mind that the rest of the thread still contains the other messages with arguments that have already been made. + + Try to stay on topic, especially in discussions that are already fairly large. + +7. __Step down considerately.__ Members of every project come and go. When somebody leaves or disengages from the project they should tell people they are leaving and take the proper steps to ensure that others can pick up where they left off. In doing so, they should remain respectful of those who continue to participate in the project and should not misrepresent the project's goals or achievements. Likewise, community members should respect any individual's choice to leave the project.

+ +## Diversity Statement ## + +Apache welcomes and encourages participation by everyone. We are committed to being a community that everyone feels good about joining. Although we may not be able to satisfy everyone, we will always work to treat everyone well. + +No matter how you identify yourself or how others perceive you: we welcome you. Though no list can hope to be comprehensive, we explicitly honour diversity in: age, culture, ethnicity, genotype, gender identity or expression, language, national origin, neurotype, phenotype, political beliefs, profession, race, religion, sexual orientation, socioeconomic status, subculture and technical ability. + +Though we welcome people fluent in all languages, Apache development is conducted in English. + +Standards for behaviour in the Apache community are detailed in the Code of Conduct above. We expect participants in our community to meet these standards in all their interactions and to help others to do so as well. + +## Reporting Guidelines ## + +While this code of conduct should be adhered to by participants, we recognize that sometimes people may have a bad day, or be unaware of some of the guidelines in this code of conduct. When that happens, you may reply to them and point out this code of conduct. Such messages may be in public or in private, whatever is most appropriate. However, regardless of whether the message is public or not, it should still adhere to the relevant parts of this code of conduct; in particular, it should not be abusive or disrespectful. + +If you believe someone is violating this code of conduct, you may reply to +them and point out this code of conduct. Such messages may be in public or in +private, whatever is most appropriate. Assume good faith; it is more likely +that participants are unaware of their bad behaviour than that they +intentionally try to degrade the quality of the discussion. Should there be +difficulties in dealing with the situation, you may report your compliance +issues in confidence to either: + + * President of the Apache Software Foundation: Sam Ruby (rubys at intertwingly dot net) + +or one of our volunteers: + + * [Mark Thomas](http://home.apache.org/~markt/coc.html) + * [Joan Touzet](http://home.apache.org/~wohali/) + * [Sharan Foga](http://home.apache.org/~sharan/coc.html) + +If the violation is in documentation or code, for example inappropriate pronoun usage or word choice within official documentation, we ask that people report these privately to the project in question at private@project.apache.org, and, if they have sufficient ability within the project, to resolve or remove the concerning material, being mindful of the perspective of the person originally reporting the issue. + +## End Notes ## + +This Code defines __empathy__ as "a vicarious participation in the emotions, ideas, or opinions of others; the ability to imagine oneself in the condition or predicament of another." __Empathetic__ is the adjectival form of empathy. 
+ +This statement thanks the following, on which it draws for content and inspiration: + + * [CouchDB Project Code of conduct](http://couchdb.apache.org/conduct.html) + * [Fedora Project Code of Conduct](http://fedoraproject.org/code-of-conduct) + * [Django Code of Conduct](https://www.djangoproject.com/conduct/) + * [Debian Code of Conduct](http://www.debian.org/vote/2014/vote_002) + * [Twitter Open Source Code of Conduct](https://github.com/twitter/code-of-conduct/blob/master/code-of-conduct.md) + * [Mozilla Code of Conduct/Draft](https://wiki.mozilla.org/Code_of_Conduct/Draft#Conflicts_of_Interest) + * [Python Diversity Appendix](https://www.python.org/community/diversity/) + * [Python Mentors Home Page](http://pythonmentors.com/) diff --git a/CloudronPackages/APISIX/apisix-source/CODE_STYLE.md b/CloudronPackages/APISIX/apisix-source/CODE_STYLE.md new file mode 100644 index 0000000..f6c0cc6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/CODE_STYLE.md @@ -0,0 +1,440 @@ +--- +title: APISIX Lua Coding Style Guide +--- + + + +## Indentation + +Use 4 spaces as an indent: + +```lua +--No +if a then +ngx.say("hello") +end +``` + +```lua +--Yes +if a then + ngx.say("hello") +end +``` + +You can simplify the operation by changing the tab to 4 spaces in the editor you are using. + +## Space + +On both sides of the operator, you need to use a space to separate: + +```lua +--No +local i=1 +local s = "apisix" +``` + +```lua +--Yes +local i = 1 +local s = "apisix" +``` + +## Blank line + +Many developers will add a semicolon at the end of the line: + +```lua +--No +if a then +    ngx.say("hello"); +end; +``` + +Adding a semicolon will make the Lua code look ugly and unnecessary. Also, don't want to save the number of lines in the code, the latter turns the multi-line code into one line in order to appear "simple". This will not know when the positioning error is in the end of the code: + +```lua +--No +if a then ngx.say("hello") end +``` + +```lua +--Yes +if a then + ngx.say("hello") +end +``` + +The functions needs to be separated by two blank lines: + +```lua +--No +local function foo() +end +local function bar() +end +``` + +```lua +--Yes +local function foo() +end + + +local function bar() +end +``` + +If there are multiple if elseif branches, they need a blank line to separate them: + +```lua +--No +if a == 1 then + foo() +elseif a== 2 then + bar() +elseif a == 3 then + run() +else + error() +end +``` + +```lua +--Yes +if a == 1 then + foo() + +elseif a == 2 then + bar() + +elseif a == 3 then + run() + +else + error() +end +``` + +## Maximum length per line + +Each line cannot exceed 100 characters. If it exceeds, you need to wrap and align: + +```lua +--No +return limit_conn_new("plugin-limit-conn", conf.conn, conf.burst, conf.default_conn_delay) +``` + +```lua +--Yes +return limit_conn_new("plugin-limit-conn", conf.conn, conf.burst, + conf.default_conn_delay) +``` + +When the linefeed is aligned, the correspondence between the upper and lower lines should be reflected. For the example above, the parameters of the second line of functions are to the right of the left parenthesis of the first line. + +If it is a string stitching alignment, you need to put `..` in the next line: + +```lua +--No +return limit_conn_new("plugin-limit-conn" .. "plugin-limit-conn" .. + "plugin-limit-conn") +``` + +```lua +--Yes +return limit_conn_new("plugin-limit-conn" .. "plugin-limit-conn" + .. "plugin-limit-conn") +``` + +```lua +--Yes +return "param1", "plugin-limit-conn" + .. 
"plugin-limit-conn" +``` + +## Variable + +Local variables should always be used, not global variables: + +```lua +--No +i = 1 +s = "apisix" +``` + +```lua +--Yes +local i = 1 +local s = "apisix" +``` + +Variable naming uses the `snake_case` style: + +```lua +--No +local IndexArr = 1 +local str_Name = "apisix" +``` + +```lua +--Yes +local index_arr = 1 +local str_name = "apisix" +``` + +Use all capitalization for constants: + +```lua +--No +local max_int = 65535 +local server_name = "apisix" +``` + +```lua +--Yes +local MAX_INT = 65535 +local SERVER_NAME = "apisix" +``` + +## Table + +Use `table.new` to pre-allocate the table: + +```lua +--No +local t = {} +for i = 1, 100 do + t[i] = i +end +``` + +```lua +--Yes +local new_tab = require "table.new" +local t = new_tab(100, 0) +for i = 1, 100 do + t[i] = i +end +``` + +Don't use `nil` in an array: + +```lua +--No +local t = {1, 2, nil, 3} +``` + +If you must use null values, use `ngx.null` to indicate: + +```lua +--Yes +local t = {1, 2, ngx.null, 3} +``` + +## String + +Do not splicing strings on the hot code path: + +```lua +--No +local s = "" +for i = 1, 100000 do + s = s .. "a" +end +``` + +```lua +--Yes +local new_tab = require "table.new" +local t = new_tab(100000, 0) +for i = 1, 100000 do + t[i] = "a" +end +local s = table.concat(t, "") +``` + +## Function + +The naming of functions also follows `snake_case`: + +```lua +--No +local function testNginx() +end +``` + +```lua +--Yes +local function test_nginx() +end +``` + +The function should return as early as possible: + +```lua +--No +local function check(age, name) + local ret = true + if age < 20 then + ret = false + end + + if name == "a" then + ret = false + end + -- do something else + return ret +end +``` + +```lua +--Yes +local function check(age, name) + if age < 20 then + return false + end + + if name == "a" then + return false + end + -- do something else + return true +end +``` + +The function should return ``, `err`. +The first return value means successful or not, if not, the second return value specifies the error message. +The error message can be ignored in some cases. 
+ +```lua +--No +local function check() + return "failed" +end +``` + +```lua +--Yes +local function check() + return false, "failed" +end +``` + +## Module + +All require libraries must be localized: + +```lua +--No +local function foo() + local ok, err = ngx.timer.at(delay, handler) +end +``` + +```lua +--Yes +local timer_at = ngx.timer.at + +local function foo() + local ok, err = timer_at(delay, handler) +end +``` + +For style unification, `require` and `ngx` also need to be localized: + +```lua +--No +local core = require("apisix.core") +local timer_at = ngx.timer.at + +local function foo() + local ok, err = timer_at(delay, handler) +end +``` + +```lua +--Yes +local ngx = ngx +local require = require +local core = require("apisix.core") +local timer_at = ngx.timer.at + +local function foo() + local ok, err = timer_at(delay, handler) +end +``` + +## Error handling + +For functions that return with error information, the error information must be judged and processed: + +```lua +--No +local sock = ngx.socket.tcp() +local ok = sock:connect("www.google.com", 80) +ngx.say("successfully connected to google!") +``` + +```lua +--Yes +local sock = ngx.socket.tcp() +local ok, err = sock:connect("www.google.com", 80) +if not ok then + ngx.say("failed to connect to google: ", err) + return +end +ngx.say("successfully connected to google!") +``` + +The function you wrote yourself, the error message is to be returned as a second parameter in the form of a string: + +```lua +--No +local function foo() + local ok, err = func() + if not ok then + return false + end + return true +end +``` + +```lua +--No +local function foo() + local ok, err = func() + if not ok then + return false, {msg = err} + end + return true +end +``` + +```lua +--Yes +local function foo() + local ok, err = func() + if not ok then + return false, "failed to call func(): " .. err + end + return true +end +``` diff --git a/CloudronPackages/APISIX/apisix-source/CONTRIBUTING.md b/CloudronPackages/APISIX/apisix-source/CONTRIBUTING.md new file mode 100644 index 0000000..872e2a5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/CONTRIBUTING.md @@ -0,0 +1,152 @@ + + +# Contributing to APISIX + +Firstly, thanks for your interest in contributing! I hope that this will be a pleasant first experience for you, and that you will return to continue +contributing. + +## How to contribute? + +Most of the contributions that we receive are code contributions, but you can also contribute to the documentation or simply report solid bugs for us to fix. Nor is code the only way to contribute to the project. We strongly value documentation, integration with other project, and gladly accept improvements for these aspects. + +For new contributors, please take a look at issues with a tag called [Good first issue](https://github.com/apache/apisix/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) or [Help wanted](https://github.com/apache/apisix/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22). + +## How to report a bug? + +* **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/apache/apisix/issues). + +* If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/apache/apisix/issues/new). Be sure to include a **title and clear description**, as much relevant information as possible, and a **code sample** or an **executable test case** demonstrating the expected behavior that is not occurring. 
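+
+When possible, attach a minimal, executable reproduction to the issue. The snippet below is only a sketch (the Admin API port, proxy port, and admin key are assumptions that depend on your local `conf/config.yaml`); it creates a single route through the Admin API and then replays the request that misbehaves, so maintainers can reproduce it:
+
+```shell
+# Assumed defaults: Admin API on 127.0.0.1:9180, proxy on 127.0.0.1:9080,
+# and ADMIN_KEY exported with the admin key from conf/config.yaml.
+curl -X PUT "http://127.0.0.1:9180/apisix/admin/routes/1" \
+  -H "X-API-KEY: ${ADMIN_KEY}" \
+  -d '{
+    "uri": "/get",
+    "upstream": {
+      "type": "roundrobin",
+      "nodes": { "httpbin.org:80": 1 }
+    }
+  }'
+
+# Replace the route or plugin configuration above with the one that triggers
+# the issue, then replay the failing request:
+curl -i "http://127.0.0.1:9080/get"
+```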
+ +## How to add a new feature or change an existing one + +_Before making any significant changes, please [open an issue](https://github.com/apache/apisix/issues)._ Discussing your proposed changes ahead of time will make the contribution process smooth for everyone. + +Once we've discussed your changes and you've got your code ready, make sure that tests are passing and open your pull request. Your PR is most likely to be accepted if it: + +* Update the README.md with details of changes to the interface. +* Includes tests for new functionality. +* References the original issue in the description, e.g. "Resolves #123". +* Has a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html). +* Ensure your pull request's title starts from one of the word in the `types` section of [semantic.yml](https://github.com/apache/apisix/blob/master/.github/workflows/semantic.yml). +* Follow the [PR manners](https://raw.githubusercontent.com/apache/apisix/master/.github/PULL_REQUEST_TEMPLATE.md) + +## Contribution Guidelines for Documentation + +* Linting/Style + + For linting both our Markdown and YAML files we use: + + - npm based [markdownlint-cli](https://www.npmjs.com/package/markdownlint-cli) + + For linting all files' license header we use: + + - [license-eye](https://github.com/apache/skywalking-eyes) + + For linting our shell files we use: + + - [shellcheck](https://github.com/koalaman/shellcheck) + + For linting our zh document files we use: + + - [autocorrect](https://github.com/huacnlee/autocorrect) + +* Active Voice + + In general, use active voice when formulating the sentence instead of passive voice. A sentence written in the active voice will emphasize + the person or thing who is performing an action (eg.The dog chased the ball). In contrast, the passive voice will highlight + the recipient of the action (The ball was chased by the dog). Therefore use the passive voice, only when it's less important + who or what completed the action and more important that the action was completed. For example: + + - Recommended: The key-auth plugin authenticates the requests. + - Not recommended: The requests are authenticated by the key-auth plugin. + +* Capitalization: + + * For titles of a section, capitalize the first letter of each word except for the [closed-class words](https://en.wikipedia.org/wiki/Part_of_speech#Open_and_closed_classes) + such as determiners, pronouns, conjunctions, and prepositions. Use the following [link](https://capitalizemytitle.com/#Chicago) for guidance. + - Recommended: Authentication **with** APISIX + + * For normal sentences, don't [capitalize](https://www.grammarly.com/blog/capitalization-rules/) random words in the middle of the sentences. + Use the Chicago manual for capitalization rules for the documentation. + +* Second Person + + In general, use second person in your docs rather than first person. For example: + + - Recommended: You are recommended to use the docker based deployment. + - Not Recommended: We recommend to use the docker based deployment. + +* Spellings + + Use [American spellings](https://www.oxfordinternationalenglish.com/differences-in-british-and-american-spelling/) when + contributing to the documentation. + +* Voice + + * Use a friendly and conversational tone. Always use simple sentences. If the sentence is lengthy try to break it in to smaller sentences. + +## Check code style and test case style + +* code style + * Please take a look at [APISIX Lua Coding Style Guide](CODE_STYLE.md). 
+ * Use tool to check your code statically by command: `make lint`. + +```shell + # install `luacheck` first before run it + $ luarocks install luacheck + # check source code + $ make lint + ./utils/check-lua-code-style.sh + + luacheck -q apisix t/lib + Total: 0 warnings / 0 errors in 146 files + + find apisix -name *.lua ! -wholename apisix/cli/ngx_tpl.lua -exec ./utils/lj-releng {} + + + grep -E ERROR.*.lua: /tmp/check.log + + true + + [ -s /tmp/error.log ] + ./utils/check-test-code-style.sh + + find t -name '*.t' -exec grep -E '\-\-\-\s+(SKIP|ONLY|LAST|FIRST)$' '{}' + + + true + + '[' -s /tmp/error.log ']' + + find t -name '*.t' -exec ./utils/reindex '{}' + + + grep done. /tmp/check.log + + true + + '[' -s /tmp/error.log ']' +``` + + The `lj-releng` and `reindex` will be downloaded automatically by `make lint` if not exists. + +* test case style + * Use tool to check your test case style statically by command, eg: `make lint`. + * When the test file is too large, for example > 800 lines, you should split it to a new file. + Please take a look at `t/plugin/limit-conn.t` and `t/plugin/limit-conn2.t`. + * For more details, see the [testing framework](https://github.com/apache/apisix/blob/master/docs/en/latest/internal/testing-framework.md) + +## Contributor gifts + +If you have contributed to Apache APISIX, no matter it is a code contribution to fix a bug or a feature request, or a documentation change, Congratulations! You are eligible to receive the APISIX special gifts with a digital certificate! It's always been the community effort that has made Apache APISIX be understood and used by more developers. + +![Contributor gifts](https://static.apiseven.com/2022/12/29/63acfb2f208e1.png) + +Contributors can request gifts by filling out this [Google form](https://forms.gle/DhPL96LnJwuaHjHU7) or [QQ Form](https://wj.qq.com/s2/11438041/7b07/). After filling in the form, please wait patiently. The community needs some time to review submissions. + +## Do you have questions about the source code? + +- **QQ Group**: 781365357(recommended), 578997126, 552030619 +- Join in `apisix` channel at [Apache Slack](http://s.apache.org/slack-invite). If the link is not working, find the latest one at [Apache INFRA WIKI](https://cwiki.apache.org/confluence/display/INFRA/Slack+Guest+Invites). diff --git a/CloudronPackages/APISIX/apisix-source/LICENSE b/CloudronPackages/APISIX/apisix-source/LICENSE new file mode 100644 index 0000000..5cadce4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/LICENSE @@ -0,0 +1,219 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +======================================================================= +Apache APISIX Subcomponents: + +The Apache APISIX project contains subcomponents with separate copyright +notices and license terms. Your use of the source code for the these +subcomponents is subject to the terms and conditions of the following +licenses. 
+ +======================================================================== +Apache 2.0 licenses +======================================================================== + +The following components are provided under the Apache License. See project link for details. +The text of each license is the standard Apache 2.0 license. + + ewma.lua file from kubernetes/ingress-nginx: https://github.com/kubernetes/ingress-nginx Apache 2.0 + hello.go file from OpenFunction/samples: https://github.com/OpenFunction/samples Apache 2.0 diff --git a/CloudronPackages/APISIX/apisix-source/MAINTAIN.md b/CloudronPackages/APISIX/apisix-source/MAINTAIN.md new file mode 100644 index 0000000..795aa8c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/MAINTAIN.md @@ -0,0 +1,62 @@ + + +## Release steps + +### Release patch version + +1. Create a [pull request](https://github.com/apache/apisix/commit/7db31a1a7186b966bc0f066539d4de8011871012) (contains the changelog and version change) to master + > The changelog only needs to provide a link to the minor branch. +2. Create a [pull request](https://github.com/apache/apisix/commit/21d7673c6e8ff995677456cdebc8ded5afbb3d0a) (contains the backport commits, and the change in step 1) to minor branch + > This should include those PRs that contain the `need backport` tag since the last patch release. Also, the title of these PRs need to be added to the changelog of the minor branch. +3. Merge it into minor branch +4. Package a vote artifact to Apache's dev-apisix repo. The artifact can be created via `VERSION=x.y.z make release-src` +5. Send the [vote email](https://lists.apache.org/thread/vq4qtwqro5zowpdqhx51oznbjy87w9d0) to dev@apisix.apache.org + > After executing the `VERSION=x.y.z make release-src` command, the content of the vote email will be automatically generated in the `./release` directory named `apache-apisix-${x.y.z}-vote-contents` +6. When the vote is passed, send the [vote result email](https://lists.apache.org/thread/k2frnvj4zj9oynsbr7h7nd6n6m3q5p89) to dev@apisix.apache.org +7. Move the vote artifact to Apache's apisix repo +8. Register the release info in https://reporter.apache.org/addrelease.html?apisix +9. Create a [GitHub release](https://github.com/apache/apisix/releases/tag/2.10.2) from the minor branch +10. Update [APISIX's website](https://github.com/apache/apisix-website/commit/f9104bdca50015722ab6e3714bbcd2d17e5c5bb3) if the version number is the largest +11. Update APISIX rpm package + > Go to [apisix-build-tools](https://github.com/api7/apisix-build-tools) repository and create a new tag named `apisix-${x.y.z}` to automatically submit the + package to yum repo +12. - If the version number is the largest, update [APISIX docker](https://github.com/apache/apisix-docker/commit/829d45559c303bea7edde5bebe9fcf4938071601) in [APISIX docker repository](https://github.com/apache/apisix-docker), after PR merged, then create a new branch from master, named as `release/apisix-${version}`, e.g. `release/apisix-2.10.2`. + - If released an LTS version and the version number less than the current largest(e.g. the current largest version number is 2.14.1, but the LTS version 2.13.2 is to be released), submit a PR like [APISIX docker](https://github.com/apache/apisix-docker/pull/322) in [APISIX docker repository](https://github.com/apache/apisix-docker) and named as `release/apisix-${version}`, e.g. `release/apisix-2.13.2`, after PR reviewed, don't need to merged PR, just close the PR and push the branch to APISIX docker repository. +13. 
Update [APISIX helm chart](https://github.com/apache/apisix-helm-chart/pull/234) if the version number is the largest +14. Send the [ANNOUNCE email](https://lists.apache.org/thread.html/ree7b06e6eac854fd42ba4f302079661a172f514a92aca2ef2f1aa7bb%40%3Cdev.apisix.apache.org%3E) to dev@apisix.apache.org & announce@apache.org + +### Release minor version + +1. Create a minor branch, and create [pull request](https://github.com/apache/apisix/commit/bc6ddf51f15e41fffea6c5bd7d01da9838142b66) to master branch from it +2. Package a vote artifact to Apache's dev-apisix repo. The artifact can be created via `VERSION=x.y.z make release-src` +3. Send the [vote email](https://lists.apache.org/thread/q8zq276o20r5r9qjkg074nfzb77xwry9) to dev@apisix.apache.org + > After executing the `VERSION=x.y.z make release-src` command, the content of the vote email will be automatically generated in the `./release` directory named `apache-apisix-${x.y.z}-vote-contents` +4. When the vote is passed, send the [vote result email](https://lists.apache.org/thread/p1m9s116rojlhb91g38cj8646393qkz7) to dev@apisix.apache.org +5. Move the vote artifact to Apache's apisix repo +6. Register the release info in https://reporter.apache.org/addrelease.html?apisix +7. Create a [GitHub release](https://github.com/apache/apisix/releases/tag/2.10.0) from the minor branch +8. Merge the pull request into master branch +9. Update [APISIX's website](https://github.com/apache/apisix-website/commit/7bf0ab5a1bbd795e6571c4bb89a6e646115e7ca3) +10. Update APISIX rpm package. + > Go to [apisix-build-tools](https://github.com/api7/apisix-build-tools) repository and create a new tag named `apisix-${x.y.z}` to automatically submit the rpm package to yum repo +11. - If the version number is the largest, update [APISIX docker](https://github.com/apache/apisix-docker/commit/829d45559c303bea7edde5bebe9fcf4938071601) in [APISIX docker repository](https://github.com/apache/apisix-docker), after PR merged, then create a new branch from master, named as `release/apisix-${version}`, e.g. `release/apisix-2.10.2`. + - If released an LTS version and the version number less than the current largest(e.g. the current largest version number is 2.14.1, but the LTS version 2.13.2 is to be released), submit a PR like [APISIX docker](https://github.com/apache/apisix-docker/pull/322) in [APISIX docker repository](https://github.com/apache/apisix-docker) and named as `release/apisix-${version}`, e.g. `release/apisix-2.13.2`, after PR reviewed, don't need to merged PR, just close the PR and push the branch to APISIX docker repository. +12. Update [APISIX helm chart](https://github.com/apache/apisix-helm-chart/pull/234) +13. Send the [ANNOUNCE email](https://lists.apache.org/thread/4s4msqwl1tq13p9dnv3hx7skbgpkozw1) to dev@apisix.apache.org & announce@apache.org diff --git a/CloudronPackages/APISIX/apisix-source/Makefile b/CloudronPackages/APISIX/apisix-source/Makefile new file mode 100644 index 0000000..423b240 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/Makefile @@ -0,0 +1,523 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Makefile basic env setting +.DEFAULT_GOAL := help +# add pipefail support for default shell +SHELL := /bin/bash -o pipefail + + +# Project basic setting +VERSION ?= master +project_name ?= apache-apisix +project_release_name ?= $(project_name)-$(VERSION)-src + +OTEL_CONFIG ?= ./ci/pod/otelcol-contrib/data-otlp.json + +# Hyperconverged Infrastructure +ENV_OS_NAME ?= $(shell uname -s | tr '[:upper:]' '[:lower:]') +ENV_OS_ARCH ?= $(shell uname -m | tr '[:upper:]' '[:lower:]') +ENV_APISIX ?= $(CURDIR)/bin/apisix +ENV_GIT ?= git +ENV_TAR ?= tar +ENV_INSTALL ?= install +ENV_RM ?= rm -vf +ENV_DOCKER ?= docker +ENV_DOCKER_COMPOSE ?= docker compose --project-directory $(CURDIR) -p $(project_name) -f $(project_compose_ci) +ENV_NGINX ?= $(ENV_NGINX_EXEC) -p $(CURDIR) -c $(CURDIR)/conf/nginx.conf +ENV_NGINX_EXEC := $(shell command -v openresty 2>/dev/null || command -v nginx 2>/dev/null) +ENV_OPENSSL_PREFIX ?= /usr/local/openresty/openssl3 +ENV_LIBYAML_INSTALL_PREFIX ?= /usr +ENV_LUAROCKS ?= luarocks +## These variables can be injected by luarocks +ENV_INST_PREFIX ?= /usr +ENV_INST_LUADIR ?= $(ENV_INST_PREFIX)/share/lua/5.1 +ENV_INST_BINDIR ?= $(ENV_INST_PREFIX)/bin +ENV_RUNTIME_VER ?= $(shell $(ENV_NGINX_EXEC) -V 2>&1 | tr ' ' '\n' | grep 'APISIX_RUNTIME_VER' | cut -d '=' -f2) + +IMAGE_NAME = apache/apisix +ENV_APISIX_IMAGE_TAG_NAME ?= $(IMAGE_NAME):$(VERSION) + +-include .requirements +export + +ifneq ($(shell whoami), root) + ENV_LUAROCKS_FLAG_LOCAL := --local +endif + +ifdef ENV_LUAROCKS_SERVER + ENV_LUAROCKS_SERVER_OPT := --server $(ENV_LUAROCKS_SERVER) +endif + +ifneq ($(shell test -d $(ENV_OPENSSL_PREFIX) && echo -n yes), yes) + ENV_NGINX_PREFIX := $(shell $(ENV_NGINX_EXEC) -V 2>&1 | grep -Eo 'prefix=(.*)/nginx\s+' | grep -Eo '/.*/') + ifeq ($(shell test -d $(addprefix $(ENV_NGINX_PREFIX), openssl3) && echo -n yes), yes) + ENV_OPENSSL_PREFIX := $(addprefix $(ENV_NGINX_PREFIX), openssl3) + endif +endif + + +# Makefile basic extension function +_color_red =\E[1;31m +_color_green =\E[1;32m +_color_yellow =\E[1;33m +_color_blue =\E[1;34m +_color_wipe =\E[0m + + +define func_echo_status + printf "[%b info %b] %s\n" "$(_color_blue)" "$(_color_wipe)" $(1) +endef + + +define func_echo_warn_status + printf "[%b info %b] %s\n" "$(_color_yellow)" "$(_color_wipe)" $(1) +endef + + +define func_echo_success_status + printf "[%b info %b] %s\n" "$(_color_green)" "$(_color_wipe)" $(1) +endef + + +define func_check_folder + if [[ ! -d $(1) ]]; then \ + mkdir -p $(1); \ + $(call func_echo_status, 'folder check -> create `$(1)`'); \ + else \ + $(call func_echo_success_status, 'folder check -> found `$(1)`'); \ + fi +endef + + +# Makefile target +.PHONY: runtime +runtime: +ifeq ($(ENV_NGINX_EXEC), ) +ifeq ("$(wildcard /usr/local/openresty/bin/openresty)", "") + @$(call func_echo_warn_status, "WARNING: OpenResty not found. 
You have to install OpenResty and add the binary file to PATH before install Apache APISIX.") + exit 1 +else + $(eval ENV_NGINX_EXEC := /usr/local/openresty/bin/openresty) + @$(call func_echo_status, "Use openresty as default runtime") +endif +endif + + +### help : Show Makefile rules +### If there're awk failures, please make sure +### you are using awk or gawk +.PHONY: help +help: + @$(call func_echo_success_status, "Makefile rules:") + @awk '{ if(match($$0, /^\s*#{3}\s*([^:]+)\s*:\s*(.*)$$/, res)){ printf(" make %-15s : %-10s\n", res[1], res[2]) } }' Makefile + + +### deps : Installing dependencies +.PHONY: deps +deps: install-runtime + $(eval ENV_LUAROCKS_VER := $(shell $(ENV_LUAROCKS) --version | grep -E -o "luarocks [0-9]+.")) + @if [ '$(ENV_LUAROCKS_VER)' = 'luarocks 3.' ]; then \ + mkdir -p ~/.luarocks; \ + $(ENV_LUAROCKS) config $(ENV_LUAROCKS_FLAG_LOCAL) variables.OPENSSL_LIBDIR $(addprefix $(ENV_OPENSSL_PREFIX), /lib); \ + $(ENV_LUAROCKS) config $(ENV_LUAROCKS_FLAG_LOCAL) variables.OPENSSL_INCDIR $(addprefix $(ENV_OPENSSL_PREFIX), /include); \ + $(ENV_LUAROCKS) config $(ENV_LUAROCKS_FLAG_LOCAL) variables.YAML_DIR $(ENV_LIBYAML_INSTALL_PREFIX); \ + $(ENV_LUAROCKS) install apisix-master-0.rockspec --tree deps --only-deps $(ENV_LUAROCKS_SERVER_OPT); \ + else \ + $(call func_echo_warn_status, "WARNING: You're not using LuaRocks 3.x; please remove the luarocks and reinstall it via https://raw.githubusercontent.com/apache/apisix/master/utils/linux-install-luarocks.sh"); \ + exit 1; \ + fi + + +### undeps : Uninstalling dependencies +.PHONY: undeps +undeps: uninstall-rocks uninstall-runtime + + +.PHONY: uninstall-rocks +uninstall-rocks: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_LUAROCKS) purge --tree=deps + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### utils : Installation tools +.PHONY: utils +utils: +ifeq ("$(wildcard utils/lj-releng)", "") + wget -qP utils https://raw.githubusercontent.com/iresty/openresty-devel-utils/master/lj-releng + chmod a+x utils/lj-releng +endif +ifeq ("$(wildcard utils/reindex)", "") + wget -qP utils https://raw.githubusercontent.com/iresty/openresty-devel-utils/master/reindex + chmod a+x utils/reindex +endif + + +### lint : Lint source code +.PHONY: lint +lint: utils + @$(call func_echo_status, "$@ -> [ Start ]") + ./utils/check-lua-code-style.sh + ./utils/check-test-code-style.sh + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### init : Initialize the runtime environment +.PHONY: init +init: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_APISIX) init + $(ENV_APISIX) init_etcd + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### run : Start the apisix server +.PHONY: run +run: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_APISIX) start + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### quit : Stop the apisix server, exit gracefully +.PHONY: quit +quit: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_APISIX) quit + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### stop : Stop the apisix server, exit immediately +.PHONY: stop +stop: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_APISIX) stop + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### verify : Verify the configuration of apisix server +.PHONY: verify +verify: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_NGINX) -t + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### clean : Remove generated files +.PHONY: clean +clean: + @$(call 
func_echo_status, "$@ -> [ Start ]") + rm -rf logs/ + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### reload : Reload the apisix server +.PHONY: reload +reload: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_APISIX) reload + @$(call func_echo_success_status, "$@ -> [ Done ]") + +.PHONY: install-runtime +install-runtime: +ifneq ($(ENV_RUNTIME_VER), $(APISIX_RUNTIME)) + ./utils/install-dependencies.sh + @sudo $(ENV_INSTALL) /usr/local/openresty/bin/openresty $(ENV_INST_BINDIR)/openresty +endif + +.PHONY: uninstall-runtime +uninstall-runtime: + ./utils/install-dependencies.sh uninstall + rm -rf /usr/local/openresty + rm -f $(ENV_INST_BINDIR)/openresty + +### install : Install the apisix (only for luarocks) +.PHONY: install +install: runtime + $(ENV_INSTALL) -d /usr/local/apisix/ + $(ENV_INSTALL) -d /usr/local/apisix/logs/ + $(ENV_INSTALL) -d /usr/local/apisix/conf/cert + $(ENV_INSTALL) conf/mime.types /usr/local/apisix/conf/mime.types + $(ENV_INSTALL) conf/config.yaml /usr/local/apisix/conf/config.yaml + $(ENV_INSTALL) conf/debug.yaml /usr/local/apisix/conf/debug.yaml + $(ENV_INSTALL) conf/cert/* /usr/local/apisix/conf/cert/ + + # directories listed in alphabetical order + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix + $(ENV_INSTALL) apisix/*.lua $(ENV_INST_LUADIR)/apisix/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/admin + $(ENV_INSTALL) apisix/admin/*.lua $(ENV_INST_LUADIR)/apisix/admin/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/balancer + $(ENV_INSTALL) apisix/balancer/*.lua $(ENV_INST_LUADIR)/apisix/balancer/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/cli + $(ENV_INSTALL) apisix/cli/*.lua $(ENV_INST_LUADIR)/apisix/cli/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/control + $(ENV_INSTALL) apisix/control/*.lua $(ENV_INST_LUADIR)/apisix/control/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/core + $(ENV_INSTALL) apisix/core/*.lua $(ENV_INST_LUADIR)/apisix/core/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/core/dns + $(ENV_INSTALL) apisix/core/dns/*.lua $(ENV_INST_LUADIR)/apisix/core/dns + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/discovery + $(ENV_INSTALL) apisix/discovery/*.lua $(ENV_INST_LUADIR)/apisix/discovery/ + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/discovery/{consul,consul_kv,dns,eureka,nacos,kubernetes,tars} + $(ENV_INSTALL) apisix/discovery/consul/*.lua $(ENV_INST_LUADIR)/apisix/discovery/consul + $(ENV_INSTALL) apisix/discovery/consul_kv/*.lua $(ENV_INST_LUADIR)/apisix/discovery/consul_kv + $(ENV_INSTALL) apisix/discovery/dns/*.lua $(ENV_INST_LUADIR)/apisix/discovery/dns + $(ENV_INSTALL) apisix/discovery/eureka/*.lua $(ENV_INST_LUADIR)/apisix/discovery/eureka + $(ENV_INSTALL) apisix/discovery/kubernetes/*.lua $(ENV_INST_LUADIR)/apisix/discovery/kubernetes + $(ENV_INSTALL) apisix/discovery/nacos/*.lua $(ENV_INST_LUADIR)/apisix/discovery/nacos + $(ENV_INSTALL) apisix/discovery/tars/*.lua $(ENV_INST_LUADIR)/apisix/discovery/tars + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/http + $(ENV_INSTALL) apisix/http/*.lua $(ENV_INST_LUADIR)/apisix/http/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/http/router + $(ENV_INSTALL) apisix/http/router/*.lua $(ENV_INST_LUADIR)/apisix/http/router/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/include/apisix/model + $(ENV_INSTALL) apisix/include/apisix/model/*.proto $(ENV_INST_LUADIR)/apisix/include/apisix/model/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/inspect + $(ENV_INSTALL) apisix/inspect/*.lua $(ENV_INST_LUADIR)/apisix/inspect/ + + $(ENV_INSTALL) -d 
$(ENV_INST_LUADIR)/apisix/plugins + $(ENV_INSTALL) apisix/plugins/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/ext-plugin + $(ENV_INSTALL) apisix/plugins/ext-plugin/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ext-plugin/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/grpc-transcode + $(ENV_INSTALL) apisix/plugins/grpc-transcode/*.lua $(ENV_INST_LUADIR)/apisix/plugins/grpc-transcode/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/ip-restriction + $(ENV_INSTALL) apisix/plugins/ip-restriction/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ip-restriction/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/limit-conn + $(ENV_INSTALL) apisix/plugins/limit-conn/*.lua $(ENV_INST_LUADIR)/apisix/plugins/limit-conn/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/limit-req + $(ENV_INSTALL) apisix/plugins/limit-req/*.lua $(ENV_INST_LUADIR)/apisix/plugins/limit-req/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/limit-count + $(ENV_INSTALL) apisix/plugins/limit-count/*.lua $(ENV_INST_LUADIR)/apisix/plugins/limit-count/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/opa + $(ENV_INSTALL) apisix/plugins/opa/*.lua $(ENV_INST_LUADIR)/apisix/plugins/opa/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/prometheus + $(ENV_INSTALL) apisix/plugins/prometheus/*.lua $(ENV_INST_LUADIR)/apisix/plugins/prometheus/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/proxy-cache + $(ENV_INSTALL) apisix/plugins/proxy-cache/*.lua $(ENV_INST_LUADIR)/apisix/plugins/proxy-cache/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/serverless + $(ENV_INSTALL) apisix/plugins/serverless/*.lua $(ENV_INST_LUADIR)/apisix/plugins/serverless/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/syslog + $(ENV_INSTALL) apisix/plugins/syslog/*.lua $(ENV_INST_LUADIR)/apisix/plugins/syslog/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/tencent-cloud-cls + $(ENV_INSTALL) apisix/plugins/tencent-cloud-cls/*.lua $(ENV_INST_LUADIR)/apisix/plugins/tencent-cloud-cls/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/pubsub + $(ENV_INSTALL) apisix/pubsub/*.lua $(ENV_INST_LUADIR)/apisix/pubsub/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/secret + $(ENV_INSTALL) apisix/secret/*.lua $(ENV_INST_LUADIR)/apisix/secret/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/zipkin + $(ENV_INSTALL) apisix/plugins/zipkin/*.lua $(ENV_INST_LUADIR)/apisix/plugins/zipkin/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/ssl/router + $(ENV_INSTALL) apisix/ssl/router/*.lua $(ENV_INST_LUADIR)/apisix/ssl/router/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/stream + $(ENV_INSTALL) apisix/stream/*.lua $(ENV_INST_LUADIR)/apisix/stream/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/stream/plugins + $(ENV_INSTALL) apisix/stream/plugins/*.lua $(ENV_INST_LUADIR)/apisix/stream/plugins/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/stream/router + $(ENV_INSTALL) apisix/stream/router/*.lua $(ENV_INST_LUADIR)/apisix/stream/router/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/stream/xrpc + $(ENV_INSTALL) apisix/stream/xrpc/*.lua $(ENV_INST_LUADIR)/apisix/stream/xrpc/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/stream/xrpc/protocols/redis + $(ENV_INSTALL) apisix/stream/xrpc/protocols/redis/*.lua $(ENV_INST_LUADIR)/apisix/stream/xrpc/protocols/redis/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/stream/xrpc/protocols/dubbo + $(ENV_INSTALL) apisix/stream/xrpc/protocols/dubbo/*.lua $(ENV_INST_LUADIR)/apisix/stream/xrpc/protocols/dubbo/ + + 
$(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/utils + $(ENV_INSTALL) apisix/utils/*.lua $(ENV_INST_LUADIR)/apisix/utils/ + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/ai-proxy + $(ENV_INSTALL) apisix/plugins/ai-proxy/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ai-proxy + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/ai-drivers + $(ENV_INSTALL) apisix/plugins/ai-drivers/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ai-drivers + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/ai-rag/embeddings + $(ENV_INSTALL) apisix/plugins/ai-rag/embeddings/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ai-rag/embeddings + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/ai-rag/vector-search + $(ENV_INSTALL) apisix/plugins/ai-rag/vector-search/*.lua $(ENV_INST_LUADIR)/apisix/plugins/ai-rag/vector-search + + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/mcp/broker + $(ENV_INSTALL) -d $(ENV_INST_LUADIR)/apisix/plugins/mcp/transport + $(ENV_INSTALL) apisix/plugins/mcp/*.lua $(ENV_INST_LUADIR)/apisix/plugins/mcp + $(ENV_INSTALL) apisix/plugins/mcp/broker/*.lua $(ENV_INST_LUADIR)/apisix/plugins/mcp/broker + $(ENV_INSTALL) apisix/plugins/mcp/transport/*.lua $(ENV_INST_LUADIR)/apisix/plugins/mcp/transport + + $(ENV_INSTALL) bin/apisix $(ENV_INST_BINDIR)/apisix + + +### uninstall : Uninstall the apisix +.PHONY: uninstall +uninstall: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_RM) -r /usr/local/apisix + $(ENV_RM) -r $(ENV_INST_LUADIR)/apisix + $(ENV_RM) $(ENV_INST_BINDIR)/apisix + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### test : Run the test case +.PHONY: test +test: runtime + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_GIT) submodule update --init --recursive + prove -I../test-nginx/lib -I./ -r -s t/ + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### license-check : Check project source code for Apache License +.PHONY: license-check +license-check: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER) run -it --rm -v $(CURDIR):/github/workspace apache/skywalking-eyes header check + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +.PHONY: release-src +release-src: compress-tar + @$(call func_echo_status, "$@ -> [ Start ]") + gpg --batch --yes --armor --detach-sig $(project_release_name).tgz + shasum -a 512 $(project_release_name).tgz > $(project_release_name).tgz.sha512 + + $(call func_check_folder,release) + mv $(project_release_name).tgz release/$(project_release_name).tgz + mv $(project_release_name).tgz.asc release/$(project_release_name).tgz.asc + mv $(project_release_name).tgz.sha512 release/$(project_release_name).tgz.sha512 + ./utils/gen-vote-contents.sh $(VERSION) + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +.PHONY: compress-tar +compress-tar: + # The $VERSION can be major.minor.patch (from developer) + # or major.minor (from the branch name in the CI) + $(ENV_TAR) -zcvf $(project_release_name).tgz \ + ./apisix \ + ./bin \ + ./conf \ + ./apisix-master-0.rockspec \ + LICENSE \ + Makefile \ + NOTICE \ + *.md + + +### container +### ci-env-up : CI env launch +.PHONY: ci-env-up +ci-env-up: + @$(call func_echo_status, "$@ -> [ Start ]") + touch $(OTEL_CONFIG) + chmod 777 $(OTEL_CONFIG) + $(ENV_DOCKER_COMPOSE) up -d + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### ci-env-ps : CI env ps +.PHONY: ci-env-ps +ci-env-ps: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER_COMPOSE) ps + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### ci-env-rebuild : CI env image rebuild +.PHONY: 
ci-env-rebuild +ci-env-rebuild: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER_COMPOSE) build + @$(call func_echo_success_status, "$@ -> [ Done ]") + + +### ci-env-down : CI env destroy +.PHONY: ci-env-down +ci-env-down: + @$(call func_echo_status, "$@ -> [ Start ]") + rm $(OTEL_CONFIG) + $(ENV_DOCKER_COMPOSE) down + @$(call func_echo_success_status, "$@ -> [ Done ]") + +### ci-env-stop : CI env temporary stop +.PHONY: ci-env-stop +ci-env-stop: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER_COMPOSE) stop + @$(call func_echo_success_status, "$@ -> [ Done ]") + +### build-on-debian-dev : Build apache/apisix:xx-debian-dev image +.PHONY: build-on-debian-dev +build-on-debian-dev: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER) build -t $(ENV_APISIX_IMAGE_TAG_NAME)-debian-dev \ + --build-arg TARGETARCH=$(ENV_OS_ARCH) \ + --build-arg CODE_PATH=. \ + --build-arg ENTRYPOINT_PATH=./docker/debian-dev/docker-entrypoint.sh \ + --build-arg INSTALL_BROTLI=./docker/debian-dev/install-brotli.sh \ + --build-arg CHECK_STANDALONE_CONFIG=./docker/utils/check_standalone_config.sh \ + -f ./docker/debian-dev/Dockerfile . + @$(call func_echo_success_status, "$@ -> [ Done ]") + +.PHONY: push-on-debian-dev +push-on-debian-dev: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER) tag $(ENV_APISIX_IMAGE_TAG_NAME)-debian-dev $(IMAGE_NAME):dev-$(ENV_OS_ARCH) + $(ENV_DOCKER) push $(IMAGE_NAME):dev-$(ENV_OS_ARCH) + @$(call func_echo_success_status, "$@ -> [ Done ]") + +### merge-dev-tags : Merge architecture-specific dev tags into a single dev tag +.PHONY: merge-dev-tags +merge-dev-tags: + @$(call func_echo_status, "$@ -> [ Start ]") + $(ENV_DOCKER) manifest create $(IMAGE_NAME):dev \ + $(IMAGE_NAME):dev-amd64 \ + $(IMAGE_NAME):dev-arm64 + $(ENV_DOCKER) manifest push $(IMAGE_NAME):dev + @$(call func_echo_success_status, "$@ -> [ Done ]") diff --git a/CloudronPackages/APISIX/apisix-source/NOTICE b/CloudronPackages/APISIX/apisix-source/NOTICE new file mode 100644 index 0000000..fdab115 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/NOTICE @@ -0,0 +1,5 @@ +Apache APISIX +Copyright 2019-2025 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
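A quick usage sketch of the dev-image targets defined in the Makefile above (the `IMAGE_NAME` value here is an assumption for illustration; the real default is set earlier in the Makefile and by CI):

```shell
# Build the architecture-specific dev image locally
make build-on-debian-dev

# Tag and push it as <IMAGE_NAME>:dev-<arch>; requires a prior docker login
make push-on-debian-dev IMAGE_NAME=myregistry/apisix

# Once both the dev-amd64 and dev-arm64 tags exist in the registry,
# publish the combined multi-arch manifest
make merge-dev-tags IMAGE_NAME=myregistry/apisix
```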
diff --git a/CloudronPackages/APISIX/apisix-source/README.md b/CloudronPackages/APISIX/apisix-source/README.md new file mode 100644 index 0000000..a61b52d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/README.md @@ -0,0 +1,241 @@ + + +# Apache APISIX API Gateway | AI Gateway + +APISIX logo + +[![Build Status](https://github.com/apache/apisix/actions/workflows/build.yml/badge.svg?branch=master)](https://github.com/apache/apisix/actions/workflows/build.yml) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/apache/apisix/blob/master/LICENSE) +[![Commit activity](https://img.shields.io/github/commit-activity/m/apache/apisix)](https://github.com/apache/apisix/graphs/commit-activity) +[![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/apache/apisix.svg)](http://isitmaintained.com/project/apache/apisix "Average time to resolve an issue") +[![Percentage of issues still open](http://isitmaintained.com/badge/open/apache/apisix.svg)](http://isitmaintained.com/project/apache/apisix "Percentage of issues still open") +[![Slack](https://badgen.net/badge/Slack/Join%20Apache%20APISIX?icon=slack)](https://apisix.apache.org/slack) + +**Apache APISIX** is a dynamic, real-time, high-performance API Gateway. + +APISIX API Gateway provides rich traffic management features such as load balancing, dynamic upstream, canary release, circuit breaking, authentication, observability, and more. + +APISIX can serve as an **[AI Gateway](https://apisix.apache.org/ai-gateway/)** through its flexible plugin system, providing AI proxying, load balancing for LLMs, retries and fallbacks, token-based rate limiting, and robust security to ensure the efficiency and reliability of AI agents. APISIX also provides the [`mcp-bridge`](https://apisix.apache.org/blog/2025/04/21/host-mcp-server-with-api-gateway/) plugin to seamlessly convert stdio-based MCP servers to scalable HTTP SSE services. + +You can use APISIX API Gateway to handle traditional north-south traffic, as well as east-west traffic between services. It can also be used as a [k8s ingress controller](https://github.com/apache/apisix-ingress-controller). + +The technical architecture of Apache APISIX: + +![Technical architecture of Apache APISIX](docs/assets/images/apisix.png) + +## Community + +- [Kindly Write a Review](https://www.g2.com/products/apache-apisix/reviews) for APISIX in G2. +- Mailing List: Mail to dev-subscribe@apisix.apache.org, follow the reply to subscribe to the mailing list. +- Slack Workspace - [invitation link](https://apisix.apache.org/slack) (Please open an [issue](https://apisix.apache.org/docs/general/submit-issue) if this link is expired), and then join the #apisix channel (Channels -> Browse channels -> search for "apisix"). +- ![Twitter Follow](https://img.shields.io/twitter/follow/ApacheAPISIX?style=social) - follow and interact with us using hashtag `#ApacheAPISIX` +- [Documentation](https://apisix.apache.org/docs/) +- [Discussions](https://github.com/apache/apisix/discussions) +- [Blog](https://apisix.apache.org/blog) + +## Features + +You can use APISIX API Gateway as a traffic entrance to process all business data, including dynamic routing, dynamic upstream, dynamic certificates, +A/B testing, canary release, blue-green deployment, limit rate, defense against malicious attacks, metrics, monitoring alarms, service observability, service governance, etc. 
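As a small, hedged illustration of the "traffic entrance" idea above (the Admin API key, ports, and upstream address are placeholders, not values taken from this repository), a route with a request-count limit could be created through the Admin API roughly like this:

```shell
# Create a route that proxies /hello to a local upstream and limits each
# client IP to 100 requests per minute. Replace the key and addresses with
# the values of your own deployment.
curl -X PUT http://127.0.0.1:9180/apisix/admin/routes/1 \
  -H "X-API-KEY: ${ADMIN_KEY}" \
  -d '{
    "uri": "/hello",
    "plugins": {
      "limit-count": {
        "count": 100,
        "time_window": 60,
        "key_type": "var",
        "key": "remote_addr"
      }
    },
    "upstream": {
      "type": "roundrobin",
      "nodes": { "127.0.0.1:1980": 1 }
    }
  }'
```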
+ +- **All platforms** + + - Cloud-Native: Platform agnostic, No vendor lock-in, APISIX API Gateway can run from bare-metal to Kubernetes. + - Supports ARM64: Don't worry about the lock-in of the infra technology. + +- **Multi protocols** + + - [TCP/UDP Proxy](docs/en/latest/stream-proxy.md): Dynamic TCP/UDP proxy. + - [Dubbo Proxy](docs/en/latest/plugins/dubbo-proxy.md): Dynamic HTTP to Dubbo proxy. + - [Dynamic MQTT Proxy](docs/en/latest/plugins/mqtt-proxy.md): Supports to load balance MQTT by `client_id`, both support MQTT [3.1.\*](http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html), [5.0](https://docs.oasis-open.org/mqtt/mqtt/v5.0/mqtt-v5.0.html). + - [gRPC proxy](docs/en/latest/grpc-proxy.md): Proxying gRPC traffic. + - [gRPC Web Proxy](docs/en/latest/plugins/grpc-web.md): Proxying gRPC Web traffic to gRPC Service. + - [gRPC transcoding](docs/en/latest/plugins/grpc-transcode.md): Supports protocol transcoding so that clients can access your gRPC API by using HTTP/JSON. + - Proxy Websocket + - Proxy Protocol + - HTTP(S) Forward Proxy + - [SSL](docs/en/latest/certificate.md): Dynamically load an SSL certificate + - [HTTP/3 with QUIC](docs/en/latest/http3.md) + +- **Full Dynamic** + + - [Hot Updates And Hot Plugins](docs/en/latest/terminology/plugin.md): Continuously updates its configurations and plugins without restarts! + - [Proxy Rewrite](docs/en/latest/plugins/proxy-rewrite.md): Support rewrite the `host`, `uri`, `schema`, `method`, `headers` of the request before send to upstream. + - [Response Rewrite](docs/en/latest/plugins/response-rewrite.md): Set customized response status code, body and header to the client. + - Dynamic Load Balancing: Round-robin load balancing with weight. + - Hash-based Load Balancing: Load balance with consistent hashing sessions. + - [Health Checks](docs/en/latest/tutorials/health-check.md): Enable health check on the upstream node and will automatically filter unhealthy nodes during load balancing to ensure system stability. + - Circuit-Breaker: Intelligent tracking of unhealthy upstream services. + - [Proxy Mirror](docs/en/latest/plugins/proxy-mirror.md): Provides the ability to mirror client requests. + - [Traffic Split](docs/en/latest/plugins/traffic-split.md): Allows users to incrementally direct percentages of traffic between various upstreams. + +- **Fine-grained routing** + + - [Supports full path matching and prefix matching](docs/en/latest/router-radixtree.md#how-to-use-libradixtree-in-apisix) + - [Support all Nginx built-in variables as conditions for routing](docs/en/latest/router-radixtree.md#how-to-filter-route-by-nginx-builtin-variable), so you can use `cookie`, `args`, etc. as routing conditions to implement canary release, A/B testing, etc. + - Support [various operators as judgment conditions for routing](https://github.com/iresty/lua-resty-radixtree#operator-list), for example `{"arg_age", ">", 24}` + - Support [custom route matching function](https://github.com/iresty/lua-resty-radixtree/blob/master/t/filter-fun.t#L10) + - IPv6: Use IPv6 to match the route. 
+ - Support [TTL](docs/en/latest/admin-api.md#route) + - [Support priority](docs/en/latest/router-radixtree.md#3-match-priority) + - [Support Batch Http Requests](docs/en/latest/plugins/batch-requests.md) + - [Support filtering route by GraphQL attributes](docs/en/latest/router-radixtree.md#how-to-filter-route-by-graphql-attributes) + +- **Security** + + - Rich authentication & authorization support: + * [key-auth](docs/en/latest/plugins/key-auth.md) + * [JWT](docs/en/latest/plugins/jwt-auth.md) + * [basic-auth](docs/en/latest/plugins/basic-auth.md) + * [wolf-rbac](docs/en/latest/plugins/wolf-rbac.md) + * [casbin](docs/en/latest/plugins/authz-casbin.md) + * [keycloak](docs/en/latest/plugins/authz-keycloak.md) + * [casdoor](docs/en/latest/plugins/authz-casdoor.md) + - [IP Whitelist/Blacklist](docs/en/latest/plugins/ip-restriction.md) + - [Referer Whitelist/Blacklist](docs/en/latest/plugins/referer-restriction.md) + - [IdP](docs/en/latest/plugins/openid-connect.md): Support external Identity platforms, such as Auth0, okta, etc.. + - [Limit-req](docs/en/latest/plugins/limit-req.md) + - [Limit-count](docs/en/latest/plugins/limit-count.md) + - [Limit-concurrency](docs/en/latest/plugins/limit-conn.md) + - Anti-ReDoS(Regular expression Denial of Service): Built-in policies to Anti ReDoS without configuration. + - [CORS](docs/en/latest/plugins/cors.md) Enable CORS(Cross-origin resource sharing) for your API. + - [URI Blocker](docs/en/latest/plugins/uri-blocker.md): Block client request by URI. + - [Request Validator](docs/en/latest/plugins/request-validation.md) + - [CSRF](docs/en/latest/plugins/csrf.md) Based on the [`Double Submit Cookie`](https://en.wikipedia.org/wiki/Cross-site_request_forgery#Double_Submit_Cookie) way, protect your API from CSRF attacks. + +- **OPS friendly** + + - Zipkin tracing: [Zipkin](docs/en/latest/plugins/zipkin.md) + - Open source APM: support [Apache SkyWalking](docs/en/latest/plugins/skywalking.md) + - Works with external service discovery: In addition to the built-in etcd, it also supports [Consul](docs/en/latest/discovery/consul.md), [Consul_kv](docs/en/latest/discovery/consul_kv.md), [Nacos](docs/en/latest/discovery/nacos.md), [Eureka](docs/en/latest/discovery/eureka.md) and [Zookeeper (CP)](https://github.com/api7/apisix-seed/blob/main/docs/en/latest/zookeeper.md). + - Monitoring And Metrics: [Prometheus](docs/en/latest/plugins/prometheus.md) + - Clustering: APISIX nodes are stateless, creates clustering of the configuration center, please refer to [etcd Clustering Guide](https://etcd.io/docs/v3.5/op-guide/clustering/). + - High availability: Support to configure multiple etcd addresses in the same cluster. + - [Dashboard](https://github.com/apache/apisix-dashboard) + - Version Control: Supports rollbacks of operations. + - CLI: start\stop\reload APISIX through the command line. + - [Standalone](docs/en/latest/deployment-modes.md#standalone): Supports to load route rules from local YAML file, it is more friendly such as under the kubernetes(k8s). + - [Global Rule](docs/en/latest/terminology/global-rule.md): Allows to run any plugin for all request, eg: limit rate, IP filter etc. + - High performance: The single-core QPS reaches 18k with an average delay of fewer than 0.2 milliseconds. 
+ - [Fault Injection](docs/en/latest/plugins/fault-injection.md) + - [REST Admin API](docs/en/latest/admin-api.md): Using the REST Admin API to control Apache APISIX, which only allows 127.0.0.1 access by default, you can modify the `allow_admin` field in `conf/config.yaml` to specify a list of IPs that are allowed to call the Admin API. Also, note that the Admin API uses key auth to verify the identity of the caller. + - External Loggers: Export access logs to external log management tools. ([HTTP Logger](docs/en/latest/plugins/http-logger.md), [TCP Logger](docs/en/latest/plugins/tcp-logger.md), [Kafka Logger](docs/en/latest/plugins/kafka-logger.md), [UDP Logger](docs/en/latest/plugins/udp-logger.md), [RocketMQ Logger](docs/en/latest/plugins/rocketmq-logger.md), [SkyWalking Logger](docs/en/latest/plugins/skywalking-logger.md), [Alibaba Cloud Logging(SLS)](docs/en/latest/plugins/sls-logger.md), [Google Cloud Logging](docs/en/latest/plugins/google-cloud-logging.md), [Splunk HEC Logging](docs/en/latest/plugins/splunk-hec-logging.md), [File Logger](docs/en/latest/plugins/file-logger.md), [SolarWinds Loggly Logging](docs/en/latest/plugins/loggly.md), [TencentCloud CLS](docs/en/latest/plugins/tencent-cloud-cls.md)). + - [ClickHouse](docs/en/latest/plugins/clickhouse-logger.md): push logs to ClickHouse. + - [Elasticsearch](docs/en/latest/plugins/elasticsearch-logger.md): push logs to Elasticsearch. + - [Datadog](docs/en/latest/plugins/datadog.md): push custom metrics to the DogStatsD server, comes bundled with [Datadog agent](https://docs.datadoghq.com/agent/), over the UDP protocol. DogStatsD basically is an implementation of StatsD protocol which collects the custom metrics for Apache APISIX agent, aggregates it into a single data point and sends it to the configured Datadog server. + - [Helm charts](https://github.com/apache/apisix-helm-chart) + - [HashiCorp Vault](https://www.vaultproject.io/): Support secret management solution for accessing secrets from Vault secure storage backed in a low trust environment. Currently, RS256 keys (public-private key pairs) or secret keys can be linked from vault in jwt-auth authentication plugin using [APISIX Secret](docs/en/latest/terminology/secret.md) resource. + +- **Highly scalable** + - [Custom plugins](docs/en/latest/plugin-develop.md): Allows hooking of common phases, such as `rewrite`, `access`, `header filter`, `body filter` and `log`, also allows to hook the `balancer` stage. + - [Plugin can be written in Java/Go/Python](docs/en/latest/external-plugin.md) + - [Plugin can be written with Proxy Wasm SDK](docs/en/latest/wasm.md) + - Custom load balancing algorithms: You can use custom load balancing algorithms during the `balancer` phase. + - Custom routing: Support users to implement routing algorithms themselves. + +- **Multi-Language support** + - Apache APISIX is a multi-language gateway for plugin development and provides support via `RPC` and `Wasm`. + ![Multi Language Support into Apache APISIX](docs/assets/images/external-plugin.png) + - The RPC way, is the current way. Developers can choose the language according to their needs and after starting an independent process with the RPC, it exchanges data with APISIX through local RPC communication. Till this moment, APISIX has support for [Java](https://github.com/apache/apisix-java-plugin-runner), [Golang](https://github.com/apache/apisix-go-plugin-runner), [Python](https://github.com/apache/apisix-python-plugin-runner) and Node.js. + - The Wasm or WebAssembly, is an experimental way. 
APISIX can load and run Wasm bytecode via APISIX [wasm plugin](https://github.com/apache/apisix/blob/master/docs/en/latest/wasm.md) written with the [Proxy Wasm SDK](https://github.com/proxy-wasm/spec#sdks). Developers only need to write the code according to the SDK and then compile it into a Wasm bytecode that runs on Wasm VM with APISIX. + +- **Serverless** + - [Lua functions](docs/en/latest/plugins/serverless.md): Invoke functions in each phase in APISIX. + - [AWS Lambda](docs/en/latest/plugins/aws-lambda.md): Integration with AWS Lambda function as a dynamic upstream to proxy all requests for a particular URI to the AWS API gateway endpoint. Supports authorization via api key and AWS IAM access secret. + - [Azure Functions](docs/en/latest/plugins/azure-functions.md): Seamless integration with Azure Serverless Function as a dynamic upstream to proxy all requests for a particular URI to the Microsoft Azure cloud. + - [Apache OpenWhisk](docs/en/latest/plugins/openwhisk.md): Seamless integration with Apache OpenWhisk as a dynamic upstream to proxy all requests for a particular URI to your own OpenWhisk cluster. + +## Get Started + +1. Installation + + Please refer to [install documentation](https://apisix.apache.org/docs/apisix/installation-guide/). + +2. Getting started + + The getting started guide is a great way to learn the basics of APISIX. Just follow the steps in [Getting Started](https://apisix.apache.org/docs/apisix/getting-started/). + + Further, you can follow the documentation to try more [plugins](docs/en/latest/plugins). + +3. Admin API + + Apache APISIX provides [REST Admin API](docs/en/latest/admin-api.md) to dynamically control the Apache APISIX cluster. + +4. Plugin development + + You can refer to [plugin development guide](docs/en/latest/plugin-develop.md), and sample plugin `example-plugin`'s code implementation. + Reading [plugin concept](docs/en/latest/terminology/plugin.md) would help you learn more about the plugin. + +For more documents, please refer to [Apache APISIX Documentation site](https://apisix.apache.org/docs/apisix/getting-started/) + +## Benchmark + +Using AWS's eight-core server, APISIX's QPS reaches 140,000 with a latency of only 0.2 ms. + +[Benchmark script](benchmark/run.sh) has been open sourced, welcome to try and contribute. + +[APISIX also works perfectly in AWS graviton3 C7g.](https://apisix.apache.org/blog/2022/06/07/installation-performance-test-of-apigateway-apisix-on-aws-graviton3) + +## User Stories + +- [European eFactory Platform: API Security Gateway – Using APISIX in the eFactory Platform](https://www.efactory-project.eu/post/api-security-gateway-using-apisix-in-the-efactory-platform) +- [Copernicus Reference System Software](https://github.com/COPRS/infrastructure/wiki/Networking-trade-off) +- [More Stories](https://apisix.apache.org/blog/tags/case-studies/) + +## Who Uses APISIX API Gateway? 
+ +A wide variety of companies and organizations use APISIX API Gateway for research, production, and commercial products; below are some of them: + +- Airwallex +- Bilibili +- CVTE +- European eFactory Platform +- European Copernicus Reference System +- Geely +- HONOR +- Horizon Robotics +- iQIYI +- Lenovo +- NASA JPL +- Nayuki +- OPPO +- QingCloud +- Swisscom +- Tencent Game +- Travelsky +- vivo +- Sina Weibo +- WeCity +- WPS +- XPENG +- Zoom + +## Logos + +- [Apache APISIX logo (PNG)](https://github.com/apache/apisix/tree/master/logos/apache-apisix.png) +- [Apache APISIX logo source](https://apache.org/logos/#apisix) + +## Acknowledgments + +Inspired by Kong and Orange. + +## License + +[Apache 2.0 License](https://github.com/apache/apisix/tree/master/LICENSE) diff --git a/CloudronPackages/APISIX/apisix-source/THREAT_MODEL.md b/CloudronPackages/APISIX/apisix-source/THREAT_MODEL.md new file mode 100644 index 0000000..c10560c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/THREAT_MODEL.md @@ -0,0 +1,60 @@ + + +## Threat Model + +Here is the threat model of Apache APISIX, which is relevant to our developers and operators. + +### Where the system might be attacked + +As a proxy, Apache APISIX needs to be able to run in front of untrusted downstream traffic. + +However, some features need to assume the downstream traffic is trusted. They should either +not be exposed to the internet by default (for example, listening on 127.0.0.1) or be explicitly +called out in the documentation. + +As Apache APISIX is evolving rapidly, some newly added features may not be strong enough to defend against potential attacks. +Therefore, we need to divide the features into two groups: premature and mature ones. +Features merged within the last half year, or declared experimental, are premature. +Premature features have not been fully battle-tested and are normally not covered by the security policy. + +Additionally, we require the components below to be trusted: + +1. the upstream +2. the configuration +3. the way we relay the configuration +4. the third-party components involved in Apache APISIX, for example, the authorization server + +### How can we reduce the likelihood or impact of a potential threat + +As the user: +First of all, do not expose the components that are required to be trusted to the internet, including the control plane (Dashboard or similar) and the configuration relay mechanism (etcd, etcd adapter, or similar). + +Then, harden the trusted components. For example: + +1. if possible, enable authentication or use HTTPS for etcd +2. read the docs and disable plugins that are not needed, to reduce the attack surface +3. restrict and audit configuration changes + +As the developer: +We should keep security in mind and validate input from the client before use. + +As the maintainer: +We should keep security in mind and review the code line by line. +We are open to discussions with security researchers. diff --git a/CloudronPackages/APISIX/apisix-source/Vision-and-Milestones.md b/CloudronPackages/APISIX/apisix-source/Vision-and-Milestones.md new file mode 100644 index 0000000..333d991 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/Vision-and-Milestones.md @@ -0,0 +1,40 @@ + + +### Vision + +Apache APISIX is an open source API gateway designed to help developers connect any APIs securely and efficiently in any environment. 
+ +Managing thousands or tens of thousands of APIs and microservices in a multi-cloud and hybrid cloud environment is not an easy task. +There are many challenges, such as authentication, observability, and security. + +Apache APISIX, a community-driven project, hopes to help everyone better manage and use APIs through the power of developers. +Every developer's contribution will be used by thousands of companies and serve billions of users. + +### Milestones + +Apache APISIX already has relatively complete features for north-south traffic +and will iterate in the following directions over the next 6 months (if you have any ideas, feel free to create an issue to discuss them): + +- More complete support for the Gateway API in the APISIX Ingress controller +- Add support for service mesh +- User-friendly documentation +- More plugins for public cloud and SaaS services +- Java/Go plugins and Wasm production-ready +- Add dynamic debugging tools for Apache APISIX diff --git a/CloudronPackages/APISIX/apisix-source/apisix-master-0.rockspec b/CloudronPackages/APISIX/apisix-source/apisix-master-0.rockspec new file mode 100644 index 0000000..82ca9d8 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix-master-0.rockspec @@ -0,0 +1,108 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +package = "apisix" +version = "master-0" +supported_platforms = {"linux"} + +source = { + url = "git://github.com/apache/apisix", + branch = "master", +} + +description = { + summary = "Apache APISIX is a cloud-native microservices API gateway, delivering the ultimate performance, security, open source and scalable platform for all your APIs and microservices.", + homepage = "https://github.com/apache/apisix", + license = "Apache License 2.0", +} + +dependencies = { + "lua-resty-ctxdump = 0.1-0", + "lyaml = 6.2.8", + "api7-lua-resty-dns-client = 7.0.1", + "lua-resty-template = 2.0", + "lua-resty-etcd = 1.10.5", + "api7-lua-resty-http = 0.2.2-0", + "lua-resty-balancer = 0.04", + "lua-resty-ngxvar = 0.5.2", + "lua-resty-jit-uuid = 0.0.7", + "lua-resty-worker-events = 1.0.0", + "lua-resty-healthcheck-api7 = 3.2.0", + "api7-lua-resty-jwt = 0.2.5", + "lua-resty-hmac-ffi = 0.06-1", + "lua-resty-cookie = 0.2.0-1", + "lua-resty-session = 3.10", + "opentracing-openresty = 0.1", + "lua-resty-radixtree = 2.9.2", + "lua-protobuf = 0.5.2-1", + "lua-resty-openidc = 1.7.6-3", + "luafilesystem = 1.7.0-2", + "nginx-lua-prometheus-api7 = 0.20240201-1", + "jsonschema = 0.9.8", + "lua-resty-ipmatcher = 0.6.1", + "lua-resty-kafka = 0.23-0", + "lua-resty-logger-socket = 2.0.1-0", + "skywalking-nginx-lua = 1.0.1", + "base64 = 1.5-2", + "binaryheap = 0.4", + "api7-dkjson = 0.1.1", + "resty-redis-cluster = 1.05-1", + "lua-resty-expr = 1.3.2", + "graphql = 0.0.2", + "argparse = 0.7.1-1", + "luasocket = 3.1.0-1", + "luasec = 1.3.2-1", + "lua-resty-consul = 0.3-2", + "penlight = 1.13.1", + "ext-plugin-proto = 0.6.1", + "casbin = 1.41.9-1", + "inspect == 3.1.1", + "lua-resty-rocketmq = 0.3.0-0", + "opentelemetry-lua = 0.2-3", + "net-url = 0.9-1", + "xml2lua = 1.5-2", + "nanoid = 0.1-1", + "lua-resty-mediador = 0.1.2-1", + "lua-resty-ldap = 0.1.0-0", + "lua-resty-t1k = 1.1.5", + "brotli-ffi = 0.3-1", + "lua-ffi-zlib = 0.6-0", + "jsonpath = 1.0-1", + "api7-lua-resty-aws == 2.0.2-1", + "multipart = 0.5.9-1", +} + +build = { + type = "make", + build_variables = { + CFLAGS="$(CFLAGS)", + LIBFLAG="$(LIBFLAG)", + LUA_LIBDIR="$(LUA_LIBDIR)", + LUA_BINDIR="$(LUA_BINDIR)", + LUA_INCDIR="$(LUA_INCDIR)", + LUA="$(LUA)", + OPENSSL_INCDIR="$(OPENSSL_INCDIR)", + OPENSSL_LIBDIR="$(OPENSSL_LIBDIR)", + }, + install_variables = { + ENV_INST_PREFIX="$(PREFIX)", + ENV_INST_BINDIR="$(BINDIR)", + ENV_INST_LIBDIR="$(LIBDIR)", + ENV_INST_LUADIR="$(LUADIR)", + ENV_INST_CONFDIR="$(CONFDIR)", + }, +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/consumer_group.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/consumer_group.lua new file mode 100644 index 0000000..9aed58f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/consumer_group.lua @@ -0,0 +1,66 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local consumers = require("apisix.consumer").consumers +local resource = require("apisix.admin.resource") +local schema_plugin = require("apisix.admin.plugins").check_schema +local type = type +local tostring = tostring +local ipairs = ipairs + + +local function check_conf(id, conf, need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + local ok, err = schema_plugin(conf.plugins) + if not ok then + return nil, {error_msg = err} + end + + return true +end + + +local function delete_checker(id) + local consumers, consumers_ver = consumers() + if consumers_ver and consumers then + for _, consumer in ipairs(consumers) do + if type(consumer) == "table" and consumer.value + and consumer.value.group_id + and tostring(consumer.value.group_id) == id then + return 400, {error_msg = "can not delete this consumer group," + .. " consumer [" .. consumer.value.id + .. "] is still using it now"} + end + end + end + + return nil, nil +end + + +return resource.new({ + name = "consumer_groups", + kind = "consumer group", + schema = core.schema.consumer_group, + checker = check_conf, + unsupported_methods = {"post"}, + delete_checker = delete_checker +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/consumers.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/consumers.lua new file mode 100644 index 0000000..e027890 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/consumers.lua @@ -0,0 +1,65 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugins = require("apisix.admin.plugins") +local resource = require("apisix.admin.resource") + + +local function check_conf(username, conf, need_username, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + if username and username ~= conf.username then + return nil, {error_msg = "wrong username" } + end + + if conf.plugins then + ok, err = plugins.check_schema(conf.plugins, core.schema.TYPE_CONSUMER) + if not ok then + return nil, {error_msg = "invalid plugins configuration: " .. err} + end + end + + if conf.group_id then + local key = "/consumer_groups/" .. conf.group_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch consumer group info by " + .. "consumer group id [" .. conf.group_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch consumer group info by " + .. "consumer group id [" .. 
conf.group_id .. "], " + .. "response code: " .. res.status} + end + end + + return conf.username +end + + +return resource.new({ + name = "consumers", + kind = "consumer", + schema = core.schema.consumer, + checker = check_conf, + unsupported_methods = {"post", "patch"} +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/credentials.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/credentials.lua new file mode 100644 index 0000000..3622867 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/credentials.lua @@ -0,0 +1,74 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugins = require("apisix.admin.plugins") +local plugin = require("apisix.plugin") +local resource = require("apisix.admin.resource") +local pairs = pairs + +local function check_conf(_id, conf, _need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + if conf.plugins then + ok, err = plugins.check_schema(conf.plugins, core.schema.TYPE_CONSUMER) + if not ok then + return nil, {error_msg = "invalid plugins configuration: " .. err} + end + + for name, _ in pairs(conf.plugins) do + local plugin_obj = plugin.get(name) + if not plugin_obj then + return nil, {error_msg = "unknown plugin " .. name} + end + if plugin_obj.type ~= "auth" then + return nil, {error_msg = "only supports auth type plugins in consumer credential"} + end + end + end + + return true, nil +end + +-- get_credential_etcd_key is used to splice the credential's etcd key (without prefix) +-- from credential_id and sub_path. +-- Parameter credential_id is from the uri or payload; sub_path is in the form of +-- {consumer_name}/credentials or {consumer_name}/credentials/{credential_id}. +-- Only if GET credentials list, credential_id is nil, sub_path is like {consumer_name}/credentials, +-- so return value is /consumers/{consumer_name}/credentials. +-- In the other methods, credential_id is not nil, return value is +-- /consumers/{consumer_name}/credentials/{credential_id}. +local function get_credential_etcd_key(credential_id, _conf, sub_path, _args) + if credential_id then + local uri_segs = core.utils.split_uri(sub_path) + local consumer_name = uri_segs[1] + return "/consumers/" .. consumer_name .. "/credentials/" .. credential_id + end + + return "/consumers/" .. 
sub_path +end + +return resource.new({ + name = "credentials", + kind = "credential", + schema = core.schema.credential, + checker = check_conf, + get_resource_etcd_key = get_credential_etcd_key, + unsupported_methods = {"post", "patch"} +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/global_rules.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/global_rules.lua new file mode 100644 index 0000000..81db850 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/global_rules.lua @@ -0,0 +1,43 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local resource = require("apisix.admin.resource") +local schema_plugin = require("apisix.admin.plugins").check_schema + + +local function check_conf(id, conf, need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + local ok, err = schema_plugin(conf.plugins) + if not ok then + return nil, {error_msg = err} + end + + return true +end + + +return resource.new({ + name = "global_rules", + kind = "global rule", + schema = core.schema.global_rule, + checker = check_conf, + unsupported_methods = {"post"} +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/init.lua new file mode 100644 index 0000000..7bcb806 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/init.lua @@ -0,0 +1,526 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local core = require("apisix.core") +local get_uri_args = ngx.req.get_uri_args +local route = require("apisix.utils.router") +local plugin = require("apisix.plugin") +local standalone = require("apisix.admin.standalone") +local v3_adapter = require("apisix.admin.v3_adapter") +local utils = require("apisix.admin.utils") +local ngx = ngx +local get_method = ngx.req.get_method +local ngx_time = ngx.time +local ngx_timer_at = ngx.timer.at +local ngx_worker_id = ngx.worker.id +local tonumber = tonumber +local tostring = tostring +local str_lower = string.lower +local reload_event = "/apisix/admin/plugins/reload" +local ipairs = ipairs +local error = error +local type = type + + +local events +local MAX_REQ_BODY = 1024 * 1024 * 1.5 -- 1.5 MiB + + +local viewer_methods = { + get = true, +} + + +local resources = { + routes = require("apisix.admin.routes"), + services = require("apisix.admin.services"), + upstreams = require("apisix.admin.upstreams"), + consumers = require("apisix.admin.consumers"), + credentials = require("apisix.admin.credentials"), + schema = require("apisix.admin.schema"), + ssls = require("apisix.admin.ssl"), + plugins = require("apisix.admin.plugins"), + protos = require("apisix.admin.proto"), + global_rules = require("apisix.admin.global_rules"), + stream_routes = require("apisix.admin.stream_routes"), + plugin_metadata = require("apisix.admin.plugin_metadata"), + plugin_configs = require("apisix.admin.plugin_config"), + consumer_groups = require("apisix.admin.consumer_group"), + secrets = require("apisix.admin.secrets"), +} + + +local _M = {version = 0.4} +local router + + +local function check_token(ctx) + local local_conf = core.config.local_conf() + + -- check if admin_key is required + if local_conf.deployment.admin.admin_key_required == false then + return true + end + + local admin_key = core.table.try_read_attr(local_conf, "deployment", "admin", "admin_key") + if not admin_key then + return true + end + + local req_token = ctx.var.arg_api_key or ctx.var.http_x_api_key + or ctx.var.cookie_x_api_key + if not req_token then + return false, "missing apikey" + end + + local admin + for i, row in ipairs(admin_key) do + if req_token == row.key then + admin = row + break + end + end + + if not admin then + return false, "wrong apikey" + end + + if admin.role == "viewer" and + not viewer_methods[str_lower(get_method())] then + return false, "invalid method for role viewer" + end + + return true +end + +-- Set the `apictx` variable and check admin api token, if the check fails, the current +-- request will be interrupted and an error response will be returned. +-- +-- NOTE: This is a higher wrapper for `check_token` function. +local function set_ctx_and_check_token() + local api_ctx = {} + core.ctx.set_vars_meta(api_ctx) + ngx.ctx.api_ctx = api_ctx + + local ok, err = check_token(api_ctx) + if not ok then + core.log.warn("failed to check token: ", err) + core.response.exit(401, { error_msg = "failed to check token", description = err }) + end +end + + +local function strip_etcd_resp(data) + if type(data) == "table" + and data.header ~= nil + and data.header.revision ~= nil + and data.header.raft_term ~= nil + then + -- strip etcd data + data.header = nil + data.responses = nil + data.succeeded = nil + + if data.node then + data.node.createdIndex = nil + data.node.modifiedIndex = nil + end + + data.count = nil + data.more = nil + data.prev_kvs = nil + + if data.deleted then + -- We used to treat the type incorrectly. 
But for compatibility we follow + -- the existing type. + data.deleted = tostring(data.deleted) + end + end + + return data +end + + +local function head() + core.response.exit(200) +end + + +local function run() + set_ctx_and_check_token() + + local uri_segs = core.utils.split_uri(ngx.var.uri) + core.log.info("uri: ", core.json.delay_encode(uri_segs)) + + -- /apisix/admin/schema/route + local seg_res, seg_id = uri_segs[4], uri_segs[5] + local seg_sub_path = core.table.concat(uri_segs, "/", 6) + if seg_res == "schema" and seg_id == "plugins" then + -- /apisix/admin/schema/plugins/limit-count + seg_res, seg_id = uri_segs[5], uri_segs[6] + seg_sub_path = core.table.concat(uri_segs, "/", 7) + end + + if seg_res == "stream_routes" then + local local_conf = core.config.local_conf() + if local_conf.apisix.proxy_mode ~= "stream" and + local_conf.apisix.proxy_mode ~= "http&stream" then + core.log.warn("stream mode is disabled, can not add any stream ", + "routes") + core.response.exit(400, {error_msg = "stream mode is disabled, " .. + "can not add stream routes"}) + end + end + + if seg_res == "consumers" and #uri_segs >= 6 and uri_segs[6] == "credentials" then + seg_sub_path = seg_id .. "/" .. seg_sub_path + seg_res = uri_segs[6] + seg_id = uri_segs[7] + end + + local resource = resources[seg_res] + if not resource then + core.response.exit(404, {error_msg = "Unsupported resource type: ".. seg_res}) + end + + local method = str_lower(get_method()) + if not resource[method] then + core.response.exit(404, {error_msg = "not found"}) + end + + local req_body, err = core.request.get_body(MAX_REQ_BODY) + if err then + core.log.error("failed to read request body: ", err) + core.response.exit(400, {error_msg = "invalid request body: " .. err}) + end + + if req_body then + local data, err = core.json.decode(req_body) + if err then + core.log.error("invalid request body: ", req_body, " err: ", err) + core.response.exit(400, {error_msg = "invalid request body: " .. err, + req_body = req_body}) + end + + req_body = data + end + + local uri_args = ngx.req.get_uri_args() or {} + if uri_args.ttl then + if not tonumber(uri_args.ttl) then + core.response.exit(400, {error_msg = "invalid argument ttl: " + .. "should be a number"}) + end + end + + local code, data + if seg_res == "schema" or seg_res == "plugins" then + code, data = resource[method](seg_id, req_body, seg_sub_path, uri_args) + else + code, data = resource[method](resource, seg_id, req_body, seg_sub_path, uri_args) + end + + if code then + if method == "get" and plugin.enable_data_encryption then + if seg_res == "consumers" or seg_res == "credentials" then + utils.decrypt_params(plugin.decrypt_conf, data, core.schema.TYPE_CONSUMER) + elseif seg_res == "plugin_metadata" then + utils.decrypt_params(plugin.decrypt_conf, data, core.schema.TYPE_METADATA) + else + utils.decrypt_params(plugin.decrypt_conf, data) + end + end + + if v3_adapter.enable_v3() then + core.response.set_header("X-API-VERSION", "v3") + else + core.response.set_header("X-API-VERSION", "v2") + end + + data = v3_adapter.filter(data, resource) + data = strip_etcd_resp(data) + + core.response.exit(code, data) + end +end + + +local function get_plugins_list() + set_ctx_and_check_token() + local args = get_uri_args() + local subsystem = args["subsystem"] + -- If subsystem is passed then it should be either http or stream. + -- If it is not passed/nil then http will be default. 
+ subsystem = subsystem or "http" + if subsystem == "http" or subsystem == "stream" then + local plugins = resources.plugins.get_plugins_list(subsystem) + core.response.exit(200, plugins) + end + core.response.exit(400,"invalid subsystem passed") +end + +-- Handle unsupported request methods for the virtual "reload" plugin +local function unsupported_methods_reload_plugin() + set_ctx_and_check_token() + + core.response.exit(405, { + error_msg = "please use PUT method to reload the plugins, " + .. get_method() .. " method is not allowed." + }) +end + + +local function post_reload_plugins() + set_ctx_and_check_token() + + local success, err = events:post(reload_event, get_method(), ngx_time()) + if not success then + core.response.exit(503, err) + end + + core.response.exit(200, "done") +end + + +local function plugins_eq(old, new) + local old_set = {} + for _, p in ipairs(old) do + old_set[p.name] = p + end + + local new_set = {} + for _, p in ipairs(new) do + new_set[p.name] = p + end + + return core.table.set_eq(old_set, new_set) +end + + +local function sync_local_conf_to_etcd(reset) + local local_conf = core.config.local_conf() + + local plugins = {} + for _, name in ipairs(local_conf.plugins) do + core.table.insert(plugins, { + name = name, + }) + end + + for _, name in ipairs(local_conf.stream_plugins) do + core.table.insert(plugins, { + name = name, + stream = true, + }) + end + + if reset then + local res, err = core.etcd.get("/plugins") + if not res then + core.log.error("failed to get current plugins: ", err) + return + end + + if res.status == 404 then + -- nothing need to be reset + return + end + + if res.status ~= 200 then + core.log.error("failed to get current plugins, status: ", res.status) + return + end + + local stored_plugins = res.body.node.value + local revision = res.body.node.modifiedIndex + if plugins_eq(stored_plugins, plugins) then + core.log.info("plugins not changed, don't need to reset") + return + end + + core.log.warn("sync local conf to etcd") + + local res, err = core.etcd.atomic_set("/plugins", plugins, nil, revision) + if not res then + core.log.error("failed to set plugins: ", err) + end + + return + end + + core.log.warn("sync local conf to etcd") + + -- need to store all plugins name into one key so that it can be updated atomically + local res, err = core.etcd.set("/plugins", plugins) + if not res then + core.log.error("failed to set plugins: ", err) + end +end + + +local function reload_plugins(data, event, source, pid) + core.log.info("start to hot reload plugins") + plugin.load() + + if ngx_worker_id() == 0 then + sync_local_conf_to_etcd() + end +end + + +local function schema_validate() + local uri_segs = core.utils.split_uri(ngx.var.uri) + core.log.info("uri: ", core.json.delay_encode(uri_segs)) + + local seg_res = uri_segs[6] + local resource = resources[seg_res] + if not resource then + core.response.exit(404, {error_msg = "Unsupported resource type: ".. seg_res}) + end + + local req_body, err = core.request.get_body(MAX_REQ_BODY) + if err then + core.log.error("failed to read request body: ", err) + core.response.exit(400, {error_msg = "invalid request body: " .. err}) + end + + if req_body then + local data, err = core.json.decode(req_body) + if err then + core.log.error("invalid request body: ", req_body, " err: ", err) + core.response.exit(400, {error_msg = "invalid request body: " .. 
err, + req_body = req_body}) + end + + req_body = data + end + + local ok, err = core.schema.check(resource.schema, req_body) + if ok then + core.response.exit(200) + end + core.response.exit(400, {error_msg = err}) +end + + +local function standalone_run() + set_ctx_and_check_token() + return standalone.run() +end + + +local http_head_route = { + paths = [[/apisix/admin]], + methods = {"HEAD"}, + handler = head, +} + + +local uri_route = { + http_head_route, + { + paths = [[/apisix/admin/*]], + methods = {"GET", "PUT", "POST", "DELETE", "PATCH"}, + handler = run, + }, + { + paths = [[/apisix/admin/plugins/list]], + methods = {"GET"}, + handler = get_plugins_list, + }, + { + paths = [[/apisix/admin/schema/validate/*]], + methods = {"POST"}, + handler = schema_validate, + }, + { + paths = reload_event, + methods = {"PUT"}, + handler = post_reload_plugins, + }, + -- Handle methods other than "PUT" on "/plugin/reload" to inform user + { + paths = reload_event, + methods = { "GET", "POST", "DELETE", "PATCH" }, + handler = unsupported_methods_reload_plugin, + }, +} + + +local standalone_uri_route = { + http_head_route, + { + paths = [[/apisix/admin/configs]], + methods = {"GET", "PUT"}, + handler = standalone_run, + }, +} + + +function _M.init_worker() + local local_conf = core.config.local_conf() + if not local_conf.apisix or not local_conf.apisix.enable_admin then + return + end + + local is_yaml_config_provider = local_conf.deployment.config_provider == "yaml" + + if is_yaml_config_provider then + router = route.new(standalone_uri_route) + standalone.init_worker() + else + router = route.new(uri_route) + end + + -- register reload plugin handler + events = require("apisix.events") + events:register(reload_plugins, reload_event, "PUT") + + if ngx_worker_id() == 0 then + -- check if admin_key is required + if local_conf.deployment.admin.admin_key_required == false then + core.log.warn("Admin key is bypassed! ", + "If you are deploying APISIX in a production environment, ", + "please enable `admin_key_required` and set a secure admin key!") + end + + if is_yaml_config_provider then -- standalone mode does not need sync to etcd + return + end + + local ok, err = ngx_timer_at(0, function(premature) + if premature then + return + end + + -- try to reset the /plugins to the current configuration in the admin + sync_local_conf_to_etcd(true) + end) + + if not ok then + error("failed to sync local configure to etcd: " .. err) + end + end +end + + +function _M.get() + return router +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_config.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_config.lua new file mode 100644 index 0000000..153a7bd --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_config.lua @@ -0,0 +1,66 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local get_routes = require("apisix.router").http_routes +local resource = require("apisix.admin.resource") +local schema_plugin = require("apisix.admin.plugins").check_schema +local type = type +local tostring = tostring +local ipairs = ipairs + + +local function check_conf(id, conf, need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + local ok, err = schema_plugin(conf.plugins) + if not ok then + return nil, {error_msg = err} + end + + return true +end + + +local function delete_checker(id) + local routes, routes_ver = get_routes() + if routes_ver and routes then + for _, route in ipairs(routes) do + if type(route) == "table" and route.value + and route.value.plugin_config_id + and tostring(route.value.plugin_config_id) == id then + return 400, {error_msg = "can not delete this plugin config," + .. " route [" .. route.value.id + .. "] is still using it now"} + end + end + end + + return nil, nil +end + + +return resource.new({ + name = "plugin_configs", + kind = "plugin config", + schema = core.schema.plugin_config, + checker = check_conf, + unsupported_methods = {"post"}, + delete_checker = delete_checker +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_metadata.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_metadata.lua new file mode 100644 index 0000000..1387ca0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/plugin_metadata.lua @@ -0,0 +1,83 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local pcall = pcall +local require = require +local core = require("apisix.core") +local resource = require("apisix.admin.resource") +local encrypt_conf = require("apisix.plugin").encrypt_conf + +local injected_mark = "injected metadata_schema" + + +local function validate_plugin(name) + local pkg_name = "apisix.plugins." .. name + local ok, plugin_object = pcall(require, pkg_name) + if ok then + return true, plugin_object + end + + pkg_name = "apisix.stream.plugins." .. 
name + return pcall(require, pkg_name) +end + + +local function check_conf(plugin_name, conf) + if not plugin_name then + return nil, {error_msg = "missing plugin name"} + end + + local ok, plugin_object = validate_plugin(plugin_name) + if not ok then + return nil, {error_msg = "invalid plugin name"} + end + + if not plugin_object.metadata_schema then + plugin_object.metadata_schema = { + type = "object", + ['$comment'] = injected_mark, + properties = {}, + } + end + local schema = plugin_object.metadata_schema + + local ok, err + if schema['$comment'] == injected_mark + -- check_schema is not required. If missing, fallback to check schema directly + or not plugin_object.check_schema + then + ok, err = core.schema.check(schema, conf) + else + ok, err = plugin_object.check_schema(conf, core.schema.TYPE_METADATA) + end + + encrypt_conf(plugin_name, conf, core.schema.TYPE_METADATA) + + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + return plugin_name +end + + +return resource.new({ + name = "plugin_metadata", + kind = "plugin_metadata", + schema = core.schema.plugin_metadata, + checker = check_conf, + unsupported_methods = {"post", "patch"} +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/plugins.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/plugins.lua new file mode 100644 index 0000000..201f8f3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/plugins.lua @@ -0,0 +1,139 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local check_schema = require("apisix.plugin").check_schema +local ipairs = ipairs +local table_sort = table.sort +local table_insert = table.insert +local get_uri_args = ngx.req.get_uri_args +local plugin_get_all = require("apisix.plugin").get_all +local plugin_get_http = require("apisix.plugin").get +local plugin_get_stream = require("apisix.plugin").get_stream +local encrypt_conf = require("apisix.plugin").encrypt_conf +local pairs = pairs + +local _M = {} + + +function _M.check_schema(plugins_conf, schema_type) + local ok, err = check_schema(plugins_conf, schema_type, false) + if ok then + for name, conf in pairs(plugins_conf) do + encrypt_conf(name, conf, schema_type) + end + end + return ok, err +end + + +function _M.get(name) + local arg = get_uri_args() + -- If subsystem is passed inside args then it should be oneOf: http / stream. 
+ local subsystem = arg["subsystem"] or "http" + if subsystem ~= "http" and subsystem ~= "stream" then + return 400, {error_msg = "unsupported subsystem: "..subsystem} + end + + -- arg all to be deprecated + if (arg and arg["all"] == "true") then + core.log.warn("query parameter \"all\" will be deprecated soon.") + local http_plugins, stream_plugins = plugin_get_all({ + version = true, + priority = true, + schema = true, + metadata_schema = true, + consumer_schema = true, + type = true, + scope = true, + }) + + if arg["subsystem"] == "stream" then + return 200, stream_plugins + end + + return 200, http_plugins + end + + local plugin + + if subsystem == "http" then + plugin = plugin_get_http(name) + else + plugin = plugin_get_stream(name) + end + + if not plugin then + local err = "plugin not found in subsystem " .. subsystem + core.log.warn(err) + return 404, {error_msg = err} + end + + local json_schema = plugin.schema + if arg and arg["schema_type"] == "consumer" then + json_schema = plugin.consumer_schema + end + + if not json_schema then + return 400, {error_msg = "not found schema"} + end + + return 200, json_schema +end + + +function _M.get_plugins_list(subsystem) + local http_plugins + local stream_plugins + if subsystem == "http" then + http_plugins = core.config.local_conf().plugins + else + stream_plugins = core.config.local_conf().stream_plugins + end + + local priorities = {} + local success = {} + if http_plugins then + for i, name in ipairs(http_plugins) do + local plugin = plugin_get_http(name) + if plugin and plugin.priority then + priorities[name] = plugin.priority + table_insert(success, name) + end + end + end + + if stream_plugins then + for i, name in ipairs(stream_plugins) do + local plugin = plugin_get_stream(name) + if plugin and plugin.priority then + priorities[name] = plugin.priority + table_insert(success, name) + end + end + end + + local function cmp(x, y) + return priorities[x] > priorities[y] + end + + table_sort(success, cmp) + return success +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/proto.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/proto.lua new file mode 100644 index 0000000..f8133cc --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/proto.lua @@ -0,0 +1,111 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local type = type +local ipairs = ipairs +local core = require("apisix.core") +local resource = require("apisix.admin.resource") +local get_routes = require("apisix.router").http_routes +local get_services = require("apisix.http.service").services +local compile_proto = require("apisix.plugins.grpc-transcode.proto").compile_proto +local tostring = tostring + + +local function check_conf(id, conf, need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + local ok, err = compile_proto(conf.content) + if not ok then + return nil, {error_msg = "invalid content: " .. err} + end + + return true +end + + +local function check_proto_used(plugins, deleting, ptype, pid) + + --core.log.info("check_proto_used plugins: ", core.json.delay_encode(plugins, true)) + --core.log.info("check_proto_used deleting: ", deleting) + --core.log.info("check_proto_used ptype: ", ptype) + --core.log.info("check_proto_used pid: ", pid) + + if plugins then + if type(plugins) == "table" and plugins["grpc-transcode"] + and plugins["grpc-transcode"].proto_id + and tostring(plugins["grpc-transcode"].proto_id) == deleting then + return false, {error_msg = "can not delete this proto, " + .. ptype .. " [" .. pid + .. "] is still using it now"} + end + end + return true +end + +local function delete_checker(id) + core.log.info("proto delete: ", id) + + local routes, routes_ver = get_routes() + + core.log.info("routes: ", core.json.delay_encode(routes, true)) + core.log.info("routes_ver: ", routes_ver) + + if routes_ver and routes then + for _, route in ipairs(routes) do + core.log.info("proto delete route item: ", core.json.delay_encode(route, true)) + if type(route) == "table" and route.value and route.value.plugins then + local ret, err = check_proto_used(route.value.plugins, id, "route",route.value.id) + if not ret then + return 400, err + end + end + end + end + core.log.info("proto delete route ref check pass: ", id) + + local services, services_ver = get_services() + + core.log.info("services: ", core.json.delay_encode(services, true)) + core.log.info("services_ver: ", services_ver) + + if services_ver and services then + for _, service in ipairs(services) do + if type(service) == "table" and service.value and service.value.plugins then + local ret, err = check_proto_used(service.value.plugins, id, + "service", service.value.id) + if not ret then + return 400, err + end + end + end + end + core.log.info("proto delete service ref check pass: ", id) + + return nil, nil +end + + +return resource.new({ + name = "protos", + kind = "proto", + schema = core.schema.proto, + checker = check_conf, + unsupported_methods = {"patch"}, + delete_checker = delete_checker +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/resource.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/resource.lua new file mode 100644 index 0000000..f3a1806 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/resource.lua @@ -0,0 +1,468 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local utils = require("apisix.admin.utils") +local apisix_ssl = require("apisix.ssl") +local apisix_consumer = require("apisix.consumer") +local setmetatable = setmetatable +local tostring = tostring +local ipairs = ipairs +local type = type + + +local _M = { + list_filter_fields = {}, +} +local mt = { + __index = _M +} + + +local no_id_res = { + consumers = true, + plugin_metadata = true +} + + +local function split_typ_and_id(id, sub_path) + local uri_segs = core.utils.split_uri(sub_path) + local typ = id + local id = nil + if #uri_segs > 0 then + id = uri_segs[1] + end + return typ, id +end + + +local function check_forbidden_properties(conf, forbidden_properties) + local not_allow_properties = "the property is forbidden: " + + if conf then + for _, v in ipairs(forbidden_properties) do + if conf[v] then + return not_allow_properties .. " " .. v + end + end + + if conf.upstream then + for _, v in ipairs(forbidden_properties) do + if conf.upstream[v] then + return not_allow_properties .. " upstream." .. v + end + end + end + + if conf.plugins then + for _, v in ipairs(forbidden_properties) do + if conf.plugins[v] then + return not_allow_properties .. " plugins." .. v + end + end + end + end + + return nil +end + + +function _M:check_conf(id, conf, need_id, typ, allow_time) + if self.name == "secrets" then + id = typ .. "/" .. id + end + -- check if missing configurations + if not conf then + return nil, {error_msg = "missing configurations"} + end + + -- check id if need id + if not no_id_res[self.name] then + id = id or conf.id + if need_id and not id then + return nil, {error_msg = "missing ".. self.kind .. " id"} + end + + if not need_id and id then + return nil, {error_msg = "wrong ".. self.kind .. " id, do not need it"} + end + + if need_id and conf.id and tostring(conf.id) ~= tostring(id) then + return nil, {error_msg = "wrong ".. self.kind .. " id"} + end + + conf.id = id + end + + -- check create time and update time + if not allow_time then + local forbidden_properties = {"create_time", "update_time"} + local err = check_forbidden_properties(conf, forbidden_properties) + if err then + return nil, {error_msg = err} + end + end + + core.log.info("conf : ", core.json.delay_encode(conf)) + + -- check the resource own rules + if self.name ~= "secrets" then + core.log.info("schema: ", core.json.delay_encode(self.schema)) + end + + local ok, err = self.checker(id, conf, need_id, self.schema, typ) + + if not ok then + return ok, err + else + if no_id_res[self.name] then + return ok + else + return need_id and id or true + end + end +end + + +function _M:get(id, conf, sub_path) + if core.table.array_find(self.unsupported_methods, "get") then + return 405, {error_msg = "not supported `GET` method for " .. self.kind} + end + + local key = "/" .. self.name + local typ = nil + if self.name == "secrets" then + key = key .. "/" + typ, id = split_typ_and_id(id, sub_path) + end + + if id then + if self.name == "secrets" then + key = key .. typ + end + key = key .. "/" .. 
id + end + + -- some resources(consumers) have sub resources(credentials), + -- the key format of sub resources will differ from the main resource + if self.get_resource_etcd_key then + key = self.get_resource_etcd_key(id, conf, sub_path) + end + + local res, err = core.etcd.get(key, not id) + if not res then + core.log.error("failed to get ", self.kind, "[", key, "] from etcd: ", err) + return 503, {error_msg = err} + end + + if self.name == "ssls" then + -- not return private key for security + if res.body and res.body.node and res.body.node.value then + res.body.node.value.key = nil + end + end + + -- consumers etcd range response will include credentials, so need to filter out them + if self.name == "consumers" and res.body.list then + res.body.list = apisix_consumer.filter_consumers_list(res.body.list) + res.body.total = #res.body.list + end + + utils.fix_count(res.body, id) + return res.status, res.body +end + + +function _M:post(id, conf, sub_path, args) + if core.table.array_find(self.unsupported_methods, "post") then + return 405, {error_msg = "not supported `POST` method for " .. self.kind} + end + + local id, err = self:check_conf(id, conf, false) + if not id then + return 400, err + end + + if self.name == "ssls" then + -- encrypt private key + conf.key = apisix_ssl.aes_encrypt_pkey(conf.key) + + if conf.keys then + for i = 1, #conf.keys do + conf.keys[i] = apisix_ssl.aes_encrypt_pkey(conf.keys[i]) + end + end + end + + local key = "/" .. self.name + utils.inject_timestamp(conf) + + local ttl = nil + if args then + ttl = args.ttl + end + + local res, err = core.etcd.push(key, conf, ttl) + if not res then + core.log.error("failed to post ", self.kind, "[", key, "] to etcd: ", err) + return 503, {error_msg = err} + end + + return res.status, res.body +end + + +function _M:put(id, conf, sub_path, args) + if core.table.array_find(self.unsupported_methods, "put") then + return 405, {error_msg = "not supported `PUT` method for " .. self.kind} + end + + local key = "/" .. self.name + local typ = nil + if self.name == "secrets" then + typ, id = split_typ_and_id(id, sub_path) + key = key .. "/" .. typ + end + + local need_id = not no_id_res[self.name] + local ok, err = self:check_conf(id, conf, need_id, typ) + if not ok then + return 400, err + end + + if self.name ~= "secrets" then + id = ok + end + + if self.name == "ssls" then + -- encrypt private key + conf.key = apisix_ssl.aes_encrypt_pkey(conf.key) + + if conf.keys then + for i = 1, #conf.keys do + conf.keys[i] = apisix_ssl.aes_encrypt_pkey(conf.keys[i]) + end + end + end + + key = key .. "/" .. 
id + + if self.get_resource_etcd_key then + key = self.get_resource_etcd_key(id, conf, sub_path, args) + end + + if self.name == "credentials" then + local consumer_key = apisix_consumer.get_consumer_key_from_credential_key(key) + local res, err = core.etcd.get(consumer_key, false) + if not res then + return 503, {error_msg = err} + end + if res.status == 404 then + return res.status, {error_msg = "consumer not found"} + end + if res.status ~= 200 then + core.log.debug("failed to get consumer for the credential, credential key: ", key, + ", consumer key: ", consumer_key, ", res.status: ", res.status) + return res.status, {error_msg = "failed to get the consumer"} + end + end + + if self.name ~= "plugin_metadata" then + local ok, err = utils.inject_conf_with_prev_conf(self.kind, key, conf) + if not ok then + return 503, {error_msg = err} + end + else + conf.id = id + end + + local ttl = nil + if args then + ttl = args.ttl + end + + local res, err = core.etcd.set(key, conf, ttl) + if not res then + core.log.error("failed to put ", self.kind, "[", key, "] to etcd: ", err) + return 503, {error_msg = err} + end + + return res.status, res.body +end + +-- Keep the unused conf to make the args list consistent with other methods +function _M:delete(id, conf, sub_path, uri_args) + if core.table.array_find(self.unsupported_methods, "delete") then + return 405, {error_msg = "not supported `DELETE` method for " .. self.kind} + end + + local key = "/" .. self.name + local typ = nil + if self.name == "secrets" then + typ, id = split_typ_and_id(id, sub_path) + end + + if not id then + return 400, {error_msg = "missing " .. self.kind .. " id"} + end + + -- core.log.error("failed to delete ", self.kind, "[", key, "] in etcd: ", err) + + if self.name == "secrets" then + key = key .. "/" .. typ + end + + key = key .. "/" .. id + + if self.get_resource_etcd_key then + key = self.get_resource_etcd_key(id, conf, sub_path, uri_args) + end + + if self.delete_checker and uri_args.force ~= "true" then + local code, err = self.delete_checker(id) + if err then + return code, err + end + end + + if self.name == "consumers" then + local res, err = core.etcd.rmdir(key .. "/credentials/") + if not res then + return 503, {error_msg = err} + end + end + + local res, err = core.etcd.delete(key) + if not res then + core.log.error("failed to delete ", self.kind, "[", key, "] in etcd: ", err) + return 503, {error_msg = err} + end + + return res.status, res.body +end + + +function _M:patch(id, conf, sub_path, args) + if core.table.array_find(self.unsupported_methods, "patch") then + return 405, {error_msg = "not supported `PATCH` method for " .. self.kind} + end + + local key = "/" .. self.name + local typ = nil + if self.name == "secrets" then + local uri_segs = core.utils.split_uri(sub_path) + if #uri_segs < 1 then + return 400, {error_msg = "no secret id"} + end + typ = id + id = uri_segs[1] + sub_path = core.table.concat(uri_segs, "/", 2) + end + + if not id then + return 400, {error_msg = "missing " .. self.kind .. " id"} + end + + if self.name == "secrets" then + key = key .. "/" .. typ + end + + key = key .. "/" .. 
id + + if conf == nil then + return 400, {error_msg = "missing new configuration"} + end + + if not sub_path or sub_path == "" then + if type(conf) ~= "table" then + return 400, {error_msg = "invalid configuration"} + end + end + + local res_old, err = core.etcd.get(key) + if not res_old then + core.log.error("failed to get ", self.kind, " [", key, "] in etcd: ", err) + return 503, {error_msg = err} + end + + if res_old.status ~= 200 then + return res_old.status, res_old.body + end + core.log.info("key: ", key, " old value: ", core.json.delay_encode(res_old, true)) + + local node_value = res_old.body.node.value + local modified_index = res_old.body.node.modifiedIndex + + if sub_path and sub_path ~= "" then + if self.name == "ssls" then + if sub_path == "key" then + conf = apisix_ssl.aes_encrypt_pkey(conf) + elseif sub_path == "keys" then + for i = 1, #conf do + conf[i] = apisix_ssl.aes_encrypt_pkey(conf[i]) + end + end + end + local code, err, node_val = core.table.patch(node_value, sub_path, conf) + node_value = node_val + if code then + return code, {error_msg = err} + end + utils.inject_timestamp(node_value, nil, true) + else + if self.name == "ssls" then + if conf.key then + conf.key = apisix_ssl.aes_encrypt_pkey(conf.key) + end + + if conf.keys then + for i = 1, #conf.keys do + conf.keys[i] = apisix_ssl.aes_encrypt_pkey(conf.keys[i]) + end + end + end + node_value = core.table.merge(node_value, conf) + utils.inject_timestamp(node_value, nil, conf) + end + + core.log.info("new conf: ", core.json.delay_encode(node_value, true)) + + local ok, err = self:check_conf(id, node_value, true, typ, true) + if not ok then + return 400, err + end + + local ttl = nil + if args then + ttl = args.ttl + end + + local res, err = core.etcd.atomic_set(key, node_value, ttl, modified_index) + if not res then + core.log.error("failed to set new ", self.kind, "[", key, "] to etcd: ", err) + return 503, {error_msg = err} + end + + return res.status, res.body +end + + +function _M.new(opt) + return setmetatable(opt, mt) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/routes.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/routes.lua new file mode 100644 index 0000000..e13bb23 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/routes.lua @@ -0,0 +1,184 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local expr = require("resty.expr.v1") +local core = require("apisix.core") +local apisix_upstream = require("apisix.upstream") +local resource = require("apisix.admin.resource") +local schema_plugin = require("apisix.admin.plugins").check_schema +local type = type +local loadstring = loadstring +local ipairs = ipairs +local jp = require("jsonpath") + +local function validate_post_arg(node) + if type(node) ~= "table" then + return true + end + + -- Handle post_arg conditions + if #node >= 3 and type(node[1]) == "string" and node[1]:find("^post_arg%.") then + local key = node[1] + local json_path = "$." .. key:sub(11) -- Remove "post_arg." prefix + local _, err = jp.parse(json_path) + if err then + return false, err + end + return true + end + + for _, child in ipairs(node) do + local ok, err = validate_post_arg(child) + if not ok then + return false, err + end + end + return true +end + + +local function check_conf(id, conf, need_id, schema) + if conf.host and conf.hosts then + return nil, {error_msg = "only one of host or hosts is allowed"} + end + + if conf.remote_addr and conf.remote_addrs then + return nil, {error_msg = "only one of remote_addr or remote_addrs is " + .. "allowed"} + end + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + local upstream_conf = conf.upstream + if upstream_conf then + local ok, err = apisix_upstream.check_upstream_conf(upstream_conf) + if not ok then + return nil, {error_msg = err} + end + end + + local upstream_id = conf.upstream_id + if upstream_id then + local key = "/upstreams/" .. upstream_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch upstream info by " + .. "upstream id [" .. upstream_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch upstream info by " + .. "upstream id [" .. upstream_id .. "], " + .. "response code: " .. res.status} + end + end + + local service_id = conf.service_id + if service_id then + local key = "/services/" .. service_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch service info by " + .. "service id [" .. service_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch service info by " + .. "service id [" .. service_id .. "], " + .. "response code: " .. res.status} + end + end + + local plugin_config_id = conf.plugin_config_id + if plugin_config_id then + local key = "/plugin_configs/" .. plugin_config_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch plugin config info by " + .. "plugin config id [" .. plugin_config_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch plugin config info by " + .. "plugin config id [" .. plugin_config_id .. "], " + .. "response code: " .. res.status} + end + end + + if conf.plugins then + local ok, err = schema_plugin(conf.plugins) + if not ok then + return nil, {error_msg = err} + end + end + + if conf.vars then + ok, err = expr.new(conf.vars) + if not ok then + return nil, {error_msg = "failed to validate the 'vars' expression: " .. err} + end + end + + ok, err = validate_post_arg(conf.vars) + if not ok then + return nil, {error_msg = "failed to validate the 'vars' expression: " .. + err} + end + + if conf.filter_func then + local func, err = loadstring("return " .. 
conf.filter_func) + if not func then + return nil, {error_msg = "failed to load 'filter_func' string: " + .. err} + end + + if type(func()) ~= "function" then + return nil, {error_msg = "'filter_func' should be a function"} + end + end + + if conf.script then + local obj, err = loadstring(conf.script) + if not obj then + return nil, {error_msg = "failed to load 'script' string: " + .. err} + end + + if type(obj()) ~= "table" then + return nil, {error_msg = "'script' should be a Lua object"} + end + end + + return true +end + + +return resource.new({ + name = "routes", + kind = "route", + schema = core.schema.route, + checker = check_conf, + list_filter_fields = { + service_id = true, + upstream_id = true, + }, +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/schema.lua new file mode 100644 index 0000000..73d9e78 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/schema.lua @@ -0,0 +1,35 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") + +local _M = { + version = 0.1, +} + + +function _M.get(name) + local json_schema = core.schema[name] + core.log.info("schema: ", core.json.delay_encode(core.schema, true)) + if not json_schema then + return 400, {error_msg = "not found schema: " .. name} + end + + return 200, json_schema +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/secrets.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/secrets.lua new file mode 100644 index 0000000..b149ef0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/secrets.lua @@ -0,0 +1,45 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require + +local core = require("apisix.core") +local resource = require("apisix.admin.resource") + +local pcall = pcall + + +local function check_conf(id, conf, need_id, schema, typ) + local ok, secret_manager = pcall(require, "apisix.secret." .. 
typ) + if not ok then + return false, {error_msg = "invalid secret manager: " .. typ} + end + + local ok, err = core.schema.check(secret_manager.schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + return true +end + + +return resource.new({ + name = "secrets", + kind = "secret", + checker = check_conf, + unsupported_methods = {"post"} +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/services.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/services.lua new file mode 100644 index 0000000..4218b77 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/services.lua @@ -0,0 +1,128 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local get_routes = require("apisix.router").http_routes +local get_stream_routes = require("apisix.router").stream_routes +local apisix_upstream = require("apisix.upstream") +local resource = require("apisix.admin.resource") +local schema_plugin = require("apisix.admin.plugins").check_schema +local tostring = tostring +local ipairs = ipairs +local type = type +local loadstring = loadstring + + +local function check_conf(id, conf, need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + if need_id and not id then + return nil, {error_msg = "wrong type of service id"} + end + + local upstream_conf = conf.upstream + if upstream_conf then + local ok, err = apisix_upstream.check_upstream_conf(upstream_conf) + if not ok then + return nil, {error_msg = err} + end + end + + local upstream_id = conf.upstream_id + if upstream_id then + local key = "/upstreams/" .. upstream_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch upstream info by " + .. "upstream id [" .. upstream_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch upstream info by " + .. "upstream id [" .. upstream_id .. "], " + .. "response code: " .. res.status} + end + end + + if conf.plugins then + local ok, err = schema_plugin(conf.plugins) + if not ok then + return nil, {error_msg = err} + end + end + + if conf.script then + local obj, err = loadstring(conf.script) + if not obj then + return nil, {error_msg = "failed to load 'script' string: " + .. 
err} + end + + if type(obj()) ~= "table" then + return nil, {error_msg = "'script' should be a Lua object"} + end + end + + return true +end + + +local function delete_checker(id) + local routes, routes_ver = get_routes() + core.log.info("routes: ", core.json.delay_encode(routes, true)) + core.log.info("routes_ver: ", routes_ver) + if routes_ver and routes then + for _, route in ipairs(routes) do + if type(route) == "table" and route.value + and route.value.service_id + and tostring(route.value.service_id) == id then + return 400, {error_msg = "can not delete this service directly," + .. " route [" .. route.value.id + .. "] is still using it now"} + end + end + end + + local stream_routes, stream_routes_ver = get_stream_routes() + core.log.info("stream_routes: ", core.json.delay_encode(stream_routes, true)) + core.log.info("stream_routes_ver: ", stream_routes_ver) + if stream_routes_ver and stream_routes then + for _, route in ipairs(stream_routes) do + if type(route) == "table" and route.value + and route.value.service_id + and tostring(route.value.service_id) == id then + return 400, {error_msg = "can not delete this service directly," + .. " stream_route [" .. route.value.id + .. "] is still using it now"} + end + end + end + + return nil, nil +end + + +return resource.new({ + name = "services", + kind = "service", + schema = core.schema.service, + checker = check_conf, + delete_checker = delete_checker +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/ssl.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/ssl.lua new file mode 100644 index 0000000..d13d08f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/ssl.lua @@ -0,0 +1,37 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local resource = require("apisix.admin.resource") +local apisix_ssl = require("apisix.ssl") + + +local function check_conf(id, conf, need_id, schema) + local ok, err = apisix_ssl.check_ssl_conf(false, conf) + if not ok then + return nil, {error_msg = err} + end + + return need_id and id or true +end + + +return resource.new({ + name = "ssls", + kind = "ssl", + schema = core.schema.ssl, + checker = check_conf +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/standalone.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/standalone.lua new file mode 100644 index 0000000..0d17b15 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/standalone.lua @@ -0,0 +1,339 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local type = type +local pairs = pairs +local ipairs = ipairs +local str_lower = string.lower +local ngx = ngx +local get_method = ngx.req.get_method +local shared_dict = ngx.shared["standalone-config"] +local table_insert = table.insert +local table_new = require("table.new") +local yaml = require("lyaml") +local events = require("apisix.events") +local core = require("apisix.core") +local config_yaml = require("apisix.core.config_yaml") +local check_schema = require("apisix.core.schema").check +local tbl_deepcopy = require("apisix.core.table").deepcopy + +local EVENT_UPDATE = "standalone-api-configuration-update" + +local _M = {} + +local function check_duplicate(item, key, id_set) + local identifier, identifier_type + if key == "consumers" then + identifier = item.id or item.username + identifier_type = item.id and "credential id" or "username" + else + identifier = item.id + identifier_type = "id" + end + + if id_set[identifier] then + return true, "found duplicate " .. identifier_type .. " " .. identifier .. " in " .. key + end + id_set[identifier] = true + return false +end + +local function get_config() + local config = shared_dict:get("config") + if not config then + return nil, "not found" + end + + local err + config, err = core.json.decode(config) + if not config then + return nil, "failed to decode json: " .. err + end + return config +end + + +local function update_and_broadcast_config(apisix_yaml) + local raw, err = core.json.encode(apisix_yaml) + if not raw then + core.log.error("failed to encode json: ", err) + return nil, "failed to encode json: " .. err + end + + if shared_dict then + -- the worker that handles Admin API calls is responsible for writing the shared dict + local ok, err = shared_dict:set("config", raw) + if not ok then + return nil, "failed to save config to shared dict: " .. err + end + core.log.info("standalone config updated: ", raw) + else + core.log.crit(config_yaml.ERR_NO_SHARED_DICT) + end + return events:post(EVENT_UPDATE, EVENT_UPDATE) +end + + +local function update(ctx) + local content_type = core.request.header(nil, "content-type") or "application/json" + + -- read the request body + local req_body, err = core.request.get_body() + if err then + return core.response.exit(400, {error_msg = "invalid request body: " .. err}) + end + + if not req_body or #req_body <= 0 then + return core.response.exit(400, {error_msg = "invalid request body: empty request body"}) + end + + -- parse the request body + local data + if core.string.has_prefix(content_type, "application/yaml") then + data = yaml.load(req_body, { all = false }) + if not data or type(data) ~= "table" then + err = "invalid yaml request body" + end + else + data, err = core.json.decode(req_body) + end + if err then + core.log.error("invalid request body: ", req_body, " err: ", err) + core.response.exit(400, {error_msg = "invalid request body: " .. 
err}) + end + req_body = data + + local config, err = get_config() + if not config then + if err ~= "not found" then + core.log.error("failed to get config from shared dict: ", err) + return core.response.exit(500, { + error_msg = "failed to get config from shared dict: " .. err + }) + end + end + + -- check input by jsonschema + local apisix_yaml = {} + local created_objs = config_yaml.fetch_all_created_obj() + + for key, obj in pairs(created_objs) do + local conf_version_key = obj.conf_version_key + local conf_version = config and config[conf_version_key] or obj.conf_version + local items = req_body[key] + local new_conf_version = req_body[conf_version_key] + if not new_conf_version then + new_conf_version = conf_version + 1 + else + if type(new_conf_version) ~= "number" then + return core.response.exit(400, { + error_msg = conf_version_key .. " must be a number", + }) + end + if new_conf_version < conf_version then + return core.response.exit(400, { + error_msg = conf_version_key .. + " must be greater than or equal to (" .. conf_version .. ")", + }) + end + end + + apisix_yaml[conf_version_key] = new_conf_version + if new_conf_version == conf_version then + apisix_yaml[key] = config and config[key] + elseif items and #items > 0 then + apisix_yaml[key] = table_new(#items, 0) + local item_schema = obj.item_schema + local item_checker = obj.checker + local id_set = {} + + for index, item in ipairs(items) do + local item_temp = tbl_deepcopy(item) + local valid, err + -- need to recover to 0-based subscript + local err_prefix = "invalid " .. key .. " at index " .. (index - 1) .. ", err: " + if item_schema then + valid, err = check_schema(obj.item_schema, item_temp) + if not valid then + core.log.error(err_prefix, err) + core.response.exit(400, {error_msg = err_prefix .. err}) + end + end + if item_checker then + local item_checker_key + if item.id then + -- credential need to check key + item_checker_key = "/" .. key .. "/" .. item_temp.id + end + valid, err = item_checker(item_temp, item_checker_key) + if not valid then + core.log.error(err_prefix, err) + core.response.exit(400, {error_msg = err_prefix .. err}) + end + end + -- prevent updating resource with the same ID + -- (e.g., service ID or other resource IDs) in a single request + local duplicated, err = check_duplicate(item, key, id_set) + if duplicated then + core.log.error(err) + core.response.exit(400, { error_msg = err }) + end + + table_insert(apisix_yaml[key], item) + end + end + end + + local ok, err = update_and_broadcast_config(apisix_yaml) + if not ok then + core.response.exit(500, err) + end + + return core.response.exit(202) +end + + +local function get(ctx) + local accept = core.request.header(nil, "accept") or "application/json" + local want_yaml_resp = core.string.has_prefix(accept, "application/yaml") + + local config, err = get_config() + if not config then + if err ~= "not found" then + core.log.error("failed to get config from shared dict: ", err) + return core.response.exit(500, { + error_msg = "failed to get config from shared dict: " .. err + }) + end + config = {} + local created_objs = config_yaml.fetch_all_created_obj() + for _, obj in pairs(created_objs) do + config[obj.conf_version_key] = obj.conf_version + end + end + + local resp, err + if want_yaml_resp then + core.response.set_header("Content-Type", "application/yaml") + resp = yaml.dump({ config }) + if not resp then + err = "failed to encode yaml" + end + + -- remove the first line "---" and the last line "..." 
+ -- because the yaml.dump() will add them for multiple documents + local m = ngx.re.match(resp, [[^---\s*([\s\S]*?)\s*\.\.\.\s*$]], "jo") + if m and m[1] then + resp = m[1] + end + else + core.response.set_header("Content-Type", "application/json") + resp, err = core.json.encode(config, true) + if not resp then + err = "failed to encode json: " .. err + end + end + + if not resp then + return core.response.exit(500, {error_msg = err}) + end + return core.response.exit(200, resp) +end + + +function _M.run() + local ctx = ngx.ctx.api_ctx + local method = str_lower(get_method()) + if method == "put" then + return update(ctx) + else + return get(ctx) + end +end + + +local patch_schema +do + local resource_schema = { + "proto", + "global_rule", + "route", + "service", + "upstream", + "consumer", + "consumer_group", + "credential", + "ssl", + "plugin_config", + } + local function attach_modifiedIndex_schema(name) + local schema = core.schema[name] + if not schema then + core.log.error("schema for ", name, " not found") + return + end + if schema.properties and not schema.properties.modifiedIndex then + schema.properties.modifiedIndex = { + type = "integer", + } + end + end + + local function patch_credential_schema() + local credential_schema = core.schema["credential"] + if credential_schema and credential_schema.properties then + credential_schema.properties.id = { + type = "string", + minLength = 15, + maxLength = 128, + pattern = [[^[a-zA-Z0-9-_]+/credentials/[a-zA-Z0-9-_.]+$]], + } + end + end + + function patch_schema() + -- attach modifiedIndex schema to all resource schemas + for _, name in ipairs(resource_schema) do + attach_modifiedIndex_schema(name) + end + -- patch credential schema + patch_credential_schema() + end +end + + +function _M.init_worker() + local function update_config() + local config, err = shared_dict:get("config") + if not config then + core.log.error("failed to get config from shared dict: ", err) + return + end + + config, err = core.json.decode(config) + if not config then + core.log.error("failed to decode json: ", err) + return + end + config_yaml._update_config(config) + end + events:register(update_config, EVENT_UPDATE, EVENT_UPDATE) + + patch_schema() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/stream_routes.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/stream_routes.lua new file mode 100644 index 0000000..699b0aa --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/stream_routes.lua @@ -0,0 +1,81 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local resource = require("apisix.admin.resource") +local stream_route_checker = require("apisix.stream.router.ip_port").stream_route_checker + + +local function check_conf(id, conf, need_id, schema) + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, {error_msg = "invalid configuration: " .. err} + end + + local upstream_id = conf.upstream_id + if upstream_id then + local key = "/upstreams/" .. upstream_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch upstream info by " + .. "upstream id [" .. upstream_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch upstream info by " + .. "upstream id [" .. upstream_id .. "], " + .. "response code: " .. res.status} + end + end + + local service_id = conf.service_id + if service_id then + local key = "/services/" .. service_id + local res, err = core.etcd.get(key) + if not res then + return nil, {error_msg = "failed to fetch service info by " + .. "service id [" .. service_id .. "]: " + .. err} + end + + if res.status ~= 200 then + return nil, {error_msg = "failed to fetch service info by " + .. "service id [" .. service_id .. "], " + .. "response code: " .. res.status} + end + end + + local ok, err = stream_route_checker(conf, true) + if not ok then + return nil, {error_msg = err} + end + + return true +end + + +return resource.new({ + name = "stream_routes", + kind = "stream route", + schema = core.schema.stream_route, + checker = check_conf, + unsupported_methods = { "patch" }, + list_filter_fields = { + service_id = true, + upstream_id = true, + }, +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/upstreams.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/upstreams.lua new file mode 100644 index 0000000..6c04d93 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/upstreams.lua @@ -0,0 +1,134 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local config_util = require("apisix.core.config_util") +local get_routes = require("apisix.router").http_routes +local get_services = require("apisix.http.service").services +local get_plugin_configs = require("apisix.plugin_config").plugin_configs +local get_consumers = require("apisix.consumer").consumers +local get_consumer_groups = require("apisix.consumer_group").consumer_groups +local get_global_rules = require("apisix.global_rules").global_rules +local apisix_upstream = require("apisix.upstream") +local resource = require("apisix.admin.resource") +local tostring = tostring +local ipairs = ipairs + + +local function check_conf(id, conf, need_id) + local ok, err = apisix_upstream.check_upstream_conf(conf) + if not ok then + return nil, {error_msg = err} + end + + return true +end + + +local function up_id_in_plugins(plugins, up_id) + if plugins and plugins["traffic-split"] + and plugins["traffic-split"].rules then + + for _, rule in ipairs(plugins["traffic-split"].rules) do + local plugin_upstreams = rule.weighted_upstreams + for _, plugin_upstream in ipairs(plugin_upstreams) do + if plugin_upstream.upstream_id + and tostring(plugin_upstream.upstream_id) == up_id then + return true + end + end + end + + return false + end +end + + +local function check_resources_reference(resources, up_id, + only_check_plugin, resources_name) + if resources then + for _, resource in config_util.iterate_values(resources) do + if resource and resource.value then + if up_id_in_plugins(resource.value.plugins, up_id) then + return {error_msg = "can not delete this upstream," + .. " plugin in " + .. resources_name .. " [" + .. resource.value.id + .. "] is still using it now"} + end + + if not only_check_plugin and resource.value.upstream_id + and tostring(resource.value.upstream_id) == up_id then + return {error_msg = "can not delete this upstream, " + .. resources_name .. " [" .. resource.value.id + .. 
"] is still using it now"} + end + end + end + end +end + + +local function delete_checker(id) + local routes = get_routes() + local err_msg = check_resources_reference(routes, id, false, "route") + if err_msg then + return 400, err_msg + end + + local services, services_ver = get_services() + core.log.info("services: ", core.json.delay_encode(services, true)) + core.log.info("services_ver: ", services_ver) + local err_msg = check_resources_reference(services, id, false, "service") + if err_msg then + return 400, err_msg + end + + local plugin_configs = get_plugin_configs() + local err_msg = check_resources_reference(plugin_configs, id, true, "plugin_config") + if err_msg then + return 400, err_msg + end + + local consumers = get_consumers() + local err_msg = check_resources_reference(consumers, id, true, "consumer") + if err_msg then + return 400, err_msg + end + + local consumer_groups = get_consumer_groups() + local err_msg = check_resources_reference(consumer_groups, id, true, "consumer_group") + if err_msg then + return 400, err_msg + end + + local global_rules = get_global_rules() + err_msg = check_resources_reference(global_rules, id, true, "global_rules") + if err_msg then + return 400, err_msg + end + + return nil, nil +end + + +return resource.new({ + name = "upstreams", + kind = "upstream", + schema = core.schema.upstream, + checker = check_conf, + delete_checker = delete_checker +}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/utils.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/utils.lua new file mode 100644 index 0000000..eee2787 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/utils.lua @@ -0,0 +1,113 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx_time = ngx.time +local tonumber = tonumber +local ipairs = ipairs +local pairs = pairs + + +local _M = {} + + +local function inject_timestamp(conf, prev_conf, patch_conf) + if not conf.create_time then + if prev_conf and (prev_conf.node or prev_conf.list).value.create_time then + conf.create_time = (prev_conf.node or prev_conf.list).value.create_time + else + -- As we don't know existent data's create_time, we have to pretend + -- they are created now. + conf.create_time = ngx_time() + end + end + + if not conf.update_time or + -- For PATCH request, the modification is passed as 'patch_conf' + -- If the sub path is used, the 'patch_conf' will be a placeholder `true` + (patch_conf and (patch_conf == true or patch_conf.update_time == nil)) + then + -- reset the update_time if: + -- 1. PATCH request, with sub path + -- 2. PATCH request, update_time not given + -- 3. 
Other request, update_time not given + conf.update_time = ngx_time() + end +end +_M.inject_timestamp = inject_timestamp + + +function _M.inject_conf_with_prev_conf(kind, key, conf) + local res, err = core.etcd.get(key) + if not res or (res.status ~= 200 and res.status ~= 404) then + core.log.error("failed to get " .. kind .. "[", key, "] from etcd: ", err or res.status) + return nil, err + end + + if res.status == 404 then + inject_timestamp(conf) + else + inject_timestamp(conf, res.body) + end + + return true +end + + +-- fix_count makes the "count" field returned by etcd reasonable +function _M.fix_count(body, id) + if body.count then + if not id then + -- remove the count of placeholder (init_dir) + body.count = tonumber(body.count) - 1 + else + body.count = tonumber(body.count) + end + end +end + + +function _M.decrypt_params(decrypt_func, body, schema_type) + -- list + if body.list then + for _, route in ipairs(body.list) do + if route.value and route.value.plugins then + for name, conf in pairs(route.value.plugins) do + decrypt_func(name, conf, schema_type) + end + end + end + return + end + + -- node + local plugins = body.node and body.node.value + and body.node.value.plugins + + if plugins then + for name, conf in pairs(plugins) do + decrypt_func(name, conf, schema_type) + end + end + + -- metadata + if schema_type == core.schema.TYPE_METADATA then + local conf = body.node and body.node.value + decrypt_func(conf.name, conf, schema_type) + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/admin/v3_adapter.lua b/CloudronPackages/APISIX/apisix-source/apisix/admin/v3_adapter.lua new file mode 100644 index 0000000..8941eef --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/admin/v3_adapter.lua @@ -0,0 +1,249 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local type = type +local pairs = pairs +local tonumber = tonumber +local ngx = ngx +local re_find = ngx.re.find +local fetch_local_conf = require("apisix.core.config_local").local_conf +local try_read_attr = require("apisix.core.table").try_read_attr +local deepcopy = require("apisix.core.table").deepcopy +local log = require("apisix.core.log") +local request = require("apisix.core.request") +local response = require("apisix.core.response") +local table = require("apisix.core.table") + +local _M = {} + + +local admin_api_version +local function enable_v3() + if admin_api_version then + if admin_api_version == "v3" then + return true + end + + if admin_api_version == "default" then + return false + end + end + + local local_conf, err = fetch_local_conf() + if not local_conf then + admin_api_version = "default" + log.error("failed to fetch local conf: ", err) + return false + end + + local api_ver = try_read_attr(local_conf, "deployment", "admin", "admin_api_version") + if api_ver ~= "v3" then + admin_api_version = "default" + return false + end + + admin_api_version = api_ver + return true +end +_M.enable_v3 = enable_v3 + + +function _M.to_v3(body, action) + if not enable_v3() then + body.action = action + end +end + + +function _M.to_v3_list(body) + if not enable_v3() then + return + end + + if body.node.dir then + body.list = body.node.nodes + body.node = nil + end +end + + +local function sort(l, r) + return l.createdIndex < r.createdIndex +end + + +local function pagination(body, args) + args.page = tonumber(args.page) + args.page_size = tonumber(args.page_size) + if not args.page or not args.page_size then + return + end + + if args.page_size < 10 or args.page_size > 500 then + return response.exit(400, "page_size must be between 10 and 500") + end + + if not args.page or args.page < 1 then + -- default page is 1 + args.page = 1 + end + + local list = body.list + + -- sort nodes by there createdIndex + table.sort(list, sort) + + local to = args.page * args.page_size + local from = to - args.page_size + 1 + + local res = table.new(20, 0) + + for i = from, to do + if list[i] then + res[i - from + 1] = list[i] + end + end + + body.list = res +end + + +local function _filter(item, args, resource) + if not args.filter then + return true + end + + local filters, err = ngx.decode_args(args.filter or "", 100) + if not filters then + log.error("failed to decode filter args: ", err) + return false + end + + for key, value in pairs(filters) do + if not resource.list_filter_fields[key] then + log.warn("filter field '", key, "' is not supported by resource: ", resource.name) + goto CONTINUE + end + + if not item[key] then + return false + end + + if type(value) == "table" then + value = value[#value] -- get the last value in the table + end + + if item[key] ~= value then + return false + end + + ::CONTINUE:: + end + + return true +end + + +local function filter(body, args, resource) + for i = #body.list, 1, -1 do + local name_matched = true + local label_matched = true + local uri_matched = true + if args.name then + name_matched = false + local matched = re_find(body.list[i].value.name, args.name, "jo") + if matched then + name_matched = true + end + end + + if args.label then + label_matched = false + if body.list[i].value.labels then + for k, _ in pairs(body.list[i].value.labels) do + if k == args.label then + label_matched = true + break + end + end + end + end + + if args.uri then + uri_matched = false + if body.list[i].value.uri then + local matched = re_find(body.list[i].value.uri, 
args.uri, "jo") + if matched then + uri_matched = true + end + end + + if body.list[i].value.uris then + for _, uri in pairs(body.list[i].value.uris) do + if re_find(uri, args.uri, "jo") then + uri_matched = true + break + end + end + end + end + + if not name_matched or not label_matched or not uri_matched + or not _filter(body.list[i].value, args, resource) then + table.remove(body.list, i) + end + end +end + + +function _M.filter(body, resource) + if not enable_v3() then + return body + end + + local args = request.get_uri_args() + local processed_body = deepcopy(body) + + if processed_body.deleted then + processed_body.node = nil + end + + -- strip node wrapping for single query, create, and update scenarios. + if processed_body.node then + processed_body = processed_body.node + end + + -- filter and paging logic for list query only + if processed_body.list then + filter(processed_body, args, resource) + + -- calculate the total amount of filtered data + processed_body.total = processed_body.list and #processed_body.list or 0 + + pagination(processed_body, args) + + -- remove the count field returned by etcd + -- we don't need a field that reflects the length of the currently returned data, + -- it doesn't make sense + processed_body.count = nil + end + + return processed_body +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/api_router.lua b/CloudronPackages/APISIX/apisix-source/apisix/api_router.lua new file mode 100644 index 0000000..9fbf328 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/api_router.lua @@ -0,0 +1,116 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local router = require("apisix.utils.router") +local plugin_mod = require("apisix.plugin") +local core = require("apisix.core") +local ipairs = ipairs +local ngx_header = ngx.header +local type = type + + +local _M = {} +local match_opts = {} +local has_route_not_under_apisix + + +local fetch_api_router +do + local routes = {} +function fetch_api_router() + core.table.clear(routes) + + has_route_not_under_apisix = false + + for _, plugin in ipairs(plugin_mod.plugins) do + local api_fun = plugin.api + if api_fun then + local api_routes = api_fun() + core.log.debug("fetched api routes: ", + core.json.delay_encode(api_routes, true)) + for _, route in ipairs(api_routes) do + if route.uri == nil then + core.log.error("got nil uri in api route: ", + core.json.delay_encode(route, true)) + break + end + + local typ_uri = type(route.uri) + if not has_route_not_under_apisix then + if typ_uri == "string" then + if not core.string.has_prefix(route.uri, "/apisix/") then + has_route_not_under_apisix = true + end + else + for _, uri in ipairs(route.uri) do + if not core.string.has_prefix(uri, "/apisix/") then + has_route_not_under_apisix = true + break + end + end + end + end + + core.table.insert(routes, { + methods = route.methods, + paths = route.uri, + handler = function (api_ctx) + local code, body = route.handler(api_ctx) + if code or body then + if type(body) == "table" and ngx_header["Content-Type"] == nil then + core.response.set_header("Content-Type", "application/json") + end + + core.response.exit(code, body) + end + end + }) + end + end + end + + return router.new(routes) +end + +end -- do + + +function _M.has_route_not_under_apisix() + if has_route_not_under_apisix == nil then + return true + end + + return has_route_not_under_apisix +end + + +function _M.match(api_ctx) + local api_router = core.lrucache.global("api_router", plugin_mod.load_times, fetch_api_router) + if not api_router then + core.log.error("failed to fetch valid api router") + return false + end + + core.table.clear(match_opts) + match_opts.method = api_ctx.var.request_method + + local ok = api_router:dispatch(api_ctx.var.uri, match_opts, api_ctx) + return ok +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/balancer.lua b/CloudronPackages/APISIX/apisix-source/apisix/balancer.lua new file mode 100644 index 0000000..0fe2e65 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/balancer.lua @@ -0,0 +1,400 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local balancer = require("ngx.balancer") +local core = require("apisix.core") +local priority_balancer = require("apisix.balancer.priority") +local apisix_upstream = require("apisix.upstream") +local ipairs = ipairs +local is_http = ngx.config.subsystem == "http" +local enable_keepalive = balancer.enable_keepalive and is_http +local set_more_tries = balancer.set_more_tries +local get_last_failure = balancer.get_last_failure +local set_timeouts = balancer.set_timeouts +local ngx_now = ngx.now +local str_byte = string.byte + + +local module_name = "balancer" +local pickers = {} + +local lrucache_server_picker = core.lrucache.new({ + ttl = 300, count = 256 +}) +local lrucache_addr = core.lrucache.new({ + ttl = 300, count = 1024 * 4 +}) + + +local _M = { + version = 0.2, + name = module_name, +} + + +local function transform_node(new_nodes, node) + if not new_nodes._priority_index then + new_nodes._priority_index = {} + end + + if not new_nodes[node.priority] then + new_nodes[node.priority] = {} + core.table.insert(new_nodes._priority_index, node.priority) + end + + new_nodes[node.priority][node.host .. ":" .. node.port] = node.weight + return new_nodes +end + + +local function fetch_health_nodes(upstream, checker) + local nodes = upstream.nodes + if not checker then + local new_nodes = core.table.new(0, #nodes) + for _, node in ipairs(nodes) do + new_nodes = transform_node(new_nodes, node) + end + return new_nodes + end + + local host = upstream.checks and upstream.checks.active and upstream.checks.active.host + local port = upstream.checks and upstream.checks.active and upstream.checks.active.port + local up_nodes = core.table.new(0, #nodes) + for _, node in ipairs(nodes) do + local ok, err = checker:get_target_status(node.host, port or node.port, host) + if ok then + up_nodes = transform_node(up_nodes, node) + elseif err then + core.log.warn("failed to get health check target status, addr: ", + node.host, ":", port or node.port, ", host: ", host, ", err: ", err) + end + end + + if core.table.nkeys(up_nodes) == 0 then + core.log.warn("all upstream nodes is unhealthy, use default") + for _, node in ipairs(nodes) do + up_nodes = transform_node(up_nodes, node) + end + end + + return up_nodes +end + + +local function create_server_picker(upstream, checker) + local picker = pickers[upstream.type] + if not picker then + pickers[upstream.type] = require("apisix.balancer." .. upstream.type) + picker = pickers[upstream.type] + end + + if picker then + local nodes = upstream.nodes + local addr_to_domain = {} + for _, node in ipairs(nodes) do + if node.domain then + local addr = node.host .. ":" .. node.port + addr_to_domain[addr] = node.domain + end + end + + local up_nodes = fetch_health_nodes(upstream, checker) + + if #up_nodes._priority_index > 1 then + core.log.info("upstream nodes: ", core.json.delay_encode(up_nodes)) + local server_picker = priority_balancer.new(up_nodes, upstream, picker) + server_picker.addr_to_domain = addr_to_domain + return server_picker + end + + core.log.info("upstream nodes: ", + core.json.delay_encode(up_nodes[up_nodes._priority_index[1]])) + local server_picker = picker.new(up_nodes[up_nodes._priority_index[1]], upstream) + server_picker.addr_to_domain = addr_to_domain + return server_picker + end + + return nil, "invalid balancer type: " .. 
upstream.type, 0 +end + + +local function parse_addr(addr) + local host, port, err = core.utils.parse_addr(addr) + return {host = host, port = port}, err +end + + +-- set_balancer_opts will be called in balancer phase and before any tries +local function set_balancer_opts(route, ctx) + local up_conf = ctx.upstream_conf + + -- If the matched route has timeout config, prefer to use the route config. + local timeout = nil + if route and route.value and route.value.timeout then + timeout = route.value.timeout + else + if up_conf.timeout then + timeout = up_conf.timeout + end + end + if timeout then + local ok, err = set_timeouts(timeout.connect, timeout.send, + timeout.read) + if not ok then + core.log.error("could not set upstream timeouts: ", err) + end + end + + local retries = up_conf.retries + if not retries or retries < 0 then + retries = #up_conf.nodes - 1 + end + + if retries > 0 then + if up_conf.retry_timeout and up_conf.retry_timeout > 0 then + ctx.proxy_retry_deadline = ngx_now() + up_conf.retry_timeout + end + local ok, err = set_more_tries(retries) + if not ok then + core.log.error("could not set upstream retries: ", err) + elseif err then + core.log.warn("could not set upstream retries: ", err) + end + end +end + + +local function parse_server_for_upstream_host(picked_server, upstream_scheme) + local standard_port = apisix_upstream.scheme_to_port[upstream_scheme] + local host = picked_server.domain or picked_server.host + if upstream_scheme and (not standard_port or standard_port ~= picked_server.port) then + host = host .. ":" .. picked_server.port + end + return host +end + + +-- pick_server will be called: +-- 1. in the access phase so that we can set headers according to the picked server +-- 2. each time we need to retry upstream +local function pick_server(route, ctx) + core.log.info("route: ", core.json.delay_encode(route, true)) + core.log.info("ctx: ", core.json.delay_encode(ctx, true)) + local up_conf = ctx.upstream_conf + + for _, node in ipairs(up_conf.nodes) do + if core.utils.parse_ipv6(node.host) and str_byte(node.host, 1) ~= str_byte("[") then + node.host = '[' .. node.host .. ']' + end + end + + local nodes_count = #up_conf.nodes + if nodes_count == 1 then + local node = up_conf.nodes[1] + ctx.balancer_ip = node.host + ctx.balancer_port = node.port + node.upstream_host = parse_server_for_upstream_host(node, ctx.upstream_scheme) + return node + end + + local version = ctx.upstream_version + local key = ctx.upstream_key + local checker = ctx.up_checker + + ctx.balancer_try_count = (ctx.balancer_try_count or 0) + 1 + if ctx.balancer_try_count > 1 then + if ctx.server_picker and ctx.server_picker.after_balance then + ctx.server_picker.after_balance(ctx, true) + end + + if checker then + local state, code = get_last_failure() + local host = up_conf.checks and up_conf.checks.active and up_conf.checks.active.host + local port = up_conf.checks and up_conf.checks.active and up_conf.checks.active.port + if state == "failed" then + if code == 504 then + checker:report_timeout(ctx.balancer_ip, port or ctx.balancer_port, host) + else + checker:report_tcp_failure(ctx.balancer_ip, port or ctx.balancer_port, host) + end + else + checker:report_http_status(ctx.balancer_ip, port or ctx.balancer_port, host, code) + end + end + end + + if checker then + version = version .. "#" .. 
checker.status_ver + end + + -- the same picker will be used in the whole request, especially during the retry + local server_picker = ctx.server_picker + if not server_picker then + server_picker = lrucache_server_picker(key, version, + create_server_picker, up_conf, checker) + end + if not server_picker then + return nil, "failed to fetch server picker" + end + + local server, err = server_picker.get(ctx) + if not server then + err = err or "no valid upstream node" + return nil, "failed to find valid upstream server, " .. err + end + ctx.balancer_server = server + + local domain = server_picker.addr_to_domain[server] + local res, err = lrucache_addr(server, nil, parse_addr, server) + if err then + core.log.error("failed to parse server addr: ", server, " err: ", err) + return core.response.exit(502) + end + + res.domain = domain + ctx.balancer_ip = res.host + ctx.balancer_port = res.port + ctx.server_picker = server_picker + res.upstream_host = parse_server_for_upstream_host(res, ctx.upstream_scheme) + + return res +end + + +-- for test +_M.pick_server = pick_server + + +local set_current_peer +do + local pool_opt = {} + local default_keepalive_pool + + function set_current_peer(server, ctx) + local up_conf = ctx.upstream_conf + local keepalive_pool = up_conf.keepalive_pool + + if enable_keepalive then + if not keepalive_pool then + if not default_keepalive_pool then + local local_conf = core.config.local_conf() + local up_keepalive_conf = + core.table.try_read_attr(local_conf, "nginx_config", + "http", "upstream") + default_keepalive_pool = {} + default_keepalive_pool.idle_timeout = + core.config_util.parse_time_unit(up_keepalive_conf.keepalive_timeout) + default_keepalive_pool.size = up_keepalive_conf.keepalive + default_keepalive_pool.requests = up_keepalive_conf.keepalive_requests + end + + keepalive_pool = default_keepalive_pool + end + + local idle_timeout = keepalive_pool.idle_timeout + local size = keepalive_pool.size + local requests = keepalive_pool.requests + + core.table.clear(pool_opt) + pool_opt.pool_size = size + + local scheme = up_conf.scheme + local pool = scheme .. "#" .. server.host .. "#" .. server.port + -- other TLS schemes don't use http balancer keepalive + if (scheme == "https" or scheme == "grpcs") then + local sni = ctx.var.upstream_host + pool = pool .. "#" .. sni + + if up_conf.tls and up_conf.tls.client_cert then + pool = pool .. "#" .. 
up_conf.tls.client_cert + end + end + pool_opt.pool = pool + + local ok, err = balancer.set_current_peer(server.host, server.port, + pool_opt) + if not ok then + return ok, err + end + + return balancer.enable_keepalive(idle_timeout, requests) + end + + return balancer.set_current_peer(server.host, server.port) + end +end + + +function _M.run(route, ctx, plugin_funcs) + local server, err + + if ctx.picked_server then + -- use the server picked in the access phase + server = ctx.picked_server + ctx.picked_server = nil + + set_balancer_opts(route, ctx) + + else + if ctx.proxy_retry_deadline and ctx.proxy_retry_deadline < ngx_now() then + -- retry count is (try count - 1) + core.log.error("proxy retry timeout, retry count: ", (ctx.balancer_try_count or 1) - 1, + ", deadline: ", ctx.proxy_retry_deadline, " now: ", ngx_now()) + return core.response.exit(502) + end + -- retry + server, err = pick_server(route, ctx) + if not server then + core.log.error("failed to pick server: ", err) + return core.response.exit(502) + end + + local header_changed + local pass_host = ctx.pass_host + if pass_host == "node" then + local host = server.upstream_host + if host ~= ctx.var.upstream_host then + -- retried node has a different host + ctx.var.upstream_host = host + header_changed = true + end + end + + local _, run = plugin_funcs("before_proxy") + -- always recreate request as the request may be changed by plugins + if run or header_changed then + balancer.recreate_request() + end + end + + core.log.info("proxy request to ", server.host, ":", server.port) + + local ok, err = set_current_peer(server, ctx) + if not ok then + core.log.error("failed to set server peer [", server.host, ":", + server.port, "] err: ", err) + return core.response.exit(502) + end + + ctx.proxy_passed = true +end + + +function _M.init_worker() +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/balancer/chash.lua b/CloudronPackages/APISIX/apisix-source/apisix/balancer/chash.lua new file mode 100644 index 0000000..f0e971a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/balancer/chash.lua @@ -0,0 +1,154 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local resty_chash = require("resty.chash") +local str_char = string.char +local str_gsub = string.gsub +local pairs = pairs + + +local CONSISTENT_POINTS = 160 -- points per server, taken from `resty.chash` + + +local _M = {} + + +local function fetch_chash_hash_key(ctx, upstream) + local key = upstream.key + local hash_on = upstream.hash_on or "vars" + local chash_key + + if hash_on == "consumer" then + chash_key = ctx.consumer_name + elseif hash_on == "vars" then + chash_key = ctx.var[key] + elseif hash_on == "header" then + chash_key = ctx.var["http_" .. 
key] + elseif hash_on == "cookie" then + chash_key = ctx.var["cookie_" .. key] + elseif hash_on == "vars_combinations" then + local err, n_resolved + chash_key, err, n_resolved = core.utils.resolve_var(key, ctx.var) + if err then + core.log.error("could not resolve vars in ", key, " error: ", err) + end + + if n_resolved == 0 then + chash_key = nil + end + end + + if not chash_key then + chash_key = ctx.var["remote_addr"] + core.log.warn("chash_key fetch is nil, use default chash_key ", + "remote_addr: ", chash_key) + end + core.log.info("upstream key: ", key) + core.log.info("hash_on: ", hash_on) + core.log.info("chash_key: ", core.json.delay_encode(chash_key)) + + return chash_key +end + + +function _M.new(up_nodes, upstream) + local str_null = str_char(0) + + local nodes_count = 0 + local safe_limit = 0 + local gcd = 0 + local servers, nodes = {}, {} + + for serv, weight in pairs(up_nodes) do + if gcd == 0 then + gcd = weight + else + gcd = core.math.gcd(gcd, weight) + end + end + + if gcd == 0 then + -- all nodes' weight are 0 + gcd = 1 + end + + for serv, weight in pairs(up_nodes) do + local id = str_gsub(serv, ":", str_null) + + nodes_count = nodes_count + 1 + weight = weight / gcd + safe_limit = safe_limit + weight + servers[id] = serv + nodes[id] = weight + end + safe_limit = safe_limit * CONSISTENT_POINTS + + local picker = resty_chash:new(nodes) + return { + upstream = upstream, + get = function (ctx) + local id + if ctx.balancer_tried_servers then + if ctx.balancer_tried_servers_count == nodes_count then + return nil, "all upstream servers tried" + end + + -- the 'safe_limit' is a best effort limit to prevent infinite loop caused by bug + for i = 1, safe_limit do + id, ctx.chash_last_server_index = picker:next(ctx.chash_last_server_index) + if not ctx.balancer_tried_servers[servers[id]] then + break + end + end + else + local chash_key = fetch_chash_hash_key(ctx, upstream) + id, ctx.chash_last_server_index = picker:find(chash_key) + end + -- core.log.warn("chash id: ", id, " val: ", servers[id]) + return servers[id] + end, + after_balance = function (ctx, before_retry) + if not before_retry then + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + return nil + end + + if not ctx.balancer_tried_servers then + ctx.balancer_tried_servers = core.tablepool.fetch("balancer_tried_servers", 0, 2) + end + + ctx.balancer_tried_servers[ctx.balancer_server] = true + ctx.balancer_tried_servers_count = (ctx.balancer_tried_servers_count or 0) + 1 + end, + before_retry_next_priority = function (ctx) + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + ctx.balancer_tried_servers_count = 0 + end, + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/balancer/ewma.lua b/CloudronPackages/APISIX/apisix-source/apisix/balancer/ewma.lua new file mode 100644 index 0000000..c533a01 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/balancer/ewma.lua @@ -0,0 +1,243 @@ +-- Original Authors: Shiv Nagarajan & Scott Francis +-- Accessed: March 12, 2018 +-- Inspiration drawn from: +-- https://github.com/twitter/finagle/blob/1bc837c4feafc0096e43c0e98516a8e1c50c4421 +-- /finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/PeakEwma.scala +local core = require("apisix.core") +local resty_lock = require("resty.lock") + +local nkeys = core.table.nkeys 
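+-- The scoring model in this file follows the peak-EWMA approach cited in the
+-- header above: each upstream address keeps a decayed moving average of its
+-- recent round-trip time, and the picker compares two randomly chosen peers
+-- and takes the one with the lower score.  A rough sketch of the update rule
+-- implemented by decay_ewma() / get_or_update_ewma() further down:
+--
+--   weight = math.exp(-(now - last_touched_at) / DECAY_TIME)   -- DECAY_TIME is 10s
+--   ewma   = ewma * weight + rtt * (1 - weight)
+--
+-- where rtt is upstream_connect_time + upstream_response_time, reported back
+-- through _ewma_after_balance().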
+local table_insert = core.table.insert +local ngx = ngx +local ngx_shared = ngx.shared +local ngx_now = ngx.now +local math = math +local pairs = pairs +local ipairs = ipairs +local next = next +local error = error + +local DECAY_TIME = 10 -- this value is in seconds +local LOCK_KEY = ":ewma_key" + +local shm_ewma = ngx_shared["balancer-ewma"] +local shm_last_touched_at = ngx_shared["balancer-ewma-last-touched-at"] + +local lrucache_addr = core.lrucache.new({ttl = 300, count = 1024}) +local lrucache_trans_format = core.lrucache.new({ttl = 300, count = 256}) + +local ewma_lock, ewma_lock_err = resty_lock:new("balancer-ewma-locks", {timeout = 0, exptime = 0.1}) + +local _M = {name = "ewma"} + +local function lock(upstream) + local _, err = ewma_lock:lock(upstream .. LOCK_KEY) + if err and err ~= "timeout" then + core.log.error("EWMA Balancer failed to lock: ", err) + end + + return err +end + +local function unlock() + local ok, err = ewma_lock:unlock() + if not ok then + core.log.error("EWMA Balancer failed to unlock: ", err) + end + + return err +end + +local function decay_ewma(ewma, last_touched_at, rtt, now) + local td = now - last_touched_at + td = math.max(td, 0) + local weight = math.exp(-td / DECAY_TIME) + + ewma = ewma * weight + rtt * (1.0 - weight) + return ewma +end + +local function store_stats(upstream, ewma, now) + local success, err, forcible = shm_last_touched_at:set(upstream, now) + if not success then + core.log.error("shm_last_touched_at:set failed: ", err) + end + if forcible then + core.log.warn("shm_last_touched_at:set valid items forcibly overwritten") + end + + success, err, forcible = shm_ewma:set(upstream, ewma) + if not success then + core.log.error("shm_ewma:set failed: ", err) + end + if forcible then + core.log.warn("shm_ewma:set valid items forcibly overwritten") + end +end + +local function get_or_update_ewma(upstream, rtt, update) + if update then + local lock_err = lock(upstream) + if lock_err ~= nil then + return 0, lock_err + end + end + + local ewma = shm_ewma:get(upstream) or 0 + + local now = ngx_now() + local last_touched_at = shm_last_touched_at:get(upstream) or 0 + ewma = decay_ewma(ewma, last_touched_at, rtt, now) + + if not update then + return ewma, nil + end + + store_stats(upstream, ewma, now) + + unlock() + + return ewma, nil +end + +local function get_upstream_name(upstream) + return upstream.host .. ":" .. 
upstream.port +end + +local function score(upstream) + -- Original implementation used names + -- Endpoints don't have names, so passing in IP:Port as key instead + local upstream_name = get_upstream_name(upstream) + return get_or_update_ewma(upstream_name, 0, false) +end + +local function parse_addr(addr) + local host, port, err = core.utils.parse_addr(addr) + return {host = host, port = port}, err +end + +local function _trans_format(up_nodes) + -- trans + -- {"1.2.3.4:80":100,"5.6.7.8:8080":100} + -- into + -- [{"host":"1.2.3.4","port":"80"},{"host":"5.6.7.8","port":"8080"}] + local peers = {} + local res, err + + for addr, _ in pairs(up_nodes) do + res, err = lrucache_addr(addr, nil, parse_addr, addr) + if not err then + core.table.insert(peers, res) + else + core.log.error('parse_addr error: ', addr, err) + end + end + + return next(peers) and peers or nil +end + +local function _ewma_find(ctx, up_nodes) + local peers + + if not up_nodes or nkeys(up_nodes) == 0 then + return nil, 'up_nodes empty' + end + + if ctx.balancer_tried_servers and ctx.balancer_tried_servers_count == nkeys(up_nodes) then + return nil, "all upstream servers tried" + end + + peers = lrucache_trans_format(up_nodes, ctx.upstream_version, _trans_format, up_nodes) + if not peers then + return nil, 'up_nodes trans error' + end + + local filtered_peers + if ctx.balancer_tried_servers then + for _, peer in ipairs(peers) do + if not ctx.balancer_tried_servers[get_upstream_name(peer)] then + if not filtered_peers then + filtered_peers = {} + end + + table_insert(filtered_peers, peer) + end + end + else + filtered_peers = peers + end + + local endpoint = filtered_peers[1] + + if #filtered_peers > 1 then + local a, b = math.random(1, #filtered_peers), math.random(1, #filtered_peers - 1) + if b >= a then + b = b + 1 + end + + local backendpoint + endpoint, backendpoint = filtered_peers[a], filtered_peers[b] + if score(endpoint) > score(backendpoint) then + endpoint = backendpoint + end + end + + return get_upstream_name(endpoint) +end + +local function _ewma_after_balance(ctx, before_retry) + if before_retry then + if not ctx.balancer_tried_servers then + ctx.balancer_tried_servers = core.tablepool.fetch("balancer_tried_servers", 0, 2) + end + + ctx.balancer_tried_servers[ctx.balancer_server] = true + ctx.balancer_tried_servers_count = (ctx.balancer_tried_servers_count or 0) + 1 + + return nil + end + + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + local response_time = ctx.var.upstream_response_time or 0 + local connect_time = ctx.var.upstream_connect_time or 0 + local rtt = connect_time + response_time + local upstream = ctx.var.upstream_addr + + if not upstream then + return nil, "no upstream addr found" + end + + return get_or_update_ewma(upstream, rtt, true) +end + +function _M.new(up_nodes, upstream) + if not shm_ewma or not shm_last_touched_at then + return nil, "dictionary not find" + end + + if not ewma_lock then + error(ewma_lock_err) + end + + return { + upstream = upstream, + get = function(ctx) + return _ewma_find(ctx, up_nodes) + end, + after_balance = _ewma_after_balance, + before_retry_next_priority = function (ctx) + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + ctx.balancer_tried_servers_count = 0 + end, + } +end + +return _M diff --git 
a/CloudronPackages/APISIX/apisix-source/apisix/balancer/least_conn.lua b/CloudronPackages/APISIX/apisix-source/apisix/balancer/least_conn.lua new file mode 100644 index 0000000..8923d17 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/balancer/least_conn.lua @@ -0,0 +1,113 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local binaryHeap = require("binaryheap") +local ipairs = ipairs +local pairs = pairs + + +local _M = {} + + +local function least_score(a, b) + return a.score < b.score +end + + +function _M.new(up_nodes, upstream) + local servers_heap = binaryHeap.minUnique(least_score) + for server, weight in pairs(up_nodes) do + local score = 1 / weight + -- Note: the argument order of insert is different from others + servers_heap:insert({ + server = server, + effect_weight = 1 / weight, + score = score, + }, server) + end + + return { + upstream = upstream, + get = function (ctx) + local server, info, err + if ctx.balancer_tried_servers then + local tried_server_list = {} + while true do + server, info = servers_heap:peek() + -- we need to let the retry > #nodes so this branch can be hit and + -- the request will retry next priority of nodes + if server == nil then + err = "all upstream servers tried" + break + end + + if not ctx.balancer_tried_servers[server] then + break + end + + servers_heap:pop() + core.table.insert(tried_server_list, info) + end + + for _, info in ipairs(tried_server_list) do + servers_heap:insert(info, info.server) + end + else + server, info = servers_heap:peek() + end + + if not server then + return nil, err + end + + info.score = info.score + info.effect_weight + servers_heap:update(server, info) + return server + end, + after_balance = function (ctx, before_retry) + local server = ctx.balancer_server + local info = servers_heap:valueByPayload(server) + info.score = info.score - info.effect_weight + servers_heap:update(server, info) + + if not before_retry then + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + return nil + end + + if not ctx.balancer_tried_servers then + ctx.balancer_tried_servers = core.tablepool.fetch("balancer_tried_servers", 0, 2) + end + + ctx.balancer_tried_servers[server] = true + end, + before_retry_next_priority = function (ctx) + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + end, + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/balancer/priority.lua b/CloudronPackages/APISIX/apisix-source/apisix/balancer/priority.lua new file mode 100644 index 0000000..af5d60c --- /dev/null 
+++ b/CloudronPackages/APISIX/apisix-source/apisix/balancer/priority.lua @@ -0,0 +1,81 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local ipairs = ipairs + + +local _M = {} + + +local function max_priority(a, b) + return a > b +end + + +function _M.new(up_nodes, upstream, picker_mod) + local priority_index = up_nodes._priority_index + core.table.sort(priority_index, max_priority) + + local pickers = core.table.new(#priority_index, 0) + for i, priority in ipairs(priority_index) do + local picker, err = picker_mod.new(up_nodes[priority], upstream) + if not picker then + return nil, "failed to create picker with priority " .. priority .. ": " .. err + end + if not picker.before_retry_next_priority then + return nil, "picker should define 'before_retry_next_priority' to reset ctx" + end + + pickers[i] = picker + end + + return { + upstream = upstream, + get = function (ctx) + for i = ctx.priority_balancer_picker_idx or 1, #pickers do + local picker = pickers[i] + local server, err = picker.get(ctx) + if server then + ctx.priority_balancer_picker_idx = i + return server + end + + core.log.notice("failed to get server from current priority ", + priority_index[i], + ", try next one, err: ", err) + + picker.before_retry_next_priority(ctx) + end + + return nil, "all servers tried" + end, + after_balance = function (ctx, before_retry) + local priority_balancer_picker = pickers[ctx.priority_balancer_picker_idx] + if not priority_balancer_picker or + not priority_balancer_picker.after_balance + then + return + end + + priority_balancer_picker.after_balance(ctx, before_retry) + end + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/balancer/roundrobin.lua b/CloudronPackages/APISIX/apisix-source/apisix/balancer/roundrobin.lua new file mode 100644 index 0000000..7090f52 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/balancer/roundrobin.lua @@ -0,0 +1,89 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local roundrobin = require("resty.roundrobin") +local core = require("apisix.core") +local nkeys = core.table.nkeys +local pairs = pairs + + +local _M = {} + + +function _M.new(up_nodes, upstream) + local safe_limit = 0 + for _, weight in pairs(up_nodes) do + -- the weight can be zero + safe_limit = safe_limit + weight + 1 + end + + local picker = roundrobin:new(up_nodes) + local nodes_count = nkeys(up_nodes) + return { + upstream = upstream, + get = function (ctx) + if ctx.balancer_tried_servers and ctx.balancer_tried_servers_count == nodes_count then + return nil, "all upstream servers tried" + end + + local server, err + for i = 1, safe_limit do + server, err = picker:find() + if not server then + return nil, err + end + if ctx.balancer_tried_servers then + if not ctx.balancer_tried_servers[server] then + break + end + else + break + end + end + + return server + end, + after_balance = function (ctx, before_retry) + if not before_retry then + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + return nil + end + + if not ctx.balancer_tried_servers then + ctx.balancer_tried_servers = core.tablepool.fetch("balancer_tried_servers", 0, 2) + end + + ctx.balancer_tried_servers[ctx.balancer_server] = true + ctx.balancer_tried_servers_count = (ctx.balancer_tried_servers_count or 0) + 1 + end, + before_retry_next_priority = function (ctx) + if ctx.balancer_tried_servers then + core.tablepool.release("balancer_tried_servers", ctx.balancer_tried_servers) + ctx.balancer_tried_servers = nil + end + + ctx.balancer_tried_servers_count = 0 + end, + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/apisix.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/apisix.lua new file mode 100755 index 0000000..079691f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/apisix.lua @@ -0,0 +1,40 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local pkg_cpath_org = package.cpath +local pkg_path_org = package.path + +local _, find_pos_end = string.find(pkg_path_org, ";", -1, true) +if not find_pos_end then + pkg_path_org = pkg_path_org .. ";" +end + +local apisix_home = "/usr/local/apisix" +local pkg_cpath = apisix_home .. "/deps/lib64/lua/5.1/?.so;" + .. apisix_home .. "/deps/lib/lua/5.1/?.so;" +local pkg_path_deps = apisix_home .. "/deps/share/lua/5.1/?.lua;" +local pkg_path_env = apisix_home .. "/?.lua;" + +-- modify the load path to load our dependencies +package.cpath = pkg_cpath .. pkg_cpath_org +package.path = pkg_path_deps .. pkg_path_org .. 
pkg_path_env + +-- pass path to construct the final result +local env = require("apisix.cli.env")(apisix_home, pkg_cpath_org, pkg_path_org) +local ops = require("apisix.cli.ops") + +ops.execute(env, arg) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/config.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/config.lua new file mode 100644 index 0000000..20f0e04 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/config.lua @@ -0,0 +1,385 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local table_conact = table.concat + +local _M = { + apisix = { + node_listen = { 9080 }, + enable_admin = true, + enable_dev_mode = false, + enable_reuseport = true, + show_upstream_status_in_response_header = false, + enable_ipv6 = true, + enable_http2 = true, + enable_server_tokens = true, + extra_lua_path = "", + extra_lua_cpath = "", + proxy_cache = { + cache_ttl = "10s", + zones = { + { + name = "disk_cache_one", + memory_size = "50m", + disk_size = "1G", + disk_path = "/tmp/disk_cache_one", + cache_levels = "1:2" + }, + { + name = "memory_cache", + memory_size = "50m" + } + } + }, + delete_uri_tail_slash = false, + normalize_uri_like_servlet = false, + router = { + http = "radixtree_host_uri", + ssl = "radixtree_sni" + }, + proxy_mode = "http", + resolver_timeout = 5, + enable_resolv_search_opt = true, + ssl = { + enable = true, + listen = { { + port = 9443, + enable_http3 = false + } }, + ssl_protocols = "TLSv1.2 TLSv1.3", + ssl_ciphers = table_conact({ + "ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256", + "ECDHE-ECDSA-AES256-GCM-SHA384", "ECDHE-RSA-AES256-GCM-SHA384", + "ECDHE-ECDSA-CHACHA20-POLY1305", "ECDHE-RSA-CHACHA20-POLY1305", + "DHE-RSA-AES128-GCM-SHA256", "DHE-RSA-AES256-GCM-SHA384", + }, ":"), + ssl_session_tickets = false, + ssl_trusted_certificate = "system" + }, + enable_control = true, + disable_sync_configuration_during_start = false, + data_encryption = { + enable_encrypt_fields = true, + keyring = { "qeddd145sfvddff3", "edd1c9f0985e76a2" } + }, + events = { + module = "lua-resty-events" + } + }, + nginx_config = { + error_log = "logs/error.log", + error_log_level = "warn", + worker_processes = "auto", + enable_cpu_affinity = false, + worker_rlimit_nofile = 20480, + worker_shutdown_timeout = "240s", + max_pending_timers = 16384, + max_running_timers = 4096, + event = { + worker_connections = 10620 + }, + meta = { + lua_shared_dict = { + ["prometheus-metrics"] = "15m", + ["standalone-config"] = "10m", + ["status-report"] = "1m", + } + }, + stream = { + enable_access_log = false, + access_log = "logs/access_stream.log", + -- luacheck: push max code line length 300 + access_log_format = "$remote_addr [$time_local] $protocol $status $bytes_sent $bytes_received $session_time", + -- 
luacheck: pop + access_log_format_escape = "default", + lua_shared_dict = { + ["etcd-cluster-health-check-stream"] = "10m", + ["lrucache-lock-stream"] = "10m", + ["plugin-limit-conn-stream"] = "10m", + ["worker-events-stream"] = "10m", + ["tars-stream"] = "1m", + ["upstream-healthcheck-stream"] = "10m", + } + }, + main_configuration_snippet = "", + http_configuration_snippet = "", + http_server_configuration_snippet = "", + http_server_location_configuration_snippet = "", + http_admin_configuration_snippet = "", + http_end_configuration_snippet = "", + stream_configuration_snippet = "", + http = { + enable_access_log = true, + access_log = "logs/access.log", + access_log_buffer = 16384, + -- luacheck: push max code line length 300 + access_log_format = + '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time "$http_referer" "$http_user_agent" $upstream_addr $upstream_status $upstream_response_time "$upstream_scheme://$upstream_host$upstream_uri"', + -- luacheck: pop + access_log_format_escape = "default", + keepalive_timeout = "60s", + client_header_timeout = "60s", + client_body_timeout = "60s", + client_max_body_size = 0, + send_timeout = "10s", + underscores_in_headers = "on", + real_ip_header = "X-Real-IP", + real_ip_recursive = "off", + real_ip_from = { "127.0.0.1", "unix:" }, + proxy_ssl_server_name = true, + upstream = { + keepalive = 320, + keepalive_requests = 1000, + keepalive_timeout = "60s" + }, + charset = "utf-8", + variables_hash_max_size = 2048, + lua_shared_dict = { + ["internal-status"] = "10m", + ["plugin-limit-req"] = "10m", + ["plugin-limit-count"] = "10m", + ["prometheus-metrics"] = "10m", + ["plugin-limit-conn"] = "10m", + ["upstream-healthcheck"] = "10m", + ["worker-events"] = "10m", + ["lrucache-lock"] = "10m", + ["balancer-ewma"] = "10m", + ["balancer-ewma-locks"] = "10m", + ["balancer-ewma-last-touched-at"] = "10m", + ["plugin-limit-req-redis-cluster-slot-lock"] = "1m", + ["plugin-limit-count-redis-cluster-slot-lock"] = "1m", + ["plugin-limit-conn-redis-cluster-slot-lock"] = "1m", + ["plugin-ai-rate-limiting"] = "10m", + ["plugin-ai-rate-limiting-reset-header"] = "10m", + tracing_buffer = "10m", + ["plugin-api-breaker"] = "10m", + ["etcd-cluster-health-check"] = "10m", + discovery = "1m", + jwks = "1m", + introspection = "10m", + ["access-tokens"] = "1m", + ["ext-plugin"] = "1m", + tars = "1m", + ["cas-auth"] = "10m", + ["ocsp-stapling"] = "10m", + ["mcp-session"] = "10m", + } + } + }, + graphql = { + max_size = 1048576 + }, + plugins = { + "real-ip", + "ai", + "client-control", + "proxy-control", + "request-id", + "zipkin", + "ext-plugin-pre-req", + "fault-injection", + "mocking", + "serverless-pre-function", + "cors", + "ip-restriction", + "ua-restriction", + "referer-restriction", + "csrf", + "uri-blocker", + "request-validation", + "chaitin-waf", + "multi-auth", + "openid-connect", + "cas-auth", + "authz-casbin", + "authz-casdoor", + "wolf-rbac", + "ldap-auth", + "hmac-auth", + "basic-auth", + "jwt-auth", + "jwe-decrypt", + "key-auth", + "consumer-restriction", + "attach-consumer-label", + "forward-auth", + "opa", + "authz-keycloak", + "proxy-cache", + "body-transformer", + "ai-prompt-template", + "ai-prompt-decorator", + "ai-prompt-guard", + "ai-rag", + "ai-rate-limiting", + "ai-proxy-multi", + "ai-proxy", + "ai-aws-content-moderation", + "proxy-mirror", + "proxy-rewrite", + "workflow", + "api-breaker", + "limit-conn", + "limit-count", + "limit-req", + "gzip", + -- deprecated and will be removed in a future 
release + -- "server-info", + "traffic-split", + "redirect", + "response-rewrite", + "mcp-bridge", + "degraphql", + "kafka-proxy", + "grpc-transcode", + "grpc-web", + "http-dubbo", + "public-api", + "prometheus", + "datadog", + "lago", + "loki-logger", + "elasticsearch-logger", + "echo", + "loggly", + "http-logger", + "splunk-hec-logging", + "skywalking-logger", + "google-cloud-logging", + "sls-logger", + "tcp-logger", + "kafka-logger", + "rocketmq-logger", + "syslog", + "udp-logger", + "file-logger", + "clickhouse-logger", + "tencent-cloud-cls", + "inspect", + "example-plugin", + "aws-lambda", + "azure-functions", + "openwhisk", + "openfunction", + "serverless-post-function", + "ext-plugin-post-req", + "ext-plugin-post-resp", + "ai-request-rewrite", + }, + stream_plugins = { "ip-restriction", "limit-conn", "mqtt-proxy", "syslog" }, + plugin_attr = { + ["log-rotate"] = { + timeout = 10000, + interval = 3600, + max_kept = 168, + max_size = -1, + enable_compression = false + }, + skywalking = { + service_name = "APISIX", + service_instance_name = "APISIX Instance Name", + endpoint_addr = "http://127.0.0.1:12800", + report_interval = 3 + }, + opentelemetry = { + trace_id_source = "x-request-id", + resource = { + ["service.name"] = "APISIX" + }, + collector = { + address = "127.0.0.1:4318", + request_timeout = 3, + request_headers = { + Authorization = "token" + } + }, + batch_span_processor = { + drop_on_queue_full = false, + max_queue_size = 1024, + batch_timeout = 2, + inactive_timeout = 1, + max_export_batch_size = tonumber(os.getenv("OTEL_BSP_MAX_EXPORT_BATCH_SIZE")) or 16 + }, + set_ngx_var = false + }, + prometheus = { + export_uri = "/apisix/prometheus/metrics", + metric_prefix = "apisix_", + enable_export_server = true, + export_addr = { + ip = "127.0.0.1", + port = 9091 + } + }, + ["server-info"] = { + report_ttl = 60 + }, + ["dubbo-proxy"] = { + upstream_multiplex_count = 32 + }, + ["proxy-mirror"] = { + timeout = { + connect = "60s", + read = "60s", + send = "60s" + } + }, + inspect = { + delay = 3, + hooks_file = "/usr/local/apisix/plugin_inspect_hooks.lua" + }, + zipkin = { + set_ngx_var = false + } + }, + deployment = { + role = "traditional", + role_traditional = { + config_provider = "etcd" + }, + admin = { + admin_key_required = true, + admin_key = { + { + name = "admin", + key = "", + role = "admin" + } + }, + enable_admin_cors = true, + enable_admin_ui = true, + allow_admin = { "127.0.0.0/24" }, + admin_listen = { + ip = "0.0.0.0", + port = 9180 + }, + admin_api_version = "v3" + }, + etcd = { + host = { "http://127.0.0.1:2379" }, + prefix = "/apisix", + timeout = 30, + watch_timeout = 50, + startup_retry = 2, + tls = { + verify = true + } + } + } +} + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/env.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/env.lua new file mode 100644 index 0000000..3631483 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/env.lua @@ -0,0 +1,115 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local require = require +local util = require("apisix.cli.util") + +local pcall = pcall +local error = error +local exit = os.exit +local stderr = io.stderr +local str_find = string.find +local arg = arg +local package = package +local tonumber = tonumber + +return function (apisix_home, pkg_cpath_org, pkg_path_org) + -- ulimit setting should be checked when APISIX starts + local res, err = util.execute_cmd("ulimit -n") + if not res then + error("failed to exec ulimit cmd \'ulimit -n \', err: " .. err) + end + local trimed_res = util.trim(res) + local ulimit = trimed_res == "unlimited" and trimed_res or tonumber(trimed_res) + if not ulimit then + error("failed to fetch current maximum number of open file descriptors") + end + + -- only for developer, use current folder as working space + local is_root_path = false + local script_path = arg[0] + if script_path:sub(1, 2) == './' then + apisix_home = util.trim(util.execute_cmd("pwd")) + if not apisix_home then + error("failed to fetch current path") + end + + -- determine whether the current path is under the "/root" folder. + -- "/root/" is the root folder flag. + if str_find(apisix_home .. "/", '/root/', nil, true) == 1 then + is_root_path = true + end + + local pkg_cpath = apisix_home .. "/deps/lib64/lua/5.1/?.so;" + .. apisix_home .. "/deps/lib/lua/5.1/?.so;" + + local pkg_path = apisix_home .. "/?/init.lua;" + .. apisix_home .. "/deps/share/lua/5.1/?/init.lua;" + .. apisix_home .. "/deps/share/lua/5.1/?.lua;;" + + package.cpath = pkg_cpath .. package.cpath + package.path = pkg_path .. package.path + end + + do + -- skip luajit environment + local ok = pcall(require, "table.new") + if not ok then + local ok, json = pcall(require, "cjson") + if ok and json then + stderr:write("please remove the cjson library in Lua, it may " + .. "conflict with the cjson library in openresty. " + .. "\n luarocks remove lua-cjson\n") + exit(1) + end + end + end + + -- pre-transform openresty path + res, err = util.execute_cmd("command -v openresty") + if not res then + error("failed to exec cmd \'command -v openresty\', err: " .. err) + end + local openresty_path_abs = util.trim(res) + + local openresty_args = openresty_path_abs .. [[ -p ]] .. apisix_home .. [[ -c ]] + .. apisix_home .. [[/conf/nginx.conf]] + + local or_info, err = util.execute_cmd("openresty -V 2>&1") + if not or_info then + error("failed to exec cmd \'openresty -V 2>&1\', err: " .. 
err) + end + + local use_apisix_base = true + if not or_info:find("apisix-nginx-module", 1, true) then + use_apisix_base = false + end + + local min_etcd_version = "3.4.0" + + return { + apisix_home = apisix_home, + is_root_path = is_root_path, + openresty_args = openresty_args, + openresty_info = or_info, + use_apisix_base = use_apisix_base, + pkg_cpath_org = pkg_cpath_org, + pkg_path_org = pkg_path_org, + min_etcd_version = min_etcd_version, + ulimit = ulimit, + } +end diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/etcd.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/etcd.lua new file mode 100644 index 0000000..548a5d2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/etcd.lua @@ -0,0 +1,405 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local base64_encode = require("base64").encode +local dkjson = require("dkjson") +local constants = require("apisix.constants") +local util = require("apisix.cli.util") +local file = require("apisix.cli.file") +local http = require("socket.http") +local https = require("ssl.https") +local ltn12 = require("ltn12") + +local type = type +local ipairs = ipairs +local pairs = pairs +local print = print +local tonumber = tonumber +local str_format = string.format +local str_sub = string.sub +local table_concat = table.concat +local table_insert = table.insert +local io_stderr = io.stderr + +local _M = {} + +-- Timeout for all I/O operations +http.TIMEOUT = 3 + +local function parse_semantic_version(ver) + local errmsg = "invalid semantic version: " .. 
ver + + local parts = util.split(ver, "-") + if #parts > 2 then + return nil, errmsg + end + + if #parts == 2 then + ver = parts[1] + end + + local fields = util.split(ver, ".") + if #fields ~= 3 then + return nil, errmsg + end + + local major = tonumber(fields[1]) + local minor = tonumber(fields[2]) + local patch = tonumber(fields[3]) + + if not (major and minor and patch) then + return nil, errmsg + end + + return { + major = major, + minor = minor, + patch = patch, + } +end + + +local function compare_semantic_version(v1, v2) + local ver1, err = parse_semantic_version(v1) + if not ver1 then + return nil, err + end + + local ver2, err = parse_semantic_version(v2) + if not ver2 then + return nil, err + end + + if ver1.major ~= ver2.major then + return ver1.major < ver2.major + end + + if ver1.minor ~= ver2.minor then + return ver1.minor < ver2.minor + end + + return ver1.patch < ver2.patch +end + + +local function request(url, yaml_conf) + local response_body = {} + local single_request = false + if type(url) == "string" then + url = { + url = url, + method = "GET", + sink = ltn12.sink.table(response_body), + } + single_request = true + end + + local res, code + + if str_sub(url.url, 1, 8) == "https://" then + local verify = "peer" + if yaml_conf.etcd.tls then + local cfg = yaml_conf.etcd.tls + + if cfg.verify == false then + verify = "none" + end + + url.certificate = cfg.cert + url.key = cfg.key + + local apisix_ssl = yaml_conf.apisix.ssl + if apisix_ssl and apisix_ssl.ssl_trusted_certificate then + url.cafile = apisix_ssl.ssl_trusted_certificate + end + end + + url.verify = verify + res, code = https.request(url) + else + + res, code = http.request(url) + end + + -- In case of failure, request returns nil followed by an error message. + -- Else the first return value is the response body + -- and followed by the response status code. + if single_request and res ~= nil then + return table_concat(response_body), code + end + + return res, code +end + + +local function prepare_dirs_via_http(yaml_conf, args, index, host, host_count) + local is_success = true + + local errmsg + local auth_token + local user = yaml_conf.etcd.user + local password = yaml_conf.etcd.password + if user and password then + local auth_url = host .. "/v3/auth/authenticate" + local json_auth = { + name = user, + password = password + } + + local post_json_auth = dkjson.encode(json_auth) + local response_body = {} + + local res, err + local retry_time = 0 + while retry_time < 2 do + res, err = request({ + url = auth_url, + method = "POST", + source = ltn12.source.string(post_json_auth), + sink = ltn12.sink.table(response_body), + headers = { + ["Content-Length"] = #post_json_auth + } + }, yaml_conf) + -- In case of failure, request returns nil followed by an error message. + -- Else the first return value is just the number 1 + -- and followed by the response status code. + if res then + break + end + retry_time = retry_time + 1 + print(str_format("Warning! 
Request etcd endpoint \'%s\' error, %s, retry time=%s", + auth_url, err, retry_time)) + end + + if not res then + errmsg = str_format("request etcd endpoint \"%s\" error, %s\n", auth_url, err) + util.die(errmsg) + end + + local res_auth = table_concat(response_body) + local body_auth, _, err_auth = dkjson.decode(res_auth) + if err_auth or (body_auth and not body_auth["token"]) then + errmsg = str_format("got malformed auth message: \"%s\" from etcd \"%s\"\n", + res_auth, auth_url) + util.die(errmsg) + end + + auth_token = body_auth.token + end + + + local dirs = {} + for name in pairs(constants.HTTP_ETCD_DIRECTORY) do + dirs[name] = true + end + for name in pairs(constants.STREAM_ETCD_DIRECTORY) do + dirs[name] = true + end + + for dir_name in pairs(dirs) do + local key = (yaml_conf.etcd.prefix or "") .. dir_name .. "/" + + local put_url = host .. "/v3/kv/put" + local post_json = '{"value":"' .. base64_encode("init_dir") + .. '", "key":"' .. base64_encode(key) .. '"}' + local response_body = {} + local headers = {["Content-Length"] = #post_json} + if auth_token then + headers["Authorization"] = auth_token + end + + local res, err + local retry_time = 0 + while retry_time < 2 do + res, err = request({ + url = put_url, + method = "POST", + source = ltn12.source.string(post_json), + sink = ltn12.sink.table(response_body), + headers = headers + }, yaml_conf) + retry_time = retry_time + 1 + if res then + break + end + print(str_format("Warning! Request etcd endpoint \'%s\' error, %s, retry time=%s", + put_url, err, retry_time)) + end + + if not res then + errmsg = str_format("request etcd endpoint \"%s\" error, %s\n", put_url, err) + util.die(errmsg) + end + + local res_put = table_concat(response_body) + if res_put:find("404 page not found", 1, true) then + errmsg = str_format("gRPC gateway is not enabled in etcd cluster \"%s\",", + "which is required by Apache APISIX\n") + util.die(errmsg) + end + + if res_put:find("CommonName of client sending a request against gateway", 1, true) then + errmsg = str_format("etcd \"client-cert-auth\" cannot be used with gRPC-gateway, " + .. "please configure the etcd username and password " + .. 
"in configuration file\n") + util.die(errmsg) + end + + if res_put:find("error", 1, true) then + is_success = false + if (index == host_count) then + errmsg = str_format("got malformed key-put message: \"%s\" from etcd \"%s\"\n", + res_put, put_url) + util.die(errmsg) + end + + break + end + + if args and args["verbose"] then + print(res_put) + end + end + + return is_success +end + + +local function prepare_dirs(yaml_conf, args, index, host, host_count) + return prepare_dirs_via_http(yaml_conf, args, index, host, host_count) +end + + +function _M.init(env, args) + -- read_yaml_conf + local yaml_conf, err = file.read_yaml_conf(env.apisix_home) + if not yaml_conf then + util.die("failed to read local yaml config of apisix: ", err) + end + + if not yaml_conf.apisix then + util.die("failed to read `apisix` field from yaml file when init etcd") + end + + if yaml_conf.deployment.config_provider ~= "etcd" then + return true + end + + if not yaml_conf.etcd then + util.die("failed to read `etcd` field from yaml file when init etcd") + end + + -- convert old single etcd config to multiple etcd config + if type(yaml_conf.etcd.host) == "string" then + yaml_conf.etcd.host = {yaml_conf.etcd.host} + end + + local host_count = #(yaml_conf.etcd.host) + local scheme + for i = 1, host_count do + local host = yaml_conf.etcd.host[i] + local fields = util.split(host, "://") + if not fields then + util.die("malformed etcd endpoint: ", host, "\n") + end + + if not scheme then + scheme = fields[1] + elseif scheme ~= fields[1] then + print([[WARNING: mixed protocols among etcd endpoints]]) + end + end + + -- check the etcd cluster version + local etcd_healthy_hosts = {} + for index, host in ipairs(yaml_conf.etcd.host) do + local version_url = host .. "/version" + local errmsg + + local res, err + local retry_time = 0 + + local etcd = yaml_conf.etcd + local max_retry = tonumber(etcd.startup_retry) or 2 + while retry_time < max_retry do + res, err = request(version_url, yaml_conf) + -- In case of failure, request returns nil followed by an error message. + -- Else the first return value is the response body + -- and followed by the response status code. + if res then + break + end + retry_time = retry_time + 1 + print(str_format("Warning! Request etcd endpoint \'%s\' error, %s, retry time=%s", + version_url, err, retry_time)) + end + + if res then + local body, _, err = dkjson.decode(res) + if err or (body and not body["etcdcluster"]) then + errmsg = str_format("got malformed version message: \"%s\" from etcd \"%s\"\n", res, + version_url) + util.die(errmsg) + end + + local cluster_version = body["etcdcluster"] + if compare_semantic_version(cluster_version, env.min_etcd_version) then + util.die("etcd cluster version ", cluster_version, + " is less than the required version ", env.min_etcd_version, + ", please upgrade your etcd cluster\n") + end + + table_insert(etcd_healthy_hosts, host) + else + io_stderr:write(str_format("request etcd endpoint \'%s\' error, %s\n", version_url, + err)) + end + end + + if #etcd_healthy_hosts <= 0 then + util.die("all etcd nodes are unavailable\n") + end + + if (#etcd_healthy_hosts / host_count * 100) <= 50 then + util.die("the etcd cluster needs at least 50% and above healthy nodes\n") + end + + -- access from the data plane to etcd should be read-only. + -- data plane writes to etcd may cause security issues. 
+ if yaml_conf.deployment.role == "data_plane" then + print("access from the data plane to etcd should be read-only, " + .."skip initializing the data of etcd") + return true + end + + print("trying to initialize the data of etcd") + local etcd_ok = false + for index, host in ipairs(etcd_healthy_hosts) do + if prepare_dirs(yaml_conf, args, index, host, host_count) then + etcd_ok = true + break + end + end + + if not etcd_ok then + util.die("none of the configured etcd works well\n") + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/file.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/file.lua new file mode 100644 index 0000000..3687363 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/file.lua @@ -0,0 +1,343 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local ngx = ngx +local yaml = require("lyaml") +local profile = require("apisix.core.profile") +local util = require("apisix.cli.util") +local schema = require("apisix.cli.schema") +local default_conf = require("apisix.cli.config") +local dkjson = require("dkjson") +local pl_path = require("pl.path") + +local pairs = pairs +local type = type +local tonumber = tonumber +local getenv = os.getenv +local str_gmatch = string.gmatch +local str_find = string.find +local str_sub = string.sub +local print = print + +local _M = {} +local exported_vars + + +function _M.get_exported_vars() + return exported_vars +end + + +local function is_empty_yaml_line(line) + return line == '' or str_find(line, '^%s*$') or str_find(line, '^%s*#') +end + + +local function tab_is_array(t) + local count = 0 + for k, v in pairs(t) do + count = count + 1 + end + + return #t == count +end + + +local function var_sub(val) + local err + local var_used = false + -- we use '${{var}}' because '$var' and '${var}' are taken + -- by Nginx + local new_val = val:gsub("%$%{%{%s*([%w_]+[%:%=]?.-)%s*%}%}", function(var) + local i, j = var:find("%:%=") + local default + if i and j then + default = var:sub(i + 2, #var) + default = default:gsub('^%s*(.-)%s*$', '%1') + var = var:sub(1, i - 1) + end + + local v = getenv(var) or default + if v then + if not exported_vars then + exported_vars = {} + end + + exported_vars[var] = v + var_used = true + return v + end + + err = "failed to handle configuration: " .. + "can't find environment variable " .. 
var + return "" + end) + return new_val, var_used, err +end + + +local function resolve_conf_var(conf) + local new_keys = {} + for key, val in pairs(conf) do + -- avoid re-iterating the table for already iterated key + if new_keys[key] then + goto continue + end + -- substitute environment variables from conf keys + if type(key) == "string" then + local new_key, _, err = var_sub(key) + if err then + return nil, err + end + if new_key ~= key then + new_keys[new_key] = "dummy" -- we only care about checking the key + conf.key = nil + conf[new_key] = val + key = new_key + end + end + if type(val) == "table" then + local ok, err = resolve_conf_var(val) + if not ok then + return nil, err + end + + elseif type(val) == "string" then + local new_val, var_used, err = var_sub(val) + + if err then + return nil, err + end + + if var_used then + if tonumber(new_val) ~= nil then + new_val = tonumber(new_val) + elseif new_val == "true" then + new_val = true + elseif new_val == "false" then + new_val = false + end + end + + conf[key] = new_val + end + ::continue:: + end + + return true +end + + +_M.resolve_conf_var = resolve_conf_var + + +local function replace_by_reserved_env_vars(conf) + -- TODO: support more reserved environment variables + local v = getenv("APISIX_DEPLOYMENT_ETCD_HOST") + if v and conf["deployment"] and conf["deployment"]["etcd"] then + local val, _, err = dkjson.decode(v) + if err or not val then + print("parse ${APISIX_DEPLOYMENT_ETCD_HOST} failed, error:", err) + return + end + + conf["deployment"]["etcd"]["host"] = val + end +end + + +local function path_is_multi_type(path, type_val) + if str_sub(path, 1, 14) == "nginx_config->" and + (type_val == "number" or type_val == "string") then + return true + end + + if path == "apisix->node_listen" and type_val == "number" then + return true + end + + if path == "apisix->data_encryption->keyring" then + return true + end + + return false +end + + +local function merge_conf(base, new_tab, ppath) + ppath = ppath or "" + + for key, val in pairs(new_tab) do + if type(val) == "table" then + if val == yaml.null then + base[key] = nil + + elseif tab_is_array(val) then + base[key] = val + + else + if base[key] == nil then + base[key] = {} + end + + local ok, err = merge_conf( + base[key], + val, + ppath == "" and key or ppath .. "->" .. key + ) + if not ok then + return nil, err + end + end + else + local type_val = type(val) + + if base[key] == nil then + base[key] = val + elseif type(base[key]) ~= type_val then + local path = ppath == "" and key or ppath .. "->" .. key + + if path_is_multi_type(path, type_val) then + base[key] = val + else + return nil, "failed to merge, path[" .. path .. "] expect: " .. + type(base[key]) .. ", but got: " .. type_val + end + else + base[key] = val + end + end + end + + return base +end + + +function _M.read_yaml_conf(apisix_home) + if apisix_home then + profile.apisix_home = apisix_home .. "/" + end + + local local_conf_path = profile:customized_yaml_path() + if not local_conf_path then + local_conf_path = profile:yaml_path("config") + end + local user_conf_yaml, err = util.read_file(local_conf_path) + if not user_conf_yaml then + return nil, err + end + + local is_empty_file = true + for line in str_gmatch(user_conf_yaml .. 
'\n', '(.-)\r?\n') do + if not is_empty_yaml_line(line) then + is_empty_file = false + break + end + end + + if not is_empty_file then + local user_conf = yaml.load(user_conf_yaml) + if not user_conf then + return nil, "invalid config.yaml file" + end + + local ok, err = resolve_conf_var(user_conf) + if not ok then + return nil, err + end + + ok, err = merge_conf(default_conf, user_conf) + if not ok then + return nil, err + end + end + + -- fill the default value by the schema + local ok, err = schema.validate(default_conf) + if not ok then + return nil, err + end + if default_conf.deployment then + default_conf.deployment.config_provider = "etcd" + if default_conf.deployment.role == "traditional" then + default_conf.etcd = default_conf.deployment.etcd + if default_conf.deployment.role_traditional.config_provider == "yaml" then + default_conf.deployment.config_provider = "yaml" + end + + elseif default_conf.deployment.role == "control_plane" then + default_conf.etcd = default_conf.deployment.etcd + default_conf.apisix.enable_admin = true + + elseif default_conf.deployment.role == "data_plane" then + default_conf.etcd = default_conf.deployment.etcd + if default_conf.deployment.role_data_plane.config_provider == "yaml" then + default_conf.deployment.config_provider = "yaml" + elseif default_conf.deployment.role_data_plane.config_provider == "json" then + default_conf.deployment.config_provider = "json" + elseif default_conf.deployment.role_data_plane.config_provider == "xds" then + default_conf.deployment.config_provider = "xds" + end + default_conf.apisix.enable_admin = false + end + end + + --- using `not ngx` to check whether the current execution environment is apisix cli module, + --- because it is only necessary to parse and validate `apisix.yaml` in apisix cli. + if default_conf.deployment.config_provider == "yaml" and not ngx then + local apisix_conf_path = profile:yaml_path("apisix") + local apisix_conf_yaml, _ = util.read_file(apisix_conf_path) + if apisix_conf_yaml then + local apisix_conf = yaml.load(apisix_conf_yaml) + if apisix_conf then + local ok, err = resolve_conf_var(apisix_conf) + if not ok then + return nil, err + end + end + end + end + + local apisix_ssl = default_conf.apisix.ssl + if apisix_ssl and apisix_ssl.ssl_trusted_certificate then + -- default value is set to "system" during schema validation + if apisix_ssl.ssl_trusted_certificate == "system" then + local trusted_certs_path, err = util.get_system_trusted_certs_filepath() + if not trusted_certs_path then + util.die(err) + end + + apisix_ssl.ssl_trusted_certificate = trusted_certs_path + else + -- During validation, the path is relative to PWD + -- When Nginx starts, the path is relative to conf + -- Therefore we need to check the absolute version instead + local cert_path = pl_path.abspath(apisix_ssl.ssl_trusted_certificate) + if not pl_path.exists(cert_path) then + util.die("certificate path", cert_path, "doesn't exist\n") + end + apisix_ssl.ssl_trusted_certificate = cert_path + end + end + + replace_by_reserved_env_vars(default_conf) + + return default_conf +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/ip.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/ip.lua new file mode 100644 index 0000000..182b824 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/ip.lua @@ -0,0 +1,66 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- IP match and verify module. +-- +-- @module cli.ip + +local mediador_ip = require("resty.mediador.ip") +local setmetatable = setmetatable + + +local _M = {} +local mt = { __index = _M } + + +--- +-- create a instance of module cli.ip +-- +-- @function cli.ip:new +-- @tparam string ip IP or CIDR. +-- @treturn instance of module if the given ip valid, nil and error message otherwise. +function _M.new(self, ip) + if not mediador_ip.valid(ip) then + return nil, "invalid ip" + end + + local _ip = mediador_ip.parse(ip) + + return setmetatable({ _ip = _ip }, mt) +end + + +--- +-- Is that the given ip loopback? +-- +-- @function cli.ip:is_loopback +-- @treturn boolean True if the given ip is the loopback, false otherwise. +function _M.is_loopback(self) + return self._ip and "loopback" == self._ip:range() +end + +--- +-- Is that the given ip unspecified? +-- +-- @function cli.ip:is_unspecified +-- @treturn boolean True if the given ip is all the unspecified, false otherwise. +function _M.is_unspecified(self) + return self._ip and "unspecified" == self._ip:range() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/ngx_tpl.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/ngx_tpl.lua new file mode 100644 index 0000000..5dd739b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/ngx_tpl.lua @@ -0,0 +1,998 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +return [=[ +# Configuration File - Nginx Server Configs +# This is a read-only file, do not try to modify it. 
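+# Template note: this file is rendered with resty.template when `apisix init`
+# generates conf/nginx.conf. `{* expr *}` interpolates a value prepared in
+# apisix/cli/ops.lua, and `{% ... %}` embeds Lua control flow, so a block such
+# as `{% if enable_http then %} ... {% end %}` is emitted only when HTTP
+# proxying is enabled.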
+{% if user and user ~= '' then %} +user {* user *}; +{% end %} +master_process on; + +worker_processes {* worker_processes *}; +{% if os_name == "Linux" and enable_cpu_affinity == true then %} +worker_cpu_affinity auto; +{% end %} + +# main configuration snippet starts +{% if main_configuration_snippet then %} +{* main_configuration_snippet *} +{% end %} +# main configuration snippet ends + +error_log {* error_log *} {* error_log_level or "warn" *}; +pid logs/nginx.pid; + +worker_rlimit_nofile {* worker_rlimit_nofile *}; + +events { + accept_mutex off; + worker_connections {* event.worker_connections *}; +} + +worker_rlimit_core {* worker_rlimit_core *}; + +worker_shutdown_timeout {* worker_shutdown_timeout *}; + +env APISIX_PROFILE; +env PATH; # for searching external plugin runner's binary + +# reserved environment variables for configuration +env APISIX_DEPLOYMENT_ETCD_HOST; + +{% if envs then %} +{% for _, name in ipairs(envs) do %} +env {*name*}; +{% end %} +{% end %} + +{% if use_apisix_base then %} +thread_pool grpc-client-nginx-module threads=1; + +lua { + {% if enabled_stream_plugins["prometheus"] then %} + lua_shared_dict prometheus-metrics {* meta.lua_shared_dict["prometheus-metrics"] *}; + {% end %} + {% if standalone_with_admin_api then %} + lua_shared_dict standalone-config {* meta.lua_shared_dict["standalone-config"] *}; + {% end %} + {% if status then %} + lua_shared_dict status-report {* meta.lua_shared_dict["status-report"] *}; + {% end %} + lua_shared_dict nacos 10m; +} + +{% if enabled_stream_plugins["prometheus"] and not enable_http then %} +http { + lua_package_path "{*extra_lua_path*}$prefix/deps/share/lua/5.1/?.lua;$prefix/deps/share/lua/5.1/?/init.lua;]=] + .. [=[{*apisix_lua_home*}/?.lua;{*apisix_lua_home*}/?/init.lua;;{*lua_path*};"; + lua_package_cpath "{*extra_lua_cpath*}$prefix/deps/lib64/lua/5.1/?.so;]=] + .. [=[$prefix/deps/lib/lua/5.1/?.so;;]=] + .. [=[{*lua_cpath*};"; + + {% if enabled_stream_plugins["prometheus"] then %} + + init_by_lua_block { + require "resty.core" + local process = require("ngx.process") + local ok, err = process.enable_privileged_agent() + if not ok then + ngx.log(ngx.ERR, "failed to enable privileged_agent: ", err) + end + } + + init_worker_by_lua_block { + require("apisix.plugins.prometheus.exporter").http_init(true) + } + + server { + {% if use_apisix_base then %} + listen {* prometheus_server_addr *} enable_process=privileged_agent; + {% else %} + listen {* prometheus_server_addr *}; + {% end %} + + access_log off; + + location / { + content_by_lua_block { + local prometheus = require("apisix.plugins.prometheus.exporter") + prometheus.export_metrics(true) + } + } + + location = /apisix/nginx_status { + allow 127.0.0.0/24; + deny all; + stub_status; + } + } + {% end %} +} +{% end %} + +{% end %} + +{% if enable_stream then %} +stream { + lua_package_path "{*extra_lua_path*}$prefix/deps/share/lua/5.1/?.lua;$prefix/deps/share/lua/5.1/?/init.lua;]=] + .. [=[{*apisix_lua_home*}/?.lua;{*apisix_lua_home*}/?/init.lua;;{*lua_path*};"; + lua_package_cpath "{*extra_lua_cpath*}$prefix/deps/lib64/lua/5.1/?.so;]=] + .. [=[$prefix/deps/lib/lua/5.1/?.so;;]=] + .. 
[=[{*lua_cpath*};"; + lua_socket_log_errors off; + + {% if max_pending_timers then %} + lua_max_pending_timers {* max_pending_timers *}; + {% end %} + {% if max_running_timers then %} + lua_max_running_timers {* max_running_timers *}; + {% end %} + + lua_shared_dict lrucache-lock-stream {* stream.lua_shared_dict["lrucache-lock-stream"] *}; + lua_shared_dict etcd-cluster-health-check-stream {* stream.lua_shared_dict["etcd-cluster-health-check-stream"] *}; + lua_shared_dict worker-events-stream {* stream.lua_shared_dict["worker-events-stream"] *}; + + {% if stream.lua_shared_dict["upstream-healthcheck-stream"] then %} + lua_shared_dict upstream-healthcheck-stream {* stream.lua_shared_dict["upstream-healthcheck-stream"] *}; + {% end %} + + {% if enabled_discoveries["tars"] then %} + lua_shared_dict tars-stream {* stream.lua_shared_dict["tars-stream"] *}; + {% end %} + + {% if enabled_stream_plugins["limit-conn"] then %} + lua_shared_dict plugin-limit-conn-stream {* stream.lua_shared_dict["plugin-limit-conn-stream"] *}; + {% end %} + + # for discovery shared dict + {% if discovery_shared_dicts then %} + {% for key, size in pairs(discovery_shared_dicts) do %} + lua_shared_dict {*key*}-stream {*size*}; + {% end %} + {% end %} + + resolver {% for _, dns_addr in ipairs(dns_resolver or {}) do %} {*dns_addr*} {% end %} {% if dns_resolver_valid then %} valid={*dns_resolver_valid*}{% end %} ipv6={% if enable_ipv6 then %}on{% else %}off{% end %}; + resolver_timeout {*resolver_timeout*}; + + {% if ssl.ssl_trusted_certificate ~= nil then %} + lua_ssl_trusted_certificate {* ssl.ssl_trusted_certificate *}; + {% end %} + + # for stream logs, off by default + {% if stream.enable_access_log == true then %} + log_format main escape={* stream.access_log_format_escape *} '{* stream.access_log_format *}'; + + access_log {* stream.access_log *} main buffer=16384 flush=3; + {% end %} + + # stream configuration snippet starts + {% if stream_configuration_snippet then %} + {* stream_configuration_snippet *} + {% end %} + # stream configuration snippet ends + + upstream apisix_backend { + server 127.0.0.1:80; + balancer_by_lua_block { + apisix.stream_balancer_phase() + } + } + + init_by_lua_block { + require "resty.core" + {% if lua_module_hook then %} + require "{* lua_module_hook *}" + {% end %} + apisix = require("apisix") + local dns_resolver = { {% for _, dns_addr in ipairs(dns_resolver or {}) do %} "{*dns_addr*}", {% end %} } + local args = { + dns_resolver = dns_resolver, + } + apisix.stream_init(args) + } + + init_worker_by_lua_block { + apisix.stream_init_worker() + } + + {% if (events.module or "") == "lua-resty-events" then %} + # the server block for lua-resty-events + server { + listen unix:{*apisix_lua_home*}/logs/stream_worker_events.sock; + access_log off; + content_by_lua_block { + require("resty.events.compat").run() + } + } + {% end %} + + server { + {% for _, item in ipairs(stream_proxy.tcp or {}) do %} + listen {*item.addr*} {% if item.tls then %} ssl {% end %} {% if enable_reuseport then %} reuseport {% end %} {% if proxy_protocol and proxy_protocol.enable_tcp_pp then %} proxy_protocol {% end %}; + {% end %} + {% for _, addr in ipairs(stream_proxy.udp or {}) do %} + listen {*addr*} udp {% if enable_reuseport then %} reuseport {% end %}; + {% end %} + + {% if tcp_enable_ssl then %} + ssl_certificate {* ssl.ssl_cert *}; + ssl_certificate_key {* ssl.ssl_cert_key *}; + + ssl_client_hello_by_lua_block { + apisix.ssl_client_hello_phase() + } + + ssl_certificate_by_lua_block { + apisix.ssl_phase() + } + 
{% end %} + + {% if proxy_protocol and proxy_protocol.enable_tcp_pp_to_upstream then %} + proxy_protocol on; + {% end %} + + preread_by_lua_block { + apisix.stream_preread_phase() + } + + proxy_pass apisix_backend; + + {% if use_apisix_base then %} + set $upstream_sni "apisix_backend"; + proxy_ssl_server_name on; + proxy_ssl_name $upstream_sni; + {% end %} + + log_by_lua_block { + apisix.stream_log_phase() + } + } +} +{% end %} + +{% if enable_http then %} +http { + # put extra_lua_path in front of the builtin path + # so user can override the source code + lua_package_path "{*extra_lua_path*}$prefix/deps/share/lua/5.1/?.lua;$prefix/deps/share/lua/5.1/?/init.lua;]=] + .. [=[{*apisix_lua_home*}/?.lua;{*apisix_lua_home*}/?/init.lua;;{*lua_path*};"; + lua_package_cpath "{*extra_lua_cpath*}$prefix/deps/lib64/lua/5.1/?.so;]=] + .. [=[$prefix/deps/lib/lua/5.1/?.so;;]=] + .. [=[{*lua_cpath*};"; + + {% if max_pending_timers then %} + lua_max_pending_timers {* max_pending_timers *}; + {% end %} + {% if max_running_timers then %} + lua_max_running_timers {* max_running_timers *}; + {% end %} + + lua_shared_dict internal-status {* http.lua_shared_dict["internal-status"] *}; + lua_shared_dict upstream-healthcheck {* http.lua_shared_dict["upstream-healthcheck"] *}; + lua_shared_dict worker-events {* http.lua_shared_dict["worker-events"] *}; + lua_shared_dict lrucache-lock {* http.lua_shared_dict["lrucache-lock"] *}; + lua_shared_dict balancer-ewma {* http.lua_shared_dict["balancer-ewma"] *}; + lua_shared_dict balancer-ewma-locks {* http.lua_shared_dict["balancer-ewma-locks"] *}; + lua_shared_dict balancer-ewma-last-touched-at {* http.lua_shared_dict["balancer-ewma-last-touched-at"] *}; + lua_shared_dict etcd-cluster-health-check {* http.lua_shared_dict["etcd-cluster-health-check"] *}; # etcd health check + + # for discovery shared dict + {% if discovery_shared_dicts then %} + {% for key, size in pairs(discovery_shared_dicts) do %} + lua_shared_dict {*key*} {*size*}; + {% end %} + {% end %} + + {% if enabled_discoveries["tars"] then %} + lua_shared_dict tars {* http.lua_shared_dict["tars"] *}; + {% end %} + + + {% if http.lua_shared_dict["plugin-ai-rate-limiting"] then %} + lua_shared_dict plugin-ai-rate-limiting {* http.lua_shared_dict["plugin-ai-rate-limiting"] *}; + {% else %} + lua_shared_dict plugin-ai-rate-limiting 10m; + {% end %} + + {% if http.lua_shared_dict["plugin-ai-rate-limiting"] then %} + lua_shared_dict plugin-ai-rate-limiting-reset-header {* http.lua_shared_dict["plugin-ai-rate-limiting-reset-header"] *}; + {% else %} + lua_shared_dict plugin-ai-rate-limiting-reset-header 10m; + {% end %} + + {% if enabled_plugins["limit-conn"] then %} + lua_shared_dict plugin-limit-conn {* http.lua_shared_dict["plugin-limit-conn"] *}; + lua_shared_dict plugin-limit-conn-redis-cluster-slot-lock {* http.lua_shared_dict["plugin-limit-conn-redis-cluster-slot-lock"] *}; + {% end %} + + {% if enabled_plugins["limit-req"] then %} + lua_shared_dict plugin-limit-req-redis-cluster-slot-lock {* http.lua_shared_dict["plugin-limit-req-redis-cluster-slot-lock"] *}; + lua_shared_dict plugin-limit-req {* http.lua_shared_dict["plugin-limit-req"] *}; + {% end %} + + {% if enabled_plugins["limit-count"] then %} + lua_shared_dict plugin-limit-count {* http.lua_shared_dict["plugin-limit-count"] *}; + lua_shared_dict plugin-limit-count-redis-cluster-slot-lock {* http.lua_shared_dict["plugin-limit-count-redis-cluster-slot-lock"] *}; + lua_shared_dict plugin-limit-count-reset-header {* 
http.lua_shared_dict["plugin-limit-count"] *}; + {% end %} + + {% if enabled_plugins["prometheus"] and not enabled_stream_plugins["prometheus"] then %} + lua_shared_dict prometheus-metrics {* http.lua_shared_dict["prometheus-metrics"] *}; + {% end %} + + {% if enabled_plugins["skywalking"] then %} + lua_shared_dict tracing_buffer {* http.lua_shared_dict.tracing_buffer *}; # plugin: skywalking + {% end %} + + {% if enabled_plugins["api-breaker"] then %} + lua_shared_dict plugin-api-breaker {* http.lua_shared_dict["plugin-api-breaker"] *}; + {% end %} + + {% if enabled_plugins["openid-connect"] or enabled_plugins["authz-keycloak"] then %} + # for openid-connect and authz-keycloak plugin + lua_shared_dict discovery {* http.lua_shared_dict["discovery"] *}; # cache for discovery metadata documents + {% end %} + + {% if enabled_plugins["openid-connect"] then %} + # for openid-connect plugin + lua_shared_dict jwks {* http.lua_shared_dict["jwks"] *}; # cache for JWKs + lua_shared_dict introspection {* http.lua_shared_dict["introspection"] *}; # cache for JWT verification results + {% end %} + + {% if enabled_plugins["cas-auth"] then %} + lua_shared_dict cas_sessions {* http.lua_shared_dict["cas-auth"] *}; + {% end %} + + {% if enabled_plugins["authz-keycloak"] then %} + # for authz-keycloak + lua_shared_dict access-tokens {* http.lua_shared_dict["access-tokens"] *}; # cache for service account access tokens + {% end %} + + {% if enabled_plugins["ocsp-stapling"] then %} + lua_shared_dict ocsp-stapling {* http.lua_shared_dict["ocsp-stapling"] *}; # cache for ocsp-stapling + {% end %} + + {% if enabled_plugins["ext-plugin-pre-req"] or enabled_plugins["ext-plugin-post-req"] then %} + lua_shared_dict ext-plugin {* http.lua_shared_dict["ext-plugin"] *}; # cache for ext-plugin + {% end %} + + {% if enabled_plugins["mcp-bridge"] then %} + lua_shared_dict mcp-session {* http.lua_shared_dict["mcp-session"] *}; # cache for mcp-session + {% end %} + + {% if config_center == "xds" then %} + lua_shared_dict xds-config 10m; + lua_shared_dict xds-config-version 1m; + {% end %} + + # for custom shared dict + {% if http.custom_lua_shared_dict then %} + {% for cache_key, cache_size in pairs(http.custom_lua_shared_dict) do %} + lua_shared_dict {*cache_key*} {*cache_size*}; + {% end %} + {% end %} + + {% if enabled_plugins["error-log-logger"] then %} + lua_capture_error_log 10m; + {% end %} + + lua_ssl_verify_depth 5; + ssl_session_timeout 86400; + + {% if http.underscores_in_headers then %} + underscores_in_headers {* http.underscores_in_headers *}; + {%end%} + + lua_socket_log_errors off; + + resolver {% for _, dns_addr in ipairs(dns_resolver or {}) do %} {*dns_addr*} {% end %} {% if dns_resolver_valid then %} valid={*dns_resolver_valid*}{% end %} ipv6={% if enable_ipv6 then %}on{% else %}off{% end %}; + resolver_timeout {*resolver_timeout*}; + + lua_http10_buffering off; + + lua_regex_match_limit 100000; + lua_regex_cache_max_entries 8192; + + {% if http.enable_access_log == false then %} + access_log off; + {% else %} + log_format main escape={* http.access_log_format_escape *} '{* http.access_log_format *}'; + uninitialized_variable_warn off; + + {% if http.access_log_buffer then %} + access_log {* http.access_log *} main buffer={* http.access_log_buffer *} flush=3; + {% else %} + access_log {* http.access_log *} main buffer=16384 flush=3; + {% end %} + {% end %} + open_file_cache max=1000 inactive=60; + client_max_body_size {* http.client_max_body_size *}; + keepalive_timeout {* http.keepalive_timeout *}; + 
client_header_timeout {* http.client_header_timeout *}; + client_body_timeout {* http.client_body_timeout *}; + send_timeout {* http.send_timeout *}; + variables_hash_max_size {* http.variables_hash_max_size *}; + + server_tokens off; + + include mime.types; + charset {* http.charset *}; + + {% if http.real_ip_header then %} + real_ip_header {* http.real_ip_header *}; + {% end %} + + {% if http.real_ip_recursive then %} + real_ip_recursive {* http.real_ip_recursive *}; + {% end %} + + {% if http.real_ip_from then %} + {% for _, real_ip in ipairs(http.real_ip_from) do %} + set_real_ip_from {*real_ip*}; + {% end %} + {% end %} + + {% if ssl.ssl_trusted_certificate ~= nil then %} + lua_ssl_trusted_certificate {* ssl.ssl_trusted_certificate *}; + {% end %} + # http configuration snippet starts + {% if http_configuration_snippet then %} + {* http_configuration_snippet *} + {% end %} + # http configuration snippet ends + + upstream apisix_backend { + server 0.0.0.1; + + {% if use_apisix_base then %} + keepalive {* http.upstream.keepalive *}; + keepalive_requests {* http.upstream.keepalive_requests *}; + keepalive_timeout {* http.upstream.keepalive_timeout *}; + # we put the static configuration above so that we can override it in the Lua code + + balancer_by_lua_block { + apisix.http_balancer_phase() + } + {% else %} + balancer_by_lua_block { + apisix.http_balancer_phase() + } + + keepalive {* http.upstream.keepalive *}; + keepalive_requests {* http.upstream.keepalive_requests *}; + keepalive_timeout {* http.upstream.keepalive_timeout *}; + {% end %} + } + + {% if enabled_plugins["dubbo-proxy"] then %} + upstream apisix_dubbo_backend { + server 0.0.0.1; + balancer_by_lua_block { + apisix.http_balancer_phase() + } + + # dynamical keepalive doesn't work with dubbo as the connection here + # is managed by ngx_multi_upstream_module + multi {* dubbo_upstream_multiplex_count *}; + keepalive {* http.upstream.keepalive *}; + keepalive_requests {* http.upstream.keepalive_requests *}; + keepalive_timeout {* http.upstream.keepalive_timeout *}; + } + {% end %} + + {% if use_apisix_base then %} + apisix_delay_client_max_body_check on; + apisix_mirror_on_demand on; + {% end %} + + {% if wasm then %} + wasm_vm wasmtime; + {% end %} + + init_by_lua_block { + require "resty.core" + {% if lua_module_hook then %} + require "{* lua_module_hook *}" + {% end %} + apisix = require("apisix") + + local dns_resolver = { {% for _, dns_addr in ipairs(dns_resolver or {}) do %} "{*dns_addr*}", {% end %} } + local args = { + dns_resolver = dns_resolver, + } + apisix.http_init(args) + + -- set apisix_lua_home into constants module + -- it may be used by plugins to determine the work path of apisix + local constants = require("apisix.constants") + constants.apisix_lua_home = "{*apisix_lua_home*}" + } + + init_worker_by_lua_block { + apisix.http_init_worker() + } + + exit_worker_by_lua_block { + apisix.http_exit_worker() + } + + {% if (events.module or "") == "lua-resty-events" then %} + # the server block for lua-resty-events + server { + listen unix:{*apisix_lua_home*}/logs/worker_events.sock; + access_log off; + location / { + content_by_lua_block { + require("resty.events.compat").run() + } + } + } + {% end %} + + {% if enable_control then %} + server { + listen {* control_server_addr *}; + + access_log off; + + location / { + content_by_lua_block { + apisix.http_control() + } + } + } + {% end %} + + {% if status then %} + server { + listen {* status_server_addr *} enable_process=privileged_agent; + access_log off; + 
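+        # intended for liveness/readiness style probes: /status answers as
+        # soon as the instance is up, while /status/ready is expected to
+        # succeed only once the workers have loaded their configuration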
location /status { + content_by_lua_block { + apisix.status() + } + } + location /status/ready { + content_by_lua_block { + apisix.status_ready() + } + } + } + {% end %} + + {% if enabled_plugins["prometheus"] and prometheus_server_addr then %} + server { + {% if use_apisix_base then %} + listen {* prometheus_server_addr *} enable_process=privileged_agent; + {% else %} + listen {* prometheus_server_addr *}; + {% end %} + + access_log off; + + location / { + content_by_lua_block { + local prometheus = require("apisix.plugins.prometheus.exporter") + prometheus.export_metrics() + } + } + + location = /apisix/nginx_status { + allow 127.0.0.0/24; + deny all; + stub_status; + } + } + {% end %} + + {% if enable_admin then %} + server { + {%if https_admin then%} + listen {* admin_server_addr *} ssl; + + ssl_certificate {* admin_api_mtls.admin_ssl_cert *}; + ssl_certificate_key {* admin_api_mtls.admin_ssl_cert_key *}; + {%if admin_api_mtls.admin_ssl_ca_cert and admin_api_mtls.admin_ssl_ca_cert ~= "" then%} + ssl_verify_client on; + ssl_client_certificate {* admin_api_mtls.admin_ssl_ca_cert *}; + {% end %} + + ssl_session_cache shared:SSL:20m; + ssl_protocols {* ssl.ssl_protocols *}; + ssl_ciphers {* ssl.ssl_ciphers *}; + ssl_prefer_server_ciphers on; + {% if ssl.ssl_session_tickets then %} + ssl_session_tickets on; + {% else %} + ssl_session_tickets off; + {% end %} + + {% else %} + listen {* admin_server_addr *}; + {%end%} + log_not_found off; + + # admin configuration snippet starts + {% if http_admin_configuration_snippet then %} + {* http_admin_configuration_snippet *} + {% end %} + # admin configuration snippet ends + + set $upstream_scheme 'http'; + set $upstream_host $http_host; + set $upstream_uri ''; + + {%if allow_admin then%} + {% for _, allow_ip in ipairs(allow_admin) do %} + allow {*allow_ip*}; + {% end %} + deny all; + {%else%} + allow all; + {%end%} + + location /apisix/admin { + content_by_lua_block { + apisix.http_admin() + } + } + + {% if enable_admin_ui then %} + location = /ui { + return 301 /ui/; + } + location ^~ /ui/ { + rewrite ^/ui/(.*)$ /$1 break; + root {* apisix_lua_home *}/ui; + try_files $uri /index.html =404; + gzip on; + gzip_types text/css application/javascript application/json; + expires 7200s; + add_header Cache-Control "private,max-age=7200"; + } + {% end %} + } + {% end %} + + {% if deployment_role ~= "control_plane" then %} + + {% if enabled_plugins["proxy-cache"] then %} + # for proxy cache + {% for _, cache in ipairs(proxy_cache.zones) do %} + {% if cache.disk_path and cache.cache_levels and cache.disk_size then %} + proxy_cache_path {* cache.disk_path *} levels={* cache.cache_levels *} keys_zone={* cache.name *}:{* cache.memory_size *} inactive=1d max_size={* cache.disk_size *} use_temp_path=off; + {% else %} + lua_shared_dict {* cache.name *} {* cache.memory_size *}; + {% end %} + {% end %} + + map $upstream_cache_zone $upstream_cache_zone_info { + {% for _, cache in ipairs(proxy_cache.zones) do %} + {% if cache.disk_path and cache.cache_levels and cache.disk_size then %} + {* cache.name *} {* cache.disk_path *},{* cache.cache_levels *}; + {% end %} + {% end %} + } + {% end %} + + server { + {% if enable_http2 then %} + http2 on; + {% end %} + {% if enable_http3_in_server_context then %} + http3 on; + {% end %} + {% for _, item in ipairs(node_listen) do %} + listen {* item.ip *}:{* item.port *} default_server {% if enable_reuseport then %} reuseport {% end %}; + {% end %} + {% if ssl.enable then %} + {% for _, item in ipairs(ssl.listen) do %} + {% if 
item.enable_http3 then %} + listen {* item.ip *}:{* item.port *} quic default_server {% if enable_reuseport then %} reuseport {% end %}; + listen {* item.ip *}:{* item.port *} ssl default_server; + {% else %} + listen {* item.ip *}:{* item.port *} ssl default_server {% if enable_reuseport then %} reuseport {% end %}; + {% end %} + {% end %} + {% end %} + {% if proxy_protocol and proxy_protocol.listen_http_port then %} + listen {* proxy_protocol.listen_http_port *} default_server proxy_protocol; + {% end %} + {% if proxy_protocol and proxy_protocol.listen_https_port then %} + listen {* proxy_protocol.listen_https_port *} ssl default_server proxy_protocol; + {% end %} + + server_name _; + + {% if ssl.enable then %} + ssl_certificate {* ssl.ssl_cert *}; + ssl_certificate_key {* ssl.ssl_cert_key *}; + ssl_session_cache shared:SSL:20m; + ssl_session_timeout 10m; + + ssl_protocols {* ssl.ssl_protocols *}; + ssl_ciphers {* ssl.ssl_ciphers *}; + ssl_prefer_server_ciphers on; + {% if ssl.ssl_session_tickets then %} + ssl_session_tickets on; + {% else %} + ssl_session_tickets off; + {% end %} + {% end %} + + {% if ssl.ssl_trusted_certificate ~= nil then %} + proxy_ssl_trusted_certificate {* ssl.ssl_trusted_certificate *}; + {% end %} + + # opentelemetry_set_ngx_var starts + {% if opentelemetry_set_ngx_var then %} + set $opentelemetry_context_traceparent ''; + set $opentelemetry_trace_id ''; + set $opentelemetry_span_id ''; + {% end %} + # opentelemetry_set_ngx_var ends + + # zipkin_set_ngx_var starts + {% if zipkin_set_ngx_var then %} + set $zipkin_context_traceparent ''; + set $zipkin_trace_id ''; + set $zipkin_span_id ''; + {% end %} + # zipkin_set_ngx_var ends + + # http server configuration snippet starts + {% if http_server_configuration_snippet then %} + {* http_server_configuration_snippet *} + {% end %} + # http server configuration snippet ends + + location = /apisix/nginx_status { + allow 127.0.0.0/24; + deny all; + access_log off; + stub_status; + } + + {% if ssl.enable then %} + ssl_client_hello_by_lua_block { + apisix.ssl_client_hello_phase() + } + + ssl_certificate_by_lua_block { + apisix.ssl_phase() + } + {% end %} + + {% if http.proxy_ssl_server_name then %} + proxy_ssl_name $upstream_host; + proxy_ssl_server_name on; + {% end %} + + location / { + set $upstream_mirror_host ''; + set $upstream_mirror_uri ''; + set $upstream_upgrade ''; + set $upstream_connection ''; + + set $upstream_scheme 'http'; + set $upstream_host $http_host; + set $upstream_uri ''; + set $ctx_ref ''; + + {% if wasm then %} + set $wasm_process_req_body ''; + set $wasm_process_resp_body ''; + {% end %} + + # http server location configuration snippet starts + {% if http_server_location_configuration_snippet then %} + {* http_server_location_configuration_snippet *} + {% end %} + # http server location configuration snippet ends + + {% if enabled_plugins["dubbo-proxy"] then %} + set $dubbo_service_name ''; + set $dubbo_service_version ''; + set $dubbo_method ''; + {% end %} + + access_by_lua_block { + apisix.http_access_phase() + } + + proxy_http_version 1.1; + proxy_set_header Host $upstream_host; + proxy_set_header Upgrade $upstream_upgrade; + proxy_set_header Connection $upstream_connection; + proxy_set_header X-Real-IP $remote_addr; + proxy_pass_header Date; + + ### the following x-forwarded-* headers is to send to upstream server + + set $var_x_forwarded_proto $scheme; + set $var_x_forwarded_host $host; + set $var_x_forwarded_port $server_port; + + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 
+ proxy_set_header X-Forwarded-Proto $var_x_forwarded_proto; + proxy_set_header X-Forwarded-Host $var_x_forwarded_host; + proxy_set_header X-Forwarded-Port $var_x_forwarded_port; + + {% if enabled_plugins["proxy-cache"] then %} + ### the following configuration is to cache response content from upstream server + + set $upstream_cache_zone off; + set $upstream_cache_key ''; + set $upstream_cache_bypass ''; + set $upstream_no_cache ''; + + proxy_cache $upstream_cache_zone; + proxy_cache_valid any {% if proxy_cache.cache_ttl then %} {* proxy_cache.cache_ttl *} {% else %} 10s {% end %}; + proxy_cache_min_uses 1; + proxy_cache_methods GET HEAD POST; + proxy_cache_lock_timeout 5s; + proxy_cache_use_stale off; + proxy_cache_key $upstream_cache_key; + proxy_no_cache $upstream_no_cache; + proxy_cache_bypass $upstream_cache_bypass; + + {% end %} + + proxy_pass $upstream_scheme://apisix_backend$upstream_uri; + + {% if enabled_plugins["proxy-mirror"] then %} + mirror /proxy_mirror; + {% end %} + + header_filter_by_lua_block { + apisix.http_header_filter_phase() + } + + body_filter_by_lua_block { + apisix.http_body_filter_phase() + } + + log_by_lua_block { + apisix.http_log_phase() + } + } + + location @grpc_pass { + + access_by_lua_block { + apisix.grpc_access_phase() + } + + {% if use_apisix_base then %} + # For servers which obey the standard, when `:authority` is missing, + # `host` will be used instead. When used with apisix-runtime, we can do + # better by setting `:authority` directly + grpc_set_header ":authority" $upstream_host; + {% else %} + grpc_set_header "Host" $upstream_host; + {% end %} + grpc_set_header Content-Type application/grpc; + grpc_set_header TE trailers; + grpc_socket_keepalive on; + grpc_pass $upstream_scheme://apisix_backend; + + {% if enabled_plugins["proxy-mirror"] then %} + mirror /proxy_mirror_grpc; + {% end %} + + header_filter_by_lua_block { + apisix.http_header_filter_phase() + } + + body_filter_by_lua_block { + apisix.http_body_filter_phase() + } + + log_by_lua_block { + apisix.http_log_phase() + } + } + + {% if enabled_plugins["dubbo-proxy"] then %} + location @dubbo_pass { + access_by_lua_block { + apisix.dubbo_access_phase() + } + + dubbo_pass_all_headers on; + dubbo_pass_body on; + dubbo_pass $dubbo_service_name $dubbo_service_version $dubbo_method apisix_dubbo_backend; + + header_filter_by_lua_block { + apisix.http_header_filter_phase() + } + + body_filter_by_lua_block { + apisix.http_body_filter_phase() + } + + log_by_lua_block { + apisix.http_log_phase() + } + } + {% end %} + + {% if enabled_plugins["proxy-mirror"] then %} + location = /proxy_mirror { + internal; + + {% if not use_apisix_base then %} + if ($upstream_mirror_uri = "") { + return 200; + } + {% end %} + + + {% if proxy_mirror_timeouts then %} + {% if proxy_mirror_timeouts.connect then %} + proxy_connect_timeout {* proxy_mirror_timeouts.connect *}; + {% end %} + {% if proxy_mirror_timeouts.read then %} + proxy_read_timeout {* proxy_mirror_timeouts.read *}; + {% end %} + {% if proxy_mirror_timeouts.send then %} + proxy_send_timeout {* proxy_mirror_timeouts.send *}; + {% end %} + {% end %} + proxy_http_version 1.1; + proxy_set_header Host $upstream_host; + proxy_pass $upstream_mirror_uri; + } + {% end %} + + {% if enabled_plugins["proxy-mirror"] then %} + location = /proxy_mirror_grpc { + internal; + + {% if not use_apisix_base then %} + if ($upstream_mirror_uri = "") { + return 200; + } + {% end %} + + + {% if proxy_mirror_timeouts then %} + {% if proxy_mirror_timeouts.connect then %} + 
grpc_connect_timeout {* proxy_mirror_timeouts.connect *}; + {% end %} + {% if proxy_mirror_timeouts.read then %} + grpc_read_timeout {* proxy_mirror_timeouts.read *}; + {% end %} + {% if proxy_mirror_timeouts.send then %} + grpc_send_timeout {* proxy_mirror_timeouts.send *}; + {% end %} + {% end %} + grpc_pass $upstream_mirror_host; + } + {% end %} + } + {% end %} + + # http end configuration snippet starts + {% if http_end_configuration_snippet then %} + {* http_end_configuration_snippet *} + {% end %} + # http end configuration snippet ends +} +{% end %} +]=] diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/ops.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/ops.lua new file mode 100644 index 0000000..3b2e555 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/ops.lua @@ -0,0 +1,1013 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ver = require("apisix.core.version") +local etcd = require("apisix.cli.etcd") +local util = require("apisix.cli.util") +local file = require("apisix.cli.file") +local schema = require("apisix.cli.schema") +local ngx_tpl = require("apisix.cli.ngx_tpl") +local cli_ip = require("apisix.cli.ip") +local profile = require("apisix.core.profile") +local template = require("resty.template") +local argparse = require("argparse") +local pl_path = require("pl.path") +local lfs = require("lfs") +local signal = require("posix.signal") +local errno = require("posix.errno") + +local stderr = io.stderr +local ipairs = ipairs +local pairs = pairs +local print = print +local type = type +local tostring = tostring +local tonumber = tonumber +local io_open = io.open +local execute = os.execute +local os_rename = os.rename +local os_remove = os.remove +local table_insert = table.insert +local table_remove = table.remove +local getenv = os.getenv +local max = math.max +local floor = math.floor +local str_find = string.find +local str_byte = string.byte +local str_sub = string.sub +local str_format = string.format + + +local _M = {} + + +local function help() + print([[ +Usage: apisix [action] + +help: print the apisix cli help message +init: initialize the local nginx.conf +init_etcd: initialize the data of etcd +start: start the apisix server +stop: stop the apisix server +quit: stop the apisix server gracefully +restart: restart the apisix server +reload: reload the apisix server +test: test the generated nginx.conf +version: print the version of apisix +]]) +end + + +local function version_greater_equal(cur_ver_s, need_ver_s) + local cur_vers = util.split(cur_ver_s, [[.]]) + local need_vers = util.split(need_ver_s, [[.]]) + local len = max(#cur_vers, #need_vers) + + for i = 1, len do + local cur_ver = tonumber(cur_vers[i]) or 0 + local need_ver = tonumber(need_vers[i]) or 0 + if cur_ver 
> need_ver then + return true + end + + if cur_ver < need_ver then + return false + end + end + + return true +end + + +local function get_openresty_version() + local str = "nginx version: openresty/" + local ret = util.execute_cmd("openresty -v 2>&1") + local pos = str_find(ret, str, 1, true) + if pos then + return str_sub(ret, pos + #str) + end + + str = "nginx version: nginx/" + pos = str_find(ret, str, 1, true) + if pos then + return str_sub(ret, pos + #str) + end +end + + +local function local_dns_resolver(file_path) + local file, err = io_open(file_path, "rb") + if not file then + return false, "failed to open file: " .. file_path .. ", error info:" .. err + end + + local dns_addrs = {} + for line in file:lines() do + local addr, n = line:gsub("^nameserver%s+([^%s]+)%s*$", "%1") + if n == 1 then + table_insert(dns_addrs, addr) + end + end + + file:close() + return dns_addrs +end +-- exported for test +_M.local_dns_resolver = local_dns_resolver + + +local function version() + print(ver['VERSION']) +end + + +local function get_lua_path(conf) + -- we use "" as the placeholder to enforce the type to be string + if conf and conf ~= "" then + if #conf < 2 then + -- the shortest valid path is ';;' + util.die("invalid extra_lua_path/extra_lua_cpath: \"", conf, "\"\n") + end + + local path = conf + if path:byte(-1) ~= str_byte(';') then + path = path .. ';' + end + return path + end + + return "" +end + + +local function init(env) + if env.is_root_path then + print('Warning! Running apisix under /root is only suitable for ' + .. 'development environments and it is dangerous to do so. ' + .. 'It is recommended to run APISIX in a directory ' + .. 'other than /root.') + end + + local min_ulimit = 1024 + if env.ulimit ~= "unlimited" and env.ulimit <= min_ulimit then + print(str_format("Warning! Current maximum number of open file " + .. "descriptors [%d] is not greater than %d, please increase user limits by " + .. "execute \'ulimit -n \' , otherwise the performance" + .. " is low.", env.ulimit, min_ulimit)) + end + + -- read_yaml_conf + local yaml_conf, err = file.read_yaml_conf(env.apisix_home) + if not yaml_conf then + util.die("failed to read local yaml config of apisix: ", err, "\n") + end + + local ok, err = schema.validate(yaml_conf) + if not ok then + util.die(err, "\n") + end + + -- check the Admin API token + local checked_admin_key = false + local allow_admin = yaml_conf.deployment.admin and + yaml_conf.deployment.admin.allow_admin + if yaml_conf.apisix.enable_admin and allow_admin + and #allow_admin == 1 and allow_admin[1] == "127.0.0.0/24" then + checked_admin_key = true + end + -- check if admin_key is required + if yaml_conf.deployment.admin.admin_key_required == false then + checked_admin_key = true + print("Warning! Admin key is bypassed! " + .. "If you are deploying APISIX in a production environment, " + .. "please enable `admin_key_required` and set a secure admin key!") + end + + if yaml_conf.apisix.enable_admin and not checked_admin_key then + local help = [[ + +%s +Please modify "admin_key" in conf/config.yaml . 
+ +]] + local admin_key = yaml_conf.deployment.admin + if admin_key then + admin_key = admin_key.admin_key + end + + if type(admin_key) ~= "table" or #admin_key == 0 + then + util.die(help:format("ERROR: missing valid Admin API token.")) + end + + for _, admin in ipairs(admin_key) do + if type(admin.key) == "table" then + admin.key = "" + else + admin.key = tostring(admin.key) + end + + if admin.key == "" then + stderr:write( + help:format([[WARNING: using empty Admin API. + This will trigger APISIX to automatically generate a random Admin API token.]]), + "\n" + ) + end + end + end + + if yaml_conf.deployment.admin then + local admin_api_mtls = yaml_conf.deployment.admin.admin_api_mtls + local https_admin = yaml_conf.deployment.admin.https_admin + if https_admin and not (admin_api_mtls and + admin_api_mtls.admin_ssl_cert and + admin_api_mtls.admin_ssl_cert ~= "" and + admin_api_mtls.admin_ssl_cert_key and + admin_api_mtls.admin_ssl_cert_key ~= "") + then + util.die("missing ssl cert for https admin") + end + end + + local or_ver = get_openresty_version() + if or_ver == nil then + util.die("can not find openresty\n") + end + + local need_ver = "1.21.4" + if not version_greater_equal(or_ver, need_ver) then + util.die("openresty version must >=", need_ver, " current ", or_ver, "\n") + end + + local or_info = env.openresty_info + if not or_info:find("http_stub_status_module", 1, true) then + util.die("'http_stub_status_module' module is missing in ", + "your openresty, please check it out.\n") + end + + --- http is enabled by default + local enable_http = true + --- stream is disabled by default + local enable_stream = false + if yaml_conf.apisix.proxy_mode then + --- check for "http" + if yaml_conf.apisix.proxy_mode == "http" then + enable_http = true + enable_stream = false + --- check for "stream" + elseif yaml_conf.apisix.proxy_mode == "stream" then + enable_stream = true + enable_http = false + --- check for "http&stream" + elseif yaml_conf.apisix.proxy_mode == "http&stream" then + enable_stream = true + enable_http = true + end + end + + local enabled_discoveries = {} + for name in pairs(yaml_conf.discovery or {}) do + enabled_discoveries[name] = true + end + + local enabled_plugins = {} + for i, name in ipairs(yaml_conf.plugins or {}) do + enabled_plugins[name] = true + end + + local enabled_stream_plugins = {} + for i, name in ipairs(yaml_conf.stream_plugins or {}) do + enabled_stream_plugins[name] = true + end + + if enabled_plugins["proxy-cache"] and not yaml_conf.apisix.proxy_cache then + util.die("missing apisix.proxy_cache for plugin proxy-cache\n") + end + + if enabled_plugins["batch-requests"] then + local pass_real_client_ip = false + local real_ip_from = yaml_conf.nginx_config.http.real_ip_from + -- the real_ip_from is enabled by default, we just need to make sure it's + -- not disabled by the users + if real_ip_from then + for _, ip in ipairs(real_ip_from) do + local _ip = cli_ip:new(ip) + if _ip then + if _ip:is_loopback() or _ip:is_unspecified() then + pass_real_client_ip = true + end + end + end + end + + if not pass_real_client_ip then + util.die("missing loopback or unspecified in the nginx_config.http.real_ip_from" .. + " for plugin batch-requests\n") + end + end + + local ports_to_check = {} + + local function validate_and_get_listen_addr(port_name, default_ip, configured_ip, + default_port, configured_port) + local ip = configured_ip or default_ip + local port = tonumber(configured_port) or default_port + if ports_to_check[port] ~= nil then + util.die(port_name .. 
" ", port, " conflicts with ", ports_to_check[port], "\n") + end + ports_to_check[port] = port_name + return ip .. ":" .. port + end + + -- listen in admin use a separate port, support specific IP, compatible with the original style + local admin_server_addr + if yaml_conf.apisix.enable_admin then + local ip = yaml_conf.deployment.admin.admin_listen.ip + local port = yaml_conf.deployment.admin.admin_listen.port + admin_server_addr = validate_and_get_listen_addr("admin port", "0.0.0.0", ip, + 9180, port) + end + + local status_server_addr + if yaml_conf.apisix.status then + status_server_addr = validate_and_get_listen_addr("status port", "127.0.0.1", + yaml_conf.apisix.status.ip, 7085, + yaml_conf.apisix.status.port) + end + + local control_server_addr + if yaml_conf.apisix.enable_control then + if not yaml_conf.apisix.control then + control_server_addr = validate_and_get_listen_addr("control port", "127.0.0.1", nil, + 9090, nil) + else + control_server_addr = validate_and_get_listen_addr("control port", "127.0.0.1", + yaml_conf.apisix.control.ip, + 9090, yaml_conf.apisix.control.port) + end + end + + local prometheus_server_addr + if yaml_conf.plugin_attr.prometheus then + local prometheus = yaml_conf.plugin_attr.prometheus + if prometheus.enable_export_server then + prometheus_server_addr = validate_and_get_listen_addr("prometheus port", "127.0.0.1", + prometheus.export_addr.ip, + 9091, prometheus.export_addr.port) + end + end + + if enabled_stream_plugins["prometheus"] and not prometheus_server_addr then + util.die("L4 prometheus metric should be exposed via export server\n") + end + + local ip_port_to_check = {} + + local function listen_table_insert(listen_table, scheme, ip, port, + enable_http3, enable_ipv6) + if type(ip) ~= "string" then + util.die(scheme, " listen ip format error, must be string", "\n") + end + + if type(port) ~= "number" then + util.die(scheme, " listen port format error, must be number", "\n") + end + + if ports_to_check[port] ~= nil then + util.die(scheme, " listen port ", port, " conflicts with ", + ports_to_check[port], "\n") + end + + local addr = ip .. ":" .. port + + if ip_port_to_check[addr] == nil then + table_insert(listen_table, + { + ip = ip, + port = port, + enable_http3 = enable_http3 + }) + ip_port_to_check[addr] = scheme + end + + if enable_ipv6 then + ip = "[::]" + addr = ip .. ":" .. port + + if ip_port_to_check[addr] == nil then + table_insert(listen_table, + { + ip = ip, + port = port, + enable_http3 = enable_http3 + }) + ip_port_to_check[addr] = scheme + end + end + end + + local node_listen = {} + -- listen in http, support multiple ports and specific IP, compatible with the original style + if type(yaml_conf.apisix.node_listen) == "number" then + listen_table_insert(node_listen, "http", "0.0.0.0", yaml_conf.apisix.node_listen, + false, yaml_conf.apisix.enable_ipv6) + elseif type(yaml_conf.apisix.node_listen) == "table" then + for _, value in ipairs(yaml_conf.apisix.node_listen) do + if type(value) == "number" then + listen_table_insert(node_listen, "http", "0.0.0.0", value, + false, yaml_conf.apisix.enable_ipv6) + elseif type(value) == "table" then + local ip = value.ip + local port = value.port + local enable_ipv6 = false + local enable_http2 = value.enable_http2 + + if ip == nil then + ip = "0.0.0.0" + if yaml_conf.apisix.enable_ipv6 then + enable_ipv6 = true + end + end + + if port == nil then + port = 9080 + end + + if enable_http2 ~= nil then + util.die("ERROR: port level enable_http2 in node_listen is deprecated" + .. 
"from 3.9 version, and you should use enable_http2 in " + .. "apisix level.", "\n") + end + + listen_table_insert(node_listen, "http", ip, port, + false, enable_ipv6) + end + end + end + yaml_conf.apisix.node_listen = node_listen + + local enable_http3_in_server_context = false + local ssl_listen = {} + -- listen in https, support multiple ports, support specific IP + for _, value in ipairs(yaml_conf.apisix.ssl.listen) do + local ip = value.ip + local port = value.port + local enable_ipv6 = false + local enable_http2 = value.enable_http2 + local enable_http3 = value.enable_http3 + + if ip == nil then + ip = "0.0.0.0" + if yaml_conf.apisix.enable_ipv6 then + enable_ipv6 = true + end + end + + if port == nil then + port = 9443 + end + + if enable_http2 ~= nil then + util.die("ERROR: port level enable_http2 in ssl.listen is deprecated" + .. "from 3.9 version, and you should use enable_http2 in " + .. "apisix level.", "\n") + end + + if enable_http3 == nil then + enable_http3 = false + end + if enable_http3 == true then + enable_http3_in_server_context = true + end + + listen_table_insert(ssl_listen, "https", ip, port, + enable_http3, enable_ipv6) + end + + yaml_conf.apisix.ssl.listen = ssl_listen + yaml_conf.apisix.enable_http3_in_server_context = enable_http3_in_server_context + + -- enable ssl with place holder crt&key + yaml_conf.apisix.ssl.ssl_cert = "cert/ssl_PLACE_HOLDER.crt" + yaml_conf.apisix.ssl.ssl_cert_key = "cert/ssl_PLACE_HOLDER.key" + + local tcp_enable_ssl + -- compatible with the original style which only has the addr + if enable_stream and yaml_conf.apisix.stream_proxy and yaml_conf.apisix.stream_proxy.tcp then + local tcp = yaml_conf.apisix.stream_proxy.tcp + for i, item in ipairs(tcp) do + if type(item) ~= "table" then + tcp[i] = {addr = item} + else + if item.tls then + tcp_enable_ssl = true + end + end + end + end + + local dubbo_upstream_multiplex_count = 32 + if yaml_conf.plugin_attr and yaml_conf.plugin_attr["dubbo-proxy"] then + local dubbo_conf = yaml_conf.plugin_attr["dubbo-proxy"] + if tonumber(dubbo_conf.upstream_multiplex_count) >= 1 then + dubbo_upstream_multiplex_count = dubbo_conf.upstream_multiplex_count + end + end + + if yaml_conf.apisix.dns_resolver_valid then + if tonumber(yaml_conf.apisix.dns_resolver_valid) == nil then + util.die("apisix->dns_resolver_valid should be a number") + end + end + + local proxy_mirror_timeouts + if yaml_conf.plugin_attr["proxy-mirror"] then + proxy_mirror_timeouts = yaml_conf.plugin_attr["proxy-mirror"].timeout + end + + if yaml_conf.deployment and yaml_conf.deployment.role then + local role = yaml_conf.deployment.role + env.deployment_role = role + + if role == "control_plane" and not admin_server_addr then + local listen = node_listen[1] + admin_server_addr = str_format("%s:%s", listen.ip, listen.port) + end + end + + local opentelemetry_set_ngx_var + if enabled_plugins["opentelemetry"] and yaml_conf.plugin_attr["opentelemetry"] then + opentelemetry_set_ngx_var = yaml_conf.plugin_attr["opentelemetry"].set_ngx_var + end + + local zipkin_set_ngx_var + if enabled_plugins["zipkin"] and yaml_conf.plugin_attr["zipkin"] then + zipkin_set_ngx_var = yaml_conf.plugin_attr["zipkin"].set_ngx_var + end + + -- Using template.render + local sys_conf = { + lua_path = env.pkg_path_org, + lua_cpath = env.pkg_cpath_org, + os_name = util.trim(util.execute_cmd("uname")), + apisix_lua_home = env.apisix_home, + deployment_role = env.deployment_role, + use_apisix_base = env.use_apisix_base, + error_log = {level = "warn"}, + enable_http = 
enable_http, + enable_stream = enable_stream, + enabled_discoveries = enabled_discoveries, + enabled_plugins = enabled_plugins, + enabled_stream_plugins = enabled_stream_plugins, + dubbo_upstream_multiplex_count = dubbo_upstream_multiplex_count, + status_server_addr = status_server_addr, + tcp_enable_ssl = tcp_enable_ssl, + admin_server_addr = admin_server_addr, + control_server_addr = control_server_addr, + prometheus_server_addr = prometheus_server_addr, + proxy_mirror_timeouts = proxy_mirror_timeouts, + opentelemetry_set_ngx_var = opentelemetry_set_ngx_var, + zipkin_set_ngx_var = zipkin_set_ngx_var + } + + if not yaml_conf.apisix then + util.die("failed to read `apisix` field from yaml file") + end + + if not yaml_conf.nginx_config then + util.die("failed to read `nginx_config` field from yaml file") + end + + if util.is_32bit_arch() then + sys_conf["worker_rlimit_core"] = "4G" + else + sys_conf["worker_rlimit_core"] = "16G" + end + + for k,v in pairs(yaml_conf.apisix) do + sys_conf[k] = v + end + for k,v in pairs(yaml_conf.nginx_config) do + sys_conf[k] = v + end + if yaml_conf.deployment.admin then + for k,v in pairs(yaml_conf.deployment.admin) do + sys_conf[k] = v + end + end + + sys_conf.standalone_with_admin_api = env.deployment_role == "traditional" and + yaml_conf.apisix.enable_admin and yaml_conf.deployment.config_provider == "yaml" + + sys_conf["wasm"] = yaml_conf.wasm + + + local wrn = sys_conf["worker_rlimit_nofile"] + local wc = sys_conf["event"]["worker_connections"] + if not wrn or wrn <= wc then + -- ensure the number of fds is slightly larger than the number of conn + sys_conf["worker_rlimit_nofile"] = wc + 128 + end + + if sys_conf["enable_dev_mode"] == true then + sys_conf["worker_processes"] = 1 + sys_conf["enable_reuseport"] = false + + elseif tonumber(sys_conf["worker_processes"]) == nil then + sys_conf["worker_processes"] = "auto" + end + + local dns_resolver = sys_conf["dns_resolver"] + if not dns_resolver or #dns_resolver == 0 then + local dns_addrs, err = local_dns_resolver("/etc/resolv.conf") + if not dns_addrs then + util.die("failed to import local DNS: ", err, "\n") + end + + if #dns_addrs == 0 then + util.die("local DNS is empty\n") + end + + sys_conf["dns_resolver"] = dns_addrs + end + + for i, r in ipairs(sys_conf["dns_resolver"]) do + if r:match(":[^:]*:") then + -- more than one colon, is IPv6 + if r:byte(1) ~= str_byte('[') then + -- ensure IPv6 address is always wrapped in [] + sys_conf["dns_resolver"][i] = "[" .. r .. "]" + end + end + + -- check if the dns_resolver is ipv6 address with zone_id + -- Nginx does not support this form + if r:find("%%") then + stderr:write("unsupported DNS resolver: " .. r .. 
+ ", would ignore this item\n") + table_remove(sys_conf["dns_resolver"], i) + end + end + + local env_worker_processes = getenv("APISIX_WORKER_PROCESSES") + if env_worker_processes then + sys_conf["worker_processes"] = floor(tonumber(env_worker_processes)) + end + + local exported_vars = file.get_exported_vars() + if exported_vars then + if not sys_conf["envs"] then + sys_conf["envs"]= {} + end + for _, cfg_env in ipairs(sys_conf["envs"]) do + local cfg_name + local from = str_find(cfg_env, "=", 1, true) + if from then + cfg_name = str_sub(cfg_env, 1, from - 1) + else + cfg_name = cfg_env + end + + exported_vars[cfg_name] = false + end + + for name, value in pairs(exported_vars) do + if value then + table_insert(sys_conf["envs"], name) + end + end + end + + -- inject kubernetes discovery shared dict and environment variable + if enabled_discoveries["kubernetes"] then + + if not sys_conf["discovery_shared_dicts"] then + sys_conf["discovery_shared_dicts"] = {} + end + + local kubernetes_conf = yaml_conf.discovery["kubernetes"] + + local inject_environment = function(conf, envs) + local keys = { + conf.service.host, + conf.service.port, + } + + if conf.client.token then + table_insert(keys, conf.client.token) + end + + if conf.client.token_file then + table_insert(keys, conf.client.token_file) + end + + for _, key in ipairs(keys) do + if #key > 3 then + local first, second = str_byte(key, 1, 2) + if first == str_byte('$') and second == str_byte('{') then + local last = str_byte(key, #key) + if last == str_byte('}') then + envs[str_sub(key, 3, #key - 1)] = "" + end + end + end + end + + end + + local envs = {} + if #kubernetes_conf == 0 then + sys_conf["discovery_shared_dicts"]["kubernetes"] = kubernetes_conf.shared_size + inject_environment(kubernetes_conf, envs) + else + for _, item in ipairs(kubernetes_conf) do + sys_conf["discovery_shared_dicts"]["kubernetes-" .. item.id] = item.shared_size + inject_environment(item, envs) + end + end + + if not sys_conf["envs"] then + sys_conf["envs"] = {} + end + + for item in pairs(envs) do + table_insert(sys_conf["envs"], item) + end + + end + + -- fix up lua path + sys_conf["extra_lua_path"] = get_lua_path(yaml_conf.apisix.extra_lua_path) + sys_conf["extra_lua_cpath"] = get_lua_path(yaml_conf.apisix.extra_lua_cpath) + + local conf_render = template.compile(ngx_tpl) + local ngxconf = conf_render(sys_conf) + + local ok, err = util.write_file(env.apisix_home .. "/conf/nginx.conf", + ngxconf) + if not ok then + util.die("failed to update nginx.conf: ", err, "\n") + end +end + + +local function init_etcd(env, args) + etcd.init(env, args) +end + + +local function cleanup(env) + if env.apisix_home then + profile.apisix_home = env.apisix_home + end + + os_remove(profile:customized_yaml_index()) +end + + +local function sleep(n) + execute("sleep " .. tonumber(n)) +end + + +local function check_running(env) + local pid_path = env.apisix_home .. "/logs/nginx.pid" + local pid = util.read_file(pid_path) + pid = tonumber(pid) + if not pid then + return false, nil + end + return true, pid +end + + +local function start(env, ...) + cleanup(env) + + if env.apisix_home then + profile.apisix_home = env.apisix_home + end + + -- Because the worker process started by apisix has "nobody" permission, + -- it cannot access the `/root` directory. Therefore, it is necessary to + -- prohibit APISIX from running in the /root directory. 
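+    -- (Illustrative note added in this packaging document; not part of the upstream
+    -- APISIX source.) For example, unpacking APISIX under a hypothetical /root/apisix
+    -- and running `apisix start` from there would trip the check below and abort,
+    -- whereas a home such as /usr/local/apisix starts normally.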
+ if env.is_root_path then + util.die("Error: It is forbidden to run APISIX in the /root directory.\n") + end + + local logs_path = env.apisix_home .. "/logs" + if not pl_path.exists(logs_path) then + local _, err = pl_path.mkdir(logs_path) + if err ~= nil then + util.die("failed to mkdir ", logs_path, ", error: ", err) + end + elseif not pl_path.isdir(logs_path) and not pl_path.islink(logs_path) then + util.die(logs_path, " is not directory nor symbol link") + end + + -- check running and wait old apisix stop + local pid = nil + for i = 1, 30 do + local running + running, pid = check_running(env) + if not running then + break + else + sleep(0.1) + end + end + + if pid then + if pid <= 0 then + print("invalid pid") + return + end + + local signone = 0 + + local ok, err, err_no = signal.kill(pid, signone) + if ok then + print("the old APISIX is still running, the new one will not start") + return + -- no such process + elseif err_no ~= errno.ESRCH then + print(err) + return + end + + print("nginx.pid exists but there's no corresponding process with pid ", pid, + ", the file will be overwritten") + end + + -- start a new APISIX instance + + local parser = argparse() + parser:argument("_", "Placeholder") + parser:option("-c --config", "location of customized config.yaml") + -- TODO: more logs for APISIX cli could be added using this feature + parser:flag("-v --verbose", "show init_etcd debug information") + local args = parser:parse() + + local customized_yaml = args["config"] + if customized_yaml then + local customized_yaml_path + local idx = str_find(customized_yaml, "/") + if idx and idx == 1 then + customized_yaml_path = customized_yaml + else + local cur_dir, err = lfs.currentdir() + if err then + util.die("failed to get current directory") + end + customized_yaml_path = cur_dir .. "/" .. customized_yaml + end + + if not util.file_exists(customized_yaml_path) then + util.die("customized config file not exists, path: " .. customized_yaml_path) + end + + local ok, err = util.write_file(profile:customized_yaml_index(), customized_yaml_path) + if not ok then + util.die("write customized config index failed, err: " .. err) + end + + print("Use customized yaml: ", customized_yaml) + end + + init(env) + + if env.deployment_role ~= "data_plane" then + init_etcd(env, args) + end + + util.execute_cmd(env.openresty_args) +end + + +local function test(env, backup_ngx_conf) + -- backup nginx.conf + local ngx_conf_path = env.apisix_home .. "/conf/nginx.conf" + local ngx_conf_path_bak = ngx_conf_path .. ".bak" + local ngx_conf_exist = pl_path.exists(ngx_conf_path) + if ngx_conf_exist then + local ok, err = os_rename(ngx_conf_path, ngx_conf_path_bak) + if not ok then + util.die("failed to backup nginx.conf, error: ", err) + end + end + + -- reinit nginx.conf + init(env) + + local test_cmd = env.openresty_args .. [[ -t -q ]] + local test_ret = execute((test_cmd)) + + -- restore nginx.conf + if ngx_conf_exist then + local ok, err = os_rename(ngx_conf_path_bak, ngx_conf_path) + if not ok then + util.die("failed to restore original nginx.conf, error: ", err) + end + end + + -- When success, + -- On linux, os.execute returns 0, + -- On macos, os.execute returns 3 values: true, exit, 0, and we need the first. + if (test_ret == 0 or test_ret == true) then + print("configuration test is successful") + return + end + + util.die("configuration test failed") +end + + +local function quit(env) + cleanup(env) + + local cmd = env.openresty_args .. 
[[ -s quit]] + util.execute_cmd(cmd) +end + + +local function stop(env) + cleanup(env) + + local cmd = env.openresty_args .. [[ -s stop]] + util.execute_cmd(cmd) +end + + +local function restart(env) + -- test configuration + test(env) + stop(env) + start(env) +end + + +local function reload(env) + -- reinit nginx.conf + init(env) + + local test_cmd = env.openresty_args .. [[ -t -q ]] + -- When success, + -- On linux, os.execute returns 0, + -- On macos, os.execute returns 3 values: true, exit, 0, and we need the first. + local test_ret = execute((test_cmd)) + if (test_ret == 0 or test_ret == true) then + local cmd = env.openresty_args .. [[ -s reload]] + execute(cmd) + return + end + + print("test openresty failed") +end + + + +local action = { + help = help, + version = version, + init = init, + init_etcd = etcd.init, + start = start, + stop = stop, + quit = quit, + restart = restart, + reload = reload, + test = test, +} + + +function _M.execute(env, arg) + local cmd_action = arg[1] + if not cmd_action then + return help() + end + + if not action[cmd_action] then + stderr:write("invalid argument: ", cmd_action, "\n") + return help() + end + + action[cmd_action](env, arg[2]) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/schema.lua new file mode 100644 index 0000000..36d758c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/schema.lua @@ -0,0 +1,450 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local jsonschema = require("jsonschema") +local pairs = pairs +local pcall = pcall +local require = require + + +local _M = {} +local etcd_schema = { + type = "object", + properties = { + resync_delay = { + type = "integer", + }, + user = { + type = "string", + }, + password = { + type = "string", + }, + tls = { + type = "object", + properties = { + cert = { + type = "string", + }, + key = { + type = "string", + }, + }, + }, + prefix = { + type = "string", + }, + host = { + type = "array", + items = { + type = "string", + pattern = [[^https?://]] + }, + minItems = 1, + }, + timeout = { + type = "integer", + default = 30, + minimum = 1, + description = "etcd connection timeout in seconds", + }, + }, + required = {"prefix", "host"} +} + +local config_schema = { + type = "object", + properties = { + apisix = { + properties = { + lua_module_hook = { + pattern = "^[a-zA-Z._-]+$", + }, + proxy_protocol = { + type = "object", + properties = { + listen_http_port = { + type = "integer", + }, + listen_https_port = { + type = "integer", + }, + enable_tcp_pp = { + type = "boolean", + }, + enable_tcp_pp_to_upstream = { + type = "boolean", + }, + } + }, + proxy_cache = { + type = "object", + properties = { + zones = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + name = { + type = "string", + }, + memory_size = { + type = "string", + }, + disk_size = { + type = "string", + }, + disk_path = { + type = "string", + }, + cache_levels = { + type = "string", + }, + }, + oneOf = { + { + required = {"name", "memory_size"}, + maxProperties = 2, + }, + { + required = {"name", "memory_size", "disk_size", + "disk_path", "cache_levels"}, + } + }, + }, + uniqueItems = true, + } + } + }, + proxy_mode = { + type = "string", + enum = {"http", "stream", "http&stream"}, + }, + stream_proxy = { + type = "object", + properties = { + tcp = { + type = "array", + minItems = 1, + items = { + anyOf = { + { + type = "integer", + }, + { + type = "string", + }, + { + type = "object", + properties = { + addr = { + anyOf = { + { + type = "integer", + }, + { + type = "string", + }, + } + }, + tls = { + type = "boolean", + } + }, + required = {"addr"} + }, + }, + }, + uniqueItems = true, + }, + udp = { + type = "array", + minItems = 1, + items = { + anyOf = { + { + type = "integer", + }, + { + type = "string", + }, + }, + }, + uniqueItems = true, + }, + } + }, + dns_resolver = { + type = "array", + minItems = 1, + items = { + type = "string", + } + }, + dns_resolver_valid = { + type = "integer", + }, + enable_http2 = { + type = "boolean", + default = true + }, + ssl = { + type = "object", + properties = { + ssl_trusted_certificate = { + type = "string", + default = "system" + }, + listen = { + type = "array", + items = { + type = "object", + properties = { + ip = { + type = "string", + }, + port = { + type = "integer", + minimum = 1, + maximum = 65535 + }, + enable_http3 = { + type = "boolean", + }, + } + } + }, + } + }, + data_encryption = { + type = "object", + properties = { + keyring = { + anyOf = { + { + type = "array", + minItems = 1, + items = { + type = "string", + minLength = 16, + maxLength = 16 + } + }, + { + type = "string", + minLength = 16, + maxLength = 16 + } + } + }, + } + }, + } + }, + nginx_config = { + type = "object", + properties = { + envs = { + type = "array", + minItems = 1, + items = { + type = "string", + } + } + }, + }, + http = { + type = "object", + properties = { + custom_lua_shared_dict = { + type = "object", + } + } + }, + etcd = etcd_schema, + plugins = { + 
type = "array", + default = {}, + minItems = 0, + items = { + type = "string" + } + }, + stream_plugins = { + type = "array", + default = {}, + minItems = 0, + items = { + type = "string" + } + }, + wasm = { + type = "object", + properties = { + plugins = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + name = { + type = "string" + }, + file = { + type = "string" + }, + priority = { + type = "integer" + }, + http_request_phase = { + enum = {"access", "rewrite"}, + default = "access", + }, + }, + required = {"name", "file", "priority"} + } + } + } + }, + deployment = { + type = "object", + properties = { + role = { + enum = {"traditional", "control_plane", "data_plane", "standalone"}, + default = "traditional" + } + }, + }, + }, + required = {"apisix", "deployment"}, +} + +local admin_schema = { + type = "object", + properties = { + admin_key = { + type = "array", + properties = { + items = { + properties = { + name = {type = "string"}, + key = {type = "string"}, + role = {type = "string"}, + } + } + } + }, + admin_listen = { + properties = { + listen = { type = "string" }, + port = { type = "integer" }, + }, + default = { + listen = "0.0.0.0", + port = 9180, + } + }, + https_admin = { + type = "boolean", + }, + admin_key_required = { + type = "boolean", + }, + } +} + +local deployment_schema = { + traditional = { + properties = { + etcd = etcd_schema, + admin = admin_schema, + role_traditional = { + properties = { + config_provider = { + enum = {"etcd", "yaml"} + }, + }, + required = {"config_provider"} + } + }, + required = {"etcd"} + }, + control_plane = { + properties = { + etcd = etcd_schema, + admin = admin_schema, + role_control_plane = { + properties = { + config_provider = { + enum = {"etcd"} + }, + }, + required = {"config_provider"} + }, + }, + required = {"etcd", "role_control_plane"} + }, + data_plane = { + properties = { + etcd = etcd_schema, + role_data_plane = { + properties = { + config_provider = { + enum = {"etcd", "yaml", "json", "xds"} + }, + }, + required = {"config_provider"} + }, + }, + required = {"role_data_plane"} + } +} + + +function _M.validate(yaml_conf) + local validator = jsonschema.generate_validator(config_schema) + local ok, err = validator(yaml_conf) + if not ok then + return false, "failed to validate config: " .. err + end + + if yaml_conf.discovery then + for kind, conf in pairs(yaml_conf.discovery) do + local ok, schema = pcall(require, "apisix.discovery." .. kind .. ".schema") + if ok then + local validator = jsonschema.generate_validator(schema) + local ok, err = validator(conf) + if not ok then + return false, "invalid discovery " .. kind .. " configuration: " .. err + end + end + end + end + + local role = yaml_conf.deployment.role + local validator = jsonschema.generate_validator(deployment_schema[role]) + local ok, err = validator(yaml_conf.deployment) + if not ok then + return false, "invalid deployment " .. role .. " configuration: " .. err + end + + return true +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/cli/util.lua b/CloudronPackages/APISIX/apisix-source/apisix/cli/util.lua new file mode 100644 index 0000000..d69468e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/cli/util.lua @@ -0,0 +1,189 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local require = require +local pcall = pcall +local open = io.open +local popen = io.popen +local close = io.close +local exit = os.exit +local stderr = io.stderr +local str_format = string.format +local tonumber = tonumber +local io = io +local ipairs = ipairs +local assert = assert + +local _M = {} + + +-- Note: The `execute_cmd` return value will have a line break at the end, +-- it is recommended to use the `trim` function to handle the return value. +local function execute_cmd(cmd) + local t, err = popen(cmd) + if not t then + return nil, "failed to execute command: " + .. cmd .. ", error info: " .. err + end + + local data, err = t:read("*all") + t:close() + + if not data then + return nil, "failed to read execution result of: " + .. cmd .. ", error info: " .. err + end + + return data +end +_M.execute_cmd = execute_cmd + + +-- For commands which stdout would be always be empty, +-- forward stderr to stdout to get the error msg +function _M.execute_cmd_with_error(cmd) + return execute_cmd(cmd .. " 2>&1") +end + + +function _M.trim(s) + return (s:gsub("^%s*(.-)%s*$", "%1")) +end + + +function _M.split(self, sep) + local sep, fields = sep or ":", {} + local pattern = str_format("([^%s]+)", sep) + + self:gsub(pattern, function(c) fields[#fields + 1] = c end) + + return fields +end + + +function _M.read_file(file_path) + local file, err = open(file_path, "rb") + if not file then + return false, "failed to open file: " .. file_path .. ", error info:" .. err + end + + local data, err = file:read("*all") + file:close() + if not data then + return false, "failed to read file: " .. file_path .. ", error info:" .. err + end + + return data +end + + +function _M.die(...) + stderr:write(...) + exit(1) +end + + +function _M.is_32bit_arch() + local ok, ffi = pcall(require, "ffi") + if ok then + -- LuaJIT + return ffi.abi("32bit") + end + + local ret = _M.execute_cmd("getconf LONG_BIT") + local bits = tonumber(ret) + return bits <= 32 +end + + +function _M.write_file(file_path, data) + local file, err = open(file_path, "w+") + if not file then + return false, "failed to open file: " + .. file_path + .. ", error info:" + .. err + end + + local ok, err = file:write(data) + file:close() + if not ok then + return false, "failed to write file: " + .. file_path + .. ", error info:" + .. 
err + end + return true +end + + +function _M.file_exists(file_path) + local f = open(file_path, "r") + return f ~= nil and close(f) +end + +do + local trusted_certs_paths = { + "/etc/ssl/certs/ca-certificates.crt", -- Debian/Ubuntu/Gentoo + "/etc/pki/tls/certs/ca-bundle.crt", -- Fedora/RHEL 6 + "/etc/ssl/ca-bundle.pem", -- OpenSUSE + "/etc/pki/tls/cacert.pem", -- OpenELEC + "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", -- CentOS/RHEL 7 + "/etc/ssl/cert.pem", -- OpenBSD, Alpine + } + + -- Check if a file exists using Lua's built-in `io.open` + local function file_exists(path) + local file = io.open(path, "r") + if file then + file:close() + return true + else + return false + end + end + + function _M.get_system_trusted_certs_filepath() + for _, path in ipairs(trusted_certs_paths) do + if file_exists(path) then + return path + end + end + + return nil, + "Could not find trusted certs file in " .. + "any of the `system`-predefined locations. " .. + "Please install a certs file there or set " .. + "`lua_ssl_trusted_certificate` to a " .. + "specific file path instead of `system`" + end +end + + +function _M.gen_trusted_certs_combined_file(combined_filepath, paths) + local combined_file = assert(io.open(combined_filepath, "w")) + for _, path in ipairs(paths) do + local cert_file = assert(io.open(path, "r")) + combined_file:write(cert_file:read("*a")) + combined_file:write("\n") + cert_file:close() + end + combined_file:close() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/constants.lua b/CloudronPackages/APISIX/apisix-source/apisix/constants.lua new file mode 100644 index 0000000..0b3ec16 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/constants.lua @@ -0,0 +1,46 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +return { + RPC_ERROR = 0, + RPC_PREPARE_CONF = 1, + RPC_HTTP_REQ_CALL = 2, + RPC_EXTRA_INFO = 3, + RPC_HTTP_RESP_CALL = 4, + HTTP_ETCD_DIRECTORY = { + ["/upstreams"] = true, + ["/plugins"] = true, + ["/ssls"] = true, + ["/stream_routes"] = true, + ["/plugin_metadata"] = true, + ["/routes"] = true, + ["/services"] = true, + ["/consumers"] = true, + ["/global_rules"] = true, + ["/protos"] = true, + ["/plugin_configs"] = true, + ["/consumer_groups"] = true, + ["/secrets"] = true, + }, + STREAM_ETCD_DIRECTORY = { + ["/upstreams"] = true, + ["/services"] = true, + ["/plugins"] = true, + ["/ssls"] = true, + ["/stream_routes"] = true, + ["/plugin_metadata"] = true, + }, +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/consumer.lua b/CloudronPackages/APISIX/apisix-source/apisix/consumer.lua new file mode 100644 index 0000000..d69226b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/consumer.lua @@ -0,0 +1,334 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local config_local = require("apisix.core.config_local") +local secret = require("apisix.secret") +local plugin = require("apisix.plugin") +local plugin_checker = require("apisix.plugin").plugin_checker +local check_schema = require("apisix.core.schema").check +local error = error +local ipairs = ipairs +local pairs = pairs +local type = type +local string_sub = string.sub +local consumers + + +local _M = { + version = 0.3, +} + +local lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) + +-- Please calculate and set the value of the "consumers_count_for_lrucache" +-- variable based on the number of consumers in the current environment, +-- taking into account the appropriate adjustment coefficient. +local consumers_count_for_lrucache = 4096 + +local function remove_etcd_prefix(key) + local prefix = "" + local local_conf = config_local.local_conf() + local role = core.table.try_read_attr(local_conf, "deployment", "role") + local provider = core.table.try_read_attr(local_conf, "deployment", "role_" .. 
+ role, "config_provider") + if provider == "etcd" and local_conf.etcd and local_conf.etcd.prefix then + prefix = local_conf.etcd.prefix + end + return string_sub(key, #prefix + 1) +end + +-- /{etcd.prefix}/consumers/{consumer_name}/credentials/{credential_id} --> {consumer_name} +local function get_consumer_name_from_credential_etcd_key(key) + local uri_segs = core.utils.split_uri(remove_etcd_prefix(key)) + return uri_segs[3] +end + +local function is_credential_etcd_key(key) + if not key then + return false + end + + local uri_segs = core.utils.split_uri(remove_etcd_prefix(key)) + return uri_segs[2] == "consumers" and uri_segs[4] == "credentials" +end + +local function get_credential_id_from_etcd_key(key) + local uri_segs = core.utils.split_uri(remove_etcd_prefix(key)) + return uri_segs[5] +end + +local function filter_consumers_list(data_list) + if #data_list == 0 then + return data_list + end + + local list = {} + for _, item in ipairs(data_list) do + if not (type(item) == "table" and is_credential_etcd_key(item.key)) then + core.table.insert(list, item) + end + end + + return list +end + +local plugin_consumer +do + local consumers_id_lrucache = core.lrucache.new({ + count = consumers_count_for_lrucache + }) + +local function construct_consumer_data(val, plugin_config) + -- if the val is a Consumer, clone it to the local consumer; + -- if the val is a Credential, to get the Consumer by consumer_name and then clone + -- it to the local consumer. + local consumer + if is_credential_etcd_key(val.key) then + local consumer_name = get_consumer_name_from_credential_etcd_key(val.key) + local the_consumer = consumers:get(consumer_name) + if the_consumer and the_consumer.value then + consumer = core.table.clone(the_consumer.value) + consumer.modifiedIndex = the_consumer.modifiedIndex + consumer.credential_id = get_credential_id_from_etcd_key(val.key) + else + -- Normally wouldn't get here: + -- it should belong to a consumer for any credential. + core.log.error("failed to get the consumer for the credential,", + " a wild credential has appeared!", + " credential key: ", val.key, ", consumer name: ", consumer_name) + return nil, "failed to get the consumer for the credential" + end + else + consumer = core.table.clone(val.value) + consumer.modifiedIndex = val.modifiedIndex + end + + -- if the consumer has labels, set the field custom_id to it. + -- the custom_id is used to set in the request headers to the upstream. + if consumer.labels then + consumer.custom_id = consumer.labels["custom_id"] + end + + -- Note: the id here is the key of consumer data, which + -- is 'username' field in admin + consumer.consumer_name = consumer.id + consumer.auth_conf = plugin_config + + return consumer +end + + +function plugin_consumer() + local plugins = {} + + if consumers.values == nil then + return plugins + end + + -- consumers.values is the list that got from etcd by prefix key {etcd_prefix}/consumers. + -- So it contains consumers and credentials. + -- The val in the for-loop may be a Consumer or a Credential. + for _, val in ipairs(consumers.values) do + if type(val) ~= "table" then + goto CONTINUE + end + + for name, config in pairs(val.value.plugins or {}) do + local plugin_obj = plugin.get(name) + if plugin_obj and plugin_obj.type == "auth" then + if not plugins[name] then + plugins[name] = { + nodes = {}, + len = 0, + conf_version = consumers.conf_version + } + end + + local consumer = consumers_id_lrucache(val.value.id .. 
name, + val.modifiedIndex, construct_consumer_data, val, config) + if consumer == nil then + goto CONTINUE + end + + plugins[name].len = plugins[name].len + 1 + core.table.insert(plugins[name].nodes, plugins[name].len, + consumer) + core.log.info("consumer:", core.json.delay_encode(consumer)) + end + end + + ::CONTINUE:: + end + + return plugins +end + +end + + +_M.filter_consumers_list = filter_consumers_list + +function _M.get_consumer_key_from_credential_key(key) + local uri_segs = core.utils.split_uri(key) + return "/consumers/" .. uri_segs[3] +end + +function _M.plugin(plugin_name) + local plugin_conf = core.lrucache.global("/consumers", + consumers.conf_version, plugin_consumer) + return plugin_conf[plugin_name] +end + +function _M.consumers_conf(plugin_name) + return _M.plugin(plugin_name) +end + + +-- attach chosen consumer to the ctx, used in auth plugin +function _M.attach_consumer(ctx, consumer, conf) + ctx.consumer = consumer + ctx.consumer_name = consumer.consumer_name + ctx.consumer_group_id = consumer.group_id + ctx.consumer_ver = conf.conf_version + + core.request.set_header(ctx, "X-Consumer-Username", consumer.username) + core.request.set_header(ctx, "X-Credential-Identifier", consumer.credential_id) + core.request.set_header(ctx, "X-Consumer-Custom-ID", consumer.custom_id) +end + + +function _M.consumers() + if not consumers then + return nil, nil + end + + return filter_consumers_list(consumers.values), consumers.conf_version +end + + +local create_consume_cache +do + local consumer_lrucache = core.lrucache.new({ + count = consumers_count_for_lrucache + }) + +local function fill_consumer_secret(consumer) + local new_consumer = core.table.clone(consumer) + new_consumer.auth_conf = secret.fetch_secrets(new_consumer.auth_conf, false) + return new_consumer +end + + +function create_consume_cache(consumers_conf, key_attr) + local consumer_names = {} + + for _, consumer in ipairs(consumers_conf.nodes) do + core.log.info("consumer node: ", core.json.delay_encode(consumer)) + local new_consumer = consumer_lrucache(consumer, nil, + fill_consumer_secret, consumer) + consumer_names[new_consumer.auth_conf[key_attr]] = new_consumer + end + + return consumer_names +end + +end + + +function _M.consumers_kv(plugin_name, consumer_conf, key_attr) + local consumers = lrucache("consumers_key#" .. plugin_name, consumer_conf.conf_version, + create_consume_cache, consumer_conf, key_attr) + + return consumers +end + + +function _M.find_consumer(plugin_name, key, key_value) + local consumer + local consumer_conf + consumer_conf = _M.plugin(plugin_name) + if not consumer_conf then + return nil, nil, "Missing related consumer" + end + local consumers = _M.consumers_kv(plugin_name, consumer_conf, key) + consumer = consumers[key_value] + return consumer, consumer_conf +end + + +local function check_consumer(consumer, key) + local data_valid + local err + if is_credential_etcd_key(key) then + data_valid, err = check_schema(core.schema.credential, consumer) + else + data_valid, err = check_schema(core.schema.consumer, consumer) + end + if not data_valid then + return data_valid, err + end + + return plugin_checker(consumer, core.schema.TYPE_CONSUMER) +end + + +function _M.init_worker() + local err + local cfg = { + automatic = true, + checker = check_consumer, + } + + consumers, err = core.config.new("/consumers", cfg) + if not consumers then + error("failed to create etcd instance for fetching consumers: " .. 
err) + return + end +end + +local function get_anonymous_consumer_from_local_cache(name) + local anon_consumer_raw = consumers:get(name) + + if not anon_consumer_raw or not anon_consumer_raw.value or + not anon_consumer_raw.value.id or not anon_consumer_raw.modifiedIndex then + return nil, nil, "failed to get anonymous consumer " .. name + end + + -- make structure of anon_consumer similar to that of consumer_mod.consumers_kv's response + local anon_consumer = anon_consumer_raw.value + anon_consumer.consumer_name = anon_consumer_raw.value.id + anon_consumer.modifiedIndex = anon_consumer_raw.modifiedIndex + + local anon_consumer_conf = { + conf_version = anon_consumer_raw.modifiedIndex + } + + return anon_consumer, anon_consumer_conf +end + + +function _M.get_anonymous_consumer(name) + local anon_consumer, anon_consumer_conf, err + anon_consumer, anon_consumer_conf, err = get_anonymous_consumer_from_local_cache(name) + + return anon_consumer, anon_consumer_conf, err +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/consumer_group.lua b/CloudronPackages/APISIX/apisix-source/apisix/consumer_group.lua new file mode 100644 index 0000000..3be59ec --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/consumer_group.lua @@ -0,0 +1,55 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugin_checker = require("apisix.plugin").plugin_checker +local error = error + + +local consumer_groups + + +local _M = { +} + + +function _M.init_worker() + local err + consumer_groups, err = core.config.new("/consumer_groups", { + automatic = true, + item_schema = core.schema.consumer_group, + checker = plugin_checker, + }) + if not consumer_groups then + error("failed to sync /consumer_groups: " .. err) + end +end + + +function _M.consumer_groups() + if not consumer_groups then + return nil, nil + end + return consumer_groups.values, consumer_groups.conf_version +end + + +function _M.get(id) + return consumer_groups:get(id) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/control/router.lua b/CloudronPackages/APISIX/apisix-source/apisix/control/router.lua new file mode 100644 index 0000000..e6e5ff9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/control/router.lua @@ -0,0 +1,212 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local router = require("apisix.utils.router") +local radixtree = require("resty.radixtree") +local builtin_v1_routes = require("apisix.control.v1") +local plugin_mod = require("apisix.plugin") +local core = require("apisix.core") + +local str_sub = string.sub +local ipairs = ipairs +local pairs = pairs +local type = type +local ngx = ngx +local get_method = ngx.req.get_method +local events = require("apisix.events") + +local _M = {} + + +local function format_dismod_uri(mod_name, uri) + if core.string.has_prefix(uri, "/v1/") then + return uri + end + + local tmp = {"/v1/discovery/", mod_name} + if not core.string.has_prefix(uri, "/") then + core.table.insert(tmp, "/") + end + core.table.insert(tmp, uri) + + return core.table.concat(tmp, "") +end + +-- we do not hardcode the discovery module's control api uri +local function format_dismod_control_api_uris(mod_name, api_route) + if not api_route or #api_route == 0 then + return api_route + end + + local clone_route = core.table.clone(api_route) + for _, v in ipairs(clone_route) do + local uris = v.uris + local target_uris = core.table.new(#uris, 0) + for _, uri in ipairs(uris) do + local target_uri = format_dismod_uri(mod_name, uri) + core.table.insert(target_uris, target_uri) + end + v.uris = target_uris + end + + return clone_route +end + + +local fetch_control_api_router +do + local function register_api_routes(routes, api_routes) + for _, route in ipairs(api_routes) do + core.table.insert(routes, { + methods = route.methods, + -- note that it is 'uris' for control API, which is an array of strings + paths = route.uris, + handler = function (api_ctx) + local code, body = route.handler(api_ctx) + if code or body then + if type(body) == "table" and ngx.header["Content-Type"] == nil then + core.response.set_header("Content-Type", "application/json") + end + + core.response.exit(code, body) + end + end + }) + end + end + + local routes = {} + local v1_routes = {} + local function empty_func() end + +function fetch_control_api_router() + core.table.clear(routes) + + for _, plugin in ipairs(plugin_mod.plugins) do + local api_fun = plugin.control_api + if api_fun then + local api_route = api_fun() + register_api_routes(routes, api_route) + end + end + + local discovery_type = require("apisix.core.config_local").local_conf().discovery + if discovery_type then + local discovery = require("apisix.discovery.init").discovery + local dump_apis = {} + for key, _ in pairs(discovery_type) do + local dis_mod = discovery[key] + -- if discovery module has control_api method, support it + local api_fun = dis_mod.control_api + if api_fun then + local api_route = api_fun() + local format_route = format_dismod_control_api_uris(key, api_route) + register_api_routes(routes, format_route) + end + + local dump_data = dis_mod.dump_data + if dump_data then + local target_uri = format_dismod_uri(key, "/dump") + local item = { + methods = {"GET"}, + uris = {target_uri}, + handler = function() + return 200, dump_data() + end + } + core.table.insert(dump_apis, item) + end + end + + if #dump_apis > 0 then + 
core.log.notice("dump_apis: ", core.json.encode(dump_apis, true)) + register_api_routes(routes, dump_apis) + end + end + + core.table.clear(v1_routes) + register_api_routes(v1_routes, builtin_v1_routes) + + local v1_router, err = router.new(v1_routes) + if not v1_router then + return nil, err + end + + core.table.insert(routes, { + paths = {"/v1/*"}, + filter_fun = function(vars, opts, ...) + local uri = str_sub(vars.uri, #"/v1" + 1) + return v1_router:dispatch(uri, opts, ...) + end, + handler = empty_func, + }) + + local with_parameter = false + local conf = core.config.local_conf() + if conf.apisix.enable_control and conf.apisix.control then + if conf.apisix.control.router == "radixtree_uri_with_parameter" then + with_parameter = true + end + end + + if with_parameter then + return radixtree.new(routes) + else + return router.new(routes) + end +end + +end -- do + + +do + local match_opts = {} + local cached_version + local router + +function _M.match(uri) + if cached_version ~= plugin_mod.load_times then + local err + router, err = fetch_control_api_router() + if router == nil then + core.log.error("failed to fetch valid api router: ", err) + return false + end + + cached_version = plugin_mod.load_times + end + + core.table.clear(match_opts) + match_opts.method = get_method() + + return router:dispatch(uri, match_opts) +end + +end -- do + +local function reload_plugins() + core.log.info("start to hot reload plugins") + plugin_mod.load() +end + + +function _M.init_worker() + -- register reload plugin handler + events:register(reload_plugins, builtin_v1_routes.reload_event, "PUT") +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/control/v1.lua b/CloudronPackages/APISIX/apisix-source/apisix/control/v1.lua new file mode 100644 index 0000000..4d35018 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/control/v1.lua @@ -0,0 +1,506 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local get_routes = require("apisix.router").http_routes +local get_services = require("apisix.http.service").services +local upstream_mod = require("apisix.upstream") +local get_upstreams = upstream_mod.upstreams +local collectgarbage = collectgarbage +local ipairs = ipairs +local pcall = pcall +local str_format = string.format +local ngx = ngx +local ngx_var = ngx.var +local events = require("apisix.events") + + +local _M = {} + +_M.RELOAD_EVENT = 'control-api-plugin-reload' + +function _M.schema() + local http_plugins, stream_plugins = plugin.get_all({ + version = true, + priority = true, + schema = true, + metadata_schema = true, + consumer_schema = true, + type = true, + scope = true, + }) + local schema = { + main = { + consumer = core.schema.consumer, + consumer_group = core.schema.consumer_group, + global_rule = core.schema.global_rule, + plugin_config = core.schema.plugin_config, + plugins = core.schema.plugins, + proto = core.schema.proto, + route = core.schema.route, + service = core.schema.service, + ssl = core.schema.ssl, + stream_route = core.schema.stream_route, + upstream = core.schema.upstream, + upstream_hash_header_schema = core.schema.upstream_hash_header_schema, + upstream_hash_vars_schema = core.schema.upstream_hash_vars_schema, + }, + plugins = http_plugins, + stream_plugins = stream_plugins, + } + return 200, schema +end + + +local healthcheck +local function extra_checker_info(value) + if not healthcheck then + healthcheck = require("resty.healthcheck") + end + + local name = upstream_mod.get_healthchecker_name(value) + local nodes, err = healthcheck.get_target_list(name, "upstream-healthcheck") + if err then + core.log.error("healthcheck.get_target_list failed: ", err) + end + return { + name = value.key, + nodes = nodes, + } +end + + +local function get_checker_type(checks) + if checks.active and checks.active.type then + return checks.active.type + elseif checks.passive and checks.passive.type then + return checks.passive.type + end +end + + +local function iter_and_add_healthcheck_info(infos, values) + if not values then + return + end + + for _, value in core.config_util.iterate_values(values) do + local checks = value.value.checks or (value.value.upstream and value.value.upstream.checks) + if checks then + local info = extra_checker_info(value) + info.type = get_checker_type(checks) + core.table.insert(infos, info) + end + end +end + + +local HTML_TEMPLATE = [[ + + + APISIX upstream check status + + +

+<h1>APISIX upstream check status</h1>
+<table style="background-color:white" cellspacing="0" cellpadding="3" border="1">
+  <tr bgcolor="#C0C0C0">
+    <th>Index</th>
+    <th>Upstream</th>
+    <th>Check type</th>
+    <th>Host</th>
+    <th>Status</th>
+    <th>Success counts</th>
+    <th>TCP Failures</th>
+    <th>HTTP Failures</th>
+    <th>TIMEOUT Failures</th>
+  </tr>
+{% local i = 0 %}
+{% for _, stat in ipairs(stats) do %}
+{% for _, node in ipairs(stat.nodes) do %}
+{% i = i + 1 %}
+  {% if node.status == "healthy" or node.status == "mostly_healthy" then %}
+  <tr>
+  {% else %}
+  <tr bgcolor="#FF0000">
+  {% end %}
+    <td>{* i *}</td>
+    <td>{* stat.name *}</td>
+    <td>{* stat.type *}</td>
+    <td>{* node.ip .. ":" .. node.port *}</td>
+    <td>{* node.status *}</td>
+    <td>{* node.counter.success *}</td>
+    <td>{* node.counter.tcp_failure *}</td>
+    <td>{* node.counter.http_failure *}</td>
+    <td>{* node.counter.timeout_failure *}</td>
+  </tr>
+{% end %}
+{% end %}
+ + +]] + +local html_render + +local function try_render_html(data) + if not html_render then + local template = require("resty.template") + html_render = template.compile(HTML_TEMPLATE) + end + local accept = ngx_var.http_accept + if accept and accept:find("text/html") then + local ok, out = pcall(html_render, data) + if not ok then + local err = str_format("HTML template rendering: %s", out) + core.log.error(err) + return nil, err + end + return out + end +end + + +local function _get_health_checkers() + local infos = {} + local routes = get_routes() + iter_and_add_healthcheck_info(infos, routes) + local services = get_services() + iter_and_add_healthcheck_info(infos, services) + local upstreams = get_upstreams() + iter_and_add_healthcheck_info(infos, upstreams) + return infos +end + + +function _M.get_health_checkers() + local infos = _get_health_checkers() + local out, err = try_render_html({stats=infos}) + if out then + core.response.set_header("Content-Type", "text/html") + return 200, out + end + if err then + return 503, {error_msg = err} + end + + return 200, infos +end + + +local function iter_and_find_healthcheck_info(values, src_type, src_id) + if not values then + return nil, str_format("%s[%s] not found", src_type, src_id) + end + + for _, value in core.config_util.iterate_values(values) do + if value.value.id == src_id then + local checks = value.value.checks or + (value.value.upstream and value.value.upstream.checks) + if not checks then + return nil, str_format("no checker for %s[%s]", src_type, src_id) + end + + local info = extra_checker_info(value) + info.type = get_checker_type(checks) + return info + end + end + + return nil, str_format("%s[%s] not found", src_type, src_id) +end + + +function _M.get_health_checker() + local uri_segs = core.utils.split_uri(ngx_var.uri) + core.log.info("healthcheck uri: ", core.json.delay_encode(uri_segs)) + + local src_type, src_id = uri_segs[4], uri_segs[5] + if not src_id then + return 404, {error_msg = str_format("missing src id for src type %s", src_type)} + end + + local values + if src_type == "routes" then + values = get_routes() + elseif src_type == "services" then + values = get_services() + elseif src_type == "upstreams" then + values = get_upstreams() + else + return 400, {error_msg = str_format("invalid src type %s", src_type)} + end + + local info, err = iter_and_find_healthcheck_info(values, src_type, src_id) + if not info then + return 404, {error_msg = err} + end + + local out, err = try_render_html({stats={info}}) + if out then + core.response.set_header("Content-Type", "text/html") + return 200, out + end + if err then + return 503, {error_msg = err} + end + + return 200, info +end + +local function iter_add_get_routes_info(values, route_id) + local infos = {} + for _, route in core.config_util.iterate_values(values) do + local new_route = core.table.deepcopy(route) + if new_route.value.upstream and new_route.value.upstream.parent then + new_route.value.upstream.parent = nil + end + -- remove healthcheck info + new_route.checker = nil + new_route.checker_idx = nil + new_route.checker_upstream = nil + new_route.clean_handlers = nil + core.table.insert(infos, new_route) + -- check the route id + if route_id and route.value.id == route_id then + return new_route + end + end + if not route_id then + return infos + end + return nil +end + +function _M.dump_all_routes_info() + local routes = get_routes() + local infos = iter_add_get_routes_info(routes, nil) + return 200, infos +end + +function _M.dump_route_info() + local 
routes = get_routes() + local uri_segs = core.utils.split_uri(ngx_var.uri) + local route_id = uri_segs[4] + local route = iter_add_get_routes_info(routes, route_id) + if not route then + return 404, {error_msg = str_format("route[%s] not found", route_id)} + end + return 200, route +end + +local function iter_add_get_upstream_info(values, upstream_id) + if not values then + return nil + end + + local infos = {} + for _, upstream in core.config_util.iterate_values(values) do + local new_upstream = core.table.deepcopy(upstream) + core.table.insert(infos, new_upstream) + if new_upstream.value and new_upstream.value.parent then + new_upstream.value.parent = nil + end + -- check the upstream id + if upstream_id and upstream.value.id == upstream_id then + return new_upstream + end + end + if not upstream_id then + return infos + end + return nil +end + +function _M.dump_all_upstreams_info() + local upstreams = get_upstreams() + local infos = iter_add_get_upstream_info(upstreams, nil) + return 200, infos +end + +function _M.dump_upstream_info() + local upstreams = get_upstreams() + local uri_segs = core.utils.split_uri(ngx_var.uri) + local upstream_id = uri_segs[4] + local upstream = iter_add_get_upstream_info(upstreams, upstream_id) + if not upstream then + return 404, {error_msg = str_format("upstream[%s] not found", upstream_id)} + end + return 200, upstream +end + +function _M.trigger_gc() + -- TODO: find a way to trigger GC in the stream subsystem + collectgarbage() + return 200 +end + + +local function iter_add_get_services_info(values, svc_id) + local infos = {} + for _, svc in core.config_util.iterate_values(values) do + local new_svc = core.table.deepcopy(svc) + if new_svc.value.upstream and new_svc.value.upstream.parent then + new_svc.value.upstream.parent = nil + end + -- remove healthcheck info + new_svc.checker = nil + new_svc.checker_idx = nil + new_svc.checker_upstream = nil + new_svc.clean_handlers = nil + core.table.insert(infos, new_svc) + -- check the service id + if svc_id and svc.value.id == svc_id then + return new_svc + end + end + if not svc_id then + return infos + end + return nil +end + +function _M.dump_all_services_info() + local services = get_services() + local infos = iter_add_get_services_info(services, nil) + return 200, infos +end + +function _M.dump_service_info() + local services = get_services() + local uri_segs = core.utils.split_uri(ngx_var.uri) + local svc_id = uri_segs[4] + local info = iter_add_get_services_info(services, svc_id) + if not info then + return 404, {error_msg = str_format("service[%s] not found", svc_id)} + end + return 200, info +end + +function _M.dump_all_plugin_metadata() + local names = core.config.local_conf().plugins + local metadatas = core.table.new(0, #names) + for _, name in ipairs(names) do + local metadata = plugin.plugin_metadata(name) + if metadata then + core.table.insert(metadatas, metadata.value) + end + end + return 200, metadatas +end + +function _M.dump_plugin_metadata() + local uri_segs = core.utils.split_uri(ngx_var.uri) + local name = uri_segs[4] + local metadata = plugin.plugin_metadata(name) + if not metadata then + return 404, {error_msg = str_format("plugin metadata[%s] not found", name)} + end + return 200, metadata.value +end + +function _M.post_reload_plugins() + local success, err = events:post(_M.RELOAD_EVENT, ngx.req.get_method(), ngx.time()) + if not success then + core.response.exit(503, err) + end + + core.response.exit(200, "done") +end + +return { + -- /v1/schema + { + methods = {"GET"}, + uris = 
{"/schema"}, + handler = _M.schema, + }, + -- /v1/healthcheck + { + methods = {"GET"}, + uris = {"/healthcheck"}, + handler = _M.get_health_checkers, + }, + -- /v1/healthcheck/{src_type}/{src_id} + { + methods = {"GET"}, + uris = {"/healthcheck/*"}, + handler = _M.get_health_checker, + }, + -- /v1/gc + { + methods = {"POST"}, + uris = {"/gc"}, + handler = _M.trigger_gc, + }, + -- /v1/routes + { + methods = {"GET"}, + uris = {"/routes"}, + handler = _M.dump_all_routes_info, + }, + -- /v1/route/* + { + methods = {"GET"}, + uris = {"/route/*"}, + handler = _M.dump_route_info, + }, + -- /v1/services + { + methods = {"GET"}, + uris = {"/services"}, + handler = _M.dump_all_services_info + }, + -- /v1/service/* + { + methods = {"GET"}, + uris = {"/service/*"}, + handler = _M.dump_service_info + }, + -- /v1/upstreams + { + methods = {"GET"}, + uris = {"/upstreams"}, + handler = _M.dump_all_upstreams_info, + }, + -- /v1/upstream/* + { + methods = {"GET"}, + uris = {"/upstream/*"}, + handler = _M.dump_upstream_info, + }, + -- /v1/plugin_metadatas + { + methods = {"GET"}, + uris = {"/plugin_metadatas"}, + handler = _M.dump_all_plugin_metadata, + }, + -- /v1/plugin_metadata/* + { + methods = {"GET"}, + uris = {"/plugin_metadata/*"}, + handler = _M.dump_plugin_metadata, + }, + -- /v1/plugins/reload + { + methods = {"PUT"}, + uris = {"/plugins/reload"}, + handler = _M.post_reload_plugins, + }, + get_health_checkers = _get_health_checkers, + reload_event = _M.RELOAD_EVENT, +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core.lua b/CloudronPackages/APISIX/apisix-source/apisix/core.lua new file mode 100644 index 0000000..14c5186 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core.lua @@ -0,0 +1,68 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local log = require("apisix.core.log") +local utils = require("apisix.core.utils") +local local_conf, err = require("apisix.core.config_local").local_conf() +if not local_conf then + error("failed to parse yaml config: " .. err) +end + +local config_provider = local_conf.deployment and local_conf.deployment.config_provider + or "etcd" +log.info("use config_provider: ", config_provider) + +local config +-- Currently, we handle JSON parsing in config_yaml, so special processing is needed here. +if config_provider == "json" then + config = require("apisix.core.config_yaml") + config.file_type = "json" +else + config = require("apisix.core.config_" .. 
config_provider) +end + +config.type = config_provider + + +return { + version = require("apisix.core.version"), + log = log, + config = config, + config_util = require("apisix.core.config_util"), + sleep = utils.sleep, + json = require("apisix.core.json"), + table = require("apisix.core.table"), + request = require("apisix.core.request"), + response = require("apisix.core.response"), + lrucache = require("apisix.core.lrucache"), + schema = require("apisix.schema_def"), + string = require("apisix.core.string"), + ctx = require("apisix.core.ctx"), + timer = require("apisix.core.timer"), + id = require("apisix.core.id"), + ip = require("apisix.core.ip"), + io = require("apisix.core.io"), + utils = utils, + dns_client = require("apisix.core.dns.client"), + etcd = require("apisix.core.etcd"), + tablepool = require("tablepool"), + resolver = require("apisix.core.resolver"), + os = require("apisix.core.os"), + pubsub = require("apisix.core.pubsub"), + math = require("apisix.core.math"), + event = require("apisix.core.event"), + env = require("apisix.core.env"), +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/config_etcd.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/config_etcd.lua new file mode 100644 index 0000000..d476941 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/config_etcd.lua @@ -0,0 +1,1168 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Get configuration information. 
+-- +-- @module core.config_etcd + +local table = require("apisix.core.table") +local config_local = require("apisix.core.config_local") +local config_util = require("apisix.core.config_util") +local log = require("apisix.core.log") +local json = require("apisix.core.json") +local etcd_apisix = require("apisix.core.etcd") +local core_str = require("apisix.core.string") +local new_tab = require("table.new") +local inspect = require("inspect") +local errlog = require("ngx.errlog") +local process = require("ngx.process") +local log_level = errlog.get_sys_filter_level() +local NGX_INFO = ngx.INFO +local check_schema = require("apisix.core.schema").check +local exiting = ngx.worker.exiting +local worker_id = ngx.worker.id +local insert_tab = table.insert +local type = type +local ipairs = ipairs +local setmetatable = setmetatable +local ngx_sleep = require("apisix.core.utils").sleep +local ngx_timer_at = ngx.timer.at +local ngx_time = ngx.time +local ngx = ngx +local sub_str = string.sub +local tostring = tostring +local tonumber = tonumber +local xpcall = xpcall +local debug = debug +local string = string +local error = error +local pairs = pairs +local next = next +local assert = assert +local rand = math.random +local constants = require("apisix.constants") +local health_check = require("resty.etcd.health_check") +local semaphore = require("ngx.semaphore") +local tablex = require("pl.tablex") +local ngx_thread_spawn = ngx.thread.spawn +local ngx_thread_kill = ngx.thread.kill +local ngx_thread_wait = ngx.thread.wait + + +local is_http = ngx.config.subsystem == "http" +local err_etcd_grpc_engine_timeout = "context deadline exceeded" +local err_etcd_grpc_ngx_timeout = "timeout" +local err_etcd_unhealthy_all = "has no healthy etcd endpoint available" +local health_check_shm_name = "etcd-cluster-health-check" +local status_report_shared_dict_name = "status-report" +if not is_http then + health_check_shm_name = health_check_shm_name .. "-stream" +end +local created_obj = {} +local loaded_configuration = {} +local watch_ctx + + +local _M = { + version = 0.3, + local_conf = config_local.local_conf, + clear_local_cache = config_local.clear_cache, +} + + +local mt = { + __index = _M, + __tostring = function(self) + return " etcd key: " .. self.key + end +} + + +local get_etcd +do + local etcd_cli + + function get_etcd() + if etcd_cli ~= nil then + return etcd_cli + end + + local _, err + etcd_cli, _, err = etcd_apisix.get_etcd_syncer() + return etcd_cli, err + end +end + + +local function cancel_watch(http_cli) + local res, err = watch_ctx.cli:watchcancel(http_cli) + if res == 1 then + log.info("cancel watch connection success") + else + log.error("cancel watch failed: ", err) + end +end + + +-- append res to the queue and notify pending watchers +local function produce_res(res, err) + if log_level >= NGX_INFO then + log.info("append res: ", inspect(res), ", err: ", inspect(err)) + end + insert_tab(watch_ctx.res, {res=res, err=err}) + for _, sema in pairs(watch_ctx.sema) do + sema:post() + end + table.clear(watch_ctx.sema) +end + + +local function do_run_watch(premature) + if premature then + return + end + + -- the main watcher first start + if watch_ctx.started == false then + local local_conf, err = config_local.local_conf() + if not local_conf then + error("no local conf: " .. err) + end + watch_ctx.prefix = local_conf.etcd.prefix .. "/" + watch_ctx.timeout = local_conf.etcd.watch_timeout + + watch_ctx.cli, err = get_etcd() + if not watch_ctx.cli then + error("failed to create etcd instance: " .. 
string(err)) + end + + local rev = 0 + if loaded_configuration then + local _, res = next(loaded_configuration) + if res then + rev = tonumber(res.headers["X-Etcd-Index"]) + assert(rev > 0, 'invalid res.headers["X-Etcd-Index"]') + end + end + + if rev == 0 then + while true do + local res, err = watch_ctx.cli:get(watch_ctx.prefix) + if not res then + log.error("etcd get: ", err) + ngx_sleep(3) + else + rev = tonumber(res.body.header.revision) + break + end + end + end + + watch_ctx.rev = rev + 1 + watch_ctx.started = true + + log.info("main etcd watcher initialised, revision=", watch_ctx.rev) + + if watch_ctx.wait_init then + for _, sema in pairs(watch_ctx.wait_init) do + sema:post() + end + watch_ctx.wait_init = nil + end + end + + local opts = {} + opts.timeout = watch_ctx.timeout or 50 -- second + opts.need_cancel = true + opts.start_revision = watch_ctx.rev + + log.info("restart watchdir: start_revision=", opts.start_revision) + + local res_func, err, http_cli = watch_ctx.cli:watchdir(watch_ctx.prefix, opts) + if not res_func then + log.error("watchdir err: ", err) + ngx_sleep(3) + return + end + + ::watch_event:: + while true do + local res, err = res_func() + if log_level >= NGX_INFO then + log.info("res_func: ", inspect(res)) + end + + if not res then + if err ~= "closed" and + err ~= "timeout" and + err ~= "broken pipe" + then + log.error("wait watch event: ", err) + end + cancel_watch(http_cli) + break + end + + if res.error then + log.error("wait watch event: ", inspect(res.error)) + cancel_watch(http_cli) + break + end + + if res.result.created then + goto watch_event + end + + if res.result.canceled then + log.warn("watch canceled by etcd, res: ", inspect(res)) + if res.result.compact_revision then + watch_ctx.rev = tonumber(res.result.compact_revision) + log.error("etcd compacted, compact_revision=", watch_ctx.rev) + produce_res(nil, "compacted") + end + cancel_watch(http_cli) + break + end + + -- cleanup + local min_idx = 0 + for _, idx in pairs(watch_ctx.idx) do + if (min_idx == 0) or (idx < min_idx) then + min_idx = idx + end + end + + for i = 1, min_idx - 1 do + watch_ctx.res[i] = false + end + + if min_idx > 100 then + for k, idx in pairs(watch_ctx.idx) do + watch_ctx.idx[k] = idx - min_idx + 1 + end + -- trim the res table + for i = 1, min_idx - 1 do + table.remove(watch_ctx.res, 1) + end + end + + local rev = tonumber(res.result.header.revision) + if rev == nil then + log.warn("receive a invalid revision header, header: ", inspect(res.result.header)) + cancel_watch(http_cli) + break + end + + if rev < watch_ctx.rev then + log.error("received smaller revision, rev=", rev, ", watch_ctx.rev=", + watch_ctx.rev,". etcd may be restarted. 
resyncing....") + watch_ctx.rev = rev + produce_res(nil, "restarted") + cancel_watch(http_cli) + break + end + if rev > watch_ctx.rev then + watch_ctx.rev = rev + 1 + end + produce_res(res) + end +end + + +local function run_watch(premature) + local run_watch_th, err = ngx_thread_spawn(do_run_watch, premature) + if not run_watch_th then + log.error("failed to spawn thread do_run_watch: ", err) + return + end + + local check_worker_th, err = ngx_thread_spawn(function () + while not exiting() do + ngx_sleep(0.1) + end + end) + if not check_worker_th then + log.error("failed to spawn thread check_worker: ", err) + return + end + + local ok, err = ngx_thread_wait(run_watch_th, check_worker_th) + if not ok then + log.error("run_watch or check_worker thread terminates failed", + " restart those threads, error: ", inspect(err)) + end + + ngx_thread_kill(run_watch_th) + ngx_thread_kill(check_worker_th) + + if not exiting() then + ngx_timer_at(0, run_watch) + else + -- notify child watchers + produce_res(nil, "worker exited") + end +end + + +local function init_watch_ctx(key) + if not watch_ctx then + watch_ctx = { + idx = {}, + res = {}, + sema = {}, + wait_init = {}, + started = false, + } + ngx_timer_at(0, run_watch) + end + + if watch_ctx.started == false then + -- wait until the main watcher is started + local sema, err = semaphore.new() + if not sema then + error(err) + end + watch_ctx.wait_init[key] = sema + while true do + local ok, err = sema:wait(60) + if ok then + break + end + log.error("wait main watcher to start, key: ", key, ", err: ", err) + end + end +end + + +local function getkey(etcd_cli, key) + if not etcd_cli then + return nil, "not inited" + end + + local res, err = etcd_cli:readdir(key) + if not res then + -- log.error("failed to get key from etcd: ", err) + return nil, err + end + + if type(res.body) ~= "table" then + return nil, "failed to get key from etcd" + end + + res, err = etcd_apisix.get_format(res, key, true) + if not res then + return nil, err + end + + return res +end + + +local function readdir(etcd_cli, key, formatter) + if not etcd_cli then + return nil, "not inited" + end + + local res, err = etcd_cli:readdir(key) + if not res then + -- log.error("failed to get key from etcd: ", err) + return nil, err + end + + if type(res.body) ~= "table" then + return nil, "failed to read etcd dir" + end + + res, err = etcd_apisix.get_format(res, key .. 
'/', true, formatter) + if not res then + return nil, err + end + + return res +end + + +local function http_waitdir(self, etcd_cli, key, modified_index, timeout) + if not watch_ctx.idx[key] then + watch_ctx.idx[key] = 1 + end + + ::iterate_events:: + for i = watch_ctx.idx[key], #watch_ctx.res do + watch_ctx.idx[key] = i + 1 + + local item = watch_ctx.res[i] + if item == false then + goto iterate_events + end + + local res, err = item.res, item.err + if err then + return res, err + end + + -- ignore res with revision smaller then self.prev_index + if tonumber(res.result.header.revision) > self.prev_index then + local res2 + for _, evt in ipairs(res.result.events) do + if core_str.find(evt.kv.key, key) == 1 then + if not res2 then + res2 = tablex.deepcopy(res) + table.clear(res2.result.events) + end + insert_tab(res2.result.events, evt) + end + end + + if res2 then + if log_level >= NGX_INFO then + log.info("http_waitdir: ", inspect(res2)) + end + return res2 + end + end + end + + -- if no events, wait via semaphore + if not self.watch_sema then + local sema, err = semaphore.new() + if not sema then + error(err) + end + self.watch_sema = sema + end + + watch_ctx.sema[key] = self.watch_sema + local ok, err = self.watch_sema:wait(timeout or 60) + watch_ctx.sema[key] = nil + if ok then + goto iterate_events + else + if err ~= "timeout" then + log.error("wait watch event, key=", key, ", err: ", err) + end + return nil, err + end +end + + +local function waitdir(self) + local etcd_cli = self.etcd_cli + local key = self.key + local modified_index = self.prev_index + 1 + local timeout = self.timeout + + if not etcd_cli then + return nil, "not inited" + end + + local res, err = http_waitdir(self, etcd_cli, key, modified_index, timeout) + + if not res then + -- log.error("failed to get key from etcd: ", err) + return nil, err + end + + return etcd_apisix.watch_format(res) +end + + +local function short_key(self, str) + return sub_str(str, #self.key + 2) +end + + +local function sync_status_to_shdict(status) + local local_conf = config_local.local_conf() + if not local_conf.apisix.status then + return + end + if process.type() ~= "worker" then + return + end + local status_shdict = ngx.shared[status_report_shared_dict_name] + if not status_shdict then + return + end + local id = worker_id() + status_shdict:set(id, status) +end + + +local function load_full_data(self, dir_res, headers) + local err + local changed = false + + if self.single_item then + self.values = new_tab(1, 0) + self.values_hash = new_tab(0, 1) + + local item = dir_res + local data_valid = item.value ~= nil + + if data_valid and self.item_schema then + data_valid, err = check_schema(self.item_schema, item.value) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.encode(item.value)) + end + end + + if data_valid and self.checker then + data_valid, err = self.checker(item.value) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item.value)) + end + end + + if data_valid then + changed = true + insert_tab(self.values, item) + self.values_hash[self.key] = #self.values + + item.clean_handlers = {} + + if self.filter then + self.filter(item) + end + end + + self:upgrade_version(item.modifiedIndex) + + else + -- here dir_res maybe res.body.node or res.body.list + -- we need make values equals to res.body.node.nodes or res.body.list + local values = (dir_res and dir_res.nodes) or dir_res + if not values 
then + values = {} + end + + self.values = new_tab(#values, 0) + self.values_hash = new_tab(0, #values) + + for _, item in ipairs(values) do + local key = short_key(self, item.key) + local data_valid = true + if type(item.value) ~= "table" then + data_valid = false + log.error("invalid item data of [", self.key .. "/" .. key, + "], val: ", item.value, + ", it should be an object") + end + + if data_valid and self.item_schema then + data_valid, err = check_schema(self.item_schema, item.value) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.encode(item.value)) + end + end + + if data_valid and self.checker then + -- TODO: An opts table should be used + -- as different checkers may use different parameters + data_valid, err = self.checker(item.value, item.key) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item.value)) + end + end + + if data_valid then + changed = true + insert_tab(self.values, item) + self.values_hash[key] = #self.values + + item.value.id = key + item.clean_handlers = {} + + if self.filter then + self.filter(item) + end + end + + self:upgrade_version(item.modifiedIndex) + end + end + + if headers then + self.prev_index = tonumber(headers["X-Etcd-Index"]) or 0 + self:upgrade_version(headers["X-Etcd-Index"]) + end + + if changed then + self.conf_version = self.conf_version + 1 + end + + self.need_reload = false + sync_status_to_shdict(true) +end + + +function _M.upgrade_version(self, new_ver) + new_ver = tonumber(new_ver) + if not new_ver then + return + end + + local pre_index = self.prev_index + + if new_ver <= pre_index then + return + end + + self.prev_index = new_ver + return +end + + +local function sync_data(self) + if not self.key then + return nil, "missing 'key' arguments" + end + + init_watch_ctx(self.key) + + if self.need_reload then + local res, err = readdir(self.etcd_cli, self.key) + if not res then + return false, err + end + + local dir_res, headers = res.body.list or res.body.node or {}, res.headers + log.debug("readdir key: ", self.key, " res: ", + json.delay_encode(dir_res)) + + if self.values then + for i, val in ipairs(self.values) do + config_util.fire_all_clean_handlers(val) + end + + self.values = nil + self.values_hash = nil + end + + load_full_data(self, dir_res, headers) + + return true + end + + local dir_res, err = waitdir(self) + log.info("waitdir key: ", self.key, " prev_index: ", self.prev_index + 1) + log.info("res: ", json.delay_encode(dir_res, true), ", err: ", err) + + if not dir_res then + if err == "compacted" or err == "restarted" then + self.need_reload = true + log.error("waitdir [", self.key, "] err: ", err, + ", will read the configuration again via readdir") + return false + end + + return false, err + end + + local res = dir_res.body.node + local err_msg = dir_res.body.message + if err_msg then + return false, err + end + + if not res then + return false, err + end + + local res_copy = res + -- waitdir will return [res] even for self.single_item = true + for _, res in ipairs(res_copy) do + local key + local data_valid = true + if self.single_item then + key = self.key + else + key = short_key(self, res.key) + end + + if res.value and not self.single_item and type(res.value) ~= "table" then + data_valid = false + log.error("invalid item data of [", self.key .. "/" .. 
key, + "], val: ", res.value, + ", it should be an object") + end + + if data_valid and res.value and self.item_schema then + data_valid, err = check_schema(self.item_schema, res.value) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.encode(res.value)) + end + end + + if data_valid and res.value and self.checker then + data_valid, err = self.checker(res.value, res.key) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(res.value)) + end + end + + -- the modifiedIndex tracking should be updated regardless of the validity of the config + self:upgrade_version(res.modifiedIndex) + + if not data_valid then + -- do not update the config cache when the data is invalid + -- invalid data should only cancel this config item update, not discard + -- the remaining events, use continue instead of loop break and return + goto CONTINUE + end + + if res.dir then + if res.value then + return false, "todo: support for parsing `dir` response " + .. "structures. " .. json.encode(res) + end + return false + end + + local pre_index = self.values_hash[key] + if pre_index then + local pre_val = self.values[pre_index] + if pre_val then + config_util.fire_all_clean_handlers(pre_val) + end + + if res.value then + if not self.single_item then + res.value.id = key + end + + self.values[pre_index] = res + res.clean_handlers = {} + log.info("update data by key: ", key) + + else + self.sync_times = self.sync_times + 1 + self.values[pre_index] = false + self.values_hash[key] = nil + log.info("delete data by key: ", key) + end + + elseif res.value then + res.clean_handlers = {} + insert_tab(self.values, res) + self.values_hash[key] = #self.values + if not self.single_item then + res.value.id = key + end + + log.info("insert data by key: ", key) + end + + -- avoid space waste + if self.sync_times > 100 then + local values_original = table.clone(self.values) + table.clear(self.values) + + for i = 1, #values_original do + local val = values_original[i] + if val then + table.insert(self.values, val) + end + end + + table.clear(self.values_hash) + log.info("clear stale data in `values_hash` for key: ", key) + + for i = 1, #self.values do + key = short_key(self, self.values[i].key) + self.values_hash[key] = i + end + + self.sync_times = 0 + end + + -- /plugins' filter need to known self.values when it is called + -- so the filter should be called after self.values set. + if self.filter then + self.filter(res) + end + + self.conf_version = self.conf_version + 1 + + ::CONTINUE:: + end + + return self.values +end + + +function _M.get(self, key) + if not self.values_hash then + return + end + + local arr_idx = self.values_hash[tostring(key)] + if not arr_idx then + return nil + end + + return self.values[arr_idx] +end + + +function _M.getkey(self, key) + if not self.running then + return nil, "stopped" + end + + local local_conf = config_local.local_conf() + if local_conf and local_conf.etcd and local_conf.etcd.prefix then + key = local_conf.etcd.prefix .. 
key + end + + return getkey(self.etcd_cli, key) +end + + +local function _automatic_fetch(premature, self) + if premature then + return + end + + if not (health_check.conf and health_check.conf.shm_name) then + -- used for worker processes to synchronize configuration + local _, err = health_check.init({ + shm_name = health_check_shm_name, + fail_timeout = self.health_check_timeout, + max_fails = 3, + retry = true, + }) + if err then + log.warn("fail to create health_check: " .. err) + end + end + + local i = 0 + while not exiting() and self.running and i <= 32 do + i = i + 1 + + local ok, err = xpcall(function() + if not self.etcd_cli then + local etcd_cli, err = get_etcd() + if not etcd_cli then + error("failed to create etcd instance for key [" + .. self.key .. "]: " .. (err or "unknown")) + end + self.etcd_cli = etcd_cli + end + + local ok, err = sync_data(self) + if err then + if core_str.find(err, err_etcd_grpc_engine_timeout) or + core_str.find(err, err_etcd_grpc_ngx_timeout) + then + err = "timeout" + end + + if core_str.find(err, err_etcd_unhealthy_all) then + local reconnected = false + while err and not reconnected and i <= 32 do + local backoff_duration, backoff_factor, backoff_step = 1, 2, 6 + for _ = 1, backoff_step do + i = i + 1 + ngx_sleep(backoff_duration) + _, err = sync_data(self) + if not err or not core_str.find(err, err_etcd_unhealthy_all) then + log.warn("reconnected to etcd") + reconnected = true + break + end + backoff_duration = backoff_duration * backoff_factor + log.error("no healthy etcd endpoint available, next retry after " + .. backoff_duration .. "s") + end + end + elseif err == "worker exited" then + log.info("worker exited.") + return + elseif err ~= "timeout" and err ~= "Key not found" + and self.last_err ~= err then + log.error("failed to fetch data from etcd: ", err, ", ", + tostring(self)) + end + + if err ~= self.last_err then + self.last_err = err + self.last_err_time = ngx_time() + elseif self.last_err then + if ngx_time() - self.last_err_time >= 30 then + self.last_err = nil + end + end + + -- etcd watch timeout is an expected error, so there is no need for resync_delay + if err ~= "timeout" then + ngx_sleep(self.resync_delay + rand() * 0.5 * self.resync_delay) + end + elseif not ok then + -- no error. reentry the sync with different state + ngx_sleep(0.05) + end + + end, debug.traceback) + + if not ok then + log.error("failed to fetch data from etcd: ", err, ", ", + tostring(self)) + ngx_sleep(self.resync_delay + rand() * 0.5 * self.resync_delay) + break + end + end + + if not exiting() and self.running then + ngx_timer_at(0, _automatic_fetch, self) + end +end + +-- for test +_M.test_sync_data = sync_data +_M.test_automatic_fetch = _automatic_fetch +function _M.inject_sync_data(f) + sync_data = f +end + + +--- +-- Create a new connection to communicate with the control plane. +-- This function should be used in the `init_worker_by_lua` phase. +-- +-- @function core.config.new +-- @tparam string key etcd directory to be monitored, e.g. "/routes". +-- @tparam table opts Parameters related to the etcd client connection. 
+-- The keys in `opts` are as follows: +-- * automatic: whether to get the latest etcd data automatically +-- * item_schema: the jsonschema that checks the value of each item under the **key** directory +-- * filter: the custom function to filter the value of each item under the **key** directory +-- * timeout: the timeout for watch operation, default is 30s +-- * single_item: whether only one item under the **key** directory +-- * checker: the custom function to check the value of each item under the **key** directory +-- @treturn table The etcd client connection. +-- @usage +-- local plugins_conf, err = core.config.new("/custom_dir", { +-- automatic = true, +-- filter = function(item) +-- -- called once before reload for sync data from admin +-- end, +--}) +function _M.new(key, opts) + local local_conf, err = config_local.local_conf() + if not local_conf then + return nil, err + end + + local etcd_conf = local_conf.etcd + local prefix = etcd_conf.prefix + local resync_delay = etcd_conf.resync_delay + if not resync_delay or resync_delay < 0 then + resync_delay = 5 + end + local health_check_timeout = etcd_conf.health_check_timeout + if not health_check_timeout or health_check_timeout < 0 then + health_check_timeout = 10 + end + local automatic = opts and opts.automatic + local item_schema = opts and opts.item_schema + local filter_fun = opts and opts.filter + local timeout = opts and opts.timeout + local single_item = opts and opts.single_item + local checker = opts and opts.checker + + local obj = setmetatable({ + etcd_cli = nil, + key = key and prefix .. key, + automatic = automatic, + item_schema = item_schema, + checker = checker, + sync_times = 0, + running = true, + conf_version = 0, + values = nil, + need_reload = true, + watching_stream = nil, + routes_hash = nil, + prev_index = 0, + last_err = nil, + last_err_time = nil, + resync_delay = resync_delay, + health_check_timeout = health_check_timeout, + timeout = timeout, + single_item = single_item, + filter = filter_fun, + }, mt) + + if automatic then + if not key then + return nil, "missing `key` argument" + end + + if loaded_configuration[key] then + local res = loaded_configuration[key] + loaded_configuration[key] = nil -- tried to load + + log.notice("use loaded configuration ", key) + + local dir_res, headers = res.body, res.headers + load_full_data(obj, dir_res, headers) + end + + ngx_timer_at(0, _automatic_fetch, obj) + + else + local etcd_cli, err = get_etcd() + if not etcd_cli then + return nil, "failed to start an etcd instance: " .. 
err + end + obj.etcd_cli = etcd_cli + end + + if key then + created_obj[key] = obj + end + + return obj +end + + +function _M.close(self) + self.running = false +end + + +function _M.fetch_created_obj(key) + return created_obj[key] +end + + +function _M.server_version(self) + if not self.running then + return nil, "stopped" + end + + local res, err = etcd_apisix.server_version() + if not res then + return nil, err + end + + return res.body +end + + +local function create_formatter(prefix) + return function (res) + res.body.nodes = {} + + local dirs + if is_http then + dirs = constants.HTTP_ETCD_DIRECTORY + else + dirs = constants.STREAM_ETCD_DIRECTORY + end + + local curr_dir_data + local curr_key + for _, item in ipairs(res.body.kvs) do + if curr_dir_data then + if core_str.has_prefix(item.key, curr_key) then + table.insert(curr_dir_data, etcd_apisix.kvs_to_node(item)) + goto CONTINUE + end + + curr_dir_data = nil + end + + local key = sub_str(item.key, #prefix + 1) + if dirs[key] then + -- single item + loaded_configuration[key] = { + body = etcd_apisix.kvs_to_node(item), + headers = res.headers, + } + else + local key = sub_str(item.key, #prefix + 1, #item.key - 1) + -- ensure the same key hasn't been handled as single item + if dirs[key] and not loaded_configuration[key] then + loaded_configuration[key] = { + body = { + nodes = {}, + }, + headers = res.headers, + } + curr_dir_data = loaded_configuration[key].body.nodes + curr_key = item.key + end + end + + ::CONTINUE:: + end + + return res + end +end + + +function _M.init() + local local_conf, err = config_local.local_conf() + if not local_conf then + return nil, err + end + + if table.try_read_attr(local_conf, "apisix", "disable_sync_configuration_during_start") then + return true + end + + -- don't go through proxy during start because the proxy is not available + local etcd_cli, prefix, err = etcd_apisix.new_without_proxy() + if not etcd_cli then + return nil, "failed to start a etcd instance: " .. err + end + + local res, err = readdir(etcd_cli, prefix, create_formatter(prefix)) + if not res then + return nil, err + end + + return true +end + + +function _M.init_worker() + sync_status_to_shdict(false) + local local_conf, err = config_local.local_conf() + if not local_conf then + return nil, err + end + + if table.try_read_attr(local_conf, "apisix", "disable_sync_configuration_during_start") then + return true + end + + return true +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/config_local.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/config_local.lua new file mode 100644 index 0000000..2b8f92f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/config_local.lua @@ -0,0 +1,71 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Get configuration information. +-- +-- @module core.config_local + +local file = require("apisix.cli.file") + + +local _M = {} + + +local config_data + + +function _M.clear_cache() + config_data = nil +end + +--- +-- Get the local config info. +-- The configuration information consists of two parts, user-defined configuration in +-- `conf/config.yaml` and default configuration in `conf/config-default.yaml`. The configuration +-- of the same name present in `conf/config.yaml` will overwrite `conf/config-default.yaml`. +-- The final full configuration is `conf/config.yaml` and the default configuration in +-- `conf/config-default.yaml` that is not overwritten. +-- +-- @function core.config_local.local_conf +-- @treturn table The configuration information. +-- @usage +-- -- Given a config item in `conf/config.yaml`: +-- -- +-- -- apisix: +-- -- ssl: +-- -- fallback_sni: "a.test2.com" +-- -- +-- -- you can get the value of `fallback_sni` by: +-- local local_conf = core.config.local_conf() +-- local fallback_sni = core.table.try_read_attr( +-- local_conf, "apisix", "ssl", "fallback_sni") -- "a.test2.com" +function _M.local_conf(force) + if not force and config_data then + return config_data + end + + local default_conf, err = file.read_yaml_conf() + if not default_conf then + return nil, err + end + + config_data = default_conf + return config_data +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/config_util.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/config_util.lua new file mode 100644 index 0000000..9621729 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/config_util.lua @@ -0,0 +1,219 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Collection of util functions +-- +-- @module core.config_util + +local core_tab = require("apisix.core.table") +local log = require("apisix.core.log") +local str_byte = string.byte +local str_char = string.char +local ipairs = ipairs +local setmetatable = setmetatable +local tostring = tostring +local type = type + + +local _M = {} + + +local function _iterate_values(self, tab) + while true do + self.idx = self.idx + 1 + local v = tab[self.idx] + if type(v) == "table" then + return self.idx, v + end + if v == nil then + return nil, nil + end + -- skip the tombstone + end +end + + +function _M.iterate_values(tab) + local iter = setmetatable({idx = 0}, {__call = _iterate_values}) + return iter, tab, 0 +end + + +-- Add a clean handler to a runtime configuration item. +-- The clean handler will be called when the item is deleted from configuration +-- or cancelled. Note that Nginx worker exit doesn't trigger the clean handler. 
+-- Return an index so that we can cancel it later. +function _M.add_clean_handler(item, func) + if not item.clean_handlers then + return nil, "clean handlers for the item are nil" + end + + if not item.clean_handlers._id then + item.clean_handlers._id = 1 + end + + local id = item.clean_handlers._id + item.clean_handlers._id = item.clean_handlers._id + 1 + core_tab.insert(item.clean_handlers, {f = func, id = id}) + return id +end + + +-- cancel a clean handler added by add_clean_handler. +-- If `fire` is true, call the clean handler. +function _M.cancel_clean_handler(item, idx, fire) + local pos, f + -- the number of pending clean handler is small so we can cancel them in O(n) + for i, clean_handler in ipairs(item.clean_handlers) do + if clean_handler.id == idx then + pos = i + f = clean_handler.f + break + end + end + + if not pos then + log.error("failed to find clean_handler with idx ", idx) + return + end + + core_tab.remove(item.clean_handlers, pos) + if not fire then + return + end + + if f then + f(item) + else + log.error("The function used to clear the health checker is nil, please check") + end +end + + +-- fire all clean handlers added by add_clean_handler. +function _M.fire_all_clean_handlers(item) + -- When the key is deleted, the item will be set to false. + if not item then + return + end + if not item.clean_handlers then + return + end + + for _, clean_handler in ipairs(item.clean_handlers) do + clean_handler.f(item) + end + + item.clean_handlers = {} +end + + +--- +-- Convert different time units to seconds as time units. +-- Time intervals can be specified in milliseconds, seconds, minutes, hours, days and so on, +-- using the following suffixes: +-- ms milliseconds +-- s seconds +-- m minutes +-- h hours +-- d days +-- w weeks +-- M months, 30 days +-- y years, 365 days +-- Multiple units can be combined in a single value by specifying them in the order from the most +-- to the least significant, and optionally separated by whitespace. +-- A value without a suffix means seconds. +-- +-- @function core.config_util.parse_time_unit +-- @tparam number|string s Strings with time units, e.g. "60m". +-- @treturn number Number of seconds after conversion +-- @usage +-- local seconds = core.config_util.parse_time_unit("60m") -- 3600 +function _M.parse_time_unit(s) + local typ = type(s) + if typ == "number" then + return s + end + + if typ ~= "string" or #s == 0 then + return nil, "invalid data: " .. tostring(s) + end + + local size = 0 + local size_in_unit = 0 + local step = 60 * 60 * 24 * 365 + local with_ms = false + for i = 1, #s do + local scale + local unit = str_byte(s, i) + if unit == 121 then -- y + scale = 60 * 60 * 24 * 365 + elseif unit == 77 then -- M + scale = 60 * 60 * 24 * 30 + elseif unit == 119 then -- w + scale = 60 * 60 * 24 * 7 + elseif unit == 100 then -- d + scale = 60 * 60 * 24 + elseif unit == 104 then -- h + scale = 60 * 60 + elseif unit == 109 then -- m + unit = str_byte(s, i + 1) + if unit == 115 then -- ms + size = size * 1000 + with_ms = true + step = 0 + break + end + + scale = 60 + + elseif unit == 115 then -- s + scale = 1 + elseif 48 <= unit and unit <= 57 then + size_in_unit = size_in_unit * 10 + unit - 48 + elseif unit ~= 32 then + return nil, "invalid data: " .. str_char(unit) + end + + if scale ~= nil then + if scale > step then + return nil, "unexpected unit: " .. 
str_char(unit) + end + + step = scale + size = size + scale * size_in_unit + size_in_unit = 0 + end + end + + if size_in_unit > 0 then + if step == 1 then + return nil, "specific unit conflicts with the default unit second" + end + + size = size + size_in_unit + end + + if with_ms then + size = size / 1000 + end + + return size +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/config_xds.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/config_xds.lua new file mode 100644 index 0000000..bdb4520 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/config_xds.lua @@ -0,0 +1,378 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Get configuration form ngx.shared.DICT +-- +-- @module core.config_xds + +local config_local = require("apisix.core.config_local") +local config_util = require("apisix.core.config_util") +local string = require("apisix.core.string") +local log = require("apisix.core.log") +local json = require("apisix.core.json") +local os = require("apisix.core.os") +local ngx_sleep = require("apisix.core.utils").sleep +local check_schema = require("apisix.core.schema").check +local new_tab = require("table.new") +local table = table +local insert_tab = table.insert +local error = error +local pcall = pcall +local tostring = tostring +local setmetatable = setmetatable +local io = io +local io_open = io.open +local io_close = io.close +local package = package +local ipairs = ipairs +local type = type +local sub_str = string.sub +local ffi = require ("ffi") +local C = ffi.C +local config = ngx.shared["xds-config"] +local conf_ver = ngx.shared["xds-config-version"] +local is_http = ngx.config.subsystem == "http" +local ngx_re_match = ngx.re.match +local ngx_re_gmatch = ngx.re.gmatch +local ngx_timer_every = ngx.timer.every +local ngx_timer_at = ngx.timer.at +local exiting = ngx.worker.exiting +local ngx_time = ngx.time + +local xds_lib_name = "libxds.so" + +local process +if is_http then + process = require("ngx.process") +end + +local shdict_udata_to_zone +if not pcall(function() return C.ngx_http_lua_ffi_shdict_udata_to_zone end) then + shdict_udata_to_zone = C.ngx_meta_lua_ffi_shdict_udata_to_zone +else + shdict_udata_to_zone = C.ngx_http_lua_ffi_shdict_udata_to_zone +end + + +ffi.cdef[[ +extern void initial(void* config_zone, void* version_zone); +]] + +local created_obj = {} + +local _M = { + version = 0.1, + local_conf = config_local.local_conf, +} + + +local mt = { + __index = _M, + __tostring = function(self) + return " xds key: " .. 
self.key + end +} + + +-- todo: refactor this function in chash.lua and radixtree.lua +local function load_shared_lib(lib_name) + local cpath = package.cpath + local tried_paths = new_tab(32, 0) + local i = 1 + + local iter, err = ngx_re_gmatch(cpath, "[^;]+", "jo") + if not iter then + error("failed to gmatch: " .. err) + end + + while true do + local it = iter() + local fpath + fpath, err = ngx_re_match(it[0], "(.*/)", "jo") + if err then + error("failed to match: " .. err) + end + local spath = fpath[0] .. lib_name + + local f = io_open(spath) + if f ~= nil then + io_close(f) + return ffi.load(spath) + end + tried_paths[i] = spath + i = i + 1 + + if not it then + break + end + end + + return nil, tried_paths +end + + +local function load_libxds(lib_name) + local xdsagent, tried_paths = load_shared_lib(lib_name) + + if not xdsagent then + tried_paths[#tried_paths + 1] = 'tried above paths but can not load ' .. lib_name + error("can not load xds library, tried paths: " .. + table.concat(tried_paths, '\r\n', 1, #tried_paths)) + end + + local config_zone = shdict_udata_to_zone(config[1]) + local config_shd_cdata = ffi.cast("void*", config_zone) + + local conf_ver_zone = shdict_udata_to_zone(conf_ver[1]) + local conf_ver_shd_cdata = ffi.cast("void*", conf_ver_zone) + + xdsagent.initial(config_shd_cdata, conf_ver_shd_cdata) +end + + +local latest_version +local function sync_data(self) + if self.conf_version == latest_version then + return true + end + + if self.values then + for _, val in ipairs(self.values) do + config_util.fire_all_clean_handlers(val) + end + self.values = nil + self.values_hash = nil + end + + local keys = config:get_keys(0) + + if not keys or #keys <= 0 then + -- xds did not write any data to shdict + return false, "no keys" + end + + self.values = new_tab(#keys, 0) + self.values_hash = new_tab(0, #keys) + + for _, key in ipairs(keys) do + if string.has_prefix(key, self.key) then + local data_valid = true + local conf_str = config:get(key, 0) + local conf, err = json.decode(conf_str) + if not conf then + data_valid = false + log.error("decode the conf of [", key, "] failed, err: ", err, + ", conf_str: ", conf_str) + end + + if not self.single_item and type(conf) ~= "table" then + data_valid = false + log.error("invalid conf of [", key, "], conf: ", conf, + ", it should be an object") + end + + if data_valid and self.item_schema then + local ok, err = check_schema(self.item_schema, conf) + if not ok then + data_valid = false + log.error("failed to check the conf of [", key, "] err:", err) + end + end + + if data_valid and self.checker then + local ok, err = self.checker(conf) + if not ok then + data_valid = false + log.error("failed to check the conf of [", key, "] err:", err) + end + end + + if data_valid then + if not conf.id then + conf.id = sub_str(key, #self.key + 2, #key + 1) + log.warn("the id of [", key, "] is nil, use the id: ", conf.id) + end + + local conf_item = {value = conf, modifiedIndex = latest_version, + key = key} + insert_tab(self.values, conf_item) + self.values_hash[conf.id] = #self.values + conf_item.clean_handlers = {} + + if self.filter then + self.filter(conf_item) + end + end + end + end + + self.conf_version = latest_version + return true +end + + +local function _automatic_fetch(premature, self) + if premature then + return + end + + local i = 0 + while not exiting() and self.running and i <= 32 do + i = i + 1 + local ok, ok2, err = pcall(sync_data, self) + if not ok then + err = ok2 + log.error("failed to fetch data from xds: ", + err, ", ", 
tostring(self)) + ngx_sleep(3) + break + elseif not ok2 and err then + -- todo: handler other error + if err ~= "wait for more time" and err ~= "no keys" and self.last_err ~= err then + log.error("failed to fetch data from xds, ", err, ", ", tostring(self)) + end + + if err ~= self.last_err then + self.last_err = err + self.last_err_time = ngx_time() + else + if ngx_time() - self.last_err_time >= 30 then + self.last_err = nil + end + end + ngx_sleep(0.5) + elseif not ok2 then + ngx_sleep(0.05) + else + ngx_sleep(0.1) + end + end + + if not exiting() and self.running then + ngx_timer_at(0, _automatic_fetch, self) + end +end + + +local function fetch_version(premature) + if premature then + return + end + + local version = conf_ver:get("version") + + if not version then + return + end + + if version ~= latest_version then + latest_version = version + end +end + + +function _M.new(key, opts) + local automatic = opts and opts.automatic + local item_schema = opts and opts.item_schema + local filter_fun = opts and opts.filter + local single_item = opts and opts.single_item + local checker = opts and opts.checker + + + local obj = setmetatable({ + automatic = automatic, + item_schema = item_schema, + checker = checker, + sync_times = 0, + running = true, + conf_version = 0, + values = nil, + routes_hash = nil, + prev_index = nil, + last_err = nil, + last_err_time = nil, + key = key, + single_item = single_item, + filter = filter_fun, + }, mt) + + if automatic then + if not key then + return nil, "missing `key` argument" + end + + -- blocking until xds completes initial configuration + while true do + os.usleep(1000) + fetch_version() + if latest_version then + break + end + end + + local ok, ok2, err = pcall(sync_data, obj) + if not ok then + err = ok2 + end + + if err then + log.error("failed to fetch data from xds ", + err, ", ", key) + end + + ngx_timer_at(0, _automatic_fetch, obj) + end + + if key then + created_obj[key] = obj + end + + return obj +end + + +function _M.get(self, key) + if not self.values_hash then + return + end + + local arr_idx = self.values_hash[tostring(key)] + if not arr_idx then + return nil + end + + return self.values[arr_idx] +end + + +function _M.fetch_created_obj(key) + return created_obj[key] +end + + +function _M.init_worker() + if process.type() == "privileged agent" then + load_libxds(xds_lib_name) + end + + ngx_timer_every(1, fetch_version) + + return true +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/config_yaml.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/config_yaml.lua new file mode 100644 index 0000000..747b087 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/config_yaml.lua @@ -0,0 +1,579 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +--- Get configuration information in Stand-alone mode. +-- +-- @module core.config_yaml + +local config_local = require("apisix.core.config_local") +local config_util = require("apisix.core.config_util") +local yaml = require("lyaml") +local log = require("apisix.core.log") +local json = require("apisix.core.json") +local new_tab = require("table.new") +local check_schema = require("apisix.core.schema").check +local profile = require("apisix.core.profile") +local lfs = require("lfs") +local file = require("apisix.cli.file") +local exiting = ngx.worker.exiting +local insert_tab = table.insert +local type = type +local ipairs = ipairs +local setmetatable = setmetatable +local ngx_sleep = require("apisix.core.utils").sleep +local ngx_timer_at = ngx.timer.at +local ngx_time = ngx.time +local ngx_shared = ngx.shared +local sub_str = string.sub +local tostring = tostring +local pcall = pcall +local io = io +local ngx = ngx +local re_find = ngx.re.find +local process = require("ngx.process") +local worker_id = ngx.worker.id +local created_obj = {} +local shared_dict +local status_report_shared_dict_name = "status-report" + +local _M = { + version = 0.2, + local_conf = config_local.local_conf, + clear_local_cache = config_local.clear_cache, + + -- yaml or json + file_type = "yaml", + + ERR_NO_SHARED_DICT = "failed prepare standalone config shared dict, this will degrade ".. + "to event broadcasting, and if a worker crashes, the configuration ".. + "cannot be restored from other workers and shared dict" +} + + +local mt = { + __index = _M, + __tostring = function(self) + return "apisix.yaml key: " .. (self.key or "") + end +} + +local apisix_yaml +local apisix_yaml_mtime + +local config_yaml = { + path = profile:yaml_path("apisix"), + type = "yaml", + parse = function(self) + local f, err = io.open(self.path, "r") + if not f then + return nil, "failed to open file " .. self.path .. " : " .. err + end + + f:seek('end', -10) + local end_flag = f:read("*a") + local found_end_flag = re_find(end_flag, [[#END\s*$]], "jo") + + if not found_end_flag then + f:close() + return nil, "missing valid end flag in file " .. self.path + end + + f:seek('set') + local raw_config = f:read("*a") + f:close() + + return yaml.load(raw_config), nil + end +} + +local config_json = { + -- `-5` to remove the "yaml" suffix + path = config_yaml.path:sub(1, -5) .. "json", + type = "json", + parse = function(self) + local f, err = io.open(self.path, "r") + if not f then + return nil, "failed to open file " .. self.path .. " : " .. err + end + local raw_config = f:read("*a") + f:close() + + local config, err = json.decode(raw_config) + if err then + return nil, "failed to decode json: " .. err + end + return config, nil + end +} + +local config_file_table = { + yaml = config_yaml, + json = config_json +} + + +local config_file = setmetatable({}, { + __index = function(_, key) + return config_file_table[_M.file_type][key] + end +}) + + +local function sync_status_to_shdict(status) + if process.type() ~= "worker" then + return + end + local status_shdict = ngx.shared[status_report_shared_dict_name] + if not status_shdict then + return + end + local id = worker_id() + log.info("sync status to shared dict, id: ", id, " status: ", status) + status_shdict:set(id, status) +end + + +local function update_config(table, conf_version) + if not table then + log.error("failed update config: empty table") + return + end + + local ok, err = file.resolve_conf_var(table) + if not ok then + log.error("failed to resolve variables:" .. 
err) + return + end + + apisix_yaml = table + sync_status_to_shdict(true) + apisix_yaml_mtime = conf_version +end +_M._update_config = update_config + + +local function is_use_admin_api() + local local_conf, _ = config_local.local_conf() + return local_conf and local_conf.apisix and local_conf.apisix.enable_admin +end + + +local function read_apisix_config(premature, pre_mtime) + if premature then + return + end + local attributes, err = lfs.attributes(config_file.path) + if not attributes then + log.error("failed to fetch ", config_file.path, " attributes: ", err) + return + end + + local last_modification_time = attributes.modification + if apisix_yaml_mtime == last_modification_time then + return + end + + local config_new, err = config_file:parse() + if err then + log.error("failed to parse the content of file ", config_file.path, ": ", err) + return + end + + update_config(config_new, last_modification_time) + + log.warn("config file ", config_file.path, " reloaded.") +end + + +local function sync_data(self) + if not self.key then + return nil, "missing 'key' arguments" + end + + local conf_version + if is_use_admin_api() then + conf_version = apisix_yaml[self.conf_version_key] or 0 + else + if not apisix_yaml_mtime then + log.warn("wait for more time") + return nil, "failed to read local file " .. config_file.path + end + conf_version = apisix_yaml_mtime + end + + if not conf_version or conf_version == self.conf_version then + return true + end + + local items = apisix_yaml[self.key] + if not items then + self.values = new_tab(8, 0) + self.values_hash = new_tab(0, 8) + self.conf_version = conf_version + return true + end + + if self.values and #self.values > 0 then + if is_use_admin_api() then + -- filter self.values to retain only those whose IDs exist in the new items list. + local exist_values = new_tab(8, 0) + self.values_hash = new_tab(0, 8) + + local exist_items = {} + for _, item in ipairs(items) do + exist_items[tostring(item.id)] = true + end + -- remove objects that exist in the self.values but do not exist in the new items. + -- for removed items, trigger cleanup handlers. + for _, item in ipairs(self.values) do + local id = item.value.id + if not exist_items[id] then + config_util.fire_all_clean_handlers(item) + else + insert_tab(exist_values, item) + self.values_hash[id] = #exist_values + end + end + self.values = exist_values + else + for _, item in ipairs(self.values) do + config_util.fire_all_clean_handlers(item) + end + self.values = nil + end + end + + if self.single_item then + -- treat items as a single item + self.values = new_tab(1, 0) + self.values_hash = new_tab(0, 1) + + local item = items + local modifiedIndex = item.modifiedIndex or conf_version + local conf_item = {value = item, modifiedIndex = modifiedIndex, + key = "/" .. 
self.key} + + local data_valid = true + local err + if self.item_schema then + data_valid, err = check_schema(self.item_schema, item) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item)) + end + + if data_valid and self.checker then + -- TODO: An opts table should be used + -- as different checkers may use different parameters + data_valid, err = self.checker(item, conf_item.key) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item)) + end + end + end + + if data_valid then + insert_tab(self.values, conf_item) + self.values_hash[self.key] = #self.values + conf_item.clean_handlers = {} + + if self.filter then + self.filter(conf_item) + end + end + + else + if not self.values then + self.values = new_tab(8, 0) + self.values_hash = new_tab(0, 8) + end + + local err + for i, item in ipairs(items) do + local idx = tostring(i) + local data_valid = true + if type(item) ~= "table" then + data_valid = false + log.error("invalid item data of [", self.key .. "/" .. idx, + "], val: ", json.delay_encode(item), + ", it should be an object") + end + + local id = item.id or item.username or ("arr_" .. idx) + local modifiedIndex = item.modifiedIndex or conf_version + local conf_item = {value = item, modifiedIndex = modifiedIndex, + key = "/" .. self.key .. "/" .. id} + + if data_valid and self.item_schema then + data_valid, err = check_schema(self.item_schema, item) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item)) + end + end + + if data_valid and self.checker then + data_valid, err = self.checker(item, conf_item.key) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item)) + end + end + + if data_valid then + local item_id = tostring(id) + local pre_index = self.values_hash[item_id] + if pre_index then + -- remove the old item + local pre_val = self.values[pre_index] + if pre_val and + (not item.modifiedIndex or pre_val.modifiedIndex ~= item.modifiedIndex) then + config_util.fire_all_clean_handlers(pre_val) + self.values[pre_index] = conf_item + conf_item.value.id = item_id + conf_item.clean_handlers = {} + end + else + insert_tab(self.values, conf_item) + self.values_hash[item_id] = #self.values + conf_item.value.id = item_id + conf_item.clean_handlers = {} + end + + if self.filter then + self.filter(conf_item) + end + end + end + end + + self.conf_version = conf_version + return true +end + + +function _M.get(self, key) + if not self.values_hash then + return + end + + local arr_idx = self.values_hash[tostring(key)] + if not arr_idx then + return nil + end + + return self.values[arr_idx] +end + + +local function _automatic_fetch(premature, self) + if premature then + return + end + + -- the _automatic_fetch is only called in the timer, and according to the + -- documentation, ngx.shared.DICT.get can be executed there. 
+ -- if the file's global variables have not yet been assigned values, + -- we can assume that the worker has not been initialized yet and try to + -- read any old data that may be present from the shared dict + -- try load from shared dict only on first startup, otherwise use event mechanism + if is_use_admin_api() and not shared_dict then + log.info("try to load config from shared dict") + + local config, err + shared_dict = ngx_shared["standalone-config"] -- init shared dict in current worker + if not shared_dict then + log.error("failed to read config from shared dict: shared dict not found") + goto SKIP_SHARED_DICT + end + config, err = shared_dict:get("config") + if not config then + if err then -- if the key does not exist, the return values are both nil + log.error("failed to read config from shared dict: ", err) + end + log.info("no config found in shared dict") + goto SKIP_SHARED_DICT + end + log.info("startup config loaded from shared dict: ", config) + + config, err = json.decode(tostring(config)) + if not config then + log.error("failed to decode config from shared dict: ", err) + goto SKIP_SHARED_DICT + end + _M._update_config(config) + log.info("config loaded from shared dict") + + ::SKIP_SHARED_DICT:: + if not shared_dict then + log.crit(_M.ERR_NO_SHARED_DICT) + + -- fill that value to make the worker not try to read from shared dict again + shared_dict = "error" + end + end + + local i = 0 + while not exiting() and self.running and i <= 32 do + i = i + 1 + local ok, ok2, err = pcall(sync_data, self) + if not ok then + err = ok2 + log.error("failed to fetch data from local file " .. config_file.path .. ": ", + err, ", ", tostring(self)) + ngx_sleep(3) + break + + elseif not ok2 and err then + if err ~= "timeout" and err ~= "Key not found" + and self.last_err ~= err then + log.error("failed to fetch data from local file " .. config_file.path .. ": ", + err, ", ", tostring(self)) + end + + if err ~= self.last_err then + self.last_err = err + self.last_err_time = ngx_time() + else + if ngx_time() - self.last_err_time >= 30 then + self.last_err = nil + end + end + ngx_sleep(0.5) + + elseif not ok2 then + ngx_sleep(0.05) + + else + ngx_sleep(0.1) + end + end + + if not exiting() and self.running then + ngx_timer_at(0, _automatic_fetch, self) + end +end + + +function _M.new(key, opts) + local local_conf, err = config_local.local_conf() + if not local_conf then + return nil, err + end + + local automatic = opts and opts.automatic + local item_schema = opts and opts.item_schema + local filter_fun = opts and opts.filter + local single_item = opts and opts.single_item + local checker = opts and opts.checker + + -- like /routes and /upstreams, remove first char `/` + if key then + key = sub_str(key, 2) + end + + local obj = setmetatable({ + automatic = automatic, + item_schema = item_schema, + checker = checker, + sync_times = 0, + running = true, + conf_version = 0, + values = nil, + routes_hash = nil, + prev_index = nil, + last_err = nil, + last_err_time = nil, + key = key, + conf_version_key = key and key .. 
"_conf_version", + single_item = single_item, + filter = filter_fun, + }, mt) + + if automatic then + if not key then + return nil, "missing `key` argument" + end + + local ok, ok2, err = pcall(sync_data, obj) + if not ok then + err = ok2 + end + + if err then + log.error("failed to fetch data from local file ", config_file.path, ": ", + err, ", ", key) + end + + ngx_timer_at(0, _automatic_fetch, obj) + end + + if key then + created_obj[key] = obj + end + + return obj +end + + +function _M.close(self) + self.running = false +end + + +function _M.server_version(self) + return "apisix.yaml " .. _M.version +end + + +function _M.fetch_created_obj(key) + return created_obj[sub_str(key, 2)] +end + + +function _M.fetch_all_created_obj() + return created_obj +end + + +function _M.init() + if is_use_admin_api() then + return true + end + + read_apisix_config() + return true +end + + +function _M.init_worker() + sync_status_to_shdict(false) + if is_use_admin_api() then + apisix_yaml = {} + apisix_yaml_mtime = 0 + return true + end + + -- sync data in each non-master process + ngx.timer.every(1, read_apisix_config) + + return true +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/ctx.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/ctx.lua new file mode 100644 index 0000000..c6f66fb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/ctx.lua @@ -0,0 +1,463 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Define the request context. 
+-- +-- @module core.ctx + +local core_str = require("apisix.core.string") +local core_tab = require("apisix.core.table") +local request = require("apisix.core.request") +local log = require("apisix.core.log") +local json = require("apisix.core.json") +local config_local = require("apisix.core.config_local") +local tablepool = require("tablepool") +local get_var = require("resty.ngxvar").fetch +local get_request = require("resty.ngxvar").request +local ck = require "resty.cookie" +local multipart = require("multipart") +local util = require("apisix.cli.util") +local gq_parse = require("graphql").parse +local jp = require("jsonpath") +local setmetatable = setmetatable +local sub_str = string.sub +local ngx = ngx +local ngx_var = ngx.var +local re_gsub = ngx.re.gsub +local ipairs = ipairs +local type = type +local error = error +local pcall = pcall + + +local _M = {version = 0.2} +local GRAPHQL_DEFAULT_MAX_SIZE = 1048576 -- 1MiB +local GRAPHQL_REQ_DATA_KEY = "query" +local GRAPHQL_REQ_METHOD_HTTP_GET = "GET" +local GRAPHQL_REQ_METHOD_HTTP_POST = "POST" +local GRAPHQL_REQ_MIME_JSON = "application/json" + + +local fetch_graphql_data = { + [GRAPHQL_REQ_METHOD_HTTP_GET] = function(ctx, max_size) + local body = request.get_uri_args(ctx)[GRAPHQL_REQ_DATA_KEY] + if not body then + return nil, "failed to read graphql data, args[" .. + GRAPHQL_REQ_DATA_KEY .. "] is nil" + end + + if type(body) == "table" then + body = body[1] + end + + return body + end, + + [GRAPHQL_REQ_METHOD_HTTP_POST] = function(ctx, max_size) + local body, err = request.get_body(max_size, ctx) + if not body then + return nil, "failed to read graphql data, " .. (err or "request body has zero size") + end + + if request.header(ctx, "Content-Type") == GRAPHQL_REQ_MIME_JSON then + local res + res, err = json.decode(body) + if not res then + return nil, "failed to read graphql data, " .. err + end + + if not res[GRAPHQL_REQ_DATA_KEY] then + return nil, "failed to read graphql data, json body[" .. + GRAPHQL_REQ_DATA_KEY .. "] is nil" + end + + body = res[GRAPHQL_REQ_DATA_KEY] + end + + return body + end +} + + +local function parse_graphql(ctx) + local local_conf, err = config_local.local_conf() + if not local_conf then + return nil, "failed to get local conf: " .. err + end + + local max_size = GRAPHQL_DEFAULT_MAX_SIZE + local size = core_tab.try_read_attr(local_conf, "graphql", "max_size") + if size then + max_size = size + end + + local method = request.get_method() + local func = fetch_graphql_data[method] + if not func then + return nil, "graphql not support `" .. method .. "` request" + end + + local body + body, err = func(ctx, max_size) + if not body then + return nil, err + end + + local ok, res = pcall(gq_parse, body) + if not ok then + return nil, "failed to parse graphql: " .. res .. " body: " .. body + end + + if #res.definitions == 0 then + return nil, "empty graphql: " .. 
body + end + + return res +end + + +local function get_parsed_graphql() + local ctx = ngx.ctx.api_ctx + if ctx._graphql then + return ctx._graphql + end + + local res, err = parse_graphql(ctx) + if not res then + log.error(err) + ctx._graphql = {} + return ctx._graphql + end + + if #res.definitions > 1 then + log.warn("Multiple operations are not supported.", + "Only the first one is handled") + end + + local def = res.definitions[1] + local fields = def.selectionSet.selections + local root_fields = core_tab.new(#fields, 0) + for i, f in ipairs(fields) do + root_fields[i] = f.name.value + end + + local name = "" + if def.name and def.name.value then + name = def.name.value + end + + ctx._graphql = { + name = name, + operation = def.operation, + root_fields = root_fields, + } + + return ctx._graphql +end + + +local CONTENT_TYPE_JSON = "application/json" +local CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +local CONTENT_TYPE_MULTIPART_FORM = "multipart/form-data" + +local function get_parsed_request_body(ctx) + local ct_header = request.header(ctx, "Content-Type") or "" + + if core_str.find(ct_header, CONTENT_TYPE_JSON) then + local request_table, err = request.get_json_request_body_table() + if not request_table then + return nil, "failed to parse JSON body: " .. err + end + return request_table + end + + if core_str.find(ct_header, CONTENT_TYPE_FORM_URLENCODED) then + local args, err = request.get_post_args() + if not args then + return nil, "failed to parse form data: " .. (err or "unknown error") + end + return args + end + + if core_str.find(ct_header, CONTENT_TYPE_MULTIPART_FORM) then + local body = request.get_body() + local res = multipart(body, ct_header) + if not res then + return nil, "failed to parse multipart form data" + end + return res:get_all() + end + + local err = "unsupported content-type in header: " .. ct_header .. + ", supported types are: " .. + CONTENT_TYPE_JSON .. ", " .. + CONTENT_TYPE_FORM_URLENCODED .. ", " .. 
+ CONTENT_TYPE_MULTIPART_FORM + return nil, err +end + + +do + local var_methods = { + method = ngx.req.get_method, + cookie = function () + if ngx.var.http_cookie then + return ck:new() + end + end + } + + local no_cacheable_var_names = { + -- var.args should not be cached as it can be changed via set_uri_args + args = true, + is_args = true, + } + + local ngx_var_names = { + upstream_scheme = true, + upstream_host = true, + upstream_upgrade = true, + upstream_connection = true, + upstream_uri = true, + + upstream_mirror_host = true, + upstream_mirror_uri = true, + + upstream_cache_zone = true, + upstream_cache_zone_info = true, + upstream_no_cache = true, + upstream_cache_key = true, + upstream_cache_bypass = true, + + var_x_forwarded_proto = true, + var_x_forwarded_port = true, + var_x_forwarded_host = true, + } + + -- sort in alphabetical + local apisix_var_names = { + balancer_ip = true, + balancer_port = true, + consumer_group_id = true, + consumer_name = true, + resp_body = function(ctx) + -- only for logger and requires the logger to have a special configuration + return ctx.resp_body or '' + end, + route_id = true, + route_name = true, + service_id = true, + service_name = true, + } + + local mt = { + __index = function(t, key) + local cached = t._cache[key] + if cached ~= nil then + log.debug("serving ctx value from cache for key: ", key) + return cached + end + + if type(key) ~= "string" then + error("invalid argument, expect string value", 2) + end + + local val + local method = var_methods[key] + if method then + val = method() + + elseif core_str.has_prefix(key, "cookie_") then + local cookie = t.cookie + if cookie then + local err + val, err = cookie:get(sub_str(key, 8)) + if err then + log.warn("failed to fetch cookie value by key: ", + key, " error: ", err) + end + end + + elseif core_str.has_prefix(key, "arg_") then + local arg_key = sub_str(key, 5) + local args = request.get_uri_args()[arg_key] + if args then + if type(args) == "table" then + val = args[1] + else + val = args + end + end + + elseif core_str.has_prefix(key, "post_arg_") then + -- only match default post form + local content_type = request.header(nil, "Content-Type") + if content_type ~= nil and core_str.has_prefix(content_type, + "application/x-www-form-urlencoded") then + local arg_key = sub_str(key, 10) + local args = request.get_post_args()[arg_key] + if args then + if type(args) == "table" then + val = args[1] + else + val = args + end + end + end + + elseif core_str.has_prefix(key, "uri_param_") then + -- `uri_param_` provides access to the uri parameters when using + -- radixtree_uri_with_parameter + if t._ctx.curr_req_matched then + local arg_key = sub_str(key, 11) + val = t._ctx.curr_req_matched[arg_key] + end + + elseif core_str.has_prefix(key, "http_") then + local arg_key = key:lower() + arg_key = re_gsub(arg_key, "-", "_", "jo") + val = get_var(arg_key, t._request) + + elseif core_str.has_prefix(key, "graphql_") then + -- trim the "graphql_" prefix + local arg_key = sub_str(key, 9) + val = get_parsed_graphql()[arg_key] + elseif core_str.has_prefix(key, "post_arg.") then + -- trim the "post_arg." prefix (10 characters) + local arg_key = sub_str(key, 10) + local parsed_body, err = get_parsed_request_body(t._ctx) + if not parsed_body then + log.warn("failed to fetch post args value by key: ", arg_key, " error: ", err) + return nil + end + if arg_key:find("[%[%*]") or arg_key:find("..", 1, true) then + arg_key = "$." .. 
arg_key + local results = jp.query(parsed_body, arg_key) + if #results == 0 then + val = nil + else + val = results + end + else + local parts = util.split(arg_key, "(.)") + local current = parsed_body + for _, part in ipairs(parts) do + if type(current) ~= "table" then + current = nil + break + end + current = current[part] + end + val = current + end + + else + local getter = apisix_var_names[key] + if getter then + local ctx = t._ctx + if getter == true then + val = ctx and ctx[key] + else + -- the getter is registered by ctx.register_var + val = getter(ctx) + end + + else + val = get_var(key, t._request) + end + end + + if val ~= nil and not no_cacheable_var_names[key] then + t._cache[key] = val + end + + return val + end, + + __newindex = function(t, key, val) + if ngx_var_names[key] then + ngx_var[key] = val + end + + -- log.info("key: ", key, " new val: ", val) + t._cache[key] = val + end, + } + +--- +-- Register custom variables. +-- Register variables globally, and use them as normal builtin variables. +-- Note that the custom variables can't be used in features that depend +-- on the Nginx directive, like `access_log_format`. +-- +-- @function core.ctx.register_var +-- @tparam string name custom variable name +-- @tparam function getter The fetch function for custom variables. +-- @tparam table opts An optional options table which controls the behavior about the variable +-- @usage +-- local core = require "apisix.core" +-- +-- core.ctx.register_var("a6_labels_zone", function(ctx) +-- local route = ctx.matched_route and ctx.matched_route.value +-- if route and route.labels then +-- return route.labels.zone +-- end +-- return nil +-- end) +-- +-- We support the options below in the `opts`: +-- * no_cacheable: if the result of getter is cacheable or not. Default to `false`. +function _M.register_var(name, getter, opts) + if type(getter) ~= "function" then + error("the getter of registered var should be a function") + end + + apisix_var_names[name] = getter + + if opts then + if opts.no_cacheable then + no_cacheable_var_names[name] = true + end + end +end + +function _M.set_vars_meta(ctx) + local var = tablepool.fetch("ctx_var", 0, 32) + if not var._cache then + var._cache = {} + end + + var._request = get_request() + var._ctx = ctx + setmetatable(var, mt) + ctx.var = var +end + +function _M.release_vars(ctx) + if ctx.var == nil then + return + end + + core_tab.clear(ctx.var._cache) + tablepool.release("ctx_var", ctx.var, true) + ctx.var = nil +end + +end -- do + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/dns/client.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/dns/client.lua new file mode 100644 index 0000000..1bf2aca --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/dns/client.lua @@ -0,0 +1,164 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Wrapped dns search client. +-- +-- @module core.dns.client + +local require = require +local config_local = require("apisix.core.config_local") +local log = require("apisix.core.log") +local json = require("apisix.core.json") +local table = require("apisix.core.table") +local gcd = require("apisix.core.math").gcd +local insert_tab = table.insert +local math_random = math.random +local package_loaded = package.loaded +local ipairs = ipairs +local table_remove = table.remove +local setmetatable = setmetatable + + +local _M = { + RETURN_RANDOM = 1, + RETURN_ALL = 2, +} + + +local function resolve_srv(client, answers) + if #answers == 0 then + return nil, "empty SRV record" + end + + local resolved_answers = {} + local answer_to_count = {} + for _, answer in ipairs(answers) do + if answer.type ~= client.TYPE_SRV then + return nil, "mess SRV with other record" + end + + local resolved, err = client.resolve(answer.target) + if not resolved then + local msg = "failed to resolve SRV record " .. answer.target .. ": " .. err + return nil, msg + end + + log.info("dns resolve SRV ", answer.target, ", result: ", + json.delay_encode(resolved)) + + local weight = answer.weight + if weight == 0 then + weight = 1 + end + + local count = #resolved + answer_to_count[answer] = count + -- one target may have multiple resolved results + for _, res in ipairs(resolved) do + local copy = table.deepcopy(res) + copy.weight = weight / count + copy.port = answer.port + copy.priority = answer.priority + insert_tab(resolved_answers, copy) + end + end + + -- find the least common multiple of the counts + local lcm = answer_to_count[answers[1]] + for i = 2, #answers do + local count = answer_to_count[answers[i]] + lcm = count * lcm / gcd(count, lcm) + end + -- fix the weight as the weight should be integer + for _, res in ipairs(resolved_answers) do + res.weight = res.weight * lcm + end + + return resolved_answers +end + + +function _M.resolve(self, domain, selector) + local client = self.client + + -- this function will dereference the CNAME records + local answers, err = client.resolve(domain) + if not answers then + return nil, "failed to query the DNS server: " .. err + end + + if answers.errcode then + return nil, "server returned error code: " .. answers.errcode + .. ": " .. 
answers.errstr + end + + if selector == _M.RETURN_ALL then + log.info("dns resolve ", domain, ", result: ", json.delay_encode(answers)) + for _, answer in ipairs(answers) do + if answer.type == client.TYPE_SRV then + return resolve_srv(client, answers) + end + end + return table.deepcopy(answers) + end + + local idx = math_random(1, #answers) + local answer = answers[idx] + local dns_type = answer.type + if dns_type == client.TYPE_A or dns_type == client.TYPE_AAAA then + log.info("dns resolve ", domain, ", result: ", json.delay_encode(answer)) + return table.deepcopy(answer) + end + + return nil, "unsupported DNS answer" +end + + +function _M.new(opts) + local local_conf = config_local.local_conf() + + if opts.enable_ipv6 == nil then + opts.enable_ipv6 = local_conf.apisix.enable_ipv6 + end + + -- ensure the resolver throws an error when ipv6 is disabled + if not opts.enable_ipv6 then + for i, v in ipairs(opts.order) do + if v == "AAAA" then + table_remove(opts.order, i) + break + end + end + end + + opts.timeout = 2000 -- 2 sec + opts.retrans = 5 -- 5 retransmissions on receive timeout + + -- make sure each client has its separate room + package_loaded["resty.dns.client"] = nil + local dns_client_mod = require("resty.dns.client") + + local ok, err = dns_client_mod.init(opts) + if not ok then + return nil, "failed to init the dns client: " .. err + end + + return setmetatable({client = dns_client_mod}, {__index = _M}) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/env.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/env.lua new file mode 100644 index 0000000..6a57a70 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/env.lua @@ -0,0 +1,109 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ffi = require "ffi" + +local json = require("apisix.core.json") +local log = require("apisix.core.log") +local string = require("apisix.core.string") + +local os = os +local type = type +local upper = string.upper +local find = string.find +local sub = string.sub +local str = ffi.string + +local ENV_PREFIX = "$ENV://" + +local _M = { + PREFIX = ENV_PREFIX +} + + +local apisix_env_vars = {} + +ffi.cdef [[ + extern char **environ; +]] + + +function _M.init() + local e = ffi.C.environ + if not e then + log.warn("could not access environment variables") + return + end + + local i = 0 + while e[i] ~= nil do + local var = str(e[i]) + local p = find(var, "=") + if p then + apisix_env_vars[sub(var, 1, p - 1)] = sub(var, p + 1) + end + + i = i + 1 + end +end + + +local function parse_env_uri(env_uri) + -- Avoid the error caused by has_prefix to cause a crash. + if type(env_uri) ~= "string" then + return nil, "error env_uri type: " .. 
type(env_uri) + end + + if not string.has_prefix(upper(env_uri), ENV_PREFIX) then + return nil, "error env_uri prefix: " .. env_uri + end + + local path = sub(env_uri, #ENV_PREFIX + 1) + local idx = find(path, "/") + if not idx then + return {key = path, sub_key = ""} + end + local key = sub(path, 1, idx - 1) + local sub_key = sub(path, idx + 1) + + return { + key = key, + sub_key = sub_key + } +end + + +function _M.fetch_by_uri(env_uri) + log.info("fetching data from env uri: ", env_uri) + local opts, err = parse_env_uri(env_uri) + if not opts then + return nil, err + end + + local main_value = apisix_env_vars[opts.key] or os.getenv(opts.key) + if main_value and opts.sub_key ~= "" then + local vt, err = json.decode(main_value) + if not vt then + return nil, "decode failed, err: " .. (err or "") .. ", value: " .. main_value + end + return vt[opts.sub_key] + end + + return main_value +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/etcd.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/etcd.lua new file mode 100644 index 0000000..3caa2f1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/etcd.lua @@ -0,0 +1,676 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Etcd API. +-- +-- @module core.etcd + +local require = require +local fetch_local_conf = require("apisix.core.config_local").local_conf +local array_mt = require("apisix.core.json").array_mt +local log = require("apisix.core.log") +local try_read_attr = require("apisix.core.table").try_read_attr +local v3_adapter = require("apisix.admin.v3_adapter") +local etcd = require("resty.etcd") +local clone_tab = require("table.clone") +local health_check = require("resty.etcd.health_check") +local pl_path = require("pl.path") +local ipairs = ipairs +local setmetatable = setmetatable +local string = string +local tonumber = tonumber +local ngx_get_phase = ngx.get_phase + + +local _M = {} + + +local NOT_ALLOW_WRITE_ETCD_WARN = 'Data plane role should not write to etcd. ' .. + 'This operation will be deprecated in future releases.' 
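+
+-- The write helpers below (set, atomic_set, push, delete, rmdir, keepalive) call
+-- disable_write_if_data_plane() first; for now that check only logs the warning
+-- above on data_plane nodes and still lets the write go through.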
+ +local function is_data_plane() + local local_conf, err = fetch_local_conf() + if not local_conf then + return nil, err + end + + local role = try_read_attr(local_conf, "deployment", "role") + if role == "data_plane" then + return true + end + + return false +end + + + +local function disable_write_if_data_plane() + local data_plane, err = is_data_plane() + if err then + log.error("failed to check data plane role: ", err) + return true, err + end + + if data_plane then + -- current only warn, will be return false in future releases + -- to block etcd write + log.warn(NOT_ALLOW_WRITE_ETCD_WARN) + return false + end + + return false, nil +end + + +local function wrap_etcd_client(etcd_cli) + -- note: methods txn can read and write, don't use txn to write when data plane role + local methods_to_wrap = { + "set", + "setnx", + "setx", + "delete", + "rmdir", + "grant", + "revoke", + "keepalive" + } + + local original_methods = {} + for _, method in ipairs(methods_to_wrap) do + if not etcd_cli[method] then + log.error("method ", method, " not found in etcd client") + return nil, "method " .. method .. " not found in etcd client" + end + + original_methods[method] = etcd_cli[method] + end + + for _, method in ipairs(methods_to_wrap) do + etcd_cli[method] = function(self, ...) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + return original_methods[method](self, ...) + end + end + + return etcd_cli +end + + +local function _new(etcd_conf) + local prefix = etcd_conf.prefix + etcd_conf.http_host = etcd_conf.host + etcd_conf.host = nil + etcd_conf.prefix = nil + etcd_conf.protocol = "v3" + etcd_conf.api_prefix = "/v3" + + -- default to verify etcd cluster certificate + etcd_conf.ssl_verify = true + if etcd_conf.tls then + if etcd_conf.tls.verify == false then + etcd_conf.ssl_verify = false + end + + if etcd_conf.tls.cert then + etcd_conf.ssl_cert_path = etcd_conf.tls.cert + etcd_conf.ssl_key_path = etcd_conf.tls.key + end + + if etcd_conf.tls.sni then + etcd_conf.sni = etcd_conf.tls.sni + end + end + + local etcd_cli, err = etcd.new(etcd_conf) + if not etcd_cli then + return nil, nil, err + end + + etcd_cli = wrap_etcd_client(etcd_cli) + + return etcd_cli, prefix +end + + +--- +-- Create an etcd client which will connect to etcd without being proxyed by conf server. +-- This method is used in init_worker phase when the conf server is not ready. +-- +-- @function core.etcd.new_without_proxy +-- @treturn table|nil the etcd client, or nil if failed. +-- @treturn string|nil the configured prefix of etcd keys, or nil if failed. +-- @treturn nil|string the error message. 
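+-- @usage
+-- -- a minimal sketch; the error handling below is illustrative
+-- local core = require("apisix.core")
+--
+-- local etcd_cli, prefix, err = core.etcd.new_without_proxy()
+-- if not etcd_cli then
+--     core.log.error("failed to create etcd client: ", err)
+-- end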
+local function new_without_proxy() + local local_conf, err = fetch_local_conf() + if not local_conf then + return nil, nil, err + end + + local etcd_conf = clone_tab(local_conf.etcd) + + if local_conf.apisix.ssl and local_conf.apisix.ssl.ssl_trusted_certificate then + etcd_conf.trusted_ca = local_conf.apisix.ssl.ssl_trusted_certificate + end + + return _new(etcd_conf) +end +_M.new_without_proxy = new_without_proxy + + +local function new() + local local_conf, err = fetch_local_conf() + if not local_conf then + return nil, nil, err + end + + local etcd_conf = clone_tab(local_conf.etcd) + + if local_conf.apisix.ssl and local_conf.apisix.ssl.ssl_trusted_certificate then + etcd_conf.trusted_ca = local_conf.apisix.ssl.ssl_trusted_certificate + end + + if not health_check.conf then + health_check.init({ + max_fails = 1, + retry = true, + }) + end + + return _new(etcd_conf) +end +_M.new = new + + +local function switch_proxy() + if ngx_get_phase() == "init" or ngx_get_phase() == "init_worker" then + return new_without_proxy() + end + + local etcd_cli, prefix, err = new() + if not etcd_cli or err then + return etcd_cli, prefix, err + end + + if not etcd_cli.unix_socket_proxy then + return etcd_cli, prefix, err + end + local sock_path = etcd_cli.unix_socket_proxy:sub(#"unix:" + 1) + local ok = pl_path.exists(sock_path) + if not ok then + return new_without_proxy() + end + + return etcd_cli, prefix, err +end +_M.get_etcd_syncer = switch_proxy + +-- convert ETCD v3 entry to v2 one +local function kvs_to_node(kvs) + local node = {} + node.key = kvs.key + node.value = kvs.value + node.createdIndex = tonumber(kvs.create_revision) + node.modifiedIndex = tonumber(kvs.mod_revision) + return node +end +_M.kvs_to_node = kvs_to_node + +local function kvs_to_nodes(res, exclude_dir) + res.body.node.dir = true + res.body.node.nodes = setmetatable({}, array_mt) + if exclude_dir then + for i=2, #res.body.kvs do + res.body.node.nodes[i-1] = kvs_to_node(res.body.kvs[i]) + end + else + for i=1, #res.body.kvs do + res.body.node.nodes[i] = kvs_to_node(res.body.kvs[i]) + end + end + return res +end + + +local function not_found(res) + res.body.message = "Key not found" + res.reason = "Not found" + res.status = 404 + return res +end + + +-- When `is_dir` is true, returns the value of both the dir key and its descendants. +-- Otherwise, return the value of key only. 
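+-- Rough shape of the conversion (key and value below are illustrative):
+--   v3 input : res.body.kvs  = { { key = "/apisix/routes/1", value = { ... } } }
+--   v2 output: res.body.node = { key = "/apisix/routes/1", value = { ... },
+--                                createdIndex = ..., modifiedIndex = ... }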
+function _M.get_format(res, real_key, is_dir, formatter) + if res.body.error == "etcdserver: user name is empty" then + return nil, "insufficient credentials code: 401" + end + + if res.body.error == "etcdserver: permission denied" then + return nil, "etcd forbidden code: 403" + end + + if res.body.error then + -- other errors, like "grpc: received message larger than max" + return nil, res.body.error + end + + res.headers["X-Etcd-Index"] = res.body.header.revision + + if not res.body.kvs then + return not_found(res) + end + + v3_adapter.to_v3(res.body, "get") + + if formatter then + return formatter(res) + end + + if not is_dir then + local key = res.body.kvs[1].key + if key ~= real_key then + return not_found(res) + end + + res.body.node = kvs_to_node(res.body.kvs[1]) + + else + -- In etcd v2, the direct key asked for is `node`, others which under this dir are `nodes` + -- While in v3, this structure is flatten and all keys related the key asked for are `kvs` + res.body.node = kvs_to_node(res.body.kvs[1]) + -- we have a init_dir (for etcd v2) value that can't be deserialized with json, + -- but we don't put init_dir for new resource type like consumer credential + if not res.body.kvs[1].value then + -- remove last "/" when necessary + if string.byte(res.body.node.key, -1) == 47 then + res.body.node.key = string.sub(res.body.node.key, 1, #res.body.node.key-1) + end + res = kvs_to_nodes(res, true) + else + -- get dir key by remove last part of node key, + -- for example: /apisix/consumers/jack -> /apisix/consumers + local last_slash_index = string.find(res.body.node.key, "/[^/]*$") + if last_slash_index then + res.body.node.key = string.sub(res.body.node.key, 1, last_slash_index-1) + end + res = kvs_to_nodes(res, false) + end + end + + res.body.kvs = nil + v3_adapter.to_v3_list(res.body) + return res +end + + +function _M.watch_format(v3res) + local v2res = {} + v2res.headers = { + ["X-Etcd-Index"] = v3res.result.header.revision + } + v2res.body = { + node = {} + } + + local compact_revision = v3res.result.compact_revision + if compact_revision and tonumber(compact_revision) > 0 then + -- When the revisions are compacted, there might be compacted changes + -- which are unsynced. So we need to do a fully sync. 
+ -- TODO: cover this branch in CI + return nil, "compacted" + end + + for i, event in ipairs(v3res.result.events) do + v2res.body.node[i] = kvs_to_node(event.kv) + if event.type == "DELETE" then + v2res.body.action = "delete" + end + end + + return v2res +end + + +local get_etcd_cli +do + local prefix + local etcd_cli_init_phase + local etcd_cli + local tmp_etcd_cli + + function get_etcd_cli() + local err + if ngx_get_phase() == "init" or ngx_get_phase() == "init_worker" then + if etcd_cli_init_phase == nil then + tmp_etcd_cli, prefix, err = new_without_proxy() + if not tmp_etcd_cli then + return nil, nil, err + end + + return tmp_etcd_cli, prefix + end + + return etcd_cli_init_phase, prefix + end + + if etcd_cli_init_phase ~= nil then + -- we can't share the etcd instance created in init* phase + -- they have different configuration + etcd_cli_init_phase:close() + etcd_cli_init_phase = nil + end + + if etcd_cli == nil then + tmp_etcd_cli, prefix, err = switch_proxy() + if not tmp_etcd_cli then + return nil, nil, err + end + + etcd_cli = tmp_etcd_cli + + return tmp_etcd_cli, prefix + end + + return etcd_cli, prefix + end +end +-- export it so we can mock the etcd cli in test +_M.get_etcd_cli = get_etcd_cli + + +function _M.get(key, is_dir) + local etcd_cli, prefix, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + key = prefix .. key + + -- in etcd v2, get could implicitly turn into readdir + -- while in v3, we need to do it explicitly + local res, err = etcd_cli:readdir(key) + if not res then + return nil, err + end + return _M.get_format(res, key, is_dir) +end + + +local function set(key, value, ttl) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + + local etcd_cli, prefix, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + -- lease substitute ttl in v3 + local res, err + if ttl then + local data, grant_err = etcd_cli:grant(tonumber(ttl)) + if not data then + return nil, grant_err + end + + res, err = etcd_cli:set(prefix .. key, value, {prev_kv = true, lease = data.body.ID}) + if not res then + return nil, err + end + + res.body.lease_id = data.body.ID + else + res, err = etcd_cli:set(prefix .. key, value, {prev_kv = true}) + end + if not res then + return nil, err + end + + if res.body.error then + return nil, res.body.error + end + + res.headers["X-Etcd-Index"] = res.body.header.revision + + -- etcd v3 set would not return kv info + v3_adapter.to_v3(res.body, "set") + res.body.node = {} + res.body.node.key = prefix .. key + res.body.node.value = value + res.status = 201 + if res.body.prev_kv then + res.status = 200 + res.body.prev_kv = nil + end + + return res, nil +end +_M.set = set + + +function _M.atomic_set(key, value, ttl, mod_revision) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + local etcd_cli, prefix, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + local lease_id + if ttl then + local data, grant_err = etcd_cli:grant(tonumber(ttl)) + if not data then + return nil, grant_err + end + + lease_id = data.body.ID + end + + key = prefix .. 
key + + local compare = { + { + key = key, + target = "MOD", + result = "EQUAL", + mod_revision = mod_revision, + } + } + + local success = { + { + requestPut = { + key = key, + value = value, + lease = lease_id, + } + } + } + + local res, err = etcd_cli:txn(compare, success) + if not res then + return nil, err + end + + if not res.body.succeeded then + return nil, "value changed before overwritten" + end + + res.headers["X-Etcd-Index"] = res.body.header.revision + -- etcd v3 set would not return kv info + v3_adapter.to_v3(res.body, "compareAndSwap") + res.body.node = { + key = key, + value = value, + } + + return res, nil +end + + + +function _M.push(key, value, ttl) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + local etcd_cli, _, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + -- Create a new revision and use it as the id. + -- It will be better if we use snowflake algorithm like manager-api, + -- but we haven't found a good library. It costs too much to write + -- our own one as the admin-api will be replaced by manager-api finally. + local res, err = set("/gen_id", 1) + if not res then + return nil, err + end + + -- manually add suffix + local index = res.body.header.revision + index = string.format("%020d", index) + + -- set the basic id attribute + value.id = index + + res, err = set(key .. "/" .. index, value, ttl) + if not res then + return nil, err + end + + v3_adapter.to_v3(res.body, "create") + return res, nil +end + + +function _M.delete(key) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + local etcd_cli, prefix, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + local res, err = etcd_cli:delete(prefix .. key) + + if not res then + return nil, err + end + + res.headers["X-Etcd-Index"] = res.body.header.revision + + if not res.body.deleted then + return not_found(res), nil + end + + -- etcd v3 set would not return kv info + v3_adapter.to_v3(res.body, "delete") + res.body.node = {} + res.body.key = prefix .. key + + return res, nil +end + +function _M.rmdir(key, opts) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + local etcd_cli, prefix, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + local res, err = etcd_cli:rmdir(prefix .. key, opts) + if not res then + return nil, err + end + + res.headers["X-Etcd-Index"] = res.body.header.revision + + if not res.body.deleted then + return not_found(res), nil + end + + v3_adapter.to_v3(res.body, "delete") + res.body.node = {} + res.body.key = prefix .. key + + return res, nil +end + +--- +-- Get etcd cluster and server version. +-- +-- @function core.etcd.server_version +-- @treturn table The response of query etcd server version. 
+-- @usage +-- local res, err = core.etcd.server_version() +-- -- the res.body is as follows: +-- -- { +-- -- etcdcluster = "3.5.0", +-- -- etcdserver = "3.5.0" +-- -- } +function _M.server_version() + local etcd_cli, _, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + return etcd_cli:version() +end + + +function _M.keepalive(id) + local disable, err = disable_write_if_data_plane() + if disable then + return nil, err + end + + local etcd_cli, _, err = get_etcd_cli() + if not etcd_cli then + return nil, err + end + + local res, err = etcd_cli:keepalive(id) + if not res then + return nil, err + end + + return res, nil +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/event.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/event.lua new file mode 100644 index 0000000..006cd1a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/event.lua @@ -0,0 +1,45 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local CONST = { + BUILD_ROUTER = 1, +} + +local _M = { + CONST = CONST, +} + +local events = {} + + +function _M.push(type, ...) + local handler = events[type] + if handler then + handler(...) + end +end + +function _M.register(type, handler) + -- TODO: we can register more than one handler + events[type] = handler +end + +function _M.unregister(type) + events[type] = nil +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/id.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/id.lua new file mode 100644 index 0000000..ef8f727 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/id.lua @@ -0,0 +1,169 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +--- Instance id of APISIX +-- +-- @module core.id + +local fetch_local_conf = require("apisix.core.config_local").local_conf +local try_read_attr = require("apisix.core.table").try_read_attr +local profile = require("apisix.core.profile") +local log = require("apisix.core.log") +local uuid = require("resty.jit-uuid") +local lyaml = require("lyaml") +local smatch = string.match +local open = io.open +local type = type +local ipairs = ipairs +local string = string +local math = math +local prefix = ngx.config.prefix() +local pairs = pairs +local ngx_exit = ngx.exit +local apisix_uid + +local _M = {version = 0.1} + + +local function rtrim(str) + return smatch(str, "^(.-)%s*$") +end + + +local function read_file(path) + local file = open(path, "rb") -- r read mode and b binary mode + if not file then + return nil + end + + local content = file:read("*a") -- *a or *all reads the whole file + file:close() + return rtrim(content) +end + + +local function write_file(path, data) + local file = open(path, "w+") + if not file then + return nil, "failed to open file[" .. path .. "] for writing" + end + + file:write(data) + file:close() + return true +end + + +local function generate_yaml(table) + -- By default lyaml will parse null values as [] + -- The following logic is a workaround so that null values are parsed as null + local function replace_null(tbl) + for k, v in pairs(tbl) do + if type(v) == "table" then + replace_null(v) + elseif v == nil then + tbl[k] = "" + end + end + end + + -- Replace null values with "" + replace_null(table) + local yaml = lyaml.dump({ table }) + yaml = yaml:gsub("", "null"):gsub("%[%s*%]", "null") + return yaml +end + + +_M.gen_uuid_v4 = uuid.generate_v4 + + +--- This will autogenerate the admin key if it's passed as an empty string in the configuration. +local function autogenerate_admin_key(default_conf) + local changed = false + -- Check if deployment.role is either traditional or control_plane + local deployment_role = default_conf.deployment and default_conf.deployment.role + if deployment_role and (deployment_role == "traditional" or + deployment_role == "control_plane") then + -- Check if deployment.admin.admin_key is not nil and it's an empty string + local admin_keys = try_read_attr(default_conf, "deployment", "admin", "admin_key") + if admin_keys and type(admin_keys) == "table" then + for i, admin_key in ipairs(admin_keys) do + if admin_key.role == "admin" and admin_key.key == "" then + changed = true + admin_keys[i].key = "" + for _ = 1, 32 do + admin_keys[i].key = admin_keys[i].key .. + string.char(math.random(65, 90) + math.random(0, 1) * 32) + end + end + end + end + end + return default_conf,changed +end + + +function _M.init() + local local_conf = fetch_local_conf() + + local local_conf, changed = autogenerate_admin_key(local_conf) + if changed then + local yaml_conf = generate_yaml(local_conf) + local local_conf_path = profile:yaml_path("config") + local ok, err = write_file(local_conf_path, yaml_conf) + if not ok then + log.error("failed to write updated local configuration: ", err) + ngx_exit(-1) + end + end + + --allow user to specify a meaningful id as apisix instance id + local uid_file_path = prefix .. 
"/conf/apisix.uid" + apisix_uid = read_file(uid_file_path) + if apisix_uid then + return + end + + local id = try_read_attr(local_conf, "apisix", "id") + if id then + apisix_uid = local_conf.apisix.id + else + uuid.seed() + apisix_uid = uuid.generate_v4() + log.notice("not found apisix uid, generate a new one: ", apisix_uid) + end + + local ok, err = write_file(uid_file_path, apisix_uid) + if not ok then + log.error(err) + end +end + + +--- +-- Returns the instance id of the running APISIX +-- +-- @function core.id.get +-- @treturn string the instance id +-- @usage +-- local apisix_id = core.id.get() +function _M.get() + return apisix_uid +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/io.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/io.lua new file mode 100644 index 0000000..ad1b229 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/io.lua @@ -0,0 +1,50 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- I/O operations on files. +-- +-- @module core.io + +local open = io.open + + +local _M = {} + +--- +-- Read the contents of a file. +-- +-- @function core.io.get_file +-- @tparam string file_name either an absolute path or +-- a relative path based on the APISIX working directory. +-- @treturn string The file content. +-- @usage +-- local file_content, err = core.io.get_file("conf/apisix.uid") +-- -- the `file_content` maybe the APISIX instance id in uuid format, +-- -- like "3f0e827b-5f26-440e-8074-c101c8eb0174" +function _M.get_file(file_name) + local f, err = open(file_name, 'r') + if not f then + return nil, err + end + + local req_body = f:read("*all") + f:close() + return req_body +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/ip.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/ip.lua new file mode 100644 index 0000000..5a762be --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/ip.lua @@ -0,0 +1,80 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +--- IP match and verify module. +-- +-- @module core.ip + +local json = require("apisix.core.json") +local log = require("apisix.core.log") +local ipmatcher = require("resty.ipmatcher") +local str_sub = string.sub +local str_find = require("apisix.core.string").find +local tonumber = tonumber + + +local _M = {} + + +function _M.create_ip_matcher(ip_list) + local ip, err = ipmatcher.new(ip_list) + if not ip then + log.error("failed to create ip matcher: ", err, + " ip list: ", json.delay_encode(ip_list)) + return nil + end + + return ip +end + +--- +-- Verify that the given ip is a valid ip or cidr. +-- +-- @function core.ip.validate_cidr_or_ip +-- @tparam string ip IP or cidr. +-- @treturn boolean True if the given ip is a valid ip or cidr, false otherwise. +-- @usage +-- local ip1 = core.ip.validate_cidr_or_ip("127.0.0.1") -- true +-- local cidr = core.ip.validate_cidr_or_ip("113.74.26.106/24") -- true +-- local ip2 = core.ip.validate_cidr_or_ip("113.74.26.666") -- false +function _M.validate_cidr_or_ip(ip) + local mask = 0 + local sep_pos = str_find(ip, "/") + if sep_pos then + mask = str_sub(ip, sep_pos + 1) + mask = tonumber(mask) + if mask < 0 or mask > 128 then + return false + end + ip = str_sub(ip, 1, sep_pos - 1) + end + + if ipmatcher.parse_ipv4(ip) then + if mask < 0 or mask > 32 then + return false + end + return true + end + + if mask < 0 or mask > 128 then + return false + end + return ipmatcher.parse_ipv6(ip) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/json.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/json.lua new file mode 100644 index 0000000..4341c46 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/json.lua @@ -0,0 +1,132 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Wrapped serialization and deserialization modules for json and lua tables. +-- +-- @module core.json + +local cjson = require("cjson.safe") +local json_encode = cjson.encode +local clear_tab = require("table.clear") +local ngx = ngx +local tostring = tostring +local type = type +local pairs = pairs +local cached_tab = {} + + +cjson.encode_escape_forward_slash(false) +cjson.decode_array_with_array_mt(true) +local _M = { + version = 0.1, + array_mt = cjson.array_mt, + decode = cjson.decode, + -- This method produces the same encoded string when the input is not changed. + -- Different calls with cjson.encode will produce different string because + -- it doesn't maintain the object key order. 
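+    -- Illustrative note (not upstream documentation): stably_encode suits cases where the
+    -- encoded string is compared across calls or used as a cache key, while the cjson-based
+    -- encode is typically faster but does not guarantee a stable key order.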
+ stably_encode = require("dkjson").encode +} + + +local function serialise_obj(data) + if type(data) == "function" or type(data) == "userdata" + or type(data) == "cdata" + or type(data) == "table" then + return tostring(data) + end + + return data +end + + +local function tab_clone_with_serialise(data) + if type(data) ~= "table" then + return serialise_obj(data) + end + + local t = {} + for k, v in pairs(data) do + if type(v) == "table" then + if cached_tab[v] then + t[serialise_obj(k)] = tostring(v) + else + cached_tab[v] = true + t[serialise_obj(k)] = tab_clone_with_serialise(v) + end + + else + t[serialise_obj(k)] = serialise_obj(v) + end + end + + return t +end + + +local function encode(data, force) + if force then + clear_tab(cached_tab) + data = tab_clone_with_serialise(data) + end + + return json_encode(data) +end +_M.encode = encode + +local max_delay_encode_items = 16 +local delay_tab_idx = 0 +local delay_tab_arr = {} +for i = 1, max_delay_encode_items do + delay_tab_arr[i] = setmetatable({data = "", force = false}, { + __tostring = function(self) + local res, err = encode(self.data, self.force) + if not res then + ngx.log(ngx.WARN, "failed to encode: ", err, + " force: ", self.force) + end + + return res + end + }) +end + + + +--- +-- Delayed encoding of input data, avoid unnecessary encode operations. +-- When really writing logs, if the given parameter is table, it will be converted to string in +-- OpenResty by checking if there is a metamethod registered for `__tostring`, and if so, +-- calling this method to convert it to string. +-- +-- @function core.json.delay_encode +-- @tparam string|table data The data to be encoded. +-- @tparam boolean force encode data can't be encoded as JSON with tostring +-- @treturn table The table with the __tostring function overridden. +-- @usage +-- core.log.info("conf : ", core.json.delay_encode(conf)) +function _M.delay_encode(data, force) + delay_tab_idx = delay_tab_idx+1 + if delay_tab_idx > max_delay_encode_items then + delay_tab_idx = 1 + end + delay_tab_arr[delay_tab_idx].data = data + delay_tab_arr[delay_tab_idx].force = force + return delay_tab_arr[delay_tab_idx] +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/log.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/log.lua new file mode 100644 index 0000000..b59e0b7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/log.lua @@ -0,0 +1,173 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Wrapped `ngx.log`. 
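+-- A brief usage sketch (the prefix and messages are illustrative):
+--   local log = require("apisix.core.log")
+--   log.warn("something unexpected happened")    -- level methods are generated lazily
+--   local plog = log.new("[my-plugin] ")          -- the returned logger prepends the prefix
+--   plog.info("plugin initialised")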
+-- +-- @module core.log + +local ngx = ngx +local ngx_log = ngx.log +local require = require +local select = select +local setmetatable = setmetatable +local tostring = tostring +local unpack = unpack +-- avoid loading other module since core.log is the most foundational one +local tab_clear = require("table.clear") +local ngx_errlog = require("ngx.errlog") +local ngx_get_phase = ngx.get_phase + + +local _M = {version = 0.4} + + +local log_levels = { + stderr = ngx.STDERR, + emerg = ngx.EMERG, + alert = ngx.ALERT, + crit = ngx.CRIT, + error = ngx.ERR, + warn = ngx.WARN, + notice = ngx.NOTICE, + info = ngx.INFO, + debug = ngx.DEBUG, +} + + +local cur_level + +local do_nothing = function() end + + +local function update_log_level() + -- Nginx use `notice` level in init phase instead of error_log directive config + -- Ref to src/core/ngx_log.c's ngx_log_init + if ngx_get_phase() ~= "init" then + cur_level = ngx.config.subsystem == "http" and ngx_errlog.get_sys_filter_level() + end +end + + +function _M.new(prefix) + local m = {version = _M.version} + setmetatable(m, {__index = function(self, cmd) + local log_level = log_levels[cmd] + local method + update_log_level() + + if cur_level and (log_level > cur_level) + then + method = do_nothing + else + method = function(...) + return ngx_log(log_level, prefix, ...) + end + end + + -- cache the lazily generated method in our + -- module table + if ngx_get_phase() ~= "init" then + self[cmd] = method + end + + return method + end}) + + return m +end + + +setmetatable(_M, {__index = function(self, cmd) + local log_level = log_levels[cmd] + local method + update_log_level() + + if cur_level and (log_level > cur_level) + then + method = do_nothing + else + method = function(...) + return ngx_log(log_level, ...) + end + end + + -- cache the lazily generated method in our + -- module table + if ngx_get_phase() ~= "init" then + self[cmd] = method + end + + return method +end}) + + +local delay_tab = setmetatable({ + func = function() end, + args = {}, + res = nil, + }, { + __tostring = function(self) + -- the `__tostring` will be called twice, the first to get the length and + -- the second to get the data + if self.res then + local res = self.res + -- avoid unexpected reference + self.res = nil + return res + end + + local res, err = self.func(unpack(self.args)) + if err then + ngx.log(ngx.WARN, "failed to exec: ", err) + end + + -- avoid unexpected reference + tab_clear(self.args) + self.res = tostring(res) + return self.res + end +}) + + +--- +-- Delayed execute log printing. +-- It works well with log.$level, eg: log.info(..., log.delay_exec(func, ...)) +-- Should not use it elsewhere. +-- +-- @function core.log.delay_exec +-- @tparam function func Functions that need to be delayed during log printing. +-- @treturn table The table with the res attribute overridden. +-- @usage +-- local function delay_func(param1, param2) +-- return param1 .. " " .. param2 +-- end +-- core.log.info("delay log print: ", core.log.delay_exec(delay_func, "hello", "world)) +-- -- then the log will be: "delay log print: hello world" +function _M.delay_exec(func, ...) + delay_tab.func = func + + tab_clear(delay_tab.args) + for i = 1, select('#', ...) do + delay_tab.args[i] = select(i, ...) 
+ end + + delay_tab.res = nil + return delay_tab +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/lrucache.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/lrucache.lua new file mode 100644 index 0000000..5c81dd3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/lrucache.lua @@ -0,0 +1,193 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- LRU Caching Implementation. +-- +-- @module core.lrucache + +local lru_new = require("resty.lrucache").new +local resty_lock = require("resty.lock") +local log = require("apisix.core.log") +local tostring = tostring +local ngx = ngx +local get_phase = ngx.get_phase + + +local lock_shdict_name = "lrucache-lock" +if ngx.config.subsystem == "stream" then + lock_shdict_name = lock_shdict_name .. "-" .. ngx.config.subsystem +end + + +local can_yield_phases = { + ssl_session_fetch = true, + ssl_session_store = true, + rewrite = true, + access = true, + content = true, + timer = true +} + +local GLOBAL_ITEMS_COUNT = 1024 +local GLOBAL_TTL = 60 * 60 -- 60 min +local PLUGIN_TTL = 5 * 60 -- 5 min +local PLUGIN_ITEMS_COUNT = 8 +local global_lru_fun + + +local function fetch_valid_cache(lru_obj, invalid_stale, item_ttl, + item_release, key, version) + local obj, stale_obj = lru_obj:get(key) + if obj and obj.ver == version then + return obj + end + + if not invalid_stale and stale_obj and stale_obj.ver == version then + lru_obj:set(key, stale_obj, item_ttl) + return stale_obj + end + + if item_release and obj then + item_release(obj.val) + end + + return nil +end + + +local function new_lru_fun(opts) + local item_count, item_ttl + if opts and opts.type == 'plugin' then + item_count = opts.count or PLUGIN_ITEMS_COUNT + item_ttl = opts.ttl or PLUGIN_TTL + else + item_count = opts and opts.count or GLOBAL_ITEMS_COUNT + item_ttl = opts and opts.ttl or GLOBAL_TTL + end + + local item_release = opts and opts.release + local invalid_stale = opts and opts.invalid_stale + local serial_creating = opts and opts.serial_creating + local lru_obj = lru_new(item_count) + + return function (key, version, create_obj_fun, ...) + if not serial_creating or not can_yield_phases[get_phase()] then + local cache_obj = fetch_valid_cache(lru_obj, invalid_stale, + item_ttl, item_release, key, version) + if cache_obj then + return cache_obj.val + end + + local obj, err = create_obj_fun(...) + if obj ~= nil then + lru_obj:set(key, {val = obj, ver = version}, item_ttl) + end + + return obj, err + end + + local cache_obj = fetch_valid_cache(lru_obj, invalid_stale, item_ttl, + item_release, key, version) + if cache_obj then + return cache_obj.val + end + + local lock, err = resty_lock:new(lock_shdict_name) + if not lock then + return nil, "failed to create lock: " .. 
err + end + + local key_s = tostring(key) + log.info("try to lock with key ", key_s) + + local elapsed, err = lock:lock(key_s) + if not elapsed then + return nil, "failed to acquire the lock: " .. err + end + + cache_obj = fetch_valid_cache(lru_obj, invalid_stale, item_ttl, + nil, key, version) + if cache_obj then + lock:unlock() + log.info("unlock with key ", key_s) + return cache_obj.val + end + + local obj, err = create_obj_fun(...) + if obj ~= nil then + lru_obj:set(key, {val = obj, ver = version}, item_ttl) + end + lock:unlock() + log.info("unlock with key ", key_s) + + return obj, err + end +end + + +global_lru_fun = new_lru_fun() + + +local function plugin_ctx_key_and_ver(api_ctx, extra_key) + local key = api_ctx.conf_type .. "#" .. api_ctx.conf_id + + if extra_key then + key = key .. "#" .. extra_key + end + + return key, api_ctx.conf_version +end + +--- +-- Cache some objects for plugins to avoid duplicate resources creation. +-- +-- @function core.lrucache.plugin_ctx +-- @tparam table lrucache LRUCache object instance. +-- @tparam table api_ctx The request context. +-- @tparam string extra_key Additional parameters for generating the lrucache identification key. +-- @tparam function create_obj_func Functions for creating cache objects. +-- If the object does not exist in the lrucache, this function is +-- called to create it and cache it in the lrucache. +-- @treturn table The object cached in lrucache. +-- @usage +-- local function create_obj() { +-- -- create the object +-- -- return the object +-- } +-- local obj, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, create_obj) +-- -- obj is the object cached in lrucache +local function plugin_ctx(lrucache, api_ctx, extra_key, create_obj_func, ...) + local key, ver = plugin_ctx_key_and_ver(api_ctx, extra_key) + return lrucache(key, ver, create_obj_func, ...) +end + +local function plugin_ctx_id(api_ctx, extra_key) + local key, ver = plugin_ctx_key_and_ver(api_ctx, extra_key) + return key .. "#" .. ver +end + + +local _M = { + version = 0.1, + new = new_lru_fun, + global = global_lru_fun, + plugin_ctx = plugin_ctx, + plugin_ctx_id = plugin_ctx_id, +} + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/math.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/math.lua new file mode 100644 index 0000000..1514cf7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/math.lua @@ -0,0 +1,41 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +--- Common library about math +-- +-- @module core.math +local _M = {} + + +--- +-- Calculate the greatest common divisor (GCD) of two numbers +-- +-- @function core.math.gcd +-- @tparam number a +-- @tparam number b +-- @treturn number the GCD of a and b +local function gcd(a, b) + if b == 0 then + return a + end + + return gcd(b, a % b) +end +_M.gcd = gcd + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/os.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/os.lua new file mode 100644 index 0000000..4a922d0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/os.lua @@ -0,0 +1,118 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- OS module. +-- +-- @module core.os + +local ffi = require("ffi") +local ffi_str = ffi.string +local ffi_errno = ffi.errno +local C = ffi.C +local ceil = math.ceil +local floor = math.floor +local error = error +local tostring = tostring +local type = type + + +local _M = {} +local WNOHANG = 1 + + +ffi.cdef[[ + typedef int32_t pid_t; + typedef unsigned int useconds_t; + + int setenv(const char *name, const char *value, int overwrite); + char *strerror(int errnum); + + int usleep(useconds_t usec); + pid_t waitpid(pid_t pid, int *wstatus, int options); +]] + + +local function err() + return ffi_str(C.strerror(ffi_errno())) +end + +--- +-- Sets the value of the environment variable. +-- +-- @function core.os.setenv +-- @tparam string name The name of environment variable. +-- @tparam string value The value of environment variable. +-- @treturn boolean Results of setting environment variables, true on success. +-- @usage +-- local ok, err = core.os.setenv("foo", "bar") +function _M.setenv(name, value) + local tv = type(value) + if type(name) ~= "string" or (tv ~= "string" and tv ~= "number") then + return false, "invalid argument" + end + + value = tostring(value) + local ok = C.setenv(name, value, 1) == 0 + if not ok then + return false, err() + end + return true +end + + +--- +-- sleep blockingly in microseconds +-- +-- @function core.os.usleep +-- @tparam number us The number of microseconds. +local function usleep(us) + if ceil(us) ~= floor(us) then + error("bad microseconds: " .. 
us) + end + C.usleep(us) +end +_M.usleep = usleep + + +local function waitpid_nohang(pid) + local res = C.waitpid(pid, nil, WNOHANG) + if res == -1 then + return nil, err() + end + return res > 0 +end + + +function _M.waitpid(pid, timeout) + local count = 0 + local step = 1000 * 10 + local total = timeout * 1000 * 1000 + while step * count < total do + count = count + 1 + usleep(step) + local ok, err = waitpid_nohang(pid) + if err then + return nil, err + end + if ok then + return true + end + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/profile.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/profile.lua new file mode 100644 index 0000000..a5dcdc8 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/profile.lua @@ -0,0 +1,67 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Profile module. +-- +-- @module core.profile + +local util = require("apisix.cli.util") + +local _M = { + version = 0.1, + profile = os.getenv("APISIX_PROFILE") or "", + apisix_home = (ngx and ngx.config.prefix()) or "" +} + +--- +-- Get yaml file path by filename under the `conf/`. +-- +-- @function core.profile.yaml_path +-- @tparam self self The profile module itself. +-- @tparam string file_name Name of the yaml file to search. +-- @treturn string The path of yaml file searched. +-- @usage +-- local profile = require("apisix.core.profile") +-- ...... +-- -- set the working directory of APISIX +-- profile.apisix_home = env.apisix_home .. "/" +-- local local_conf_path = profile:yaml_path("config") +function _M.yaml_path(self, file_name) + local file_path = self.apisix_home .. "conf/" .. file_name + if self.profile ~= "" and file_name ~= "config-default" then + file_path = file_path .. "-" .. self.profile + end + + return file_path .. ".yaml" +end + + +function _M.customized_yaml_index(self) + return self.apisix_home .. "/conf/.customized_config_path" +end + + +function _M.customized_yaml_path(self) + local customized_config_index = self:customized_yaml_index() + if util.file_exists(customized_config_index) then + return util.read_file(customized_config_index) + end + return nil +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/pubsub.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/pubsub.lua new file mode 100644 index 0000000..5b36b0c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/pubsub.lua @@ -0,0 +1,238 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Extensible framework to support publish-and-subscribe scenarios +-- +-- @module core.pubsub + +local log = require("apisix.core.log") +local ws_server = require("resty.websocket.server") +local protoc = require("protoc") +local pb = require("pb") +local ngx = ngx +local setmetatable = setmetatable +local pcall = pcall + + +local _M = { version = 0.1 } +local mt = { __index = _M } + +local pb_state +local function init_pb_state() + -- clear current pb state + local old_pb_state = pb.state(nil) + + -- set int64 rule for pubsub module + pb.option("int64_as_string") + + -- initialize protoc compiler + protoc.reload() + local pubsub_protoc = protoc.new() + pubsub_protoc:addpath(ngx.config.prefix() .. "apisix/include/apisix/model") + local ok, err = pcall(pubsub_protoc.loadfile, pubsub_protoc, "pubsub.proto") + if not ok then + pubsub_protoc:reset() + pb.state(old_pb_state) + return "failed to load pubsub protocol: " .. err + end + + pb_state = pb.state(old_pb_state) +end + + +-- parse command name and parameters from client message +local function get_cmd(data) + -- There are sequence and command properties in the data, + -- select the handler according to the command value. + local key = data.req + return key, data[key] +end + + +-- send generic response to client +local function send_resp(ws, sequence, data) + data.sequence = sequence + -- only restore state if it has changed + if pb_state ~= pb.state() then + pb.state(pb_state) + end + local ok, encoded = pcall(pb.encode, "PubSubResp", data) + if not ok or not encoded then + log.error("failed to encode response message, err: ", encoded) + return + end + + local _, err = ws:send_binary(encoded) + if err then + log.error("failed to send response to client, err: ", err) + end +end + + +-- send error response to client +local function send_error(ws, sequence, err_msg) + return send_resp(ws, sequence, { + error_resp = { + code = 0, + message = err_msg, + }, + }) +end + + +--- +-- Create pubsub module instance +-- +-- @function core.pubsub.new +-- @treturn pubsub module instance +-- @treturn string|nil error message if present +-- @usage +-- local pubsub, err = core.pubsub.new() +function _M.new() + if not pb_state then + local err = init_pb_state() + if err then + return nil, err + end + end + + local ws, err = ws_server:new() + if not ws then + return nil, err + end + + local obj = setmetatable({ + ws_server = ws, + cmd_handler = {}, + }, mt) + + -- add default ping handler + obj:on("cmd_ping", function (params) + return { pong_resp = params } + end) + + return obj +end + + +--- +-- Add command callbacks to pubsub module instances +-- +-- The callback function prototype: function (params) +-- The params in the parameters contain the data defined in the requested command. +-- Its first return value is the data, which needs to contain the data needed for +-- the particular resp, returns nil if an error exists. 
+-- Its second return value is a string type error message, no need to return when +-- no error exists. +-- +-- @function core.pubsub.on +-- @tparam string command The command to add callback. +-- @tparam func handler The callback function on receipt of command. +-- @usage +-- pubsub:on(command, function (params) +-- return data, err +-- end) +function _M.on(self, command, handler) + self.cmd_handler[command] = handler +end + + +--- +-- Put the pubsub instance into an event loop, waiting to process client commands +-- +-- @function core.pubsub.wait +-- @usage +-- local err = pubsub:wait() +function _M.wait(self) + local fatal_err + local ws = self.ws_server + while true do + -- read raw data frames from websocket connection + local raw_data, raw_type, err = ws:recv_frame() + if err then + -- terminate the event loop when a fatal error occurs + if ws.fatal then + fatal_err = err + break + end + + -- skip this loop for non-fatal errors + log.error("failed to receive websocket frame: ", err) + goto continue + end + + -- handle client close connection + if raw_type == "close" then + break + end + + -- the pubsub messages use binary, if the message is not + -- binary, skip this message + if raw_type ~= "binary" then + log.warn("pubsub server receive non-binary data, type: ", + raw_type, ", data: ", raw_data) + goto continue + end + + -- only recover state if it has changed + if pb.state() ~= pb_state then + pb.state(pb_state) + end + local data, err = pb.decode("PubSubReq", raw_data) + if not data then + log.error("pubsub server receives undecodable data, err: ", err) + send_error(ws, 0, "wrong command") + goto continue + end + + -- command sequence code + local sequence = data.sequence + + local cmd, params = get_cmd(data) + if not cmd and not params then + log.warn("pubsub server receives empty command") + goto continue + end + + -- find the handler for the current command + local handler = self.cmd_handler[cmd] + if not handler then + log.error("pubsub callback handler not registered for the", + " command, command: ", cmd) + send_error(ws, sequence, "unknown command") + goto continue + end + + -- call command handler to generate response data + local resp, err = handler(params) + if not resp then + send_error(ws, sequence, err) + goto continue + end + send_resp(ws, sequence, resp) + + ::continue:: + end + + if fatal_err then + log.error("fatal error in pubsub websocket server, err: ", fatal_err) + end + ws:send_close() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/request.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/request.lua new file mode 100644 index 0000000..fef4bf1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/request.lua @@ -0,0 +1,382 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Get or set the information of the client request. +-- +-- @module core.request + +local lfs = require("lfs") +local log = require("apisix.core.log") +local json = require("apisix.core.json") +local io = require("apisix.core.io") +local req_add_header +if ngx.config.subsystem == "http" then + local ngx_req = require "ngx.req" + req_add_header = ngx_req.add_header +end +local is_apisix_or, a6_request = pcall(require, "resty.apisix.request") +local ngx = ngx +local get_headers = ngx.req.get_headers +local clear_header = ngx.req.clear_header +local tonumber = tonumber +local error = error +local type = type +local str_fmt = string.format +local str_lower = string.lower +local req_read_body = ngx.req.read_body +local req_get_body_data = ngx.req.get_body_data +local req_get_body_file = ngx.req.get_body_file +local req_get_post_args = ngx.req.get_post_args +local req_get_uri_args = ngx.req.get_uri_args +local req_set_uri_args = ngx.req.set_uri_args +local table_insert = table.insert +local req_set_header = ngx.req.set_header + + +local _M = {} + + +local function _headers(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + + if not is_apisix_or then + return get_headers() + end + + if a6_request.is_request_header_set() then + a6_request.clear_request_header() + ctx.headers = get_headers() + end + + local headers = ctx.headers + if not headers then + headers = get_headers() + ctx.headers = headers + end + + return headers +end + +local function _validate_header_name(name) + local tname = type(name) + if tname ~= "string" then + return nil, str_fmt("invalid header name %q: got %s, " .. + "expected string", name, tname) + end + + return name +end + +--- +-- Returns all headers of the current request. +-- The name and value of the header in return table is in lower case. +-- +-- @function core.request.headers +-- @tparam table ctx The context of the current request. +-- @treturn table all headers +-- @usage +-- local headers = core.request.headers(ctx) +_M.headers = _headers + +--- +-- Returns the value of the header with the specified name. +-- +-- @function core.request.header +-- @tparam table ctx The context of the current request. +-- @tparam string name The header name, example: "Content-Type". +-- @treturn string|nil the value of the header, or nil if not found. +-- @usage +-- -- You can use upper case for header "Content-Type" here to get the value. +-- local content_type = core.request.header(ctx, "Content-Type") -- "application/json" +function _M.header(ctx, name) + if not ctx then + ctx = ngx.ctx.api_ctx + end + + local value = _headers(ctx)[name] + return type(value) == "table" and value[1] or value +end + +local function modify_header(ctx, header_name, header_value, override) + if type(ctx) == "string" then + -- It would be simpler to keep compatibility if we put 'ctx' + -- after 'header_value', but the style is too ugly! 
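+        -- Illustrative example of the legacy form: set_header("X-Foo", "bar") (ctx omitted)
+        -- is shifted below into (nil, "X-Foo", "bar"); "X-Foo"/"bar" are made-up values.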
+ header_value = header_name + header_name = ctx + ctx = nil + + if override then + log.warn("DEPRECATED: use set_header(ctx, header_name, header_value) instead") + else + log.warn("DEPRECATED: use add_header(ctx, header_name, header_value) instead") + end + end + + local err + header_name, err = _validate_header_name(header_name) + if err then + error(err) + end + + local changed = false + if is_apisix_or then + changed = a6_request.is_request_header_set() + end + + if override then + req_set_header(header_name, header_value) + else + req_add_header(header_name, header_value) + end + + if ctx and ctx.var then + -- when the header is updated, clear cache of ctx.var + ctx.var["http_" .. str_lower(header_name)] = nil + end + + if is_apisix_or and not changed then + -- if the headers are not changed before, + -- we can only update part of the cache instead of invalidating the whole + a6_request.clear_request_header() + if ctx and ctx.headers then + if override or not ctx.headers[header_name] then + ctx.headers[header_name] = header_value + else + local values = ctx.headers[header_name] + if type(values) == "table" then + table_insert(values, header_value) + else + ctx.headers[header_name] = {values, header_value} + end + end + end + end +end + +function _M.set_header(ctx, header_name, header_value) + modify_header(ctx, header_name, header_value, true) +end + +function _M.add_header(ctx, header_name, header_value) + modify_header(ctx, header_name, header_value, false) +end + +-- return the remote address of client which directly connecting to APISIX. +-- so if there is a load balancer between downstream client and APISIX, +-- this function will return the ip of load balancer. +function _M.get_ip(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + return ctx.var.realip_remote_addr or ctx.var.remote_addr or '' +end + + +-- get remote address of downstream client, +-- in cases there is a load balancer between downstream client and APISIX. +function _M.get_remote_client_ip(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + return ctx.var.remote_addr or '' +end + + +function _M.get_remote_client_port(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + return tonumber(ctx.var.remote_port) +end + + +function _M.get_uri_args(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + + if not ctx.req_uri_args then + -- use 0 to avoid truncated result and keep the behavior as the + -- same as other platforms + local args = req_get_uri_args(0) + ctx.req_uri_args = args + end + + return ctx.req_uri_args +end + + +function _M.set_uri_args(ctx, args) + if not ctx then + ctx = ngx.ctx.api_ctx + end + + ctx.req_uri_args = nil + return req_set_uri_args(args) +end + + +function _M.get_post_args(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + + if not ctx.req_post_args then + req_read_body() + + -- use 0 to avoid truncated result and keep the behavior as the + -- same as other platforms + local args, err = req_get_post_args(0) + if not args then + -- do we need a way to handle huge post forms? + log.error("the post form is too large: ", err) + args = {} + end + ctx.req_post_args = args + end + + return ctx.req_post_args +end + + +local function check_size(size, max_size) + if max_size and size > max_size then + return nil, "request size " .. size .. " is greater than the " + .. "maximum size " .. max_size .. 
" allowed" + end + + return true +end + + +local function test_expect(var) + local expect = var.http_expect + return expect and str_lower(expect) == "100-continue" +end + + +function _M.get_body(max_size, ctx) + if max_size then + local var = ctx and ctx.var or ngx.var + local content_length = tonumber(var.http_content_length) + if content_length then + local ok, err = check_size(content_length, max_size) + if not ok then + -- When client_max_body_size is exceeded, Nginx will set r->expect_tested = 1 to + -- avoid sending the 100 CONTINUE. + -- We use trick below to imitate this behavior. + if test_expect(var) then + clear_header("expect") + end + + return nil, err + end + end + end + + -- check content-length header for http2/http3 + do + local var = ctx and ctx.var or ngx.var + local content_length = tonumber(var.http_content_length) + if (var.server_protocol == "HTTP/2.0" or var.server_protocol == "HTTP/3.0") + and not content_length then + return nil, "HTTP2/HTTP3 request without a Content-Length header" + end + end + req_read_body() + + local req_body = req_get_body_data() + if req_body then + local ok, err = check_size(#req_body, max_size) + if not ok then + return nil, err + end + + return req_body + end + + local file_name = req_get_body_file() + if not file_name then + return nil + end + + log.info("attempt to read body from file: ", file_name) + + if max_size then + local size, err = lfs.attributes (file_name, "size") + if not size then + return nil, err + end + + local ok, err = check_size(size, max_size) + if not ok then + return nil, err + end + end + + local req_body, err = io.get_file(file_name) + return req_body, err +end + + +function _M.get_json_request_body_table() + local body, err = _M.get_body() + if not body then + return nil, { message = "could not get body: " .. (err or "request body is empty") } + end + + local body_tab, err = json.decode(body) + if not body_tab then + return nil, { message = "could not get parse JSON request body: " .. err } + end + + return body_tab +end + + +function _M.get_scheme(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + return ctx.var.scheme or '' +end + + +function _M.get_host(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + return ctx.var.host or '' +end + + +function _M.get_port(ctx) + if not ctx then + ctx = ngx.ctx.api_ctx + end + return tonumber(ctx.var.server_port) +end + + +_M.get_http_version = ngx.req.http_version + + +_M.get_method = ngx.req.get_method + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/resolver.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/resolver.lua new file mode 100644 index 0000000..3568a97 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/resolver.lua @@ -0,0 +1,96 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Domain Resolver. +-- +-- @module core.resolver + +local json = require("apisix.core.json") +local log = require("apisix.core.log") +local utils = require("apisix.core.utils") +local dns_utils = require("resty.dns.utils") +local config_local = require("apisix.core.config_local") + + +local HOSTS_IP_MATCH_CACHE = {} + + +local _M = {} + + +local function init_hosts_ip() + local hosts, err = dns_utils.parseHosts() + if not hosts then + return hosts, err + end + HOSTS_IP_MATCH_CACHE = hosts +end + + +function _M.init_resolver(args) + -- initialize /etc/hosts + init_hosts_ip() + + local dns_resolver = args and args["dns_resolver"] + utils.set_resolver(dns_resolver) + log.info("dns resolver ", json.delay_encode(dns_resolver, true)) +end + +--- +-- Resolve domain name to ip. +-- +-- @function core.resolver.parse_domain +-- @tparam string host Domain name that need to be resolved. +-- @treturn string The IP of the domain name after being resolved. +-- @usage +-- local ip, err = core.resolver.parse_domain("apache.org") -- "198.18.10.114" +function _M.parse_domain(host) + local rev = HOSTS_IP_MATCH_CACHE[host] + local enable_ipv6 = config_local.local_conf().apisix.enable_ipv6 + if rev then + -- use ipv4 in high priority + local ip = rev["ipv4"] + if enable_ipv6 and not ip then + ip = rev["ipv6"] + end + if ip then + -- meet test case + log.info("dns resolve ", host, ", result: ", json.delay_encode(ip)) + log.info("dns resolver domain: ", host, " to ", ip) + return ip + end + end + + local ip_info, err = utils.dns_parse(host) + if not ip_info then + log.error("failed to parse domain: ", host, ", error: ",err) + return nil, err + end + + log.info("parse addr: ", json.delay_encode(ip_info)) + log.info("resolver: ", json.delay_encode(utils.get_resolver())) + log.info("host: ", host) + if ip_info.address then + log.info("dns resolver domain: ", host, " to ", ip_info.address) + return ip_info.address + end + + return nil, "failed to parse domain" +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/response.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/response.lua new file mode 100644 index 0000000..baee977 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/response.lua @@ -0,0 +1,231 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Get the information form upstream response, or set the information to client response. 
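+-- A minimal usage sketch (the header name, status and body are illustrative):
+--   core.response.set_header("X-Server", "APISIX")
+--   return core.response.exit(200, {message = "ok"})
+-- Table arguments passed to exit()/say() are JSON-encoded before being sent.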
+-- +-- @module core.response + +local encode_json = require("cjson.safe").encode +local ngx = ngx +local arg = ngx.arg +local ngx_print = ngx.print +local ngx_header = ngx.header +local ngx_add_header +if ngx.config.subsystem == "http" then + local ngx_resp = require "ngx.resp" + ngx_add_header = ngx_resp.add_header +end + +local error = error +local select = select +local type = type +local ngx_exit = ngx.exit +local concat_tab = table.concat +local str_sub = string.sub +local tonumber = tonumber +local clear_tab = require("table.clear") +local pairs = pairs + +local _M = {version = 0.1} + + +local resp_exit +do + local t = {} + local idx = 1 + +function resp_exit(code, ...) + clear_tab(t) + idx = 0 + + if code and type(code) ~= "number" then + idx = idx + 1 + t[idx] = code + code = nil + end + + if code then + ngx.status = code + end + + for i = 1, select('#', ...) do + local v = select(i, ...) + if type(v) == "table" then + local body, err = encode_json(v) + if err then + error("failed to encode data: " .. err, -2) + else + idx = idx + 1 + t[idx] = body + idx = idx + 1 + t[idx] = "\n" + end + + elseif v ~= nil then + idx = idx + 1 + t[idx] = v + end + end + + if idx > 0 then + ngx_print(t) + end + + if code then + return ngx_exit(code) + end +end + +end -- do +_M.exit = resp_exit + + +function _M.say(...) + resp_exit(nil, ...) +end + + +local function set_header(append, ...) + if ngx.headers_sent then + error("headers have already been sent", 2) + end + + local count = select('#', ...) + if count == 1 then + local headers = select(1, ...) + if type(headers) ~= "table" then + -- response.set_header(name, nil) + ngx_header[headers] = nil + return + end + + for k, v in pairs(headers) do + if append then + ngx_add_header(k, v) + else + ngx_header[k] = v + end + end + + return + end + + for i = 1, count, 2 do + if append then + ngx_add_header(select(i, ...), select(i + 1, ...)) + else + ngx_header[select(i, ...)] = select(i + 1, ...) + end + end +end + + +function _M.set_header(...) + set_header(false, ...) +end + +--- +-- Add a header to the client response. +-- +-- @function core.response.add_header +-- @usage +-- core.response.add_header("Apisix-Plugins", "no plugin") +function _M.add_header(...) + set_header(true, ...) +end + + +function _M.get_upstream_status(ctx) + -- $upstream_status maybe including multiple status, only need the last one + return tonumber(str_sub(ctx.var.upstream_status or "", -3)) +end + + +function _M.clear_header_as_body_modified() + ngx.header.content_length = nil + -- in case of upstream content is compressed content + ngx.header.content_encoding = nil + + -- clear cache identifier + ngx.header.last_modified = nil + ngx.header.etag = nil +end + + +-- Hold body chunks and return the final body once all chunks have been read. +-- Usage: +-- function _M.body_filter(conf, ctx) +-- local final_body = core.response.hold_body_chunk(ctx) +-- if not final_body then +-- return +-- end +-- final_body = transform(final_body) +-- ngx.arg[1] = final_body +-- ... 
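+-- Additional note on the optional arguments (the 4096 below is an illustrative limit):
+--     local body = core.response.hold_body_chunk(ctx, false, 4096)
+-- returns at most 4096 bytes of the buffered body, and hold_the_copy = true leaves
+-- ngx.arg[1] untouched while chunks are still being buffered.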
+function _M.hold_body_chunk(ctx, hold_the_copy, max_resp_body_bytes) + local body_buffer + local chunk, eof = arg[1], arg[2] + + if not ctx._body_buffer then + ctx._body_buffer = {} + end + + if type(chunk) == "string" and chunk ~= "" then + body_buffer = ctx._body_buffer[ctx._plugin_name] + if not body_buffer then + body_buffer = { + chunk, + n = 1 + } + ctx._body_buffer[ctx._plugin_name] = body_buffer + ctx._resp_body_bytes = #chunk + else + local n = body_buffer.n + 1 + body_buffer.n = n + body_buffer[n] = chunk + ctx._resp_body_bytes = ctx._resp_body_bytes + #chunk + end + if max_resp_body_bytes and ctx._resp_body_bytes >= max_resp_body_bytes then + local body_data = concat_tab(body_buffer, "", 1, body_buffer.n) + body_data = str_sub(body_data, 1, max_resp_body_bytes) + return body_data + end + end + + if eof then + body_buffer = ctx._body_buffer[ctx._plugin_name] + if not body_buffer then + if max_resp_body_bytes and #chunk >= max_resp_body_bytes then + chunk = str_sub(chunk, 1, max_resp_body_bytes) + end + return chunk + end + + local body_data = concat_tab(body_buffer, "", 1, body_buffer.n) + ctx._body_buffer[ctx._plugin_name] = nil + return body_data + end + + if not hold_the_copy then + -- flush the origin body chunk + arg[1] = nil + end + return nil +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/schema.lua new file mode 100644 index 0000000..9ce6a55 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/schema.lua @@ -0,0 +1,71 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Json schema validation module. 
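+-- A minimal usage sketch (the schema and value are illustrative):
+--   local ok, err = core.schema.check({type = "object", required = {"count"}}, {count = 1})
+-- The compiled jsonschema validator is cached in an lrucache keyed by the schema table.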
+-- +-- @module core.schema + +local jsonschema = require('jsonschema') +local lrucache = require("apisix.core.lrucache") +local cached_validator = lrucache.new({count = 1000, ttl = 0}) +local pcall = pcall + +local _M = { + version = 0.3, + + TYPE_CONSUMER = 1, + TYPE_METADATA = 2, +} + + +local function create_validator(schema) + -- local code = jsonschema.generate_validator_code(schema, opts) + -- local file2=io.output("/tmp/2.txt") + -- file2:write(code) + -- file2:close() + local ok, res = pcall(jsonschema.generate_validator, schema) + if ok then + return res + end + + return nil, res -- error message +end + +local function get_validator(schema) + local validator, err = cached_validator(schema, nil, + create_validator, schema) + + if not validator then + return nil, err + end + + return validator, nil +end + +function _M.check(schema, json) + local validator, err = get_validator(schema) + + if not validator then + return false, err + end + + return validator(json) +end + +_M.valid = get_validator + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/string.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/string.lua new file mode 100644 index 0000000..5951d33 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/string.lua @@ -0,0 +1,136 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Wrapped string module. +-- +-- @module core.string + +local error = error +local type = type +local str_byte = string.byte +local str_find = string.find +local ffi = require("ffi") +local C = ffi.C +local ffi_cast = ffi.cast +local ngx = ngx +local ngx_decode_args = ngx.decode_args +local ngx_encode_args = ngx.encode_args + + +ffi.cdef[[ + int memcmp(const void *s1, const void *s2, size_t n); +]] + + +local _M = { + version = 0.1, +} + + +setmetatable(_M, {__index = string}) + + +-- find a needle from a haystack in the plain text way +-- note: Make sure that the haystack is 'string' type, otherwise an exception will be thrown. +function _M.find(haystack, needle, from) + return str_find(haystack, needle, from or 1, true) +end + +--- +-- Tests whether the string s begins with prefix. +-- +-- @function core.string.has_prefix +-- @tparam string s The string being tested. +-- @tparam string prefix Specify the prefix. +-- @treturn boolean Test result, true means the string s begins with prefix. +-- @usage +-- local res = core.string.has_prefix("/apisix/admin/routes", "/apisix/") -- true +function _M.has_prefix(s, prefix) + if type(s) ~= "string" or type(prefix) ~= "string" then + error("unexpected type: s:" .. type(s) .. ", prefix:" .. 
type(prefix)) + end + if #s < #prefix then + return false + end + local rc = C.memcmp(s, prefix, #prefix) + return rc == 0 +end + + +function _M.has_suffix(s, suffix) + if type(s) ~= "string" or type(suffix) ~= "string" then + error("unexpected type: s:" .. type(s) .. ", suffix:" .. type(suffix)) + end + if #s < #suffix then + return false + end + local rc = C.memcmp(ffi_cast("char *", s) + #s - #suffix, suffix, #suffix) + return rc == 0 +end + + +function _M.rfind_char(s, ch, idx) + local b = str_byte(ch) + for i = idx or #s, 1, -1 do + if str_byte(s, i, i) == b then + return i + end + end + return nil +end + + +-- reduce network consumption by compressing string indentation +-- this method should be used with caution +-- it will remove the spaces at the beginning of each line +-- and remove the spaces after `,` character +function _M.compress_script(s) + s = ngx.re.gsub(s, [[^\s+]], "", "mjo") + s = ngx.re.gsub(s, [[,\s+]], ",", "mjo") + return s +end + + +--- +-- Decodes a URI encoded query-string into a Lua table. +-- All request arguments received will be decoded by default. +-- +-- @function core.string.decode_args +-- @tparam string args A URI encoded query-string. +-- @treturn table the value of decoded query-string. +-- @usage +-- local args, err = core.string.decode_args("a=1&b=2") -- {a=1, b=2} +function _M.decode_args(args) + -- use 0 to avoid truncated result and keep the behavior as the + -- same as other platforms + return ngx_decode_args(args, 0) +end + + +--- +-- Encode the Lua table to a query args string according to the URI encoded rules. +-- +-- @function core.string.encode_args +-- @tparam table args The query args Lua table. +-- @treturn string the value of query args string. +-- @usage +-- local str = core.string.encode_args({a=1, b=2}) -- "a=1&b=2" +function _M.encode_args(args) + return ngx_encode_args(args) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/table.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/table.lua new file mode 100644 index 0000000..ed9450a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/table.lua @@ -0,0 +1,287 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Wrapped table module. 
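+-- A small illustrative example for try_read_attr (the table below is made up):
+--   local conf = {plugins = {["limit-count"] = {count = 2}}}
+--   core.table.try_read_attr(conf, "plugins", "limit-count", "count") -- 2
+--   core.table.try_read_attr(conf, "plugins", "missing", "count")     -- nil, no error raised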
+-- +-- @module core.table + +local newproxy = newproxy +local getmetatable = getmetatable +local setmetatable = setmetatable +local select = select +local tostring = tostring +local new_tab = require("table.new") +local nkeys = require("table.nkeys") +local ipairs = ipairs +local pairs = pairs +local type = type +local ngx_re = require("ngx.re") + + +local _M = { + version = 0.2, + new = new_tab, + clear = require("table.clear"), + nkeys = nkeys, + insert = table.insert, + concat = table.concat, + sort = table.sort, + clone = require("table.clone"), + isarray = require("table.isarray"), + isempty = require("table.isempty"), +} + + +setmetatable(_M, {__index = table}) + + +function _M.insert_tail(tab, ...) + local idx = #tab + for i = 1, select('#', ...) do + idx = idx + 1 + tab[idx] = select(i, ...) + end + + return idx +end + + +function _M.set(tab, ...) + for i = 1, select('#', ...) do + tab[i] = select(i, ...) + end +end + + +function _M.try_read_attr(tab, ...) + local count = select('#', ...) + + for i = 1, count do + local attr = select(i, ...) + if type(tab) ~= "table" then + return nil + end + + tab = tab[attr] + end + + return tab +end + +--- +-- Test if an element exists in an array. +-- +-- @function core.table.array_find +-- @tparam table array The tested array. +-- @tparam string val The tested value. +-- @treturn number The index of tested value. +-- @usage +-- local arr = {"a", "b", "c"} +-- local idx = core.table.array_find(arr, "b") -- idx = 2 +local function array_find(array, val) + if type(array) ~= "table" then + return nil + end + + for i, v in ipairs(array) do + if v == val then + return i + end + end + + return nil +end +_M.array_find = array_find + + +-- only work under lua51 or luajit +function _M.setmt__gc(t, mt) + local prox = newproxy(true) + getmetatable(prox).__gc = function() mt.__gc(t) end + t[prox] = true + return setmetatable(t, mt) +end + + +local deepcopy +do + local function _deepcopy(orig, copied, parent, opts) + -- If the array-like table contains nil in the middle, + -- the len might be smaller than the expected. + -- But it doesn't affect the correctness. + local len = #orig + local copy = new_tab(len, nkeys(orig) - len) + -- prevent infinite loop when a field refers its parent + copied[orig] = copy + for orig_key, orig_value in pairs(orig) do + local path = parent .. "." .. 
tostring(orig_key) + if opts and array_find(opts.shallows, path) then + copy[orig_key] = orig_value + else + if type(orig_value) == "table" then + if copied[orig_value] then + copy[orig_key] = copied[orig_value] + else + copy[orig_key] = _deepcopy(orig_value, copied, path, opts) + end + else + copy[orig_key] = orig_value + end + end + end + + local mt = getmetatable(orig) + if mt ~= nil then + setmetatable(copy, mt) + end + + return copy + end + + + local copied_recorder = {} + + function deepcopy(orig, opts) + local orig_type = type(orig) + if orig_type ~= 'table' then + return orig + end + + local res = _deepcopy(orig, copied_recorder, "self", opts) + _M.clear(copied_recorder) + return res + end +end +_M.deepcopy = deepcopy + + +local ngx_null = ngx.null +local function merge(origin, extend) + for k,v in pairs(extend) do + if type(v) == "table" then + if type(origin[k] or false) == "table" then + if _M.nkeys(origin[k]) ~= #origin[k] then + merge(origin[k] or {}, extend[k] or {}) + else + origin[k] = v + end + else + origin[k] = v + end + elseif v == ngx_null then + origin[k] = nil + else + origin[k] = v + end + end + + return origin +end +_M.merge = merge + + +local function patch(node_value, sub_path, conf) + local sub_value = node_value + local sub_paths = ngx_re.split(sub_path, "/") + for i = 1, #sub_paths - 1 do + local sub_name = sub_paths[i] + if sub_value[sub_name] == nil then + sub_value[sub_name] = {} + end + + sub_value = sub_value[sub_name] + + if type(sub_value) ~= "table" then + return 400, "invalid sub-path: /" + .. _M.concat(sub_paths, 1, i) + end + end + + if type(sub_value) ~= "table" then + return 400, "invalid sub-path: /" .. sub_path + end + + local sub_name = sub_paths[#sub_paths] + if sub_name and sub_name ~= "" then + sub_value[sub_name] = conf + else + node_value = conf + end + + return nil, nil, node_value +end +_M.patch = patch + + +-- Compare two tables as if they are sets (only compare the key part) +function _M.set_eq(a, b) + if nkeys(a) ~= nkeys(b) then + return false + end + + for k in pairs(a) do + if b[k] == nil then + return false + end + end + + return true +end + + +-- Compare two elements, including their descendants +local function deep_eq(a, b) + local type_a = type(a) + local type_b = type(b) + + if type_a ~= 'table' or type_b ~= 'table' then + return a == b + end + + local n_a = nkeys(a) + local n_b = nkeys(b) + if n_a ~= n_b then + return false + end + + for k, v_a in pairs(a) do + local v_b = b[k] + local eq = deep_eq(v_a, v_b) + if not eq then + return false + end + end + + return true +end +_M.deep_eq = deep_eq + + +-- pick takes the given attributes out of object +function _M.pick(obj, attrs) + local data = {} + for k, v in pairs(obj) do + if attrs[k] ~= nil then + data[k] = v + end + end + + return data +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/timer.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/timer.lua new file mode 100644 index 0000000..7cd3c53 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/timer.lua @@ -0,0 +1,108 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Wrapped timer module, can cancel the running timers. +-- +-- @module core.timer + +local log = require("apisix.core.log") +local sleep = require("apisix.core.utils").sleep +local timer_every = ngx.timer.every +local timer_at = ngx.timer.at +local update_time = ngx.update_time +local now = ngx.now +local pcall = pcall + + +local _M = { + version = 0.1, +} + + +local function _internal(timer) + timer.start_time = now() + + repeat + local ok, err = pcall(timer.callback_fun) + if not ok then + log.error("failed to run the timer: ", timer.name, " err: ", err) + + if timer.sleep_fail > 0 then + sleep(timer.sleep_fail) + end + + elseif timer.sleep_succ > 0 then + sleep(timer.sleep_succ) + end + + update_time() + until timer.each_ttl <= 0 or now() >= timer.start_time + timer.each_ttl +end + +local function run_timer(premature, self) + if self.running or premature then + return + end + + self.running = true + + local ok, err = pcall(_internal, self) + if not ok then + log.error("failed to run timer[", self.name, "] err: ", err) + end + + self.running = false +end + + +function _M.new(name, callback_fun, opts) + if not name then + return nil, "missing argument: name" + end + + if not callback_fun then + return nil, "missing argument: callback_fun" + end + + opts = opts or {} + local timer = { + name = name, + each_ttl = opts.each_ttl or 1, + sleep_succ = opts.sleep_succ or 1, + sleep_fail = opts.sleep_fail or 5, + start_time = 0, + + callback_fun = callback_fun, + running = false, + } + + local hdl, err = timer_every(opts.check_interval or 1, + run_timer, timer) + if not hdl then + return nil, err + end + + hdl, err = timer_at(0, run_timer, timer) + if not hdl then + return nil, err + end + + return timer +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/core/utils.lua b/CloudronPackages/APISIX/apisix-source/apisix/core/utils.lua new file mode 100644 index 0000000..cfea756 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/core/utils.lua @@ -0,0 +1,465 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Collection of util functions. 
+-- +-- @module core.utils + +local config_local = require("apisix.core.config_local") +local core_str = require("apisix.core.string") +local rfind_char = core_str.rfind_char +local table = require("apisix.core.table") +local log = require("apisix.core.log") +local string = require("apisix.core.string") +local dns_client = require("apisix.core.dns.client") +local ngx_re = require("ngx.re") +local ipmatcher = require("resty.ipmatcher") +local ffi = require("ffi") +local base = require("resty.core.base") +local open = io.open +local sub_str = string.sub +local str_byte = string.byte +local tonumber = tonumber +local tostring = tostring +local re_gsub = ngx.re.gsub +local re_match = ngx.re.match +local re_gmatch = ngx.re.gmatch +local type = type +local io_popen = io.popen +local C = ffi.C +local ffi_string = ffi.string +local get_string_buf = base.get_string_buf +local exiting = ngx.worker.exiting +local ngx_sleep = ngx.sleep +local ipairs = ipairs + +local hostname +local dns_resolvers +local current_inited_resolvers +local current_dns_client +local max_sleep_interval = 1 + +ffi.cdef[[ + int ngx_escape_uri(char *dst, const char *src, + size_t size, int type); +]] + + +local _M = { + version = 0.2, + parse_ipv4 = ipmatcher.parse_ipv4, + parse_ipv6 = ipmatcher.parse_ipv6, +} + + +function _M.get_seed_from_urandom() + local frandom, err = open("/dev/urandom", "rb") + if not frandom then + return nil, 'failed to open /dev/urandom: ' .. err + end + + local str = frandom:read(8) + frandom:close() + if not str then + return nil, 'failed to read data from /dev/urandom' + end + + local seed = 0 + for i = 1, 8 do + seed = 256 * seed + str:byte(i) + end + + return seed +end + + +function _M.split_uri(uri) + return ngx_re.split(uri, "/") +end + + +local function dns_parse(domain, selector) + if dns_resolvers ~= current_inited_resolvers then + local local_conf = config_local.local_conf() + local valid = table.try_read_attr(local_conf, "apisix", "dns_resolver_valid") + local enable_resolv_search_opt = table.try_read_attr(local_conf, "apisix", + "enable_resolv_search_opt") + local opts = { + nameservers = table.clone(dns_resolvers), + order = {"last", "A", "AAAA", "CNAME"}, -- avoid querying SRV + } + + opts.validTtl = valid + + if not enable_resolv_search_opt then + opts.search = {} + end + + local client, err = dns_client.new(opts) + if not client then + return nil, "failed to init the dns client: " .. err + end + + current_dns_client = client + current_inited_resolvers = dns_resolvers + end + + return current_dns_client:resolve(domain, selector) +end +_M.dns_parse = dns_parse + + +local function set_resolver(resolvers) + dns_resolvers = resolvers +end +_M.set_resolver = set_resolver + + +function _M.get_resolver(resolvers) + return dns_resolvers +end + + +local function _parse_ipv4_or_host(addr) + local pos = rfind_char(addr, ":", #addr - 1) + if not pos then + return addr, nil + end + + local host = sub_str(addr, 1, pos - 1) + local port = sub_str(addr, pos + 1) + return host, tonumber(port) +end + + +local function _parse_ipv6_without_port(addr) + return addr +end + + +-- parse_addr parses 'addr' into the host and the port parts. If the 'addr' +-- doesn't have a port, nil is used to return. +-- For IPv6 literal host with brackets, like [::1], the square brackets will be kept. +-- For malformed 'addr', the returned value can be anything. This method doesn't validate +-- if the input is valid. 
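+-- A few illustrative calls (an informal sketch based on the implementation below,
+-- not upstream documentation; hosts and ports are placeholder values):
+--   parse_addr("127.0.0.1:8080")  --> "127.0.0.1", 8080
+--   parse_addr("[::1]:9090")      --> "[::1]", 9090    (brackets are kept)
+--   parse_addr("2001:db8::68")    --> "2001:db8::68", nil
+--   parse_addr("example.com")     --> "example.com", nil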
+function _M.parse_addr(addr) + if str_byte(addr, 1) == str_byte("[") then + -- IPv6 format, with brackets, maybe with port + local right_bracket = str_byte("]") + local len = #addr + if str_byte(addr, len) == right_bracket then + -- addr in [ip:v6] format + return addr, nil + else + local pos = rfind_char(addr, ":", #addr - 1) + if not pos or str_byte(addr, pos - 1) ~= right_bracket then + -- malformed addr + return addr, nil + end + + -- addr in [ip:v6]:port format + local host = sub_str(addr, 1, pos - 1) + local port = sub_str(addr, pos + 1) + return host, tonumber(port) + end + + else + -- When we reach here, the input can be: + -- 1. IPv4 + -- 2. IPv4, with port + -- 3. IPv6, like "2001:db8::68" or "::ffff:192.0.2.1" + -- 4. Malformed input + -- 5. Host, like "test.com" or "localhost" + -- 6. Host with port + local colon = str_byte(":") + local colon_counter = 0 + local dot = str_byte(".") + for i = 1, #addr do + local ch = str_byte(addr, i, i) + if ch == dot then + return _parse_ipv4_or_host(addr) + elseif ch == colon then + colon_counter = colon_counter + 1 + if colon_counter == 2 then + return _parse_ipv6_without_port(addr) + end + end + end + + return _parse_ipv4_or_host(addr) + end +end + + +function _M.uri_safe_encode(uri) + local count_escaped = C.ngx_escape_uri(nil, uri, #uri, 0) + local len = #uri + 2 * count_escaped + local buf = get_string_buf(len) + C.ngx_escape_uri(buf, uri, #uri, 0) + + return ffi_string(buf, len) +end + + +function _M.validate_header_field(field) + for i = 1, #field do + local b = str_byte(field, i, i) + -- '!' - '~', excluding ':' + if not (32 < b and b < 127) or b == 58 then + return false + end + end + return true +end + + +function _M.validate_header_value(value) + if type(value) ~= "string" then + return true + end + + for i = 1, #value do + local b = str_byte(value, i, i) + -- control characters + if b < 32 or b >= 127 then + return false + end + end + return true +end + + +--- +-- Returns the standard host name of the local host. +-- only use this method in init/init_worker phase. +-- +-- @function core.utils.gethostname +-- @treturn string The host name of the local host. +-- @usage +-- local hostname = core.utils.gethostname() -- "localhost" +function _M.gethostname() + if hostname then + return hostname + end + + local hd = io_popen("/bin/hostname") + local data, err = hd:read("*a") + if err == nil then + hostname = data + if string.has_suffix(hostname, "\r\n") then + hostname = sub_str(hostname, 1, -3) + elseif string.has_suffix(hostname, "\n") then + hostname = sub_str(hostname, 1, -2) + end + + else + hostname = "unknown" + log.error("failed to read output of \"/bin/hostname\": ", err) + end + + return hostname +end + + +local function sleep(sec) + if sec <= max_sleep_interval then + return ngx_sleep(sec) + end + ngx_sleep(max_sleep_interval) + if exiting() then + return + end + sec = sec - max_sleep_interval + return sleep(sec) +end + + +_M.sleep = sleep + + +local resolve_var +do + local _ctx + local n_resolved + local pat = [[(? 8 then + log.warn("missing valid end flag in file ", debug_yaml_path) + end + return + end + + f:seek('set') + local yaml_config = f:read("*a") + f:close() + + local debug_yaml_new = yaml.load(yaml_config) + if not debug_yaml_new then + log.error("failed to parse the content of file " .. 
debug_yaml_path) + return + end + + debug_yaml_new.hooks = debug_yaml_new.hooks or {} + debug_yaml = debug_yaml_new + debug_yaml_ctime = last_change_time + + -- validate the debug yaml config + local validator = jsonschema.generate_validator(config_schema) + local ok, err = validator(debug_yaml) + if not ok then + log.error("failed to validate debug config " .. err) + return + end + + return true +end + + +local sync_debug_hooks +do + local pre_mtime + local enabled_hooks = {} + +local function apply_new_fun(module, fun_name, file_path, hook_conf) + local log_level = hook_conf.log_level or "warn" + + if not module or type(module[fun_name]) ~= "function" then + log.error("failed to find function [", fun_name, + "] in module:", file_path) + return + end + + local fun = module[fun_name] + local fun_org + if enabled_hooks[fun] then + fun_org = enabled_hooks[fun].org + enabled_hooks[fun] = nil + else + fun_org = fun + end + + local t = {fun_org = fun_org} + local mt = {} + + function mt.__call(self, ...) + local arg = {...} + local http_filter = debug_yaml.http_filter + local api_ctx = ngx.ctx.api_ctx + local enable_by_hook = not (http_filter and http_filter.enable) + local enable_by_header_filter = (http_filter and http_filter.enable) + and (api_ctx and api_ctx.enable_dynamic_debug) + if hook_conf.is_print_input_args then + if enable_by_hook or enable_by_header_filter then + log[log_level]("call require(\"", file_path, "\").", fun_name, + "() args:", inspect(arg)) + end + end + + local ret = {self.fun_org(...)} + if hook_conf.is_print_return_value then + if enable_by_hook or enable_by_header_filter then + log[log_level]("call require(\"", file_path, "\").", fun_name, + "() return:", inspect(ret)) + end + end + return unpack(ret) + end + + setmetatable(t, mt) + enabled_hooks[t] = { + org = fun_org, new = t, mod = module, + fun_name = fun_name + } + module[fun_name] = t +end + + +function sync_debug_hooks() + if not debug_yaml_ctime or debug_yaml_ctime == pre_mtime then + return + end + + for _, hook in pairs(enabled_hooks) do + local m = hook.mod + local name = hook.fun_name + m[name] = hook.org + end + + enabled_hooks = {} + + local hook_conf = debug_yaml.hook_conf + if not hook_conf.enable then + pre_mtime = debug_yaml_ctime + return + end + + local hook_name = hook_conf.name or "" + local hooks = debug_yaml[hook_name] + if not hooks then + pre_mtime = debug_yaml_ctime + return + end + + for file_path, fun_names in pairs(hooks) do + local ok, module = pcall(require, file_path) + if not ok then + log.error("failed to load module [", file_path, "]: ", module) + + else + for _, fun_name in ipairs(fun_names) do + apply_new_fun(module, fun_name, file_path, hook_conf) + end + end + end + + pre_mtime = debug_yaml_ctime +end + +end --do + + +local function sync_debug_status(premature) + if premature then + return + end + + if not read_debug_yaml() then + return + end + + sync_debug_hooks() +end + + +local function check() + if not debug_yaml or not debug_yaml.http_filter then + return false + end + + local http_filter = debug_yaml.http_filter + if not http_filter or not http_filter.enable_header_name or not http_filter.enable then + return false + end + + return true +end + +function _M.dynamic_debug(api_ctx) + if not check() then + return + end + + if get_headers()[debug_yaml.http_filter.enable_header_name] then + api_ctx.enable_dynamic_debug = true + end +end + + +function _M.enable_debug() + if not debug_yaml or not debug_yaml.basic then + return false + end + + return debug_yaml.basic.enable +end 
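+-- For reference, a minimal debug.yaml shape consumed by read_debug_yaml/sync_debug_hooks
+-- above (a sketch only: field names come from this module, values and the example
+-- hook target are placeholders):
+--
+--   basic:
+--     enable: false                     # toggles enable_debug()
+--   http_filter:
+--     enable: false                     # per-request debugging, gated by a request header
+--     enable_header_name: X-APISIX-Dynamic-Debug
+--   hook_conf:
+--     enable: false                     # toggles function hooking
+--     name: hook_phase                  # key of the hook table below
+--     log_level: warn
+--     is_print_input_args: true
+--     is_print_return_value: true
+--   hook_phase:
+--     apisix/plugin:                    # module path passed to require()
+--       - filter                        # function names to wrap
+--   #END                                # end flag; read_debug_yaml warns when it is missing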
+ + +function _M.init_worker() + local process = require("ngx.process") + if process.type() ~= "worker" then + return + end + + sync_debug_status() + ngx.timer.every(1, sync_debug_status) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/init.lua new file mode 100644 index 0000000..4d3c0e4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/init.lua @@ -0,0 +1,691 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local local_conf = require("apisix.core.config_local").local_conf() +local core = require("apisix.core") +local core_sleep = require("apisix.core.utils").sleep +local resty_consul = require('resty.consul') +local http = require('resty.http') +local util = require("apisix.cli.util") +local ipairs = ipairs +local error = error +local ngx = ngx +local unpack = unpack +local tonumber = tonumber +local pairs = pairs +local ngx_timer_at = ngx.timer.at +local ngx_timer_every = ngx.timer.every +local log = core.log +local json_delay_encode = core.json.delay_encode +local ngx_worker_id = ngx.worker.id +local exiting = ngx.worker.exiting +local thread_spawn = ngx.thread.spawn +local thread_wait = ngx.thread.wait +local thread_kill = ngx.thread.kill +local math_random = math.random +local pcall = pcall +local null = ngx.null +local type = type +local next = next + +local all_services = core.table.new(0, 5) +local default_service +local default_weight +local sort_type +local skip_service_map = core.table.new(0, 1) +local dump_params + +local events +local events_list +local consul_services + +local default_skip_services = {"consul"} +local default_random_range = 5 +local default_catalog_error_index = -1 +local default_health_error_index = -2 +local watch_type_catalog = 1 +local watch_type_health = 2 +local max_retry_time = 256 + +local _M = { + version = 0.3, +} + + +local function discovery_consul_callback(data, event, source, pid) + all_services = data + log.notice("update local variable all_services, event is: ", event, + "source: ", source, "server pid:", pid, + ", all services: ", json_delay_encode(all_services, true)) +end + + +function _M.all_nodes() + return all_services +end + + +function _M.nodes(service_name) + if not all_services then + log.error("all_services is nil, failed to fetch nodes for : ", service_name) + return + end + + local resp_list = all_services[service_name] + + if not resp_list then + log.error("fetch nodes failed by ", service_name, ", return default service") + return default_service and {default_service} + end + + log.info("process id: ", ngx_worker_id(), ", all_services[", service_name, "] = ", + json_delay_encode(resp_list, true)) + + 
return resp_list +end + + +local function update_all_services(consul_server_url, up_services) + -- clean old unused data + local old_services = consul_services[consul_server_url] or {} + for k, _ in pairs(old_services) do + all_services[k] = nil + end + core.table.clear(old_services) + + for k, v in pairs(up_services) do + all_services[k] = v + end + consul_services[consul_server_url] = up_services + + log.info("update all services: ", json_delay_encode(all_services, true)) +end + + +local function read_dump_services() + local data, err = util.read_file(dump_params.path) + if not data then + log.error("read dump file get error: ", err) + return + end + + log.info("read dump file: ", data) + data = util.trim(data) + if #data == 0 then + log.error("dump file is empty") + return + end + + local entity, err = core.json.decode(data) + if not entity then + log.error("decoded dump data got error: ", err, ", file content: ", data) + return + end + + if not entity.services or not entity.last_update then + log.warn("decoded dump data miss fields, file content: ", data) + return + end + + local now_time = ngx.time() + log.info("dump file last_update: ", entity.last_update, ", dump_params.expire: ", + dump_params.expire, ", now_time: ", now_time) + if dump_params.expire ~= 0 and (entity.last_update + dump_params.expire) < now_time then + log.warn("dump file: ", dump_params.path, " had expired, ignored it") + return + end + + all_services = entity.services + log.info("load dump file into memory success") +end + + +local function write_dump_services() + local entity = { + services = all_services, + last_update = ngx.time(), + expire = dump_params.expire, -- later need handle it + } + local data = core.json.encode(entity) + local succ, err = util.write_file(dump_params.path, data) + if not succ then + log.error("write dump into file got error: ", err) + end +end + + +local function show_dump_file() + if not dump_params then + return 503, "dump params is nil" + end + + local data, err = util.read_file(dump_params.path) + if not data then + return 503, err + end + + return 200, data +end + + +local function get_retry_delay(retry_delay) + if not retry_delay or retry_delay >= max_retry_time then + retry_delay = 1 + else + retry_delay = retry_delay * 4 + end + + return retry_delay +end + + +local function get_opts(consul_server, is_catalog) + local opts = { + host = consul_server.host, + port = consul_server.port, + connect_timeout = consul_server.connect_timeout, + read_timeout = consul_server.read_timeout, + default_args = { + token = consul_server.token, + } + } + if not consul_server.keepalive then + return opts + end + + opts.default_args.wait = consul_server.wait_timeout --blocked wait!=0; unblocked by wait=0 + + if is_catalog then + opts.default_args.index = consul_server.catalog_index + else + opts.default_args.index = consul_server.health_index + end + + return opts +end + + +local function watch_catalog(consul_server) + local client = resty_consul:new(get_opts(consul_server, true)) + + ::RETRY:: + local watch_result, watch_err = client:get(consul_server.consul_watch_catalog_url) + local watch_error_info = (watch_err ~= nil and watch_err) + or ((watch_result ~= nil and watch_result.status ~= 200) + and watch_result.status) + if watch_error_info then + log.error("connect consul: ", consul_server.consul_server_url, + " by sub url: ", consul_server.consul_watch_catalog_url, + ", got watch result: ", json_delay_encode(watch_result), + ", with error: ", watch_error_info) + + return watch_type_catalog, 
default_catalog_error_index + end + + if consul_server.catalog_index > 0 + and consul_server.catalog_index == tonumber(watch_result.headers['X-Consul-Index']) then + local random_delay = math_random(default_random_range) + log.info("watch catalog has no change, re-watch consul after ", random_delay, " seconds") + core_sleep(random_delay) + goto RETRY + end + + return watch_type_catalog, watch_result.headers['X-Consul-Index'] +end + + +local function watch_health(consul_server) + local client = resty_consul:new(get_opts(consul_server, false)) + + ::RETRY:: + local watch_result, watch_err = client:get(consul_server.consul_watch_health_url) + local watch_error_info = (watch_err ~= nil and watch_err) + or ((watch_result ~= nil and watch_result.status ~= 200) + and watch_result.status) + if watch_error_info then + log.error("connect consul: ", consul_server.consul_server_url, + " by sub url: ", consul_server.consul_watch_health_url, + ", got watch result: ", json_delay_encode(watch_result), + ", with error: ", watch_error_info) + + return watch_type_health, default_health_error_index + end + + if consul_server.health_index > 0 + and consul_server.health_index == tonumber(watch_result.headers['X-Consul-Index']) then + local random_delay = math_random(default_random_range) + log.info("watch health has no change, re-watch consul after ", random_delay, " seconds") + core_sleep(random_delay) + goto RETRY + end + + return watch_type_health, watch_result.headers['X-Consul-Index'] +end + + +local function check_keepalive(consul_server, retry_delay) + if consul_server.keepalive and not exiting() then + local ok, err = ngx_timer_at(0, _M.connect, consul_server, retry_delay) + if not ok then + log.error("create ngx_timer_at got error: ", err) + return + end + end +end + + +local function update_index(consul_server, catalog_index, health_index) + local c_index = 0 + local h_index = 0 + if catalog_index ~= nil then + c_index = tonumber(catalog_index) + end + + if health_index ~= nil then + h_index = tonumber(health_index) + end + + if c_index > 0 then + consul_server.catalog_index = c_index + end + + if h_index > 0 then + consul_server.health_index = h_index + end +end + + +local function is_not_empty(value) + if value == nil or value == null + or (type(value) == "table" and not next(value)) + or (type(value) == "string" and value == "") + then + return false + end + + return true +end + + +local function watch_result_is_valid(watch_type, index, catalog_index, health_index) + if index <= 0 then + return false + end + + if watch_type == watch_type_catalog then + if index == catalog_index then + return false + end + else + if index == health_index then + return false + end + end + + return true +end + + +local function combine_sort_nodes_cmp(left, right) + if left.host ~= right.host then + return left.host < right.host + end + + return left.port < right.port +end + + +local function port_sort_nodes_cmp(left, right) + return left.port < right.port +end + + +local function host_sort_nodes_cmp(left, right) + return left.host < right.host +end + + +function _M.connect(premature, consul_server, retry_delay) + if premature then + return + end + + local catalog_thread, spawn_catalog_err = thread_spawn(watch_catalog, consul_server) + if not catalog_thread then + local random_delay = math_random(default_random_range) + log.error("failed to spawn thread watch catalog: ", spawn_catalog_err, + ", retry connecting consul after ", random_delay, " seconds") + core_sleep(random_delay) + + check_keepalive(consul_server, 
retry_delay) + return + end + + local health_thread, err = thread_spawn(watch_health, consul_server) + if not health_thread then + thread_kill(catalog_thread) + local random_delay = math_random(default_random_range) + log.error("failed to spawn thread watch health: ", err, ", retry connecting consul after ", + random_delay, " seconds") + core_sleep(random_delay) + + check_keepalive(consul_server, retry_delay) + return + end + + local thread_wait_ok, watch_type, index = thread_wait(catalog_thread, health_thread) + thread_kill(catalog_thread) + thread_kill(health_thread) + if not thread_wait_ok then + local random_delay = math_random(default_random_range) + log.error("failed to wait thread: ", watch_type, ", retry connecting consul after ", + random_delay, " seconds") + core_sleep(random_delay) + + check_keepalive(consul_server, retry_delay) + return + end + + -- double check index has changed + if not watch_result_is_valid(tonumber(watch_type), + tonumber(index), consul_server.catalog_index, consul_server.health_index) then + retry_delay = get_retry_delay(retry_delay) + log.warn("get all svcs got err, retry connecting consul after ", retry_delay, " seconds") + core_sleep(retry_delay) + + check_keepalive(consul_server, retry_delay) + return + end + + local consul_client = resty_consul:new({ + host = consul_server.host, + port = consul_server.port, + connect_timeout = consul_server.connect_timeout, + read_timeout = consul_server.read_timeout, + default_args = { + token = consul_server.token + } + }) + local catalog_success, catalog_res, catalog_err = pcall(function() + return consul_client:get(consul_server.consul_watch_catalog_url) + end) + if not catalog_success then + log.error("connect consul: ", consul_server.consul_server_url, + " by sub url: ", consul_server.consul_watch_catalog_url, + ", got catalog result: ", json_delay_encode(catalog_res)) + check_keepalive(consul_server, retry_delay) + return + end + local catalog_error_info = (catalog_err ~= nil and catalog_err) + or ((catalog_res ~= nil and catalog_res.status ~= 200) + and catalog_res.status) + if catalog_error_info then + log.error("connect consul: ", consul_server.consul_server_url, + " by sub url: ", consul_server.consul_watch_catalog_url, + ", got catalog result: ", json_delay_encode(catalog_res), + ", with error: ", catalog_error_info) + + retry_delay = get_retry_delay(retry_delay) + log.warn("get all svcs got err, retry connecting consul after ", retry_delay, " seconds") + core_sleep(retry_delay) + + check_keepalive(consul_server, retry_delay) + return + end + + -- get health index + local success, health_res, health_err = pcall(function() + return consul_client:get(consul_server.consul_watch_health_url) + end) + if not success then + log.error("connect consul: ", consul_server.consul_server_url, + " by sub url: ", consul_server.consul_watch_health_url, + ", got health result: ", json_delay_encode(health_res)) + check_keepalive(consul_server, retry_delay) + return + end + local health_error_info = (health_err ~= nil and health_err) + or ((health_res ~= nil and health_res.status ~= 200) + and health_res.status) + if health_error_info then + log.error("connect consul: ", consul_server.consul_server_url, + " by sub url: ", consul_server.consul_watch_health_url, + ", got health result: ", json_delay_encode(health_res), + ", with error: ", health_error_info) + + retry_delay = get_retry_delay(retry_delay) + log.warn("get all svcs got err, retry connecting consul after ", retry_delay, " seconds") + core_sleep(retry_delay) + + 
check_keepalive(consul_server, retry_delay) + return + end + + log.info("connect consul: ", consul_server.consul_server_url, + ", catalog_result status: ", catalog_res.status, + ", catalog_result.headers.index: ", catalog_res.headers['X-Consul-Index'], + ", consul_server.index: ", consul_server.index, + ", consul_server: ", json_delay_encode(consul_server)) + + -- if the current index is different from the last index, then update the service + if (consul_server.catalog_index ~= tonumber(catalog_res.headers['X-Consul-Index'])) + or (consul_server.health_index ~= tonumber(health_res.headers['X-Consul-Index'])) then + local up_services = core.table.new(0, #catalog_res.body) + for service_name, _ in pairs(catalog_res.body) do + -- check if the service_name is 'skip service' + if skip_service_map[service_name] then + goto CONTINUE + end + + -- get node from service + local svc_url = consul_server.consul_sub_url .. "/" .. service_name + local svc_success, result, get_err = pcall(function() + return consul_client:get(svc_url, {passing = true}) + end) + local error_info = (get_err ~= nil and get_err) or + ((result ~= nil and result.status ~= 200) and result.status) + if not svc_success or error_info then + log.error("connect consul: ", consul_server.consul_server_url, + ", by service url: ", svc_url, ", with error: ", error_info) + goto CONTINUE + end + + -- decode body, decode json, update service, error handling + -- check result body is not nil and not empty + if is_not_empty(result.body) then + -- add services to table + local nodes = up_services[service_name] + local nodes_uniq = {} + for _, node in ipairs(result.body) do + if not node.Service then + goto CONTINUE + end + + local svc_address, svc_port = node.Service.Address, node.Service.Port + -- Handle nil or 0 port case - default to 80 for HTTP services + if not svc_port or svc_port == 0 then + svc_port = 80 + end + -- if nodes is nil, new nodes table and set to up_services + if not nodes then + nodes = core.table.new(1, 0) + up_services[service_name] = nodes + end + -- not store duplicate service IDs. + local service_id = svc_address .. ":" .. 
svc_port + if not nodes_uniq[service_id] then + -- add node to nodes table + core.table.insert(nodes, { + host = svc_address, + port = tonumber(svc_port), + weight = default_weight, + }) + nodes_uniq[service_id] = true + end + end + if nodes then + if sort_type == "port_sort" then + core.table.sort(nodes, port_sort_nodes_cmp) + + elseif sort_type == "host_sort" then + core.table.sort(nodes, host_sort_nodes_cmp) + + elseif sort_type == "combine_sort" then + core.table.sort(nodes, combine_sort_nodes_cmp) + + end + end + up_services[service_name] = nodes + end + :: CONTINUE :: + end + + update_all_services(consul_server.consul_server_url, up_services) + + --update events + local post_ok, post_err = events:post(events_list._source, + events_list.updating, all_services) + if not post_ok then + log.error("post_event failure with ", events_list._source, + ", update all services error: ", post_err) + end + + if dump_params then + ngx_timer_at(0, write_dump_services) + end + + update_index(consul_server, + catalog_res.headers['X-Consul-Index'], + health_res.headers['X-Consul-Index']) + end + + check_keepalive(consul_server, retry_delay) +end + + +local function format_consul_params(consul_conf) + local consul_server_list = core.table.new(0, #consul_conf.servers) + + for _, v in pairs(consul_conf.servers) do + local scheme, host, port, path = unpack(http.parse_uri(nil, v)) + if scheme ~= "http" then + return nil, "only support consul http schema address, eg: http://address:port" + elseif path ~= "/" or core.string.has_suffix(v, '/') then + return nil, "invalid consul server address, the valid format: http://address:port" + end + core.table.insert(consul_server_list, { + host = host, + port = port, + token = consul_conf.token, + connect_timeout = consul_conf.timeout.connect, + read_timeout = consul_conf.timeout.read, + wait_timeout = consul_conf.timeout.wait, + consul_watch_catalog_url = "/catalog/services", + consul_sub_url = "/health/service", + consul_watch_health_url = "/health/state/any", + consul_server_url = v .. 
"/v1", + weight = consul_conf.weight, + keepalive = consul_conf.keepalive, + health_index = 0, + catalog_index = 0, + fetch_interval = consul_conf.fetch_interval -- fetch interval to next connect consul + }) + end + return consul_server_list, nil +end + + +function _M.init_worker() + local consul_conf = local_conf.discovery.consul + + if consul_conf.dump then + local dump = consul_conf.dump + dump_params = dump + + if dump.load_on_init then + read_dump_services() + end + end + + events = require("apisix.events") + events_list = events:event_list( + "discovery_consul_update_all_services", + "updating" + ) + + if 0 ~= ngx_worker_id() then + events:register(discovery_consul_callback, events_list._source, events_list.updating) + return + end + + log.notice("consul_conf: ", json_delay_encode(consul_conf, true)) + default_weight = consul_conf.weight + sort_type = consul_conf.sort_type + -- set default service, used when the server node cannot be found + if consul_conf.default_service then + default_service = consul_conf.default_service + default_service.weight = default_weight + end + if consul_conf.skip_services then + skip_service_map = core.table.new(0, #consul_conf.skip_services) + for _, v in ipairs(consul_conf.skip_services) do + skip_service_map[v] = true + end + end + -- set up default skip service + for _, v in ipairs(default_skip_services) do + skip_service_map[v] = true + end + + local consul_servers_list, err = format_consul_params(consul_conf) + if err then + error("format consul config got error: " .. err) + end + log.info("consul_server_list: ", json_delay_encode(consul_servers_list, true)) + + consul_services = core.table.new(0, 1) + -- success or failure + for _, server in ipairs(consul_servers_list) do + local ok, err = ngx_timer_at(0, _M.connect, server) + if not ok then + error("create consul got error: " .. err) + end + + if server.keepalive == false then + ngx_timer_every(server.fetch_interval, _M.connect, server) + end + end +end + + +function _M.dump_data() + return {config = local_conf.discovery.consul, services = all_services } +end + + +function _M.control_api() + return { + { + methods = {"GET"}, + uris = {"/show_dump_file"}, + handler = show_dump_file, + } + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/schema.lua new file mode 100644 index 0000000..5d6fc64 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul/schema.lua @@ -0,0 +1,92 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +return { + type = "object", + properties = { + servers = { + type = "array", + minItems = 1, + items = { + type = "string", + } + }, + token = {type = "string", default = ""}, + fetch_interval = {type = "integer", minimum = 1, default = 3}, + keepalive = { + type = "boolean", + default = true + }, + weight = {type = "integer", minimum = 1, default = 1}, + timeout = { + type = "object", + properties = { + connect = {type = "integer", minimum = 1, default = 2000}, + read = {type = "integer", minimum = 1, default = 2000}, + wait = {type = "integer", minimum = 1, default = 60} + }, + default = { + connect = 2000, + read = 2000, + wait = 60, + } + }, + sort_type = { + type = "string", + enum = {"origin", "host_sort", "port_sort", "combine_sort"}, + default = "origin", + }, + skip_services = { + type = "array", + minItems = 1, + items = { + type = "string", + } + }, + dump = { + type = "object", + properties = { + path = {type = "string", minLength = 1}, + load_on_init = {type = "boolean", default = true}, + expire = {type = "integer", default = 0}, + }, + required = {"path"}, + }, + default_service = { + type = "object", + properties = { + host = {type = "string"}, + port = {type = "integer"}, + metadata = { + type = "object", + properties = { + fail_timeout = {type = "integer", default = 1}, + weight = {type = "integer", default = 1}, + max_fails = {type = "integer", default = 1} + }, + default = { + fail_timeout = 1, + weight = 1, + max_fails = 1 + } + } + } + } + }, + + required = {"servers"} +} + diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/init.lua new file mode 100644 index 0000000..bf60654 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/init.lua @@ -0,0 +1,439 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local local_conf = require("apisix.core.config_local").local_conf() +local core = require("apisix.core") +local core_sleep = require("apisix.core.utils").sleep +local resty_consul = require('resty.consul') +local cjson = require('cjson') +local http = require('resty.http') +local util = require("apisix.cli.util") +local ipairs = ipairs +local error = error +local ngx = ngx +local unpack = unpack +local ngx_re_match = ngx.re.match +local tonumber = tonumber +local pairs = pairs +local ipairs = ipairs +local ngx_timer_at = ngx.timer.at +local ngx_timer_every = ngx.timer.every +local log = core.log +local ngx_decode_base64 = ngx.decode_base64 +local json_delay_encode = core.json.delay_encode +local cjson_null = cjson.null + +local applications = core.table.new(0, 5) +local default_service +local default_weight +local default_prefix_rule +local skip_keys_map = core.table.new(0, 1) +local dump_params + +local events +local events_list +local consul_apps + +local _M = { + version = 0.3, +} + + +local function discovery_consul_callback(data, event, source, pid) + applications = data + log.notice("update local variable application, event is: ", event, + "source: ", source, "server pid:", pid, + ", application: ", core.json.encode(applications, true)) +end + + +function _M.all_nodes() + return applications +end + + +function _M.nodes(service_name) + if not applications then + log.error("application is nil, failed to fetch nodes for : ", service_name) + return + end + + local resp_list = applications[service_name] + + if not resp_list then + log.error("fetch nodes failed by ", service_name, ", return default service") + return default_service and {default_service} + end + + log.info("process id: ", ngx.worker.id(), ", applications[", service_name, "] = ", + json_delay_encode(resp_list, true)) + + return resp_list +end + + +local function parse_instance(node, server_name_prefix) + local key = node.Key + + if key == cjson_null or not key or #key == 0 then + log.error("consul_key_empty, server_name_prefix: ", server_name_prefix, + ", node: ", json_delay_encode(node, true)) + return false + end + + local result = ngx_re_match(key, default_prefix_rule, "jo") + if not result then + log.error("server name parse error, server_name_prefix: ", server_name_prefix, + ", node: ", json_delay_encode(node, true)) + return false + end + + local sn, host, port = result[1], result[2], result[3] + + -- if exist, skip special kesy + if sn and skip_keys_map[sn] then + return false + end + + -- base64 value = "IHsid2VpZ2h0IjogMTIwLCAibWF4X2ZhaWxzIjogMiwgImZhaWxfdGltZW91dCI6IDJ9" + -- ori value = "{"weight": 120, "max_fails": 2, "fail_timeout": 2}" + local metadataBase64 = node.Value + if metadataBase64 == cjson_null or not metadataBase64 or #metadataBase64 == 0 then + log.error("error: consul_value_empty, server_name_prefix: ", server_name_prefix, + ", node: ", json_delay_encode(node, true)) + return false + end + + local metadata, err = core.json.decode(ngx_decode_base64(metadataBase64)) + if err then + log.error("invalid upstream value, server_name_prefix: ", server_name_prefix, + ",err: ", err, ", node: ", json_delay_encode(node, true)) + return false + elseif metadata.check_status == false or metadata.check_status == "false" then + log.error("server node unhealthy, server_name_prefix: ", server_name_prefix, + ", node: ", json_delay_encode(node, true)) + return false + end + + return true, host, tonumber(port), metadata, sn +end + + +local function update_application(server_name_prefix, 
data) + local sn + local up_apps = core.table.new(0, #data) + local weight = default_weight + + for _, node in ipairs(data) do + local succ, ip, port, metadata, server_name = parse_instance(node, server_name_prefix) + if succ then + sn = server_name_prefix .. server_name + local nodes = up_apps[sn] + if not nodes then + nodes = core.table.new(1, 0) + up_apps[sn] = nodes + end + core.table.insert(nodes, { + host = ip, + port = port, + weight = metadata and metadata.weight or weight, + }) + end + end + + -- clean old unused data + local old_apps = consul_apps[server_name_prefix] or {} + for k, _ in pairs(old_apps) do + applications[k] = nil + end + core.table.clear(old_apps) + + for k, v in pairs(up_apps) do + applications[k] = v + end + consul_apps[server_name_prefix] = up_apps + + log.info("update applications: ", core.json.encode(applications)) +end + + +local function read_dump_srvs() + local data, err = util.read_file(dump_params.path) + if not data then + log.notice("read dump file get error: ", err) + return + end + + log.info("read dump file: ", data) + data = util.trim(data) + if #data == 0 then + log.error("dump file is empty") + return + end + + local entity, err = core.json.decode(data) + if not entity then + log.error("decoded dump data got error: ", err, ", file content: ", data) + return + end + + if not entity.services or not entity.last_update then + log.warn("decoded dump data miss fields, file content: ", data) + return + end + + local now_time = ngx.time() + log.info("dump file last_update: ", entity.last_update, ", dump_params.expire: ", + dump_params.expire, ", now_time: ", now_time) + if dump_params.expire ~= 0 and (entity.last_update + dump_params.expire) < now_time then + log.warn("dump file: ", dump_params.path, " had expired, ignored it") + return + end + + applications = entity.services + log.info("load dump file into memory success") +end + + +local function write_dump_srvs() + local entity = { + services = applications, + last_update = ngx.time(), + expire = dump_params.expire, -- later need handle it + } + local data = core.json.encode(entity) + local succ, err = util.write_file(dump_params.path, data) + if not succ then + log.error("write dump into file got error: ", err) + end +end + + +local function show_dump_file() + if not dump_params then + return 503, "dump params is nil" + end + + local data, err = util.read_file(dump_params.path) + if not data then + return 503, err + end + + return 200, data +end + + +function _M.connect(premature, consul_server, retry_delay) + if premature then + return + end + + local consul_client = resty_consul:new({ + host = consul_server.host, + port = consul_server.port, + connect_timeout = consul_server.connect_timeout, + read_timeout = consul_server.read_timeout, + default_args = consul_server.default_args, + }) + + log.info("consul_server: ", json_delay_encode(consul_server, true)) + local result, err = consul_client:get(consul_server.consul_key) + local error_info = (err ~= nil and err) + or ((result ~= nil and result.status ~= 200) + and result.status) + if error_info then + log.error("connect consul: ", consul_server.server_name_key, + " by key: ", consul_server.consul_key, + ", got result: ", json_delay_encode(result, true), + ", with error: ", error_info) + + if not retry_delay then + retry_delay = 1 + else + retry_delay = retry_delay * 4 + end + + log.warn("retry connecting consul after ", retry_delay, " seconds") + core_sleep(retry_delay) + + goto ERR + end + + log.info("connect consul: ", 
consul_server.server_name_key, + ", result status: ", result.status, + ", result.headers.index: ", result.headers['X-Consul-Index'], + ", result body: ", json_delay_encode(result.body)) + + -- if current index different last index then update application + if consul_server.index ~= result.headers['X-Consul-Index'] then + consul_server.index = result.headers['X-Consul-Index'] + -- only long connect type use index + if consul_server.keepalive then + consul_server.default_args.index = result.headers['X-Consul-Index'] + end + + -- decode body, decode json, update application, error handling + if result.body and #result.body ~= 0 then + log.notice("server_name: ", consul_server.server_name_key, + ", header: ", core.json.encode(result.headers, true), + ", body: ", core.json.encode(result.body, true)) + + update_application(consul_server.server_name_key, result.body) + --update events + local ok, err = events:post(events_list._source, events_list.updating, applications) + if not ok then + log.error("post_event failure with ", events_list._source, + ", update application error: ", err) + end + + if dump_params then + ngx_timer_at(0, write_dump_srvs) + end + end + end + + :: ERR :: + local keepalive = consul_server.keepalive + if keepalive then + local ok, err = ngx_timer_at(0, _M.connect, consul_server, retry_delay) + if not ok then + log.error("create ngx_timer_at got error: ", err) + return + end + end +end + + +local function format_consul_params(consul_conf) + local consul_server_list = core.table.new(0, #consul_conf.servers) + local args = { + token = consul_conf.token, + recurse = true + } + + if consul_conf.keepalive then + args.wait = consul_conf.timeout.wait --blocked wait!=0; unblocked by wait=0 + args.index = 0 + end + + for _, v in pairs(consul_conf.servers) do + local scheme, host, port, path = unpack(http.parse_uri(nil, v)) + if scheme ~= "http" then + return nil, "only support consul http schema address, eg: http://address:port" + elseif path ~= "/" or core.string.has_suffix(v, '/') then + return nil, "invalid consul server address, the valid format: http://address:port" + end + + core.table.insert(consul_server_list, { + host = host, + port = port, + connect_timeout = consul_conf.timeout.connect, + read_timeout = consul_conf.timeout.read, + consul_key = "/kv/" .. consul_conf.prefix, + server_name_key = v .. "/v1/kv/", + weight = consul_conf.weight, + keepalive = consul_conf.keepalive, + default_args = args, + index = 0, + fetch_interval = consul_conf.fetch_interval -- fetch interval to next connect consul + }) + end + + return consul_server_list +end + + +function _M.init_worker() + local consul_conf = local_conf.discovery.consul_kv + + if consul_conf.dump then + local dump = consul_conf.dump + dump_params = dump + + if dump.load_on_init then + read_dump_srvs() + end + end + + events = require("apisix.events") + events_list = events:event_list( + "discovery_consul_update_application", + "updating" + ) + + if 0 ~= ngx.worker.id() then + events:register(discovery_consul_callback, events_list._source, events_list.updating) + return + end + + log.notice("consul_conf: ", core.json.encode(consul_conf)) + default_weight = consul_conf.weight + -- set default service, used when the server node cannot be found + if consul_conf.default_service then + default_service = consul_conf.default_service + default_service.weight = default_weight + end + default_prefix_rule = "(" .. consul_conf.prefix .. 
"/.*/)([a-zA-Z0-9.]+):([0-9]+)" + log.info("default params, default_weight: ", default_weight, + ", default_prefix_rule: ", default_prefix_rule) + if consul_conf.skip_keys then + skip_keys_map = core.table.new(0, #consul_conf.skip_keys) + for _, v in ipairs(consul_conf.skip_keys) do + skip_keys_map[v] = true + end + end + + local consul_servers_list, err = format_consul_params(consul_conf) + if err then + error(err) + return + end + log.info("consul_server_list: ", core.json.encode(consul_servers_list)) + + consul_apps = core.table.new(0, 1) + -- success or failure + for _, server in ipairs(consul_servers_list) do + local ok, err = ngx_timer_at(0, _M.connect, server) + if not ok then + error("create consul_kv got error: " .. err) + return + end + + if server.keepalive == false then + ngx_timer_every(server.fetch_interval, _M.connect, server) + end + end +end + + +function _M.dump_data() + return {config = local_conf.discovery.consul_kv, services = applications} +end + + +function _M.control_api() + return { + { + methods = {"GET"}, + uris = {"/show_dump_file"}, + handler = show_dump_file, + } + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/schema.lua new file mode 100644 index 0000000..4c02b2c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/consul_kv/schema.lua @@ -0,0 +1,88 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +return { + type = "object", + properties = { + servers = { + type = "array", + minItems = 1, + items = { + type = "string", + } + }, + token = {type = "string", default = ""}, + fetch_interval = {type = "integer", minimum = 1, default = 3}, + keepalive = { + type = "boolean", + default = true + }, + prefix = {type = "string", default = "upstreams"}, + weight = {type = "integer", minimum = 1, default = 1}, + timeout = { + type = "object", + properties = { + connect = {type = "integer", minimum = 1, default = 2000}, + read = {type = "integer", minimum = 1, default = 2000}, + wait = {type = "integer", minimum = 1, default = 60} + }, + default = { + connect = 2000, + read = 2000, + wait = 60, + } + }, + skip_keys = { + type = "array", + minItems = 1, + items = { + type = "string", + } + }, + dump = { + type = "object", + properties = { + path = {type = "string", minLength = 1}, + load_on_init = {type = "boolean", default = true}, + expire = {type = "integer", default = 0}, + }, + required = {"path"}, + }, + default_service = { + type = "object", + properties = { + host = {type = "string"}, + port = {type = "integer"}, + metadata = { + type = "object", + properties = { + fail_timeout = {type = "integer", default = 1}, + weight = {type = "integer", default = 1}, + max_fails = {type = "integer", default = 1} + }, + default = { + fail_timeout = 1, + weight = 1, + max_fails = 1 + } + } + } + } + }, + + required = {"servers"} +} + diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/init.lua new file mode 100644 index 0000000..601de0e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/init.lua @@ -0,0 +1,89 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local config_local = require("apisix.core.config_local") +local is_http = ngx.config.subsystem == "http" +local ipairs = ipairs +local error = error + + +local dns_client +local _M = {} + + +function _M.nodes(service_name) + local host, port = core.utils.parse_addr(service_name) + core.log.info("discovery dns with host ", host, ", port ", port) + + local records, err = dns_client:resolve(host, core.dns_client.RETURN_ALL) + if not records then + return nil, err + end + + local nodes = core.table.new(#records, 0) + local index = 1 + for _, r in ipairs(records) do + if r.address then + local node_port = port + if not node_port and r.port ~= 0 then + -- if the port is zero, fallback to use the default + node_port = r.port + end + + -- ignore zero port when subsystem is stream + if node_port or is_http then + nodes[index] = {host = r.address, weight = r.weight or 1, port = node_port} + if r.priority then + -- for SRV record, nodes with lower priority are chosen first + nodes[index].priority = -r.priority + end + index = index + 1 + end + end + end + + return nodes +end + + +function _M.init_worker() + local local_conf = config_local.local_conf() + local servers = local_conf.discovery.dns.servers + local resolv_conf = local_conf.discovery.dns.resolv_conf + local default_order = {"last", "SRV", "A", "AAAA", "CNAME"} + local order = core.table.try_read_attr(local_conf, "discovery", "dns", "order") + order = order or default_order + + local opts = { + hosts = {}, + resolvConf = resolv_conf, + nameservers = servers, + order = order, + } + + local client, err = core.dns_client.new(opts) + if not client then + error("failed to init the dns client: ", err) + return + end + + dns_client = client +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/schema.lua new file mode 100644 index 0000000..03c7934 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/dns/schema.lua @@ -0,0 +1,48 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +return { + type = "object", + properties = { + servers = { + type = "array", + minItems = 1, + items = { + type = "string", + }, + }, + resolv_conf = { + type = "string", + }, + order = { + type = "array", + minItems = 1, + maxItems = 5, + uniqueItems = true, + items = { + enum = {"last", "SRV", "A", "AAAA", "CNAME"} + }, + }, + }, + oneOf = { + { + required = {"servers"}, + }, + { + required = {"resolv_conf"}, + } + } +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/init.lua new file mode 100644 index 0000000..df72a52 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/init.lua @@ -0,0 +1,223 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local local_conf = require("apisix.core.config_local").local_conf() +local http = require("resty.http") +local core = require("apisix.core") +local ipmatcher = require("resty.ipmatcher") +local ipairs = ipairs +local tostring = tostring +local type = type +local math_random = math.random +local ngx = ngx +local ngx_timer_at = ngx.timer.at +local ngx_timer_every = ngx.timer.every +local string_sub = string.sub +local str_find = core.string.find +local log = core.log + +local default_weight +local applications + + +local _M = { + version = 0.1, +} + + +local function service_info() + local host = local_conf.discovery and + local_conf.discovery.eureka and local_conf.discovery.eureka.host + if not host then + log.error("do not set eureka.host") + return + end + + local basic_auth + -- TODO Add health check to get healthy nodes. + local url = host[math_random(#host)] + local auth_idx = str_find(url, "@") + if auth_idx then + local protocol_idx = str_find(url, "://") + local protocol = string_sub(url, 1, protocol_idx + 2) + local user_and_password = string_sub(url, protocol_idx + 3, auth_idx - 1) + local other = string_sub(url, auth_idx + 1) + url = protocol .. other + basic_auth = "Basic " .. ngx.encode_base64(user_and_password) + end + if local_conf.discovery.eureka.prefix then + url = url .. local_conf.discovery.eureka.prefix + end + if string_sub(url, #url) ~= "/" then + url = url .. "/" + end + + return url, basic_auth +end + + +local function request(request_uri, basic_auth, method, path, query, body) + log.info("eureka uri:", request_uri, ".") + local url = request_uri .. path + local headers = core.table.new(0, 5) + headers['Connection'] = 'Keep-Alive' + headers['Accept'] = 'application/json' + + if basic_auth then + headers['Authorization'] = basic_auth + end + + if body and 'table' == type(body) then + local err + body, err = core.json.encode(body) + if not body then + return nil, 'invalid body : ' .. 
err + end + -- log.warn(method, url, body) + headers['Content-Type'] = 'application/json' + end + + local httpc = http.new() + local timeout = local_conf.discovery.eureka.timeout + local connect_timeout = timeout and timeout.connect or 2000 + local send_timeout = timeout and timeout.send or 2000 + local read_timeout = timeout and timeout.read or 5000 + log.info("connect_timeout:", connect_timeout, ", send_timeout:", send_timeout, + ", read_timeout:", read_timeout, ".") + httpc:set_timeouts(connect_timeout, send_timeout, read_timeout) + return httpc:request_uri(url, { + version = 1.1, + method = method, + headers = headers, + query = query, + body = body, + ssl_verify = false, + }) +end + + +local function parse_instance(instance) + local status = instance.status + local overridden_status = instance.overriddenstatus or instance.overriddenStatus + if overridden_status and overridden_status ~= "UNKNOWN" then + status = overridden_status + end + + if status ~= "UP" then + return + end + local port + if tostring(instance.port["@enabled"]) == "true" and instance.port["$"] then + port = instance.port["$"] + -- secure = false + end + if tostring(instance.securePort["@enabled"]) == "true" and instance.securePort["$"] then + port = instance.securePort["$"] + -- secure = true + end + local ip = instance.ipAddr + if not ipmatcher.parse_ipv4(ip) and + not ipmatcher.parse_ipv6(ip) then + log.error(instance.app, " service ", instance.hostName, " node IP ", ip, + " is invalid(must be IPv4 or IPv6).") + return + end + return ip, port, instance.metadata +end + + +local function fetch_full_registry(premature) + if premature then + return + end + + local request_uri, basic_auth = service_info() + if not request_uri then + return + end + + local res, err = request(request_uri, basic_auth, "GET", "apps") + if not res then + log.error("failed to fetch registry", err) + return + end + + if not res.body or res.status ~= 200 then + log.error("failed to fetch registry, status = ", res.status) + return + end + + local json_str = res.body + local data, err = core.json.decode(json_str) + if not data then + log.error("invalid response body: ", json_str, " err: ", err) + return + end + local apps = data.applications.application + local up_apps = core.table.new(0, #apps) + for _, app in ipairs(apps) do + for _, instance in ipairs(app.instance) do + local ip, port, metadata = parse_instance(instance) + if ip and port then + local nodes = up_apps[app.name] + if not nodes then + nodes = core.table.new(#app.instance, 0) + up_apps[app.name] = nodes + end + core.table.insert(nodes, { + host = ip, + port = port, + weight = metadata and metadata.weight or default_weight, + metadata = metadata, + }) + if metadata then + -- remove useless data + metadata.weight = nil + end + end + end + end + applications = up_apps +end + + +function _M.nodes(service_name) + if not applications then + log.error("failed to fetch nodes for : ", service_name) + return + end + + return applications[service_name] +end + + +function _M.init_worker() + default_weight = local_conf.discovery.eureka.weight or 100 + log.info("default_weight:", default_weight, ".") + local fetch_interval = local_conf.discovery.eureka.fetch_interval or 30 + log.info("fetch_interval:", fetch_interval, ".") + ngx_timer_at(0, fetch_full_registry) + ngx_timer_every(fetch_interval, fetch_full_registry) +end + + +function _M.dump_data() + return {config = local_conf.discovery.eureka, services = applications or {}} +end + + +return _M diff --git 
a/CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/schema.lua new file mode 100644 index 0000000..1966b8e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/eureka/schema.lua @@ -0,0 +1,40 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +return { + type = "object", + properties = { + host = { + type = "array", + minItems = 1, + items = { + type = "string", + }, + }, + fetch_interval = {type = "integer", minimum = 1, default = 30}, + prefix = {type = "string"}, + weight = {type = "integer", minimum = 0}, + timeout = { + type = "object", + properties = { + connect = {type = "integer", minimum = 1, default = 2000}, + send = {type = "integer", minimum = 1, default = 2000}, + read = {type = "integer", minimum = 1, default = 5000}, + } + }, + }, + required = {"host"} +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/init.lua new file mode 100644 index 0000000..10e7aa1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/init.lua @@ -0,0 +1,43 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local log = require("apisix.core.log") +local local_conf = require("apisix.core.config_local").local_conf() +local pairs = pairs + +local discovery_type = local_conf.discovery +local discovery = {} + +if discovery_type then + for discovery_name, _ in pairs(discovery_type) do + log.info("use discovery: ", discovery_name) + discovery[discovery_name] = require("apisix.discovery." .. 
discovery_name) + end +end + +function discovery.init_worker() + if discovery_type then + for discovery_name, _ in pairs(discovery_type) do + discovery[discovery_name].init_worker() + end + end +end + +return { + version = 0.1, + discovery = discovery +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/informer_factory.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/informer_factory.lua new file mode 100644 index 0000000..fd434c0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/informer_factory.lua @@ -0,0 +1,377 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local ngx = ngx +local ipairs = ipairs +local string = string +local math = math +local type = type +local core = require("apisix.core") +local http = require("resty.http") + +local function list_query(informer) + local arguments = { + limit = informer.limit, + } + + if informer.continue and informer.continue ~= "" then + arguments.continue = informer.continue + end + + if informer.label_selector and informer.label_selector ~= "" then + arguments.labelSelector = informer.label_selector + end + + if informer.field_selector and informer.field_selector ~= "" then + arguments.fieldSelector = informer.field_selector + end + + return ngx.encode_args(arguments) +end + + +local function list(httpc, apiserver, informer) + local response, err = httpc:request({ + path = informer.path, + query = list_query(informer), + headers = { + ["Host"] = apiserver.host .. ":" .. apiserver.port, + ["Authorization"] = "Bearer " .. 
apiserver.token, + ["Accept"] = "application/json", + ["Connection"] = "keep-alive" + } + }) + + core.log.info("--raw=", informer.path, "?", list_query(informer)) + + if not response then + return false, "RequestError", err or "" + end + + if response.status ~= 200 then + return false, response.reason, response:read_body() or "" + end + + local body, err = response:read_body() + if err then + return false, "ReadBodyError", err + end + + local data = core.json.decode(body) + if not data or data.kind ~= informer.list_kind then + return false, "UnexpectedBody", body + end + + informer.version = data.metadata.resourceVersion + + if informer.on_added then + for _, item in ipairs(data.items or {}) do + informer:on_added(item, "list") + end + end + + informer.continue = data.metadata.continue + if informer.continue and informer.continue ~= "" then + list(httpc, apiserver, informer) + end + + return true +end + + +local function watch_query(informer) + local arguments = { + watch = "true", + allowWatchBookmarks = "true", + timeoutSeconds = informer.overtime, + } + + if informer.version and informer.version ~= "" then + arguments.resourceVersion = informer.version + end + + if informer.label_selector and informer.label_selector ~= "" then + arguments.labelSelector = informer.label_selector + end + + if informer.field_selector and informer.field_selector ~= "" then + arguments.fieldSelector = informer.field_selector + end + + return ngx.encode_args(arguments) +end + + +local function split_event (body, callback, ...) + local gmatch_iterator, err = ngx.re.gmatch(body, "{\"type\":.*}\n", "jao") + if not gmatch_iterator then + return false, nil, "GmatchError", err + end + + local captures + local captured_size = 0 + local ok, reason + while true do + captures, err = gmatch_iterator() + + if err then + return false, nil, "GmatchError", err + end + + if not captures then + break + end + + captured_size = captured_size + #captures[0] + + ok, reason, err = callback(captures[0], ...) 
+ if not ok then + return false, nil, reason, err + end + end + + local remainder_body + if captured_size == #body then + remainder_body = "" + elseif captured_size == 0 then + remainder_body = body + elseif captured_size < #body then + remainder_body = string.sub(body, captured_size + 1) + end + + return true, remainder_body +end + + +local function dispatch_event(event_string, informer) + local event = core.json.decode(event_string) + + if not event or not event.type or not event.object then + return false, "UnexpectedBody", event_string + end + + local tp = event.type + + if tp == "ERROR" then + if event.object.code == 410 then + return false, "ResourceGone", nil + end + return false, "UnexpectedBody", event_string + end + + local object = event.object + informer.version = object.metadata.resourceVersion + + if tp == "ADDED" then + if informer.on_added then + informer:on_added(object, "watch") + end + elseif tp == "DELETED" then + if informer.on_deleted then + informer:on_deleted(object) + end + elseif tp == "MODIFIED" then + if informer.on_modified then + informer:on_modified(object) + end + -- elseif type == "BOOKMARK" then + -- do nothing + end + + return true +end + + +local function watch(httpc, apiserver, informer) + local watch_times = 8 + for _ = 1, watch_times do + local watch_seconds = 1800 + math.random(9, 999) + informer.overtime = watch_seconds + local http_seconds = watch_seconds + 120 + httpc:set_timeouts(2000, 3000, http_seconds * 1000) + + local response, err = httpc:request({ + path = informer.path, + query = watch_query(informer), + headers = { + ["Host"] = apiserver.host .. ":" .. apiserver.port, + ["Authorization"] = "Bearer " .. apiserver.token, + ["Accept"] = "application/json", + ["Connection"] = "keep-alive" + } + }) + + core.log.info("--raw=", informer.path, "?", watch_query(informer)) + + if err then + return false, "RequestError", err + end + + if response.status ~= 200 then + return false, response.reason, response:read_body() or "" + end + + local ok + local remainder_body + local body + local reason + + while true do + body, err = response.body_reader() + if err then + return false, "ReadBodyError", err + end + + if not body then + break + end + + if remainder_body and #remainder_body > 0 then + body = remainder_body .. 
body + end + + ok, remainder_body, reason, err = split_event(body, dispatch_event, informer) + if not ok then + if reason == "ResourceGone" then + return true + end + return false, reason, err + end + end + end + + return true +end + + +local function list_watch(informer, apiserver) + local ok + local reason, message + local httpc = http.new() + + informer.continue = "" + informer.version = "" + + informer.fetch_state = "connecting" + core.log.info("begin to connect ", apiserver.host, ":", apiserver.port) + + ok, message = httpc:connect({ + scheme = apiserver.schema, + host = apiserver.host, + port = apiserver.port, + ssl_verify = false + }) + + if not ok then + informer.fetch_state = "connect failed" + core.log.error("connect apiserver failed, apiserver.host: ", apiserver.host, + ", apiserver.port: ", apiserver.port, ", message : ", message) + return false + end + + core.log.info("begin to list ", informer.kind) + informer.fetch_state = "listing" + if informer.pre_List then + informer:pre_list() + end + + ok, reason, message = list(httpc, apiserver, informer) + if not ok then + informer.fetch_state = "list failed" + core.log.error("list failed, kind: ", informer.kind, + ", reason: ", reason, ", message : ", message) + return false + end + + informer.fetch_state = "list finished" + if informer.post_List then + informer:post_list() + end + + core.log.info("begin to watch ", informer.kind) + informer.fetch_state = "watching" + ok, reason, message = watch(httpc, apiserver, informer) + if not ok then + informer.fetch_state = "watch failed" + core.log.error("watch failed, kind: ", informer.kind, + ", reason: ", reason, ", message : ", message) + return false + end + + informer.fetch_state = "watch finished" + + return true +end + +local _M = { +} + +function _M.new(group, version, kind, plural, namespace) + local tp + tp = type(group) + if tp ~= "nil" and tp ~= "string" then + return nil, "group should set to string or nil type but " .. tp + end + + tp = type(namespace) + if tp ~= "nil" and tp ~= "string" then + return nil, "namespace should set to string or nil type but " .. tp + end + + tp = type(version) + if tp ~= "string" or version == "" then + return nil, "version should set to non-empty string" + end + + tp = type(kind) + if tp ~= "string" or kind == "" then + return nil, "kind should set to non-empty string" + end + + tp = type(plural) + if tp ~= "string" or plural == "" then + return nil, "plural should set to non-empty string" + end + + local path = "" + if group == nil or group == "" then + path = path .. "/api/" .. version + else + path = path .. "/apis/" .. group .. "/" .. version + end + + if namespace and namespace ~= "" then + path = path .. "/namespaces/" .. namespace + end + path = path .. "/" .. plural + + return { + kind = kind, + list_kind = kind .. "List", + plural = plural, + path = path, + limit = 120, + label_selector = "", + field_selector = "", + overtime = "1800", + version = "", + continue = "", + list_watch = list_watch + } +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/init.lua new file mode 100644 index 0000000..39fa69e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/init.lua @@ -0,0 +1,694 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local ngx = ngx +local ipairs = ipairs +local pairs = pairs +local string = string +local tonumber = tonumber +local tostring = tostring +local os = os +local error = error +local pcall = pcall +local setmetatable = setmetatable +local is_http = ngx.config.subsystem == "http" +local process = require("ngx.process") +local core = require("apisix.core") +local util = require("apisix.cli.util") +local local_conf = require("apisix.core.config_local").local_conf() +local informer_factory = require("apisix.discovery.kubernetes.informer_factory") + + +local ctx + +local endpoint_lrucache = core.lrucache.new({ + ttl = 300, + count = 1024 +}) + +local endpoint_buffer = {} + +local function sort_nodes_cmp(left, right) + if left.host ~= right.host then + return left.host < right.host + end + + return left.port < right.port +end + +local function on_endpoint_slices_modified(handle, endpoint) + if handle.namespace_selector and + not handle:namespace_selector(endpoint.metadata.namespace) then + return + end + + core.log.debug(core.json.delay_encode(endpoint)) + core.table.clear(endpoint_buffer) + + local endpointslices = endpoint.endpoints + for _, endpointslice in ipairs(endpointslices or {}) do + if endpointslice.addresses then + local addresses = endpointslices.addresses + for _, port in ipairs(endpoint.ports or {}) do + local port_name + if port.name then + port_name = port.name + elseif port.targetPort then + port_name = tostring(port.targetPort) + else + port_name = tostring(port.port) + end + + if endpointslice.conditions and endpointslice.condition.ready then + local nodes = endpoint_buffer[port_name] + if nodes == nil then + nodes = core.table.new(0, #endpointslices * #addresses) + endpoint_buffer[port_name] = nodes + end + + for _, address in ipairs(endpointslices.addresses) do + core.table.insert(nodes, { + host = address.ip, + port = port.port, + weight = handle.default_weight + }) + end + end + end + end + end + + for _, ports in pairs(endpoint_buffer) do + for _, nodes in pairs(ports) do + core.table.sort(nodes, sort_nodes_cmp) + end + end + local endpoint_key = endpoint.metadata.namespace .. "/" .. endpoint.metadata.name + local endpoint_content = core.json.encode(endpoint_buffer, true) + local endpoint_version = ngx.crc32_long(endpoint_content) + + local _, err + _, err = handle.endpoint_dict:safe_set(endpoint_key .. "#version", endpoint_version) + if err then + core.log.error("set endpoint version into discovery DICT failed, ", err) + return + end + _, err = handle.endpoint_dict:safe_set(endpoint_key, endpoint_content) + if err then + core.log.error("set endpoint into discovery DICT failed, ", err) + handle.endpoint_dict:delete(endpoint_key .. 
"#version") + end +end + +local function on_endpoint_modified(handle, endpoint) + if handle.namespace_selector and + not handle:namespace_selector(endpoint.metadata.namespace) then + return + end + + core.log.debug(core.json.delay_encode(endpoint)) + core.table.clear(endpoint_buffer) + + local subsets = endpoint.subsets + for _, subset in ipairs(subsets or {}) do + if subset.addresses then + local addresses = subset.addresses + for _, port in ipairs(subset.ports or {}) do + local port_name + if port.name then + port_name = port.name + elseif port.targetPort then + port_name = tostring(port.targetPort) + else + port_name = tostring(port.port) + end + + local nodes = endpoint_buffer[port_name] + if nodes == nil then + nodes = core.table.new(0, #subsets * #addresses) + endpoint_buffer[port_name] = nodes + end + + for _, address in ipairs(subset.addresses) do + core.table.insert(nodes, { + host = address.ip, + port = port.port, + weight = handle.default_weight + }) + end + end + end + end + + for _, ports in pairs(endpoint_buffer) do + for _, nodes in pairs(ports) do + core.table.sort(nodes, sort_nodes_cmp) + end + end + + local endpoint_key = endpoint.metadata.namespace .. "/" .. endpoint.metadata.name + local endpoint_content = core.json.encode(endpoint_buffer, true) + local endpoint_version = ngx.crc32_long(endpoint_content) + + local _, err + _, err = handle.endpoint_dict:safe_set(endpoint_key .. "#version", endpoint_version) + if err then + core.log.error("set endpoint version into discovery DICT failed, ", err) + return + end + _, err = handle.endpoint_dict:safe_set(endpoint_key, endpoint_content) + if err then + core.log.error("set endpoint into discovery DICT failed, ", err) + handle.endpoint_dict:delete(endpoint_key .. "#version") + end +end + + +local function on_endpoint_deleted(handle, endpoint) + if handle.namespace_selector and + not handle:namespace_selector(endpoint.metadata.namespace) then + return + end + + core.log.debug(core.json.delay_encode(endpoint)) + local endpoint_key = endpoint.metadata.namespace .. "/" .. endpoint.metadata.name + handle.endpoint_dict:delete(endpoint_key .. "#version") + handle.endpoint_dict:delete(endpoint_key) +end + + +local function pre_list(handle) + handle.endpoint_dict:flush_all() +end + + +local function post_list(handle) + handle.endpoint_dict:flush_expired() +end + + +local function setup_label_selector(conf, informer) + informer.label_selector = conf.label_selector +end + + +local function setup_namespace_selector(conf, informer) + local ns = conf.namespace_selector + if ns == nil then + informer.namespace_selector = nil + return + end + + if ns.equal then + informer.field_selector = "metadata.namespace=" .. ns.equal + informer.namespace_selector = nil + return + end + + if ns.not_equal then + informer.field_selector = "metadata.namespace!=" .. 
ns.not_equal + informer.namespace_selector = nil + return + end + + if ns.match then + informer.namespace_selector = function(self, namespace) + local match = conf.namespace_selector.match + local m, err + for _, v in ipairs(match) do + m, err = ngx.re.match(namespace, v, "jo") + if m and m[0] == namespace then + return true + end + if err then + core.log.error("ngx.re.match failed: ", err) + end + end + return false + end + return + end + + if ns.not_match then + informer.namespace_selector = function(self, namespace) + local not_match = conf.namespace_selector.not_match + local m, err + for _, v in ipairs(not_match) do + m, err = ngx.re.match(namespace, v, "jo") + if m and m[0] == namespace then + return false + end + if err then + return false + end + end + return true + end + return + end + + return +end + + +local function read_env(key) + if #key > 3 then + local first, second = string.byte(key, 1, 2) + if first == string.byte('$') and second == string.byte('{') then + local last = string.byte(key, #key) + if last == string.byte('}') then + local env = string.sub(key, 3, #key - 1) + local value = os.getenv(env) + if not value then + return nil, "not found environment variable " .. env + end + return value + end + end + end + return key +end + +local function read_token(token_file) + local token, err = util.read_file(token_file) + if err then + return nil, err + end + + -- remove possible extra whitespace + return util.trim(token) +end + +local function get_apiserver(conf) + local apiserver = { + schema = "", + host = "", + port = "", + } + + apiserver.schema = conf.service.schema + if apiserver.schema ~= "http" and apiserver.schema ~= "https" then + return nil, "service.schema should set to one of [http,https] but " .. apiserver.schema + end + + local err + apiserver.host, err = read_env(conf.service.host) + if err then + return nil, err + end + + if apiserver.host == "" then + return nil, "service.host should set to non-empty string" + end + + local port + port, err = read_env(conf.service.port) + if err then + return nil, err + end + + apiserver.port = tonumber(port) + if not apiserver.port or apiserver.port <= 0 or apiserver.port > 65535 then + return nil, "invalid port value: " .. 
apiserver.port + end + + if conf.client.token then + local token, err = read_env(conf.client.token) + if err then + return nil, err + end + apiserver.token = util.trim(token) + elseif conf.client.token_file and conf.client.token_file ~= "" then + setmetatable(apiserver, { + __index = function(_, key) + if key ~= "token" then + return + end + + local token_file, err = read_env(conf.client.token_file) + if err then + core.log.error("failed to read token file path: ", err) + return + end + + local token, err = read_token(token_file) + if err then + core.log.error("failed to read token from file: ", err) + return + end + core.log.debug("re-read the token value") + return token + end + }) + else + return nil, "one of [client.token,client.token_file] should be set but none" + end + + if apiserver.schema == "https" and apiserver.token == "" then + return nil, "apiserver.token should set to non-empty string when service.schema is https" + end + + return apiserver +end + +local function create_endpoint_lrucache(endpoint_dict, endpoint_key, endpoint_port) + local endpoint_content = endpoint_dict:get_stale(endpoint_key) + if not endpoint_content then + core.log.error("get empty endpoint content from discovery DIC, this should not happen ", + endpoint_key) + return nil + end + + local endpoint = core.json.decode(endpoint_content) + if not endpoint then + core.log.error("decode endpoint content failed, this should not happen, content: ", + endpoint_content) + return nil + end + + return endpoint[endpoint_port] +end + + +local _M = { + version = "0.0.1" +} + + +local function start_fetch(handle) + local timer_runner + timer_runner = function(premature) + if premature then + return + end + + local ok, status = pcall(handle.list_watch, handle, handle.apiserver) + + local retry_interval = 0 + if not ok then + core.log.error("list_watch failed, kind: ", handle.kind, + ", reason: ", "RuntimeException", ", message : ", status) + retry_interval = 40 + elseif not status then + retry_interval = 40 + end + + ngx.timer.at(retry_interval, timer_runner) + end + ngx.timer.at(0, timer_runner) +end + +local function get_endpoint_dict(id) + local shm = "kubernetes" + + if id and #id > 0 then + shm = shm .. "-" .. id + end + + if not is_http then + shm = shm .. "-stream" + end + + return ngx.shared[shm] +end + + +local function single_mode_init(conf) + local endpoint_dict = get_endpoint_dict() + + if not endpoint_dict then + error("failed to get lua_shared_dict: ngx.shared.kubernetes, " .. 
+ "please check your APISIX version") + end + + if process.type() ~= "privileged agent" then + ctx = endpoint_dict + return + end + + local apiserver, err = get_apiserver(conf) + if err then + error(err) + return + end + + local default_weight = conf.default_weight + local endpoints_informer, err + if conf.watch_endpoint_slices_schema then + endpoints_informer, err = informer_factory.new("discovery.k8s.io", "v1", + "EndpointSlice", "endpointslices", "") + else + endpoints_informer, err = informer_factory.new("", "v1", "Endpoints", "endpoints", "") + end + if err then + error(err) + return + end + + setup_namespace_selector(conf, endpoints_informer) + setup_label_selector(conf, endpoints_informer) + + if conf.watch_endpoint_slices_schema then + endpoints_informer.on_added = on_endpoint_slices_modified + endpoints_informer.on_modified = on_endpoint_slices_modified + else + endpoints_informer.on_added = on_endpoint_modified + endpoints_informer.on_modified = on_endpoint_modified + end + endpoints_informer.on_deleted = on_endpoint_deleted + endpoints_informer.pre_list = pre_list + endpoints_informer.post_list = post_list + + ctx = setmetatable({ + endpoint_dict = endpoint_dict, + apiserver = apiserver, + default_weight = default_weight + }, { __index = endpoints_informer }) + + start_fetch(ctx) +end + + +local function single_mode_nodes(service_name) + local pattern = "^(.*):(.*)$" -- namespace/name:port_name + local match = ngx.re.match(service_name, pattern, "jo") + if not match then + core.log.error("get unexpected upstream service_name: ", service_name) + return nil + end + + local endpoint_dict = ctx + local endpoint_key = match[1] + local endpoint_port = match[2] + local endpoint_version = endpoint_dict:get_stale(endpoint_key .. "#version") + if not endpoint_version then + core.log.info("get empty endpoint version from discovery DICT ", endpoint_key) + return nil + end + + return endpoint_lrucache(service_name, endpoint_version, + create_endpoint_lrucache, endpoint_dict, endpoint_key, endpoint_port) +end + + +local function multiple_mode_worker_init(confs) + for _, conf in ipairs(confs) do + + local id = conf.id + if ctx[id] then + error("duplicate id value") + end + + local endpoint_dict = get_endpoint_dict(id) + if not endpoint_dict then + error(string.format("failed to get lua_shared_dict: ngx.shared.kubernetes-%s, ", id) .. + "please check your APISIX version") + end + + ctx[id] = endpoint_dict + end +end + + +local function multiple_mode_init(confs) + ctx = core.table.new(#confs, 0) + + if process.type() ~= "privileged agent" then + multiple_mode_worker_init(confs) + return + end + + for _, conf in ipairs(confs) do + local id = conf.id + + if ctx[id] then + error("duplicate id value") + end + + local endpoint_dict = get_endpoint_dict(id) + if not endpoint_dict then + error(string.format("failed to get lua_shared_dict: ngx.shared.kubernetes-%s, ", id) .. 
+ "please check your APISIX version") + end + + local apiserver, err = get_apiserver(conf) + if err then + error(err) + return + end + + local default_weight = conf.default_weight + + local endpoints_informer, err + if conf.watch_endpoint_slices_schema then + endpoints_informer, err = informer_factory.new("discovery.k8s.io", "v1", + "EndpointSlice", "endpointslices", "") + else + endpoints_informer, err = informer_factory.new("", "v1", "Endpoints", "endpoints", "") + end + if err then + error(err) + return + end + + setup_namespace_selector(conf, endpoints_informer) + setup_label_selector(conf, endpoints_informer) + + if conf.watch_endpoint_slices_schema then + endpoints_informer.on_added = on_endpoint_slices_modified + endpoints_informer.on_modified = on_endpoint_slices_modified + else + endpoints_informer.on_added = on_endpoint_modified + endpoints_informer.on_modified = on_endpoint_modified + end + endpoints_informer.on_deleted = on_endpoint_deleted + endpoints_informer.pre_list = pre_list + endpoints_informer.post_list = post_list + + ctx[id] = setmetatable({ + endpoint_dict = endpoint_dict, + apiserver = apiserver, + default_weight = default_weight + }, { __index = endpoints_informer }) + end + + for _, item in pairs(ctx) do + start_fetch(item) + end +end + + +local function multiple_mode_nodes(service_name) + local pattern = "^(.*)/(.*/.*):(.*)$" -- id/namespace/name:port_name + local match = ngx.re.match(service_name, pattern, "jo") + if not match then + core.log.error("get unexpected upstream service_name: ", service_name) + return nil + end + + local id = match[1] + local endpoint_dict = ctx[id] + if not endpoint_dict then + core.log.error("id not exist") + return nil + end + + local endpoint_key = match[2] + local endpoint_port = match[3] + local endpoint_version = endpoint_dict:get_stale(endpoint_key .. 
"#version") + if not endpoint_version then + core.log.info("get empty endpoint version from discovery DICT ", endpoint_key) + return nil + end + + return endpoint_lrucache(service_name, endpoint_version, + create_endpoint_lrucache, endpoint_dict, endpoint_key, endpoint_port) +end + + +function _M.init_worker() + local discovery_conf = local_conf.discovery.kubernetes + core.log.info("kubernetes discovery conf: ", core.json.delay_encode(discovery_conf)) + if #discovery_conf == 0 then + _M.nodes = single_mode_nodes + single_mode_init(discovery_conf) + else + _M.nodes = multiple_mode_nodes + multiple_mode_init(discovery_conf) + end +end + + +local function dump_endpoints_from_dict(endpoint_dict) + local keys, err = endpoint_dict:get_keys(0) + if err then + core.log.error("get keys from discovery dict failed: ", err) + return + end + + if not keys or #keys == 0 then + return + end + + local endpoints = {} + for i = 1, #keys do + local key = keys[i] + -- skip key with suffix #version + if key:sub(-#"#version") ~= "#version" then + local value = endpoint_dict:get(key) + core.table.insert(endpoints, { + name = key, + value = value + }) + end + end + + return endpoints +end + +function _M.dump_data() + local discovery_conf = local_conf.discovery.kubernetes + local eps = {} + + if #discovery_conf == 0 then + -- Single mode: discovery_conf is a single configuration object + local endpoint_dict = get_endpoint_dict() + local endpoints = dump_endpoints_from_dict(endpoint_dict) + if endpoints then + core.table.insert(eps, { + endpoints = endpoints + }) + end + else + -- Multiple mode: discovery_conf is an array of configuration objects + for _, conf in ipairs(discovery_conf) do + local endpoint_dict = get_endpoint_dict(conf.id) + local endpoints = dump_endpoints_from_dict(endpoint_dict) + if endpoints then + core.table.insert(eps, { + id = conf.id, + endpoints = endpoints + }) + end + end + end + + return {config = discovery_conf, endpoints = eps} +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/schema.lua new file mode 100644 index 0000000..e18d06f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/kubernetes/schema.lua @@ -0,0 +1,217 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local host_patterns = { + { pattern = [[^\${[_A-Za-z]([_A-Za-z0-9]*[_A-Za-z])*}$]] }, + { pattern = [[^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$]] }, +} + +local port_patterns = { + { pattern = [[^\${[_A-Za-z]([_A-Za-z0-9]*[_A-Za-z])*}$]] }, + { pattern = [[^(([1-9]\d{0,3}|[1-5]\d{4}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5]))$]] }, +} + +local schema_schema = { + type = "string", + enum = { "http", "https" }, + default = "https", +} + +local token_patterns = { + { pattern = [[\${[_A-Za-z]([_A-Za-z0-9]*[_A-Za-z])*}$]] }, + { pattern = [[^[A-Za-z0-9+\/._=-]{0,4096}$]] }, +} + +local token_schema = { + type = "string", + oneOf = token_patterns, +} + +local token_file_schema = { + type = "string", + pattern = [[^[^\:*?"<>|]*$]], + minLength = 1, + maxLength = 500, +} + +local namespace_pattern = [[^[a-z0-9]([-a-z0-9_.]*[a-z0-9])?$]] + +local namespace_regex_pattern = [[^[\x21-\x7e]*$]] + +local namespace_selector_schema = { + type = "object", + properties = { + equal = { + type = "string", + pattern = namespace_pattern, + }, + not_equal = { + type = "string", + pattern = namespace_pattern, + }, + match = { + type = "array", + items = { + type = "string", + pattern = namespace_regex_pattern + }, + minItems = 1 + }, + not_match = { + type = "array", + items = { + type = "string", + pattern = namespace_regex_pattern + }, + minItems = 1 + }, + }, + oneOf = { + { required = {} }, + { required = { "equal" } }, + { required = { "not_equal" } }, + { required = { "match" } }, + { required = { "not_match" } } + }, +} + +local label_selector_schema = { + type = "string", +} + +local default_weight_schema = { + type = "integer", + default = 50, + minimum = 0, +} + +local shared_size_schema = { + type = "string", + pattern = [[^[1-9][0-9]*m$]], + default = "1m", +} + +local watch_endpoint_slices_schema = { + type = "boolean", + default = false, +} + +return { + anyOf = { + { + type = "object", + properties = { + service = { + type = "object", + properties = { + schema = schema_schema, + host = { + type = "string", + oneOf = host_patterns, + default = "${KUBERNETES_SERVICE_HOST}", + }, + port = { + type = "string", + oneOf = port_patterns, + default = "${KUBERNETES_SERVICE_PORT}", + }, + }, + default = { + schema = "https", + host = "${KUBERNETES_SERVICE_HOST}", + port = "${KUBERNETES_SERVICE_PORT}", + } + }, + client = { + type = "object", + properties = { + token = token_schema, + token_file = token_file_schema, + }, + default = { + token_file = "/var/run/secrets/kubernetes.io/serviceaccount/token" + }, + ["if"] = { + ["not"] = { + anyOf = { + { required = { "token" } }, + { required = { "token_file" } }, + } + } + }, + ["then"] = { + properties = { + token_file = { + default = "/var/run/secrets/kubernetes.io/serviceaccount/token" + } + } + } + }, + namespace_selector = namespace_selector_schema, + label_selector = label_selector_schema, + default_weight = default_weight_schema, + shared_size = shared_size_schema, + watch_endpoint_slices = watch_endpoint_slices_schema, + }, + }, + { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + id = { + type = "string", + pattern = [[^[a-z0-9]{1,8}$]] + }, + service = { + type = "object", + properties = { + schema = schema_schema, + host = { + type = "string", + oneOf = host_patterns, + }, + port = { + type = "string", + oneOf = port_patterns, + }, + }, + required = { "host", "port" } + }, + client = { + type = "object", + properties = { + token = token_schema, + token_file = token_file_schema, + }, 
+ oneOf = { + { required = { "token" } }, + { required = { "token_file" } }, + }, + }, + namespace_selector = namespace_selector_schema, + label_selector = label_selector_schema, + default_weight = default_weight_schema, + shared_size = shared_size_schema, + watch_endpoint_slices = watch_endpoint_slices_schema, + }, + required = { "id", "service", "client" } + }, + } + } +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/init.lua new file mode 100644 index 0000000..d4fec79 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/init.lua @@ -0,0 +1,392 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local require = require +local local_conf = require('apisix.core.config_local').local_conf() +local http = require('resty.http') +local core = require('apisix.core') +local ipairs = ipairs +local pairs = pairs +local type = type +local math = math +local math_random = math.random +local ngx = ngx +local ngx_re = require('ngx.re') +local ngx_timer_at = ngx.timer.at +local ngx_timer_every = ngx.timer.every +local string = string +local string_sub = string.sub +local str_byte = string.byte +local str_find = core.string.find +local log = core.log + +local default_weight +local nacos_dict = ngx.shared.nacos --key: namespace_id.group_name.service_name +if not nacos_dict then + error("lua_shared_dict \"nacos\" not configured") +end + +local auth_path = 'auth/login' +local instance_list_path = 'ns/instance/list?healthyOnly=true&serviceName=' +local default_namespace_id = "public" +local default_group_name = "DEFAULT_GROUP" +local access_key +local secret_key + + +local _M = {} + +local function get_key(namespace_id, group_name, service_name) + return namespace_id .. '.' .. group_name .. '.' .. service_name +end + +local function request(request_uri, path, body, method, basic_auth) + local url = request_uri .. path + log.info('request url:', url) + local headers = {} + headers['Accept'] = 'application/json' + + if basic_auth then + headers['Authorization'] = basic_auth + end + + if body and 'table' == type(body) then + local err + body, err = core.json.encode(body) + if not body then + return nil, 'invalid body : ' .. 
err + end + headers['Content-Type'] = 'application/json' + end + + local httpc = http.new() + local timeout = local_conf.discovery.nacos.timeout + local connect_timeout = timeout.connect + local send_timeout = timeout.send + local read_timeout = timeout.read + log.info('connect_timeout:', connect_timeout, ', send_timeout:', send_timeout, + ', read_timeout:', read_timeout) + httpc:set_timeouts(connect_timeout, send_timeout, read_timeout) + local res, err = httpc:request_uri(url, { + method = method, + headers = headers, + body = body, + ssl_verify = true, + }) + if not res then + return nil, err + end + + if not res.body or res.status ~= 200 then + return nil, 'status = ' .. res.status + end + + local json_str = res.body + local data, err = core.json.decode(json_str) + if not data then + return nil, err + end + return data +end + + +local function get_url(request_uri, path) + return request(request_uri, path, nil, 'GET', nil) +end + + +local function post_url(request_uri, path, body) + return request(request_uri, path, body, 'POST', nil) +end + + +local function get_token_param(base_uri, username, password) + if not username or not password then + return '' + end + + local args = { username = username, password = password} + local data, err = post_url(base_uri, auth_path .. '?' .. ngx.encode_args(args), nil) + if err then + log.error('nacos login fail:', username, ' ', password, ' desc:', err) + return nil, err + end + return '&accessToken=' .. data.accessToken +end + + +local function get_namespace_param(namespace_id) + local param = '' + if namespace_id then + local args = {namespaceId = namespace_id} + param = '&' .. ngx.encode_args(args) + end + return param +end + + +local function get_group_name_param(group_name) + local param = '' + if group_name then + local args = {groupName = group_name} + param = '&' .. ngx.encode_args(args) + end + return param +end + + +local function get_signed_param(group_name, service_name) + local param = '' + if access_key ~= '' and secret_key ~= '' then + local str_to_sign = ngx.now() * 1000 .. '@@' .. group_name .. '@@' .. service_name + local args = { + ak = access_key, + data = str_to_sign, + signature = ngx.encode_base64(ngx.hmac_sha1(secret_key, str_to_sign)) + } + param = '&' .. ngx.encode_args(args) + end + return param +end + + +local function get_base_uri() + local host = local_conf.discovery.nacos.host + -- TODO Add health check to get healthy nodes. + local url = host[math_random(#host)] + local auth_idx = core.string.rfind_char(url, '@') + local username, password + if auth_idx then + local protocol_idx = str_find(url, '://') + local protocol = string_sub(url, 1, protocol_idx + 2) + local user_and_password = string_sub(url, protocol_idx + 3, auth_idx - 1) + local arr = ngx_re.split(user_and_password, ':') + if #arr == 2 then + username = arr[1] + password = arr[2] + end + local other = string_sub(url, auth_idx + 1) + url = protocol .. other + end + + if local_conf.discovery.nacos.prefix then + url = url .. local_conf.discovery.nacos.prefix + end + + if str_byte(url, #url) ~= str_byte('/') then + url = url .. 
'/' + end + + return url, username, password +end + + +local function de_duplication(services, namespace_id, group_name, service_name, scheme) + for _, service in ipairs(services) do + if service.namespace_id == namespace_id and service.group_name == group_name + and service.service_name == service_name and service.scheme == scheme then + return true + end + end + return false +end + + +local function iter_and_add_service(services, values) + if not values then + return + end + + for _, value in core.config_util.iterate_values(values) do + local conf = value.value + if not conf then + goto CONTINUE + end + + local up + if conf.upstream then + up = conf.upstream + else + up = conf + end + + local namespace_id = (up.discovery_args and up.discovery_args.namespace_id) + or default_namespace_id + + local group_name = (up.discovery_args and up.discovery_args.group_name) + or default_group_name + + local dup = de_duplication(services, namespace_id, group_name, + up.service_name, up.scheme) + if dup then + goto CONTINUE + end + + if up.discovery_type == 'nacos' then + core.table.insert(services, { + service_name = up.service_name, + namespace_id = namespace_id, + group_name = group_name, + scheme = up.scheme, + }) + end + ::CONTINUE:: + end +end + + +local function get_nacos_services() + local services = {} + + -- here we use lazy load to work around circle dependency + local get_upstreams = require('apisix.upstream').upstreams + local get_routes = require('apisix.router').http_routes + local get_stream_routes = require('apisix.router').stream_routes + local get_services = require('apisix.http.service').services + local values = get_upstreams() + iter_and_add_service(services, values) + values = get_routes() + iter_and_add_service(services, values) + values = get_services() + iter_and_add_service(services, values) + values = get_stream_routes() + iter_and_add_service(services, values) + return services +end + +local function is_grpc(scheme) + if scheme == 'grpc' or scheme == 'grpcs' then + return true + end + + return false +end + +local curr_service_in_use = {} +local function fetch_full_registry(premature) + if premature then + return + end + + local base_uri, username, password = get_base_uri() + local token_param, err = get_token_param(base_uri, username, password) + if err then + log.error('get_token_param error:', err) + return + end + + local infos = get_nacos_services() + if #infos == 0 then + return + end + local service_names = {} + for _, service_info in ipairs(infos) do + local data, err + local namespace_id = service_info.namespace_id + local group_name = service_info.group_name + local scheme = service_info.scheme or '' + local namespace_param = get_namespace_param(service_info.namespace_id) + local group_name_param = get_group_name_param(service_info.group_name) + local signature_param = get_signed_param(service_info.group_name, service_info.service_name) + local query_path = instance_list_path .. service_info.service_name + .. token_param .. namespace_param .. group_name_param + .. 
signature_param + data, err = get_url(base_uri, query_path) + if err then + log.error('get_url:', query_path, ' err:', err) + goto CONTINUE + end + + local nodes = {} + local key = get_key(namespace_id, group_name, service_info.service_name) + service_names[key] = true + for _, host in ipairs(data.hosts) do + local node = { + host = host.ip, + port = host.port, + weight = host.weight or default_weight, + } + -- docs: https://github.com/yidongnan/grpc-spring-boot-starter/pull/496 + if is_grpc(scheme) and host.metadata and host.metadata.gRPC_port then + node.port = host.metadata.gRPC_port + end + + core.table.insert(nodes, node) + end + if #nodes > 0 then + local content = core.json.encode(nodes) + nacos_dict:set(key, content) + end + ::CONTINUE:: + end + -- remove services that are not in use anymore + for key, _ in pairs(curr_service_in_use) do + if not service_names[key] then + nacos_dict:delete(key) + end + end + curr_service_in_use = service_names +end + + +function _M.nodes(service_name, discovery_args) + local namespace_id = discovery_args and + discovery_args.namespace_id or default_namespace_id + local group_name = discovery_args + and discovery_args.group_name or default_group_name + local key = get_key(namespace_id, group_name, service_name) + local value = nacos_dict:get(key) + if not value then + core.log.error("nacos service not found: ", service_name) + return nil + end + local nodes = core.json.decode(value) + return nodes +end + + +function _M.init_worker() + default_weight = local_conf.discovery.nacos.weight + log.info('default_weight:', default_weight) + local fetch_interval = local_conf.discovery.nacos.fetch_interval + log.info('fetch_interval:', fetch_interval) + access_key = local_conf.discovery.nacos.access_key + secret_key = local_conf.discovery.nacos.secret_key + ngx_timer_at(0, fetch_full_registry) + ngx_timer_every(fetch_interval, fetch_full_registry) +end + + +function _M.dump_data() + local keys = nacos_dict:get_keys(0) + local applications = {} + for _, key in ipairs(keys) do + local value = nacos_dict:get(key) + if value then + local nodes = core.json.decode(value) + if nodes then + applications[key] = { + nodes = nodes, + } + end + end + end + return {services = applications or {}} +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/schema.lua new file mode 100644 index 0000000..2940487 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/nacos/schema.lua @@ -0,0 +1,59 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
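+--
+-- Illustrative config sketch (not part of upstream): a config.yaml snippet assumed to
+-- satisfy this schema, matching the local_conf.discovery.nacos.* keys read in init.lua;
+-- the address is a placeholder, `host` is the only required field, and the remaining
+-- settings fall back to the defaults declared below.
+--
+--   discovery:
+--     nacos:
+--       host:
+--         - "http://192.168.33.1:8848"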
+-- +local host_pattern = [[^http(s)?:\/\/([a-zA-Z0-9-_.]+:.+\@)?[a-zA-Z0-9-_.:]+$]] +local prefix_pattern = [[^[\/a-zA-Z0-9-_.]+$]] + + +return { + type = 'object', + properties = { + host = { + type = 'array', + minItems = 1, + items = { + type = 'string', + pattern = host_pattern, + minLength = 2, + maxLength = 100, + }, + }, + fetch_interval = {type = 'integer', minimum = 1, default = 30}, + prefix = { + type = 'string', + pattern = prefix_pattern, + maxLength = 100, + default = '/nacos/v1/' + }, + weight = {type = 'integer', minimum = 1, default = 100}, + timeout = { + type = 'object', + properties = { + connect = {type = 'integer', minimum = 1, default = 2000}, + send = {type = 'integer', minimum = 1, default = 2000}, + read = {type = 'integer', minimum = 1, default = 5000}, + }, + default = { + connect = 2000, + send = 2000, + read = 5000, + } + }, + access_key = {type = 'string', default = ''}, + secret_key = {type = 'string', default = ''}, + }, + required = {'host'} +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/init.lua new file mode 100644 index 0000000..17bb275 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/init.lua @@ -0,0 +1,367 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
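+--
+-- Editorial note (illustrative, not from upstream): this module reads the Tars registry
+-- directly from its MySQL tables (t_server_conf joined with t_adapter_conf, see the SQL
+-- below) and stores one entry per servant (e.g. "A.AServer.FirstObj", as shown in the
+-- fetch_full() example comment) in the endpoint_dict shared dict, alongside a
+-- "<servant>#version" key that is used to invalidate the per-worker LRU cache.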
+-- +local ngx = ngx +local format = string.format +local ipairs = ipairs +local error = error +local tonumber = tonumber +local local_conf = require("apisix.core.config_local").local_conf() +local core = require("apisix.core") +local mysql = require("resty.mysql") +local is_http = ngx.config.subsystem == "http" +local process = require("ngx.process") + +local endpoint_dict + +local full_query_sql = [[ select servant, group_concat(endpoint order by endpoint) as endpoints +from t_server_conf left join t_adapter_conf tac using (application, server_name, node_name) +where setting_state = 'active' and present_state = 'active' +group by servant ]] + +local incremental_query_sql = [[ +select servant, (setting_state = 'active' and present_state = 'active') activated, +group_concat(endpoint order by endpoint) endpoints +from t_server_conf left join t_adapter_conf tac using (application, server_name, node_name) +where (application, server_name) in +( +select application, server_name from t_server_conf +where registry_timestamp > now() - interval %d second +union +select application, server_name from t_adapter_conf +where registry_timestamp > now() - interval %d second +) +group by servant, activated order by activated desc ]] + +local _M = { + version = 0.1, +} + +local default_weight + +local last_fetch_full_time = 0 +local last_db_error + +local endpoint_lrucache = core.lrucache.new({ + ttl = 300, + count = 1024 +}) + +local activated_buffer = core.table.new(10, 0) +local nodes_buffer = core.table.new(0, 5) + + +--[[ +endpoints format as follows: + tcp -h 172.16.1.1 -p 11 -t 6000 -e 0,tcp -e 0 -p 12 -h 172.16.1.1,tcp -p 13 -h 172.16.1.1 +we extract host and port value via endpoints_pattern +--]] +local endpoints_pattern = core.table.concat( + { [[tcp(\s*-[te]\s*(\S+)){0,2}\s*-([hpHP])\s*(\S+)(\s*-[teTE]\s*(\S+))]], + [[{0,2}\s*-([hpHP])\s*(\S+)(\s*-[teTE]\s*(\S+)){0,2}\s*(,|$)]] } +) + + +local function update_endpoint(servant, nodes) + local endpoint_content = core.json.encode(nodes, true) + local endpoint_version = ngx.crc32_long(endpoint_content) + core.log.debug("set servant ", servant, endpoint_content) + local _, err + _, err = endpoint_dict:safe_set(servant .. "#version", endpoint_version) + if err then + core.log.error("set endpoint version into nginx shared dict failed, ", err) + return + end + _, err = endpoint_dict:safe_set(servant, endpoint_content) + if err then + core.log.error("set endpoint into nginx shared dict failed, ", err) + endpoint_dict:delete(servant .. "#version") + end +end + + +local function delete_endpoint(servant) + core.log.info("delete servant ", servant) + endpoint_dict:delete(servant .. 
"#version") + endpoint_dict:delete(servant) +end + + +local function add_endpoint_to_lrucache(servant) + local endpoint_content, err = endpoint_dict:get_stale(servant) + if not endpoint_content then + core.log.error("get empty endpoint content, servant: ", servant, ", err: ", err) + return nil + end + + local endpoint, err = core.json.decode(endpoint_content) + if not endpoint then + core.log.error("decode json failed, content: ", endpoint_content, ", err: ", err) + return nil + end + + return endpoint +end + + +local function get_endpoint(servant) + + --[[ + fetch_full function will: + 1: call endpoint_dict:flush_all() + 2: setup servant:nodes pairs into endpoint_dict + 3: call endpoint_dict:flush_expired() + + get_endpoint may be called during the 2 step of the fetch_full function, + so we must use endpoint_dict:get_stale() to get value instead endpoint_dict:get() + --]] + + local endpoint_version, err = endpoint_dict:get_stale(servant .. "#version") + if not endpoint_version then + if err then + core.log.error("get empty endpoint version, servant: ", servant, ", err: ", err) + end + return nil + end + return endpoint_lrucache(servant, endpoint_version, add_endpoint_to_lrucache, servant) +end + + +local function extract_endpoint(query_result) + for _, p in ipairs(query_result) do + repeat + local servant = p.servant + + if servant == ngx.null then + break + end + + if p.activated == 1 then + activated_buffer[servant] = ngx.null + elseif p.activated == 0 then + if activated_buffer[servant] == nil then + delete_endpoint(servant) + end + break + end + + core.table.clear(nodes_buffer) + local iterator = ngx.re.gmatch(p.endpoints, endpoints_pattern, "jao") + while true do + local captures, err = iterator() + if err then + core.log.error("gmatch failed, error: ", err, " , endpoints: ", p.endpoints) + break + end + + if not captures then + break + end + + local host, port + if captures[3] == "h" or captures[3] == "H" then + host = captures[4] + port = tonumber(captures[8]) + else + host = captures[8] + port = tonumber(captures[4]) + end + + core.table.insert(nodes_buffer, { + host = host, + port = port, + weight = default_weight, + }) + end + update_endpoint(servant, nodes_buffer) + until true + end +end + + +local function fetch_full(db_cli) + local res, err, errcode, sqlstate = db_cli:query(full_query_sql) + --[[ + res format is as follows: + { + { + servant = "A.AServer.FirstObj", + endpoints = "tcp -h 172.16.1.1 -p 10001 -e 0 -t 3000,tcp -p 10002 -h 172.16.1.2 -t 3000" + }, + { + servant = "A.AServer.SecondObj", + endpoints = "tcp -t 3000 -p 10002 -h 172.16.1.2" + }, + } + + if current endpoint_dict is as follows: + key1:nodes1, key2:nodes2, key3:nodes3 + + then fetch_full get follow results: + key1:nodes1, key4:nodes4, key5:nodes5 + + at this time, we need + 1: setup key4:nodes4, key5:nodes5 + 2: delete key2:nodes2, key3:nodes3 + + to achieve goals, we should: + 1: before setup results, execute endpoint_dict:flush_all() + 2: after setup results, execute endpoint_dict:flush_expired() + --]] + if not res then + core.log.error("query failed, error: ", err, ", ", errcode, " ", sqlstate) + return err + end + + endpoint_dict:flush_all() + extract_endpoint(res) + + while err == "again" do + res, err, errcode, sqlstate = db_cli:read_result() + if not res then + if err then + core.log.error("read result failed, error: ", err, ", ", errcode, " ", sqlstate) + end + return err + end + extract_endpoint(res) + end + endpoint_dict:flush_expired() + + return nil +end + + +local function 
fetch_incremental(db_cli) + local res, err, errcode, sqlstate = db_cli:query(incremental_query_sql) + --[[ + res is as follows: + { + { + activated=1, + servant = "A.AServer.FirstObj", + endpoints = "tcp -h 172.16.1.1 -p 10001 -e 0 -t 3000,tcp -p 10002 -h 172.16.1.2 -t 3000" + }, + { + activated=0, + servant = "A.AServer.FirstObj", + endpoints = "tcp -t 3000 -p 10001 -h 172.16.1.3" + }, + { + activated=0, + servant = "B.BServer.FirstObj", + endpoints = "tcp -t 3000 -p 10002 -h 172.16.1.2" + }, + } + + for each item: + if activated==1, setup + if activated==0, if there is a other item had same servant and activate==1, ignore + if activated==0, and there is no other item had same servant, delete + --]] + if not res then + core.log.error("query failed, error: ", err, ", ", errcode, " ", sqlstate) + return err + end + + core.table.clear(activated_buffer) + extract_endpoint(res) + + while err == "again" do + res, err, errcode, sqlstate = db_cli:read_result() + if not res then + if err then + core.log.error("read result failed, error: ", err, ", ", errcode, " ", sqlstate) + end + return err + end + extract_endpoint(res) + end + + return nil +end + + +local function fetch_endpoint(premature, conf) + if premature then + return + end + + local db_cli, err = mysql:new() + if not db_cli then + core.log.error("failed to instantiate mysql: ", err) + return + end + db_cli:set_timeout(3000) + + local ok, err, errcode, sqlstate = db_cli:connect(conf.db_conf) + if not ok then + core.log.error("failed to connect mysql: ", err, ", ", errcode, ", ", sqlstate) + return + end + + local now = ngx.time() + + if last_db_error or last_fetch_full_time + conf.full_fetch_interval <= now then + last_fetch_full_time = now + last_db_error = fetch_full(db_cli) + else + last_db_error = fetch_incremental(db_cli) + end + + if not last_db_error then + db_cli:set_keepalive(120 * 1000, 1) + end +end + + +function _M.nodes(servant) + return get_endpoint(servant) +end + +local function get_endpoint_dict() + local shm = "tars" + + if not is_http then + shm = shm .. "-stream" + end + + return ngx.shared[shm] +end + +function _M.init_worker() + endpoint_dict = get_endpoint_dict() + if not endpoint_dict then + error("failed to get lua_shared_dict: tars, please check your APISIX version") + end + + if process.type() ~= "privileged agent" then + return + end + + local conf = local_conf.discovery.tars + default_weight = conf.default_weight + + core.log.info("conf ", core.json.delay_encode(conf)) + local backtrack_time = conf.incremental_fetch_interval + 5 + incremental_query_sql = format(incremental_query_sql, backtrack_time, backtrack_time) + + ngx.timer.at(0, fetch_endpoint, conf) + ngx.timer.every(conf.incremental_fetch_interval, fetch_endpoint, conf) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/schema.lua new file mode 100644 index 0000000..01d44d1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/discovery/tars/schema.lua @@ -0,0 +1,45 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local host_pattern = [[^([a-zA-Z0-9-_.]+:.+\@)?[a-zA-Z0-9-_.:]+$]] + +return { + type = 'object', + properties = { + db_conf = { + type = 'object', + properties = { + host = { type = 'string', minLength = 1, maxLength = 500, pattern = host_pattern }, + port = { type = 'integer', minimum = 1, maximum = 65535, default = 3306 }, + database = { type = 'string', minLength = 1, maxLength = 64 }, + user = { type = 'string', minLength = 1, maxLength = 64 }, + password = { type = 'string', minLength = 1, maxLength = 64 }, + }, + required = { 'host', 'database', 'user', 'password' } + }, + full_fetch_interval = { + type = 'integer', minimum = 90, maximum = 3600, default = 300, + }, + incremental_fetch_interval = { + type = 'integer', minimum = 5, maximum = 60, default = 15, + }, + default_weight = { + type = 'integer', minimum = 0, maximum = 100, default = 100, + }, + }, + required = { 'db_conf' } +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/events.lua b/CloudronPackages/APISIX/apisix-source/apisix/events.lua new file mode 100644 index 0000000..dac71ac --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/events.lua @@ -0,0 +1,139 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local require = require +local error = error +local assert = assert +local tostring = tostring +local pairs = pairs +local setmetatable = setmetatable +local ngx = ngx +local core = require("apisix.core") + +local _M = { + events_module = nil, +} + +_M.EVENTS_MODULE_LUA_RESTY_WORKER_EVENTS = 'lua-resty-worker-events' +_M.EVENTS_MODULE_LUA_RESTY_EVENTS = 'lua-resty-events' + + +-- use lua-resty-worker-events +local function init_resty_worker_events() + _M.events_module = _M.EVENTS_MODULE_LUA_RESTY_WORKER_EVENTS + + local we = require("resty.worker.events") + local shm = ngx.config.subsystem == "http" and "worker-events" or "worker-events-stream" + local ok, err = we.configure({shm = shm, interval = 0.1}) + if not ok then + error("failed to init worker event: " .. err) + end + + return we +end + + +-- use lua-resty-events +local function init_resty_events() + _M.events_module = _M.EVENTS_MODULE_LUA_RESTY_EVENTS + + local listening = "unix:" .. ngx.config.prefix() .. "logs/" + if ngx.config.subsystem == "http" then + listening = listening .. "worker_events.sock" + else + listening = listening .. 
"stream_worker_events.sock" + end + core.log.info("subsystem: " .. ngx.config.subsystem .. " listening sock: " .. listening) + + local opts = { + unique_timeout = 5, -- life time of unique event data in lrucache + broker_id = 0, -- broker server runs in nginx worker #0 + listening = listening, -- unix socket for broker listening + } + + local we = require("resty.events.compat") + assert(we.configure(opts)) + assert(we.configured()) + + return we +end + + +function _M.init_worker() + if _M.inited then + -- prevent duplicate initializations in the same worker to + -- avoid potentially unexpected behavior + return + end + + _M.inited = true + + local conf = core.config.local_conf() + local module_name = core.table.try_read_attr(conf, "apisix", "events", "module") + or _M.EVENTS_MODULE_LUA_RESTY_WORKER_EVENTS + + if module_name == _M.EVENTS_MODULE_LUA_RESTY_EVENTS then + -- use lua-resty-events as an event module via the apisix.events.module + -- key in the configuration file + _M.worker_events = init_resty_events() + else + -- use lua-resty-worker-events default now + _M.worker_events = init_resty_worker_events() + end +end + + +function _M.register(self, ...) + return self.worker_events.register(...) +end + + +function _M.event_list(self, source, ...) + -- a patch for the lua-resty-events to support event_list + -- this snippet is copied from the lua-resty-worker-events lib + if self.events_module == _M.EVENTS_MODULE_LUA_RESTY_EVENTS then + local events = { _source = source } + for _, event in pairs({...}) do + events[event] = event + end + return setmetatable(events, { + __index = function(_, key) + error("event '"..tostring(key).."' is an unknown event", 2) + end + }) + end + + -- the lua-resty-worker-events has a built-in event_list implementation + return self.worker_events.event_list(source, ...) +end + + +function _M.post(self, ...) + return self.worker_events.post(...) +end + + +function _M.get_healthcheck_events_modele(self) + if self.events_module == _M.EVENTS_MODULE_LUA_RESTY_EVENTS then + return "resty.events" + else + return "resty.worker.events" + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/global_rules.lua b/CloudronPackages/APISIX/apisix-source/apisix/global_rules.lua new file mode 100644 index 0000000..93fa289 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/global_rules.lua @@ -0,0 +1,56 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local plugin_checker = require("apisix.plugin").plugin_checker +local error = error + + +local _M = {} + +local global_rules + +function _M.init_worker() + local err + global_rules, err = core.config.new("/global_rules", { + automatic = true, + item_schema = core.schema.global_rule, + checker = plugin_checker, + }) + if not global_rules then + error("failed to create etcd instance for fetching /global_rules : " + .. err) + end +end + + +function _M.global_rules() + if not global_rules then + return nil, nil + end + return global_rules.values, global_rules.conf_version +end + + +function _M.get_pre_index() + if not global_rules then + return nil + end + return global_rules.prev_index +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/http/route.lua b/CloudronPackages/APISIX/apisix-source/apisix/http/route.lua new file mode 100644 index 0000000..dbf11ab --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/http/route.lua @@ -0,0 +1,153 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local radixtree = require("resty.radixtree") +local router = require("apisix.utils.router") +local service_fetch = require("apisix.http.service").get +local core = require("apisix.core") +local expr = require("resty.expr.v1") +local plugin_checker = require("apisix.plugin").plugin_checker +local event = require("apisix.core.event") +local ipairs = ipairs +local type = type +local error = error +local loadstring = loadstring + + +local _M = {} + + +function _M.create_radixtree_uri_router(routes, uri_routes, with_parameter) + routes = routes or {} + + core.table.clear(uri_routes) + + for _, route in ipairs(routes) do + if type(route) == "table" then + local status = core.table.try_read_attr(route, "value", "status") + -- check the status + if status and status == 0 then + goto CONTINUE + end + + local filter_fun, err + if route.value.filter_func then + filter_fun, err = loadstring( + "return " .. route.value.filter_func, + "router#" .. 
route.value.id) + if not filter_fun then + core.log.error("failed to load filter function: ", err, + " route id: ", route.value.id) + goto CONTINUE + end + + filter_fun = filter_fun() + end + + local hosts = route.value.hosts or route.value.host + if not hosts and route.value.service_id then + local service = service_fetch(route.value.service_id) + if not service then + core.log.error("failed to fetch service configuration by ", + "id: ", route.value.service_id) + -- we keep the behavior that missing service won't affect the route matching + else + hosts = service.value.hosts + end + end + + core.log.info("insert uri route: ", + core.json.delay_encode(route.value, true)) + core.table.insert(uri_routes, { + paths = route.value.uris or route.value.uri, + methods = route.value.methods, + priority = route.value.priority, + hosts = hosts, + remote_addrs = route.value.remote_addrs + or route.value.remote_addr, + vars = route.value.vars, + filter_fun = filter_fun, + handler = function (api_ctx, match_opts) + api_ctx.matched_params = nil + api_ctx.matched_route = route + api_ctx.curr_req_matched = match_opts.matched + end + }) + + ::CONTINUE:: + end + end + + event.push(event.CONST.BUILD_ROUTER, routes) + core.log.info("route items: ", core.json.delay_encode(uri_routes, true)) + + if with_parameter then + return radixtree.new(uri_routes) + else + return router.new(uri_routes) + end +end + + +function _M.match_uri(uri_router, api_ctx) + local match_opts = core.tablepool.fetch("route_match_opts", 0, 4) + match_opts.method = api_ctx.var.request_method + match_opts.host = api_ctx.var.host + match_opts.remote_addr = api_ctx.var.remote_addr + match_opts.vars = api_ctx.var + match_opts.matched = core.tablepool.fetch("matched_route_record", 0, 4) + + local ok = uri_router:dispatch(api_ctx.var.uri, match_opts, api_ctx, match_opts) + core.tablepool.release("route_match_opts", match_opts) + return ok +end + + +-- additional check for synced route configuration, run after schema check +local function check_route(route) + local ok, err = plugin_checker(route) + if not ok then + return nil, err + end + + if route.vars then + ok, err = expr.new(route.vars) + if not ok then + return nil, "failed to validate the 'vars' expression: " .. err + end + end + + return true +end + + +function _M.init_worker(filter) + local user_routes, err = core.config.new("/routes", { + automatic = true, + item_schema = core.schema.route, + checker = check_route, + filter = filter, + }) + if not user_routes then + error("failed to create etcd instance for fetching /routes : " .. err) + end + + return user_routes +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_host_uri.lua b/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_host_uri.lua new file mode 100644 index 0000000..680a04f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_host_uri.lua @@ -0,0 +1,193 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local router = require("apisix.utils.router") +local core = require("apisix.core") +local event = require("apisix.core.event") +local get_services = require("apisix.http.service").services +local service_fetch = require("apisix.http.service").get +local ipairs = ipairs +local type = type +local tab_insert = table.insert +local loadstring = loadstring +local pairs = pairs +local cached_router_version +local cached_service_version +local host_router +local only_uri_router + + +local _M = {version = 0.1} + + +local function push_host_router(route, host_routes, only_uri_routes) + if type(route) ~= "table" then + return + end + + local filter_fun, err + if route.value.filter_func then + filter_fun, err = loadstring( + "return " .. route.value.filter_func, + "router#" .. route.value.id) + if not filter_fun then + core.log.error("failed to load filter function: ", err, + " route id: ", route.value.id) + return + end + + filter_fun = filter_fun() + end + + local hosts = route.value.hosts + if not hosts then + if route.value.host then + hosts = {route.value.host} + elseif route.value.service_id then + local service = service_fetch(route.value.service_id) + if not service then + core.log.error("failed to fetch service configuration by ", + "id: ", route.value.service_id) + -- we keep the behavior that missing service won't affect the route matching + else + hosts = service.value.hosts + end + end + end + + local radixtree_route = { + paths = route.value.uris or route.value.uri, + methods = route.value.methods, + priority = route.value.priority, + remote_addrs = route.value.remote_addrs + or route.value.remote_addr, + vars = route.value.vars, + filter_fun = filter_fun, + handler = function (api_ctx, match_opts) + api_ctx.matched_params = nil + api_ctx.matched_route = route + api_ctx.curr_req_matched = match_opts.matched + api_ctx.real_curr_req_matched_path = match_opts.matched._path + end + } + + if hosts == nil then + core.table.insert(only_uri_routes, radixtree_route) + return + end + + for i, host in ipairs(hosts) do + local host_rev = host:reverse() + if not host_routes[host_rev] then + host_routes[host_rev] = {radixtree_route} + else + tab_insert(host_routes[host_rev], radixtree_route) + end + end +end + + +local function create_radixtree_router(routes) + local host_routes = {} + local only_uri_routes = {} + host_router = nil + routes = routes or {} + + for _, route in ipairs(routes) do + local status = core.table.try_read_attr(route, "value", "status") + -- check the status + if not status or status == 1 then + push_host_router(route, host_routes, only_uri_routes) + end + end + + -- create router: host_router + local host_router_routes = {} + for host_rev, routes in pairs(host_routes) do + local sub_router = router.new(routes) + + core.table.insert(host_router_routes, { + paths = host_rev, + filter_fun = function(vars, opts, ...) + return sub_router:dispatch(vars.uri, opts, ...) 
+ end, + handler = function (api_ctx, match_opts) + api_ctx.real_curr_req_matched_host = match_opts.matched._path + end + }) + end + + event.push(event.CONST.BUILD_ROUTER, routes) + + if #host_router_routes > 0 then + host_router = router.new(host_router_routes) + end + + -- create router: only_uri_router + only_uri_router = router.new(only_uri_routes) + return true +end + +function _M.match(api_ctx) + local user_routes = _M.user_routes + local _, service_version = get_services() + if not cached_router_version or cached_router_version ~= user_routes.conf_version + or not cached_service_version or cached_service_version ~= service_version + then + create_radixtree_router(user_routes.values) + cached_router_version = user_routes.conf_version + cached_service_version = service_version + end + + return _M.matching(api_ctx) +end + + +function _M.matching(api_ctx) + core.log.info("route match mode: radixtree_host_uri") + + local match_opts = core.tablepool.fetch("route_match_opts", 0, 16) + match_opts.method = api_ctx.var.request_method + match_opts.remote_addr = api_ctx.var.remote_addr + match_opts.vars = api_ctx.var + match_opts.host = api_ctx.var.host + match_opts.matched = core.tablepool.fetch("matched_route_record", 0, 4) + + if host_router then + local host_uri = api_ctx.var.host + local ok = host_router:dispatch(host_uri:reverse(), match_opts, api_ctx, match_opts) + if ok then + if api_ctx.real_curr_req_matched_path then + api_ctx.curr_req_matched._path = api_ctx.real_curr_req_matched_path + api_ctx.real_curr_req_matched_path = nil + end + if api_ctx.real_curr_req_matched_host then + api_ctx.curr_req_matched._host = api_ctx.real_curr_req_matched_host:reverse() + api_ctx.real_curr_req_matched_host = nil + end + core.tablepool.release("route_match_opts", match_opts) + return true + end + end + + local ok = only_uri_router:dispatch(api_ctx.var.uri, match_opts, api_ctx, match_opts) + core.tablepool.release("route_match_opts", match_opts) + return ok +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri.lua b/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri.lua new file mode 100644 index 0000000..7c1b5c0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri.lua @@ -0,0 +1,57 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
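One design detail worth noting in the radixtree_host_uri router above: hosts are inserted into the radixtree reversed (host:reverse()), which turns wildcard domains such as *.example.com into plain prefix matches. The snippet below is a conceptual, plain-Lua illustration of that idea only; it is not APISIX's actual radixtree matcher.

-- Conceptual sketch: reversing hosts turns wildcard matching into prefix matching.
-- "*.example.com" reversed is "moc.elpmaxe.*"; dropping the trailing "*" leaves a
-- prefix that every reversed subdomain of example.com starts with.
local function host_matches(configured_host, request_host)
    local rev_conf = configured_host:reverse()
    local rev_req  = request_host:reverse()
    if rev_conf:sub(-1) == "*" then
        local prefix = rev_conf:sub(1, -2)        -- strip the reversed wildcard
        return rev_req:sub(1, #prefix) == prefix
    end
    return rev_conf == rev_req                     -- exact host requires exact match
end

print(host_matches("*.example.com", "api.example.com"))  --> true
print(host_matches("*.example.com", "example.org"))      --> false
print(host_matches("foo.com", "foo.com"))                --> true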
+-- +local require = require +local core = require("apisix.core") +local base_router = require("apisix.http.route") +local get_services = require("apisix.http.service").services +local cached_router_version +local cached_service_version + + +local _M = {version = 0.2} + + + local uri_routes = {} + local uri_router +function _M.match(api_ctx) + local user_routes = _M.user_routes + local _, service_version = get_services() + if not cached_router_version or cached_router_version ~= user_routes.conf_version + or not cached_service_version or cached_service_version ~= service_version + then + uri_router = base_router.create_radixtree_uri_router(user_routes.values, + uri_routes, false) + cached_router_version = user_routes.conf_version + cached_service_version = service_version + end + + if not uri_router then + core.log.error("failed to fetch valid `uri` router: ") + return true + end + + return _M.matching(api_ctx) +end + + +function _M.matching(api_ctx) + core.log.info("route match mode: radixtree_uri") + return base_router.match_uri(uri_router, api_ctx) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri_with_parameter.lua b/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri_with_parameter.lua new file mode 100644 index 0000000..3f10f4f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/http/router/radixtree_uri_with_parameter.lua @@ -0,0 +1,57 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
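The radixtree_uri router above rebuilds its matcher only when the routes' conf_version or the services' version changes; every other request reuses the cached router. Below is a small standalone sketch of that rebuild-on-version-change pattern; the builder function and version numbers are made up for illustration.

-- Standalone sketch of the "rebuild only when a version changes" pattern.
local cached_router, cached_route_ver, cached_service_ver

local function get_router(routes, route_ver, service_ver, build_router)
    if not cached_router
       or cached_route_ver ~= route_ver
       or cached_service_ver ~= service_ver then
        cached_router = build_router(routes)       -- expensive step, only on change
        cached_route_ver = route_ver
        cached_service_ver = service_ver
    end
    return cached_router
end

-- usage: the builder runs for version (1, 1), is reused, then reruns on a bump
local builds = 0
local function build_router(routes) builds = builds + 1; return { n = #routes } end

get_router({ "r1", "r2" }, 1, 1, build_router)
get_router({ "r1", "r2" }, 1, 1, build_router)     -- cache hit, no rebuild
get_router({ "r1", "r2" }, 2, 1, build_router)     -- route version bumped: rebuild
print(builds)                                      --> 2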
+-- +local require = require +local core = require("apisix.core") +local base_router = require("apisix.http.route") +local get_services = require("apisix.http.service").services +local cached_router_version +local cached_service_version + + +local _M = {} + + + local uri_routes = {} + local uri_router +function _M.match(api_ctx) + local user_routes = _M.user_routes + local _, service_version = get_services() + if not cached_router_version or cached_router_version ~= user_routes.conf_version + or not cached_service_version or cached_service_version ~= service_version + then + uri_router = base_router.create_radixtree_uri_router(user_routes.values, + uri_routes, true) + cached_router_version = user_routes.conf_version + cached_service_version = service_version + end + + if not uri_router then + core.log.error("failed to fetch valid `uri_with_parameter` router: ") + return true + end + + return _M.matching(api_ctx) +end + + +function _M.matching(api_ctx) + core.log.info("route match mode: radixtree_uri_with_parameter") + return base_router.match_uri(uri_router, api_ctx) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/http/service.lua b/CloudronPackages/APISIX/apisix-source/apisix/http/service.lua new file mode 100644 index 0000000..97b224d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/http/service.lua @@ -0,0 +1,70 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local apisix_upstream = require("apisix.upstream") +local plugin_checker = require("apisix.plugin").plugin_checker +local services +local error = error + + +local _M = { + version = 0.2, +} + + +function _M.get(service_id) + return services:get(service_id) +end + + +function _M.services() + if not services then + return nil, nil + end + + return services.values, services.conf_version +end + + +local function filter(service) + service.has_domain = false + if not service.value then + return + end + + apisix_upstream.filter_upstream(service.value.upstream, service) + + core.log.info("filter service: ", core.json.delay_encode(service, true)) +end + + +function _M.init_worker() + local err + services, err = core.config.new("/services", { + automatic = true, + item_schema = core.schema.service, + checker = plugin_checker, + filter = filter, + }) + if not services then + error("failed to create etcd instance for fetching /services: " .. 
err) + return + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/include/apisix/model/pubsub.proto b/CloudronPackages/APISIX/apisix-source/apisix/include/apisix/model/pubsub.proto new file mode 100644 index 0000000..e5459e6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/include/apisix/model/pubsub.proto @@ -0,0 +1,143 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one or more +// contributor license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright ownership. +// The ASF licenses this file to You under the Apache License, Version 2.0 +// (the "License"); you may not use this file except in compliance with +// the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +option java_package = "org.apache.apisix.api.pubsub"; +option java_outer_classname = "PubSubProto"; +option java_multiple_files = true; +option go_package = "github.com/apache/apisix/api/pubsub;pubsub"; + +/** + * Ping command, used to keep the websocket connection alive + * + * The state field is used to pass some non-specific information, + * which will be returned in the pong response as is. + */ +message CmdPing { + bytes state = 1; +} + +/** + * An empty command, a placeholder for testing purposes only + */ +message CmdEmpty {} + +/** + * Get the offset of the specified topic partition from Apache Kafka. + */ +message CmdKafkaListOffset { + string topic = 1; + int32 partition = 2; + int64 timestamp = 3; +} + +/** + * Fetch messages of the specified topic partition from Apache Kafka. + */ +message CmdKafkaFetch { + string topic = 1; + int32 partition = 2; + int64 offset = 3; +} + +/** + * Client request definition for pubsub scenarios + * + * The sequence field is used to associate requests and responses. + * Apache APISIX will set a consistent sequence for the associated + * requests and responses, and the client can explicitly know the + * response corresponding to any of the requests. + * + * The req field is the command data sent by the client, and its + * type will be chosen from any of the lists in the definition. + * + * Field numbers 1 to 30 in the definition are used to define basic + * information and future extensions, and numbers after 30 are used + * to define commands. + */ +message PubSubReq { + int64 sequence = 1; + oneof req { + CmdEmpty cmd_empty = 31; + CmdPing cmd_ping = 32; + CmdKafkaFetch cmd_kafka_fetch = 33; + CmdKafkaListOffset cmd_kafka_list_offset = 34; + }; +} + +/** + * The response body of the service when an error occurs, + * containing the error code and the error message. + */ +message ErrorResp { + int32 code = 1; + string message = 2; +} + +/** + * Pong response, the state field will pass through the + * value in the Ping command field. + */ +message PongResp { + bytes state = 1; +} + +/** + * The definition of a message in Kafka with the current message + * offset, production timestamp, Key, and message content. 
+ */ +message KafkaMessage { + int64 offset = 1; + int64 timestamp = 2; + bytes key = 3; + bytes value = 4; +} + +/** + * The response of Fetch messages from Apache Kafka. + */ +message KafkaFetchResp { + repeated KafkaMessage messages = 1; +} + +/** + * The response of list offset from Apache Kafka. + */ +message KafkaListOffsetResp { + int64 offset = 1; +} + +/** + * Server response definition for pubsub scenarios + * + * The sequence field will be the same as the value in the + * request, which is used to associate the associated request + * and response. + * + * The resp field is the response data sent by the server, and + * its type will be chosen from any of the lists in the definition. + */ +message PubSubResp { + int64 sequence = 1; + oneof resp { + ErrorResp error_resp = 31; + PongResp pong_resp = 32; + KafkaFetchResp kafka_fetch_resp = 33; + KafkaListOffsetResp kafka_list_offset_resp = 34; + }; +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/init.lua new file mode 100644 index 0000000..b5ee018 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/init.lua @@ -0,0 +1,1253 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
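Regarding the pubsub protocol definitions above: clients talk to APISIX over a WebSocket connection and exchange protobuf-encoded PubSubReq/PubSubResp frames, paired by the sequence field. Leaving serialization aside, a Kafka fetch round trip has roughly the shape sketched below; the tables merely mirror the message definitions and the values are invented for illustration.

-- Illustrative message shapes only (values are made up); on the wire these are
-- protobuf-encoded PubSubReq / PubSubResp frames.
local fetch_req = {
    sequence = 1,                       -- echoed back so the client can pair req/resp
    cmd_kafka_fetch = {
        topic = "test-topic",
        partition = 0,
        offset = 42,
    },
}

local fetch_resp = {
    sequence = 1,                       -- same sequence as the request above
    kafka_fetch_resp = {
        messages = {
            { offset = 42, timestamp = 1693833600000, key = "k", value = "v" },
        },
    },
}

assert(fetch_req.sequence == fetch_resp.sequence)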
+-- +local require = require +-- set the JIT options before any code, to prevent error "changing jit stack size is not +-- allowed when some regexs have already been compiled and cached" +if require("ffi").os == "Linux" then + require("ngx.re").opt("jit_stack_size", 200 * 1024) +end + +require("jit.opt").start("minstitch=2", "maxtrace=4000", + "maxrecord=8000", "sizemcode=64", + "maxmcode=4000", "maxirconst=1000") + +require("apisix.patch").patch() +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local plugin_config = require("apisix.plugin_config") +local consumer_group = require("apisix.consumer_group") +local script = require("apisix.script") +local service_fetch = require("apisix.http.service").get +local admin_init = require("apisix.admin.init") +local get_var = require("resty.ngxvar").fetch +local router = require("apisix.router") +local apisix_upstream = require("apisix.upstream") +local apisix_secret = require("apisix.secret") +local set_upstream = apisix_upstream.set_by_route +local apisix_ssl = require("apisix.ssl") +local apisix_global_rules = require("apisix.global_rules") +local upstream_util = require("apisix.utils.upstream") +local xrpc = require("apisix.stream.xrpc") +local ctxdump = require("resty.ctxdump") +local debug = require("apisix.debug") +local pubsub_kafka = require("apisix.pubsub.kafka") +local ngx = ngx +local get_method = ngx.req.get_method +local ngx_exit = ngx.exit +local math = math +local ipairs = ipairs +local ngx_now = ngx.now +local ngx_var = ngx.var +local re_split = require("ngx.re").split +local str_byte = string.byte +local str_sub = string.sub +local tonumber = tonumber +local type = type +local pairs = pairs +local tostring = tostring +local ngx_re_match = ngx.re.match +local control_api_router + +local is_http = false +if ngx.config.subsystem == "http" then + is_http = true + control_api_router = require("apisix.control.router") +end + +local ok, apisix_base_flags = pcall(require, "resty.apisix.patch") +if not ok then + apisix_base_flags = {} +end + +local load_balancer +local local_conf +local ver_header = "APISIX/" .. 
core.version.VERSION + +local has_mod, apisix_ngx_client = pcall(require, "resty.apisix.client") + +local _M = {version = 0.4} + + +function _M.http_init(args) + core.resolver.init_resolver(args) + core.id.init() + core.env.init() + + local process = require("ngx.process") + local ok, err = process.enable_privileged_agent() + if not ok then + core.log.error("failed to enable privileged_agent: ", err) + end + + if core.config.init then + local ok, err = core.config.init() + if not ok then + core.log.error("failed to load the configuration: ", err) + end + end + + xrpc.init() +end + + +function _M.http_init_worker() + local seed, err = core.utils.get_seed_from_urandom() + if not seed then + core.log.warn('failed to get seed from urandom: ', err) + seed = ngx_now() * 1000 + ngx.worker.pid() + end + math.randomseed(seed) + -- for testing only + core.log.info("random test in [1, 10000]: ", math.random(1, 10000)) + + require("apisix.events").init_worker() + + local discovery = require("apisix.discovery.init").discovery + if discovery and discovery.init_worker then + discovery.init_worker() + end + require("apisix.balancer").init_worker() + load_balancer = require("apisix.balancer") + require("apisix.admin.init").init_worker() + + require("apisix.timers").init_worker() + + require("apisix.debug").init_worker() + + if core.config.init_worker then + local ok, err = core.config.init_worker() + if not ok then + core.log.error("failed to init worker process of ", core.config.type, + " config center, err: ", err) + end + end + + plugin.init_worker() + router.http_init_worker() + require("apisix.http.service").init_worker() + plugin_config.init_worker() + require("apisix.consumer").init_worker() + consumer_group.init_worker() + apisix_secret.init_worker() + + apisix_global_rules.init_worker() + + apisix_upstream.init_worker() + require("apisix.plugins.ext-plugin.init").init_worker() + + control_api_router.init_worker() + local_conf = core.config.local_conf() + + if local_conf.apisix and local_conf.apisix.enable_server_tokens == false then + ver_header = "APISIX" + end +end + + +function _M.http_exit_worker() + -- TODO: we can support stream plugin later - currently there is not `destroy` method + -- in stream plugins + plugin.exit_worker() + require("apisix.plugins.ext-plugin.init").exit_worker() +end + + +function _M.ssl_phase() + local ok, err = router.router_ssl.set(ngx.ctx.matched_ssl) + if not ok then + if err then + core.log.error("failed to fetch ssl config: ", err) + end + ngx_exit(-1) + end +end + + +function _M.ssl_client_hello_phase() + local sni, err = apisix_ssl.server_name(true) + if not sni or type(sni) ~= "string" then + local advise = "please check if the client requests via IP or uses an outdated " .. + "protocol. If you need to report an issue, " .. + "provide a packet capture file of the TLS handshake." + core.log.error("failed to find SNI: " .. 
(err or advise)) + ngx_exit(-1) + end + local tls_ext_status_req = apisix_ssl.get_status_request_ext() + + local ngx_ctx = ngx.ctx + local api_ctx = core.tablepool.fetch("api_ctx", 0, 32) + ngx_ctx.api_ctx = api_ctx + + local ok, err = router.router_ssl.match_and_set(api_ctx, true, sni) + + ngx_ctx.matched_ssl = api_ctx.matched_ssl + core.tablepool.release("api_ctx", api_ctx) + ngx_ctx.api_ctx = nil + ngx_ctx.tls_ext_status_req = tls_ext_status_req + + if not ok then + if err then + core.log.error("failed to fetch ssl config: ", err) + end + core.log.error("failed to match any SSL certificate by SNI: ", sni) + ngx_exit(-1) + end + + ok, err = apisix_ssl.set_protocols_by_clienthello(ngx_ctx.matched_ssl.value.ssl_protocols) + if not ok then + core.log.error("failed to set ssl protocols: ", err) + ngx_exit(-1) + end + + -- in stream subsystem, ngx.ssl.server_name() return hostname of ssl session in preread phase, + -- so that we can't get real SNI without recording it in ngx.ctx during client_hello phase + ngx.ctx.client_hello_sni = sni +end + + +local function stash_ngx_ctx() + local ref = ctxdump.stash_ngx_ctx() + core.log.info("stash ngx ctx: ", ref) + ngx_var.ctx_ref = ref +end + + +local function fetch_ctx() + local ref = ngx_var.ctx_ref + core.log.info("fetch ngx ctx: ", ref) + local ctx = ctxdump.apply_ngx_ctx(ref) + ngx_var.ctx_ref = '' + return ctx +end + + +local function parse_domain_in_route(route) + local nodes = route.value.upstream.nodes + local new_nodes, err = upstream_util.parse_domain_for_nodes(nodes) + if not new_nodes then + return nil, err + end + + local up_conf = route.dns_value and route.dns_value.upstream + local ok = upstream_util.compare_upstream_node(up_conf, new_nodes) + if ok then + return route + end + + -- don't modify the modifiedIndex to avoid plugin cache miss because of DNS resolve result + -- has changed + + route.dns_value = core.table.deepcopy(route.value, { shallows = { "self.upstream.parent"}}) + route.dns_value.upstream.nodes = new_nodes + core.log.info("parse route which contain domain: ", + core.json.delay_encode(route, true)) + return route +end + + +local function set_upstream_host(api_ctx, picked_server) + local up_conf = api_ctx.upstream_conf + if up_conf.pass_host then + api_ctx.pass_host = up_conf.pass_host + api_ctx.upstream_host = up_conf.upstream_host + end + + local pass_host = api_ctx.pass_host or "pass" + if pass_host == "pass" then + return + end + + if pass_host == "rewrite" then + api_ctx.var.upstream_host = api_ctx.upstream_host + return + end + + api_ctx.var.upstream_host = picked_server.upstream_host +end + + +local function set_upstream_headers(api_ctx, picked_server) + set_upstream_host(api_ctx, picked_server) + + local proto = api_ctx.var.http_x_forwarded_proto + if proto then + api_ctx.var.var_x_forwarded_proto = proto + end + + local x_forwarded_host = api_ctx.var.http_x_forwarded_host + if x_forwarded_host then + api_ctx.var.var_x_forwarded_host = x_forwarded_host + end + + local port = api_ctx.var.http_x_forwarded_port + if port then + api_ctx.var.var_x_forwarded_port = port + end +end + + +-- verify the TLS session resumption by checking if the SNI in the client hello +-- matches the hostname of the SSL session, this is to prevent the mTLS bypass security issue. 
+local function verify_tls_session_resumption() + local session_hostname, err = apisix_ssl.session_hostname() + if err then + core.log.error("failed to get session hostname: ", err) + return false + end + if session_hostname and session_hostname ~= ngx.ctx.client_hello_sni then + core.log.error("sni in client hello mismatch hostname of ssl session, ", + "sni: ", ngx.ctx.client_hello_sni, ", hostname: ", session_hostname) + return false + end + + return true +end + + +local function verify_tls_client(ctx) + local matched = router.router_ssl.match_and_set(ctx, true) + if not matched then + return true + end + + local matched_ssl = ctx.matched_ssl + if matched_ssl.value.client and apisix_ssl.support_client_verification() then + local res = ngx_var.ssl_client_verify + if res ~= "SUCCESS" then + if res == "NONE" then + core.log.error("client certificate was not present") + else + core.log.error("client certificate verification is not passed: ", res) + end + + return false + end + + if not verify_tls_session_resumption() then + return false + end + end + + return true +end + + +local function uri_matches_skip_mtls_route_patterns(ssl, uri) + for _, pat in ipairs(ssl.value.client.skip_mtls_uri_regex) do + if ngx_re_match(uri, pat, "jo") then + return true + end + end +end + + +local function verify_https_client(ctx) + local scheme = ctx.var.scheme + if scheme ~= "https" then + return true + end + + local matched_ssl = ngx.ctx.matched_ssl + if matched_ssl.value.client + and matched_ssl.value.client.skip_mtls_uri_regex + and apisix_ssl.support_client_verification() + and (not uri_matches_skip_mtls_route_patterns(matched_ssl, ngx.var.uri)) then + local res = ctx.var.ssl_client_verify + if res ~= "SUCCESS" then + if res == "NONE" then + core.log.error("client certificate was not present") + else + core.log.error("client certificate verification is not passed: ", res) + end + + return false + end + end + + local host = ctx.var.host + local matched = router.router_ssl.match_and_set(ctx, true, host) + if not matched then + return true + end + + local matched_ssl = ctx.matched_ssl + if matched_ssl.value.client and apisix_ssl.support_client_verification() then + local verified = apisix_base_flags.client_cert_verified_in_handshake + if not verified then + -- vanilla OpenResty requires to check the verification result + local res = ctx.var.ssl_client_verify + if res ~= "SUCCESS" then + if res == "NONE" then + core.log.error("client certificate was not present") + else + core.log.error("client certificate verification is not passed: ", res) + end + + return false + end + end + + local sni = apisix_ssl.server_name() + if sni ~= host then + -- There is a case that the user configures a SSL object with `*.domain`, + -- and the client accesses with SNI `a.domain` but uses Host `b.domain`. + -- This case is complex and we choose to restrict the access until there + -- is a stronge demand in real world. 
+ core.log.error("client certificate verified with SNI ", sni, + ", but the host is ", host) + return false + end + + if not verify_tls_session_resumption() then + return false + end + end + + return true +end + + +local function normalize_uri_like_servlet(uri) + local found = core.string.find(uri, ';') + if not found then + return uri + end + + local segs, err = re_split(uri, "/", "jo") + if not segs then + return nil, err + end + + local len = #segs + for i = 1, len do + local seg = segs[i] + local pos = core.string.find(seg, ';') + if pos then + seg = seg:sub(1, pos - 1) + -- reject bad uri which bypasses with ';' + if seg == "." or seg == ".." then + return nil, "dot segment with parameter" + end + if seg == "" and i < len then + return nil, "empty segment with parameters" + end + + segs[i] = seg + + seg = seg:lower() + if seg == "%2e" or seg == "%2e%2e" then + return nil, "encoded dot segment" + end + end + end + + return core.table.concat(segs, '/') +end + + +local function common_phase(phase_name) + local api_ctx = ngx.ctx.api_ctx + if not api_ctx then + return + end + + plugin.run_global_rules(api_ctx, api_ctx.global_rules, phase_name) + + if api_ctx.script_obj then + script.run(phase_name, api_ctx) + return api_ctx, true + end + + return plugin.run_plugin(phase_name, nil, api_ctx) +end + + + +function _M.handle_upstream(api_ctx, route, enable_websocket) + -- some plugins(ai-proxy...) request upstream by http client directly + if api_ctx.bypass_nginx_upstream then + common_phase("before_proxy") + return + end + + local up_id = route.value.upstream_id + + -- used for the traffic-split plugin + if api_ctx.upstream_id then + up_id = api_ctx.upstream_id + end + + if up_id then + local upstream = apisix_upstream.get_by_id(up_id) + if not upstream then + if is_http then + return core.response.exit(502) + end + + return ngx_exit(1) + end + + api_ctx.matched_upstream = upstream + + else + if route.has_domain then + local err + route, err = parse_domain_in_route(route) + if err then + core.log.error("failed to get resolved route: ", err) + return core.response.exit(500) + end + + api_ctx.conf_version = route.modifiedIndex + api_ctx.matched_route = route + end + + local route_val = route.value + + api_ctx.matched_upstream = (route.dns_value and + route.dns_value.upstream) + or route_val.upstream + end + + if api_ctx.matched_upstream and api_ctx.matched_upstream.tls and + api_ctx.matched_upstream.tls.client_cert_id then + + local cert_id = api_ctx.matched_upstream.tls.client_cert_id + local upstream_ssl = router.router_ssl.get_by_id(cert_id) + if not upstream_ssl or upstream_ssl.type ~= "client" then + local err = upstream_ssl and + "ssl type should be 'client'" or + "ssl id [" .. cert_id .. 
"] not exits" + core.log.error("failed to get ssl cert: ", err) + + if is_http then + return core.response.exit(502) + end + + return ngx_exit(1) + end + + core.log.info("matched ssl: ", + core.json.delay_encode(upstream_ssl, true)) + api_ctx.upstream_ssl = upstream_ssl + end + + if enable_websocket then + api_ctx.var.upstream_upgrade = api_ctx.var.http_upgrade + api_ctx.var.upstream_connection = api_ctx.var.http_connection + core.log.info("enabled websocket for route: ", route.value.id) + end + + -- load balancer is not required by kafka upstream, so the upstream + -- node selection process is intercepted and left to kafka to + -- handle on its own + if api_ctx.matched_upstream and api_ctx.matched_upstream.scheme == "kafka" then + return pubsub_kafka.access(api_ctx) + end + + local code, err = set_upstream(route, api_ctx) + if code then + core.log.error("failed to set upstream: ", err) + core.response.exit(code) + end + + local server, err = load_balancer.pick_server(route, api_ctx) + if not server then + core.log.error("failed to pick server: ", err) + return core.response.exit(502) + end + + api_ctx.picked_server = server + + set_upstream_headers(api_ctx, server) + + -- run the before_proxy method in access phase first to avoid always reinit request + common_phase("before_proxy") + + local up_scheme = api_ctx.upstream_scheme + if up_scheme == "grpcs" or up_scheme == "grpc" then + stash_ngx_ctx() + return ngx.exec("@grpc_pass") + end + + if api_ctx.dubbo_proxy_enabled then + stash_ngx_ctx() + return ngx.exec("@dubbo_pass") + end +end + + +function _M.http_access_phase() + -- from HTTP/3 to HTTP/1.1 we need to convert :authority pesudo-header + -- to Host header, so we set upstream_host variable here. + if ngx.req.http_version() == 3 then + ngx.var.upstream_host = ngx.var.host .. ":" .. ngx.var.server_port + end + local ngx_ctx = ngx.ctx + + -- always fetch table from the table pool, we don't need a reused api_ctx + local api_ctx = core.tablepool.fetch("api_ctx", 0, 32) + ngx_ctx.api_ctx = api_ctx + + core.ctx.set_vars_meta(api_ctx) + + if not verify_https_client(api_ctx) then + return core.response.exit(400) + end + + debug.dynamic_debug(api_ctx) + + local uri = api_ctx.var.uri + if local_conf.apisix then + if local_conf.apisix.delete_uri_tail_slash then + if str_byte(uri, #uri) == str_byte("/") then + api_ctx.var.uri = str_sub(api_ctx.var.uri, 1, #uri - 1) + core.log.info("remove the end of uri '/', current uri: ", api_ctx.var.uri) + end + end + + if local_conf.apisix.normalize_uri_like_servlet then + local new_uri, err = normalize_uri_like_servlet(uri) + if not new_uri then + core.log.error("failed to normalize: ", err) + return core.response.exit(400) + end + + api_ctx.var.uri = new_uri + -- forward the original uri so the servlet upstream + -- can consume the param after ';' + api_ctx.var.upstream_uri = uri + end + end + + -- To prevent being hacked by untrusted request_uri, here we + -- record the normalized but not rewritten uri as request_uri, + -- the original request_uri can be accessed via var.real_request_uri + api_ctx.var.real_request_uri = api_ctx.var.request_uri + api_ctx.var.request_uri = api_ctx.var.uri .. api_ctx.var.is_args .. 
(api_ctx.var.args or "") + + router.router_http.match(api_ctx) + + local route = api_ctx.matched_route + if not route then + -- run global rule when there is no matching route + local global_rules = apisix_global_rules.global_rules() + plugin.run_global_rules(api_ctx, global_rules, nil) + + core.log.info("not find any matched route") + return core.response.exit(404, + {error_msg = "404 Route Not Found"}) + end + + core.log.info("matched route: ", + core.json.delay_encode(api_ctx.matched_route, true)) + + local enable_websocket = route.value.enable_websocket + + if route.value.plugin_config_id then + local conf = plugin_config.get(route.value.plugin_config_id) + if not conf then + core.log.error("failed to fetch plugin config by ", + "id: ", route.value.plugin_config_id) + return core.response.exit(503) + end + + route = plugin_config.merge(route, conf) + end + + if route.value.service_id then + local service = service_fetch(route.value.service_id) + if not service then + core.log.error("failed to fetch service configuration by ", + "id: ", route.value.service_id) + return core.response.exit(404) + end + + route = plugin.merge_service_route(service, route) + api_ctx.matched_route = route + api_ctx.conf_type = "route&service" + api_ctx.conf_version = route.modifiedIndex .. "&" .. service.modifiedIndex + api_ctx.conf_id = route.value.id .. "&" .. service.value.id + api_ctx.service_id = service.value.id + api_ctx.service_name = service.value.name + + if enable_websocket == nil then + enable_websocket = service.value.enable_websocket + end + + else + api_ctx.conf_type = "route" + api_ctx.conf_version = route.modifiedIndex + api_ctx.conf_id = route.value.id + end + api_ctx.route_id = route.value.id + api_ctx.route_name = route.value.name + + -- run global rule + local global_rules = apisix_global_rules.global_rules() + plugin.run_global_rules(api_ctx, global_rules, nil) + + if route.value.script then + script.load(route, api_ctx) + script.run("access", api_ctx) + + else + local plugins = plugin.filter(api_ctx, route) + api_ctx.plugins = plugins + + plugin.run_plugin("rewrite", plugins, api_ctx) + if api_ctx.consumer then + local changed + local group_conf + + if api_ctx.consumer.group_id then + group_conf = consumer_group.get(api_ctx.consumer.group_id) + if not group_conf then + core.log.error("failed to fetch consumer group config by ", + "id: ", api_ctx.consumer.group_id) + return core.response.exit(503) + end + end + + route, changed = plugin.merge_consumer_route( + route, + api_ctx.consumer, + group_conf, + api_ctx + ) + + core.log.info("find consumer ", api_ctx.consumer.username, + ", config changed: ", changed) + + if changed then + api_ctx.matched_route = route + core.table.clear(api_ctx.plugins) + local phase = "rewrite_in_consumer" + api_ctx.plugins = plugin.filter(api_ctx, route, api_ctx.plugins, nil, phase) + -- rerun rewrite phase for newly added plugins in consumer + plugin.run_plugin(phase, api_ctx.plugins, api_ctx) + end + end + plugin.run_plugin("access", plugins, api_ctx) + end + + _M.handle_upstream(api_ctx, route, enable_websocket) +end + + +function _M.dubbo_access_phase() + ngx.ctx = fetch_ctx() +end + + +function _M.grpc_access_phase() + ngx.ctx = fetch_ctx() + + local api_ctx = ngx.ctx.api_ctx + if not api_ctx then + return + end + + local code, err = apisix_upstream.set_grpcs_upstream_param(api_ctx) + if code then + core.log.error("failed to set grpcs upstream param: ", err) + core.response.exit(code) + end + + if api_ctx.enable_mirror == true and has_mod then + 
apisix_ngx_client.enable_mirror() + end +end + + +local function set_resp_upstream_status(up_status) + local_conf = core.config.local_conf() + + if local_conf.apisix and local_conf.apisix.show_upstream_status_in_response_header then + core.response.set_header("X-APISIX-Upstream-Status", up_status) + elseif #up_status == 3 then + if tonumber(up_status) >= 500 and tonumber(up_status) <= 599 then + core.response.set_header("X-APISIX-Upstream-Status", up_status) + end + elseif #up_status > 3 then + -- the up_status can be "502, 502" or "502, 502 : " + local last_status + if str_byte(up_status, -1) == str_byte(" ") then + last_status = str_sub(up_status, -6, -3) + else + last_status = str_sub(up_status, -3) + end + + if tonumber(last_status) >= 500 and tonumber(last_status) <= 599 then + core.response.set_header("X-APISIX-Upstream-Status", up_status) + end + end +end + + +function _M.http_header_filter_phase() + core.response.set_header("Server", ver_header) + + local up_status = get_var("upstream_status") + if up_status then + set_resp_upstream_status(up_status) + end + + common_phase("header_filter") + + local api_ctx = ngx.ctx.api_ctx + if not api_ctx then + return + end + + local debug_headers = api_ctx.debug_headers + if debug_headers then + local deduplicate = core.table.new(core.table.nkeys(debug_headers), 0) + for k, v in pairs(debug_headers) do + core.table.insert(deduplicate, k) + end + core.response.set_header("Apisix-Plugins", core.table.concat(deduplicate, ", ")) + end +end + + +function _M.http_body_filter_phase() + common_phase("body_filter") + common_phase("delayed_body_filter") +end + + +local function healthcheck_passive(api_ctx) + local checker = api_ctx.up_checker + if not checker then + return + end + + local up_conf = api_ctx.upstream_conf + local passive = up_conf.checks.passive + if not passive then + return + end + + core.log.info("enabled healthcheck passive") + local host = up_conf.checks and up_conf.checks.active + and up_conf.checks.active.host + local port = up_conf.checks and up_conf.checks.active + and up_conf.checks.active.port or api_ctx.balancer_port + + local resp_status = ngx.status + + if not is_http then + -- 200 is the only success status code for TCP + if resp_status ~= 200 then + checker:report_tcp_failure(api_ctx.balancer_ip, port, host, nil, "passive") + end + return + end + + local http_statuses = passive and passive.healthy and + passive.healthy.http_statuses + core.log.info("passive.healthy.http_statuses: ", + core.json.delay_encode(http_statuses)) + if http_statuses then + for i, status in ipairs(http_statuses) do + if resp_status == status then + checker:report_http_status(api_ctx.balancer_ip, + port, + host, + resp_status) + end + end + end + + http_statuses = passive and passive.unhealthy and + passive.unhealthy.http_statuses + core.log.info("passive.unhealthy.http_statuses: ", + core.json.delay_encode(http_statuses)) + if not http_statuses then + return + end + + for i, status in ipairs(http_statuses) do + if resp_status == status then + checker:report_http_status(api_ctx.balancer_ip, + port, + host, + resp_status) + end + end +end + + +function _M.status() + core.response.exit(200, core.json.encode({ status = "ok" })) +end + +function _M.status_ready() + local local_conf = core.config.local_conf() + local role = core.table.try_read_attr(local_conf, "deployment", "role") + local provider = core.table.try_read_attr(local_conf, "deployment", "role_" .. 
+ role, "config_provider") + if provider == "yaml" or provider == "etcd" then + local status_shdict = ngx.shared["status-report"] + local ids = status_shdict:get_keys() + local error + local worker_count = ngx.worker.count() + if #ids ~= worker_count then + core.log.warn("worker count: ", worker_count, " but status report count: ", #ids) + error = "worker count: " .. ngx.worker.count() .. + " but status report count: " .. #ids + end + if error then + core.response.exit(503, core.json.encode({ + status = "error", + error = error + })) + return + end + for _, id in ipairs(ids) do + local ready = status_shdict:get(id) + if not ready then + core.log.warn("worker id: ", id, " has not received configuration") + error = "worker id: " .. id .. + " has not received configuration" + break + end + end + + if error then + core.response.exit(503, core.json.encode({ + status = "error", + error = error + })) + return + end + + core.response.exit(200, core.json.encode({ status = "ok" })) + return + end + + core.response.exit(503, core.json.encode({ + status = "error", + message = "unknown config provider: " .. tostring(provider) + }), { ["Content-Type"] = "application/json" }) +end + + +function _M.http_log_phase() + local api_ctx = common_phase("log") + if not api_ctx then + return + end + + healthcheck_passive(api_ctx) + + if api_ctx.server_picker and api_ctx.server_picker.after_balance then + api_ctx.server_picker.after_balance(api_ctx, false) + end + + core.ctx.release_vars(api_ctx) + if api_ctx.plugins then + core.tablepool.release("plugins", api_ctx.plugins) + end + + if api_ctx.curr_req_matched then + core.tablepool.release("matched_route_record", api_ctx.curr_req_matched) + end + + core.tablepool.release("api_ctx", api_ctx) +end + + +function _M.http_balancer_phase() + local api_ctx = ngx.ctx.api_ctx + if not api_ctx then + core.log.error("invalid api_ctx") + return core.response.exit(500) + end + + load_balancer.run(api_ctx.matched_route, api_ctx, common_phase) +end + + +local function cors_admin() + local_conf = core.config.local_conf() + if not core.table.try_read_attr(local_conf, "deployment", "admin", "enable_admin_cors") then + return + end + + local method = get_method() + if method == "OPTIONS" then + core.response.set_header("Access-Control-Allow-Origin", "*", + "Access-Control-Allow-Methods", + "POST, GET, PUT, OPTIONS, DELETE, PATCH", + "Access-Control-Max-Age", "3600", + "Access-Control-Allow-Headers", "*", + "Access-Control-Allow-Credentials", "true", + "Content-Length", "0", + "Content-Type", "text/plain") + ngx_exit(200) + end + + core.response.set_header("Access-Control-Allow-Origin", "*", + "Access-Control-Allow-Credentials", "true", + "Access-Control-Expose-Headers", "*", + "Access-Control-Max-Age", "3600") +end + +local function add_content_type() + core.response.set_header("Content-Type", "application/json") +end + +do + local router + +function _M.http_admin() + if not router then + router = admin_init.get() + end + + core.response.set_header("Server", ver_header) + -- add cors rsp header + cors_admin() + + -- add content type to rsp header + add_content_type() + + -- core.log.info("uri: ", get_var("uri"), " method: ", get_method()) + local ok = router:dispatch(get_var("uri"), {method = get_method()}) + if not ok then + ngx_exit(404) + end +end + +end -- do + + +function _M.http_control() + local ok = control_api_router.match(get_var("uri")) + if not ok then + ngx_exit(404) + end +end + + +function _M.stream_init(args) + core.log.info("enter stream_init") + + 
core.resolver.init_resolver(args) + + if core.config.init then + local ok, err = core.config.init() + if not ok then + core.log.error("failed to load the configuration: ", err) + end + end + + xrpc.init() +end + + +function _M.stream_init_worker() + core.log.info("enter stream_init_worker") + local seed, err = core.utils.get_seed_from_urandom() + if not seed then + core.log.warn('failed to get seed from urandom: ', err) + seed = ngx_now() * 1000 + ngx.worker.pid() + end + math.randomseed(seed) + -- for testing only + core.log.info("random stream test in [1, 10000]: ", math.random(1, 10000)) + + if core.config.init_worker then + local ok, err = core.config.init_worker() + if not ok then + core.log.error("failed to init worker process of ", core.config.type, + " config center, err: ", err) + end + end + + plugin.init_worker() + xrpc.init_worker() + router.stream_init_worker() + require("apisix.http.service").init_worker() + apisix_upstream.init_worker() + + require("apisix.events").init_worker() + + local discovery = require("apisix.discovery.init").discovery + if discovery and discovery.init_worker then + discovery.init_worker() + end + + load_balancer = require("apisix.balancer") + + local_conf = core.config.local_conf() +end + + +function _M.stream_preread_phase() + local ngx_ctx = ngx.ctx + local api_ctx = core.tablepool.fetch("api_ctx", 0, 32) + ngx_ctx.api_ctx = api_ctx + + if not verify_tls_client(api_ctx) then + return ngx_exit(1) + end + + core.ctx.set_vars_meta(api_ctx) + + local ok, err = router.router_stream.match(api_ctx) + if not ok then + core.log.error(err) + return ngx_exit(1) + end + + core.log.info("matched route: ", + core.json.delay_encode(api_ctx.matched_route, true)) + + local matched_route = api_ctx.matched_route + if not matched_route then + return ngx_exit(1) + end + + + local up_id = matched_route.value.upstream_id + if up_id then + local upstream = apisix_upstream.get_by_id(up_id) + if not upstream then + if is_http then + return core.response.exit(502) + end + + return ngx_exit(1) + end + + api_ctx.matched_upstream = upstream + + elseif matched_route.value.service_id then + local service = service_fetch(matched_route.value.service_id) + if not service then + core.log.error("failed to fetch service configuration by ", + "id: ", matched_route.value.service_id) + return core.response.exit(404) + end + + matched_route = plugin.merge_service_stream_route(service, matched_route) + api_ctx.matched_route = matched_route + api_ctx.conf_type = "stream_route&service" + api_ctx.conf_version = matched_route.modifiedIndex .. "&" .. service.modifiedIndex + api_ctx.conf_id = matched_route.value.id .. "&" .. 
service.value.id + api_ctx.service_id = service.value.id + api_ctx.service_name = service.value.name + api_ctx.matched_upstream = matched_route.value.upstream + if matched_route.value.upstream_id and not matched_route.value.upstream then + local upstream = apisix_upstream.get_by_id(matched_route.value.upstream_id) + if not upstream then + if is_http then + return core.response.exit(502) + end + + return ngx_exit(1) + end + + api_ctx.matched_upstream = upstream + end + else + if matched_route.has_domain then + local err + matched_route, err = parse_domain_in_route(matched_route) + if err then + core.log.error("failed to get resolved route: ", err) + return ngx_exit(1) + end + + api_ctx.matched_route = matched_route + end + + local route_val = matched_route.value + api_ctx.matched_upstream = (matched_route.dns_value and + matched_route.dns_value.upstream) + or route_val.upstream + end + + local plugins = core.tablepool.fetch("plugins", 32, 0) + api_ctx.plugins = plugin.stream_filter(matched_route, plugins) + -- core.log.info("valid plugins: ", core.json.delay_encode(plugins, true)) + + api_ctx.conf_type = "stream/route" + api_ctx.conf_version = matched_route.modifiedIndex + api_ctx.conf_id = matched_route.value.id + + plugin.run_plugin("preread", plugins, api_ctx) + + if matched_route.value.protocol then + xrpc.run_protocol(matched_route.value.protocol, api_ctx) + return + end + + local code, err = set_upstream(matched_route, api_ctx) + if code then + core.log.error("failed to set upstream: ", err) + return ngx_exit(1) + end + + local server, err = load_balancer.pick_server(matched_route, api_ctx) + if not server then + core.log.error("failed to pick server: ", err) + return ngx_exit(1) + end + + api_ctx.picked_server = server + + -- run the before_proxy method in preread phase first to avoid always reinit request + common_phase("before_proxy") +end + + +function _M.stream_balancer_phase() + core.log.info("enter stream_balancer_phase") + local api_ctx = ngx.ctx.api_ctx + if not api_ctx then + core.log.error("invalid api_ctx") + return ngx_exit(1) + end + + load_balancer.run(api_ctx.matched_route, api_ctx, common_phase) +end + + +function _M.stream_log_phase() + core.log.info("enter stream_log_phase") + + local api_ctx = plugin.run_plugin("log") + if not api_ctx then + return + end + + healthcheck_passive(api_ctx) + + core.ctx.release_vars(api_ctx) + if api_ctx.plugins then + core.tablepool.release("plugins", api_ctx.plugins) + end + + core.tablepool.release("api_ctx", api_ctx) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/inspect/dbg.lua b/CloudronPackages/APISIX/apisix-source/apisix/inspect/dbg.lua new file mode 100644 index 0000000..2fd7878 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/inspect/dbg.lua @@ -0,0 +1,163 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local string_format = string.format +local debug = debug +local ipairs = ipairs +local pcall = pcall +local table_insert = table.insert +local jit = jit + +local _M = {} + +local hooks = {} + +function _M.getname(n) + if n.what == "C" then + return n.name + end + local lc = string_format("%s:%d", n.short_src, n.currentline) + if n.what ~= "main" and n.namewhat ~= "" then + return string_format("%s (%s)", lc, n.name) + else + return lc + end +end + +local function hook(_, arg) + local level = 2 + local finfo = debug.getinfo(level, "nSlf") + local key = finfo.source .. "#" .. arg + + local hooks2 = {} + local removed_hooks = {} + for _, hook in ipairs(hooks) do + if key:sub(-#hook.key) == hook.key then + local filter_func = hook.filter_func + local info = {finfo = finfo, uv = {}, vals = {}} + + -- upvalues + local i = 1 + while true do + local name, value = debug.getupvalue(finfo.func, i) + if name == nil then break end + if name:sub(1, 1) ~= "(" then + info.uv[name] = value + end + i = i + 1 + end + + -- local values + local i = 1 + while true do + local name, value = debug.getlocal(level, i) + if not name then break end + if name:sub(1, 1) ~= "(" then + info.vals[name] = value + end + i = i + 1 + end + + local r1, r2_or_err = pcall(filter_func, info) + if not r1 then + core.log.error("inspect: pcall filter_func:", r2_or_err) + table_insert(removed_hooks, hook) + elseif r2_or_err == false then + -- if filter_func returns false, keep the hook + table_insert(hooks2, hook) + else + table_insert(removed_hooks, hook) + end + else + -- key not match, keep the hook + table_insert(hooks2, hook) + end + end + + for _, hook in ipairs(removed_hooks) do + core.log.warn("inspect: remove hook: ", hook.key) + end + + -- disable debug mode if all hooks done + if #hooks2 ~= #hooks then + hooks = hooks2 + if #hooks == 0 then + core.log.warn("inspect: all hooks removed") + debug.sethook() + if jit then + jit.on() + end + end + end +end + +function _M.set_hook(file, line, func, filter_func) + if file == nil then + file = "=stdin" + end + + local key = file .. "#" .. line + table_insert(hooks, {key = key, filter_func = filter_func}) + + if jit then + jit.flush(func) + jit.off() + end + + debug.sethook(hook, "l") +end + +function _M.unset_hook(file, line) + if file == nil then + file = "=stdin" + end + + local hooks2 = {} + + local key = file .. "#" .. line + for i, hook in ipairs(hooks) do + if hook.key ~= key then + table_insert(hooks2, hook) + end + end + + if #hooks2 ~= #hooks then + hooks = hooks2 + if #hooks == 0 then + debug.sethook() + if jit then + jit.on() + end + end + end +end + +function _M.unset_all() + if #hooks > 0 then + hooks = {} + debug.sethook() + if jit then + jit.on() + end + end +end + +function _M.hooks() + return hooks +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/inspect/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/inspect/init.lua new file mode 100644 index 0000000..7014b61 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/inspect/init.lua @@ -0,0 +1,128 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local dbg = require("apisix.inspect.dbg") +local lfs = require("lfs") +local pl_path = require("pl.path") +local io = io +local table_insert = table.insert +local pcall = pcall +local ipairs = ipairs +local os = os +local ngx = ngx +local loadstring = loadstring +local format = string.format + +local _M = {} + +local last_modified = 0 + +local stop = false + +local running = false + +local last_report_time = 0 + +local REPORT_INTERVAL = 30 -- secs + +local function run_lua_file(file) + local f, err = io.open(file, "rb") + if not f then + return false, err + end + local code, err = f:read("*all") + f:close() + if code == nil then + return false, format("cannot read hooks file: %s", err) + end + local func, err = loadstring(code) + if not func then + return false, err + end + func() + return true +end + +local function setup_hooks(file) + if pl_path.exists(file) then + dbg.unset_all() + local _, err = pcall(run_lua_file, file) + local hooks = {} + for _, hook in ipairs(dbg.hooks()) do + table_insert(hooks, hook.key) + end + core.log.warn("set hooks: err: ", err, ", hooks: ", core.json.delay_encode(hooks)) + end +end + +local function reload_hooks(premature, delay, file) + if premature or stop then + stop = false + running = false + return + end + + local time, err = lfs.attributes(file, 'modification') + if err then + if last_modified ~= 0 then + core.log.info(err, ", disable all hooks") + dbg.unset_all() + last_modified = 0 + end + elseif time ~= last_modified then + setup_hooks(file) + last_modified = time + else + local ts = os.time() + if ts - last_report_time >= REPORT_INTERVAL then + local hooks = {} + for _, hook in ipairs(dbg.hooks()) do + table_insert(hooks, hook.key) + end + core.log.info("alive hooks: ", core.json.encode(hooks)) + last_report_time = ts + end + end + + local ok, err = ngx.timer.at(delay, reload_hooks, delay, file) + if not ok then + core.log.error("failed to create the timer: ", err) + running = false + end +end + +function _M.init(delay, file) + if not running then + file = file or "/usr/local/apisix/plugin_inspect_hooks.lua" + delay = delay or 3 + + setup_hooks(file) + + local ok, err = ngx.timer.at(delay, reload_hooks, delay, file) + if not ok then + core.log.error("failed to create the timer: ", err) + return + end + running = true + end +end + +function _M.destroy() + stop = true +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/patch.lua b/CloudronPackages/APISIX/apisix-source/apisix/patch.lua new file mode 100644 index 0000000..2b191b2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/patch.lua @@ -0,0 +1,384 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local require = require
+require("resty.dns.resolver") -- preload dns resolver to prevent recursive patch
+local ipmatcher = require("resty.ipmatcher")
+local socket = require("socket")
+local unix_socket = require("socket.unix")
+local ssl = require("ssl")
+local ngx = ngx
+local get_phase = ngx.get_phase
+local ngx_socket = ngx.socket
+local original_tcp = ngx.socket.tcp
+local original_udp = ngx.socket.udp
+local concat_tab = table.concat
+local debug = debug
+local new_tab = require("table.new")
+local log = ngx.log
+local WARN = ngx.WARN
+local ipairs = ipairs
+local select = select
+local setmetatable = setmetatable
+local string = string
+local table = table
+local type = type
+local tonumber = tonumber
+
+
+local config_local
+local _M = {}
+
+
+local function get_local_conf()
+ if not config_local then
+ config_local = require("apisix.core.config_local")
+ end
+
+ return config_local.local_conf()
+end
+
+
+local patch_tcp_socket
+do
+ local old_tcp_sock_connect
+
+ local function new_tcp_sock_connect(sock, host, port, opts)
+ local core_str = require("apisix.core.string")
+ local resolver = require("apisix.core.resolver")
+
+ if host then
+ if core_str.has_prefix(host, "unix:") then
+ if not opts then
+ -- workaround for https://github.com/openresty/lua-nginx-module/issues/860
+ return old_tcp_sock_connect(sock, host)
+ end
+
+ elseif not ipmatcher.parse_ipv4(host) and not ipmatcher.parse_ipv6(host) then
+ local err
+ host, err = resolver.parse_domain(host)
+ if not host then
+ return nil, "failed to parse domain: " .. err
+ end
+ end
+ end
+
+ return old_tcp_sock_connect(sock, host, port, opts)
+ end
+
+
+ function patch_tcp_socket(sock)
+ if not old_tcp_sock_connect then
+ old_tcp_sock_connect = sock.connect
+ end
+
+ sock.connect = new_tcp_sock_connect
+ return sock
+ end
+end
+
+
+do -- `math.randomseed` patch
+ -- `math.random` generates pseudo-random numbers from the seed set by `math.randomseed`.
+ -- Many libraries use `ngx.time` and `ngx.worker.pid` to generate seeds, which may
+ -- lose randomness in container environments (where pids are identical, e.g. the root pid is 1).
+ -- Kubernetes may also launch multiple instances of a Deployment/ReplicaSet at the same
+ -- time, so `ngx.time` may return the same value in every pod.
+ -- Therefore, this global patch forces the entire framework to use
+ -- a best-practice source of randomness for seeding.
+
+ local resty_random = require("resty.random")
+ local math_randomseed = math.randomseed
+ local seeded = {}
+
+ -- make linter happy
+ -- luacheck: ignore
+ math.randomseed = function()
+ local worker_pid = ngx.worker.pid()
+
+ -- check seed mark
+ if seeded[worker_pid] then
+ log(ngx.DEBUG, debug.traceback("Random seed has already been initialized", 2))
+ return
+ end
+
+ -- generate the random seed
+ -- six bytes are used (a nod to APISIX's "SIX"); 256 ^ 6 should do the trick,
+ -- and the byte count shouldn't be larger than 16 to prevent overflow.
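+ -- draw 6 strong random bytes and concatenate their decimal values into the number passed to math_randomseed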
+ local random_bytes = resty_random.bytes(6) + local t = {} + + for i = 1, #random_bytes do + t[i] = string.byte(random_bytes, i) + end + + local s = table.concat(t) + + math_randomseed(tonumber(s)) + seeded[worker_pid] = true + end +end -- do + + +local patch_udp_socket +do + local old_udp_sock_setpeername + + local function new_udp_sock_setpeername(sock, host, port) + local core_str = require("apisix.core.string") + local resolver = require("apisix.core.resolver") + + if host then + if core_str.has_prefix(host, "unix:") then + return old_udp_sock_setpeername(sock, host) + end + + if not ipmatcher.parse_ipv4(host) and not ipmatcher.parse_ipv6(host) then + local err + host, err = resolver.parse_domain(host) + if not host then + return nil, "failed to parse domain: " .. err + end + end + end + + return old_udp_sock_setpeername(sock, host, port) + end + + + function patch_udp_socket(sock) + if not old_udp_sock_setpeername then + old_udp_sock_setpeername = sock.setpeername + end + + sock.setpeername = new_udp_sock_setpeername + return sock + end +end + + +local function flatten(args) + local buf = new_tab(#args, 0) + for i, v in ipairs(args) do + local ty = type(v) + if ty == "table" then + buf[i] = flatten(v) + elseif ty == "boolean" then + buf[i] = v and "true" or "false" + elseif ty == "nil" then + buf[i] = "nil" + else + buf[i] = v + end + end + return concat_tab(buf) +end + + +local luasocket_wrapper = { + connect = function (self, host, port) + if not port then + -- unix socket + self.sock = unix_socket() + if self.timeout then + self.sock:settimeout(self.timeout) + end + + local path = host:sub(#("unix:") + 1) + return self.sock:connect(path) + end + + if host:byte(1) == string.byte('[') then + -- ipv6, form as '[::1]', remove '[' and ']' + host = host:sub(2, -2) + self.sock = self.tcp6 + else + self.sock = self.tcp4 + end + + return self.sock:connect(host, port) + end, + + send = function(self, ...) + if select('#', ...) == 1 and type(select(1, ...)) == "string" then + -- fast path + return self.sock:send(...) 
+ end + + -- luasocket's send only accepts a single string + return self.sock:send(flatten({...})) + end, + + getreusedtimes = function () + return 0 + end, + setkeepalive = function (self) + self.sock:close() + return 1 + end, + + settimeout = function (self, time) + if time then + time = time / 1000 + end + + self.timeout = time + + return self.sock:settimeout(time) + end, + settimeouts = function (self, connect_time, read_time, write_time) + connect_time = connect_time or 0 + read_time = read_time or 0 + write_time = write_time or 0 + + -- set the max one as the timeout + local time = connect_time + if time < read_time then + time = read_time + end + if time < write_time then + time = write_time + end + + if time > 0 then + time = time / 1000 + else + time = nil + end + + self.timeout = time + + return self.sock:settimeout(time) + end, + + tlshandshake = function (self, options) + local reused_session = options.reused_session + local server_name = options.server_name + local verify = options.verify + local send_status_req = options.ocsp_status_req + + if reused_session then + log(WARN, "reused_session is not supported yet") + end + + if send_status_req then + log(WARN, "send_status_req is not supported yet") + end + + local params = { + mode = "client", + protocol = "any", + verify = verify and "peer" or "none", + certificate = options.client_cert_path, + key = options.client_priv_key_path, + options = { + "all", + "no_sslv2", + "no_sslv3", + "no_tlsv1" + } + } + + local local_conf, err = get_local_conf() + if not local_conf then + return nil, err + end + + local apisix_ssl = local_conf.apisix.ssl + if apisix_ssl and apisix_ssl.ssl_trusted_certificate then + params.cafile = apisix_ssl.ssl_trusted_certificate + end + + local sec_sock, err = ssl.wrap(self.sock, params) + if not sec_sock then + return false, err + end + + if server_name then + sec_sock:sni(server_name) + end + + local success + success, err = sec_sock:dohandshake() + if not success then + return false, err + end + + self.sock = sec_sock + return true + end, + + sslhandshake = function (self, reused_session, server_name, verify, send_status_req) + return self:tlshandshake({ + reused_session = reused_session, + server_name = server_name, + verify = verify, + ocsp_status_req = send_status_req, + }) + end +} + + +local mt = { + __index = function(self, key) + local sock = self.sock + local fn = luasocket_wrapper[key] + if fn then + self[key] = fn + return fn + end + + local origin = sock[key] + if type(origin) ~= "function" then + return origin + end + + fn = function(_, ...) + return origin(sock, ...) 
+ end + + self[key] = fn + return fn + end +} + +local function luasocket_tcp() + local sock = socket.tcp() + local tcp4 = socket.tcp4() + local tcp6 = socket.tcp6() + return setmetatable({sock = sock, tcp4 = tcp4, tcp6 = tcp6}, mt) +end + + +function _M.patch() + -- make linter happy + -- luacheck: ignore + ngx_socket.tcp = function () + local phase = get_phase() + if phase ~= "init" and phase ~= "init_worker" then + return patch_tcp_socket(original_tcp()) + end + + return luasocket_tcp() + end + + ngx_socket.udp = function () + return patch_udp_socket(original_udp()) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugin.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugin.lua new file mode 100644 index 0000000..6cb876b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugin.lua @@ -0,0 +1,1285 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local config_util = require("apisix.core.config_util") +local enable_debug = require("apisix.debug").enable_debug +local wasm = require("apisix.wasm") +local expr = require("resty.expr.v1") +local apisix_ssl = require("apisix.ssl") +local re_split = require("ngx.re").split +local ngx = ngx +local crc32 = ngx.crc32_short +local ngx_exit = ngx.exit +local pkg_loaded = package.loaded +local sort_tab = table.sort +local pcall = pcall +local ipairs = ipairs +local pairs = pairs +local type = type +local local_plugins = core.table.new(32, 0) +local tostring = tostring +local error = error +-- make linter happy to avoid error: getting the Lua global "load" +-- luacheck: globals load, ignore lua_load +local lua_load = load +local is_http = ngx.config.subsystem == "http" +local local_plugins_hash = core.table.new(0, 32) +local stream_local_plugins = core.table.new(32, 0) +local stream_local_plugins_hash = core.table.new(0, 32) + + +local merged_route = core.lrucache.new({ + ttl = 300, count = 512 +}) +local merged_stream_route = core.lrucache.new({ + ttl = 300, count = 512 +}) +local expr_lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) +local meta_pre_func_load_lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) +local local_conf +local check_plugin_metadata + +local _M = { + version = 0.3, + + load_times = 0, + plugins = local_plugins, + plugins_hash = local_plugins_hash, + + stream_load_times= 0, + stream_plugins = stream_local_plugins, + stream_plugins_hash = stream_local_plugins_hash, +} + + +local function plugin_attr(name) + -- TODO: get attr from synchronized data + local local_conf = core.config.local_conf() + return core.table.try_read_attr(local_conf, "plugin_attr", name) +end +_M.plugin_attr = plugin_attr + + +local function sort_plugin(l, r) + return 
l.priority > r.priority +end + +local function custom_sort_plugin(l, r) + return l._meta.priority > r._meta.priority +end + +local function check_disable(plugin_conf) + if not plugin_conf then + return nil + end + + if not plugin_conf._meta then + return nil + end + + if type(plugin_conf._meta) ~= "table" then + return nil + end + + return plugin_conf._meta.disable +end + +local PLUGIN_TYPE_HTTP = 1 +local PLUGIN_TYPE_STREAM = 2 +local PLUGIN_TYPE_HTTP_WASM = 3 +local function unload_plugin(name, plugin_type) + if plugin_type == PLUGIN_TYPE_HTTP_WASM then + return + end + + local pkg_name = "apisix.plugins." .. name + if plugin_type == PLUGIN_TYPE_STREAM then + pkg_name = "apisix.stream.plugins." .. name + end + + local old_plugin = pkg_loaded[pkg_name] + if old_plugin and type(old_plugin.destroy) == "function" then + old_plugin.destroy() + end + + pkg_loaded[pkg_name] = nil +end + + +local function load_plugin(name, plugins_list, plugin_type) + local ok, plugin + if plugin_type == PLUGIN_TYPE_HTTP_WASM then + -- for wasm plugin, we pass the whole attrs instead of name + ok, plugin = wasm.require(name) + name = name.name + else + local pkg_name = "apisix.plugins." .. name + if plugin_type == PLUGIN_TYPE_STREAM then + pkg_name = "apisix.stream.plugins." .. name + end + + ok, plugin = pcall(require, pkg_name) + end + + if not ok then + core.log.error("failed to load plugin [", name, "] err: ", plugin) + return + end + + if not plugin.priority then + core.log.error("invalid plugin [", name, + "], missing field: priority") + return + end + + if not plugin.version then + core.log.error("invalid plugin [", name, "] missing field: version") + return + end + + if type(plugin.schema) ~= "table" then + core.log.error("invalid plugin [", name, "] schema field") + return + end + + if not plugin.schema.properties then + plugin.schema.properties = {} + end + + local properties = plugin.schema.properties + local plugin_injected_schema = core.schema.plugin_injected_schema + + if plugin.schema['$comment'] ~= plugin_injected_schema['$comment'] then + if properties._meta then + core.log.error("invalid plugin [", name, + "]: found forbidden '_meta' field in the schema") + return + end + + properties._meta = plugin_injected_schema._meta + -- new injected fields should be added under `_meta` + -- 1. so we won't break user's code when adding any new injected fields + -- 2. 
the semantics is clear, especially in the doc and in the caller side + + plugin.schema['$comment'] = plugin_injected_schema['$comment'] + end + + plugin.name = name + plugin.attr = plugin_attr(name) + core.table.insert(plugins_list, plugin) + + if plugin.init then + plugin.init() + end + + if plugin.workflow_handler then + plugin.workflow_handler() + end + + return +end + + +local function load(plugin_names, wasm_plugin_names) + local processed = {} + for _, name in ipairs(plugin_names) do + if processed[name] == nil then + processed[name] = true + end + end + for _, attrs in ipairs(wasm_plugin_names) do + if processed[attrs.name] == nil then + processed[attrs.name] = attrs + end + end + + core.log.warn("new plugins: ", core.json.delay_encode(processed)) + + for name, plugin in pairs(local_plugins_hash) do + local ty = PLUGIN_TYPE_HTTP + if plugin.type == "wasm" then + ty = PLUGIN_TYPE_HTTP_WASM + end + unload_plugin(name, ty) + end + + core.table.clear(local_plugins) + core.table.clear(local_plugins_hash) + + for name, value in pairs(processed) do + local ty = PLUGIN_TYPE_HTTP + if type(value) == "table" then + ty = PLUGIN_TYPE_HTTP_WASM + name = value + end + load_plugin(name, local_plugins, ty) + end + + -- sort by plugin's priority + if #local_plugins > 1 then + sort_tab(local_plugins, sort_plugin) + end + + for i, plugin in ipairs(local_plugins) do + local_plugins_hash[plugin.name] = plugin + if enable_debug() then + core.log.warn("loaded plugin and sort by priority:", + " ", plugin.priority, + " name: ", plugin.name) + end + end + + _M.load_times = _M.load_times + 1 + core.log.info("load plugin times: ", _M.load_times) + return true +end + + +local function load_stream(plugin_names) + local processed = {} + for _, name in ipairs(plugin_names) do + if processed[name] == nil then + processed[name] = true + end + end + + core.log.warn("new plugins: ", core.json.delay_encode(processed)) + + for name in pairs(stream_local_plugins_hash) do + unload_plugin(name, PLUGIN_TYPE_STREAM) + end + + core.table.clear(stream_local_plugins) + core.table.clear(stream_local_plugins_hash) + + for name in pairs(processed) do + load_plugin(name, stream_local_plugins, PLUGIN_TYPE_STREAM) + end + + -- sort by plugin's priority + if #stream_local_plugins > 1 then + sort_tab(stream_local_plugins, sort_plugin) + end + + for i, plugin in ipairs(stream_local_plugins) do + stream_local_plugins_hash[plugin.name] = plugin + if enable_debug() then + core.log.warn("loaded stream plugin and sort by priority:", + " ", plugin.priority, + " name: ", plugin.name) + end + end + + _M.stream_load_times = _M.stream_load_times + 1 + core.log.info("stream plugins: ", + core.json.delay_encode(stream_local_plugins, true)) + core.log.info("load stream plugin times: ", _M.stream_load_times) + return true +end + + +local function get_plugin_names(config) + local http_plugin_names + local stream_plugin_names + + if not config then + -- called during starting or hot reload in admin + local err + local_conf, err = core.config.local_conf(true) + if not local_conf then + -- the error is unrecoverable, so we need to raise it + error("failed to load the configuration file: " .. 
err) + end + + http_plugin_names = local_conf.plugins + stream_plugin_names = local_conf.stream_plugins + else + -- called during synchronizing plugin data + http_plugin_names = {} + stream_plugin_names = {} + local plugins_conf = config.value + -- plugins_conf can be nil when another instance writes into etcd key "/apisix/plugins/" + if not plugins_conf then + return true + end + + for _, conf in ipairs(plugins_conf) do + if conf.stream then + core.table.insert(stream_plugin_names, conf.name) + else + core.table.insert(http_plugin_names, conf.name) + end + end + end + + return false, http_plugin_names, stream_plugin_names +end + + +function _M.load(config) + local ignored, http_plugin_names, stream_plugin_names = get_plugin_names(config) + if ignored then + return local_plugins + end + + local exporter = require("apisix.plugins.prometheus.exporter") + + if ngx.config.subsystem == "http" then + if not http_plugin_names then + core.log.error("failed to read plugin list from local file") + else + local wasm_plugin_names = {} + if local_conf.wasm then + wasm_plugin_names = local_conf.wasm.plugins + end + + local ok, err = load(http_plugin_names, wasm_plugin_names) + if not ok then + core.log.error("failed to load plugins: ", err) + end + + local enabled = core.table.array_find(http_plugin_names, "prometheus") ~= nil + local active = exporter.get_prometheus() ~= nil + if not enabled then + exporter.destroy() + end + if enabled and not active then + exporter.http_init() + end + end + end + + if not stream_plugin_names then + core.log.warn("failed to read stream plugin list from local file") + else + local ok, err = load_stream(stream_plugin_names) + if not ok then + core.log.error("failed to load stream plugins: ", err) + end + end + + -- for test + return local_plugins +end + + +function _M.exit_worker() + for name, plugin in pairs(local_plugins_hash) do + local ty = PLUGIN_TYPE_HTTP + if plugin.type == "wasm" then + ty = PLUGIN_TYPE_HTTP_WASM + end + unload_plugin(name, ty) + end + + -- we need to load stream plugin so that we can check their schemas in + -- Admin API. Maybe we can avoid calling `load` in this case? So that + -- we don't need to call `destroy` too + for name in pairs(stream_local_plugins_hash) do + unload_plugin(name, PLUGIN_TYPE_STREAM) + end +end + + +local function trace_plugins_info_for_debug(ctx, plugins) + if not enable_debug() then + return + end + + if not plugins then + if is_http and not ngx.headers_sent then + core.response.add_header("Apisix-Plugins", "no plugin") + else + core.log.warn("Apisix-Plugins: no plugin") + end + + return + end + + local t = {} + for i = 1, #plugins, 2 do + core.table.insert(t, plugins[i].name) + end + if is_http and not ngx.headers_sent then + if ctx then + local debug_headers = ctx.debug_headers + if not debug_headers then + debug_headers = core.table.new(0, 5) + end + for i, v in ipairs(t) do + debug_headers[v] = true + end + ctx.debug_headers = debug_headers + end + else + core.log.warn("Apisix-Plugins: ", core.table.concat(t, ", ")) + end +end + + +local function meta_filter(ctx, plugin_name, plugin_conf) + local filter = plugin_conf._meta and plugin_conf._meta.filter + if not filter then + return true + end + + local match_cache_key = + ctx.conf_type .. "#" .. ctx.conf_id .. "#" + .. ctx.conf_version .. "#" .. plugin_name .. "#meta_filter_matched" + if ctx[match_cache_key] ~= nil then + return ctx[match_cache_key] + end + + local ex, ok, err + if ctx then + ex, err = expr_lrucache(plugin_name .. ctx.conf_type .. 
ctx.conf_id, + ctx.conf_version, expr.new, filter) + else + ex, err = expr.new(filter) + end + if not ex then + core.log.warn("failed to get the 'vars' expression: ", err , + " plugin_name: ", plugin_name) + return true + end + ok, err = ex:eval(ctx.var) + if err then + core.log.warn("failed to run the 'vars' expression: ", err, + " plugin_name: ", plugin_name) + return true + end + + ctx[match_cache_key] = ok + return ok +end + + +function _M.filter(ctx, conf, plugins, route_conf, phase) + local user_plugin_conf = conf.value.plugins + if user_plugin_conf == nil or + core.table.nkeys(user_plugin_conf) == 0 then + trace_plugins_info_for_debug(nil, nil) + -- when 'plugins' is given, always return 'plugins' itself instead + -- of another one + return plugins or core.tablepool.fetch("plugins", 0, 0) + end + + local custom_sort = false + local route_plugin_conf = route_conf and route_conf.value.plugins + plugins = plugins or core.tablepool.fetch("plugins", 32, 0) + for _, plugin_obj in ipairs(local_plugins) do + local name = plugin_obj.name + local plugin_conf = user_plugin_conf[name] + + if type(plugin_conf) ~= "table" then + goto continue + end + + if check_disable(plugin_conf) then + goto continue + end + + if plugin_obj.run_policy == "prefer_route" and route_plugin_conf ~= nil then + local plugin_conf_in_route = route_plugin_conf[name] + local disable_in_route = check_disable(plugin_conf_in_route) + if plugin_conf_in_route and not disable_in_route then + goto continue + end + end + + -- in the rewrite phase, the plugin executes in the following order: + -- 1. execute the rewrite phase of the plugins on route(including the auth plugins) + -- 2. merge plugins from consumer and route + -- 3. execute the rewrite phase of the plugins on consumer(phase: rewrite_in_consumer) + -- in this case, we need to skip the plugins that was already executed(step 1) + if phase == "rewrite_in_consumer" + and (not plugin_conf._from_consumer or plugin_obj.type == "auth") then + plugin_conf._skip_rewrite_in_consumer = true + end + + if plugin_conf._meta and plugin_conf._meta.priority then + custom_sort = true + end + + core.table.insert(plugins, plugin_obj) + core.table.insert(plugins, plugin_conf) + + ::continue:: + end + + trace_plugins_info_for_debug(ctx, plugins) + + if custom_sort then + local tmp_plugin_objs = core.tablepool.fetch("tmp_plugin_objs", 0, #plugins / 2) + local tmp_plugin_confs = core.tablepool.fetch("tmp_plugin_confs", #plugins / 2, 0) + + for i = 1, #plugins, 2 do + local plugin_obj = plugins[i] + local plugin_conf = plugins[i + 1] + + tmp_plugin_objs[plugin_conf] = plugin_obj + core.table.insert(tmp_plugin_confs, plugin_conf) + + if not plugin_conf._meta then + plugin_conf._meta = core.table.new(0, 1) + plugin_conf._meta.priority = plugin_obj.priority + else + if not plugin_conf._meta.priority then + plugin_conf._meta.priority = plugin_obj.priority + end + end + end + + sort_tab(tmp_plugin_confs, custom_sort_plugin) + + local index + for i = 1, #tmp_plugin_confs do + index = i * 2 - 1 + local plugin_conf = tmp_plugin_confs[i] + local plugin_obj = tmp_plugin_objs[plugin_conf] + plugins[index] = plugin_obj + plugins[index + 1] = plugin_conf + end + + core.tablepool.release("tmp_plugin_objs", tmp_plugin_objs) + core.tablepool.release("tmp_plugin_confs", tmp_plugin_confs) + end + + return plugins +end + + +function _M.stream_filter(user_route, plugins) + plugins = plugins or core.table.new(#stream_local_plugins * 2, 0) + local user_plugin_conf = user_route.value.plugins + if user_plugin_conf == 
nil then + trace_plugins_info_for_debug(nil, nil) + return plugins + end + + for _, plugin_obj in ipairs(stream_local_plugins) do + local name = plugin_obj.name + local plugin_conf = user_plugin_conf[name] + + local disable = check_disable(plugin_conf) + if type(plugin_conf) == "table" and not disable then + core.table.insert(plugins, plugin_obj) + core.table.insert(plugins, plugin_conf) + end + end + + trace_plugins_info_for_debug(nil, plugins) + + return plugins +end + + +local function merge_service_route(service_conf, route_conf) + local new_conf = core.table.deepcopy(service_conf, { shallows = {"self.value.upstream.parent"}}) + new_conf.value.service_id = new_conf.value.id + new_conf.value.id = route_conf.value.id + new_conf.modifiedIndex = route_conf.modifiedIndex + + if route_conf.value.plugins then + for name, conf in pairs(route_conf.value.plugins) do + if not new_conf.value.plugins then + new_conf.value.plugins = {} + end + + new_conf.value.plugins[name] = conf + end + end + + local route_upstream = route_conf.value.upstream + if route_upstream then + new_conf.value.upstream = route_upstream + -- when route's upstream override service's upstream, + -- the upstream.parent still point to the route + new_conf.value.upstream_id = nil + new_conf.has_domain = route_conf.has_domain + end + + if route_conf.value.upstream_id then + new_conf.value.upstream_id = route_conf.value.upstream_id + new_conf.has_domain = route_conf.has_domain + end + + if route_conf.value.script then + new_conf.value.script = route_conf.value.script + end + + if route_conf.value.timeout then + new_conf.value.timeout = route_conf.value.timeout + end + + if route_conf.value.name then + new_conf.value.name = route_conf.value.name + else + new_conf.value.name = nil + end + + if route_conf.value.hosts then + new_conf.value.hosts = route_conf.value.hosts + end + if not new_conf.value.hosts and route_conf.value.host then + new_conf.value.host = route_conf.value.host + end + + if route_conf.value.labels then + new_conf.value.labels = route_conf.value.labels + end + + -- core.log.info("merged conf : ", core.json.delay_encode(new_conf)) + return new_conf +end + + +function _M.merge_service_route(service_conf, route_conf) + core.log.info("service conf: ", core.json.delay_encode(service_conf, true)) + core.log.info(" route conf: ", core.json.delay_encode(route_conf, true)) + + local route_service_key = route_conf.value.id .. "#" + .. route_conf.modifiedIndex .. "#" .. 
service_conf.modifiedIndex + return merged_route(route_service_key, service_conf, + merge_service_route, + service_conf, route_conf) +end + + +local function merge_service_stream_route(service_conf, route_conf) + -- because many fields in Service are not supported by stream route, + -- so we copy the stream route as base object + local new_conf = core.table.deepcopy(route_conf, { shallows = {"self.value.upstream.parent"}}) + if service_conf.value.plugins then + for name, conf in pairs(service_conf.value.plugins) do + if not new_conf.value.plugins then + new_conf.value.plugins = {} + end + + if not new_conf.value.plugins[name] then + new_conf.value.plugins[name] = conf + end + end + end + + new_conf.value.service_id = nil + + if not new_conf.value.upstream and service_conf.value.upstream then + new_conf.value.upstream = service_conf.value.upstream + end + + if not new_conf.value.upstream_id and service_conf.value.upstream_id then + new_conf.value.upstream_id = service_conf.value.upstream_id + end + + return new_conf +end + + +function _M.merge_service_stream_route(service_conf, route_conf) + core.log.info("service conf: ", core.json.delay_encode(service_conf, true)) + core.log.info(" stream route conf: ", core.json.delay_encode(route_conf, true)) + + local version = route_conf.modifiedIndex .. "#" .. service_conf.modifiedIndex + local route_service_key = route_conf.value.id .. "#" + .. version + return merged_stream_route(route_service_key, version, + merge_service_stream_route, + service_conf, route_conf) +end + + +local function merge_consumer_route(route_conf, consumer_conf, consumer_group_conf) + if not consumer_conf.plugins or + core.table.nkeys(consumer_conf.plugins) == 0 + then + core.log.info("consumer no plugins") + return route_conf + end + + local new_route_conf = core.table.deepcopy(route_conf, + { shallows = {"self.value.upstream.parent"}}) + + if consumer_group_conf then + for name, conf in pairs(consumer_group_conf.value.plugins) do + if not new_route_conf.value.plugins then + new_route_conf.value.plugins = {} + end + + if new_route_conf.value.plugins[name] == nil then + conf._from_consumer = true + end + new_route_conf.value.plugins[name] = conf + end + end + + for name, conf in pairs(consumer_conf.plugins) do + if not new_route_conf.value.plugins then + new_route_conf.value.plugins = {} + end + + if new_route_conf.value.plugins[name] == nil then + conf._from_consumer = true + end + new_route_conf.value.plugins[name] = conf + end + + core.log.info("merged conf : ", core.json.delay_encode(new_route_conf)) + return new_route_conf +end + + +function _M.merge_consumer_route(route_conf, consumer_conf, consumer_group_conf, api_ctx) + core.log.info("route conf: ", core.json.delay_encode(route_conf)) + core.log.info("consumer conf: ", core.json.delay_encode(consumer_conf)) + core.log.info("consumer group conf: ", core.json.delay_encode(consumer_group_conf)) + + local flag = route_conf.value.id .. "#" .. route_conf.modifiedIndex + .. "#" .. consumer_conf.id .. "#" .. consumer_conf.modifiedIndex + + if consumer_group_conf then + flag = flag .. "#" .. consumer_group_conf.value.id + .. "#" .. 
consumer_group_conf.modifiedIndex + end + + local new_conf = merged_route(flag, api_ctx.conf_version, + merge_consumer_route, route_conf, consumer_conf, consumer_group_conf) + + -- some plugins like limit-count don't care if consumer changes + -- all consumers should share the same counter + api_ctx.conf_type_without_consumer = api_ctx.conf_type + api_ctx.conf_version_without_consumer = api_ctx.conf_version + api_ctx.conf_id_without_consumer = api_ctx.conf_id + + api_ctx.conf_type = api_ctx.conf_type .. "&consumer" + api_ctx.conf_version = api_ctx.conf_version .. "&" .. + api_ctx.consumer_ver + api_ctx.conf_id = api_ctx.conf_id .. "&" .. api_ctx.consumer_name + + if consumer_group_conf then + api_ctx.conf_type = api_ctx.conf_type .. "&consumer_group" + api_ctx.conf_version = api_ctx.conf_version .. "&" .. consumer_group_conf.modifiedIndex + api_ctx.conf_id = api_ctx.conf_id .. "&" .. consumer_group_conf.value.id + end + + return new_conf, new_conf ~= route_conf +end + + +local init_plugins_syncer +do + local plugins_conf + + function init_plugins_syncer() + local err + plugins_conf, err = core.config.new("/plugins", { + automatic = true, + item_schema = core.schema.plugins, + single_item = true, + filter = function(item) + -- we need to pass 'item' instead of plugins_conf because + -- the latter one is nil at the first run + _M.load(item) + end, + }) + if not plugins_conf then + error("failed to create etcd instance for fetching /plugins : " .. err) + end + end +end + + +function _M.init_worker() + local _, http_plugin_names, stream_plugin_names = get_plugin_names() + + -- some plugins need to be initialized in init* phases + if is_http and core.table.array_find(http_plugin_names, "prometheus") then + local prometheus_enabled_in_stream = + core.table.array_find(stream_plugin_names, "prometheus") + require("apisix.plugins.prometheus.exporter").http_init(prometheus_enabled_in_stream) + elseif not is_http and core.table.array_find(stream_plugin_names, "prometheus") then + require("apisix.plugins.prometheus.exporter").stream_init() + end + + -- someone's plugin needs to be initialized after prometheus + -- see https://github.com/apache/apisix/issues/3286 + _M.load() + + if local_conf and not local_conf.apisix.enable_admin then + init_plugins_syncer() + end + + local plugin_metadatas, err = core.config.new("/plugin_metadata", + { + automatic = true, + checker = check_plugin_metadata + } + ) + if not plugin_metadatas then + error("failed to create etcd instance for fetching /plugin_metadatas : " + .. 
err) + end + + _M.plugin_metadatas = plugin_metadatas +end + + +function _M.plugin_metadata(name) + return _M.plugin_metadatas:get(name) +end + + +function _M.get(name) + return local_plugins_hash and local_plugins_hash[name] +end + + +function _M.get_stream(name) + return stream_local_plugins_hash and stream_local_plugins_hash[name] +end + + +function _M.get_all(attrs) + local http_plugins = {} + local stream_plugins = {} + + if local_plugins_hash then + for name, plugin_obj in pairs(local_plugins_hash) do + http_plugins[name] = core.table.pick(plugin_obj, attrs) + end + end + + if stream_local_plugins_hash then + for name, plugin_obj in pairs(stream_local_plugins_hash) do + stream_plugins[name] = core.table.pick(plugin_obj, attrs) + end + end + + return http_plugins, stream_plugins +end + + +-- conf_version returns a version which only depends on the value of conf, +-- instead of where this plugin conf belongs to +function _M.conf_version(conf) + if not conf._version then + local data = core.json.stably_encode(conf) + conf._version = tostring(crc32(data)) + core.log.info("init plugin-level conf version: ", conf._version, ", from ", data) + end + + return conf._version +end + + +local function check_single_plugin_schema(name, plugin_conf, schema_type, skip_disabled_plugin) + core.log.info("check plugin schema, name: ", name, ", configurations: ", + core.json.delay_encode(plugin_conf, true)) + if type(plugin_conf) ~= "table" then + return false, "invalid plugin conf " .. + core.json.encode(plugin_conf, true) .. + " for plugin [" .. name .. "]" + end + + local plugin_obj = local_plugins_hash[name] + if not plugin_obj then + if skip_disabled_plugin then + return true + else + return false, "unknown plugin [" .. name .. "]" + end + end + + if plugin_obj.check_schema then + local ok, err = plugin_obj.check_schema(plugin_conf, schema_type) + if not ok then + return false, "failed to check the configuration of plugin " + .. name .. " err: " .. err + end + + if plugin_conf._meta then + if plugin_conf._meta.filter then + ok, err = expr.new(plugin_conf._meta.filter) + if not ok then + return nil, "failed to validate the 'vars' expression: " .. err + end + end + + if plugin_conf._meta.pre_function then + local pre_function, err = meta_pre_func_load_lrucache(plugin_conf._meta.pre_function + , "", + lua_load, + plugin_conf._meta.pre_function, "meta pre_function") + if not pre_function then + return nil, "failed to load _meta.pre_function in plugin " .. name .. ": " + .. err + end + end + end + end + + return true +end + + +local enable_data_encryption +local function enable_gde() + if enable_data_encryption == nil then + enable_data_encryption = + core.table.try_read_attr(local_conf, "apisix", "data_encryption", + "enable_encrypt_fields") and (core.config.type == "etcd") + _M.enable_data_encryption = enable_data_encryption + end + + return enable_data_encryption +end + + +local function get_plugin_schema_for_gde(name, schema_type) + local plugin_schema = local_plugins_hash and local_plugins_hash[name] + if not plugin_schema then + return nil + end + + local schema + if schema_type == core.schema.TYPE_CONSUMER then + -- when we use a non-auth plugin in the consumer, + -- where the consumer_schema field does not exist, + -- we need to fallback to it's schema for encryption and decryption. 
+ schema = plugin_schema.consumer_schema or plugin_schema.schema + elseif schema_type == core.schema.TYPE_METADATA then + schema = plugin_schema.metadata_schema + else + schema = plugin_schema.schema + end + + return schema +end + + +local function decrypt_conf(name, conf, schema_type) + if not enable_gde() then + return + end + local schema = get_plugin_schema_for_gde(name, schema_type) + if not schema then + core.log.warn("failed to get schema for plugin: ", name) + return + end + + if schema.encrypt_fields and not core.table.isempty(schema.encrypt_fields) then + for _, key in ipairs(schema.encrypt_fields) do + if conf[key] then + local decrypted, err = apisix_ssl.aes_decrypt_pkey(conf[key], "data_encrypt") + if not decrypted then + core.log.warn("failed to decrypt the conf of plugin [", name, + "] key [", key, "], err: ", err) + else + conf[key] = decrypted + end + elseif core.string.find(key, ".") then + -- decrypt fields has indents + local res, err = re_split(key, "\\.", "jo") + if not res then + core.log.warn("failed to split key [", key, "], err: ", err) + return + end + + -- we only support two levels + if conf[res[1]] and conf[res[1]][res[2]] then + local decrypted, err = apisix_ssl.aes_decrypt_pkey( + conf[res[1]][res[2]], "data_encrypt") + if not decrypted then + core.log.warn("failed to decrypt the conf of plugin [", name, + "] key [", key, "], err: ", err) + else + conf[res[1]][res[2]] = decrypted + end + end + end + end + end +end +_M.decrypt_conf = decrypt_conf + + +local function encrypt_conf(name, conf, schema_type) + if not enable_gde() then + return + end + local schema = get_plugin_schema_for_gde(name, schema_type) + if not schema then + core.log.warn("failed to get schema for plugin: ", name) + return + end + + if schema.encrypt_fields and not core.table.isempty(schema.encrypt_fields) then + for _, key in ipairs(schema.encrypt_fields) do + if conf[key] then + local encrypted, err = apisix_ssl.aes_encrypt_pkey(conf[key], "data_encrypt") + if not encrypted then + core.log.warn("failed to encrypt the conf of plugin [", name, + "] key [", key, "], err: ", err) + else + conf[key] = encrypted + end + elseif core.string.find(key, ".") then + -- encrypt fields has indents + local res, err = re_split(key, "\\.", "jo") + if not res then + core.log.warn("failed to split key [", key, "], err: ", err) + return + end + + -- we only support two levels + if conf[res[1]] and conf[res[1]][res[2]] then + local encrypted, err = apisix_ssl.aes_encrypt_pkey( + conf[res[1]][res[2]], "data_encrypt") + if not encrypted then + core.log.warn("failed to encrypt the conf of plugin [", name, + "] key [", key, "], err: ", err) + else + conf[res[1]][res[2]] = encrypted + end + end + end + end + end +end +_M.encrypt_conf = encrypt_conf + + +check_plugin_metadata = function(item) + local ok, err = check_single_plugin_schema(item.id, item, + core.schema.TYPE_METADATA, true) + if ok and enable_gde() then + decrypt_conf(item.id, item, core.schema.TYPE_METADATA) + end + + return ok, err +end + + +local function check_schema(plugins_conf, schema_type, skip_disabled_plugin) + for name, plugin_conf in pairs(plugins_conf) do + local ok, err = check_single_plugin_schema(name, plugin_conf, + schema_type, skip_disabled_plugin) + if not ok then + return false, err + end + end + + return true +end +_M.check_schema = check_schema + + +local function stream_check_schema(plugins_conf, schema_type, skip_disabled_plugin) + for name, plugin_conf in pairs(plugins_conf) do + core.log.info("check stream plugin schema, 
name: ", name, + ": ", core.json.delay_encode(plugin_conf, true)) + if type(plugin_conf) ~= "table" then + return false, "invalid plugin conf " .. + core.json.encode(plugin_conf, true) .. + " for plugin [" .. name .. "]" + end + + local plugin_obj = stream_local_plugins_hash[name] + if not plugin_obj then + if skip_disabled_plugin then + goto CONTINUE + else + return false, "unknown plugin [" .. name .. "]" + end + end + + if plugin_obj.check_schema then + local ok, err = plugin_obj.check_schema(plugin_conf, schema_type) + if not ok then + return false, "failed to check the configuration of " + .. "stream plugin [" .. name .. "]: " .. err + end + end + + ::CONTINUE:: + end + + return true +end +_M.stream_check_schema = stream_check_schema + + +function _M.plugin_checker(item, schema_type) + if item.plugins then + local ok, err = check_schema(item.plugins, schema_type, true) + + if ok and enable_gde() then + -- decrypt conf + for name, conf in pairs(item.plugins) do + decrypt_conf(name, conf, schema_type) + end + end + return ok, err + end + + return true +end + + +function _M.stream_plugin_checker(item, in_cp) + if item.plugins then + return stream_check_schema(item.plugins, nil, not in_cp) + end + + return true +end + +local function run_meta_pre_function(conf, api_ctx, name) + if conf._meta and conf._meta.pre_function then + local _, pre_function = pcall(meta_pre_func_load_lrucache(conf._meta.pre_function, "", + lua_load, + conf._meta.pre_function, "meta pre_function")) + local ok, err = pcall(pre_function, conf, api_ctx) + if not ok then + core.log.error("pre_function execution for plugin ", name, " failed: ", err) + end + end +end + +function _M.run_plugin(phase, plugins, api_ctx) + local plugin_run = false + api_ctx = api_ctx or ngx.ctx.api_ctx + if not api_ctx then + return + end + + plugins = plugins or api_ctx.plugins + if not plugins or #plugins == 0 then + return api_ctx + end + + if phase ~= "log" + and phase ~= "header_filter" + and phase ~= "body_filter" + and phase ~= "delayed_body_filter" + then + for i = 1, #plugins, 2 do + + if phase == "rewrite_in_consumer" and plugins[i + 1]._skip_rewrite_in_consumer then + goto CONTINUE + end + + local phase_func = phase == "rewrite_in_consumer" and plugins[i]["rewrite"] + or plugins[i][phase] + if phase_func then + local conf = plugins[i + 1] + if not meta_filter(api_ctx, plugins[i]["name"], conf)then + goto CONTINUE + end + + run_meta_pre_function(conf, api_ctx, plugins[i]["name"]) + plugin_run = true + api_ctx._plugin_name = plugins[i]["name"] + local code, body = phase_func(conf, api_ctx) + api_ctx._plugin_name = nil + if code or body then + if is_http then + if code >= 400 then + core.log.warn(plugins[i].name, " exits with http status code ", code) + + if conf._meta and conf._meta.error_response then + -- Whether or not the original error message is output, + -- always return the configured message + -- so the caller can't guess the real error + body = conf._meta.error_response + end + end + + core.response.exit(code, body) + else + if code >= 400 then + core.log.warn(plugins[i].name, " exits with status code ", code) + end + + ngx_exit(1) + end + end + end + + ::CONTINUE:: + end + return api_ctx, plugin_run + end + + for i = 1, #plugins, 2 do + local phase_func = plugins[i][phase] + local conf = plugins[i + 1] + if phase_func and meta_filter(api_ctx, plugins[i]["name"], conf) then + plugin_run = true + run_meta_pre_function(conf, api_ctx, plugins[i]["name"]) + api_ctx._plugin_name = plugins[i]["name"] + phase_func(conf, api_ctx) + 
api_ctx._plugin_name = nil + end + end + + return api_ctx, plugin_run +end + + +function _M.run_global_rules(api_ctx, global_rules, phase_name) + if global_rules and #global_rules > 0 then + local orig_conf_type = api_ctx.conf_type + local orig_conf_version = api_ctx.conf_version + local orig_conf_id = api_ctx.conf_id + + if phase_name == nil then + api_ctx.global_rules = global_rules + end + + local plugins = core.tablepool.fetch("plugins", 32, 0) + local values = global_rules + local route = api_ctx.matched_route + for _, global_rule in config_util.iterate_values(values) do + api_ctx.conf_type = "global_rule" + api_ctx.conf_version = global_rule.modifiedIndex + api_ctx.conf_id = global_rule.value.id + + core.table.clear(plugins) + plugins = _M.filter(api_ctx, global_rule, plugins, route) + if phase_name == nil then + _M.run_plugin("rewrite", plugins, api_ctx) + _M.run_plugin("access", plugins, api_ctx) + else + _M.run_plugin(phase_name, plugins, api_ctx) + end + end + core.tablepool.release("plugins", plugins) + + api_ctx.conf_type = orig_conf_type + api_ctx.conf_version = orig_conf_version + api_ctx.conf_id = orig_conf_id + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugin_config.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugin_config.lua new file mode 100644 index 0000000..88b17d4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugin_config.lua @@ -0,0 +1,88 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugin_checker = require("apisix.plugin").plugin_checker +local pairs = pairs +local error = error + + +local plugin_configs + + +local _M = { +} + + +function _M.init_worker() + local err + plugin_configs, err = core.config.new("/plugin_configs", { + automatic = true, + item_schema = core.schema.plugin_config, + checker = plugin_checker, + }) + if not plugin_configs then + error("failed to sync /plugin_configs: " .. 
err) + end +end + + +function _M.plugin_configs() + if not plugin_configs then + return nil, nil + end + return plugin_configs.values, plugin_configs.conf_version +end + + +function _M.get(id) + return plugin_configs:get(id) +end + + +function _M.merge(route_conf, plugin_config) + if route_conf.prev_plugin_config_ver == plugin_config.modifiedIndex then + return route_conf + end + + if not route_conf.value.plugins then + route_conf.value.plugins = {} + end + + if route_conf.orig_plugins then + -- recover + route_conf.value.plugins = route_conf.orig_plugins + else + -- backup in the first time + route_conf.orig_plugins = route_conf.value.plugins + end + + route_conf.value.plugins = core.table.clone(route_conf.value.plugins) + + for name, value in pairs(plugin_config.value.plugins) do + if not route_conf.value.plugins[name] then + route_conf.value.plugins[name] = value + end + end + + route_conf.modifiedIndex = route_conf.orig_modifiedIndex .. "#" .. plugin_config.modifiedIndex + route_conf.prev_plugin_config_ver = plugin_config.modifiedIndex + + return route_conf +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-aws-content-moderation.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-aws-content-moderation.lua new file mode 100644 index 0000000..d229b47 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-aws-content-moderation.lua @@ -0,0 +1,161 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +require("resty.aws.config") -- to read env vars before initing aws module + +local core = require("apisix.core") +local aws = require("resty.aws") +local aws_instance + +local http = require("resty.http") +local fetch_secrets = require("apisix.secret").fetch_secrets + +local pairs = pairs +local unpack = unpack +local type = type +local ipairs = ipairs +local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR +local HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST + +local moderation_categories_pattern = "^(PROFANITY|HATE_SPEECH|INSULT|".. 
+ "HARASSMENT_OR_ABUSE|SEXUAL|VIOLENCE_OR_THREAT)$" +local schema = { + type = "object", + properties = { + comprehend = { + type = "object", + properties = { + access_key_id = { type = "string" }, + secret_access_key = { type = "string" }, + region = { type = "string" }, + endpoint = { + type = "string", + pattern = [[^https?://]] + }, + ssl_verify = { + type = "boolean", + default = true + } + }, + required = { "access_key_id", "secret_access_key", "region", } + }, + moderation_categories = { + type = "object", + patternProperties = { + [moderation_categories_pattern] = { + type = "number", + minimum = 0, + maximum = 1 + } + }, + additionalProperties = false + }, + moderation_threshold = { + type = "number", + minimum = 0, + maximum = 1, + default = 0.5 + } + }, + required = { "comprehend" }, +} + + +local _M = { + version = 0.1, + priority = 1050, + name = "ai-aws-content-moderation", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.rewrite(conf, ctx) + conf = fetch_secrets(conf, true, conf, "") + if not conf then + return HTTP_INTERNAL_SERVER_ERROR, "failed to retrieve secrets from conf" + end + + local body, err = core.request.get_body() + if not body then + return HTTP_BAD_REQUEST, err + end + + local comprehend = conf.comprehend + + if not aws_instance then + aws_instance = aws() + end + local credentials = aws_instance:Credentials({ + accessKeyId = comprehend.access_key_id, + secretAccessKey = comprehend.secret_access_key, + sessionToken = comprehend.session_token, + }) + + local default_endpoint = "https://comprehend." .. comprehend.region .. ".amazonaws.com" + local scheme, host, port = unpack(http:parse_uri(comprehend.endpoint or default_endpoint)) + local endpoint = scheme .. "://" .. host + aws_instance.config.endpoint = endpoint + aws_instance.config.ssl_verify = comprehend.ssl_verify + + local comprehend = aws_instance:Comprehend({ + credentials = credentials, + endpoint = endpoint, + region = comprehend.region, + port = port, + }) + + local res, err = comprehend:detectToxicContent({ + LanguageCode = "en", + TextSegments = {{ + Text = body + }}, + }) + + if not res then + core.log.error("failed to send request to ", endpoint, ": ", err) + return HTTP_INTERNAL_SERVER_ERROR, err + end + + local results = res.body and res.body.ResultList + if type(results) ~= "table" or core.table.isempty(results) then + return HTTP_INTERNAL_SERVER_ERROR, "failed to get moderation results from response" + end + + for _, result in ipairs(results) do + if conf.moderation_categories then + for _, item in pairs(result.Labels) do + if not conf.moderation_categories[item.Name] then + goto continue + end + if item.Score > conf.moderation_categories[item.Name] then + return HTTP_BAD_REQUEST, "request body exceeds " .. item.Name .. " threshold" + end + ::continue:: + end + end + + if result.Toxicity > conf.moderation_threshold then + return HTTP_BAD_REQUEST, "request body exceeds toxicity threshold" + end + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/aimlapi.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/aimlapi.lua new file mode 100644 index 0000000..dad1014 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/aimlapi.lua @@ -0,0 +1,24 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +return require("apisix.plugins.ai-drivers.openai-base").new( + { + host = "api.aimlapi.com", + path = "/chat/completions", + port = 443 + } +) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/deepseek.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/deepseek.lua new file mode 100644 index 0000000..19c2e90 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/deepseek.lua @@ -0,0 +1,24 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +return require("apisix.plugins.ai-drivers.openai-base").new( + { + host = "api.deepseek.com", + path = "/chat/completions", + port = 443 + } +) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-base.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-base.lua new file mode 100644 index 0000000..0913426 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-base.lua @@ -0,0 +1,255 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
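+-- Illustrative sketch, not part of the upstream module: every provider-specific
+-- driver in this directory is a thin wrapper around this base. A hypothetical
+-- new OpenAI-compatible provider would only need something like:
+--
+--   return require("apisix.plugins.ai-drivers.openai-base").new({
+--       host = "api.example-llm.com",  -- hypothetical hostname
+--       path = "/chat/completions",
+--       port = 443,
+--   })
+--
+-- The "openai-compatible" driver instead passes an empty table and relies on
+-- the plugin configuration's override.endpoint to supply the target host.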
+-- +local _M = {} + +local mt = { + __index = _M +} + +local CONTENT_TYPE_JSON = "application/json" + +local core = require("apisix.core") +local http = require("resty.http") +local url = require("socket.url") +local ngx_re = require("ngx.re") + +local ngx_print = ngx.print +local ngx_flush = ngx.flush + +local pairs = pairs +local type = type +local ipairs = ipairs +local setmetatable = setmetatable + +local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR +local HTTP_GATEWAY_TIMEOUT = ngx.HTTP_GATEWAY_TIMEOUT + + +function _M.new(opts) + + local self = { + host = opts.host, + port = opts.port, + path = opts.path, + } + return setmetatable(self, mt) +end + + +function _M.validate_request(ctx) + local ct = core.request.header(ctx, "Content-Type") or CONTENT_TYPE_JSON + if not core.string.has_prefix(ct, CONTENT_TYPE_JSON) then + return nil, "unsupported content-type: " .. ct .. ", only application/json is supported" + end + + local request_table, err = core.request.get_json_request_body_table() + if not request_table then + return nil, err + end + + return request_table, nil +end + + +local function handle_error(err) + if core.string.find(err, "timeout") then + return HTTP_GATEWAY_TIMEOUT + end + return HTTP_INTERNAL_SERVER_ERROR +end + + +local function read_response(ctx, res) + local body_reader = res.body_reader + if not body_reader then + core.log.warn("AI service sent no response body") + return HTTP_INTERNAL_SERVER_ERROR + end + + local content_type = res.headers["Content-Type"] + core.response.set_header("Content-Type", content_type) + + if content_type and core.string.find(content_type, "text/event-stream") then + while true do + local chunk, err = body_reader() -- will read chunk by chunk + if err then + core.log.warn("failed to read response chunk: ", err) + return handle_error(err) + end + if not chunk then + return + end + + ngx_print(chunk) + ngx_flush(true) + + local events, err = ngx_re.split(chunk, "\n") + if err then + core.log.warn("failed to split response chunk [", chunk, "] to events: ", err) + goto CONTINUE + end + + for _, event in ipairs(events) do + if not core.string.find(event, "data:") or core.string.find(event, "[DONE]") then + goto CONTINUE + end + + local parts, err = ngx_re.split(event, ":", nil, nil, 2) + if err then + core.log.warn("failed to split data event [", event, "] to parts: ", err) + goto CONTINUE + end + + if #parts ~= 2 then + core.log.warn("malformed data event: ", event) + goto CONTINUE + end + + local data, err = core.json.decode(parts[2]) + if err then + core.log.warn("failed to decode data event [", parts[2], "] to json: ", err) + goto CONTINUE + end + + -- usage field is null for non-last events, null is parsed as userdata type + if data and data.usage and type(data.usage) ~= "userdata" then + core.log.info("got token usage from ai service: ", + core.json.delay_encode(data.usage)) + ctx.ai_token_usage = { + prompt_tokens = data.usage.prompt_tokens or 0, + completion_tokens = data.usage.completion_tokens or 0, + total_tokens = data.usage.total_tokens or 0, + } + end + end + + ::CONTINUE:: + end + end + + local raw_res_body, err = res:read_body() + if not raw_res_body then + core.log.warn("failed to read response body: ", err) + return handle_error(err) + end + local res_body, err = core.json.decode(raw_res_body) + if err then + core.log.warn("invalid response body from ai service: ", raw_res_body, " err: ", err, + ", it will cause token usage not available") + else + core.log.info("got token usage from ai service: ", 
core.json.delay_encode(res_body.usage)) + ctx.ai_token_usage = { + prompt_tokens = res_body.usage and res_body.usage.prompt_tokens or 0, + completion_tokens = res_body.usage and res_body.usage.completion_tokens or 0, + total_tokens = res_body.usage and res_body.usage.total_tokens or 0, + } + end + return res.status, raw_res_body +end + + +function _M.request(self, ctx, conf, request_table, extra_opts) + local httpc, err = http.new() + if not httpc then + core.log.error("failed to create http client to send request to LLM server: ", err) + return HTTP_INTERNAL_SERVER_ERROR + end + httpc:set_timeout(conf.timeout) + + local endpoint = extra_opts.endpoint + local parsed_url + if endpoint then + parsed_url = url.parse(endpoint) + end + + local scheme = parsed_url and parsed_url.scheme or "https" + local host = parsed_url and parsed_url.host or self.host + local port = parsed_url and parsed_url.port + if not port then + if scheme == "https" then + port = 443 + else + port = 80 + end + end + local ok, err = httpc:connect({ + scheme = scheme, + host = host, + port = port, + ssl_verify = conf.ssl_verify, + ssl_server_name = parsed_url and parsed_url.host or self.host, + }) + + if not ok then + core.log.warn("failed to connect to LLM server: ", err) + return handle_error(err) + end + + local query_params = extra_opts.query_params + + if type(parsed_url) == "table" and parsed_url.query and #parsed_url.query > 0 then + local args_tab = core.string.decode_args(parsed_url.query) + if type(args_tab) == "table" then + core.table.merge(query_params, args_tab) + end + end + + local path = (parsed_url and parsed_url.path or self.path) + + local headers = extra_opts.headers + headers["Content-Type"] = "application/json" + local params = { + method = "POST", + headers = headers, + ssl_verify = conf.ssl_verify, + path = path, + query = query_params + } + + if extra_opts.model_options then + for opt, val in pairs(extra_opts.model_options) do + request_table[opt] = val + end + end + + local req_json, err = core.json.encode(request_table) + if not req_json then + return nil, err + end + + params.body = req_json + + local res, err = httpc:request(params) + if not res then + core.log.warn("failed to send request to LLM server: ", err) + return handle_error(err) + end + + local code, body = read_response(ctx, res) + + if conf.keepalive then + local ok, err = httpc:set_keepalive(conf.keepalive_timeout, conf.keepalive_pool) + if not ok then + core.log.warn("failed to keepalive connection: ", err) + end + end + + return code, body +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-compatible.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-compatible.lua new file mode 100644 index 0000000..b6c21cf --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai-compatible.lua @@ -0,0 +1,18 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +return require("apisix.plugins.ai-drivers.openai-base").new({}) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai.lua new file mode 100644 index 0000000..e922c8b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/openai.lua @@ -0,0 +1,24 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +return require("apisix.plugins.ai-drivers.openai-base").new( + { + host = "api.openai.com", + path = "/v1/chat/completions", + port = 443 + } +) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/schema.lua new file mode 100644 index 0000000..7a469bd --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-drivers/schema.lua @@ -0,0 +1,44 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
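+-- Illustrative example (values are hypothetical): a minimal request body that
+-- passes chat_request_schema below looks like
+--
+--   {
+--     "messages": [
+--       { "role": "system", "content": "You are a helpful assistant." },
+--       { "role": "user",   "content": "Hello" }
+--     ]
+--   }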
+-- +local _M = {} + +_M.chat_request_schema = { + type = "object", + properties = { + messages = { + type = "array", + minItems = 1, + items = { + properties = { + role = { + type = "string", + enum = {"system", "user", "assistant"} + }, + content = { + type = "string", + minLength = "1", + }, + }, + additionalProperties = false, + required = {"role", "content"}, + }, + } + }, + required = {"messages"} +} + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-decorator.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-decorator.lua new file mode 100644 index 0000000..10b36e8 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-decorator.lua @@ -0,0 +1,117 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx = ngx +local pairs = pairs +local EMPTY = {} + +local prompt_schema = { + properties = { + role = { + type = "string", + enum = { "system", "user", "assistant" } + }, + content = { + type = "string", + minLength = 1, + } + }, + required = { "role", "content" } +} + +local prompts = { + type = "array", + items = prompt_schema +} + +local schema = { + type = "object", + properties = { + prepend = prompts, + append = prompts, + }, + anyOf = { + { required = { "prepend" } }, + { required = { "append" } }, + { required = { "append", "prepend" } }, + }, +} + + +local _M = { + version = 0.1, + priority = 1070, + name = "ai-prompt-decorator", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function get_request_body_table() + local body, err = core.request.get_body() + if not body then + return nil, { message = "could not get body: " .. err } + end + + local body_tab, err = core.json.decode(body) + if not body_tab then + return nil, { message = "could not get parse JSON request body: " .. err } + end + + return body_tab +end + + +local function decorate(conf, body_tab) + local new_messages = conf.prepend or EMPTY + for _, message in pairs(body_tab.messages) do + core.table.insert_tail(new_messages, message) + end + + for _, message in pairs(conf.append or EMPTY) do + core.table.insert_tail(new_messages, message) + end + + body_tab.messages = new_messages +end + + +function _M.rewrite(conf, ctx) + local body_tab, err = get_request_body_table() + if not body_tab then + return 400, err + end + + if not body_tab.messages then + return 400, "messages missing from request body" + end + decorate(conf, body_tab) -- will decorate body_tab in place + + local new_jbody, err = core.json.encode(body_tab) + if not new_jbody then + return 500, { message = "failed to parse modified JSON request body: " .. 
err } + end + + ngx.req.set_body_data(new_jbody) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-guard.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-guard.lua new file mode 100644 index 0000000..fd6a931 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-guard.lua @@ -0,0 +1,153 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx = ngx +local ipairs = ipairs +local table = table +local re_compile = require("resty.core.regex").re_match_compile +local re_find = ngx.re.find + +local plugin_name = "ai-prompt-guard" + +local schema = { + type = "object", + properties = { + match_all_roles = { + type = "boolean", + default = false, + }, + match_all_conversation_history = { + type = "boolean", + default = false, + }, + allow_patterns = { + type = "array", + items = {type = "string"}, + default = {}, + }, + deny_patterns = { + type = "array", + items = {type = "string"}, + default = {}, + }, + }, +} + +local _M = { + version = 0.1, + priority = 1072, + name = plugin_name, + schema = schema, +} + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + -- Validate allow_patterns + for _, pattern in ipairs(conf.allow_patterns) do + local compiled = re_compile(pattern, "jou") + if not compiled then + return false, "invalid allow_pattern: " .. pattern + end + end + + -- Validate deny_patterns + for _, pattern in ipairs(conf.deny_patterns) do + local compiled = re_compile(pattern, "jou") + if not compiled then + return false, "invalid deny_pattern: " .. 
pattern + end + end + + return true +end + +local function get_content_to_check(conf, messages) + if conf.match_all_conversation_history then + return messages + end + local contents = {} + if #messages > 0 then + local last_msg = messages[#messages] + if last_msg then + core.table.insert(contents, last_msg) + end + end + return contents +end + +function _M.access(conf, ctx) + local body = core.request.get_body() + if not body then + core.log.error("Empty request body") + return 400, {message = "Empty request body"} + end + + local json_body, err = core.json.decode(body) + if err then + return 400, {message = err} + end + + local messages = json_body.messages or {} + messages = get_content_to_check(conf, messages) + if not conf.match_all_roles then + -- filter to only user messages + local new_messages = {} + for _, msg in ipairs(messages) do + if msg.role == "user" then + core.table.insert(new_messages, msg) + end + end + messages = new_messages + end + if #messages == 0 then --nothing to check + return 200 + end + -- extract only messages + local content = {} + for _, msg in ipairs(messages) do + if msg.content then + core.table.insert(content, msg.content) + end + end + local content_to_check = table.concat(content, " ") + -- Allow patterns check + if #conf.allow_patterns > 0 then + local any_allowed = false + for _, pattern in ipairs(conf.allow_patterns) do + if re_find(content_to_check, pattern, "jou") then + any_allowed = true + break + end + end + if not any_allowed then + return 400, {message = "Request doesn't match allow patterns"} + end + end + + -- Deny patterns check + for _, pattern in ipairs(conf.deny_patterns) do + if re_find(content_to_check, pattern, "jou") then + return 400, {message = "Request contains prohibited content"} + end + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-template.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-template.lua new file mode 100644 index 0000000..d2c3669 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-prompt-template.lua @@ -0,0 +1,146 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
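+-- Illustrative sketch (names and values are hypothetical): a route might
+-- configure this plugin as
+--
+--   "ai-prompt-template": {
+--     "templates": [{
+--       "name": "summarize",
+--       "template": {
+--         "model": "gpt-4",
+--         "messages": [{ "role": "user", "content": "Summarize: {{ text }}" }]
+--       }
+--     }]
+--   }
+--
+-- A client then sends { "template_name": "summarize", "text": "..." } and the
+-- selected template is rendered through the body-transformer plugin.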
+-- +local core = require("apisix.core") +local body_transformer = require("apisix.plugins.body-transformer") +local ipairs = ipairs + +local prompt_schema = { + properties = { + role = { + type = "string", + enum = { "system", "user", "assistant" } + }, + content = { + type = "string", + minLength = 1, + } + }, + required = { "role", "content" } +} + +local prompts = { + type = "array", + minItems = 1, + items = prompt_schema +} + +local schema = { + type = "object", + properties = { + templates = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + name = { + type = "string", + minLength = 1, + }, + template = { + type = "object", + properties = { + model = { + type = "string", + minLength = 1, + }, + messages = prompts + } + } + }, + required = {"name", "template"} + } + }, + }, + required = {"templates"}, +} + + +local _M = { + version = 0.1, + priority = 1071, + name = "ai-prompt-template", + schema = schema, +} + +local templates_lrucache = core.lrucache.new({ + ttl = 300, count = 256 +}) + +local templates_json_lrucache = core.lrucache.new({ + ttl = 300, count = 256 +}) + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function get_request_body_table() + local body, err = core.request.get_body() + if not body then + return nil, { message = "could not get body: " .. err } + end + + local body_tab, err = core.json.decode(body) + if not body_tab then + return nil, { message = "could not get parse JSON request body: ", err } + end + + return body_tab +end + + +local function find_template(conf, template_name) + for _, template in ipairs(conf.templates) do + if template.name == template_name then + return template.template + end + end + return nil +end + +function _M.rewrite(conf, ctx) + local body_tab, err = get_request_body_table() + if not body_tab then + return 400, err + end + local template_name = body_tab.template_name + if not template_name then + return 400, { message = "template name is missing in request." } + end + + local template = templates_lrucache(template_name, conf, find_template, conf, template_name) + if not template then + return 400, { message = "template: " .. template_name .. " not configured." } + end + + local template_json = templates_json_lrucache(template, template, core.json.encode, template) + core.log.info("sending template to body_transformer: ", template_json) + return body_transformer.rewrite( + { + request = { + template = template_json, + input_format = "json" + } + }, + ctx + ) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy-multi.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy-multi.lua new file mode 100644 index 0000000..7ac8bb2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy-multi.lua @@ -0,0 +1,227 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local schema = require("apisix.plugins.ai-proxy.schema") +local base = require("apisix.plugins.ai-proxy.base") +local plugin = require("apisix.plugin") + +local require = require +local pcall = pcall +local ipairs = ipairs +local type = type + +local priority_balancer = require("apisix.balancer.priority") + +local pickers = {} +local lrucache_server_picker = core.lrucache.new({ + ttl = 300, count = 256 +}) + +local plugin_name = "ai-proxy-multi" +local _M = { + version = 0.5, + priority = 1041, + name = plugin_name, + schema = schema.ai_proxy_multi_schema, +} + + +local function get_chash_key_schema(hash_on) + if hash_on == "vars" then + return core.schema.upstream_hash_vars_schema + end + + if hash_on == "header" or hash_on == "cookie" then + return core.schema.upstream_hash_header_schema + end + + if hash_on == "consumer" then + return nil, nil + end + + if hash_on == "vars_combinations" then + return core.schema.upstream_hash_vars_combinations_schema + end + + return nil, "invalid hash_on type " .. hash_on +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema.ai_proxy_multi_schema, conf) + if not ok then + return false, err + end + + for _, instance in ipairs(conf.instances) do + local ai_driver, err = pcall(require, "apisix.plugins.ai-drivers." .. instance.provider) + if not ai_driver then + core.log.warn("fail to require ai provider: ", instance.provider, ", err", err) + return false, "ai provider: " .. instance.provider .. " is not supported." + end + end + local algo = core.table.try_read_attr(conf, "balancer", "algorithm") + local hash_on = core.table.try_read_attr(conf, "balancer", "hash_on") + local hash_key = core.table.try_read_attr(conf, "balancer", "key") + + if type(algo) == "string" and algo == "chash" then + if not hash_on then + return false, "must configure `hash_on` when balancer algorithm is chash" + end + + if hash_on ~= "consumer" and not hash_key then + return false, "must configure `hash_key` when balancer `hash_on` is not set to cookie" + end + + local key_schema, err = get_chash_key_schema(hash_on) + if err then + return false, "type is chash, err: " .. err + end + + if key_schema then + local ok, err = core.schema.check(key_schema, hash_key) + if not ok then + return false, "invalid configuration: " .. err + end + end + end + + return ok +end + + +local function transform_instances(new_instances, instance) + if not new_instances._priority_index then + new_instances._priority_index = {} + end + + if not new_instances[instance.priority] then + new_instances[instance.priority] = {} + core.table.insert(new_instances._priority_index, instance.priority) + end + + new_instances[instance.priority][instance.name] = instance.weight +end + + +local function create_server_picker(conf, ups_tab) + local picker = pickers[conf.balancer.algorithm] -- nil check + if not picker then + pickers[conf.balancer.algorithm] = require("apisix.balancer." .. 
conf.balancer.algorithm) + picker = pickers[conf.balancer.algorithm] + end + local new_instances = {} + for _, ins in ipairs(conf.instances) do + transform_instances(new_instances, ins) + end + + if #new_instances._priority_index > 1 then + core.log.info("new instances: ", core.json.delay_encode(new_instances)) + return priority_balancer.new(new_instances, ups_tab, picker) + end + core.log.info("upstream nodes: ", + core.json.delay_encode(new_instances[new_instances._priority_index[1]])) + return picker.new(new_instances[new_instances._priority_index[1]], ups_tab) +end + + +local function get_instance_conf(instances, name) + for _, ins in ipairs(instances) do + if ins.name == name then + return ins + end + end +end + + +local function pick_target(ctx, conf, ups_tab) + local server_picker = ctx.server_picker + if not server_picker then + server_picker = lrucache_server_picker(ctx.matched_route.key, plugin.conf_version(conf), + create_server_picker, conf, ups_tab) + end + if not server_picker then + return nil, nil, "failed to fetch server picker" + end + ctx.server_picker = server_picker + + local instance_name, err = server_picker.get(ctx) + if err then + return nil, nil, err + end + ctx.balancer_server = instance_name + if conf.fallback_strategy == "instance_health_and_rate_limiting" then + local ai_rate_limiting = require("apisix.plugins.ai-rate-limiting") + for _ = 1, #conf.instances do + if ai_rate_limiting.check_instance_status(nil, ctx, instance_name) then + break + end + core.log.info("ai instance: ", instance_name, + " is not available, try to pick another one") + server_picker.after_balance(ctx, true) + instance_name, err = server_picker.get(ctx) + if err then + return nil, nil, err + end + ctx.balancer_server = instance_name + end + end + + local instance_conf = get_instance_conf(conf.instances, instance_name) + return instance_name, instance_conf +end + + +local function pick_ai_instance(ctx, conf, ups_tab) + local instance_name, instance_conf, err + if #conf.instances == 1 then + instance_name = conf.instances[1].name + instance_conf = conf.instances[1] + else + instance_name, instance_conf, err = pick_target(ctx, conf, ups_tab) + end + + core.log.info("picked instance: ", instance_name) + return instance_name, instance_conf, err +end + + +function _M.access(conf, ctx) + local ups_tab = {} + local algo = core.table.try_read_attr(conf, "balancer", "algorithm") + if algo == "chash" then + local hash_on = core.table.try_read_attr(conf, "balancer", "hash_on") + local hash_key = core.table.try_read_attr(conf, "balancer", "key") + ups_tab["key"] = hash_key + ups_tab["hash_on"] = hash_on + end + + local name, ai_instance, err = pick_ai_instance(ctx, conf, ups_tab) + if err then + return 503, err + end + ctx.picked_ai_instance_name = name + ctx.picked_ai_instance = ai_instance + ctx.bypass_nginx_upstream = true +end + + +_M.before_proxy = base.before_proxy + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy.lua new file mode 100644 index 0000000..fa7f5f2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy.lua @@ -0,0 +1,57 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local schema = require("apisix.plugins.ai-proxy.schema") +local base = require("apisix.plugins.ai-proxy.base") + +local require = require +local pcall = pcall + +local plugin_name = "ai-proxy" +local _M = { + version = 0.5, + priority = 1040, + name = plugin_name, + schema = schema.ai_proxy_schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema.ai_proxy_schema, conf) + if not ok then + return false, err + end + local ai_driver, err = pcall(require, "apisix.plugins.ai-drivers." .. conf.provider) + if not ai_driver then + core.log.warn("fail to require ai provider: ", conf.provider, ", err", err) + return false, "ai provider: " .. conf.provider .. " is not supported." + end + return ok +end + + +function _M.access(conf, ctx) + ctx.picked_ai_instance_name = "ai-proxy" + ctx.picked_ai_instance = conf + ctx.bypass_nginx_upstream = true +end + + +_M.before_proxy = base.before_proxy + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/base.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/base.lua new file mode 100644 index 0000000..9076260 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/base.lua @@ -0,0 +1,50 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local require = require +local bad_request = ngx.HTTP_BAD_REQUEST + +local _M = {} + +function _M.before_proxy(conf, ctx) + local ai_instance = ctx.picked_ai_instance + local ai_driver = require("apisix.plugins.ai-drivers." .. 
ai_instance.provider) + + local request_body, err = ai_driver.validate_request(ctx) + if not request_body then + return bad_request, err + end + + local extra_opts = { + endpoint = core.table.try_read_attr(ai_instance, "override", "endpoint"), + query_params = ai_instance.auth.query or {}, + headers = (ai_instance.auth.header or {}), + model_options = ai_instance.options, + } + + if request_body.stream then + request_body.stream_options = { + include_usage = true + } + end + + return ai_driver:request(ctx, conf, request_body, extra_opts) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/schema.lua new file mode 100644 index 0000000..0a3c028 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-proxy/schema.lua @@ -0,0 +1,219 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local _M = {} + +local auth_item_schema = { + type = "object", + patternProperties = { + ["^[a-zA-Z0-9._-]+$"] = { + type = "string" + } + } +} + +local auth_schema = { + type = "object", + patternProperties = { + header = auth_item_schema, + query = auth_item_schema, + }, + additionalProperties = false, +} + +local model_options_schema = { + description = "Key/value settings for the model", + type = "object", + properties = { + model = { + type = "string", + description = "Model to execute.", + }, + }, + additionalProperties = true, +} + +local ai_instance_schema = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + name = { + type = "string", + minLength = 1, + maxLength = 100, + description = "Name of the AI service instance.", + }, + provider = { + type = "string", + description = "Type of the AI service instance.", + enum = { + "openai", + "deepseek", + "aimlapi", + "openai-compatible", + }, -- add more providers later + }, + priority = { + type = "integer", + description = "Priority of the provider for load balancing", + default = 0, + }, + weight = { + type = "integer", + minimum = 0, + }, + auth = auth_schema, + options = model_options_schema, + override = { + type = "object", + properties = { + endpoint = { + type = "string", + description = "To be specified to override the endpoint of the AI Instance", + }, + }, + }, + }, + required = {"name", "provider", "auth", "weight"} + }, +} + + +_M.ai_proxy_schema = { + type = "object", + properties = { + provider = { + type = "string", + description = "Type of the AI service instance.", + enum = { + "openai", + "deepseek", + "aimlapi", + "openai-compatible", + }, -- add more providers later + + }, + auth = auth_schema, + options = model_options_schema, + timeout = { + type = "integer", + minimum = 1, + default = 30000, + description = "timeout in 
milliseconds", + }, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = { + type = "integer", + minimum = 1000, + default = 60000, + description = "keepalive timeout in milliseconds", + }, + keepalive_pool = {type = "integer", minimum = 1, default = 30}, + ssl_verify = {type = "boolean", default = true }, + override = { + type = "object", + properties = { + endpoint = { + type = "string", + description = "To be specified to override the endpoint of the AI Instance", + }, + }, + }, + }, + required = {"provider", "auth"} +} + +_M.ai_proxy_multi_schema = { + type = "object", + properties = { + balancer = { + type = "object", + properties = { + algorithm = { + type = "string", + enum = { "chash", "roundrobin" }, + }, + hash_on = { + type = "string", + default = "vars", + enum = { + "vars", + "header", + "cookie", + "consumer", + "vars_combinations", + }, + }, + key = { + description = "the key of chash for dynamic load balancing", + type = "string", + }, + }, + default = { algorithm = "roundrobin" } + }, + instances = ai_instance_schema, + fallback_strategy = { + type = "string", + enum = { "instance_health_and_rate_limiting" }, + default = "instance_health_and_rate_limiting", + }, + timeout = { + type = "integer", + minimum = 1, + default = 30000, + description = "timeout in milliseconds", + }, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = { + type = "integer", + minimum = 1000, + default = 60000, + description = "keepalive timeout in milliseconds", + }, + keepalive_pool = {type = "integer", minimum = 1, default = 30}, + ssl_verify = {type = "boolean", default = true }, + }, + required = {"instances"} +} + +_M.chat_request_schema = { + type = "object", + properties = { + messages = { + type = "array", + minItems = 1, + items = { + properties = { + role = { + type = "string", + enum = {"system", "user", "assistant"} + }, + content = { + type = "string", + minLength = "1", + }, + }, + additionalProperties = false, + required = {"role", "content"}, + }, + } + }, + required = {"messages"} +} + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag.lua new file mode 100644 index 0000000..0acd5f1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag.lua @@ -0,0 +1,156 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local next = next +local require = require +local ngx_req = ngx.req + +local http = require("resty.http") +local core = require("apisix.core") + +local azure_openai_embeddings = require("apisix.plugins.ai-rag.embeddings.azure_openai").schema +local azure_ai_search_schema = require("apisix.plugins.ai-rag.vector-search.azure_ai_search").schema + +local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR +local HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST + +local schema = { + type = "object", + properties = { + type = "object", + embeddings_provider = { + type = "object", + properties = { + azure_openai = azure_openai_embeddings + }, + -- ensure only one provider can be configured while implementing support for + -- other providers + required = { "azure_openai" }, + maxProperties = 1, + }, + vector_search_provider = { + type = "object", + properties = { + azure_ai_search = azure_ai_search_schema + }, + -- ensure only one provider can be configured while implementing support for + -- other providers + required = { "azure_ai_search" }, + maxProperties = 1 + }, + }, + required = { "embeddings_provider", "vector_search_provider" } +} + +local request_schema = { + type = "object", + properties = { + ai_rag = { + type = "object", + properties = { + vector_search = {}, + embeddings = {}, + }, + required = { "vector_search", "embeddings" } + } + } +} + +local _M = { + version = 0.1, + priority = 1060, + name = "ai-rag", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + local httpc = http.new() + local body_tab, err = core.request.get_json_request_body_table() + if not body_tab then + return HTTP_BAD_REQUEST, err + end + if not body_tab["ai_rag"] then + core.log.error("request body must have \"ai-rag\" field") + return HTTP_BAD_REQUEST + end + + local embeddings_provider = next(conf.embeddings_provider) + local embeddings_provider_conf = conf.embeddings_provider[embeddings_provider] + local embeddings_driver = require("apisix.plugins.ai-rag.embeddings." .. embeddings_provider) + + local vector_search_provider = next(conf.vector_search_provider) + local vector_search_provider_conf = conf.vector_search_provider[vector_search_provider] + local vector_search_driver = require("apisix.plugins.ai-rag.vector-search." .. + vector_search_provider) + + local vs_req_schema = vector_search_driver.request_schema + local emb_req_schema = embeddings_driver.request_schema + + request_schema.properties.ai_rag.properties.vector_search = vs_req_schema + request_schema.properties.ai_rag.properties.embeddings = emb_req_schema + + local ok, err = core.schema.check(request_schema, body_tab) + if not ok then + core.log.error("request body fails schema check: ", err) + return HTTP_BAD_REQUEST + end + + local embeddings, status, err = embeddings_driver.get_embeddings(embeddings_provider_conf, + body_tab["ai_rag"].embeddings, httpc) + if not embeddings then + core.log.error("could not get embeddings: ", err) + return status, err + end + + local search_body = body_tab["ai_rag"].vector_search + search_body.embeddings = embeddings + local res, status, err = vector_search_driver.search(vector_search_provider_conf, + search_body, httpc) + if not res then + core.log.error("could not get vector_search result: ", err) + return status, err + end + + -- remove ai_rag from request body because their purpose is served + -- also, these values will cause failure when proxying requests to LLM. 
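+    -- Illustrative shape of the rewrite performed below (the concrete JSON is an
+    -- assumption for illustration, the field names mirror this function): an incoming
+    --   {"ai_rag": {...}, "messages": [...]}
+    -- leaves this phase as
+    --   {"messages": [..., {"role": "user", "content": <raw vector-search response body>}]}
+    -- i.e. the body returned by the vector-search driver is appended as one extra
+    -- user message and the "ai_rag" block is dropped before proxying to the LLM.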
+ body_tab["ai_rag"] = nil + + if not body_tab.messages then + body_tab.messages = {} + end + + local augment = { + role = "user", + content = res + } + core.table.insert_tail(body_tab.messages, augment) + + local req_body_json, err = core.json.encode(body_tab) + if not req_body_json then + return HTTP_INTERNAL_SERVER_ERROR, err + end + + ngx_req.set_body_data(req_body_json) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/embeddings/azure_openai.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/embeddings/azure_openai.lua new file mode 100644 index 0000000..b6bacbf --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/embeddings/azure_openai.lua @@ -0,0 +1,88 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR +local HTTP_OK = ngx.HTTP_OK +local type = type + +local _M = {} + +_M.schema = { + type = "object", + properties = { + endpoint = { + type = "string", + }, + api_key = { + type = "string", + }, + }, + required = { "endpoint", "api_key" } +} + +function _M.get_embeddings(conf, body, httpc) + local body_tab, err = core.json.encode(body) + if not body_tab then + return nil, HTTP_INTERNAL_SERVER_ERROR, err + end + + local res, err = httpc:request_uri(conf.endpoint, { + method = "POST", + headers = { + ["Content-Type"] = "application/json", + ["api-key"] = conf.api_key, + }, + body = body_tab + }) + + if not res or not res.body then + return nil, HTTP_INTERNAL_SERVER_ERROR, err + end + + if res.status ~= HTTP_OK then + return nil, res.status, res.body + end + + local res_tab, err = core.json.decode(res.body) + if not res_tab then + return nil, HTTP_INTERNAL_SERVER_ERROR, err + end + + if type(res_tab.data) ~= "table" or core.table.isempty(res_tab.data) then + return nil, HTTP_INTERNAL_SERVER_ERROR, res.body + end + + local embeddings, err = core.json.encode(res_tab.data[1].embedding) + if not embeddings then + return nil, HTTP_INTERNAL_SERVER_ERROR, err + end + + return res_tab.data[1].embedding +end + + +_M.request_schema = { + type = "object", + properties = { + input = { + type = "string" + } + }, + required = { "input" } +} + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/vector-search/azure_ai_search.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/vector-search/azure_ai_search.lua new file mode 100644 index 0000000..7a01064 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rag/vector-search/azure_ai_search.lua @@ -0,0 +1,83 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR +local HTTP_OK = ngx.HTTP_OK + +local _M = {} + +_M.schema = { + type = "object", + properties = { + endpoint = { + type = "string", + }, + api_key = { + type = "string", + }, + }, + required = {"endpoint", "api_key"} +} + + +function _M.search(conf, search_body, httpc) + local body = { + vectorQueries = { + { + kind = "vector", + vector = search_body.embeddings, + fields = search_body.fields + } + } + } + local final_body, err = core.json.encode(body) + if not final_body then + return nil, HTTP_INTERNAL_SERVER_ERROR, err + end + + local res, err = httpc:request_uri(conf.endpoint, { + method = "POST", + headers = { + ["Content-Type"] = "application/json", + ["api-key"] = conf.api_key, + }, + body = final_body + }) + + if not res or not res.body then + return nil, HTTP_INTERNAL_SERVER_ERROR, err + end + + if res.status ~= HTTP_OK then + return nil, res.status, res.body + end + + return res.body +end + + +_M.request_schema = { + type = "object", + properties = { + fields = { + type = "string" + } + }, + required = { "fields" } +} + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rate-limiting.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rate-limiting.lua new file mode 100644 index 0000000..d8bf970 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-rate-limiting.lua @@ -0,0 +1,234 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local setmetatable = setmetatable +local ipairs = ipairs +local type = type +local core = require("apisix.core") +local limit_count = require("apisix.plugins.limit-count.init") + +local plugin_name = "ai-rate-limiting" + +local instance_limit_schema = { + type = "object", + properties = { + name = {type = "string"}, + limit = {type = "integer", minimum = 1}, + time_window = {type = "integer", minimum = 1} + }, + required = {"name", "limit", "time_window"} +} + +local schema = { + type = "object", + properties = { + limit = {type = "integer", exclusiveMinimum = 0}, + time_window = {type = "integer", exclusiveMinimum = 0}, + show_limit_quota_header = {type = "boolean", default = true}, + limit_strategy = { + type = "string", + enum = {"total_tokens", "prompt_tokens", "completion_tokens"}, + default = "total_tokens", + description = "The strategy to limit the tokens" + }, + instances = { + type = "array", + items = instance_limit_schema, + minItems = 1, + }, + rejected_code = { + type = "integer", minimum = 200, maximum = 599, default = 503 + }, + rejected_msg = { + type = "string", minLength = 1 + }, + }, + dependencies = { + limit = {"time_window"}, + time_window = {"limit"} + }, + anyOf = { + { + required = {"limit", "time_window"} + }, + { + required = {"instances"} + } + } +} + +local _M = { + version = 0.1, + priority = 1030, + name = plugin_name, + schema = schema +} + +local limit_conf_cache = core.lrucache.new({ + ttl = 300, count = 512 +}) + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function transform_limit_conf(plugin_conf, instance_conf, instance_name) + local key = plugin_name .. "#global" + local limit = plugin_conf.limit + local time_window = plugin_conf.time_window + local name = instance_name or "" + if instance_conf then + name = instance_conf.name + key = instance_conf.name + limit = instance_conf.limit + time_window = instance_conf.time_window + end + return { + _vid = key, + + key = key, + count = limit, + time_window = time_window, + rejected_code = plugin_conf.rejected_code, + rejected_msg = plugin_conf.rejected_msg, + show_limit_quota_header = plugin_conf.show_limit_quota_header, + -- limit-count need these fields + policy = "local", + key_type = "constant", + allow_degradation = false, + sync_interval = -1, + + limit_header = "X-AI-RateLimit-Limit-" .. name, + remaining_header = "X-AI-RateLimit-Remaining-" .. name, + reset_header = "X-AI-RateLimit-Reset-" .. 
name, + } +end + + +local function fetch_limit_conf_kvs(conf) + local mt = { + __index = function(t, k) + if not conf.limit then + return nil + end + + local limit_conf = transform_limit_conf(conf, nil, k) + t[k] = limit_conf + return limit_conf + end + } + local limit_conf_kvs = setmetatable({}, mt) + local conf_instances = conf.instances or {} + for _, limit_conf in ipairs(conf_instances) do + limit_conf_kvs[limit_conf.name] = transform_limit_conf(conf, limit_conf) + end + return limit_conf_kvs +end + + +function _M.access(conf, ctx) + local ai_instance_name = ctx.picked_ai_instance_name + if not ai_instance_name then + return + end + + local limit_conf_kvs = limit_conf_cache(conf, nil, fetch_limit_conf_kvs, conf) + local limit_conf = limit_conf_kvs[ai_instance_name] + if not limit_conf then + return + end + local code, msg = limit_count.rate_limit(limit_conf, ctx, plugin_name, 1, true) + ctx.ai_rate_limiting = code and true or false + return code, msg +end + + +function _M.check_instance_status(conf, ctx, instance_name) + if conf == nil then + local plugins = ctx.plugins + for i = 1, #plugins, 2 do + if plugins[i]["name"] == plugin_name then + conf = plugins[i + 1] + end + end + end + if not conf then + return true + end + + instance_name = instance_name or ctx.picked_ai_instance_name + if not instance_name then + return nil, "missing instance_name" + end + + if type(instance_name) ~= "string" then + return nil, "invalid instance_name" + end + + local limit_conf_kvs = limit_conf_cache(conf, nil, fetch_limit_conf_kvs, conf) + local limit_conf = limit_conf_kvs[instance_name] + if not limit_conf then + return true + end + + local code, _ = limit_count.rate_limit(limit_conf, ctx, plugin_name, 1, true) + if code then + core.log.info("rate limit for instance: ", instance_name, " code: ", code) + return false + end + return true +end + + +local function get_token_usage(conf, ctx) + local usage = ctx.ai_token_usage + if not usage then + return + end + return usage[conf.limit_strategy] +end + + +function _M.log(conf, ctx) + local instance_name = ctx.picked_ai_instance_name + if not instance_name then + return + end + + if ctx.ai_rate_limiting then + return + end + + local used_tokens = get_token_usage(conf, ctx) + if not used_tokens then + core.log.error("failed to get token usage for llm service") + return + end + + core.log.info("instance name: ", instance_name, " used tokens: ", used_tokens) + + local limit_conf_kvs = limit_conf_cache(conf, nil, fetch_limit_conf_kvs, conf) + local limit_conf = limit_conf_kvs[instance_name] + if limit_conf then + limit_count.rate_limit(limit_conf, ctx, plugin_name, used_tokens) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-request-rewrite.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-request-rewrite.lua new file mode 100644 index 0000000..1b850eb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai-request-rewrite.lua @@ -0,0 +1,231 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local require = require +local pcall = pcall +local ngx = ngx +local req_set_body_data = ngx.req.set_body_data +local HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST +local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR + +local plugin_name = "ai-request-rewrite" + +local auth_item_schema = { + type = "object", + patternProperties = { + ["^[a-zA-Z0-9._-]+$"] = { + type = "string" + } + } +} + +local auth_schema = { + type = "object", + properties = { + header = auth_item_schema, + query = auth_item_schema + }, + additionalProperties = false +} + +local model_options_schema = { + description = "Key/value settings for the model", + type = "object", + properties = { + model = { + type = "string", + description = "Model to execute. Examples: \"gpt-3.5-turbo\" for openai, " .. + "\"deepseek-chat\" for deekseek, or \"qwen-turbo\" for openai-compatible services" + } + }, + additionalProperties = true +} + +local schema = { + type = "object", + properties = { + prompt = { + type = "string", + description = "The prompt to rewrite client request." + }, + provider = { + type = "string", + description = "Name of the AI service provider.", + enum = { + "openai", + "openai-compatible", + "deepseek", + "aimlapi" + } -- add more providers later + }, + auth = auth_schema, + options = model_options_schema, + timeout = { + type = "integer", + minimum = 1, + maximum = 60000, + default = 30000, + description = "Total timeout in milliseconds for requests to LLM service, " .. + "including connect, send, and read timeouts." + }, + keepalive = { + type = "boolean", + default = true + }, + keepalive_pool = { + type = "integer", + minimum = 1, + default = 30 + }, + ssl_verify = { + type = "boolean", + default = true + }, + override = { + type = "object", + properties = { + endpoint = { + type = "string", + description = "To be specified to override " .. + "the endpoint of the AI service provider." + } + } + } + }, + required = {"prompt", "provider", "auth"} +} + +local _M = { + version = 0.1, + priority = 1073, + name = plugin_name, + schema = schema +} + +local function request_to_llm(conf, request_table, ctx) + local ok, ai_driver = pcall(require, "apisix.plugins.ai-drivers." .. conf.provider) + if not ok then + return nil, nil, "failed to load ai-driver: " .. conf.provider + end + + local extra_opts = { + endpoint = core.table.try_read_attr(conf, "override", "endpoint"), + query_params = conf.auth.query or {}, + headers = (conf.auth.header or {}), + model_options = conf.options + } + + local res, err, httpc = ai_driver:request(conf, request_table, extra_opts) + if err then + return nil, nil, err + end + + local resp_body, err = res:read_body() + httpc:close() + if err then + return nil, nil, err + end + + return res, resp_body +end + + +local function parse_llm_response(res_body) + local response_table, err = core.json.decode(res_body) + + if err then + return nil, "failed to decode llm response " .. ", err: " .. 
err + end + + if not response_table.choices or not response_table.choices[1] then + return nil, "'choices' not in llm response" + end + + local message = response_table.choices[1].message + if not message then + return nil, "'message' not in llm response choices" + end + + return message.content +end + + +function _M.check_schema(conf) + -- openai-compatible should be used with override.endpoint + if conf.provider == "openai-compatible" then + local override = conf.override + + if not override or not override.endpoint then + return false, "override.endpoint is required for openai-compatible provider" + end + end + + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + local client_request_body, err = core.request.get_body() + if err then + core.log.warn("failed to get request body: ", err) + return HTTP_BAD_REQUEST + end + + if not client_request_body then + core.log.warn("missing request body") + return + end + + -- Prepare request for LLM service + local ai_request_table = { + messages = { + { + role = "system", + content = conf.prompt + }, + { + role = "user", + content = client_request_body + } + }, + stream = false + } + + -- Send request to LLM service + local res, resp_body, err = request_to_llm(conf, ai_request_table, ctx) + if err then + core.log.error("failed to request to LLM service: ", err) + return HTTP_INTERNAL_SERVER_ERROR + end + + -- Handle LLM response + if res.status > 299 then + core.log.error("LLM service returned error status: ", res.status) + return HTTP_INTERNAL_SERVER_ERROR + end + + -- Parse LLM response + local llm_response, err = parse_llm_response(resp_body) + if err then + core.log.error("failed to parse LLM response: ", err) + return HTTP_INTERNAL_SERVER_ERROR + end + + req_set_body_data(llm_response) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai.lua new file mode 100644 index 0000000..278201d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ai.lua @@ -0,0 +1,324 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local apisix = require("apisix") +local core = require("apisix.core") +local router = require("apisix.router") +local get_global_rules = require("apisix.global_rules").global_rules +local event = require("apisix.core.event") +local balancer = require("ngx.balancer") +local ngx = ngx +local is_http = ngx.config.subsystem == "http" +local enable_keepalive = balancer.enable_keepalive and is_http +local is_apisix_or, response = pcall(require, "resty.apisix.response") +local ipairs = ipairs +local pcall = pcall +local loadstring = loadstring +local type = type +local pairs = pairs + +local get_cache_key_func +local get_cache_key_func_def_render + +local get_cache_key_func_def = [[ +return function(ctx) + local var = ctx.var + return var.uri + {% if route_flags["methods"] then %} + .. "#" .. var.method + {% end %} + {% if route_flags["host"] then %} + .. "#" .. var.host + {% end %} +end +]] + +local route_lrucache + +local schema = {} + +local plugin_name = "ai" + +local _M = { + version = 0.1, + priority = 22900, + name = plugin_name, + schema = schema, + scope = "global", +} + +local orig_router_http_matching +local orig_handle_upstream +local orig_http_balancer_phase + +local default_keepalive_pool = {} + +local function create_router_matching_cache(api_ctx) + orig_router_http_matching(api_ctx) + return core.table.deepcopy(api_ctx, { + shallows = { "self.matched_route.value.upstream.parent" } + }) +end + + +local function ai_router_http_matching(api_ctx) + core.log.info("route match mode: ai_match") + + local key = get_cache_key_func(api_ctx) + core.log.info("route cache key: ", key) + local api_ctx_cache = route_lrucache(key, nil, + create_router_matching_cache, api_ctx) + -- if the version has not changed, use the cached route + if api_ctx then + api_ctx.matched_route = api_ctx_cache.matched_route + if api_ctx_cache.curr_req_matched then + api_ctx.curr_req_matched = core.table.clone(api_ctx_cache.curr_req_matched) + end + end +end + + +local function gen_get_cache_key_func(route_flags) + if get_cache_key_func_def_render == nil then + local template = require("resty.template") + get_cache_key_func_def_render = template.compile(get_cache_key_func_def) + end + + local str = get_cache_key_func_def_render({route_flags = route_flags}) + local func, err = loadstring(str) + if func == nil then + return false, err + else + local ok, err_or_function = pcall(func) + if not ok then + return false, err_or_function + end + get_cache_key_func = err_or_function + end + + return true +end + + +local function ai_upstream() + core.log.info("enable sample upstream") +end + + +local pool_opt +local function ai_http_balancer_phase() + local api_ctx = ngx.ctx.api_ctx + if not api_ctx then + core.log.error("invalid api_ctx") + return core.response.exit(500) + end + + if is_apisix_or then + local ok, err = response.skip_body_filter_by_lua() + if not ok then + core.log.error("failed to skip body filter by lua: ", err) + end + end + + local route = api_ctx.matched_route + local server = route.value.upstream.nodes[1] + if enable_keepalive then + local ok, err = balancer.set_current_peer(server.host, server.port or 80, pool_opt) + if not ok then + core.log.error("failed to set server peer [", server.host, ":", + server.port, "] err: ", err) + return ok, err + end + balancer.enable_keepalive(default_keepalive_pool.idle_timeout, + default_keepalive_pool.requests) + else + balancer.set_current_peer(server.host, server.port or 80) + end +end + + +local function routes_analyze(routes) + if 
orig_router_http_matching == nil then + orig_router_http_matching = router.router_http.matching + end + + if orig_handle_upstream == nil then + orig_handle_upstream = apisix.handle_upstream + end + + if orig_http_balancer_phase == nil then + orig_http_balancer_phase = apisix.http_balancer_phase + end + + local route_flags = core.table.new(0, 16) + local route_up_flags = core.table.new(0, 12) + + for _, route in ipairs(routes) do + if type(route) == "table" then + for key, value in pairs(route.value) do + -- collect route flags + if key == "methods" then + route_flags["methods"] = true + elseif key == "host" or key == "hosts" then + route_flags["host"] = true + elseif key == "vars" then + route_flags["vars"] = true + elseif key == "filter_func"then + route_flags["filter_func"] = true + elseif key == "remote_addr" or key == "remote_addrs" then + route_flags["remote_addr"] = true + elseif key == "service" then + route_flags["service"] = true + elseif key == "enable_websocket" then + route_flags["enable_websocket"] = true + elseif key == "plugins" then + route_flags["plugins"] = true + elseif key == "upstream_id" then + route_flags["upstream_id"] = true + elseif key == "service_id" then + route_flags["service_id"] = true + elseif key == "plugin_config_id" then + route_flags["plugin_config_id"] = true + elseif key == "script" then + route_flags["script"] = true + end + + -- collect upstream flags + if key == "upstream" then + if value.nodes and #value.nodes == 1 then + for k, v in pairs(value) do + if k == "nodes" then + if (not core.utils.parse_ipv4(v[1].host) + and not core.utils.parse_ipv6(v[1].host)) then + route_up_flags["has_domain"] = true + end + elseif k == "pass_host" and v ~= "pass" then + route_up_flags["pass_host"] = true + elseif k == "scheme" and v ~= "http" then + route_up_flags["scheme"] = true + elseif k == "checks" then + route_up_flags["checks"] = true + elseif k == "retries" then + route_up_flags["retries"] = true + elseif k == "timeout" then + route_up_flags["timeout"] = true + elseif k == "tls" then + route_up_flags["tls"] = true + elseif k == "keepalive_pool" then + route_up_flags["keepalive_pool"] = true + elseif k == "service_name" then + route_up_flags["service_name"] = true + end + end + else + route_up_flags["more_nodes"] = true + end + end + end + end + end + + local global_rules, _ = get_global_rules() + local global_rules_flag = global_rules and #global_rules ~= 0 + + if route_flags["vars"] or route_flags["filter_func"] + or route_flags["remote_addr"] + or route_flags["service_id"] + or route_flags["plugin_config_id"] + or global_rules_flag then + router.router_http.matching = orig_router_http_matching + else + core.log.info("use ai plane to match route") + router.router_http.matching = ai_router_http_matching + + local count = #routes + 3000 + core.log.info("renew route cache: count=", count) + route_lrucache = core.lrucache.new({ + count = count + }) + + local ok, err = gen_get_cache_key_func(route_flags) + if not ok then + core.log.error("generate get_cache_key_func failed:", err) + router.router_http.matching = orig_router_http_matching + end + end + + if route_flags["service"] + or route_flags["script"] + or route_flags["service_id"] + or route_flags["upstream_id"] + or route_flags["enable_websocket"] + or route_flags["plugins"] + or route_flags["plugin_config_id"] + or route_up_flags["has_domain"] + or route_up_flags["pass_host"] + or route_up_flags["scheme"] + or route_up_flags["checks"] + or route_up_flags["retries"] + or route_up_flags["timeout"] + or 
route_up_flags["tls"] + or route_up_flags["keepalive_pool"] + or route_up_flags["service_name"] + or route_up_flags["more_nodes"] + or global_rules_flag then + apisix.handle_upstream = orig_handle_upstream + apisix.http_balancer_phase = orig_http_balancer_phase + else + -- replace the upstream and balancer module + apisix.handle_upstream = ai_upstream + apisix.http_balancer_phase = ai_http_balancer_phase + end +end + + +function _M.init() + event.register(event.CONST.BUILD_ROUTER, routes_analyze) + local local_conf = core.config.local_conf() + local up_keepalive_conf = + core.table.try_read_attr(local_conf, "nginx_config", + "http", "upstream") + default_keepalive_pool.idle_timeout = + core.config_util.parse_time_unit(up_keepalive_conf.keepalive_timeout) + default_keepalive_pool.size = up_keepalive_conf.keepalive + default_keepalive_pool.requests = up_keepalive_conf.keepalive_requests + + pool_opt = { pool_size = default_keepalive_pool.size } +end + + +function _M.destroy() + if orig_router_http_matching then + router.router_http.matching = orig_router_http_matching + orig_router_http_matching = nil + end + + if orig_handle_upstream then + apisix.handle_upstream = orig_handle_upstream + orig_handle_upstream = nil + end + + if orig_http_balancer_phase then + apisix.http_balancer_phase = orig_http_balancer_phase + orig_http_balancer_phase = nil + end + + event.unregister(event.CONST.BUILD_ROUTER) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/api-breaker.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/api-breaker.lua new file mode 100644 index 0000000..eabca14 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/api-breaker.lua @@ -0,0 +1,267 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local plugin_name = "api-breaker" +local ngx = ngx +local math = math +local error = error +local ipairs = ipairs + + +local shared_buffer = ngx.shared["plugin-".. plugin_name] +if not shared_buffer then + error("failed to get ngx.shared dict when load plugin " .. 
plugin_name) +end + + +local schema = { + type = "object", + properties = { + break_response_code = { + type = "integer", + minimum = 200, + maximum = 599, + }, + break_response_body = { + type = "string" + }, + break_response_headers = { + type = "array", + items = { + type = "object", + properties = { + key = { + type = "string", + minLength = 1 + }, + value = { + type = "string", + minLength = 1 + } + }, + required = {"key", "value"}, + } + }, + max_breaker_sec = { + type = "integer", + minimum = 3, + default = 300, + }, + unhealthy = { + type = "object", + properties = { + http_statuses = { + type = "array", + minItems = 1, + items = { + type = "integer", + minimum = 500, + maximum = 599, + }, + uniqueItems = true, + default = {500} + }, + failures = { + type = "integer", + minimum = 1, + default = 3, + } + }, + default = {http_statuses = {500}, failures = 3} + }, + healthy = { + type = "object", + properties = { + http_statuses = { + type = "array", + minItems = 1, + items = { + type = "integer", + minimum = 200, + maximum = 499, + }, + uniqueItems = true, + default = {200} + }, + successes = { + type = "integer", + minimum = 1, + default = 3, + } + }, + default = {http_statuses = {200}, successes = 3} + } + }, + required = {"break_response_code"}, +} + + +local function gen_healthy_key(ctx) + return "healthy-" .. core.request.get_host(ctx) .. ctx.var.uri +end + + +local function gen_unhealthy_key(ctx) + return "unhealthy-" .. core.request.get_host(ctx) .. ctx.var.uri +end + + +local function gen_lasttime_key(ctx) + return "unhealthy-lasttime" .. core.request.get_host(ctx) .. ctx.var.uri +end + + +local _M = { + version = 0.1, + name = plugin_name, + priority = 1005, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + local unhealthy_key = gen_unhealthy_key(ctx) + -- unhealthy counts + local unhealthy_count, err = shared_buffer:get(unhealthy_key) + if err then + core.log.warn("failed to get unhealthy_key: ", + unhealthy_key, " err: ", err) + return + end + + if not unhealthy_count then + return + end + + -- timestamp of the last time a unhealthy state was triggered + local lasttime_key = gen_lasttime_key(ctx) + local lasttime, err = shared_buffer:get(lasttime_key) + if err then + core.log.warn("failed to get lasttime_key: ", + lasttime_key, " err: ", err) + return + end + + if not lasttime then + return + end + + local failure_times = math.ceil(unhealthy_count / conf.unhealthy.failures) + if failure_times < 1 then + failure_times = 1 + end + + -- cannot exceed the maximum value of the user configuration + local breaker_time = 2 ^ failure_times + if breaker_time > conf.max_breaker_sec then + breaker_time = conf.max_breaker_sec + end + core.log.info("breaker_time: ", breaker_time) + + -- breaker + if lasttime + breaker_time >= ngx.time() then + if conf.break_response_body then + if conf.break_response_headers then + for _, value in ipairs(conf.break_response_headers) do + local val = core.utils.resolve_var(value.value, ctx.var) + core.response.add_header(value.key, val) + end + end + return conf.break_response_code, conf.break_response_body + end + return conf.break_response_code + end + + return +end + + +function _M.log(conf, ctx) + local unhealthy_key = gen_unhealthy_key(ctx) + local healthy_key = gen_healthy_key(ctx) + local upstream_status = core.response.get_upstream_status(ctx) + + if not upstream_status then + return + end + + -- unhealthy process + if 
core.table.array_find(conf.unhealthy.http_statuses, + upstream_status) + then + local unhealthy_count, err = shared_buffer:incr(unhealthy_key, 1, 0) + if err then + core.log.warn("failed to incr unhealthy_key: ", unhealthy_key, + " err: ", err) + end + core.log.info("unhealthy_key: ", unhealthy_key, " count: ", + unhealthy_count) + + shared_buffer:delete(healthy_key) + + -- whether the user-configured number of failures has been reached, + -- and if so, the timestamp for entering the unhealthy state. + if unhealthy_count % conf.unhealthy.failures == 0 then + shared_buffer:set(gen_lasttime_key(ctx), ngx.time(), + conf.max_breaker_sec) + core.log.info("update unhealthy_key: ", unhealthy_key, " to ", + unhealthy_count) + end + + return + end + + -- health process + if not core.table.array_find(conf.healthy.http_statuses, upstream_status) then + return + end + + local unhealthy_count, err = shared_buffer:get(unhealthy_key) + if err then + core.log.warn("failed to `get` unhealthy_key: ", unhealthy_key, + " err: ", err) + end + + if not unhealthy_count then + return + end + + local healthy_count, err = shared_buffer:incr(healthy_key, 1, 0) + if err then + core.log.warn("failed to `incr` healthy_key: ", healthy_key, + " err: ", err) + end + + -- clear related status + if healthy_count >= conf.healthy.successes then + -- stat change to normal + core.log.info("change to normal, ", healthy_key, " ", healthy_count) + shared_buffer:delete(gen_lasttime_key(ctx)) + shared_buffer:delete(unhealthy_key) + shared_buffer:delete(healthy_key) + end + + return +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/attach-consumer-label.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/attach-consumer-label.lua new file mode 100644 index 0000000..6d3396a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/attach-consumer-label.lua @@ -0,0 +1,68 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local pairs = pairs +local plugin_name = "attach-consumer-label" + +local schema = { + type = "object", + properties = { + headers = { + type = "object", + additionalProperties = { + type = "string", + pattern = "^\\$.*" + }, + minProperties = 1 + }, + }, + required = {"headers"}, +} + +local _M = { + version = 0.1, + priority = 2399, + name = plugin_name, + schema = schema, +} + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + +function _M.before_proxy(conf, ctx) + -- check if the consumer is exists in the context + if not ctx.consumer then + return + end + + local labels = ctx.consumer.labels + core.log.info("consumer username: ", ctx.consumer.username, " labels: ", + core.json.delay_encode(labels)) + if not labels then + return + end + + for header, label_key in pairs(conf.headers) do + -- remove leading $ character + local label_value = labels[label_key:sub(2)] + core.request.set_header(ctx, header, label_value) + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casbin.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casbin.lua new file mode 100644 index 0000000..834c747 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casbin.lua @@ -0,0 +1,135 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local casbin = require("casbin") +local core = require("apisix.core") +local plugin = require("apisix.plugin") + +local plugin_name = "authz-casbin" + +local schema = { + type = "object", + properties = { + model_path = { type = "string" }, + policy_path = { type = "string" }, + model = { type = "string" }, + policy = { type = "string" }, + username = { type = "string"} + }, + oneOf = { + {required = {"model_path", "policy_path", "username"}}, + {required = {"model", "policy", "username"}} + }, +} + +local metadata_schema = { + type = "object", + properties = { + model = {type = "string"}, + policy = {type = "string"}, + }, + required = {"model", "policy"}, +} + +local _M = { + version = 0.1, + priority = 2560, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema +} + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + local ok, err = core.schema.check(schema, conf) + if ok then + return true + else + local metadata = plugin.plugin_metadata(plugin_name) + if metadata and metadata.value and conf.username then + return true + end + end + return false, err +end + +local casbin_enforcer + +local function new_enforcer_if_need(conf) + if conf.model_path and conf.policy_path then + local model_path = conf.model_path + local policy_path = conf.policy_path + if not conf.casbin_enforcer then + conf.casbin_enforcer = casbin:new(model_path, policy_path) + end + return true + end + + if conf.model and conf.policy then + local model = conf.model + local policy = conf.policy + if not conf.casbin_enforcer then + conf.casbin_enforcer = casbin:newEnforcerFromText(model, policy) + end + return true + end + + local metadata = plugin.plugin_metadata(plugin_name) + if not (metadata and metadata.value.model and metadata.value.policy) then + return nil, "not enough configuration to create enforcer" + end + + local modifiedIndex = metadata.modifiedIndex + if not casbin_enforcer or casbin_enforcer.modifiedIndex ~= modifiedIndex then + local model = metadata.value.model + local policy = metadata.value.policy + casbin_enforcer = casbin:newEnforcerFromText(model, policy) + casbin_enforcer.modifiedIndex = modifiedIndex + end + return true +end + + +function _M.rewrite(conf, ctx) + -- creates an enforcer when request sent for the first time + local ok, err = new_enforcer_if_need(conf) + if not ok then + core.log.error(err) + return 503 + end + + local path = ctx.var.uri + local method = ctx.var.method + local headers = core.request.headers(ctx) + local username = headers[conf.username] or "anonymous" + + if conf.casbin_enforcer then + if not conf.casbin_enforcer:enforce(username, path, method) then + return 403, {message = "Access Denied"} + end + else + if not casbin_enforcer:enforce(username, path, method) then + return 403, {message = "Access Denied"} + end + end +end + + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casdoor.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casdoor.lua new file mode 100644 index 0000000..c496ab6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-casdoor.lua @@ -0,0 +1,176 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local http = require("resty.http") +local session = require("resty.session") +local ngx = ngx +local rand = math.random +local tostring = tostring + + +local plugin_name = "authz-casdoor" +local schema = { + type = "object", + properties = { + -- Note: endpoint_addr and callback_url should not end with '/' + endpoint_addr = {type = "string", pattern = "^[^%?]+[^/]$"}, + client_id = {type = "string"}, + client_secret = {type = "string"}, + callback_url = {type = "string", pattern = "^[^%?]+[^/]$"} + }, + encrypt_fields = {"client_secret"}, + required = { + "callback_url", "endpoint_addr", "client_id", "client_secret" + } +} + +local _M = { + version = 0.1, + priority = 2559, + name = plugin_name, + schema = schema +} + +local function fetch_access_token(code, conf) + local client = http.new() + local url = conf.endpoint_addr .. "/api/login/oauth/access_token" + local res, err = client:request_uri(url, { + method = "POST", + body = ngx.encode_args({ + code = code, + grant_type = "authorization_code", + client_id = conf.client_id, + client_secret = conf.client_secret + }), + headers = { + ["Content-Type"] = "application/x-www-form-urlencoded" + } + }) + + if not res then + return nil, nil, err + end + local data, err = core.json.decode(res.body) + + if err or not data then + err = "failed to parse casdoor response data: " .. err .. ", body: " .. res.body + return nil, nil, err + end + + if not data.access_token then + return nil, nil, + "failed when accessing token: no access_token contained" + end + -- In the reply of casdoor, setting expires_in to 0 indicates that the access_token is invalid. + if not data.expires_in or data.expires_in == 0 then + return nil, nil, "failed when accessing token: invalid access_token" + end + + return data.access_token, data.expires_in, nil +end + + +function _M.check_schema(conf) + local check = {"endpoint_addr", "callback_url"} + core.utils.check_https(check, conf, plugin_name) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + local current_uri = ctx.var.uri + local session_obj_read, session_present = session.open() + -- step 1: check whether hits the callback + local m, err = ngx.re.match(conf.callback_url, ".+//[^/]+(/.*)", "jo") + if err or not m then + core.log.error(err) + return 503 + end + local real_callback_url = m[1] + if current_uri == real_callback_url then + if not session_present then + err = "no session found" + core.log.error(err) + return 503 + end + local state_in_session = session_obj_read.data.state + if not state_in_session then + err = "no state found in session" + core.log.error(err) + return 503 + end + local args = core.request.get_uri_args(ctx) + if not args or not args.code or not args.state then + err = "failed when accessing token. 
Invalid code or state" + core.log.error(err) + return 400, err + end + if args.state ~= tostring(state_in_session) then + err = "invalid state" + core.log.error(err) + return 400, err + end + if not args.code then + err = "invalid code" + core.log.error(err) + return 400, err + end + local access_token, lifetime, err = + fetch_access_token(args.code, conf) + if not access_token then + core.log.error(err) + return 503 + end + local original_url = session_obj_read.data.original_uri + if not original_url then + err = "no original_url found in session" + core.log.error(err) + return 503 + end + local session_obj_write = session.new { + cookie = {lifetime = lifetime} + } + session_obj_write:start() + session_obj_write.data.access_token = access_token + session_obj_write:save() + core.response.set_header("Location", original_url) + return 302 + end + + -- step 2: check whether session exists + if not (session_present and session_obj_read.data.access_token) then + -- session not exists, redirect to login page + local state = rand(0x7fffffff) + local session_obj_write = session.start() + session_obj_write.data.original_uri = current_uri + session_obj_write.data.state = state + session_obj_write:save() + + local redirect_url = conf.endpoint_addr .. "/login/oauth/authorize?" .. ngx.encode_args({ + response_type = "code", + scope = "read", + state = state, + client_id = conf.client_id, + redirect_uri = conf.callback_url + }) + core.response.set_header("Location", redirect_url) + return 302 + end + +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-keycloak.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-keycloak.lua new file mode 100644 index 0000000..34a0533 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/authz-keycloak.lua @@ -0,0 +1,790 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local http = require "resty.http" +local sub_str = string.sub +local type = type +local ngx = ngx +local plugin_name = "authz-keycloak" +local fetch_secrets = require("apisix.secret").fetch_secrets + +local log = core.log +local pairs = pairs + +local schema = { + type = "object", + properties = { + discovery = {type = "string", minLength = 1, maxLength = 4096}, + token_endpoint = {type = "string", minLength = 1, maxLength = 4096}, + resource_registration_endpoint = {type = "string", minLength = 1, maxLength = 4096}, + client_id = {type = "string", minLength = 1, maxLength = 100}, + client_secret = {type = "string", minLength = 1, maxLength = 100}, + grant_type = { + type = "string", + default="urn:ietf:params:oauth:grant-type:uma-ticket", + enum = {"urn:ietf:params:oauth:grant-type:uma-ticket"}, + minLength = 1, maxLength = 100 + }, + policy_enforcement_mode = { + type = "string", + enum = {"ENFORCING", "PERMISSIVE"}, + default = "ENFORCING" + }, + permissions = { + type = "array", + items = { + type = "string", + minLength = 1, maxLength = 100 + }, + uniqueItems = true, + default = {} + }, + lazy_load_paths = {type = "boolean", default = false}, + http_method_as_scope = {type = "boolean", default = false}, + timeout = {type = "integer", minimum = 1000, default = 3000}, + ssl_verify = {type = "boolean", default = true}, + cache_ttl_seconds = {type = "integer", minimum = 1, default = 24 * 60 * 60}, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = {type = "integer", minimum = 1000, default = 60000}, + keepalive_pool = {type = "integer", minimum = 1, default = 5}, + access_denied_redirect_uri = {type = "string", minLength = 1, maxLength = 2048}, + access_token_expires_in = {type = "integer", minimum = 1, default = 300}, + access_token_expires_leeway = {type = "integer", minimum = 0, default = 0}, + refresh_token_expires_in = {type = "integer", minimum = 1, default = 3600}, + refresh_token_expires_leeway = {type = "integer", minimum = 0, default = 0}, + password_grant_token_generation_incoming_uri = { + type = "string", + minLength = 1, + maxLength = 4096 + }, + }, + encrypt_fields = {"client_secret"}, + required = {"client_id"}, + allOf = { + -- Require discovery or token endpoint. + { + anyOf = { + {required = {"discovery"}}, + {required = {"token_endpoint"}} + } + }, + -- If lazy_load_paths is true, require discovery or resource registration endpoint. + { + anyOf = { + { + properties = { + lazy_load_paths = {enum = {false}}, + } + }, + { + properties = { + lazy_load_paths = {enum = {true}}, + }, + anyOf = { + {required = {"discovery"}}, + {required = {"resource_registration_endpoint"}} + } + } + } + } + } +} + + +local _M = { + version = 0.1, + priority = 2000, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local check = {"discovery", "token_endpoint", "resource_registration_endpoint", + "access_denied_redirect_uri"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + + return core.schema.check(schema, conf) +end + + +-- Some auxiliary functions below heavily inspired by the excellent +-- lua-resty-openidc module; see https://github.com/zmartzone/lua-resty-openidc + + +-- Retrieve value from server-wide cache, if available. 
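+-- Note on the cache helpers below: `type` names an nginx shared dict; as used in
+-- this plugin the dicts are "discovery" and "access-tokens". If no shared dict with
+-- that name is configured, ngx.shared[type] is nil and these helpers silently skip
+-- caching instead of raising an error.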
+local function authz_keycloak_cache_get(type, key) + local dict = ngx.shared[type] + local value + if dict then + value = dict:get(key) + if value then log.debug("cache hit: type=", type, " key=", key) end + end + return value +end + + +-- Set value in server-wide cache, if available. +local function authz_keycloak_cache_set(type, key, value, exp) + local dict = ngx.shared[type] + if dict and (exp > 0) then + local success, err, forcible = dict:set(key, value, exp) + if err then + log.error("cache set: success=", success, " err=", err, " forcible=", forcible) + else + log.debug("cache set: success=", success, " err=", err, " forcible=", forcible) + end + end +end + + +-- Configure request parameters. +local function authz_keycloak_configure_params(params, conf) + -- Keepalive options. + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + else + params.keepalive = conf.keepalive + end + + -- TLS verification. + params.ssl_verify = conf.ssl_verify + + -- Decorate parameters, maybe, and return. + return conf.http_request_decorator and conf.http_request_decorator(params) or params +end + + +-- Configure timeouts. +local function authz_keycloak_configure_timeouts(httpc, timeout) + if timeout then + if type(timeout) == "table" then + httpc:set_timeouts(timeout.connect or 0, timeout.send or 0, timeout.read or 0) + else + httpc:set_timeout(timeout) + end + end +end + + +-- Set outgoing proxy options. +local function authz_keycloak_configure_proxy(httpc, proxy_opts) + if httpc and proxy_opts and type(proxy_opts) == "table" then + log.debug("authz_keycloak_configure_proxy : use http proxy") + httpc:set_proxy_options(proxy_opts) + else + log.debug("authz_keycloak_configure_proxy : don't use http proxy") + end +end + + +-- Get and configure HTTP client. +local function authz_keycloak_get_http_client(conf) + local httpc = http.new() + authz_keycloak_configure_timeouts(httpc, conf.timeout) + authz_keycloak_configure_proxy(httpc, conf.proxy_opts) + return httpc +end + + +-- Parse the JSON result from a call to the OP. +local function authz_keycloak_parse_json_response(response) + local err + local res + + -- Check the response from the OP. + if response.status ~= 200 then + err = "response indicates failure, status=" .. response.status .. ", body=" .. response.body + else + -- Decode the response and extract the JSON object. + res, err = core.json.decode(response.body) + + if not res then + err = "JSON decoding failed: " .. err + end + end + + return res, err +end + + +-- Get the Discovery metadata from the specified URL. +local function authz_keycloak_discover(conf) + log.debug("authz_keycloak_discover: URL is: " .. conf.discovery) + + local json, err + local v = authz_keycloak_cache_get("discovery", conf.discovery) + + if not v then + log.debug("Discovery data not in cache, making call to discovery endpoint.") + + -- Make the call to the discovery endpoint. + local httpc = authz_keycloak_get_http_client(conf) + + local params = authz_keycloak_configure_params({}, conf) + + local res, error = httpc:request_uri(conf.discovery, params) + + if not res then + err = "Accessing discovery URL (" .. conf.discovery .. ") failed: " .. error + log.error(err) + else + log.debug("Response data: " .. res.body) + json, err = authz_keycloak_parse_json_response(res) + if json then + authz_keycloak_cache_set("discovery", conf.discovery, core.json.encode(json), + conf.cache_ttl_seconds) + else + err = "could not decode JSON from Discovery data" .. 
(err and (": " .. err) or '') + log.error(err) + end + end + else + json = core.json.decode(v) + end + + return json, err +end + + +-- Turn a discovery url set in the conf dictionary into the discovered information. +local function authz_keycloak_ensure_discovered_data(conf) + local err + if type(conf.discovery) == "string" then + local discovery + discovery, err = authz_keycloak_discover(conf) + if not err then + conf.discovery = discovery + end + end + return err +end + + +-- Get an endpoint from the configuration. +local function authz_keycloak_get_endpoint(conf, endpoint) + if conf and conf[endpoint] then + -- Use explicit entry. + return conf[endpoint] + elseif conf and conf.discovery and type(conf.discovery) == "table" then + -- Use discovery data. + return conf.discovery[endpoint] + end + + -- Unable to obtain endpoint. + return nil +end + + +-- Return the token endpoint from the configuration. +local function authz_keycloak_get_token_endpoint(conf) + return authz_keycloak_get_endpoint(conf, "token_endpoint") +end + + +-- Return the resource registration endpoint from the configuration. +local function authz_keycloak_get_resource_registration_endpoint(conf) + return authz_keycloak_get_endpoint(conf, "resource_registration_endpoint") +end + + +-- Return access_token expires_in value (in seconds). +local function authz_keycloak_access_token_expires_in(conf, expires_in) + return (expires_in or conf.access_token_expires_in) + - 1 - conf.access_token_expires_leeway +end + + +-- Return refresh_token expires_in value (in seconds). +local function authz_keycloak_refresh_token_expires_in(conf, expires_in) + return (expires_in or conf.refresh_token_expires_in) + - 1 - conf.refresh_token_expires_leeway +end + + +-- Ensure a valid service account access token is available for the configured client. +local function authz_keycloak_ensure_sa_access_token(conf) + local client_id = conf.client_id + local ttl = conf.cache_ttl_seconds + local token_endpoint = authz_keycloak_get_token_endpoint(conf) + + if not token_endpoint then + log.error("Unable to determine token endpoint.") + return 503, "Unable to determine token endpoint." + end + + local session = authz_keycloak_cache_get("access-tokens", token_endpoint .. ":" + .. client_id) + + if session then + -- Decode session string. + local err + session, err = core.json.decode(session) + + if not session then + -- Should never happen. + return 500, err + end + + local current_time = ngx.time() + + if current_time < session.access_token_expiration then + -- Access token is still valid. + log.debug("Access token is still valid.") + return session.access_token + else + -- Access token has expired. + log.debug("Access token has expired.") + if session.refresh_token + and (not session.refresh_token_expiration + or current_time < session.refresh_token_expiration) then + -- Try to get a new access token, using the refresh token. + log.debug("Trying to get new access token using refresh token.") + + local httpc = authz_keycloak_get_http_client(conf) + + local params = { + method = "POST", + body = ngx.encode_args({ + grant_type = "refresh_token", + client_id = client_id, + client_secret = conf.client_secret, + refresh_token = session.refresh_token, + }), + headers = { + ["Content-Type"] = "application/x-www-form-urlencoded" + } + } + + params = authz_keycloak_configure_params(params, conf) + + local res, err = httpc:request_uri(token_endpoint, params) + + if not res then + err = "Accessing token endpoint URL (" .. token_endpoint + .. ") failed: " .. 
err + log.error(err) + return nil, err + end + + log.debug("Response data: " .. res.body) + local json, err = authz_keycloak_parse_json_response(res) + + if not json then + err = "Could not decode JSON from token endpoint" + .. (err and (": " .. err) or '.') + log.error(err) + return nil, err + end + + if not json.access_token then + -- Clear session. + log.debug("Answer didn't contain a new access token. Clearing session.") + session = nil + else + log.debug("Got new access token.") + -- Save access token. + session.access_token = json.access_token + + -- Calculate and save access token expiry time. + session.access_token_expiration = current_time + + authz_keycloak_access_token_expires_in(conf, json.expires_in) + + -- Save refresh token, maybe. + if json.refresh_token ~= nil then + log.debug("Got new refresh token.") + session.refresh_token = json.refresh_token + + -- Calculate and save refresh token expiry time. + session.refresh_token_expiration = current_time + + authz_keycloak_refresh_token_expires_in(conf, + json.refresh_expires_in) + end + + authz_keycloak_cache_set("access-tokens", + token_endpoint .. ":" .. client_id, + core.json.encode(session), ttl) + end + else + -- No refresh token available, or it has expired. Clear session. + log.debug("No or expired refresh token. Clearing session.") + session = nil + end + end + end + + if not session then + -- No session available. Create a new one. + + log.debug("Getting access token for Protection API from token endpoint.") + local httpc = authz_keycloak_get_http_client(conf) + + local params = { + method = "POST", + body = ngx.encode_args({ + grant_type = "client_credentials", + client_id = client_id, + client_secret = conf.client_secret, + }), + headers = { + ["Content-Type"] = "application/x-www-form-urlencoded" + } + } + + params = authz_keycloak_configure_params(params, conf) + + local current_time = ngx.time() + + local res, err = httpc:request_uri(token_endpoint, params) + + if not res then + err = "Accessing token endpoint URL (" .. token_endpoint .. ") failed: " .. err + log.error(err) + return nil, err + end + + log.debug("Response data: " .. res.body) + local json, err = authz_keycloak_parse_json_response(res) + + if not json then + err = "Could not decode JSON from token endpoint" .. (err and (": " .. err) or '.') + log.error(err) + return nil, err + end + + if not json.access_token then + err = "Response does not contain access_token field." + log.error(err) + return nil, err + end + + session = {} + + -- Save access token. + session.access_token = json.access_token + + -- Calculate and save access token expiry time. + session.access_token_expiration = current_time + + authz_keycloak_access_token_expires_in(conf, json.expires_in) + + -- Save refresh token, maybe. + if json.refresh_token ~= nil then + session.refresh_token = json.refresh_token + + -- Calculate and save refresh token expiry time. + session.refresh_token_expiration = current_time + + authz_keycloak_refresh_token_expires_in(conf, json.refresh_expires_in) + end + + authz_keycloak_cache_set("access-tokens", token_endpoint .. ":" .. client_id, + core.json.encode(session), ttl) + end + + return session.access_token +end + + +-- Resolve a URI to one or more resource IDs. +local function authz_keycloak_resolve_resource(conf, uri, sa_access_token) + -- Get resource registration endpoint URL. 
+ local resource_registration_endpoint = authz_keycloak_get_resource_registration_endpoint(conf) + + if not resource_registration_endpoint then + local err = "Unable to determine registration endpoint." + log.error(err) + return nil, err + end + + log.debug("Resource registration endpoint: ", resource_registration_endpoint) + + local httpc = authz_keycloak_get_http_client(conf) + + local params = { + method = "GET", + query = {uri = uri, matchingUri = "true"}, + headers = { + ["Authorization"] = "Bearer " .. sa_access_token + } + } + + params = authz_keycloak_configure_params(params, conf) + + local res, err = httpc:request_uri(resource_registration_endpoint, params) + + if not res then + err = "Accessing resource registration endpoint URL (" .. resource_registration_endpoint + .. ") failed: " .. err + log.error(err) + return nil, err + end + + log.debug("Response data: " .. res.body) + res.body = '{"resources": ' .. res.body .. '}' + local json, err = authz_keycloak_parse_json_response(res) + + if not json then + err = "Could not decode JSON from resource registration endpoint" + .. (err and (": " .. err) or '.') + log.error(err) + return nil, err + end + + return json.resources +end + + +local function evaluate_permissions(conf, ctx, token) + -- Ensure discovered data. + local err = authz_keycloak_ensure_discovered_data(conf) + if err then + return 503, err + end + + local permission + + if conf.lazy_load_paths then + -- Ensure service account access token. + local sa_access_token, err = authz_keycloak_ensure_sa_access_token(conf) + if err then + log.error(err) + return 503, err + end + + -- Resolve URI to resource(s). + permission, err = authz_keycloak_resolve_resource(conf, ctx.var.request_uri, + sa_access_token) + + -- Check result. + if permission == nil then + -- No result back from resource registration endpoint. + log.error(err) + return 503, err + end + else + -- Use statically configured permissions. + permission = conf.permissions + end + + -- Return 403 or 307 if permission is empty and enforcement mode is "ENFORCING". + if #permission == 0 and conf.policy_enforcement_mode == "ENFORCING" then + -- Return Keycloak-style message for consistency. + if conf.access_denied_redirect_uri then + core.response.set_header("Location", conf.access_denied_redirect_uri) + return 307 + end + return 403, '{"error":"access_denied","error_description":"not_authorized"}' + end + + -- Determine scope from HTTP method, maybe. + local scope + if conf.http_method_as_scope then + scope = ctx.var.request_method + end + + if scope then + -- Loop over permissions and add scope. + for k, v in pairs(permission) do + if v:find("#", 1, true) then + -- Already contains scope. + permission[k] = v .. ", " .. scope + else + -- Doesn't contain scope yet. + permission[k] = v .. "#" .. scope + end + end + end + + for k, v in pairs(permission) do + log.debug("Requesting permission ", v, ".") + end + + -- Get token endpoint URL. + local token_endpoint = authz_keycloak_get_token_endpoint(conf) + if not token_endpoint then + err = "Unable to determine token endpoint." 
+ log.error(err) + return 503, err + end + log.debug("Token endpoint: ", token_endpoint) + + local httpc = authz_keycloak_get_http_client(conf) + + local params = { + method = "POST", + body = ngx.encode_args({ + grant_type = conf.grant_type, + audience = conf.client_id, + response_mode = "decision", + permission = permission + }), + headers = { + ["Content-Type"] = "application/x-www-form-urlencoded", + ["Authorization"] = token + } + } + + params = authz_keycloak_configure_params(params, conf) + + local res, err = httpc:request_uri(token_endpoint, params) + + if not res then + err = "Error while sending authz request to " .. token_endpoint .. ": " .. err + log.error(err) + return 503 + end + + log.debug("Response status: ", res.status, ", data: ", res.body) + + if res.status == 403 then + -- Request permanently denied, e.g. due to lacking permissions. + log.debug('Request denied: HTTP 403 Forbidden. Body: ', res.body) + if conf.access_denied_redirect_uri then + core.response.set_header("Location", conf.access_denied_redirect_uri) + return 307 + end + + return res.status, res.body + elseif res.status == 401 then + -- Request temporarily denied, e.g access token not valid. + log.debug('Request denied: HTTP 401 Unauthorized. Body: ', res.body) + return res.status, res.body + elseif res.status >= 400 then + -- Some other error. Log full response. + log.error('Request denied: Token endpoint returned an error (status: ', + res.status, ', body: ', res.body, ').') + return res.status, res.body + end + + -- Request accepted. +end + + +local function fetch_jwt_token(ctx) + local token = core.request.header(ctx, "Authorization") + if not token then + return nil, "authorization header not available" + end + + local prefix = sub_str(token, 1, 7) + if prefix ~= 'Bearer ' and prefix ~= 'bearer ' then + return "Bearer " .. token + end + return token +end + +-- To get new access token by calling get token api +local function generate_token_using_password_grant(conf,ctx) + log.debug("generate_token_using_password_grant Function Called") + + local body, err = core.request.get_body() + if err or not body then + log.error("Failed to get request body: ", err) + return 503 + end + local parameters = core.string.decode_args(body) + + local username = parameters["username"] + local password = parameters["password"] + + if not username then + local err = "username is missing." + log.warn(err) + return 422, {message = err} + end + if not password then + local err = "password is missing." + log.warn(err) + return 422, {message = err} + end + + local client_id = conf.client_id + + local token_endpoint = authz_keycloak_get_token_endpoint(conf) + + if not token_endpoint then + local err = "Unable to determine token endpoint." + log.error(err) + return 503, {message = err} + end + local httpc = authz_keycloak_get_http_client(conf) + + local params = { + method = "POST", + body = ngx.encode_args({ + grant_type = "password", + client_id = client_id, + client_secret = conf.client_secret, + username = username, + password = password + }), + headers = { + ["Content-Type"] = "application/x-www-form-urlencoded" + } + } + + params = authz_keycloak_configure_params(params, conf) + + local res, err = httpc:request_uri(token_endpoint, params) + + if not res then + err = "Accessing token endpoint URL (" .. token_endpoint + .. ") failed: " .. err + log.error(err) + return 401, {message = "Accessing token endpoint URL failed."} + end + + log.debug("Response data: " .. 
res.body) + local json, err = authz_keycloak_parse_json_response(res) + + if not json then + err = "Could not decode JSON from response" + .. (err and (": " .. err) or '.') + log.error(err) + return 401, {message = "Could not decode JSON from response."} + end + + return res.status, res.body +end + +function _M.access(conf, ctx) + -- resolve secrets + conf = fetch_secrets(conf, true, conf, "") + local headers = core.request.headers(ctx) + local need_grant_token = conf.password_grant_token_generation_incoming_uri and + ctx.var.request_uri == conf.password_grant_token_generation_incoming_uri and + headers["content-type"] == "application/x-www-form-urlencoded" and + core.request.get_method() == "POST" + if need_grant_token then + return generate_token_using_password_grant(conf,ctx) + end + log.debug("hit keycloak-auth access") + local jwt_token, err = fetch_jwt_token(ctx) + if not jwt_token then + log.error("failed to fetch JWT token: ", err) + return 401, {message = "Missing JWT token in request"} + end + + local status, body = evaluate_permissions(conf, ctx, jwt_token) + if status then + return status, body + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/aws-lambda.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/aws-lambda.lua new file mode 100644 index 0000000..1b172af --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/aws-lambda.lua @@ -0,0 +1,187 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +local ngx = ngx +local hmac = require("resty.hmac") +local hex_encode = require("resty.string").to_hex +local resty_sha256 = require("resty.sha256") +local str_strip = require("pl.stringx").strip +local norm_path = require("pl.path").normpath +local pairs = pairs +local tab_concat = table.concat +local tab_sort = table.sort +local os = os + + +local plugin_name = "aws-lambda" +local plugin_version = 0.1 +local priority = -1899 + +local ALGO = "AWS4-HMAC-SHA256" + +local function hmac256(key, msg) + return hmac:new(key, hmac.ALGOS.SHA256):final(msg) +end + +local function sha256(msg) + local hash = resty_sha256:new() + hash:update(msg) + local digest = hash:final() + return hex_encode(digest) +end + +local function get_signature_key(key, datestamp, region, service) + local kDate = hmac256("AWS4" .. 
key, datestamp) + local kRegion = hmac256(kDate, region) + local kService = hmac256(kRegion, service) + local kSigning = hmac256(kService, "aws4_request") + return kSigning +end + +local aws_authz_schema = { + type = "object", + properties = { + -- API Key based authorization + apikey = {type = "string"}, + -- IAM role based authorization, works via aws v4 request signing + -- more at https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + iam = { + type = "object", + properties = { + accesskey = { + type = "string", + description = "access key id from from aws iam console" + }, + secretkey = { + type = "string", + description = "secret access key from from aws iam console" + }, + aws_region = { + type = "string", + default = "us-east-1", + description = "the aws region that is receiving the request" + }, + service = { + type = "string", + default = "execute-api", + description = "the service that is receiving the request" + } + }, + required = {"accesskey", "secretkey"} + } + } +} + +local function request_processor(conf, ctx, params) + local headers = params.headers + -- set authorization headers if not already set by the client + -- we are following not to overwrite the authz keys + if not headers["x-api-key"] then + if conf.authorization and conf.authorization.apikey then + headers["x-api-key"] = conf.authorization.apikey + return + end + end + + -- performing aws v4 request signing for IAM authorization + -- visit https://docs.aws.amazon.com/general/latest/gr/sigv4-signed-request-examples.html + -- to look at the pseudocode in python. + if headers["authorization"] or not conf.authorization or not conf.authorization.iam then + return + end + + -- create a date for headers and the credential string + local t = ngx.time() + local amzdate = os.date("!%Y%m%dT%H%M%SZ", t) + local datestamp = os.date("!%Y%m%d", t) -- Date w/o time, used in credential scope + headers["X-Amz-Date"] = amzdate + + -- computing canonical uri + local canonical_uri = norm_path(params.path) + if canonical_uri ~= "/" then + if canonical_uri:sub(-1, -1) == "/" then + canonical_uri = canonical_uri:sub(1, -2) + end + if canonical_uri:sub(1, 1) ~= "/" then + canonical_uri = "/" .. canonical_uri + end + end + + -- computing canonical query string + local canonical_qs = {} + local canonical_qs_i = 0 + for k, v in pairs(params.query) do + canonical_qs_i = canonical_qs_i + 1 + canonical_qs[canonical_qs_i] = ngx.unescape_uri(k) .. "=" .. ngx.unescape_uri(v) + end + + tab_sort(canonical_qs) + canonical_qs = tab_concat(canonical_qs, "&") + + -- computing canonical and signed headers + + local canonical_headers, signed_headers = {}, {} + local signed_headers_i = 0 + for k, v in pairs(headers) do + k = k:lower() + if k ~= "connection" then + signed_headers_i = signed_headers_i + 1 + signed_headers[signed_headers_i] = k + -- strip starting and trailing spaces including strip multiple spaces into single space + canonical_headers[k] = str_strip(v) + end + end + tab_sort(signed_headers) + + for i = 1, #signed_headers do + local k = signed_headers[i] + canonical_headers[i] = k .. ":" .. canonical_headers[k] .. "\n" + end + canonical_headers = tab_concat(canonical_headers, nil, 1, #signed_headers) + signed_headers = tab_concat(signed_headers, ";") + + -- combining elements to form the canonical request (step-1) + local canonical_request = params.method:upper() .. "\n" + .. canonical_uri .. "\n" + .. (canonical_qs or "") .. "\n" + .. canonical_headers .. "\n" + .. signed_headers .. "\n" + .. 
sha256(params.body or "") + + -- creating the string to sign for aws signature v4 (step-2) + local iam = conf.authorization.iam + local credential_scope = datestamp .. "/" .. iam.aws_region .. "/" + .. iam.service .. "/aws4_request" + local string_to_sign = ALGO .. "\n" + .. amzdate .. "\n" + .. credential_scope .. "\n" + .. sha256(canonical_request) + + -- calculate the signature (step-3) + local signature_key = get_signature_key(iam.secretkey, datestamp, iam.aws_region, iam.service) + local signature = hex_encode(hmac256(signature_key, string_to_sign)) + + -- add info to the headers (step-4) + headers["authorization"] = ALGO .. " Credential=" .. iam.accesskey + .. "/" .. credential_scope + .. ", SignedHeaders=" .. signed_headers + .. ", Signature=" .. signature +end + + +local serverless_obj = require("apisix.plugins.serverless.generic-upstream") + +return serverless_obj(plugin_name, plugin_version, priority, request_processor, aws_authz_schema) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/azure-functions.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/azure-functions.lua new file mode 100644 index 0000000..0b0e64d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/azure-functions.lua @@ -0,0 +1,61 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +local plugin = require("apisix.plugin") +local plugin_name, plugin_version, priority = "azure-functions", 0.1, -1900 + +local azure_authz_schema = { + type = "object", + properties = { + apikey = {type = "string"}, + clientid = {type = "string"} + } +} + +local metadata_schema = { + type = "object", + properties = { + master_apikey = {type = "string", default = ""}, + master_clientid = {type = "string", default = ""} + } +} + +local function request_processor(conf, ctx, params) + local headers = params.headers or {} + -- set authorization headers if not already set by the client + -- we are following not to overwrite the authz keys + if not headers["x-functions-key"] and + not headers["x-functions-clientid"] then + if conf.authorization then + headers["x-functions-key"] = conf.authorization.apikey + headers["x-functions-clientid"] = conf.authorization.clientid + else + -- If neither api keys are set with the client request nor inside the plugin attributes + -- plugin will fallback to the master key (if any) present inside the metadata. 
+ local metadata = plugin.plugin_metadata(plugin_name) + if metadata then + headers["x-functions-key"] = metadata.value.master_apikey + headers["x-functions-clientid"] = metadata.value.master_clientid + end + end + end + + params.headers = headers +end + + +return require("apisix.plugins.serverless.generic-upstream")(plugin_name, + plugin_version, priority, request_processor, azure_authz_schema, metadata_schema) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/basic-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/basic-auth.lua new file mode 100644 index 0000000..aac1ef9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/basic-auth.lua @@ -0,0 +1,189 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx = ngx +local ngx_re = require("ngx.re") +local consumer = require("apisix.consumer") +local schema_def = require("apisix.schema_def") +local auth_utils = require("apisix.utils.auth") + +local lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) + +local schema = { + type = "object", + title = "work with route or service object", + properties = { + hide_credentials = { + type = "boolean", + default = false, + } + }, + anonymous_consumer = schema_def.anonymous_consumer_schema, +} + +local consumer_schema = { + type = "object", + title = "work with consumer object", + properties = { + username = { type = "string" }, + password = { type = "string" }, + }, + encrypt_fields = {"password"}, + required = {"username", "password"}, +} + +local plugin_name = "basic-auth" + + +local _M = { + version = 0.1, + priority = 2520, + type = 'auth', + name = plugin_name, + schema = schema, + consumer_schema = consumer_schema +} + +function _M.check_schema(conf, schema_type) + local ok, err + if schema_type == core.schema.TYPE_CONSUMER then + ok, err = core.schema.check(consumer_schema, conf) + else + ok, err = core.schema.check(schema, conf) + end + + if not ok then + return false, err + end + + return true +end + +local function extract_auth_header(authorization) + + local function do_extract(auth) + local obj = { username = "", password = "" } + + local m, err = ngx.re.match(auth, "Basic\\s(.+)", "jo") + if err then + -- error authorization + return nil, err + end + + if not m then + return nil, "Invalid authorization header format" + end + + local decoded = ngx.decode_base64(m[1]) + + if not decoded then + return nil, "Failed to decode authentication header: " .. m[1] + end + + local res + res, err = ngx_re.split(decoded, ":") + if err then + return nil, "Split authorization err:" .. err + end + if #res < 2 then + return nil, "Split authorization err: invalid decoded data: " .. 
decoded + end + + obj.username = ngx.re.gsub(res[1], "\\s+", "", "jo") + obj.password = ngx.re.gsub(res[2], "\\s+", "", "jo") + core.log.info("plugin access phase, authorization: ", + obj.username, ": ", obj.password) + + return obj, nil + end + + local matcher, err = lrucache(authorization, nil, do_extract, authorization) + + if matcher then + return matcher.username, matcher.password, err + else + return "", "", err + end + +end + + +local function find_consumer(ctx) + local auth_header = core.request.header(ctx, "Authorization") + if not auth_header then + core.response.set_header("WWW-Authenticate", "Basic realm='.'") + return nil, nil, "Missing authorization in request" + end + + local username, password, err = extract_auth_header(auth_header) + if err then + if auth_utils.is_running_under_multi_auth(ctx) then + return nil, nil, err + end + core.log.warn(err) + return nil, nil, "Invalid authorization in request" + end + + local cur_consumer, consumer_conf, err = consumer.find_consumer(plugin_name, + "username", username) + if not cur_consumer then + err = "failed to find user: " .. (err or "invalid user") + if auth_utils.is_running_under_multi_auth(ctx) then + return nil, nil, err + end + core.log.warn(err) + return nil, nil, "Invalid user authorization" + end + + if cur_consumer.auth_conf.password ~= password then + return nil, nil, "Invalid user authorization" + end + + return cur_consumer, consumer_conf, err +end + + +function _M.rewrite(conf, ctx) + core.log.info("plugin access phase, conf: ", core.json.delay_encode(conf)) + + local cur_consumer, consumer_conf, err = find_consumer(ctx) + if not cur_consumer then + if not conf.anonymous_consumer then + return 401, { message = err } + end + cur_consumer, consumer_conf, err = consumer.get_anonymous_consumer(conf.anonymous_consumer) + if not cur_consumer then + err = "basic-auth failed to authenticate the request, code: 401. error: " .. err + core.log.error(err) + return 401, { message = "Invalid user authorization" } + end + end + + core.log.info("consumer: ", core.json.delay_encode(cur_consumer)) + + if conf.hide_credentials then + core.request.set_header(ctx, "Authorization", nil) + end + + consumer.attach_consumer(ctx, cur_consumer, consumer_conf) + + core.log.info("hit basic-auth access") +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/batch-requests.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/batch-requests.lua new file mode 100644 index 0000000..a1b5743 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/batch-requests.lua @@ -0,0 +1,309 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
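The batch-requests plugin that follows accepts a single JSON document describing a pipeline of sub-requests and replays them against the local APISIX instance over one connection, returning the individual results as a JSON array. A minimal sketch of such a request body, written as a Lua table (field names follow the req_schema further down; the paths, header and timeout values are illustrative assumptions, and lua-cjson is assumed to be available, as it is in the APISIX runtime):

local cjson = require("cjson.safe")

-- Top-level "headers"/"query" are merged into every pipelined request unless
-- that request sets its own value; "pipeline" is the only required field.
local batch_body = {
    headers = { ["X-Trace-Id"] = "abc123" },
    timeout = 2000,  -- milliseconds; the schema default is 30000
    pipeline = {
        { method = "GET",  path = "/status" },
        { method = "POST", path = "/orders",
          headers = { ["Content-Type"] = "application/json" } },
    },
}

-- POST the encoded body to the plugin endpoint (default: /apisix/batch-requests);
-- the response is an array with one {status, reason, headers, body} entry per
-- pipelined request.
print(cjson.encode(batch_body))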
+-- +local core = require("apisix.core") +local http = require("resty.http") +local plugin = require("apisix.plugin") +local ngx = ngx +local ipairs = ipairs +local pairs = pairs +local str_find = core.string.find +local str_lower = string.lower + + +local plugin_name = "batch-requests" + +local default_uri = "/apisix/batch-requests" + +local attr_schema = { + type = "object", + properties = { + uri = { + type = "string", + description = "uri for batch-requests", + default = default_uri + } + }, +} + +local schema = { + type = "object", +} + +local default_max_body_size = 1024 * 1024 -- 1MiB +local metadata_schema = { + type = "object", + properties = { + max_body_size = { + description = "max pipeline body size in bytes", + type = "integer", + exclusiveMinimum = 0, + default = default_max_body_size, + }, + }, +} + +local method_schema = core.table.clone(core.schema.method_schema) +method_schema.default = "GET" + +local req_schema = { + type = "object", + properties = { + query = { + description = "pipeline query string", + type = "object" + }, + headers = { + description = "pipeline header", + type = "object" + }, + timeout = { + description = "pipeline timeout(ms)", + type = "integer", + default = 30000, + }, + pipeline = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + version = { + description = "HTTP version", + type = "number", + enum = {1.0, 1.1}, + default = 1.1, + }, + method = method_schema, + path = { + type = "string", + minLength = 1, + }, + query = { + description = "request header", + type = "object", + }, + headers = { + description = "request query string", + type = "object", + }, + ssl_verify = { + type = "boolean", + default = false + }, + } + } + } + }, + anyOf = { + {required = {"pipeline"}}, + }, +} + +local _M = { + version = 0.1, + priority = 4010, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, + attr_schema = attr_schema, + scope = "global", +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) +end + + +local function check_input(data) + local ok, err = core.schema.check(req_schema, data) + if not ok then + return 400, {error_msg = "bad request body: " .. 
err} + end +end + +local function lowercase_key_or_init(obj) + if not obj then + return {} + end + + local lowercase_key_obj = {} + for k, v in pairs(obj) do + lowercase_key_obj[str_lower(k)] = v + end + + return lowercase_key_obj +end + +local function ensure_header_lowercase(data) + data.headers = lowercase_key_or_init(data.headers) + + for i,req in ipairs(data.pipeline) do + req.headers = lowercase_key_or_init(req.headers) + end +end + + +local function set_common_header(data) + local local_conf = core.config.local_conf() + local real_ip_hdr = core.table.try_read_attr(local_conf, "nginx_config", "http", + "real_ip_header") + -- we don't need to handle '_' to '-' as Nginx won't treat 'X_REAL_IP' as 'X-Real-IP' + real_ip_hdr = str_lower(real_ip_hdr) + + local outer_headers = core.request.headers(nil) + for i,req in ipairs(data.pipeline) do + for k, v in pairs(data.headers) do + if not req.headers[k] then + req.headers[k] = v + end + end + + if outer_headers then + for k, v in pairs(outer_headers) do + local is_content_header = str_find(k, "content-") == 1 + -- skip header start with "content-" + if not req.headers[k] and not is_content_header then + req.headers[k] = v + end + end + end + + req.headers[real_ip_hdr] = core.request.get_remote_client_ip() + end +end + + +local function set_common_query(data) + if not data.query then + return + end + + for i,req in ipairs(data.pipeline) do + if not req.query then + req.query = data.query + else + for k, v in pairs(data.query) do + if not req.query[k] then + req.query[k] = v + end + end + end + end +end + + +local function batch_requests(ctx) + local metadata = plugin.plugin_metadata(plugin_name) + core.log.info("metadata: ", core.json.delay_encode(metadata)) + + local max_body_size + if metadata then + max_body_size = metadata.value.max_body_size + else + max_body_size = default_max_body_size + end + + local req_body, err = core.request.get_body(max_body_size, ctx) + if err then + -- Nginx doesn't support 417: https://trac.nginx.org/nginx/ticket/2062 + -- So always return 413 instead + return 413, { error_msg = err } + end + if not req_body then + return 400, { + error_msg = "no request body, you should give at least one pipeline setting" + } + end + + local data, err = core.json.decode(req_body) + if not data then + return 400, { + error_msg = "invalid request body: " .. req_body .. ", err: " .. err + } + end + + local code, body = check_input(data) + if code then + return code, body + end + + local httpc = http.new() + httpc:set_timeout(data.timeout) + local ok, err = httpc:connect("127.0.0.1", ngx.var.server_port) + if not ok then + return 500, {error_msg = "connect to apisix failed: " .. err} + end + + ensure_header_lowercase(data) + set_common_header(data) + set_common_query(data) + + local responses, err = httpc:request_pipeline(data.pipeline) + if not responses then + return 400, {error_msg = "request failed: " .. 
err} + end + + local aggregated_resp = {} + for _, resp in ipairs(responses) do + if not resp.status then + core.table.insert(aggregated_resp, { + status = 504, + reason = "upstream timeout" + }) + end + local sub_resp = { + status = resp.status, + reason = resp.reason, + headers = resp.headers, + } + if resp.has_body then + local err + sub_resp.body, err = resp:read_body() + if err then + sub_resp.read_body_err = err + core.log.error("read pipeline response body failed: ", err) + else + resp:read_trailers() + end + end + core.table.insert(aggregated_resp, sub_resp) + end + return 200, aggregated_resp +end + + +function _M.api() + local uri = default_uri + local attr = plugin.plugin_attr(plugin_name) + if attr then + uri = attr.uri or default_uri + end + return { + { + methods = {"POST"}, + uri = uri, + handler = batch_requests, + } + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/body-transformer.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/body-transformer.lua new file mode 100644 index 0000000..9cb881a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/body-transformer.lua @@ -0,0 +1,261 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
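The body-transformer plugin that follows decodes the request or response body (JSON, XML, urlencoded form, multipart or query args), hands the decoded fields to a lua-resty-template template, and exposes helpers such as _escape_xml/_escape_json for writing values back safely. A plain-Lua sketch of what that XML escaping does, mirroring the escape_xml helper defined below (illustration only, not part of the patch):

-- Escape the five XML special characters; '&' is handled first so the entities
-- produced by the later substitutions are not escaped a second time.
local function escape_xml(s)
    return (s:gsub("&", "&amp;")
             :gsub("<", "&lt;")
             :gsub(">", "&gt;")
             :gsub("'", "&apos;")
             :gsub('"', "&quot;"))
end

assert(escape_xml([[<city a="1">Tom & Jerry</city>]])
       == "&lt;city a=&quot;1&quot;&gt;Tom &amp; Jerry&lt;/city&gt;")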
+--
+local core = require("apisix.core")
+local xml2lua = require("xml2lua")
+local xmlhandler = require("xmlhandler.tree")
+local template = require("resty.template")
+local ngx = ngx
+local decode_base64 = ngx.decode_base64
+local req_set_body_data = ngx.req.set_body_data
+local req_get_uri_args = ngx.req.get_uri_args
+local str_format = string.format
+local decode_args = ngx.decode_args
+local str_find = core.string.find
+local type = type
+local pcall = pcall
+local pairs = pairs
+local next = next
+local multipart = require("multipart")
+local setmetatable = setmetatable
+
+local transform_schema = {
+    type = "object",
+    properties = {
+        input_format = { type = "string",
+            enum = {"xml", "json", "encoded", "args", "plain", "multipart",}},
+        template = { type = "string" },
+        template_is_base64 = { type = "boolean" },
+    },
+    required = {"template"},
+}
+
+local schema = {
+    type = "object",
+    properties = {
+        request = transform_schema,
+        response = transform_schema,
+    },
+    anyOf = {
+        {required = {"request"}},
+        {required = {"response"}},
+        {required = {"request", "response"}},
+    },
+}
+
+
+local _M = {
+    version = 0.1,
+    priority = 1080,
+    name = "body-transformer",
+    schema = schema,
+}
+
+
+local function escape_xml(s)
+    return s:gsub("&", "&amp;")
+        :gsub("<", "&lt;")
+        :gsub(">", "&gt;")
+        :gsub("'", "&apos;")
+        :gsub('"', "&quot;")
+end
+
+
+local function escape_json(s)
+    return core.json.encode(s)
+end
+
+
+local function remove_namespace(tbl)
+    for k, v in pairs(tbl) do
+        if type(v) == "table" and next(v) == nil then
+            v = ""
+            tbl[k] = v
+        end
+        if type(k) == "string" then
+            local newk = k:match(".*:(.*)")
+            if newk then
+                tbl[newk] = v
+                tbl[k] = nil
+            end
+            if type(v) == "table" then
+                remove_namespace(v)
+            end
+        end
+    end
+    return tbl
+end
+
+
+local decoders = {
+    xml = function(data)
+        local handler = xmlhandler:new()
+        local parser = xml2lua.parser(handler)
+        local ok, err = pcall(parser.parse, parser, data)
+        if ok then
+            return remove_namespace(handler.root)
+        else
+            return nil, err
+        end
+    end,
+    json = function(data)
+        return core.json.decode(data)
+    end,
+    encoded = function(data)
+        return decode_args(data)
+    end,
+    args = function()
+        return req_get_uri_args()
+    end,
+    multipart = function (data, content_type_header)
+        local res = multipart(data, content_type_header)
+        return res
+    end
+}
+
+
+function _M.check_schema(conf)
+    return core.schema.check(schema, conf)
+end
+
+
+local function transform(conf, body, typ, ctx, request_method)
+    local out = {}
+    local _multipart
+    local format = conf[typ].input_format
+    local ct = ctx.var.http_content_type
+    if typ == "response" then
+        ct = ngx.header.content_type
+    end
+    if (body or request_method == "GET") and format ~= "plain" then
+        local err
+        if format then
+            out, err = decoders[format](body, ct)
+            if format == "multipart" then
+                _multipart = out
+                out = out:get_all_with_arrays()
+            end
+            if not out then
+                err = str_format("%s body decode: %s", typ, err)
+                core.log.error(err, ", body=", body)
+                return nil, 400, err
+            end
+        else
+            core.log.warn("no input format to parse ", typ, " body")
+        end
+    end
+
+    local text = conf[typ].template
+    if (conf[typ].template_is_base64 or (format and format ~= "encoded" and format ~= "args")) then
+        text = decode_base64(text) or text
+    end
+    local ok, render = pcall(template.compile, text)
+    if not ok then
+        local err = render
+        err = str_format("%s template compile: %s", typ, err)
+        core.log.error(err)
+        return nil, 503, err
+    end
+
+    setmetatable(out, {__index = {
+        _ctx = ctx,
+        _body = body,
+
_escape_xml = escape_xml, + _escape_json = escape_json, + _multipart = _multipart + }}) + + local ok, render_out = pcall(render, out) + if not ok then + local err = str_format("%s template rendering: %s", typ, render_out) + core.log.error(err) + return nil, 503, err + end + + core.log.info(typ, " body transform output=", render_out) + return render_out +end + + +local function set_input_format(conf, typ, ct, method) + if method == "GET" then + conf[typ].input_format = "args" + end + if conf[typ].input_format == nil and ct then + if ct:find("text/xml") then + conf[typ].input_format = "xml" + elseif ct:find("application/json") then + conf[typ].input_format = "json" + elseif str_find(ct:lower(), "application/x-www-form-urlencoded", nil, true) then + conf[typ].input_format = "encoded" + elseif str_find(ct:lower(), "multipart/", nil, true) then + conf[typ].input_format = "multipart" + end + end +end + + +function _M.rewrite(conf, ctx) + if conf.request then + local request_method = ngx.var.request_method + conf = core.table.deepcopy(conf) + ctx.body_transformer_conf = conf + local body = core.request.get_body() + set_input_format(conf, "request", ctx.var.http_content_type, request_method) + local out, status, err = transform(conf, body, "request", ctx, request_method) + if not out then + return status, { message = err } + end + req_set_body_data(out) + end +end + + +function _M.header_filter(conf, ctx) + if conf.response then + if not ctx.body_transformer_conf then + conf = core.table.deepcopy(conf) + ctx.body_transformer_conf = conf + end + set_input_format(conf, "response", ngx.header.content_type) + core.response.clear_header_as_body_modified() + end +end + + +function _M.body_filter(_, ctx) + local conf = ctx.body_transformer_conf + if not conf then + return + end + if conf.response then + local body = core.response.hold_body_chunk(ctx) + if ngx.arg[2] == false and not body then + return + end + + local out = transform(conf, body, "response", ctx) + if not out then + core.log.error("failed to transform response body: ", body) + return + end + + ngx.arg[1] = out + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/brotli.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/brotli.lua new file mode 100644 index 0000000..031bd8e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/brotli.lua @@ -0,0 +1,248 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
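The brotli plugin below compresses eligible responses only when the client advertises support, which it checks by scanning Accept-Encoding for a "br" (or "*") entry whose q-value is not "0". A plain-Lua approximation of that negotiation (the plugin itself uses ngx.re.gmatch inside OpenResty; this standalone sketch with Lua patterns is for illustration only):

local function accepts_br(accept_encoding)
    if not accept_encoding then
        return false
    end
    if accept_encoding == "*" or accept_encoding == "br" then
        return true
    end
    -- capture each content coding plus an optional q-value, e.g. "br;q=0.8"
    for coding, q in accept_encoding:gmatch("([%a*]+);?q?=?([%d.]*)") do
        if (coding == "br" or coding == "*") and q ~= "0" then
            return true
        end
    end
    return false
end

assert(accepts_br("gzip, br;q=0.8"))     -- br offered with a non-zero q-value
assert(not accepts_br("gzip, br;q=0"))   -- br explicitly refused
assert(not accepts_br("gzip, deflate"))  -- br not offered at all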
+-- +local core = require("apisix.core") +local ngx = ngx +local ngx_re_gmatch = ngx.re.gmatch +local ngx_header = ngx.header +local req_http_version = ngx.req.http_version +local str_sub = string.sub +local ipairs = ipairs +local tonumber = tonumber +local type = type +local is_loaded, brotli = pcall(require, "brotli") + + +local schema = { + type = "object", + properties = { + types = { + anyOf = { + { + type = "array", + minItems = 1, + items = { + type = "string", + minLength = 1, + }, + }, + { + enum = {"*"} + } + }, + default = {"text/html"} + }, + min_length = { + type = "integer", + minimum = 1, + default = 20, + }, + mode = { + type = "integer", + minimum = 0, + maximum = 2, + default = 0, + -- 0: MODE_GENERIC (default), + -- 1: MODE_TEXT (for UTF-8 format text input) + -- 2: MODE_FONT (for WOFF 2.0) + }, + comp_level = { + type = "integer", + minimum = 0, + maximum = 11, + default = 6, + -- follow the default value from ngx_brotli brotli_comp_level + }, + lgwin = { + type = "integer", + default = 19, + -- follow the default value from ngx_brotli brotli_window + enum = {0,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24}, + }, + lgblock = { + type = "integer", + default = 0, + enum = {0,16,17,18,19,20,21,22,23,24}, + }, + http_version = { + enum = {1.1, 1.0}, + default = 1.1, + }, + vary = { + type = "boolean", + } + }, +} + + +local _M = { + version = 0.1, + priority = 996, + name = "brotli", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function create_brotli_compressor(mode, comp_level, lgwin, lgblock) + local options = { + mode = mode, + quality = comp_level, + lgwin = lgwin, + lgblock = lgblock, + } + return brotli.compressor:new(options) +end + + +local function check_accept_encoding(ctx) + local accept_encoding = core.request.header(ctx, "Accept-Encoding") + -- no Accept-Encoding + if not accept_encoding then + return false + end + + -- single Accept-Encoding + if accept_encoding == "*" or accept_encoding == "br" then + return true + end + + -- multi Accept-Encoding + local iterator, err = ngx_re_gmatch(accept_encoding, + [[([a-z\*]+)(;q=)?([0-9.]*)?]], "jo") + if not iterator then + core.log.error("gmatch failed, error: ", err) + return false + end + + local captures + while true do + captures, err = iterator() + if not captures then + break + end + if err then + core.log.error("iterator failed, error: ", err) + return false + end + if (captures[1] == "br" or captures[1] == "*") and + (not captures[2] or captures[3] ~= "0") then + return true + end + end + + return false +end + + +function _M.header_filter(conf, ctx) + if not is_loaded then + core.log.error("please check the brotli library") + return + end + + local allow_encoding = check_accept_encoding(ctx) + if not allow_encoding then + return + end + + local content_encoded = ngx_header["Content-Encoding"] + if content_encoded then + -- Don't compress if Content-Encoding is present in upstream data + return + end + + local types = conf.types + local content_type = ngx_header["Content-Type"] + if not content_type then + -- Like Nginx, don't compress if Content-Type is missing + return + end + + if type(types) == "table" then + local matched = false + local from = core.string.find(content_type, ";") + if from then + content_type = str_sub(content_type, 1, from - 1) + end + + for _, ty in ipairs(types) do + if content_type == ty then + matched = true + break + end + end + + if not matched then + return + end + end + + local content_length = 
tonumber(ngx_header["Content-Length"]) + if content_length then + local min_length = conf.min_length + if content_length < min_length then + return + end + -- Like Nginx, don't check min_length if Content-Length is missing + end + + local http_version = req_http_version() + if http_version < conf.http_version then + return + end + + if conf.vary then + core.response.add_header("Vary", "Accept-Encoding") + end + + local compressor = create_brotli_compressor(conf.mode, conf.comp_level, + conf.lgwin, conf.lgblock) + if not compressor then + core.log.error("failed to create brotli compressor") + return + end + + ctx.brotli_matched = true + ctx.compressor = compressor + core.response.clear_header_as_body_modified() + core.response.add_header("Content-Encoding", "br") +end + + +function _M.body_filter(conf, ctx) + if not ctx.brotli_matched then + return + end + + local chunk, eof = ngx.arg[1], ngx.arg[2] + if type(chunk) == "string" and chunk ~= "" then + local encode_chunk = ctx.compressor:compress(chunk) + ngx.arg[1] = encode_chunk .. ctx.compressor:flush() + end + + if eof then + -- overwriting the arg[1], results into partial response + ngx.arg[1] = ngx.arg[1] .. ctx.compressor:finish() + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/cas-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/cas-auth.lua new file mode 100644 index 0000000..d949636 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/cas-auth.lua @@ -0,0 +1,201 @@ +-- +---- Licensed to the Apache Software Foundation (ASF) under one or more +---- contributor license agreements. See the NOTICE file distributed with +---- this work for additional information regarding copyright ownership. +---- The ASF licenses this file to You under the Apache License, Version 2.0 +---- (the "License"); you may not use this file except in compliance with +---- the License. You may obtain a copy of the License at +---- +---- http://www.apache.org/licenses/LICENSE-2.0 +---- +---- Unless required by applicable law or agreed to in writing, software +---- distributed under the License is distributed on an "AS IS" BASIS, +---- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +---- See the License for the specific language governing permissions and +---- limitations under the License. +---- +local core = require("apisix.core") +local http = require("resty.http") +local ngx = ngx +local ngx_re_match = ngx.re.match + +local CAS_REQUEST_URI = "CAS_REQUEST_URI" +local COOKIE_NAME = "CAS_SESSION" +local COOKIE_PARAMS = "; Path=/; HttpOnly" +local SESSION_LIFETIME = 3600 +local STORE_NAME = "cas_sessions" + +local store = ngx.shared[STORE_NAME] + + +local plugin_name = "cas-auth" +local schema = { + type = "object", + properties = { + idp_uri = {type = "string"}, + cas_callback_uri = {type = "string"}, + logout_uri = {type = "string"}, + }, + required = { + "idp_uri", "cas_callback_uri", "logout_uri" + } +} + +local _M = { + version = 0.1, + priority = 2597, + name = plugin_name, + schema = schema +} + +function _M.check_schema(conf) + local check = {"idp_uri"} + core.utils.check_https(check, conf, plugin_name) + return core.schema.check(schema, conf) +end + +local function uri_without_ticket(conf, ctx) + return ctx.var.scheme .. "://" .. ctx.var.host .. ":" .. + ctx.var.server_port .. conf.cas_callback_uri +end + +local function get_session_id(ctx) + return ctx.var["cookie_" .. 
COOKIE_NAME]
+end
+
+local function set_our_cookie(name, val)
+    core.response.add_header("Set-Cookie", name .. "=" .. val .. COOKIE_PARAMS)
+end
+
+local function first_access(conf, ctx)
+    local login_uri = conf.idp_uri .. "/login?" ..
+        ngx.encode_args({ service = uri_without_ticket(conf, ctx) })
+    core.log.info("first access: ", login_uri,
+        ", cookie: ", ctx.var.http_cookie, ", request_uri: ", ctx.var.request_uri)
+    set_our_cookie(CAS_REQUEST_URI, ctx.var.request_uri)
+    core.response.set_header("Location", login_uri)
+    return ngx.HTTP_MOVED_TEMPORARILY
+end
+
+local function with_session_id(conf, ctx, session_id)
+    -- does the cookie exist in our store?
+    local user = store:get(session_id);
+    core.log.info("ticket=", session_id, ", user=", user)
+    if user == nil then
+        set_our_cookie(COOKIE_NAME, "deleted; Max-Age=0")
+        return first_access(conf, ctx)
+    else
+        -- refresh the TTL
+        store:set(session_id, user, SESSION_LIFETIME)
+    end
+end
+
+local function set_store_and_cookie(session_id, user)
+    -- place cookie into cookie store
+    local success, err, forcible = store:add(session_id, user, SESSION_LIFETIME)
+    if success then
+        if forcible then
+            core.log.info("CAS cookie store is out of memory")
+        end
+        set_our_cookie(COOKIE_NAME, session_id)
+    else
+        if err == "no memory" then
+            core.log.emerg("CAS cookie store is out of memory")
+        elseif err == "exists" then
+            core.log.error("Same CAS ticket validated twice, this should never happen!")
+        else
+            core.log.error("CAS cookie store: ", err)
+        end
+    end
+    return success
+end
+
+local function validate(conf, ctx, ticket)
+    -- send a request to CAS to validate the ticket
+    local httpc = http.new()
+    local res, err = httpc:request_uri(conf.idp_uri ..
+        "/serviceValidate",
+        { query = { ticket = ticket, service = uri_without_ticket(conf, ctx) } })
+
+    if res and res.status == ngx.HTTP_OK and res.body ~= nil then
+        if core.string.find(res.body, "<cas:authenticationSuccess>") then
+            local m = ngx_re_match(res.body, "<cas:user>(.*?)</cas:user>", "jo");
+            if m then
+                return m[1]
+            end
+        else
+            core.log.info("CAS serviceValidate failed: ", res.body)
+        end
+    else
+        core.log.error("validate ticket failed: status=", (res and res.status),
+            ", has_body=", (res and res.body ~= nil or false), ", err=", err)
+    end
+    return nil
+end
+
+local function validate_with_cas(conf, ctx, ticket)
+    local user = validate(conf, ctx, ticket)
+    if user and set_store_and_cookie(ticket, user) then
+        local request_uri = ctx.var["cookie_" .. CAS_REQUEST_URI]
+        set_our_cookie(CAS_REQUEST_URI, "deleted; Max-Age=0")
+        core.log.info("ticket: ", ticket,
+            ", cookie: ", ctx.var.http_cookie, ", request_uri: ", request_uri, ", user=", user)
+        core.response.set_header("Location", request_uri)
+        return ngx.HTTP_MOVED_TEMPORARILY
+    else
+        return ngx.HTTP_UNAUTHORIZED, {message = "invalid ticket"}
+    end
+end
+
+local function logout(conf, ctx)
+    local session_id = get_session_id(ctx)
+    if session_id == nil then
+        return ngx.HTTP_UNAUTHORIZED
+    end
+
+    core.log.info("logout: ticket=", session_id, ", cookie=", ctx.var.http_cookie)
+    store:delete(session_id)
+    set_our_cookie(COOKIE_NAME, "deleted; Max-Age=0")
+
+    core.response.set_header("Location", conf.idp_uri ..
+        "/logout")
+    return ngx.HTTP_MOVED_TEMPORARILY
+end
+
+function _M.access(conf, ctx)
+    local method = core.request.get_method()
+    local uri = ctx.var.uri
+
+    if method == "GET" and uri == conf.logout_uri then
+        return logout(conf, ctx)
+    end
+
+    if method == "POST" and uri == conf.cas_callback_uri then
+        local data = core.request.get_body()
+        local ticket = data:match("<samlp:SessionIndex>(.*)</samlp:SessionIndex>")
+        if ticket == nil then
+            return ngx.HTTP_BAD_REQUEST,
+                {message = "invalid logout request from IdP, no ticket"}
+        end
+        core.log.info("Back-channel logout (SLO) from IdP: LogoutRequest: ", data)
+        local session_id = ticket
+        local user = store:get(session_id);
+        if user then
+            store:delete(session_id)
+            core.log.info("SLO: user=", user, ", ticket=", ticket)
+        end
+    else
+        local session_id = get_session_id(ctx)
+        if session_id ~= nil then
+            return with_session_id(conf, ctx, session_id)
+        end
+
+        local ticket = ctx.var.arg_ticket
+        if ticket ~= nil and uri == conf.cas_callback_uri then
+            return validate_with_cas(conf, ctx, ticket)
+        else
+            return first_access(conf, ctx)
+        end
+    end
+end
+
+return _M
diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/chaitin-waf.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/chaitin-waf.lua
new file mode 100644
index 0000000..39bb088
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/chaitin-waf.lua
@@ -0,0 +1,421 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements. See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+-- +local require = require +local core = require("apisix.core") +local rr_balancer = require("apisix.balancer.roundrobin") +local plugin = require("apisix.plugin") +local t1k = require "resty.t1k" +local expr = require("resty.expr.v1") + +local ngx = ngx +local ngx_now = ngx.now +local string = string +local fmt = string.format +local tostring = tostring +local tonumber = tonumber +local ipairs = ipairs + +local plugin_name = "chaitin-waf" + +local vars_schema = { + type = "array", +} + +local lrucache = core.lrucache.new({ + ttl = 300, count = 1024 +}) + +local match_schema = { + type = "array", + items = { + type = "object", + properties = { + vars = vars_schema + } + }, +} + +local plugin_schema = { + type = "object", + properties = { + mode = { + type = "string", + enum = { "off", "monitor", "block", nil }, + default = nil, + }, + match = match_schema, + append_waf_resp_header = { + type = "boolean", + default = true + }, + append_waf_debug_header = { + type = "boolean", + default = false + }, + config = { + type = "object", + properties = { + connect_timeout = { + type = "integer", + }, + send_timeout = { + type = "integer", + }, + read_timeout = { + type = "integer", + }, + req_body_size = { + type = "integer", + }, + keepalive_size = { + type = "integer", + }, + keepalive_timeout = { + type = "integer", + }, + real_client_ip = { + type = "boolean" + } + }, + }, + }, +} + +local metadata_schema = { + type = "object", + properties = { + mode = { + type = "string", + enum = { "off", "monitor", "block", nil }, + default = nil, + }, + nodes = { + type = "array", + items = { + type = "object", + properties = { + host = { + type = "string", + pattern = "^\\*?[0-9a-zA-Z-._\\[\\]:/]+$" + }, + port = { + type = "integer", + minimum = 1, + default = 80 + }, + }, + required = { "host" } + }, + minItems = 1, + }, + config = { + type = "object", + properties = { + connect_timeout = { + type = "integer", + default = 1000 -- milliseconds + }, + send_timeout = { + type = "integer", + default = 1000 -- milliseconds + }, + read_timeout = { + type = "integer", + default = 1000 -- milliseconds + }, + req_body_size = { + type = "integer", + default = 1024 -- milliseconds + }, + -- maximum concurrent idle connections to + -- the SafeLine WAF detection service + keepalive_size = { + type = "integer", + default = 256 + }, + keepalive_timeout = { + type = "integer", + default = 60000 -- milliseconds + }, + real_client_ip = { + type = "boolean", + default = true + } + }, + default = {}, + }, + }, + required = { "nodes" }, +} + +local _M = { + version = 0.1, + priority = 2700, + name = plugin_name, + schema = plugin_schema, + metadata_schema = metadata_schema +} + +local global_server_picker + +local HEADER_CHAITIN_WAF = "X-APISIX-CHAITIN-WAF" +local HEADER_CHAITIN_WAF_ERROR = "X-APISIX-CHAITIN-WAF-ERROR" +local HEADER_CHAITIN_WAF_TIME = "X-APISIX-CHAITIN-WAF-TIME" +local HEADER_CHAITIN_WAF_STATUS = "X-APISIX-CHAITIN-WAF-STATUS" +local HEADER_CHAITIN_WAF_ACTION = "X-APISIX-CHAITIN-WAF-ACTION" +local HEADER_CHAITIN_WAF_SERVER = "X-APISIX-CHAITIN-WAF-SERVER" +local blocked_message = [[{"code": %s, "success":false, ]] .. 
+ [["message": "blocked by Chaitin SafeLine Web Application Firewall", "event_id": "%s"}]] +local warning_message = "chaitin-waf monitor mode: request would have been rejected, event_id: " + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local ok, err = core.schema.check(plugin_schema, conf) + + if not ok then + return false, err + end + + if conf.match then + for _, m in ipairs(conf.match) do + local ok, err = expr.new(m.vars) + if not ok then + return false, "failed to validate the 'vars' expression: " .. err + end + end + end + + return true +end + + +local function get_healthy_chaitin_server_nodes(metadata, checker) + local nodes = metadata.nodes + local new_nodes = core.table.new(0, #nodes) + + for i = 1, #nodes do + local host, port = nodes[i].host, nodes[i].port + new_nodes[host .. ":" .. tostring(port)] = 1 + end + + return new_nodes +end + + +local function get_chaitin_server(metadata, ctx) + if not global_server_picker or global_server_picker.upstream ~= metadata.value.nodes then + local up_nodes = get_healthy_chaitin_server_nodes(metadata.value) + if core.table.nkeys(up_nodes) == 0 then + return nil, nil, "no healthy nodes" + end + core.log.info("chaitin-waf nodes: ", core.json.delay_encode(up_nodes)) + + global_server_picker = rr_balancer.new(up_nodes, metadata.value.nodes) + end + + local server = global_server_picker.get(ctx) + local host, port, err = core.utils.parse_addr(server) + if err then + return nil, nil, err + end + + return host, port, nil +end + + +local function check_match(conf, ctx) + if not conf.match or #conf.match == 0 then + return true + end + + for _, match in ipairs(conf.match) do + local cache_key = tostring(match.vars) + + local exp, err = lrucache(cache_key, nil, function(vars) + return expr.new(vars) + end, match.vars) + + if not exp then + local msg = "failed to create match expression for " .. + tostring(match.vars) .. ", err: " .. 
tostring(err) + return false, msg + end + + local matched = exp:eval(ctx.var) + if matched then + return true + end + end + + return false +end + + +local function get_conf(conf, metadata) + local t = { + mode = "block", + real_client_ip = true, + } + + if metadata.config then + t.connect_timeout = metadata.config.connect_timeout + t.send_timeout = metadata.config.send_timeout + t.read_timeout = metadata.config.read_timeout + t.req_body_size = metadata.config.req_body_size + t.keepalive_size = metadata.config.keepalive_size + t.keepalive_timeout = metadata.config.keepalive_timeout + t.real_client_ip = metadata.config.real_client_ip or t.real_client_ip + end + + if conf.config then + t.connect_timeout = conf.config.connect_timeout + t.send_timeout = conf.config.send_timeout + t.read_timeout = conf.config.read_timeout + t.req_body_size = conf.config.req_body_size + t.keepalive_size = conf.config.keepalive_size + t.keepalive_timeout = conf.config.keepalive_timeout + t.real_client_ip = conf.config.real_client_ip or t.real_client_ip + end + + t.mode = conf.mode or metadata.mode or t.mode + + return t +end + + +local function do_access(conf, ctx) + local extra_headers = {} + + local metadata = plugin.plugin_metadata(plugin_name) + if not core.table.try_read_attr(metadata, "value", "nodes") then + extra_headers[HEADER_CHAITIN_WAF] = "err" + extra_headers[HEADER_CHAITIN_WAF_ERROR] = "missing metadata" + return 500, nil, extra_headers + end + + local host, port, err = get_chaitin_server(metadata, ctx) + if err then + extra_headers[HEADER_CHAITIN_WAF] = "unhealthy" + extra_headers[HEADER_CHAITIN_WAF_ERROR] = tostring(err) + + return 500, nil, extra_headers + end + + core.log.info("picked chaitin-waf server: ", host, ":", port) + local t = get_conf(conf, metadata.value) + t.host = host + t.port = port + + extra_headers[HEADER_CHAITIN_WAF_SERVER] = host + + local mode = t.mode or "block" + if mode == "off" then + extra_headers[HEADER_CHAITIN_WAF] = "off" + return nil, nil, extra_headers + end + + local match, err = check_match(conf, ctx) + if not match then + if err then + extra_headers[HEADER_CHAITIN_WAF] = "err" + extra_headers[HEADER_CHAITIN_WAF_ERROR] = tostring(err) + return 500, nil, extra_headers + else + extra_headers[HEADER_CHAITIN_WAF] = "no" + return nil, nil, extra_headers + end + end + + if t.real_client_ip then + t.client_ip = ctx.var.http_x_forwarded_for or ctx.var.remote_addr + else + t.client_ip = ctx.var.remote_addr + end + + local start_time = ngx_now() * 1000 + local ok, err, result = t1k.do_access(t, false) + + extra_headers[HEADER_CHAITIN_WAF_TIME] = ngx_now() * 1000 - start_time + + if not ok then + extra_headers[HEADER_CHAITIN_WAF] = "waf-err" + local err_msg = tostring(err) + if core.string.find(err_msg, "timeout") then + extra_headers[HEADER_CHAITIN_WAF] = "timeout" + end + extra_headers[HEADER_CHAITIN_WAF_ERROR] = tostring(err) + + if mode == "monitor" then + core.log.warn("chaitin-waf monitor mode: detected waf error - ", err_msg) + return nil, nil, extra_headers + end + + return 500, nil, extra_headers + else + extra_headers[HEADER_CHAITIN_WAF] = "yes" + extra_headers[HEADER_CHAITIN_WAF_ACTION] = "pass" + end + + local code = 200 + extra_headers[HEADER_CHAITIN_WAF_STATUS] = code + + if result and result.status and result.status ~= 200 and result.event_id then + extra_headers[HEADER_CHAITIN_WAF_STATUS] = result.status + extra_headers[HEADER_CHAITIN_WAF_ACTION] = "reject" + + if mode == "monitor" then + core.log.warn(warning_message, result.event_id) + return nil, nil, 
extra_headers + end + + core.log.error("request rejected by chaitin-waf, event_id: " .. result.event_id) + + return tonumber(result.status), + fmt(blocked_message, result.status, result.event_id) .. "\n", + extra_headers + end + + return nil, nil, extra_headers +end + + +function _M.access(conf, ctx) + local code, msg, extra_headers = do_access(conf, ctx) + + if not conf.append_waf_debug_header then + extra_headers[HEADER_CHAITIN_WAF_ERROR] = nil + extra_headers[HEADER_CHAITIN_WAF_SERVER] = nil + end + + if conf.append_waf_resp_header then + core.response.set_header(extra_headers) + end + + return code, msg +end + + +function _M.header_filter(conf, ctx) + t1k.do_header_filter() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/clickhouse-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/clickhouse-logger.lua new file mode 100644 index 0000000..793a8d4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/clickhouse-logger.lua @@ -0,0 +1,208 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
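+-- Example (illustrative sketch added for clarity; the endpoint, credentials and table names are
+-- placeholder assumptions, not taken from this repository): a minimal `conf` for the
+-- clickhouse-logger plugin defined below could look like
+--   {
+--     endpoint_addrs = { "http://127.0.0.1:8123" },
+--     user = "default",
+--     password = "",
+--     database = "apisix",
+--     logtable = "access_logs",
+--   }
+-- Batched entries are written with "INSERT INTO ... FORMAT JSONEachRow", as implemented in
+-- send_http_data below.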
+-- + +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local log_util = require("apisix.utils.log-util") +local core = require("apisix.core") +local http = require("resty.http") +local url = require("net.url") +local math_random = math.random + +local tostring = tostring + +local plugin_name = "clickhouse-logger" +local batch_processor_manager = bp_manager_mod.new(plugin_name) + +local schema = { + type = "object", + properties = { + -- deprecated, use "endpoint_addrs" instead + endpoint_addr = core.schema.uri_def, + endpoint_addrs = {items = core.schema.uri_def, type = "array", minItems = 1}, + user = {type = "string", default = ""}, + password = {type = "string", default = ""}, + database = {type = "string", default = ""}, + logtable = {type = "string", default = ""}, + timeout = {type = "integer", minimum = 1, default = 3}, + name = {type = "string", default = "clickhouse logger"}, + ssl_verify = {type = "boolean", default = true}, + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + } + }, + oneOf = { + {required = {"endpoint_addr", "user", "password", "database", "logtable"}}, + {required = {"endpoint_addrs", "user", "password", "database", "logtable"}} + }, + encrypt_fields = {"password"}, +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local _M = { + version = 0.1, + priority = 398, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + local check = {"endpoint_addrs"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + + return core.schema.check(schema, conf) +end + + +local function send_http_data(conf, log_message) + local err_msg + local res = true + local selected_endpoint_addr + if conf.endpoint_addr then + selected_endpoint_addr = conf.endpoint_addr + else + selected_endpoint_addr = conf.endpoint_addrs[math_random(#conf.endpoint_addrs)] + end + local url_decoded = url.parse(selected_endpoint_addr) + local host = url_decoded.host + local port = url_decoded.port + + core.log.info("sending a batch logs to ", selected_endpoint_addr) + + if not port then + if url_decoded.scheme == "https" then + port = 443 + else + port = 80 + end + end + + local httpc = http.new() + httpc:set_timeout(conf.timeout * 1000) + local ok, err = httpc:connect(host, port) + + if not ok then + return false, "failed to connect to host[" .. host .. "] port[" + .. tostring(port) .. "] " .. err + end + + if url_decoded.scheme == "https" then + ok, err = httpc:ssl_handshake(true, host, conf.ssl_verify) + if not ok then + return false, "failed to perform SSL with host[" .. host .. "] " + .. "port[" .. tostring(port) .. "] " .. err + end + end + + local httpc_res, httpc_err = httpc:request({ + method = "POST", + path = url_decoded.path, + query = url_decoded.query, + body = "INSERT INTO " .. conf.logtable .." FORMAT JSONEachRow " .. 
log_message, + headers = { + ["Host"] = url_decoded.host, + ["Content-Type"] = "application/json", + ["X-ClickHouse-User"] = conf.user, + ["X-ClickHouse-Key"] = conf.password, + ["X-ClickHouse-Database"] = conf.database + } + }) + + if not httpc_res then + return false, "error while sending data to [" .. host .. "] port[" + .. tostring(port) .. "] " .. httpc_err + end + + -- some error occurred in the server + if httpc_res.status >= 400 then + res = false + err_msg = "server returned status code[" .. httpc_res.status .. "] host[" + .. host .. "] port[" .. tostring(port) .. "] " + .. "body[" .. httpc_res:read_body() .. "]" + end + + return res, err_msg +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err + + if batch_max_size == 1 then + data, err = core.json.encode(entries[1]) -- encode as single {} + else + local log_table = {} + for i = 1, #entries do + core.table.insert(log_table, core.json.encode(entries[i])) + end + data = core.table.concat(log_table, " ") -- assemble multi items as string "{} {}" + end + + if not data then + return false, 'error occurred while encoding the data: ' .. err + end + + return send_http_data(conf, data) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/client-control.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/client-control.lua new file mode 100644 index 0000000..1975098 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/client-control.lua @@ -0,0 +1,76 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local ok, apisix_ngx_client = pcall(require, "resty.apisix.client") +local tonumber = tonumber + + +local schema = { + type = "object", + properties = { + max_body_size = { + type = "integer", + minimum = 0, + description = "Maximum message body size in bytes. No restriction when set to 0." 
+ }, + }, +} + + +local plugin_name = "client-control" + + +local _M = { + version = 0.1, + priority = 22000, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.rewrite(conf, ctx) + if not ok then + core.log.error("need to build APISIX-Runtime to support client control") + return 501 + end + + if conf.max_body_size then + local len = tonumber(core.request.header(ctx, "Content-Length")) + if len then + -- if length is given in the header, check it immediately + if conf.max_body_size ~= 0 and len > conf.max_body_size then + return 413 + end + end + + -- then check it when reading the body + local ok, err = apisix_ngx_client.set_client_max_body_size(conf.max_body_size) + if not ok then + core.log.error("failed to set client max body size: ", err) + return 503 + end + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/consumer-restriction.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/consumer-restriction.lua new file mode 100644 index 0000000..88c2bbd --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/consumer-restriction.lua @@ -0,0 +1,164 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
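+-- Example (illustrative sketch added for clarity; the consumer names are placeholder
+-- assumptions, not taken from this repository): a `conf` for the consumer-restriction plugin
+-- defined below could look like
+--   {
+--     type = "consumer_name",
+--     whitelist = { "jack", "rose" },
+--     rejected_code = 403,
+--   }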
+-- +local ipairs = ipairs +local core = require("apisix.core") +local ngx = ngx +local schema = { + type = "object", + properties = { + type = { + type = "string", + enum = {"consumer_name", "service_id", "route_id", "consumer_group_id"}, + default = "consumer_name" + }, + blacklist = { + type = "array", + minItems = 1, + items = {type = "string"} + }, + whitelist = { + type = "array", + minItems = 1, + items = {type = "string"} + }, + allowed_by_methods = { + type = "array", + items = { + type = "object", + properties = { + user = { + type = "string" + }, + methods = { + type = "array", + minItems = 1, + items = core.schema.method_schema, + } + } + } + }, + rejected_code = {type = "integer", minimum = 200, default = 403}, + rejected_msg = {type = "string"} + }, + anyOf = { + {required = {"blacklist"}}, + {required = {"whitelist"}}, + {required = {"allowed_by_methods"}} + }, +} + +local plugin_name = "consumer-restriction" + +local _M = { + version = 0.1, + priority = 2400, + name = plugin_name, + schema = schema, +} + +local fetch_val_funcs = { + ["route_id"] = function(ctx) + return ctx.route_id + end, + ["service_id"] = function(ctx) + return ctx.service_id + end, + ["consumer_name"] = function(ctx) + return ctx.consumer_name + end, + ["consumer_group_id"] = function (ctx) + return ctx.consumer_group_id + end +} + +local function is_include(value, tab) + for k,v in ipairs(tab) do + if v == value then + return true + end + end + return false +end + +local function is_method_allowed(allowed_methods, method, user) + for _, value in ipairs(allowed_methods) do + if value.user == user then + for _, allowed_method in ipairs(value.methods) do + if allowed_method == method then + return true + end + end + return false + end + end + return true +end + +local function reject(conf) + if conf.rejected_msg then + return conf.rejected_code , { message = conf.rejected_msg } + end + return conf.rejected_code , { message = "The " .. conf.type .. " is forbidden."} +end + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + return true +end + +function _M.access(conf, ctx) + local value = fetch_val_funcs[conf.type](ctx) + local method = ngx.req.get_method() + + if not value then + local err_msg = "The request is rejected, please check the " + .. conf.type .. " for this request" + return 401, { message = err_msg} + end + core.log.info("value: ", value) + + local block = false + local whitelisted = false + + if conf.blacklist and #conf.blacklist > 0 then + if is_include(value, conf.blacklist) then + return reject(conf) + end + end + + if conf.whitelist and #conf.whitelist > 0 then + whitelisted = is_include(value, conf.whitelist) + if not whitelisted then + block = true + end + end + + if conf.allowed_by_methods and #conf.allowed_by_methods > 0 and not whitelisted then + if not is_method_allowed(conf.allowed_by_methods, method, value) then + block = true + end + end + + if block then + return reject(conf) + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/cors.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/cors.lua new file mode 100644 index 0000000..deae034 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/cors.lua @@ -0,0 +1,402 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local ngx = ngx +local plugin_name = "cors" +local str_find = core.string.find +local re_gmatch = ngx.re.gmatch +local re_compile = require("resty.core.regex").re_match_compile +local re_find = ngx.re.find +local ipairs = ipairs +local origins_pattern = [[^(\*|\*\*|null|\w+://[^,]+(,\w+://[^,]+)*)$]] + +local TYPE_ACCESS_CONTROL_ALLOW_ORIGIN = "ACAO" +local TYPE_TIMING_ALLOW_ORIGIN = "TAO" + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local metadata_schema = { + type = "object", + properties = { + allow_origins = { + type = "object", + additionalProperties = { + type = "string", + pattern = origins_pattern + } + }, + }, +} + +local schema = { + type = "object", + properties = { + allow_origins = { + description = + "you can use '*' to allow all origins when no credentials," .. + "'**' to allow forcefully(it will bring some security risks, be carefully)," .. + "multiple origin use ',' to split. default: *.", + type = "string", + pattern = origins_pattern, + default = "*" + }, + allow_methods = { + description = + "you can use '*' to allow all methods when no credentials," .. + "'**' to allow forcefully(it will bring some security risks, be carefully)," .. + "multiple method use ',' to split. default: *.", + type = "string", + default = "*" + }, + allow_headers = { + description = + "you can use '*' to allow all header when no credentials," .. + "'**' to allow forcefully(it will bring some security risks, be carefully)," .. + "multiple header use ',' to split. default: *.", + type = "string", + default = "*" + }, + expose_headers = { + description = + "multiple header use ',' to split." .. + "If not specified, no custom headers are exposed.", + type = "string" + }, + max_age = { + description = + "maximum number of seconds the results can be cached." .. + "-1 means no cached, the max value is depend on browser," .. + "more details plz check MDN. default: 5.", + type = "integer", + default = 5 + }, + allow_credential = { + description = + "allow client append credential. according to CORS specification," .. + "if you set this option to 'true', you can not use '*' for other options.", + type = "boolean", + default = false + }, + allow_origins_by_regex = { + type = "array", + description = + "you can use regex to allow specific origins when no credentials," .. + "for example use [.*\\.test.com$] to allow a.test.com and b.test.com", + items = { + type = "string", + minLength = 1, + maxLength = 4096, + }, + minItems = 1, + uniqueItems = true, + }, + allow_origins_by_metadata = { + type = "array", + description = + "set allowed origins by referencing origins in plugin metadata", + items = { + type = "string", + minLength = 1, + maxLength = 4096, + }, + minItems = 1, + uniqueItems = true, + }, + timing_allow_origins = { + description = + "you can use '*' to allow all origins which can view timing information " .. 
+ "when no credentials," .. + "'**' to allow forcefully (it will bring some security risks, be careful)," .. + "multiple origin use ',' to split. default: nil", + type = "string", + pattern = origins_pattern + }, + timing_allow_origins_by_regex = { + type = "array", + description = + "you can use regex to allow specific origins which can view timing information," .. + "for example use [.*\\.test.com] to allow a.test.com and b.test.com", + items = { + type = "string", + minLength = 1, + maxLength = 4096, + }, + minItems = 1, + uniqueItems = true, + }, + } +} + +local _M = { + version = 0.1, + priority = 4000, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, +} + + +local function create_multiple_origin_cache(allow_origins) + if not str_find(allow_origins, ",") then + return nil + end + local origin_cache = {} + local iterator, err = re_gmatch(allow_origins, "([^,]+)", "jiox") + if not iterator then + core.log.error("match origins failed: ", err) + return nil + end + while true do + local origin, err = iterator() + if err then + core.log.error("iterate origins failed: ", err) + return nil + end + if not origin then + break + end + origin_cache[origin[0]] = true + end + return origin_cache +end + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + if conf.allow_credential then + if conf.allow_origins == "*" or conf.allow_methods == "*" or + conf.allow_headers == "*" or conf.expose_headers == "*" or + conf.timing_allow_origins == "*" then + return false, "you can not set '*' for other option when 'allow_credential' is true" + end + end + if conf.allow_origins_by_regex then + for i, re_rule in ipairs(conf.allow_origins_by_regex) do + local ok, err = re_compile(re_rule, "j") + if not ok then + return false, err + end + end + end + + if conf.timing_allow_origins_by_regex then + for i, re_rule in ipairs(conf.timing_allow_origins_by_regex) do + local ok, err = re_compile(re_rule, "j") + if not ok then + return false, err + end + end + end + + return true +end + + +local function set_cors_headers(conf, ctx) + local allow_methods = conf.allow_methods + if allow_methods == "**" then + allow_methods = "GET,POST,PUT,DELETE,PATCH,HEAD,OPTIONS,CONNECT,TRACE" + end + + core.response.set_header("Access-Control-Allow-Origin", ctx.cors_allow_origins) + core.response.set_header("Access-Control-Allow-Methods", allow_methods) + core.response.set_header("Access-Control-Max-Age", conf.max_age) + if conf.expose_headers ~= nil and conf.expose_headers ~= "" then + core.response.set_header("Access-Control-Expose-Headers", conf.expose_headers) + end + if conf.allow_headers == "**" then + core.response.set_header("Access-Control-Allow-Headers", + core.request.header(ctx, "Access-Control-Request-Headers")) + else + core.response.set_header("Access-Control-Allow-Headers", conf.allow_headers) + end + if conf.allow_credential then + core.response.set_header("Access-Control-Allow-Credentials", true) + end +end + +local function set_timing_headers(conf, ctx) + if ctx.timing_allow_origin then + core.response.set_header("Timing-Allow-Origin", ctx.timing_allow_origin) + end +end + + +local function process_with_allow_origins(allow_origin_type, allow_origins, ctx, req_origin, + cache_key, cache_version) + if allow_origins == "**" then + allow_origins = req_origin or '*' + end + + local multiple_origin, err + 
if cache_key and cache_version then + multiple_origin, err = lrucache( + cache_key, cache_version, create_multiple_origin_cache, allow_origins + ) + else + multiple_origin, err = core.lrucache.plugin_ctx( + lrucache, ctx, allow_origin_type, create_multiple_origin_cache, allow_origins + ) + end + + if err then + return 500, {message = "get multiple origin cache failed: " .. err} + end + + if multiple_origin then + if multiple_origin[req_origin] then + allow_origins = req_origin + else + return + end + end + + return allow_origins +end + +local function process_with_allow_origins_by_regex(allow_origin_type, + allow_origins_by_regex, conf, ctx, req_origin) + + local allow_origins_by_regex_rules_concat_conf_key = + "allow_origins_by_regex_rules_concat_" .. allow_origin_type + + if not conf[allow_origins_by_regex_rules_concat_conf_key] then + local allow_origins_by_regex_rules = {} + for i, re_rule in ipairs(allow_origins_by_regex) do + allow_origins_by_regex_rules[i] = re_rule + end + conf[allow_origins_by_regex_rules_concat_conf_key] = core.table.concat( + allow_origins_by_regex_rules, "|") + end + + -- core.log.warn("regex: ", conf[allow_origins_by_regex_rules_concat_conf_key], "\n ") + local matched = re_find(req_origin, conf[allow_origins_by_regex_rules_concat_conf_key], "jo") + if matched then + return req_origin + end +end + + +local function match_origins(req_origin, allow_origins) + return req_origin == allow_origins or allow_origins == '*' +end + +local function process_with_allow_origins_by_metadata(allow_origin_type, allow_origins_by_metadata, + ctx, req_origin) + + if allow_origins_by_metadata == nil then + return + end + + local metadata = plugin.plugin_metadata(plugin_name) + if metadata and metadata.value.allow_origins then + local allow_origins_map = metadata.value.allow_origins + for _, key in ipairs(allow_origins_by_metadata) do + local allow_origins_conf = allow_origins_map[key] + local allow_origins = process_with_allow_origins( + allow_origin_type, allow_origins_conf, ctx, req_origin, + plugin_name .. "#" .. 
key, metadata.modifiedIndex + ) + if match_origins(req_origin, allow_origins) then + return req_origin + end + end + end +end + + +function _M.rewrite(conf, ctx) + -- save the original request origin as it may be changed at other phase + ctx.original_request_origin = core.request.header(ctx, "Origin") + if ctx.var.request_method == "OPTIONS" then + return 200 + end +end + + +function _M.header_filter(conf, ctx) + local req_origin = ctx.original_request_origin + -- If allow_origins_by_regex is not nil, should be matched to it only + local allow_origins + local allow_origins_local = false + if conf.allow_origins_by_metadata then + allow_origins = process_with_allow_origins_by_metadata( + TYPE_ACCESS_CONTROL_ALLOW_ORIGIN, conf.allow_origins_by_metadata, ctx, req_origin + ) + if not match_origins(req_origin, allow_origins) then + if conf.allow_origins and conf.allow_origins ~= "*" then + allow_origins_local = true + end + end + else + allow_origins_local = true + end + if conf.allow_origins_by_regex == nil then + if allow_origins_local then + allow_origins = process_with_allow_origins( + TYPE_ACCESS_CONTROL_ALLOW_ORIGIN, conf.allow_origins, ctx, req_origin + ) + end + else + if allow_origins_local then + allow_origins = process_with_allow_origins_by_regex( + TYPE_ACCESS_CONTROL_ALLOW_ORIGIN, conf.allow_origins_by_regex, + conf, ctx, req_origin + ) + end + end + if not match_origins(req_origin, allow_origins) then + allow_origins = process_with_allow_origins_by_metadata( + TYPE_ACCESS_CONTROL_ALLOW_ORIGIN, conf.allow_origins_by_metadata, ctx, req_origin + ) + end + if conf.allow_origins ~= "*" then + core.response.add_header("Vary", "Origin") + end + if allow_origins then + ctx.cors_allow_origins = allow_origins + set_cors_headers(conf, ctx) + end + + local timing_allow_origins + if conf.timing_allow_origins_by_regex == nil and conf.timing_allow_origins then + timing_allow_origins = process_with_allow_origins( + TYPE_TIMING_ALLOW_ORIGIN, conf.timing_allow_origins, ctx, req_origin + ) + elseif conf.timing_allow_origins_by_regex then + timing_allow_origins = process_with_allow_origins_by_regex( + TYPE_TIMING_ALLOW_ORIGIN, conf.timing_allow_origins_by_regex, + conf, ctx, req_origin + ) + end + if timing_allow_origins and match_origins(req_origin, timing_allow_origins) then + ctx.timing_allow_origin = timing_allow_origins + set_timing_headers(conf, ctx) + end + +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/csrf.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/csrf.lua new file mode 100644 index 0000000..4ed2ad6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/csrf.lua @@ -0,0 +1,168 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
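+-- Example (illustrative sketch added for clarity; the key value is a placeholder assumption,
+-- not taken from this repository): a `conf` for the csrf plugin defined below could look like
+--   {
+--     key = "edd1c9f034335f136f87ad84b625c8f1",
+--     expires = 7200,
+--     name = "apisix-csrf-token",
+--   }
+-- For unsafe methods the client must return the token issued via Set-Cookie both as the cookie
+-- and as a request header of the same name.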
+-- +local core = require("apisix.core") +local resty_sha256 = require("resty.sha256") +local str = require("resty.string") +local ngx = ngx +local ngx_encode_base64 = ngx.encode_base64 +local ngx_decode_base64 = ngx.decode_base64 +local ngx_time = ngx.time +local ngx_cookie_time = ngx.cookie_time +local math = math +local SAFE_METHODS = {"GET", "HEAD", "OPTIONS"} + + +local schema = { + type = "object", + properties = { + key = { + description = "use to generate csrf token", + type = "string", + }, + expires = { + description = "expires time(s) for csrf token", + type = "integer", + default = 7200 + }, + name = { + description = "the csrf token name", + type = "string", + default = "apisix-csrf-token" + } + }, + encrypt_fields = {"key"}, + required = {"key"} +} + + +local _M = { + version = 0.1, + priority = 2980, + name = "csrf", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function gen_sign(random, expires, key) + local sha256 = resty_sha256:new() + + local sign = "{expires:" .. expires .. ",random:" .. random .. ",key:" .. key .. "}" + + sha256:update(sign) + local digest = sha256:final() + + return str.to_hex(digest) +end + + +local function gen_csrf_token(conf) + local random = math.random() + local timestamp = ngx_time() + local sign = gen_sign(random, timestamp, conf.key) + + local token = { + random = random, + expires = timestamp, + sign = sign, + } + + local cookie = ngx_encode_base64(core.json.encode(token)) + return cookie +end + + +local function check_csrf_token(conf, ctx, token) + local token_str = ngx_decode_base64(token) + if not token_str then + core.log.error("csrf token base64 decode error") + return false + end + + local token_table, err = core.json.decode(token_str) + if err then + core.log.error("decode token error: ", err) + return false + end + + local random = token_table["random"] + if not random then + core.log.error("no random in token") + return false + end + + local expires = token_table["expires"] + if not expires then + core.log.error("no expires in token") + return false + end + local time_now = ngx_time() + if conf.expires > 0 and time_now - expires > conf.expires then + core.log.error("token has expired") + return false + end + + local sign = gen_sign(random, expires, conf.key) + if token_table["sign"] ~= sign then + core.log.error("Invalid signatures") + return false + end + + return true +end + + +function _M.access(conf, ctx) + local method = core.request.get_method(ctx) + if core.table.array_find(SAFE_METHODS, method) then + return + end + + local header_token = core.request.header(ctx, conf.name) + if not header_token or header_token == "" then + return 401, {error_msg = "no csrf token in headers"} + end + + local cookie_token = ctx.var["cookie_" .. conf.name] + if not cookie_token then + return 401, {error_msg = "no csrf cookie"} + end + + if header_token ~= cookie_token then + return 401, {error_msg = "csrf token mismatch"} + end + + local result = check_csrf_token(conf, ctx, cookie_token) + if not result then + return 401, {error_msg = "Failed to verify the csrf token signature"} + end +end + + +function _M.header_filter(conf, ctx) + local csrf_token = gen_csrf_token(conf) + local cookie = conf.name .. "=" .. csrf_token .. ";path=/;SameSite=Lax;Expires=" + .. 
ngx_cookie_time(ngx_time() + conf.expires) + core.response.add_header("Set-Cookie", cookie) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/datadog.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/datadog.lua new file mode 100644 index 0000000..972c0a2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/datadog.lua @@ -0,0 +1,251 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local fetch_log = require("apisix.utils.log-util").get_full_log +local service_fetch = require("apisix.http.service").get +local ngx = ngx +local udp = ngx.socket.udp +local format = string.format +local concat = table.concat +local tostring = tostring + +local plugin_name = "datadog" +local defaults = { + host = "127.0.0.1", + port = 8125, + namespace = "apisix", + constant_tags = {"source:apisix"} +} + +local batch_processor_manager = bp_manager_mod.new(plugin_name) +local schema = { + type = "object", + properties = { + prefer_name = {type = "boolean", default = true} + } +} + +local metadata_schema = { + type = "object", + properties = { + host = {type = "string", default= defaults.host}, + port = {type = "integer", minimum = 0, default = defaults.port}, + namespace = {type = "string", default = defaults.namespace}, + constant_tags = { + type = "array", + items = {type = "string"}, + default = defaults.constant_tags + } + }, +} + +local _M = { + version = 0.1, + priority = 495, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) +end + + +local function generate_tag(entry, const_tags) + local tags + if const_tags and #const_tags > 0 then + tags = core.table.clone(const_tags) + else + tags = {} + end + + if entry.route_id and entry.route_id ~= "" then + core.table.insert(tags, "route_name:" .. entry.route_id) + end + + if entry.service_id and entry.service_id ~= "" then + core.table.insert(tags, "service_name:" .. entry.service_id) + end + + if entry.consumer and entry.consumer.username then + core.table.insert(tags, "consumer:" .. entry.consumer.username) + end + if entry.balancer_ip ~= "" then + core.table.insert(tags, "balancer_ip:" .. entry.balancer_ip) + end + if entry.response.status then + core.table.insert(tags, "response_status:" .. entry.response.status) + end + if entry.scheme ~= "" then + core.table.insert(tags, "scheme:" .. 
entry.scheme) + end + + if #tags > 0 then + return "|#" .. concat(tags, ',') + end + + return "" +end + + +local function send_metric_over_udp(entry, metadata) + local err_msg + local sock = udp() + local host, port = metadata.value.host, metadata.value.port + + local ok, err = sock:setpeername(host, port) + if not ok then + return false, "failed to connect to UDP server: host[" .. host + .. "] port[" .. tostring(port) .. "] err: " .. err + end + + -- Generate prefix & suffix according dogstatsd udp data format. + local suffix = generate_tag(entry, metadata.value.constant_tags) + local prefix = metadata.value.namespace + if prefix ~= "" then + prefix = prefix .. "." + end + + -- request counter + ok, err = sock:send(format("%s:%s|%s%s", prefix .. "request.counter", 1, "c", suffix)) + if not ok then + err_msg = "error sending request.counter: " .. err + core.log.error("failed to report request count to dogstatsd server: host[" .. host + .. "] port[" .. tostring(port) .. "] err: " .. err) + end + + -- request latency histogram + ok, err = sock:send(format("%s:%s|%s%s", prefix .. "request.latency", + entry.latency, "h", suffix)) + if not ok then + err_msg = "error sending request.latency: " .. err + core.log.error("failed to report request latency to dogstatsd server: host[" + .. host .. "] port[" .. tostring(port) .. "] err: " .. err) + end + + -- upstream latency + if entry.upstream_latency then + ok, err = sock:send(format("%s:%s|%s%s", prefix .. "upstream.latency", + entry.upstream_latency, "h", suffix)) + if not ok then + err_msg = "error sending upstream.latency: " .. err + core.log.error("failed to report upstream latency to dogstatsd server: host[" + .. host .. "] port[" .. tostring(port) .. "] err: " .. err) + end + end + + -- apisix_latency + ok, err = sock:send(format("%s:%s|%s%s", prefix .. "apisix.latency", + entry.apisix_latency, "h", suffix)) + if not ok then + err_msg = "error sending apisix.latency: " .. err + core.log.error("failed to report apisix latency to dogstatsd server: host[" .. host + .. "] port[" .. tostring(port) .. "] err: " .. err) + end + + -- request body size timer + ok, err = sock:send(format("%s:%s|%s%s", prefix .. "ingress.size", + entry.request.size, "ms", suffix)) + if not ok then + err_msg = "error sending ingress.size: " .. err + core.log.error("failed to report req body size to dogstatsd server: host[" .. host + .. "] port[" .. tostring(port) .. "] err: " .. err) + end + + -- response body size timer + ok, err = sock:send(format("%s:%s|%s%s", prefix .. "egress.size", + entry.response.size, "ms", suffix)) + if not ok then + err_msg = "error sending egress.size: " .. err + core.log.error("failed to report response body size to dogstatsd server: host[" + .. host .. "] port[" .. tostring(port) .. "] err: " .. 
err) + end + + ok, err = sock:close() + if not ok then + core.log.error("failed to close the UDP connection, host[", + host, "] port[", port, "] ", err) + end + + if not err_msg then + return true + end + + return false, err_msg +end + + +local function push_metrics(entries) + -- Fetching metadata details + local metadata = plugin.plugin_metadata(plugin_name) + core.log.info("metadata: ", core.json.delay_encode(metadata)) + + if not metadata then + core.log.info("received nil metadata: using metadata defaults: ", + core.json.delay_encode(defaults, true)) + metadata = {} + metadata.value = defaults + end + core.log.info("sending batch metrics to dogstatsd: ", metadata.value.host, + ":", metadata.value.port) + + for i = 1, #entries do + local ok, err = send_metric_over_udp(entries[i], metadata) + if not ok then + return false, err, i + end + end + + return true +end + + +function _M.log(conf, ctx) + local entry = fetch_log(ngx, {}) + entry.balancer_ip = ctx.balancer_ip or "" + entry.scheme = ctx.upstream_scheme or "" + + -- if prefer_name is set, fetch the service/route name. If the name is nil, fall back to id. + if conf.prefer_name then + if entry.service_id and entry.service_id ~= "" then + local svc = service_fetch(entry.service_id) + + if svc and svc.value.name ~= "" then + entry.service_id = svc.value.name + end + end + + if ctx.route_name and ctx.route_name ~= "" then + entry.route_id = ctx.route_name + end + end + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, push_metrics) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/degraphql.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/degraphql.lua new file mode 100644 index 0000000..e47a276 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/degraphql.lua @@ -0,0 +1,160 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
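+-- Example (illustrative sketch added for clarity; the query and variable names are placeholder
+-- assumptions, not taken from this repository): a `conf` for the degraphql plugin defined below
+-- could look like
+--   {
+--     query = "query($name: String!) { persons(filter: { name: $name }) { id name } }",
+--     variables = { "name" },
+--   }
+-- Incoming GET/POST parameters named in `variables` are repacked into a GraphQL request body.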
+-- +local core = require("apisix.core") +local gq_parse = require("graphql").parse +local req_set_body_data = ngx.req.set_body_data +local ipairs = ipairs +local pcall = pcall +local type = type + + +local schema = { + type = "object", + properties = { + query = { + type = "string", + minLength = 1, + maxLength = 1024, + }, + variables = { + type = "array", + items = { + type = "string" + }, + minItems = 1, + }, + operation_name = { + type = "string", + minLength = 1, + maxLength = 1024 + }, + }, + required = {"query"}, +} + +local plugin_name = "degraphql" + +local _M = { + version = 0.1, + priority = 509, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + local ok, res = pcall(gq_parse, conf.query) + if not ok then + return false, "failed to parse query: " .. res + end + + if #res.definitions > 1 and not conf.operation_name then + return false, "operation_name is required if multiple operations are present in the query" + end + return true +end + + +local function fetch_post_variables(conf) + local req_body, err = core.request.get_body() + if err ~= nil then + core.log.error("failed to get request body: ", err) + return nil, 503 + end + + if not req_body then + core.log.error("missing request body") + return nil, 400 + end + + -- JSON as the default content type + req_body, err = core.json.decode(req_body) + if type(req_body) ~= "table" then + core.log.error("invalid request body can't be decoded: ", err or "bad type") + return nil, 400 + end + + local variables = {} + for _, v in ipairs(conf.variables) do + variables[v] = req_body[v] + end + + return variables +end + + +local function fetch_get_variables(conf) + local args = core.request.get_uri_args() + local variables = {} + for _, v in ipairs(conf.variables) do + variables[v] = args[v] + end + + return variables +end + + +function _M.access(conf, ctx) + local meth = core.request.get_method() + if meth ~= "POST" and meth ~= "GET" then + return 405 + end + + local new_body = core.table.new(0, 3) + + if conf.variables then + local variables, code + if meth == "POST" then + variables, code = fetch_post_variables(conf) + else + variables, code = fetch_get_variables(conf) + end + + if not variables then + return code + end + + if meth == "POST" then + new_body["variables"] = variables + else + new_body["variables"] = core.json.encode(variables) + end + end + + new_body["operationName"] = conf.operation_name + new_body["query"] = conf.query + + if meth == "POST" then + if not conf.variables then + -- the set_body_data requires to read the body first + core.request.get_body() + end + + core.request.set_header(ctx, "Content-Type", "application/json") + req_set_body_data(core.json.encode(new_body)) + else + core.request.set_uri_args(ctx, new_body) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/dubbo-proxy.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/dubbo-proxy.lua new file mode 100644 index 0000000..57a093f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/dubbo-proxy.lua @@ -0,0 +1,69 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx_var = ngx.var + + +local plugin_name = "dubbo-proxy" + +local schema = { + type = "object", + properties = { + service_name = { + type = "string", + minLength = 1, + }, + service_version = { + type = "string", + pattern = [[^\d+\.\d+\.\d+]], + }, + method = { + type = "string", + minLength = 1, + }, + }, + required = { "service_name", "service_version"}, +} + +local _M = { + version = 0.1, + priority = 507, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + ctx.dubbo_proxy_enabled = true + + ngx_var.dubbo_service_name = conf.service_name + ngx_var.dubbo_service_version = conf.service_version + if not conf.method then + -- remove the prefix '/' from $uri + ngx_var.dubbo_method = core.string.sub(ngx_var.uri, 2) + else + ngx_var.dubbo_method = conf.method + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/echo.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/echo.lua new file mode 100644 index 0000000..525c175 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/echo.lua @@ -0,0 +1,121 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
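+-- Example (illustrative sketch added for clarity; the body strings and header value are
+-- placeholder assumptions, not taken from this repository): a `conf` for the echo plugin defined
+-- below could look like
+--   {
+--     before_body = "before the body modification ",
+--     body = "hello upstream",
+--     after_body = " after the body modification",
+--     headers = { ["X-Server-id"] = "3" },
+--   }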
+-- +local core = require("apisix.core") +local pairs = pairs +local type = type +local ngx = ngx + + +local schema = { + type = "object", + properties = { + before_body = { + description = "body before the filter phase.", + type = "string" + }, + body = { + description = "body to replace upstream response.", + type = "string" + }, + after_body = { + description = "body after the modification of filter phase.", + type = "string" + }, + headers = { + description = "new headers for response", + type = "object", + minProperties = 1, + }, + }, + anyOf = { + {required = {"before_body"}}, + {required = {"body"}}, + {required = {"after_body"}} + }, + minProperties = 1, +} + +local plugin_name = "echo" + +local _M = { + version = 0.1, + priority = 412, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +function _M.body_filter(conf, ctx) + if conf.body then + ngx.arg[1] = conf.body + ngx.arg[2] = true + end + + if conf.before_body and not ctx.plugin_echo_body_set then + ngx.arg[1] = conf.before_body .. ngx.arg[1] + ctx.plugin_echo_body_set = true + end + + if ngx.arg[2] and conf.after_body then + ngx.arg[1] = ngx.arg[1] .. conf.after_body + end +end + + +function _M.header_filter(conf, ctx) + if conf.body or conf.before_body or conf.after_body then + core.response.clear_header_as_body_modified() + end + + if not conf.headers then + return + end + + if not conf.headers_arr then + conf.headers_arr = {} + + for field, value in pairs(conf.headers) do + if type(field) == 'string' + and (type(value) == 'string' or type(value) == 'number') then + if #field == 0 then + return false, 'invalid field length in header' + end + core.table.insert(conf.headers_arr, field) + core.table.insert(conf.headers_arr, value) + else + return false, 'invalid type as header value' + end + end + end + + local field_cnt = #conf.headers_arr + for i = 1, field_cnt, 2 do + ngx.header[conf.headers_arr[i]] = conf.headers_arr[i+1] + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/elasticsearch-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/elasticsearch-logger.lua new file mode 100644 index 0000000..09dcbd7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/elasticsearch-logger.lua @@ -0,0 +1,281 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
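+-- Example (illustrative sketch added for clarity; the endpoint and credentials are placeholder
+-- assumptions, not taken from this repository): a `conf` for the elasticsearch-logger plugin
+-- defined below could look like the following; note the schema requires endpoint addresses
+-- without a trailing slash.
+--   {
+--     endpoint_addrs = { "http://127.0.0.1:9200" },
+--     field = { index = "services" },
+--     auth = { username = "elastic", password = "123456" },
+--     timeout = 10,
+--     ssl_verify = false,
+--   }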
+-- + +local core = require("apisix.core") +local http = require("resty.http") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") + +local ngx = ngx +local str_format = core.string.format +local math_random = math.random + +local plugin_name = "elasticsearch-logger" +local batch_processor_manager = bp_manager_mod.new(plugin_name) + +local schema = { + type = "object", + properties = { + -- deprecated, use "endpoint_addrs" instead + endpoint_addr = { + type = "string", + pattern = "[^/]$", + }, + endpoint_addrs = { + type = "array", + minItems = 1, + items = { + type = "string", + pattern = "[^/]$", + }, + }, + field = { + type = "object", + properties = { + index = { type = "string"}, + }, + required = {"index"} + }, + log_format = {type = "object"}, + auth = { + type = "object", + properties = { + username = { + type = "string", + minLength = 1 + }, + password = { + type = "string", + minLength = 1 + }, + }, + required = {"username", "password"}, + }, + timeout = { + type = "integer", + minimum = 1, + default = 10 + }, + ssl_verify = { + type = "boolean", + default = true + }, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + encrypt_fields = {"auth.password"}, + oneOf = { + {required = {"endpoint_addr", "field"}}, + {required = {"endpoint_addrs", "field"}} + }, +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local _M = { + version = 0.1, + priority = 413, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local check = {"endpoint_addrs"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + return core.schema.check(schema, conf) +end + + +local function get_es_major_version(uri, conf) + local httpc = http.new() + if not httpc then + return nil, "failed to create http client" + end + local headers = {} + if conf.auth then + local authorization = "Basic " .. ngx.encode_base64( + conf.auth.username .. ":" .. conf.auth.password + ) + headers["Authorization"] = authorization + end + httpc:set_timeout(conf.timeout * 1000) + local res, err = httpc:request_uri(uri, { + ssl_verify = conf.ssl_verify, + method = "GET", + headers = headers, + }) + if not res then + return false, err + end + if res.status ~= 200 then + return nil, str_format("server returned status: %d, body: %s", + res.status, res.body or "") + end + local json_body, err = core.json.decode(res.body) + if not json_body then + return nil, "failed to decode response body: " .. err + end + if not json_body.version or not json_body.version.number then + return nil, "failed to get version from response body" + end + + local major_version = json_body.version.number:match("^(%d+)%.") + if not major_version then + return nil, "invalid version format: " .. 
json_body.version.number + end + + return major_version +end + + +local function get_logger_entry(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + local body = { + index = { + _index = conf.field.index + } + } + -- for older version type is required + if conf._version == "6" or conf._version == "5" then + body.index._type = "_doc" + end + return core.json.encode(body) .. "\n" .. + core.json.encode(entry) .. "\n" +end + +local function fetch_and_update_es_version(conf) + if conf._version then + return + end + local selected_endpoint_addr + if conf.endpoint_addr then + selected_endpoint_addr = conf.endpoint_addr + else + selected_endpoint_addr = conf.endpoint_addrs[math_random(#conf.endpoint_addrs)] + end + local major_version, err = get_es_major_version(selected_endpoint_addr, conf) + if err then + core.log.error("failed to get Elasticsearch version: ", err) + return + end + conf._version = major_version +end + + +local function send_to_elasticsearch(conf, entries) + local httpc, err = http.new() + if not httpc then + return false, str_format("create http error: %s", err) + end + fetch_and_update_es_version(conf) + local selected_endpoint_addr + if conf.endpoint_addr then + selected_endpoint_addr = conf.endpoint_addr + else + selected_endpoint_addr = conf.endpoint_addrs[math_random(#conf.endpoint_addrs)] + end + local uri = selected_endpoint_addr .. "/_bulk" + local body = core.table.concat(entries, "") + local headers = { + ["Content-Type"] = "application/x-ndjson", + ["Accept"] = "application/vnd.elasticsearch+json" + } + if conf.auth then + local authorization = "Basic " .. ngx.encode_base64( + conf.auth.username .. ":" .. conf.auth.password + ) + headers["Authorization"] = authorization + end + + core.log.info("uri: ", uri, ", body: ", body) + + httpc:set_timeout(conf.timeout * 1000) + local resp, err = httpc:request_uri(uri, { + ssl_verify = conf.ssl_verify, + method = "POST", + headers = headers, + body = body + }) + if not resp then + return false, err + end + + if resp.status ~= 200 then + return false, str_format("elasticsearch server returned status: %d, body: %s", + resp.status, resp.body or "") + end + + return true +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + +function _M.access(conf) + -- fetch_and_update_es_version will call ES server only the first time + -- so this should not amount to considerable overhead + fetch_and_update_es_version(conf) +end + +function _M.log(conf, ctx) + local entry = get_logger_entry(conf, ctx) + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + local process = function(entries) + return send_to_elasticsearch(conf, entries) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, process) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/error-log-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/error-log-logger.lua new file mode 100644 index 0000000..88eca65 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/error-log-logger.lua @@ -0,0 +1,510 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local errlog = require("ngx.errlog") +local batch_processor = require("apisix.utils.batch-processor") +local plugin = require("apisix.plugin") +local timers = require("apisix.timers") +local http = require("resty.http") +local producer = require("resty.kafka.producer") +local plugin_name = "error-log-logger" +local table = core.table +local schema_def = core.schema +local ngx = ngx +local tcp = ngx.socket.tcp +local tostring = tostring +local ipairs = ipairs +local string = require("string") +local lrucache = core.lrucache.new({ + ttl = 300, count = 32 +}) +local kafka_prod_lrucache = core.lrucache.new({ + ttl = 300, count = 32 +}) + + +local metadata_schema = { + type = "object", + properties = { + tcp = { + type = "object", + properties = { + host = schema_def.host_def, + port = {type = "integer", minimum = 0}, + tls = {type = "boolean", default = false}, + tls_server_name = {type = "string"}, + }, + required = {"host", "port"} + }, + skywalking = { + type = "object", + properties = { + endpoint_addr = {schema_def.uri, default = "http://127.0.0.1:12900/v3/logs"}, + service_name = {type = "string", default = "APISIX"}, + service_instance_name = {type="string", default = "APISIX Service Instance"}, + }, + }, + clickhouse = { + type = "object", + properties = { + endpoint_addr = {schema_def.uri_def, default="http://127.0.0.1:8123"}, + user = {type = "string", default = "default"}, + password = {type = "string", default = ""}, + database = {type = "string", default = ""}, + logtable = {type = "string", default = ""}, + }, + required = {"endpoint_addr", "user", "password", "database", "logtable"} + }, + kafka = { + type = "object", + properties = { + brokers = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + host = { + type = "string", + description = "the host of kafka broker", + }, + port = { + type = "integer", + minimum = 1, + maximum = 65535, + description = "the port of kafka broker", + }, + sasl_config = { + type = "object", + description = "sasl config", + properties = { + mechanism = { + type = "string", + default = "PLAIN", + enum = {"PLAIN"}, + }, + user = { type = "string", description = "user" }, + password = { type = "string", description = "password" }, + }, + required = {"user", "password"}, + }, + }, + required = {"host", "port"}, + }, + uniqueItems = true, + }, + kafka_topic = {type = "string"}, + producer_type = { + type = "string", + default = "async", + enum = {"async", "sync"}, + }, + required_acks = { + type = "integer", + default = 1, + enum = { 0, 1, -1 }, + }, + key = {type = "string"}, + -- in lua-resty-kafka, cluster_name is defined as number + -- see https://github.com/doujiang24/lua-resty-kafka#new-1 + cluster_name = {type = "integer", minimum = 1, default = 1}, + meta_refresh_interval = {type = "integer", minimum = 1, default = 30}, + }, + required = {"brokers", "kafka_topic"}, + }, + name = {type = "string", default = plugin_name}, + level = {type = "string", default = "WARN", enum = {"STDERR", "EMERG", "ALERT", "CRIT", + "ERR", "ERROR", "WARN", "NOTICE", "INFO", 
"DEBUG"}}, + timeout = {type = "integer", minimum = 1, default = 3}, + keepalive = {type = "integer", minimum = 1, default = 30}, + batch_max_size = {type = "integer", minimum = 0, default = 1000}, + max_retry_count = {type = "integer", minimum = 0, default = 0}, + retry_delay = {type = "integer", minimum = 0, default = 1}, + buffer_duration = {type = "integer", minimum = 1, default = 60}, + inactive_timeout = {type = "integer", minimum = 1, default = 3}, + }, + oneOf = { + {required = {"skywalking"}}, + {required = {"tcp"}}, + {required = {"clickhouse"}}, + {required = {"kafka"}}, + -- for compatible with old schema + {required = {"host", "port"}} + }, + encrypt_fields = {"clickhouse.password"}, +} + + +local schema = { + type = "object", +} + + +local log_level = { + STDERR = ngx.STDERR, + EMERG = ngx.EMERG, + ALERT = ngx.ALERT, + CRIT = ngx.CRIT, + ERR = ngx.ERR, + ERROR = ngx.ERR, + WARN = ngx.WARN, + NOTICE = ngx.NOTICE, + INFO = ngx.INFO, + DEBUG = ngx.DEBUG +} + + +local config = {} +local log_buffer + + +local _M = { + version = 0.1, + priority = 1091, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, + scope = "global", +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local check = {"skywalking.endpoint_addr", "clickhouse.endpoint_addr"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"tcp.tls"}, conf, plugin_name) + + return core.schema.check(schema, conf) +end + + +local function send_to_tcp_server(data) + local sock, soc_err = tcp() + + if not sock then + return false, "failed to init the socket " .. soc_err + end + + sock:settimeout(config.timeout * 1000) + + local tcp_config = config.tcp + local ok, err = sock:connect(tcp_config.host, tcp_config.port) + if not ok then + return false, "failed to connect the TCP server: host[" .. tcp_config.host + .. "] port[" .. tostring(tcp_config.port) .. "] err: " .. err + end + + if tcp_config.tls then + ok, err = sock:sslhandshake(false, tcp_config.tls_server_name, false) + if not ok then + sock:close() + return false, "failed to perform TLS handshake to TCP server: host[" + .. tcp_config.host .. "] port[" .. tostring(tcp_config.port) .. "] err: " .. err + end + end + + local bytes, err = sock:send(data) + if not bytes then + sock:close() + return false, "failed to send data to TCP server: host[" .. tcp_config.host + .. "] port[" .. tostring(tcp_config.port) .. "] err: " .. 
err + end + + sock:setkeepalive(config.keepalive * 1000) + return true +end + + +local function send_to_skywalking(log_message) + local err_msg + local res = true + core.log.info("sending a batch logs to ", config.skywalking.endpoint_addr) + + local httpc = http.new() + httpc:set_timeout(config.timeout * 1000) + + local entries = {} + local service_instance_name = config.skywalking.service_instance_name + if service_instance_name == "$hostname" then + service_instance_name = core.utils.gethostname() + end + + for i = 1, #log_message, 2 do + local content = { + service = config.skywalking.service_name, + serviceInstance = service_instance_name, + endpoint = "", + body = { + text = { + text = log_message[i] + } + } + } + table.insert(entries, content) + end + + local httpc_res, httpc_err = httpc:request_uri( + config.skywalking.endpoint_addr, + { + method = "POST", + body = core.json.encode(entries), + keepalive_timeout = config.keepalive * 1000, + headers = { + ["Content-Type"] = "application/json", + } + } + ) + + if not httpc_res then + return false, "error while sending data to skywalking[" + .. config.skywalking.endpoint_addr .. "] " .. httpc_err + end + + -- some error occurred in the server + if httpc_res.status >= 400 then + res = false + err_msg = string.format( + "server returned status code[%s] skywalking[%s] body[%s]", + httpc_res.status, + config.skywalking.endpoint_addr.endpoint_addr, + httpc_res:read_body() + ) + end + + return res, err_msg +end + + +local function send_to_clickhouse(log_message) + local err_msg + local res = true + core.log.info("sending a batch logs to ", config.clickhouse.endpoint_addr) + + local httpc = http.new() + httpc:set_timeout(config.timeout * 1000) + + local entries = {} + for i = 1, #log_message, 2 do + -- TODO Here save error log as a whole string to clickhouse 'data' column. + -- We will add more columns in the future. + table.insert(entries, core.json.encode({data=log_message[i]})) + end + + local httpc_res, httpc_err = httpc:request_uri( + config.clickhouse.endpoint_addr, + { + method = "POST", + body = "INSERT INTO " .. config.clickhouse.logtable .." FORMAT JSONEachRow " + .. table.concat(entries, " "), + keepalive_timeout = config.keepalive * 1000, + headers = { + ["Content-Type"] = "application/json", + ["X-ClickHouse-User"] = config.clickhouse.user, + ["X-ClickHouse-Key"] = config.clickhouse.password, + ["X-ClickHouse-Database"] = config.clickhouse.database + } + } + ) + + if not httpc_res then + return false, "error while sending data to clickhouse[" + .. config.clickhouse.endpoint_addr .. "] " .. httpc_err + end + + -- some error occurred in the server + if httpc_res.status >= 400 then + res = false + err_msg = string.format( + "server returned status code[%s] clickhouse[%s] body[%s]", + httpc_res.status, + config.clickhouse.endpoint_addr.endpoint_addr, + httpc_res:read_body() + ) + end + + return res, err_msg +end + + +local function update_filter(value) + local level = log_level[value.level] + local status, err = errlog.set_filter_level(level) + if not status then + return nil, "failed to set filter level by ngx.errlog, the error is :" .. 
err + else + core.log.notice("set the filter_level to ", value.level) + end + + return value +end + + +local function create_producer(broker_list, broker_config, cluster_name) + core.log.info("create new kafka producer instance") + return producer:new(broker_list, broker_config, cluster_name) +end + + +local function send_to_kafka(log_message) + -- avoid race of the global config + local metadata = plugin.plugin_metadata(plugin_name) + if not (metadata and metadata.value and metadata.modifiedIndex) then + return false, "please set the correct plugin_metadata for " .. plugin_name + end + local config, err = lrucache(plugin_name, metadata.modifiedIndex, update_filter, metadata.value) + if not config then + return false, "get config failed: " .. err + end + + core.log.info("sending a batch logs to kafka brokers: ", + core.json.delay_encode(config.kafka.brokers)) + + local broker_config = {} + broker_config["request_timeout"] = config.timeout * 1000 + broker_config["producer_type"] = config.kafka.producer_type + broker_config["required_acks"] = config.kafka.required_acks + broker_config["refresh_interval"] = config.kafka.meta_refresh_interval * 1000 + + -- reuse producer via kafka_prod_lrucache to avoid unbalanced partitions of messages in kafka + local prod, err = kafka_prod_lrucache(plugin_name, metadata.modifiedIndex, + create_producer, config.kafka.brokers, broker_config, + config.kafka.cluster_name) + if not prod then + return false, "get kafka producer failed: " .. err + end + core.log.info("kafka cluster name ", config.kafka.cluster_name, ", broker_list[1] port ", + prod.client.broker_list[1].port) + + local ok + for i = 1, #log_message, 2 do + ok, err = prod:send(config.kafka.kafka_topic, + config.kafka.key, core.json.encode(log_message[i])) + if not ok then + return false, "failed to send data to Kafka topic: " .. err .. + ", brokers: " .. core.json.encode(config.kafka.brokers) + end + core.log.info("send data to kafka: ", core.json.delay_encode(log_message[i])) + end + + return true +end + + +local function send(data) + if config.skywalking then + return send_to_skywalking(data) + elseif config.clickhouse then + return send_to_clickhouse(data) + elseif config.kafka then + return send_to_kafka(data) + end + return send_to_tcp_server(data) +end + + +local function process() + local metadata = plugin.plugin_metadata(plugin_name) + if not (metadata and metadata.value and metadata.modifiedIndex) then + core.log.info("please set the correct plugin_metadata for ", plugin_name) + return + else + local err + config, err = lrucache(plugin_name, metadata.modifiedIndex, update_filter, metadata.value) + if not config then + core.log.warn("set log filter failed for ", err) + return + end + if not (config.tcp or config.skywalking or config.clickhouse or config.kafka) then + config.tcp = { + host = config.host, + port = config.port, + tls = config.tls, + tls_server_name = config.tls_server_name + } + core.log.warn( + string.format("The schema is out of date. Please update to the new configuration, " + .. "for example: {\"tcp\": {\"host\": \"%s\", \"port\": \"%s\"}}", + config.host, config.port + )) + end + end + + local err_level = log_level[metadata.value.level] + local entries = {} + local logs = errlog.get_logs(9) + while ( logs and #logs>0 ) do + for i = 1, #logs, 3 do + -- There will be some stale error logs after the filter level changed. + -- We should avoid reporting them. 
+ if logs[i] <= err_level then + table.insert(entries, logs[i + 2]) + table.insert(entries, "\n") + end + end + logs = errlog.get_logs(9) + end + + if #entries == 0 then + return + end + + if log_buffer then + for _, v in ipairs(entries) do + log_buffer:push(v) + end + return + end + + local config_bat = { + name = config.name, + retry_delay = config.retry_delay, + batch_max_size = config.batch_max_size, + max_retry_count = config.max_retry_count, + buffer_duration = config.buffer_duration, + inactive_timeout = config.inactive_timeout, + } + + local err + log_buffer, err = batch_processor:new(send, config_bat) + + if not log_buffer then + core.log.warn("error when creating the batch processor: ", err) + return + end + + for _, v in ipairs(entries) do + log_buffer:push(v) + end + +end + + +function _M.init() + timers.register_timer("plugin#error-log-logger", process) +end + + +function _M.destroy() + timers.unregister_timer("plugin#error-log-logger") +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/example-plugin.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/example-plugin.lua new file mode 100644 index 0000000..767ccfa --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/example-plugin.lua @@ -0,0 +1,152 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local ngx = ngx +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local upstream = require("apisix.upstream") + +local schema = { + type = "object", + properties = { + i = {type = "number", minimum = 0}, + s = {type = "string"}, + t = {type = "array", minItems = 1}, + ip = {type = "string"}, + port = {type = "integer"}, + }, + required = {"i"}, +} + +local metadata_schema = { + type = "object", + properties = { + ikey = {type = "number", minimum = 0}, + skey = {type = "string"}, + }, + required = {"ikey", "skey"}, +} + +local plugin_name = "example-plugin" + +local _M = { + version = 0.1, + priority = 0, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) +end + + +function _M.init() + -- call this function when plugin is loaded + local attr = plugin.plugin_attr(plugin_name) + if attr then + core.log.info(plugin_name, " get plugin attr val: ", attr.val) + end +end + + +function _M.destroy() + -- call this function when plugin is unloaded +end + + +function _M.rewrite(conf, ctx) + core.log.warn("plugin rewrite phase, conf: ", core.json.encode(conf)) + core.log.warn("conf_type: ", ctx.conf_type) + core.log.warn("conf_id: ", ctx.conf_id) + core.log.warn("conf_version: ", ctx.conf_version) +end + + +function _M.access(conf, ctx) + core.log.warn("plugin access phase, conf: ", core.json.encode(conf)) + -- return 200, {message = "hit example plugin"} + + if not conf.ip then + return + end + + local up_conf = { + type = "roundrobin", + nodes = { + {host = conf.ip, port = conf.port, weight = 1} + } + } + + local ok, err = upstream.check_schema(up_conf) + if not ok then + return 500, err + end + + local matched_route = ctx.matched_route + upstream.set(ctx, up_conf.type .. "#route_" .. matched_route.value.id, + ctx.conf_version, up_conf) + return +end + +function _M.header_filter(conf, ctx) + core.log.warn("plugin header_filter phase, conf: ", core.json.encode(conf)) +end + + +function _M.body_filter(conf, ctx) + core.log.warn("plugin body_filter phase, eof: ", ngx.arg[2], + ", conf: ", core.json.encode(conf)) +end + + +function _M.delayed_body_filter(conf, ctx) + core.log.warn("plugin delayed_body_filter phase, eof: ", ngx.arg[2], + ", conf: ", core.json.encode(conf)) +end + +function _M.log(conf, ctx) + core.log.warn("plugin log phase, conf: ", core.json.encode(conf)) +end + + +local function hello() + local args = ngx.req.get_uri_args() + if args["json"] then + return 200, {msg = "world"} + else + return 200, "world\n" + end +end + + +function _M.control_api() + return { + { + methods = {"GET"}, + uris = {"/v1/plugin/example-plugin/hello"}, + handler = hello, + } + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-req.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-req.lua new file mode 100644 index 0000000..a8b809f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-req.lua @@ -0,0 +1,40 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ext = require("apisix.plugins.ext-plugin.init") + + +local name = "ext-plugin-post-req" +local _M = { + version = 0.1, + priority = -3000, + name = name, + schema = ext.schema, +} + + +function _M.check_schema(conf) + return core.schema.check(_M.schema, conf) +end + + +function _M.access(conf, ctx) + return ext.communicate(conf, ctx, name) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-resp.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-resp.lua new file mode 100644 index 0000000..40d3ca4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-post-resp.lua @@ -0,0 +1,183 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local ext = require("apisix.plugins.ext-plugin.init") +local helper = require("apisix.plugins.ext-plugin.helper") +local constants = require("apisix.constants") +local http = require("resty.http") + +local ngx = ngx +local ngx_print = ngx.print +local ngx_flush = ngx.flush +local string = string +local str_sub = string.sub + + +local name = "ext-plugin-post-resp" +local _M = { + version = 0.1, + priority = -4000, + name = name, + schema = ext.schema, +} + + +local function include_req_headers(ctx) + -- TODO: handle proxy_set_header + return core.request.headers(ctx) +end + + +local function close(http_obj) + -- TODO: keepalive + local ok, err = http_obj:close() + if not ok then + core.log.error("close http object failed: ", err) + end +end + + +local function get_response(ctx, http_obj) + local ok, err = http_obj:connect({ + scheme = ctx.upstream_scheme, + host = ctx.picked_server.host, + port = ctx.picked_server.port, + }) + + if not ok then + return nil, err + end + -- TODO: set timeout + local uri, args + if ctx.var.upstream_uri == "" then + -- use original uri instead of rewritten one + uri = ctx.var.uri + else + uri = ctx.var.upstream_uri + + -- the rewritten one may contain new args + local index = core.string.find(uri, "?") + if index then + local raw_uri = uri + uri = str_sub(raw_uri, 1, index - 1) + args = str_sub(raw_uri, index + 1) + end + end + local params = { + path = uri, + query = args or ctx.var.args, + headers = include_req_headers(ctx), + method = core.request.get_method(), + } + + local body, err = core.request.get_body() + if err then + return nil, err + end + + if body then + params["body"] = body + end + + local res, err = http_obj:request(params) + if not res then + return nil, err + end + + return res, err +end + +local function send_chunk(chunk) + if not chunk then + return nil + end + + local ok, print_err = ngx_print(chunk) + if not ok then + return "output response failed: ".. (print_err or "") + end + local ok, flush_err = ngx_flush(true) + if not ok then + core.log.warn("flush response failed: ", flush_err) + end + + return nil +end + +-- TODO: response body is empty (304 or HEAD) +-- If the upstream returns 304 or the request method is HEAD, +-- there is no response body. In this case, +-- we need to send a response to the client in the plugin, +-- instead of continuing to execute the subsequent plugin. +local function send_response(ctx, res, code) + ngx.status = code or res.status + + local chunks = ctx.runner_ext_response_body + if chunks then + for i=1, #chunks do + local err = send_chunk(chunks[i]) + if err then + return err + end + end + return + end + + return helper.response_reader(res.body_reader, send_chunk) +end + + +function _M.check_schema(conf) + return core.schema.check(_M.schema, conf) +end + + +function _M.before_proxy(conf, ctx) + local http_obj = http.new() + local res, err = get_response(ctx, http_obj) + if not res or err then + core.log.error("failed to request: ", err or "") + close(http_obj) + return 502 + end + ctx.runner_ext_response = res + + core.log.info("response info, status: ", res.status) + core.log.info("response info, headers: ", core.json.delay_encode(res.headers)) + + local code, body = ext.communicate(conf, ctx, name, constants.RPC_HTTP_RESP_CALL) + if body then + close(http_obj) + -- if the body is changed, the code will be set. + return code, body + end + core.log.info("ext-plugin will send response") + + -- send origin response, status maybe changed. 
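+    -- Note (added comment, not in upstream source): the runner left the body
+    -- untouched, so stream the original upstream response back to the client;
+    -- if the runner set a status code it overrides res.status (see send_response above).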
+ err = send_response(ctx, res, code) + close(http_obj) + + if err then + core.log.error(err) + return not ngx.headers_sent and 502 or nil + end + + core.log.info("ext-plugin send response succefully") +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-pre-req.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-pre-req.lua new file mode 100644 index 0000000..183506d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin-pre-req.lua @@ -0,0 +1,40 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ext = require("apisix.plugins.ext-plugin.init") + + +local name = "ext-plugin-pre-req" +local _M = { + version = 0.1, + priority = 12000, + name = name, + schema = ext.schema, +} + + +function _M.check_schema(conf) + return core.schema.check(_M.schema, conf) +end + + +function _M.rewrite(conf, ctx) + return ext.communicate(conf, ctx, name) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/helper.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/helper.lua new file mode 100644 index 0000000..7750bb5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/helper.lua @@ -0,0 +1,81 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local is_http = ngx.config.subsystem == "http" +local core = require("apisix.core") +local config_local = require("apisix.core.config_local") +local process +if is_http then + process = require "ngx.process" +end +local pl_path = require("pl.path") + + +local _M = {} + + +do + local path + function _M.get_path() + if not path then + local local_conf = config_local.local_conf() + if local_conf then + local test_path = + core.table.try_read_attr(local_conf, "ext-plugin", "path_for_test") + if test_path then + path = "unix:" .. test_path + end + end + + if not path then + local sock = "./conf/apisix-" .. process.get_master_pid() .. 
".sock" + path = "unix:" .. pl_path.abspath(sock) + end + end + + return path + end +end + + +function _M.get_conf_token_cache_time() + return 3600 +end + + +function _M.response_reader(reader, callback, ...) + if not reader then + return "get response reader failed" + end + + repeat + local chunk, read_err, cb_err + chunk, read_err = reader() + if read_err then + return "read response failed: ".. (read_err or "") + end + + if chunk then + cb_err = callback(chunk, ...) + if cb_err then + return cb_err + end + end + until not chunk +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/init.lua new file mode 100644 index 0000000..2631afd --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ext-plugin/init.lua @@ -0,0 +1,1025 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local is_http = ngx.config.subsystem == "http" +local flatbuffers = require("flatbuffers") +local a6_method = require("A6.Method") +local prepare_conf_req = require("A6.PrepareConf.Req") +local prepare_conf_resp = require("A6.PrepareConf.Resp") +local http_req_call_req = require("A6.HTTPReqCall.Req") +local http_req_call_resp = require("A6.HTTPReqCall.Resp") +local http_req_call_action = require("A6.HTTPReqCall.Action") +local http_req_call_stop = require("A6.HTTPReqCall.Stop") +local http_req_call_rewrite = require("A6.HTTPReqCall.Rewrite") +local http_resp_call_req = require("A6.HTTPRespCall.Req") +local http_resp_call_resp = require("A6.HTTPRespCall.Resp") +local extra_info = require("A6.ExtraInfo.Info") +local extra_info_req = require("A6.ExtraInfo.Req") +local extra_info_var = require("A6.ExtraInfo.Var") +local extra_info_resp = require("A6.ExtraInfo.Resp") +local extra_info_reqbody = require("A6.ExtraInfo.ReqBody") +local extra_info_respbody = require("A6.ExtraInfo.RespBody") +local text_entry = require("A6.TextEntry") +local err_resp = require("A6.Err.Resp") +local err_code = require("A6.Err.Code") +local constants = require("apisix.constants") +local core = require("apisix.core") +local helper = require("apisix.plugins.ext-plugin.helper") +local process, ngx_pipe, events +if is_http then + process = require("ngx.process") + ngx_pipe = require("ngx.pipe") + events = require("apisix.events") +end +local resty_lock = require("resty.lock") +local resty_signal = require "resty.signal" +local bit = require("bit") +local band = bit.band +local lshift = bit.lshift +local rshift = bit.rshift +local ffi = require("ffi") +local ffi_str = ffi.string +local socket_tcp = ngx.socket.tcp +local worker_id = ngx.worker.id +local ngx_timer_at = ngx.timer.at +local exiting = ngx.worker.exiting +local str_byte = string.byte +local str_format = 
string.format +local str_lower = string.lower +local str_sub = string.sub +local error = error +local ipairs = ipairs +local pairs = pairs +local tostring = tostring +local type = type +local ngx = ngx + + +local events_list +local exclude_resp_header = { + ["connection"] = true, + ["content-length"] = true, + ["transfer-encoding"] = true, + ["location"] = true, + ["server"] = true, + ["www-authenticate"] = true, + ["content-encoding"] = true, + ["content-type"] = true, + ["content-location"] = true, + ["content-language"] = true, +} + +local function new_lrucache() + return core.lrucache.new({ + type = "plugin", + invalid_stale = true, + ttl = helper.get_conf_token_cache_time(), + }) +end +local lrucache = new_lrucache() + +local shdict_name = "ext-plugin" +local shdict = ngx.shared[shdict_name] + +local schema = { + type = "object", + properties = { + conf = { + type = "array", + items = { + type = "object", + properties = { + name = { + type = "string", + maxLength = 128, + minLength = 1 + }, + value = { + type = "string", + }, + }, + required = {"name", "value"} + }, + minItems = 1, + }, + allow_degradation = {type = "boolean", default = false} + }, +} + +local _M = { + schema = schema, +} +local builder = flatbuffers.Builder(0) + + +local send +do + local hdr_buf = ffi.new("unsigned char[4]") + local buf = core.table.new(2, 0) + local MAX_DATA_SIZE = lshift(1, 24) - 1 + + function send(sock, ty, data) + hdr_buf[0] = ty + + local len = #data + + core.log.info("sending rpc type: ", ty, " data length: ", len) + + if len > MAX_DATA_SIZE then + return nil, str_format("the max length of data is %d but got %d", MAX_DATA_SIZE, len) + end + + -- length is sent as big endian + for i = 3, 1, -1 do + hdr_buf[i] = band(len, 255) + len = rshift(len, 8) + end + + buf[1] = ffi_str(hdr_buf, 4) + buf[2] = data + return sock:send(buf) + end +end +_M.send = send + + +local err_to_msg +do + local map = { + [err_code.BAD_REQUEST] = "bad request", + [err_code.SERVICE_UNAVAILABLE] = "service unavailable", + [err_code.CONF_TOKEN_NOT_FOUND] = "conf token not found", + } + + function err_to_msg(resp) + local buf = flatbuffers.binaryArray.New(resp) + local resp = err_resp.GetRootAsResp(buf, 0) + local code = resp:Code() + return map[code] or str_format("unknown err %d", code) + end +end + + +local function receive(sock) + local hdr, err = sock:receive(4) + if not hdr then + return nil, err + end + if #hdr ~= 4 then + return nil, "header too short" + end + + local ty = str_byte(hdr, 1) + local resp + local hi, mi, li = str_byte(hdr, 2, 4) + local len = 256 * (256 * hi + mi) + li + + core.log.info("receiving rpc type: ", ty, " data length: ", len) + + if len > 0 then + resp, err = sock:receive(len) + if not resp then + return nil, err + end + if #resp ~= len then + return nil, "data truncated" + end + end + + if ty == constants.RPC_ERROR then + return nil, err_to_msg(resp) + end + + return ty, resp +end +_M.receive = receive + + +local generate_id +do + local count = 0 + local MAX_COUNT = lshift(1, 22) + + function generate_id() + local wid = worker_id() + local id = lshift(wid, 22) + count + count = count + 1 + if count == MAX_COUNT then + count = 0 + end + return id + end +end + + +local encode_a6_method +do + local map = { + GET = a6_method.GET, + HEAD = a6_method.HEAD, + POST = a6_method.POST, + PUT = a6_method.PUT, + DELETE = a6_method.DELETE, + MKCOL = a6_method.MKCOL, + COPY = a6_method.COPY, + MOVE = a6_method.MOVE, + OPTIONS = a6_method.OPTIONS, + PROPFIND = a6_method.PROPFIND, + PROPPATCH = 
a6_method.PROPPATCH, + LOCK = a6_method.LOCK, + UNLOCK = a6_method.UNLOCK, + PATCH = a6_method.PATCH, + TRACE = a6_method.TRACE, + } + + function encode_a6_method(name) + return map[name] + end +end + + +local function build_args(builder, key, val) + local name = builder:CreateString(key) + local value + if val ~= true then + value = builder:CreateString(val) + end + + text_entry.Start(builder) + text_entry.AddName(builder, name) + if val ~= true then + text_entry.AddValue(builder, value) + end + return text_entry.End(builder) +end + + +local function build_headers(var, builder, key, val) + if key == "host" then + val = var.upstream_host + end + + local name = builder:CreateString(key) + local value = builder:CreateString(val) + + text_entry.Start(builder) + text_entry.AddName(builder, name) + text_entry.AddValue(builder, value) + return text_entry.End(builder) +end + + +local function handle_extra_info(ctx, input) + -- exact request + local buf = flatbuffers.binaryArray.New(input) + local req = extra_info_req.GetRootAsReq(buf, 0) + + local res + local info_type = req:InfoType() + if info_type == extra_info.Var then + local info = req:Info() + local var_req = extra_info_var.New() + var_req:Init(info.bytes, info.pos) + + local var_name = var_req:Name() + res = ctx.var[var_name] + elseif info_type == extra_info.ReqBody then + local info = req:Info() + local reqbody_req = extra_info_reqbody.New() + reqbody_req:Init(info.bytes, info.pos) + + local err + res, err = core.request.get_body() + if err then + core.log.error("failed to read request body: ", err) + end + elseif info_type == extra_info.RespBody then + local ext_res = ctx.runner_ext_response + if ext_res then + local info = req:Info() + local respbody_req = extra_info_respbody.New() + respbody_req:Init(info.byte, info.pos) + + local chunks = {} + local err = helper.response_reader(ext_res.body_reader, function (chunk, chunks) + -- When the upstream response is chunked type, + -- we will receive the complete response body + -- before sending it to the runner program + -- to reduce the number of RPC calls. + core.table.insert_tail(chunks, chunk) + end, chunks) + if err then + -- TODO: send RPC_ERROR to runner + core.log.error(err) + else + res = core.table.concat(chunks) + ctx.runner_ext_response_body = chunks + end + else + core.log.error("failed to read response body: not exits") + end + else + return nil, "unsupported info type: " .. 
info_type + end + + -- build response + builder:Clear() + + local packed_res + if res then + -- ensure to pass the res in string type + res = tostring(res) + packed_res = builder:CreateByteVector(res) + end + extra_info_resp.Start(builder) + if packed_res then + extra_info_resp.AddResult(builder, packed_res) + end + local resp = extra_info_resp.End(builder) + builder:Finish(resp) + return builder:Output() +end + + +local function fetch_token(key) + if shdict then + return shdict:get(key) + else + core.log.error('shm "ext-plugin" not found') + return nil + end +end + + +local function store_token(key, token) + if shdict then + local exp = helper.get_conf_token_cache_time() + -- early expiry, lrucache in critical state sends prepare_conf_req as original behaviour + exp = exp * 0.9 + local success, err, forcible = shdict:set(key, token, exp) + if not success then + core.log.error("ext-plugin:failed to set conf token, err: ", err) + end + if forcible then + core.log.warn("ext-plugin:set valid items forcibly overwritten") + end + else + core.log.error('shm "ext-plugin" not found') + end +end + + +local function flush_token() + if shdict then + core.log.warn("flush conf token in shared dict") + shdict:flush_all() + else + core.log.error('shm "ext-plugin" not found') + end +end + + +local rpc_call +local rpc_handlers = { + nil, + function (conf, ctx, sock, unique_key) + local token = fetch_token(unique_key) + if token then + core.log.info("fetch token from shared dict, token: ", token) + return token + end + + local lock, err = resty_lock:new(shdict_name) + if not lock then + return nil, "failed to create lock: " .. err + end + + local elapsed, err = lock:lock("prepare_conf") + if not elapsed then + return nil, "failed to acquire the lock: " .. err + end + + local token = fetch_token(unique_key) + if token then + lock:unlock() + core.log.info("fetch token from shared dict, token: ", token) + return token + end + + builder:Clear() + + local key = builder:CreateString(unique_key) + local conf_vec + if conf.conf then + local len = #conf.conf + local textEntries = core.table.new(len, 0) + for i = 1, len do + local name = builder:CreateString(conf.conf[i].name) + local value = builder:CreateString(conf.conf[i].value) + text_entry.Start(builder) + text_entry.AddName(builder, name) + text_entry.AddValue(builder, value) + local c = text_entry.End(builder) + textEntries[i] = c + end + prepare_conf_req.StartConfVector(builder, len) + for i = len, 1, -1 do + builder:PrependUOffsetTRelative(textEntries[i]) + end + conf_vec = builder:EndVector(len) + end + + prepare_conf_req.Start(builder) + prepare_conf_req.AddKey(builder, key) + if conf_vec then + prepare_conf_req.AddConf(builder, conf_vec) + end + local req = prepare_conf_req.End(builder) + builder:Finish(req) + + local ok, err = send(sock, constants.RPC_PREPARE_CONF, builder:Output()) + if not ok then + lock:unlock() + return nil, "failed to send RPC_PREPARE_CONF: " .. err + end + + local ty, resp = receive(sock) + if ty == nil then + lock:unlock() + return nil, "failed to receive RPC_PREPARE_CONF: " .. resp + end + + if ty ~= constants.RPC_PREPARE_CONF then + lock:unlock() + return nil, "failed to receive RPC_PREPARE_CONF: unexpected type " .. 
ty + end + + local buf = flatbuffers.binaryArray.New(resp) + local pcr = prepare_conf_resp.GetRootAsResp(buf, 0) + token = pcr:ConfToken() + + core.log.notice("get conf token: ", token, " conf: ", core.json.delay_encode(conf.conf)) + store_token(unique_key, token) + + lock:unlock() + + return token + end, + function (conf, ctx, sock, entry) + local lrucache_id = core.lrucache.plugin_ctx_id(ctx, entry) + local token, err = core.lrucache.plugin_ctx(lrucache, ctx, entry, rpc_call, + constants.RPC_PREPARE_CONF, conf, ctx, + lrucache_id) + if not token then + return nil, err + end + + builder:Clear() + local var = ctx.var + + local uri + if var.upstream_uri == "" then + -- use original uri instead of rewritten one + uri = var.uri + else + uri = var.upstream_uri + + -- the rewritten one may contain new args + local index = core.string.find(uri, "?") + if index then + local raw_uri = uri + uri = str_sub(raw_uri, 1, index - 1) + core.request.set_uri_args(ctx, str_sub(raw_uri, index + 1)) + end + end + + local path = builder:CreateString(uri) + + local bin_addr = var.binary_remote_addr + local src_ip = builder:CreateByteVector(bin_addr) + + local args = core.request.get_uri_args(ctx) + local textEntries = {} + for key, val in pairs(args) do + local ty = type(val) + if ty == "table" then + for _, v in ipairs(val) do + core.table.insert(textEntries, build_args(builder, key, v)) + end + else + core.table.insert(textEntries, build_args(builder, key, val)) + end + end + local len = #textEntries + http_req_call_req.StartArgsVector(builder, len) + for i = len, 1, -1 do + builder:PrependUOffsetTRelative(textEntries[i]) + end + local args_vec = builder:EndVector(len) + + local hdrs = core.request.headers(ctx) + core.table.clear(textEntries) + for key, val in pairs(hdrs) do + local ty = type(val) + if ty == "table" then + for _, v in ipairs(val) do + core.table.insert(textEntries, build_headers(var, builder, key, v)) + end + else + core.table.insert(textEntries, build_headers(var, builder, key, val)) + end + end + local len = #textEntries + http_req_call_req.StartHeadersVector(builder, len) + for i = len, 1, -1 do + builder:PrependUOffsetTRelative(textEntries[i]) + end + local hdrs_vec = builder:EndVector(len) + + local id = generate_id() + local method = var.method + + http_req_call_req.Start(builder) + http_req_call_req.AddId(builder, id) + http_req_call_req.AddConfToken(builder, token) + http_req_call_req.AddSrcIp(builder, src_ip) + http_req_call_req.AddPath(builder, path) + http_req_call_req.AddArgs(builder, args_vec) + http_req_call_req.AddHeaders(builder, hdrs_vec) + http_req_call_req.AddMethod(builder, encode_a6_method(method)) + + local req = http_req_call_req.End(builder) + builder:Finish(req) + + local ok, err = send(sock, constants.RPC_HTTP_REQ_CALL, builder:Output()) + if not ok then + return nil, "failed to send RPC_HTTP_REQ_CALL: " .. err + end + + local ty, resp + while true do + ty, resp = receive(sock) + if ty == nil then + return nil, "failed to receive RPC_HTTP_REQ_CALL: " .. resp + end + + if ty ~= constants.RPC_EXTRA_INFO then + break + end + + local out, err = handle_extra_info(ctx, resp) + if not out then + return nil, "failed to handle RPC_EXTRA_INFO: " .. err + end + + local ok, err = send(sock, constants.RPC_EXTRA_INFO, out) + if not ok then + return nil, "failed to reply RPC_EXTRA_INFO: " .. err + end + end + + if ty ~= constants.RPC_HTTP_REQ_CALL then + return nil, "failed to receive RPC_HTTP_REQ_CALL: unexpected type " .. 
ty + end + + local buf = flatbuffers.binaryArray.New(resp) + local call_resp = http_req_call_resp.GetRootAsResp(buf, 0) + local action_type = call_resp:ActionType() + if action_type == http_req_call_action.Stop then + local action = call_resp:Action() + local stop = http_req_call_stop.New() + stop:Init(action.bytes, action.pos) + + local len = stop:HeadersLength() + if len > 0 then + local stop_resp_headers = {} + for i = 1, len do + local entry = stop:Headers(i) + local name = str_lower(entry:Name()) + if stop_resp_headers[name] == nil then + core.response.set_header(name, entry:Value()) + stop_resp_headers[name] = true + else + core.response.add_header(name, entry:Value()) + end + end + end + + local body + local len = stop:BodyLength() + if len > 0 then + -- TODO: support empty body + body = stop:BodyAsString() + end + local code = stop:Status() + -- avoid using 0 as the default http status code + if code == 0 then + code = 200 + end + return true, nil, code, body + end + + if action_type == http_req_call_action.Rewrite then + local action = call_resp:Action() + local rewrite = http_req_call_rewrite.New() + rewrite:Init(action.bytes, action.pos) + + local path = rewrite:Path() + if path then + path = core.utils.uri_safe_encode(path) + var.upstream_uri = path + end + + local len = rewrite:HeadersLength() + if len > 0 then + for i = 1, len do + local entry = rewrite:Headers(i) + local name = entry:Name() + core.request.set_header(ctx, name, entry:Value()) + + if str_lower(name) == "host" then + var.upstream_host = entry:Value() + end + end + end + + local body_len = rewrite:BodyLength() + if body_len > 0 then + local body = rewrite:BodyAsString() + ngx.req.read_body() + ngx.req.set_body_data(body) + end + + local len = rewrite:RespHeadersLength() + if len > 0 then + local rewrite_resp_headers = {} + for i = 1, len do + local entry = rewrite:RespHeaders(i) + local name = str_lower(entry:Name()) + if exclude_resp_header[name] == nil then + if rewrite_resp_headers[name] == nil then + core.response.set_header(name, entry:Value()) + rewrite_resp_headers[name] = true + else + core.response.add_header(name, entry:Value()) + end + end + end + end + + local len = rewrite:ArgsLength() + if len > 0 then + local changed = {} + for i = 1, len do + local entry = rewrite:Args(i) + local name = entry:Name() + local value = entry:Value() + if value == nil then + args[name] = nil + + else + if changed[name] then + if type(args[name]) == "table" then + core.table.insert(args[name], value) + else + args[name] = {args[name], entry:Value()} + end + else + args[name] = entry:Value() + end + + changed[name] = true + end + end + + core.request.set_uri_args(ctx, args) + + if path then + var.upstream_uri = path .. '?' .. 
var.args + end + end + end + + return true + end, + nil, -- ignore RPC_EXTRA_INFO, already processed during RPC_HTTP_REQ_CALL interaction + function (conf, ctx, sock, entry) + local lrucache_id = core.lrucache.plugin_ctx_id(ctx, entry) + local token, err = core.lrucache.plugin_ctx(lrucache, ctx, entry, rpc_call, + constants.RPC_PREPARE_CONF, conf, ctx, + lrucache_id) + if not token then + return nil, err + end + + builder:Clear() + local var = ctx.var + + local res = ctx.runner_ext_response + local textEntries = {} + local hdrs = res.headers + for key, val in pairs(hdrs) do + local ty = type(val) + if ty == "table" then + for _, v in ipairs(val) do + core.table.insert(textEntries, build_headers(var, builder, key, v)) + end + else + core.table.insert(textEntries, build_headers(var, builder, key, val)) + end + end + local len = #textEntries + http_resp_call_req.StartHeadersVector(builder, len) + for i = len, 1, -1 do + builder:PrependUOffsetTRelative(textEntries[i]) + end + local hdrs_vec = builder:EndVector(len) + + local id = generate_id() + local status = res.status + + http_resp_call_req.Start(builder) + http_resp_call_req.AddId(builder, id) + http_resp_call_req.AddStatus(builder, status) + http_resp_call_req.AddConfToken(builder, token) + http_resp_call_req.AddHeaders(builder, hdrs_vec) + + local req = http_resp_call_req.End(builder) + builder:Finish(req) + + local ok, err = send(sock, constants.RPC_HTTP_RESP_CALL, builder:Output()) + if not ok then + return nil, "failed to send RPC_HTTP_RESP_CALL: " .. err + end + + local ty, resp + while true do + ty, resp = receive(sock) + if ty == nil then + return nil, "failed to receive RPC_HTTP_REQ_CALL: " .. resp + end + + if ty ~= constants.RPC_EXTRA_INFO then + break + end + + local out, err = handle_extra_info(ctx, resp) + if not out then + return nil, "failed to handle RPC_EXTRA_INFO: " .. err + end + + local ok, err = send(sock, constants.RPC_EXTRA_INFO, out) + if not ok then + return nil, "failed to reply RPC_EXTRA_INFO: " .. err + end + end + + if ty ~= constants.RPC_HTTP_RESP_CALL then + return nil, "failed to receive RPC_HTTP_RESP_CALL: unexpected type " .. ty + end + + local buf = flatbuffers.binaryArray.New(resp) + local call_resp = http_resp_call_resp.GetRootAsResp(buf, 0) + local len = call_resp:HeadersLength() + if len > 0 then + local resp_headers = {} + for i = 1, len do + local entry = call_resp:Headers(i) + local name = str_lower(entry:Name()) + if resp_headers[name] == nil then + core.response.set_header(name, entry:Value()) + resp_headers[name] = true + else + core.response.add_header(name, entry:Value()) + end + end + else + -- Filter out origin headeres + for k, v in pairs(res.headers) do + if not exclude_resp_header[str_lower(k)] then + core.response.set_header(k, v) + end + end + end + + local body + local len = call_resp:BodyLength() + if len > 0 then + -- TODO: support empty body + body = call_resp:BodyAsString() + end + local code = call_resp:Status() + core.log.info("recv resp, code: ", code, " body: ", body, " len: ", len) + + if code == 0 then + -- runner changes body only, we should set code. + code = body and res.status or nil + end + + return true, nil, code, body + end +} + + +rpc_call = function (ty, conf, ctx, ...) + local path = helper.get_path() + + local sock = socket_tcp() + sock:settimeouts(1000, 60000, 60000) + local ok, err = sock:connect(path) + if not ok then + return nil, "failed to connect to the unix socket " .. path .. ": " .. 
err + end + + local res, err, code, body = rpc_handlers[ty + 1](conf, ctx, sock, ...) + if not res then + sock:close() + return nil, err + end + + local ok, err = sock:setkeepalive(180 * 1000, 32) + if not ok then + core.log.info("failed to setkeepalive: ", err) + end + + return res, nil, code, body +end + + +local function recreate_lrucache() + flush_token() + + if lrucache then + core.log.warn("flush conf token lrucache") + end + + lrucache = new_lrucache() +end + + +function _M.communicate(conf, ctx, plugin_name, rpc_cmd) + local ok, err, code, body + local tries = 0 + local ty = rpc_cmd and rpc_cmd or constants.RPC_HTTP_REQ_CALL + while tries < 3 do + tries = tries + 1 + ok, err, code, body = rpc_call(ty, conf, ctx, plugin_name) + if ok then + if code then + return code, body + end + + return + end + + if not core.string.find(err, "conf token not found") then + core.log.error(err) + if conf.allow_degradation then + core.log.warn("Plugin Runner is wrong, allow degradation") + return + end + return 503 + end + + core.log.warn("refresh cache and try again") + recreate_lrucache() + end + + core.log.error(err) + if conf.allow_degradation then + core.log.warn("Plugin Runner is wrong after " .. tries .. " times retry, allow degradation") + return + end + return 503 +end + + +local function must_set(env, value) + local ok, err = core.os.setenv(env, value) + if not ok then + error(str_format("failed to set %s: %s", env, err), 2) + end +end + + +local function spawn_proc(cmd) + must_set("APISIX_CONF_EXPIRE_TIME", helper.get_conf_token_cache_time()) + must_set("APISIX_LISTEN_ADDRESS", helper.get_path()) + + local opt = { + merge_stderr = true, + } + local proc, err = ngx_pipe.spawn(cmd, opt) + if not proc then + error(str_format("failed to start %s: %s", core.json.encode(cmd), err)) + -- TODO: add retry + end + + proc:set_timeouts(nil, nil, nil, 0) + return proc +end + + +local runner +local function setup_runner(cmd) + + ngx_timer_at(0, function(premature) + if premature then + return + end + + runner = spawn_proc(cmd) + + while not exiting() do + while true do + -- drain output + local max = 3800 -- smaller than Nginx error log length limit + local data, err = runner:stdout_read_any(max) + if not data then + if exiting() then + return + end + + if err == "closed" then + break + end + else + -- we log stdout here just for debug or test + -- the runner itself should log to a file + core.log.warn(data) + end + end + + local ok, reason, status = runner:wait() + if not ok then + core.log.warn("runner exited with reason: ", reason, ", status: ", status) + end + + runner = nil + local ok, err = events:post(events_list._source, events_list.runner_exit) + if not ok then + core.log.error("post event failure with ", events_list._source, ", error: ", err) + end + + core.log.warn("respawn runner 3 seconds later with cmd: ", core.json.encode(cmd)) + core.utils.sleep(3) + core.log.warn("respawning new runner...") + runner = spawn_proc(cmd) + end + end) +end + + +function _M.init_worker() + local local_conf = core.config.local_conf() + local cmd = core.table.try_read_attr(local_conf, "ext-plugin", "cmd") + if not cmd then + return + end + + events_list = events:event_list( + "process_runner_exit_event", + "runner_exit" + ) + + -- flush cache when runner exited + events:register(recreate_lrucache, events_list._source, events_list.runner_exit) + + -- note that the runner is run under the same user as the Nginx master + if process.type() == "privileged agent" then + setup_runner(cmd) + end +end + + +function 
_M.exit_worker() + if process.type() == "privileged agent" and runner then + -- We need to send SIGTERM in the exit_worker phase, as: + -- 1. privileged agent doesn't support graceful exiting when I write this + -- 2. better to make it work without graceful exiting + local pid = runner:pid() + core.log.notice("terminate runner ", pid, " with SIGTERM") + local num = resty_signal.signum("TERM") + runner:kill(num) + + -- give 1s to clean up the mess + core.os.waitpid(pid, 1) + -- then we KILL it via gc finalizer + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/fault-injection.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/fault-injection.lua new file mode 100644 index 0000000..34ca05e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/fault-injection.lua @@ -0,0 +1,175 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local expr = require("resty.expr.v1") + +local sleep = core.sleep +local random = math.random +local ipairs = ipairs +local ngx = ngx +local pairs = pairs +local type = type + +local plugin_name = "fault-injection" + + +local schema = { + type = "object", + properties = { + abort = { + type = "object", + properties = { + http_status = {type = "integer", minimum = 200}, + body = {type = "string", minLength = 0}, + headers = { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + { type = "string" }, + { type = "number" } + } + } + } + }, + percentage = {type = "integer", minimum = 0, maximum = 100}, + vars = { + type = "array", + maxItems = 20, + items = { + type = "array", + }, + } + }, + required = {"http_status"}, + }, + delay = { + type = "object", + properties = { + duration = {type = "number", minimum = 0}, + percentage = {type = "integer", minimum = 0, maximum = 100}, + vars = { + type = "array", + maxItems = 20, + items = { + type = "array", + }, + } + }, + required = {"duration"}, + } + }, + minProperties = 1, +} + + +local _M = { + version = 0.1, + priority = 11000, + name = plugin_name, + schema = schema, +} + + +local function sample_hit(percentage) + if not percentage then + return true + end + + return random(1, 100) <= percentage +end + + +local function vars_match(vars, ctx) + local match_result + for _, var in ipairs(vars) do + local expr, _ = expr.new(var) + match_result = expr:eval(ctx.var) + if match_result then + break + end + end + + return match_result +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + if conf.abort and conf.abort.vars then + for _, var in ipairs(conf.abort.vars) do + local _, err = expr.new(var) + if err then + core.log.error("failed to create vars 
expression: ", err) + return false, err + end + end + end + + if conf.delay and conf.delay.vars then + for _, var in ipairs(conf.delay.vars) do + local _, err = expr.new(var) + if err then + core.log.error("failed to create vars expression: ", err) + return false, err + end + end + end + + return true +end + + +function _M.rewrite(conf, ctx) + core.log.info("plugin rewrite phase, conf: ", core.json.delay_encode(conf)) + + local abort_vars = true + if conf.abort and conf.abort.vars then + abort_vars = vars_match(conf.abort.vars, ctx) + end + core.log.info("abort_vars: ", abort_vars) + + local delay_vars = true + if conf.delay and conf.delay.vars then + delay_vars = vars_match(conf.delay.vars, ctx) + end + core.log.info("delay_vars: ", delay_vars) + + if conf.delay and sample_hit(conf.delay.percentage) and delay_vars then + sleep(conf.delay.duration) + end + + if conf.abort and sample_hit(conf.abort.percentage) and abort_vars then + if conf.abort.headers then + for header_name, header_value in pairs(conf.abort.headers) do + if type(header_value) == "string" then + header_value = core.utils.resolve_var(header_value, ctx.var) + end + ngx.header[header_name] = header_value + end + end + return conf.abort.http_status, core.utils.resolve_var(conf.abort.body, ctx.var) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/file-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/file-logger.lua new file mode 100644 index 0000000..e0970d8 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/file-logger.lua @@ -0,0 +1,184 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local log_util = require("apisix.utils.log-util") +local core = require("apisix.core") +local expr = require("resty.expr.v1") +local ngx = ngx +local io_open = io.open +local is_apisix_or, process = pcall(require, "resty.apisix.process") + + +local plugin_name = "file-logger" + + +local schema = { + type = "object", + properties = { + path = { + type = "string" + }, + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + match = { + type = "array", + maxItems = 20, + items = { + type = "array", + }, + } + }, + required = {"path"} +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + } +} + + +local _M = { + version = 0.1, + priority = 399, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + if conf.match then + local ok, err = expr.new(conf.match) + if not ok then + return nil, "failed to validate the 'match' expression: " .. err + end + end + return core.schema.check(schema, conf) +end + + +local open_file_cache +if is_apisix_or then + -- TODO: switch to a cache which supports inactive time, + -- so that unused files would not be cached + local path_to_file = core.lrucache.new({ + type = "plugin", + }) + + local function open_file_handler(conf, handler) + local file, err = io_open(conf.path, 'a+') + if not file then + return nil, err + end + + -- it will case output problem with buffer when log is larger than buffer + file:setvbuf("no") + + handler.file = file + handler.open_time = ngx.now() * 1000 + return handler + end + + function open_file_cache(conf) + local last_reopen_time = process.get_last_reopen_ms() + + local handler, err = path_to_file(conf.path, 0, open_file_handler, conf, {}) + if not handler then + return nil, err + end + + if handler.open_time < last_reopen_time then + core.log.notice("reopen cached log file: ", conf.path) + handler.file:close() + + local ok, err = open_file_handler(conf, handler) + if not ok then + return nil, err + end + end + + return handler.file + end +end + + +local function write_file_data(conf, log_message) + local msg = core.json.encode(log_message) + + local file, err + if open_file_cache then + file, err = open_file_cache(conf) + else + file, err = io_open(conf.path, 'a+') + end + + if not file then + core.log.error("failed to open file: ", conf.path, ", error info: ", err) + else + -- file:write(msg, "\n") will call fwrite several times + -- which will cause problem with the log output + -- it should be atomic + msg = msg .. 
"\n" + -- write to file directly, no need flush + local ok, err = file:write(msg) + if not ok then + core.log.error("failed to write file: ", conf.path, ", error info: ", err) + end + + -- file will be closed by gc, if open_file_cache exists + if not open_file_cache then + file:close() + end + end +end + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + if entry == nil then + return + end + write_file_data(conf, entry) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/forward-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/forward-auth.lua new file mode 100644 index 0000000..bd58364 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/forward-auth.lua @@ -0,0 +1,164 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local ipairs = ipairs +local core = require("apisix.core") +local http = require("resty.http") + +local schema = { + type = "object", + properties = { + uri = {type = "string"}, + allow_degradation = {type = "boolean", default = false}, + status_on_error = {type = "integer", minimum = 200, maximum = 599, default = 403}, + ssl_verify = { + type = "boolean", + default = true, + }, + request_method = { + type = "string", + default = "GET", + enum = {"GET", "POST"}, + description = "the method for client to request the authorization service" + }, + request_headers = { + type = "array", + default = {}, + items = {type = "string"}, + description = "client request header that will be sent to the authorization service" + }, + upstream_headers = { + type = "array", + default = {}, + items = {type = "string"}, + description = "authorization response header that will be sent to the upstream" + }, + client_headers = { + type = "array", + default = {}, + items = {type = "string"}, + description = "authorization response header that will be sent to" + .. 
"the client when authorizing failed" + }, + timeout = { + type = "integer", + minimum = 1, + maximum = 60000, + default = 3000, + description = "timeout in milliseconds", + }, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = {type = "integer", minimum = 1000, default = 60000}, + keepalive_pool = {type = "integer", minimum = 1, default = 5}, + }, + required = {"uri"} +} + + +local _M = { + version = 0.1, + priority = 2002, + name = "forward-auth", + schema = schema, +} + + +function _M.check_schema(conf) + local check = {"uri"} + core.utils.check_https(check, conf, _M.name) + core.utils.check_tls_bool({"ssl_verify"}, conf, _M.name) + + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + local auth_headers = { + ["X-Forwarded-Proto"] = core.request.get_scheme(ctx), + ["X-Forwarded-Method"] = core.request.get_method(), + ["X-Forwarded-Host"] = core.request.get_host(ctx), + ["X-Forwarded-Uri"] = ctx.var.request_uri, + ["X-Forwarded-For"] = core.request.get_remote_client_ip(ctx), + } + + if conf.request_method == "POST" then + auth_headers["Content-Length"] = core.request.header(ctx, "content-length") + auth_headers["Expect"] = core.request.header(ctx, "expect") + auth_headers["Transfer-Encoding"] = core.request.header(ctx, "transfer-encoding") + auth_headers["Content-Encoding"] = core.request.header(ctx, "content-encoding") + end + + -- append headers that need to be get from the client request header + if #conf.request_headers > 0 then + for _, header in ipairs(conf.request_headers) do + if not auth_headers[header] then + auth_headers[header] = core.request.header(ctx, header) + end + end + end + + local params = { + headers = auth_headers, + keepalive = conf.keepalive, + ssl_verify = conf.ssl_verify, + method = conf.request_method + } + + if params.method == "POST" then + params.body = core.request.get_body() + end + + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + end + + local httpc = http.new() + httpc:set_timeout(conf.timeout) + + local res, err = httpc:request_uri(conf.uri, params) + if not res and conf.allow_degradation then + return + elseif not res then + core.log.warn("failed to process forward auth, err: ", err) + return conf.status_on_error + end + + if res.status >= 300 then + local client_headers = {} + + if #conf.client_headers > 0 then + for _, header in ipairs(conf.client_headers) do + client_headers[header] = res.headers[header] + end + end + + core.response.set_header(client_headers) + return res.status, res.body + end + + -- append headers that need to be get from the auth response header + for _, header in ipairs(conf.upstream_headers) do + local header_value = res.headers[header] + if header_value then + core.request.set_header(ctx, header, header_value) + end + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/gm.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/gm.lua new file mode 100644 index 0000000..ee147ce --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/gm.lua @@ -0,0 +1,175 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + +-- local common libs +local require = require +local pcall = pcall +local ffi = require("ffi") +local C = ffi.C +local get_request = require("resty.core.base").get_request +local core = require("apisix.core") +local radixtree_sni = require("apisix.ssl.router.radixtree_sni") +local apisix_ssl = require("apisix.ssl") +local _, ssl = pcall(require, "resty.apisix.ssl") +local error = error + + +ffi.cdef[[ +unsigned long Tongsuo_version_num(void) +]] + + +-- local function +local function set_pem_ssl_key(sni, enc_cert, enc_pkey, sign_cert, sign_pkey) + local r = get_request() + if r == nil then + return false, "no request found" + end + + local parsed_enc_cert, err = apisix_ssl.fetch_cert(sni, enc_cert) + if not parsed_enc_cert then + return false, "failed to parse enc PEM cert: " .. err + end + + local parsed_sign_cert, err = apisix_ssl.fetch_cert(sni, sign_cert) + if not parsed_sign_cert then + return false, "failed to parse sign PEM cert: " .. err + end + + local ok, err = ssl.set_gm_cert(parsed_enc_cert, parsed_sign_cert) + if not ok then + return false, "failed to set PEM cert: " .. err + end + + local parsed_enc_pkey, err = apisix_ssl.fetch_pkey(sni, enc_pkey) + if not parsed_enc_pkey then + return false, "failed to parse enc PEM priv key: " .. err + end + + local parsed_sign_pkey, err = apisix_ssl.fetch_pkey(sni, sign_pkey) + if not parsed_sign_pkey then + return false, "failed to parse sign PEM priv key: " .. err + end + + ok, err = ssl.set_gm_priv_key(parsed_enc_pkey, parsed_sign_pkey) + if not ok then + return false, "failed to set PEM priv key: " .. err + end + + return true +end + + +local original_set_cert_and_key +local function set_cert_and_key(sni, value) + if value.gm then + -- process as GM certificate + -- For GM dual certificate, the `cert` and `key` will be encryption cert/key. + -- The first item in `certs` and `keys` will be sign cert/key. + local enc_cert = value.cert + local enc_pkey = value.key + local sign_cert = value.certs[1] + local sign_pkey = value.keys[1] + return set_pem_ssl_key(sni, enc_cert, enc_pkey, sign_cert, sign_pkey) + end + return original_set_cert_and_key(sni, value) +end + + +local original_check_ssl_conf +local function check_ssl_conf(in_dp, conf) + if conf.gm then + -- process as GM certificate + -- For GM dual certificate, the `cert` and `key` will be encryption cert/key. + -- The first item in `certs` and `keys` will be sign cert/key. + local ok, err = original_check_ssl_conf(in_dp, conf) + -- check cert/key first in the original method + if not ok then + return nil, err + end + + -- Currently, APISIX doesn't check the cert type (ECDSA / RSA). So we skip the + -- check for now in this plugin. 
+ local num_certs = conf.certs and #conf.certs or 0 + local num_keys = conf.keys and #conf.keys or 0 + if num_certs ~= 1 or num_keys ~= 1 then + return nil, "sign cert/key are required" + end + return true + end + return original_check_ssl_conf(in_dp, conf) +end + + +-- module define +local plugin_name = "gm" + +-- plugin schema +local plugin_schema = { + type = "object", + properties = { + }, +} + +local _M = { + version = 0.1, -- plugin version + priority = -43, + name = plugin_name, -- plugin name + schema = plugin_schema, -- plugin schema +} + + +function _M.init() + if not pcall(function () return C.Tongsuo_version_num end) then + error("need to build Tongsuo (https://github.com/Tongsuo-Project/Tongsuo) " .. + "into the APISIX-Runtime") + end + + ssl.enable_ntls() + original_set_cert_and_key = radixtree_sni.set_cert_and_key + radixtree_sni.set_cert_and_key = set_cert_and_key + original_check_ssl_conf = apisix_ssl.check_ssl_conf + apisix_ssl.check_ssl_conf = check_ssl_conf + + if core.schema.ssl.properties.gm ~= nil then + error("Field 'gm' is occupied") + end + + -- inject a mark to distinguish GM certificate + core.schema.ssl.properties.gm = { + type = "boolean" + } +end + + +function _M.destroy() + ssl.disable_ntls() + radixtree_sni.set_cert_and_key = original_set_cert_and_key + apisix_ssl.check_ssl_conf = original_check_ssl_conf + core.schema.ssl.properties.gm = nil +end + +-- module interface for schema check +-- @param `conf` user defined conf data +-- @param `schema_type` defined in `apisix/core/schema.lua` +-- @return +function _M.check_schema(conf, schema_type) + return core.schema.check(plugin_schema, conf) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/google-cloud-logging.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/google-cloud-logging.lua new file mode 100644 index 0000000..62ca991 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/google-cloud-logging.lua @@ -0,0 +1,265 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local tostring = tostring +local http = require("resty.http") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local google_oauth = require("apisix.utils.google-cloud-oauth") + + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local plugin_name = "google-cloud-logging" +local batch_processor_manager = bp_manager_mod.new(plugin_name) +local schema = { + type = "object", + properties = { + auth_config = { + type = "object", + properties = { + client_email = { type = "string" }, + private_key = { type = "string" }, + project_id = { type = "string" }, + token_uri = { + type = "string", + default = "https://oauth2.googleapis.com/token" + }, + -- https://developers.google.com/identity/protocols/oauth2/scopes#logging + scope = { + type = "array", + items = { + description = "Google OAuth2 Authorization Scopes", + type = "string", + }, + minItems = 1, + uniqueItems = true, + default = { + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/cloud-platform" + } + }, + scopes = { + type = "array", + items = { + description = "Google OAuth2 Authorization Scopes", + type = "string", + }, + minItems = 1, + uniqueItems = true + }, + entries_uri = { + type = "string", + default = "https://logging.googleapis.com/v2/entries:write" + }, + }, + required = { "client_email", "private_key", "project_id", "token_uri" } + }, + ssl_verify = { + type = "boolean", + default = true + }, + auth_file = { type = "string" }, + -- https://cloud.google.com/logging/docs/reference/v2/rest/v2/MonitoredResource + resource = { + type = "object", + properties = { + type = { type = "string" }, + labels = { type = "object" } + }, + default = { + type = "global" + }, + required = { "type" } + }, + -- https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry + log_id = { + type = "string", + default = "apisix.apache.org%2Flogs" + }, + log_format = {type = "object"}, + }, + oneOf = { + { required = { "auth_config" } }, + { required = { "auth_file" } }, + }, + encrypt_fields = {"auth_config.private_key"}, +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local function send_to_google(oauth, entries) + local http_new = http.new() + local access_token = oauth:generate_access_token() + if not access_token then + return nil, "failed to get google oauth token" + end + + local res, err = http_new:request_uri(oauth.entries_uri, { + ssl_verify = oauth.ssl_verify, + method = "POST", + body = core.json.encode({ + entries = entries, + partialSuccess = false, + }), + headers = { + ["Content-Type"] = "application/json", + ["Authorization"] = (oauth.access_token_type or "Bearer") .. " " .. access_token, + }, + }) + + if not res then + return nil, "failed to write log to google, " .. err + end + + if res.status ~= 200 then + return nil, res.body + end + + return res.body +end + + +local function fetch_oauth_conf(conf) + if conf.auth_config then + return conf.auth_config + end + + if not conf.auth_file then + return nil, "configuration is not defined" + end + + local file_content, err = core.io.get_file(conf.auth_file) + if not file_content then + return nil, "failed to read configuration, file: " .. conf.auth_file .. " err: " .. 
err + end + + local config_tab + config_tab, err = core.json.decode(file_content) + if not config_tab then + return nil, "config parse failure, data: " .. file_content .. " , err: " .. err + end + + return config_tab +end + + +local function create_oauth_object(conf) + local auth_conf, err = fetch_oauth_conf(conf) + if not auth_conf then + return nil, err + end + + auth_conf.scope = auth_conf.scopes or auth_conf.scope + + return google_oauth.new(auth_conf, conf.ssl_verify) +end + + +local function get_logger_entry(conf, ctx, oauth) + local entry, customized = log_util.get_log_entry(plugin_name, conf, ctx) + local google_entry + if not customized then + google_entry = { + httpRequest = { + requestMethod = entry.request.method, + requestUrl = entry.request.url, + requestSize = entry.request.size, + status = entry.response.status, + responseSize = entry.response.size, + userAgent = entry.request.headers and entry.request.headers["user-agent"], + remoteIp = entry.client_ip, + serverIp = entry.upstream, + latency = tostring(core.string.format("%0.3f", entry.latency / 1000)) .. "s" + }, + jsonPayload = { + route_id = entry.route_id, + service_id = entry.service_id, + }, + } + else + google_entry = { + jsonPayload = entry, + } + end + + google_entry.labels = { + source = "apache-apisix-google-cloud-logging" + } + google_entry.timestamp = log_util.get_rfc3339_zulu_timestamp() + google_entry.resource = conf.resource + google_entry.insertId = ctx.var.request_id + google_entry.logName = core.string.format("projects/%s/logs/%s", oauth.project_id, conf.log_id) + + return google_entry +end + + +local _M = { + version = 0.1, + priority = 407, + name = plugin_name, + metadata_schema = metadata_schema, + schema = batch_processor_manager:wrap_schema(schema), +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + return core.schema.check(schema, conf) +end + + +function _M.log(conf, ctx) + local oauth, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, + create_oauth_object, conf) + if not oauth then + core.log.error("failed to fetch google-cloud-logging.oauth object: ", err) + return + end + + local entry = get_logger_entry(conf, ctx, oauth) + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + local process = function(entries) + return send_to_google(oauth, entries) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, process) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode.lua new file mode 100644 index 0000000..625018f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode.lua @@ -0,0 +1,211 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ngx = ngx +local core = require("apisix.core") +local schema_def = require("apisix.schema_def") +local proto = require("apisix.plugins.grpc-transcode.proto") +local request = require("apisix.plugins.grpc-transcode.request") +local response = require("apisix.plugins.grpc-transcode.response") + + +local plugin_name = "grpc-transcode" + +local pb_option_def = { + { description = "enum as result", + type = "string", + enum = {"enum_as_name", "enum_as_value"}, + }, + { description = "int64 as result", + type = "string", + enum = {"int64_as_number", "int64_as_string", "int64_as_hexstring"}, + }, + { description ="default values option", + type = "string", + enum = {"auto_default_values", "no_default_values", + "use_default_values", "use_default_metatable"}, + }, + { description = "hooks option", + type = "string", + enum = {"enable_hooks", "disable_hooks" }, + }, +} + +local schema = { + type = "object", + properties = { + proto_id = schema_def.id_schema, + service = { + description = "the grpc service name", + type = "string" + }, + method = { + description = "the method name in the grpc service.", + type = "string" + }, + deadline = { + description = "deadline for grpc, millisecond", + type = "number", + default = 0 + }, + pb_option = { + type = "array", + items = { type="string", anyOf = pb_option_def }, + minItems = 1, + default = { + "enum_as_name", + "int64_as_number", + "auto_default_values", + "disable_hooks", + } + }, + show_status_in_body = { + description = "show decoded grpc-status-details-bin in response body", + type = "boolean", + default = false + }, + -- https://github.com/googleapis/googleapis/blob/b7cb84f5d42e6dba0fdcc2d8689313f6a8c9d7b9/ + -- google/rpc/status.proto#L46 + status_detail_type = { + description = "the message type of the grpc-status-details-bin's details part, " + .. 
"if not given, the details part will not be decoded", + type = "string", + }, + }, + additionalProperties = true, + required = { "proto_id", "service", "method" }, +} + +-- Based on https://cloud.google.com/apis/design/errors#handling_errors +local status_rel = { + ["1"] = 499, -- CANCELLED + ["2"] = 500, -- UNKNOWN + ["3"] = 400, -- INVALID_ARGUMENT + ["4"] = 504, -- DEADLINE_EXCEEDED + ["5"] = 404, -- NOT_FOUND + ["6"] = 409, -- ALREADY_EXISTS + ["7"] = 403, -- PERMISSION_DENIED + ["8"] = 429, -- RESOURCE_EXHAUSTED + ["9"] = 400, -- FAILED_PRECONDITION + ["10"] = 409, -- ABORTED + ["11"] = 400, -- OUT_OF_RANGE + ["12"] = 501, -- UNIMPLEMENTED + ["13"] = 500, -- INTERNAL + ["14"] = 503, -- UNAVAILABLE + ["15"] = 500, -- DATA_LOSS + ["16"] = 401, -- UNAUTHENTICATED +} + +local _M = { + version = 0.1, + priority = 506, + name = plugin_name, + schema = schema, +} + + +function _M.init() + proto.init() +end + + +function _M.destroy() + proto.destroy() +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +function _M.access(conf, ctx) + core.log.info("conf: ", core.json.delay_encode(conf)) + + local proto_id = conf.proto_id + if not proto_id then + core.log.error("proto id miss: ", proto_id) + return + end + + local proto_obj, err = proto.fetch(proto_id) + if err then + core.log.error("proto load error: ", err) + return + end + + local ok, err, err_code = request(proto_obj, conf.service, + conf.method, conf.pb_option, conf.deadline) + if not ok then + core.log.error("transform request error: ", err) + return err_code + end + + ctx.proto_obj = proto_obj + +end + + +function _M.header_filter(conf, ctx) + if ngx.status >= 300 then + return + end + + ngx.header["Content-Type"] = "application/json" + ngx.header.content_length = nil + + local headers = ngx.resp.get_headers() + + if headers["grpc-status"] ~= nil and headers["grpc-status"] ~= "0" then + local http_status = status_rel[headers["grpc-status"]] + if http_status ~= nil then + ngx.status = http_status + else + ngx.status = 599 + end + else + -- The error response body does not contain grpc-status and grpc-message + ngx.header["Trailer"] = {"grpc-status", "grpc-message"} + end + +end + + +function _M.body_filter(conf, ctx) + if ngx.status >= 300 and not conf.show_status_in_body then + return + end + + local proto_obj = ctx.proto_obj + if not proto_obj then + return + end + + local err = response(ctx, proto_obj, conf.service, conf.method, conf.pb_option, + conf.show_status_in_body, conf.status_detail_type) + if err then + core.log.error("transform response error: ", err) + return + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/proto.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/proto.lua new file mode 100644 index 0000000..347ec39 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/proto.lua @@ -0,0 +1,279 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local config_util = require("apisix.core.config_util") +local pb = require("pb") +local protoc = require("protoc") +local pcall = pcall +local ipairs = ipairs +local decode_base64 = ngx.decode_base64 + + +local protos +local lrucache_proto = core.lrucache.new({ + ttl = 300, count = 100 +}) + +local proto_fake_file = "filename for loaded" + +local function compile_proto_text(content) + protoc.reload() + local _p = protoc.new() + -- the loaded proto won't appears in _p.loaded without a file name after lua-protobuf=0.3.2, + -- which means _p.loaded after _p:load(content) is always empty, so we can pass a fake file + -- name to keep the code below unchanged, or we can create our own load function with returning + -- the loaded DescriptorProto table additionally, see more details in + -- https://github.com/apache/apisix/pull/4368 + local ok, res = pcall(_p.load, _p, content, proto_fake_file) + if not ok then + return nil, res + end + + if not res or not _p.loaded then + return nil, "failed to load proto content" + end + + local compiled = _p.loaded + + local index = {} + for _, s in ipairs(compiled[proto_fake_file].service or {}) do + local method_index = {} + for _, m in ipairs(s.method) do + method_index[m.name] = m + end + + index[compiled[proto_fake_file].package .. '.' .. s.name] = method_index + end + + compiled[proto_fake_file].index = index + + return compiled +end + + +local function compile_proto_bin(content) + content = decode_base64(content) + if not content then + return nil + end + + -- pb.load doesn't return err + local ok = pb.load(content) + if not ok then + return nil + end + + local files = pb.decode("google.protobuf.FileDescriptorSet", content).file + local index = {} + for _, f in ipairs(files) do + for _, s in ipairs(f.service or {}) do + local method_index = {} + for _, m in ipairs(s.method) do + method_index[m.name] = m + end + + index[f.package .. '.' .. s.name] = method_index + end + end + + local compiled = {} + compiled[proto_fake_file] = {} + compiled[proto_fake_file].index = index + return compiled +end + + +local function compile_proto(content) + -- clear pb state + local old_pb_state = pb.state(nil) + + local compiled, err = compile_proto_text(content) + if not compiled then + compiled = compile_proto_bin(content) + if not compiled then + return nil, err + end + end + + -- fetch pb state + compiled.pb_state = pb.state(old_pb_state) + return compiled +end + + +local _M = { + version = 0.1, + compile_proto = compile_proto, + proto_fake_file = proto_fake_file +} + +local function create_proto_obj(proto_id) + if protos.values == nil then + return nil + end + + local content + for _, proto in config_util.iterate_values(protos.values) do + if proto_id == proto.value.id then + content = proto.value.content + break + end + end + + if not content then + return nil, "failed to find proto by id: " .. 
proto_id + end + + return compile_proto(content) +end + + +function _M.fetch(proto_id) + return lrucache_proto(proto_id, protos.conf_version, + create_proto_obj, proto_id) +end + + +function _M.protos() + if not protos then + return nil, nil + end + + return protos.values, protos.conf_version +end + + +local grpc_status_proto = [[ + syntax = "proto3"; + + package grpc_status; + + message Any { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; + } + + // The `Status` type defines a logical error model that is suitable for + // different programming environments, including REST APIs and RPC APIs. It is + // used by [gRPC](https://github.com/grpc). Each `Status` message contains + // three pieces of data: error code, error message, and error details. + // + // You can find out more about this error model and how to work with it in the + // [API Design Guide](https://cloud.google.com/apis/design/errors). + message ErrorStatus { + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated Any details = 3; + } +]] + + +local status_pb_state +local function init_status_pb_state() + if not status_pb_state then + -- clear current pb state + local old_pb_state = pb.state(nil) + + -- initialize protoc compiler + protoc.reload() + local status_protoc = protoc.new() + -- do not use loadfile here, it can not load the proto file when using a relative address + -- after luarocks install apisix + local ok, err = status_protoc:load(grpc_status_proto, "grpc_status.proto") + if not ok then + status_protoc:reset() + pb.state(old_pb_state) + return "failed to load grpc status protocol: " .. 
err + end + + status_pb_state = pb.state(old_pb_state) + end +end + + +function _M.fetch_status_pb_state() + return status_pb_state +end + + +function _M.init() + local err + protos, err = core.config.new("/protos", { + automatic = true, + item_schema = core.schema.proto + }) + if not protos then + core.log.error("failed to create etcd instance for fetching protos: ", + err) + return + end + + if not status_pb_state then + err = init_status_pb_state() + if err then + core.log.error("failed to init grpc status proto: ", + err) + return + end + end +end + +function _M.destroy() + if protos then + protos:close() + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/request.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/request.lua new file mode 100644 index 0000000..934a1c9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/request.lua @@ -0,0 +1,72 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local util = require("apisix.plugins.grpc-transcode.util") +local core = require("apisix.core") +local pb = require("pb") +local bit = require("bit") +local ngx = ngx +local string = string +local table = table +local pcall = pcall +local tonumber = tonumber +local req_read_body = ngx.req.read_body + +return function (proto, service, method, pb_option, deadline, default_values) + core.log.info("proto: ", core.json.delay_encode(proto, true)) + local m = util.find_method(proto, service, method) + if not m then + return false, "Undefined service method: " .. service .. "/" .. method + .. " end", 503 + end + + req_read_body() + + local pb_old_state = pb.state(proto.pb_state) + util.set_options(proto, pb_option) + + local map_message = util.map_message(m.input_type, default_values or {}) + local ok, encoded = pcall(pb.encode, m.input_type, map_message) + pb.state(pb_old_state) + + if not ok or not encoded then + return false, "failed to encode request data to protobuf", 400 + end + + local size = #encoded + local prefix = { + string.char(0), + string.char(bit.band(bit.rshift(size, 24), 0xFF)), + string.char(bit.band(bit.rshift(size, 16), 0xFF)), + string.char(bit.band(bit.rshift(size, 8), 0xFF)), + string.char(bit.band(size, 0xFF)) + } + + local message = table.concat(prefix, "") .. encoded + + ngx.req.set_method(ngx.HTTP_POST) + ngx.req.set_uri("/" .. service .. "/" .. method, false) + ngx.req.set_uri_args({}) + ngx.req.set_body_data(message) + + local dl = tonumber(deadline) + if dl~= nil and dl > 0 then + ngx.req.set_header("grpc-timeout", dl .. 
"m") + end + + return true +end diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/response.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/response.lua new file mode 100644 index 0000000..9dd6780 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/response.lua @@ -0,0 +1,144 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local util = require("apisix.plugins.grpc-transcode.util") +local grpc_proto = require("apisix.plugins.grpc-transcode.proto") +local core = require("apisix.core") +local pb = require("pb") +local ngx = ngx +local string = string +local ngx_decode_base64 = ngx.decode_base64 +local ipairs = ipairs +local pcall = pcall + + +local function handle_error_response(status_detail_type, proto) + local err_msg + + local grpc_status = ngx.header["grpc-status-details-bin"] + if grpc_status then + grpc_status = ngx_decode_base64(grpc_status) + if grpc_status == nil then + err_msg = "grpc-status-details-bin is not base64 format" + ngx.arg[1] = err_msg + return err_msg + end + + local status_pb_state = grpc_proto.fetch_status_pb_state() + local old_pb_state = pb.state(status_pb_state) + + local ok, decoded_grpc_status = pcall(pb.decode, "grpc_status.ErrorStatus", grpc_status) + pb.state(old_pb_state) + if not ok then + err_msg = "failed to call pb.decode to decode grpc-status-details-bin" + ngx.arg[1] = err_msg + return err_msg .. ", err: " .. decoded_grpc_status + end + + if not decoded_grpc_status then + err_msg = "failed to decode grpc-status-details-bin" + ngx.arg[1] = err_msg + return err_msg + end + + local details = decoded_grpc_status.details + if status_detail_type and details then + local decoded_details = {} + for _, detail in ipairs(details) do + local pb_old_state = pb.state(proto.pb_state) + local ok, err_or_value = pcall(pb.decode, status_detail_type, detail.value) + pb.state(pb_old_state) + if not ok then + err_msg = "failed to call pb.decode to decode details in " + .. "grpc-status-details-bin" + ngx.arg[1] = err_msg + return err_msg .. ", err: " .. err_or_value + end + + if not err_or_value then + err_msg = "failed to decode details in grpc-status-details-bin" + ngx.arg[1] = err_msg + return err_msg + end + + core.table.insert(decoded_details, err_or_value) + end + + decoded_grpc_status.details = decoded_details + end + + local resp_body = {error = decoded_grpc_status} + local response, err = core.json.encode(resp_body) + if not response then + err_msg = "failed to json_encode response body" + ngx.arg[1] = err_msg + return err_msg .. ", error: " .. 
err + end + + ngx.arg[1] = response + end +end + + +return function(ctx, proto, service, method, pb_option, show_status_in_body, status_detail_type) + local buffer = core.response.hold_body_chunk(ctx) + if not buffer then + return nil + end + + -- handle error response after the last response chunk + if ngx.status >= 300 and show_status_in_body then + return handle_error_response(status_detail_type, proto) + end + + -- when body has already been read by other plugin + -- the buffer is an empty string + if buffer == "" and ctx.resp_body then + buffer = ctx.resp_body + end + + local m = util.find_method(proto, service, method) + if not m then + return false, "2.Undefined service method: " .. service .. "/" .. method + .. " end." + end + + if not ngx.req.get_headers()["X-Grpc-Web"] then + buffer = string.sub(buffer, 6) + end + + local pb_old_state = pb.state(proto.pb_state) + util.set_options(proto, pb_option) + + local err_msg + local decoded = pb.decode(m.output_type, buffer) + pb.state(pb_old_state) + if not decoded then + err_msg = "failed to decode response data by protobuf" + ngx.arg[1] = err_msg + return err_msg + end + + local response, err = core.json.encode(decoded) + if not response then + err_msg = "failed to json_encode response body" + ngx.arg[1] = err_msg + return err_msg .. ", err: " .. err + end + + ngx.arg[1] = response + return nil +end diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/util.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/util.lua new file mode 100644 index 0000000..a95cb82 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-transcode/util.lua @@ -0,0 +1,202 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local proto_fake_file = require("apisix.plugins.grpc-transcode.proto").proto_fake_file +local json = core.json +local pb = require("pb") +local ngx = ngx +local string = string +local table = table +local ipairs = ipairs +local pairs = pairs +local tonumber = tonumber +local type = type + + +local _M = {version = 0.1} + + +function _M.find_method(proto, service, method) + local loaded = proto[proto_fake_file] + if type(loaded) ~= "table" then + core.log.error("compiled proto not found") + return nil + end + + if type(loaded.index[service]) ~= "table" then + core.log.error("compiled proto service not found") + return nil + end + + local res = loaded.index[service][method] + if not res then + core.log.error("compiled proto method not found") + return nil + end + + return res +end + + +function _M.set_options(proto, options) + local cur_opts = proto.options + if cur_opts then + if cur_opts == options then + -- same route + return + end + + local same = true + table.sort(options) + for i, v in ipairs(options) do + if cur_opts[i] ~= v then + same = false + break + end + end + + if same then + -- Routes have the same configuration, usually the default one. + -- As this is a small optimization, we don't care about routes have different + -- configuration but have the same effect eventually. + return + end + else + table.sort(options) + end + + for _, opt in ipairs(options) do + pb.option(opt) + end + + proto.options = options +end + + +local function get_request_table() + local method = ngx.req.get_method() + local content_type = ngx.req.get_headers()["Content-Type"] or "" + if string.find(content_type, "application/json", 1, true) and + (method == "POST" or method == "PUT" or method == "PATCH") + then + local req_body, _ = core.request.get_body() + if req_body then + local data, _ = json.decode(req_body) + if data then + return data + end + end + end + + if method == "POST" then + return ngx.req.get_post_args() + end + + return ngx.req.get_uri_args() +end + + +local function get_from_request(request_table, name, kind) + if not request_table then + return nil + end + + local prefix = kind:sub(1, 3) + if prefix == "int" then + if request_table[name] then + if kind == "int64" then + return request_table[name] + else + return tonumber(request_table[name]) + end + end + end + + return request_table[name] +end + + +function _M.map_message(field, default_values, request_table, real_key) + if not pb.type(field) then + return nil, "Field " .. field .. " is not defined" + end + + local request = {} + local sub, err + if not request_table then + request_table = get_request_table() + end + + for name, _, field_type in pb.fields(field) do + local _, _, ty = pb.type(field_type) + if ty ~= "enum" and field_type:sub(1, 1) == "." 
then + if request_table[name] == nil then + sub = default_values and default_values[name] + elseif core.table.isarray(request_table[name]) then + local sub_array = core.table.new(#request_table[name], 0) + for i, value in ipairs(request_table[name]) do + local sub_array_obj + if type(value) == "table" then + sub_array_obj, err = _M.map_message(field_type, + default_values and default_values[name], value) + if err then + return nil, err + end + else + sub_array_obj = value + end + sub_array[i] = sub_array_obj + end + sub = sub_array + else + if ty == "map" then + for k, v in pairs(request_table[name]) do + local tbl, err = _M.map_message(field_type, + default_values and default_values[name], + request_table[name], k) + if err then + return nil, err + end + if not sub then + sub = {} + end + sub[k] = tbl[k] + end + else + sub, err = _M.map_message(field_type, + default_values and default_values[name], + request_table[name]) + if err then + return nil, err + end + end + end + + request[name] = sub + else + if real_key then + name = real_key + end + request[name] = get_from_request(request_table, name, field_type) + or (default_values and default_values[name]) + end + end + return request +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-web.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-web.lua new file mode 100644 index 0000000..43a075c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/grpc-web.lua @@ -0,0 +1,228 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +local ngx = ngx +local ngx_arg = ngx.arg +local core = require("apisix.core") +local req_set_uri = ngx.req.set_uri +local req_set_body_data = ngx.req.set_body_data +local decode_base64 = ngx.decode_base64 +local encode_base64 = ngx.encode_base64 +local bit = require("bit") +local string = string + + +local ALLOW_METHOD_OPTIONS = "OPTIONS" +local ALLOW_METHOD_POST = "POST" +local CONTENT_ENCODING_BASE64 = "base64" +local CONTENT_ENCODING_BINARY = "binary" +local DEFAULT_CORS_ALLOW_ORIGIN = "*" +local DEFAULT_CORS_ALLOW_METHODS = ALLOW_METHOD_POST +local DEFAULT_CORS_ALLOW_HEADERS = "content-type,x-grpc-web,x-user-agent" +local DEFAULT_CORS_EXPOSE_HEADERS = "grpc-message,grpc-status" +local DEFAULT_PROXY_CONTENT_TYPE = "application/grpc" + + +local plugin_name = "grpc-web" + +local schema = { + type = "object", + properties = { + cors_allow_headers = { + description = + "multiple header use ',' to split. 
default: content-type,x-grpc-web,x-user-agent.", + type = "string", + default = DEFAULT_CORS_ALLOW_HEADERS + } + } +} + +local grpc_web_content_encoding = { + ["application/grpc-web"] = CONTENT_ENCODING_BINARY, + ["application/grpc-web-text"] = CONTENT_ENCODING_BASE64, + ["application/grpc-web+proto"] = CONTENT_ENCODING_BINARY, + ["application/grpc-web-text+proto"] = CONTENT_ENCODING_BASE64, +} + +local _M = { + version = 0.1, + priority = 505, + name = plugin_name, + schema = schema, +} + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + +local function exit(ctx, status) + ctx.grpc_web_skip_body_filter = true + return status +end + +--- Build gRPC-Web trailer chunk +-- grpc-web trailer format reference: +-- envoyproxy/envoy/source/extensions/filters/http/grpc_web/grpc_web_filter.cc +-- +-- Format for grpc-web trailer +-- 1 byte: 0x80 +-- 4 bytes: length of the trailer +-- n bytes: trailer +-- It using upstream_trailer_* variables from nginx, it is available since NGINX version 1.13.10 +-- https://nginx.org/en/docs/http/ngx_http_upstream_module.html#var_upstream_trailer_ +-- +-- @param grpc_status number grpc status code +-- @param grpc_message string grpc message +-- @return string grpc-web trailer chunk in raw string +local build_trailer = function (grpc_status, grpc_message) + local status_str = "grpc-status:" .. grpc_status + local status_msg = "grpc-message:" .. ( grpc_message or "") + local grpc_web_trailer = status_str .. "\r\n" .. status_msg .. "\r\n" + local len = #grpc_web_trailer + + -- 1 byte: 0x80 + local trailer_buf = string.char(0x80) + -- 4 bytes: length of the trailer + trailer_buf = trailer_buf .. string.char( + bit.band(bit.rshift(len, 24), 0xff), + bit.band(bit.rshift(len, 16), 0xff), + bit.band(bit.rshift(len, 8), 0xff), + bit.band(len, 0xff) + ) + -- n bytes: trailer + trailer_buf = trailer_buf .. grpc_web_trailer + + return trailer_buf +end + +function _M.access(conf, ctx) + -- set context variable mime + -- When processing non gRPC Web requests, `mime` can be obtained in the context + -- and set to the `Content-Type` of the response + ctx.grpc_web_mime = core.request.header(ctx, "Content-Type") + + local method = core.request.get_method() + if method == ALLOW_METHOD_OPTIONS then + return exit(ctx, 204) + end + + if method ~= ALLOW_METHOD_POST then + -- https://github.com/grpc/grpc-web/blob/master/doc/browser-features.md#cors-support + core.log.error("request method: `", method, "` invalid") + return exit(ctx, 405) + end + + local encoding = grpc_web_content_encoding[ctx.grpc_web_mime] + if not encoding then + core.log.error("request Content-Type: `", ctx.grpc_web_mime, "` invalid") + return exit(ctx, 400) + end + + -- set context variable encoding method + ctx.grpc_web_encoding = encoding + + -- set grpc path + if not (ctx.curr_req_matched and ctx.curr_req_matched[":ext"]) then + core.log.error("routing configuration error, grpc-web plugin only supports ", + "`prefix matching` pattern routing") + return exit(ctx, 400) + end + + local path = ctx.curr_req_matched[":ext"] + if path:byte(1) ~= core.string.byte("/") then + path = "/" .. 
path + end + + req_set_uri(path) + + -- set grpc body + local body, err = core.request.get_body() + if err or not body then + core.log.error("failed to read request body, err: ", err) + return exit(ctx, 400) + end + + if encoding == CONTENT_ENCODING_BASE64 then + body = decode_base64(body) + if not body then + core.log.error("failed to decode request body") + return exit(ctx, 400) + end + end + + -- set grpc content-type + core.request.set_header(ctx, "Content-Type", DEFAULT_PROXY_CONTENT_TYPE) + -- set grpc body + req_set_body_data(body) +end + +function _M.header_filter(conf, ctx) + local method = core.request.get_method() + if method == ALLOW_METHOD_OPTIONS then + core.response.set_header("Access-Control-Allow-Methods", DEFAULT_CORS_ALLOW_METHODS) + core.response.set_header("Access-Control-Allow-Headers", conf.cors_allow_headers) + end + + if not ctx.cors_allow_origins then + core.response.set_header("Access-Control-Allow-Origin", DEFAULT_CORS_ALLOW_ORIGIN) + end + core.response.set_header("Access-Control-Expose-Headers", DEFAULT_CORS_EXPOSE_HEADERS) + + if not ctx.grpc_web_skip_body_filter then + core.response.set_header("Content-Type", ctx.grpc_web_mime) + core.response.set_header("Content-Length", nil) + end +end + +function _M.body_filter(conf, ctx) + if ctx.grpc_web_skip_body_filter then + return + end + + -- If the MIME extension type description of the gRPC-Web standard is not obtained, + -- indicating that the request is not based on the gRPC Web specification, + -- the processing of the request body will be ignored + -- https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-WEB.md + -- https://github.com/grpc/grpc-web/blob/master/doc/browser-features.md#cors-support + if not ctx.grpc_web_mime then + return + end + + if ctx.grpc_web_encoding == CONTENT_ENCODING_BASE64 then + local chunk = ngx_arg[1] + chunk = encode_base64(chunk) + ngx_arg[1] = chunk + end + + if ngx_arg[2] then -- if eof + local status = ctx.var.upstream_trailer_grpc_status + local message = ctx.var.upstream_trailer_grpc_message + + -- When the response body completes and still does not receive the grpc status + local resp_ok = status ~= nil and status ~= "" + local trailer_buf = build_trailer( + resp_ok and status or 2, + resp_ok and message or "upstream grpc status not received" + ) + if ctx.grpc_web_encoding == CONTENT_ENCODING_BASE64 then + trailer_buf = encode_base64(trailer_buf) + end + + ngx_arg[1] = ngx_arg[1] .. trailer_buf + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/gzip.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/gzip.lua new file mode 100644 index 0000000..dfd0f10 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/gzip.lua @@ -0,0 +1,170 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local is_apisix_or, response = pcall(require, "resty.apisix.response") +local ngx_header = ngx.header +local req_http_version = ngx.req.http_version +local str_sub = string.sub +local ipairs = ipairs +local tonumber = tonumber +local type = type + + +local schema = { + type = "object", + properties = { + types = { + anyOf = { + { + type = "array", + minItems = 1, + items = { + type = "string", + minLength = 1, + }, + }, + { + enum = {"*"} + } + }, + default = {"text/html"} + }, + min_length = { + type = "integer", + minimum = 1, + default = 20, + }, + comp_level = { + type = "integer", + minimum = 1, + maximum = 9, + default = 1, + }, + http_version = { + enum = {1.1, 1.0}, + default = 1.1, + }, + buffers = { + type = "object", + properties = { + number = { + type = "integer", + minimum = 1, + default = 32, + }, + size = { + type = "integer", + minimum = 1, + default = 4096, + } + }, + default = { + number = 32, + size = 4096, + } + }, + vary = { + type = "boolean", + } + }, +} + + +local plugin_name = "gzip" + + +local _M = { + version = 0.1, + priority = 995, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.header_filter(conf, ctx) + if not is_apisix_or then + core.log.error("need to build APISIX-Runtime to support setting gzip") + return 501 + end + + local types = conf.types + local content_type = ngx_header["Content-Type"] + if not content_type then + -- Like Nginx, don't gzip if Content-Type is missing + return + end + + if type(types) == "table" then + local matched = false + local from = core.string.find(content_type, ";") + if from then + content_type = str_sub(content_type, 1, from - 1) + end + + for _, ty in ipairs(types) do + if content_type == ty then + matched = true + break + end + end + + if not matched then + return + end + end + + local content_length = tonumber(ngx_header["Content-Length"]) + if content_length then + local min_length = conf.min_length + if content_length < min_length then + return + end + -- Like Nginx, don't check min_length if Content-Length is missing + end + + local http_version = req_http_version() + if http_version < conf.http_version then + return + end + + local buffers = conf.buffers + + core.log.info("set gzip with buffers: ", buffers.number, " ", buffers.size, + ", level: ", conf.comp_level) + + local ok, err = response.set_gzip({ + buffer_num = buffers.number, + buffer_size = buffers.size, + compress_level = conf.comp_level, + }) + if not ok then + core.log.error("failed to set gzip: ", err) + return + end + + if conf.vary then + core.response.add_header("Vary", "Accept-Encoding") + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/hmac-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/hmac-auth.lua new file mode 100644 index 0000000..30e8db0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/hmac-auth.lua @@ -0,0 +1,372 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ngx = ngx +local abs = math.abs +local ngx_time = ngx.time +local ngx_re = require("ngx.re") +local ipairs = ipairs +local hmac_sha1 = ngx.hmac_sha1 +local core = require("apisix.core") +local hmac = require("resty.hmac") +local consumer = require("apisix.consumer") +local ngx_decode_base64 = ngx.decode_base64 +local ngx_encode_base64 = ngx.encode_base64 +local plugin_name = "hmac-auth" +local ALLOWED_ALGORITHMS = {"hmac-sha1", "hmac-sha256", "hmac-sha512"} +local resty_sha256 = require("resty.sha256") +local schema_def = require("apisix.schema_def") +local auth_utils = require("apisix.utils.auth") + +local schema = { + type = "object", + title = "work with route or service object", + properties = { + allowed_algorithms = { + type = "array", + minItems = 1, + items = { + type = "string", + enum = ALLOWED_ALGORITHMS + }, + default = ALLOWED_ALGORITHMS, + }, + clock_skew = { + type = "integer", + default = 300, + minimum = 1 + }, + signed_headers = { + type = "array", + items = { + type = "string", + minLength = 1, + maxLength = 50, + } + }, + validate_request_body = { + type = "boolean", + title = "A boolean value telling the plugin to enable body validation", + default = false, + }, + hide_credentials = {type = "boolean", default = false}, + anonymous_consumer = schema_def.anonymous_consumer_schema, + }, +} + +local consumer_schema = { + type = "object", + title = "work with consumer object", + properties = { + key_id = {type = "string", minLength = 1, maxLength = 256}, + secret_key = {type = "string", minLength = 1, maxLength = 256}, + }, + encrypt_fields = {"secret_key"}, + required = {"key_id", "secret_key"}, +} + +local _M = { + version = 0.1, + priority = 2530, + type = 'auth', + name = plugin_name, + schema = schema, + consumer_schema = consumer_schema +} + +local hmac_funcs = { + ["hmac-sha1"] = function(secret_key, message) + return hmac_sha1(secret_key, message) + end, + ["hmac-sha256"] = function(secret_key, message) + return hmac:new(secret_key, hmac.ALGOS.SHA256):final(message) + end, + ["hmac-sha512"] = function(secret_key, message) + return hmac:new(secret_key, hmac.ALGOS.SHA512):final(message) + end, +} + + +local function array_to_map(arr) + local map = core.table.new(0, #arr) + for _, v in ipairs(arr) do + map[v] = true + end + + return map +end + + +function _M.check_schema(conf, schema_type) + core.log.info("input conf: ", core.json.delay_encode(conf)) + + if schema_type == core.schema.TYPE_CONSUMER then + return core.schema.check(consumer_schema, conf) + else + return core.schema.check(schema, conf) + end +end + + +local function get_consumer(key_id) + if not key_id then + return nil, "missing key_id" + end + + local cur_consumer, _, err = consumer.find_consumer(plugin_name, "key_id", key_id) + if not cur_consumer then + return nil, err or "Invalid key_id" + end + core.log.info("consumer: ", core.json.delay_encode(consumer, true)) + + return cur_consumer +end + + +local function generate_signature(ctx, secret_key, params) + local uri = ctx.var.request_uri + local request_method = core.request.get_method() + + if uri == "" then + uri = "/" + end + 
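+ -- Build the signing string: keyId first, then one line per signed header as "name: value"; + -- the pseudo-header "@request-target" expands to "<method> <request-uri>", and signed headers + -- absent from the request are otherwise skipped. Items are joined with "\n" plus a trailing "\n" before HMAC. + -- Illustration with hypothetical values: keyId "user-key" signing {"@request-target", "date"} on + -- "GET /index.html" yields "user-key\nGET /index.html\ndate: Mon, 01 Jan 2024 00:00:00 GMT\n".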
+ local signing_string_items = { + params.keyId, + } + + if params.headers then + for _, h in ipairs(params.headers) do + local canonical_header = core.request.header(ctx, h) + if not canonical_header then + if h == "@request-target" then + local request_target = request_method .. " " .. uri + core.table.insert(signing_string_items, request_target) + core.log.info("canonical_header name:", core.json.delay_encode(h)) + core.log.info("canonical_header value: ", + core.json.delay_encode(request_target)) + end + else + core.table.insert(signing_string_items, + h .. ": " .. canonical_header) + core.log.info("canonical_header name:", core.json.delay_encode(h)) + core.log.info("canonical_header value: ", + core.json.delay_encode(canonical_header)) + end + end + end + + local signing_string = core.table.concat(signing_string_items, "\n") .. "\n" + return hmac_funcs[params.algorithm](secret_key, signing_string) +end + + +local function sha256(key) + local hash = resty_sha256:new() + hash:update(key) + local digest = hash:final() + return digest +end + + +local function validate(ctx, conf, params) + if not params then + return nil + end + + if not params.keyId or not params.signature then + return nil, "keyId or signature missing" + end + + if not params.algorithm then + return nil, "algorithm missing" + end + + local consumer, err = get_consumer(params.keyId) + if err then + return nil, err + end + + local consumer_conf = consumer.auth_conf + local found_algorithm = false + -- check supported algorithm used + if not conf.allowed_algorithms then + conf.allowed_algorithms = ALLOWED_ALGORITHMS + end + + for _, algo in ipairs(conf.allowed_algorithms) do + if algo == params.algorithm then + found_algorithm = true + break + end + end + + if not found_algorithm then + return nil, "Invalid algorithm" + end + + core.log.info("clock_skew: ", conf.clock_skew) + if conf.clock_skew and conf.clock_skew > 0 then + if not params.date then + return nil, "Date header missing. failed to validate clock skew" + end + + local time = ngx.parse_http_time(params.date) + core.log.info("params.date: ", params.date, " time: ", time) + if not time then + return nil, "Invalid GMT format time" + end + + local diff = abs(ngx_time() - time) + + if diff > conf.clock_skew then + return nil, "Clock skew exceeded" + end + end + + -- validate headers + -- All headers passed in route conf.signed_headers must be used in signing(params.headers) + if conf.signed_headers and #conf.signed_headers >= 1 then + if not params.headers then + return nil, "headers missing" + end + local params_headers_map = array_to_map(params.headers) + if params_headers_map then + for _, header in ipairs(conf.signed_headers) do + if not params_headers_map[header] then + return nil, [[expected header "]] .. header .. [[" missing in signing]] + end + end + end + end + + local secret_key = consumer_conf and consumer_conf.secret_key + local request_signature = ngx_decode_base64(params.signature) + local generated_signature = generate_signature(ctx, secret_key, params) + if request_signature ~= generated_signature then + return nil, "Invalid signature" + end + + local validate_request_body = conf.validate_request_body + if validate_request_body then + local digest_header = params.body_digest + if not digest_header then + return nil, "Invalid digest" + end + + local req_body, err = core.request.get_body() + if err then + return nil, err + end + + req_body = req_body or "" + local digest_created = "SHA-256" .. "=" .. 
+ ngx_encode_base64(sha256(req_body)) + if digest_created ~= digest_header then + return nil, "Invalid digest" + end + end + + return consumer +end + + +local function retrieve_hmac_fields(ctx) + local hmac_params = {} + local auth_string = core.request.header(ctx, "Authorization") + if not auth_string then + return nil, "missing Authorization header" + end + + if not core.string.has_prefix(auth_string, "Signature") then + return nil, "Authorization header does not start with 'Signature'" + end + + local signature_fields = auth_string:sub(10):gmatch('[^,]+') + + for field in signature_fields do + local key, value = field:match('%s*(%w+)="(.-)"') + if key and value then + if key == "keyId" or key == "algorithm" or key == "signature" then + hmac_params[key] = value + + elseif key == "headers" then + hmac_params.headers = ngx_re.split(value, " ") + end + end + end + + -- will be required to check clock skew + if core.request.header(ctx, "Date") then + hmac_params.date = core.request.header(ctx, "Date") + end + + if core.request.header(ctx, "Digest") then + hmac_params.body_digest = core.request.header(ctx, "Digest") + end + + return hmac_params +end + +local function find_consumer(conf, ctx) + local params,err = retrieve_hmac_fields(ctx) + if err then + if not auth_utils.is_running_under_multi_auth(ctx) then + core.log.warn("client request can't be validated: ", err) + end + return nil, nil, "client request can't be validated: " .. err + end + + local validated_consumer, err = validate(ctx, conf, params) + if not validated_consumer then + err = "client request can't be validated: " .. (err or "Invalid signature") + if auth_utils.is_running_under_multi_auth(ctx) then + return nil, nil, err + end + core.log.warn(err) + return nil, nil, "client request can't be validated" + end + + local consumers_conf = consumer.consumers_conf(plugin_name) + return validated_consumer, consumers_conf, err +end + + +function _M.rewrite(conf, ctx) + local cur_consumer, consumers_conf, err = find_consumer(conf, ctx) + if not cur_consumer then + if not conf.anonymous_consumer then + return 401, { message = err } + end + cur_consumer, consumers_conf, err = consumer.get_anonymous_consumer(conf.anonymous_consumer) + if not cur_consumer then + if auth_utils.is_running_under_multi_auth(ctx) then + return 401, err + end + core.log.error(err) + return 401, { message = "Invalid user authorization" } + end + end + + if conf.hide_credentials then + core.request.set_header("Authorization", nil) + end + + consumer.attach_consumer(ctx, cur_consumer, consumers_conf) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/http-dubbo.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/http-dubbo.lua new file mode 100644 index 0000000..f068654 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/http-dubbo.lua @@ -0,0 +1,262 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local require = require +local core = require("apisix.core") +local pairs = pairs +local str_format = string.format +local bit = require("bit") +local rshift = bit.rshift +local band = bit.band +local char = string.char +local tostring = tostring +local ngx = ngx +local type = type +local plugin_name = "http-dubbo" + + +local schema = { + type = "object", + properties = { + service_name = { + type = "string", + minLength = 1, + }, + service_version = { + type = "string", + pattern = [[^\d+\.\d+\.\d+]], + default ="0.0.0" + }, + method = { + type = "string", + minLength = 1, + }, + params_type_desc = { + type = "string", + default = "" + }, + serialization_header_key = { + type = "string" + }, + serialized = { + type = "boolean", + default = false + }, + connect_timeout={ + type = "number", + default = 6000 + }, + read_timeout={ + type = "number", + default = 6000 + }, + send_timeout={ + type = "number", + default = 6000 + } + }, + required = { "service_name", "method" }, +} + +local _M = { + version = 0.1, + priority = 504, + name = plugin_name, + schema = schema, +} + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function str_int32(int) + return char(band(rshift(int, 24), 0xff), + band(rshift(int, 16), 0xff), + band(rshift(int, 8), 0xff), + band(int, 0xff)) +end + + +local function parse_dubbo_header(header) + for i = 1, 16 do + local currentByte = header:byte(i) + if not currentByte then + return nil + end + end + + local magic_number = str_format("%04x", header:byte(1) * 256 + header:byte(2)) + local message_flag = header:byte(3) + local status = header:byte(4) + local request_id = 0 + for i = 5, 12 do + request_id = request_id * 256 + header:byte(i) + end + + local byte13Val = header:byte(13) * 256 * 256 * 256 + local byte14Val = header:byte(14) * 256 * 256 + local data_length = byte13Val + byte14Val + header:byte(15) * 256 + header:byte(16) + + local is_request = bit.band(bit.rshift(message_flag, 7), 0x01) == 1 and 1 or 0 + local is_two_way = bit.band(bit.rshift(message_flag, 6), 0x01) == 1 and 1 or 0 + local is_event = bit.band(bit.rshift(message_flag, 5), 0x01) == 1 and 1 or 0 + + return { + magic_number = magic_number, + message_flag = message_flag, + is_request = is_request, + is_two_way = is_two_way, + is_event = is_event, + status = status, + request_id = request_id, + data_length = data_length + } +end + + +local function string_to_json_string(str) + local result = "\"" + for i = 1, #str do + local byte = core.string.sub(str, i, i) + if byte == "\\" then + result = result .. "\\\\" + elseif byte == "\n" then + result = result .. "\\n" + elseif byte == "\t" then + result = result .. "\\t" + elseif byte == "\r" then + result = result .. "\\r" + elseif byte == "\b" then + result = result .. "\\b" + elseif byte == "\f" then + result = result .. "\\f" + elseif byte == "\"" then + result = result .. "\\\"" + else + result = result .. byte + end + end + return result .. 
"\"" +end + + +local function get_dubbo_request(conf, ctx) + -- use dubbo and fastjson + local first_byte4 = "\xda\xbb\xc6\x00" + + local requestId = "\x00\x00\x00\x00\x00\x00\x00\x01" + local version = "\"2.0.2\"\n" + local service = "\"" .. conf.service_name .. "\"" .. "\n" + + local service_version = "\"" .. conf.service_version .. "\"" .. "\n" + local method_name = "\"" .. conf.method .. "\"" .. "\n" + + local params_desc = "\"" .. conf.params_type_desc .. "\"" .. "\n" + local params = "" + local serialized = conf.serialized + if conf.serialization_header_key then + local serialization_header = core.request.header(ctx, conf.serialization_header_key) + serialized = serialization_header == "true" + end + if serialized then + params = core.request.get_body() + if params then + local end_of_params = core.string.sub(params, -1) + if end_of_params ~= "\n" then + params = params .. "\n" + end + end + else + local body_data = core.request.get_body() + if body_data then + local lua_object = core.json.decode(body_data); + for _, v in pairs(lua_object) do + local pt = type(v) + if pt == "nil" then + params = params .. "null" .. "\n" + elseif pt == "string" then + params = params .. string_to_json_string(v) .. "\n" + elseif pt == "number" then + params = params .. tostring(v) .. "\n" + else + params = params .. core.json.encode(v) .. "\n" + end + end + end + + end + local attachments = "{}\n" + if params == nil then + params = "" + end + local payload = #version + #service + #service_version + + #method_name + #params_desc + #params + #attachments + return { + first_byte4, + requestId, + str_int32(payload), + version, + service, + service_version, + method_name, + params_desc, + params, + attachments + } +end + + +function _M.before_proxy(conf, ctx) + local sock = ngx.socket.tcp() + + sock:settimeouts(conf.connect_timeout, conf.send_timeout, conf.read_timeout) + local ok, err = sock:connect(ctx.picked_server.host, ctx.picked_server.port) + if not ok then + sock:close() + core.log.error("failed to connect to upstream ", err) + return 502 + end + local request = get_dubbo_request(conf, ctx) + local bytes, _ = sock:send(request) + if bytes > 0 then + local header, _ = sock:receiveany(16); + if header then + local header_info = parse_dubbo_header(header) + if header_info and header_info.status == 20 then + local readline = sock:receiveuntil("\n") + local body_status, _, _ = readline() + if body_status then + local response_status = core.string.sub(body_status, 1, 1) + if response_status == "2" or response_status == "5" then + sock:close() + return 200 + elseif response_status == "1" or response_status == "4" then + local body, _, _ = readline() + sock:close() + return 200, body + end + end + end + end + end + sock:close() + return 500 + +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/http-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/http-logger.lua new file mode 100644 index 0000000..44f84ac --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/http-logger.lua @@ -0,0 +1,223 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local log_util = require("apisix.utils.log-util") +local core = require("apisix.core") +local http = require("resty.http") +local url = require("net.url") + +local tostring = tostring +local ipairs = ipairs + +local plugin_name = "http-logger" +local batch_processor_manager = bp_manager_mod.new("http logger") + +local schema = { + type = "object", + properties = { + uri = core.schema.uri_def, + auth_header = {type = "string"}, + timeout = {type = "integer", minimum = 1, default = 3}, + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + concat_method = {type = "string", default = "json", + enum = {"json", "new_line"}}, + ssl_verify = {type = "boolean", default = false}, + }, + required = {"uri"} +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local _M = { + version = 0.1, + priority = 410, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local check = {"uri"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, err + end + return log_util.check_log_schema(conf) +end + + +local function send_http_data(conf, log_message) + local err_msg + local res = true + local url_decoded = url.parse(conf.uri) + local host = url_decoded.host + local port = url_decoded.port + + core.log.info("sending a batch logs to ", conf.uri) + + if ((not port) and url_decoded.scheme == "https") then + port = 443 + elseif not port then + port = 80 + end + + local httpc = http.new() + httpc:set_timeout(conf.timeout * 1000) + local ok, err = httpc:connect(host, port) + + if not ok then + return false, "failed to connect to host[" .. host .. "] port[" + .. tostring(port) .. "] " .. err + end + + if url_decoded.scheme == "https" then + ok, err = httpc:ssl_handshake(true, host, conf.ssl_verify) + if not ok then + return false, "failed to perform SSL with host[" .. host .. "] " + .. "port[" .. tostring(port) .. "] " .. 
err + end + end + + local content_type + if conf.concat_method == "json" then + content_type = "application/json" + else + content_type = "text/plain" + end + + local httpc_res, httpc_err = httpc:request({ + method = "POST", + path = #url_decoded.path ~= 0 and url_decoded.path or "/", + query = url_decoded.query, + body = log_message, + headers = { + ["Host"] = url_decoded.host, + ["Content-Type"] = content_type, + ["Authorization"] = conf.auth_header + } + }) + + if not httpc_res then + return false, "error while sending data to [" .. host .. "] port[" + .. tostring(port) .. "] " .. httpc_err + end + + -- some error occurred in the server + if httpc_res.status >= 400 then + res = false + err_msg = "server returned status code[" .. httpc_res.status .. "] host[" + .. host .. "] port[" .. tostring(port) .. "] " + .. "body[" .. httpc_res:read_body() .. "]" + end + + return res, err_msg +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + + if not entry.route_id then + entry.route_id = "no-matched" + end + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err + + if conf.concat_method == "json" then + if batch_max_size == 1 then + data, err = core.json.encode(entries[1]) -- encode as single {} + else + data, err = core.json.encode(entries) -- encode as array [{}] + end + + elseif conf.concat_method == "new_line" then + if batch_max_size == 1 then + data, err = core.json.encode(entries[1]) -- encode as single {} + else + local t = core.table.new(#entries, 0) + for i, entry in ipairs(entries) do + t[i], err = core.json.encode(entry) + if err then + core.log.warn("failed to encode http log: ", err, ", log data: ", entry) + break + end + end + data = core.table.concat(t, "\n") -- encode as multiple string + end + + else + -- defensive programming check + err = "unknown concat_method " .. (conf.concat_method or "nil") + end + + if not data then + return false, 'error occurred while encoding the data: ' .. err + end + + return send_http_data(conf, data) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/inspect.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/inspect.lua new file mode 100644 index 0000000..19f50c7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/inspect.lua @@ -0,0 +1,61 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local inspect = require("apisix.inspect") + + +local plugin_name = "inspect" + + +local schema = { + type = "object", + properties = {}, +} + + +local _M = { + version = 0.1, + priority = 200, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf, schema_type) + return core.schema.check(schema, conf) +end + + +function _M.init() + local attr = plugin.plugin_attr(plugin_name) + local delay + local hooks_file + if attr then + delay = attr.delay + hooks_file = attr.hooks_file + end + core.log.info("delay=", delay, ", hooks_file=", hooks_file) + return inspect.init(delay, hooks_file) +end + + +function _M.destroy() + return inspect.destroy() +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction.lua new file mode 100644 index 0000000..b499f2d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction.lua @@ -0,0 +1,26 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local base = require("apisix.plugins.ip-restriction.init") + + +-- avoid unexpected data sharing +local ip_restriction = core.table.clone(base) +ip_restriction.access = base.restrict + + +return ip_restriction diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction/init.lua new file mode 100644 index 0000000..1800024 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ip-restriction/init.lua @@ -0,0 +1,122 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local ipairs = ipairs +local core = require("apisix.core") +local lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) + + +local schema = { + type = "object", + properties = { + message = { + type = "string", + minLength = 1, + maxLength = 1024, + default = "Your IP address is not allowed" + }, + response_code = { + type = "integer", + minimum = 403, + maximum = 404, + default = 403 + }, + whitelist = { + type = "array", + items = {anyOf = core.schema.ip_def}, + minItems = 1 + }, + blacklist = { + type = "array", + items = {anyOf = core.schema.ip_def}, + minItems = 1 + }, + }, + oneOf = { + {required = {"whitelist"}}, + {required = {"blacklist"}}, + }, +} + + +local plugin_name = "ip-restriction" + + +local _M = { + version = 0.1, + priority = 3000, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + + if not ok then + return false, err + end + + -- we still need this as it is too complex to filter out all invalid IPv6 via regex + if conf.whitelist then + for _, cidr in ipairs(conf.whitelist) do + if not core.ip.validate_cidr_or_ip(cidr) then + return false, "invalid ip address: " .. cidr + end + end + end + + if conf.blacklist then + for _, cidr in ipairs(conf.blacklist) do + if not core.ip.validate_cidr_or_ip(cidr) then + return false, "invalid ip address: " .. cidr + end + end + end + + return true +end + + +function _M.restrict(conf, ctx) + local block = false + local remote_addr = ctx.var.remote_addr + + if conf.blacklist then + local matcher = lrucache(conf.blacklist, nil, + core.ip.create_ip_matcher, conf.blacklist) + if matcher then + block = matcher:match(remote_addr) + end + end + + if conf.whitelist then + local matcher = lrucache(conf.whitelist, nil, + core.ip.create_ip_matcher, conf.whitelist) + if matcher then + block = not matcher:match(remote_addr) + end + end + + if block then + return conf.response_code, { message = conf.message } + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/jwe-decrypt.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/jwe-decrypt.lua new file mode 100644 index 0000000..b0d1e16 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/jwe-decrypt.lua @@ -0,0 +1,279 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local consumer_mod = require("apisix.consumer") +local base64 = require("ngx.base64") +local aes = require("resty.aes") +local ngx = ngx +local sub_str = string.sub +local cipher = aes.cipher(256, "gcm") + +local plugin_name = "jwe-decrypt" + +local schema = { + type = "object", + properties = { + header = { + type = "string", + default = "Authorization" + }, + forward_header = { + type = "string", + default = "Authorization" + }, + strict = { + type = "boolean", + default = true + } + }, + required = { "header", "forward_header" }, +} + +local consumer_schema = { + type = "object", + properties = { + key = { type = "string" }, + secret = { type = "string" }, + is_base64_encoded = { type = "boolean" }, + }, + required = { "key", "secret" }, + encrypt_fields = { "key", "secret" }, +} + + +local _M = { + version = 0.1, + priority = 2509, + type = 'auth', + name = plugin_name, + schema = schema, + consumer_schema = consumer_schema +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_CONSUMER then + local ok, err = core.schema.check(consumer_schema, conf) + if not ok then + return false, err + end + + local local_conf, err = core.config.local_conf(true) + if not local_conf then + return false, "failed to load the configuration file: " .. err + end + + local encrypted = core.table.try_read_attr(local_conf, "apisix", "data_encryption", + "enable_encrypt_fields") and (core.config.type == "etcd") + + -- if encrypted, the secret length will exceed 32 so don't check + if not encrypted then + -- restrict the length of secret, we use A256GCM for encryption, + -- so the length should be 32 chars only + if conf.is_base64_encoded then + if #base64.decode_base64url(conf.secret) ~= 32 then + return false, "the secret length after base64 decode should be 32 chars" + end + else + if #conf.secret ~= 32 then + return false, "the secret length should be 32 chars" + end + end + end + + return true + end + return core.schema.check(schema, conf) +end + + +local function get_secret(conf) + local secret = conf.secret + + if conf.is_base64_encoded then + return base64.decode_base64url(secret) + end + + return secret +end + + +local function load_jwe_token(jwe_token) + local o = { valid = false } + o.header, o.enckey, o.iv, o.ciphertext, o.tag = jwe_token:match("(.-)%.(.-)%.(.-)%.(.-)%.(.*)") + if not o.header then + return o + end + local he = base64.decode_base64url(o.header) + if not he then + return o + end + o.header_obj = core.json.decode(he) + if not o.header_obj then + return o + end + o.valid = true + return o +end + + +local function jwe_decrypt_with_obj(o, consumer) + local secret = get_secret(consumer.auth_conf) + local dec = base64.decode_base64url + + local aes_default = aes:new( + secret, + nil, + cipher, + {iv = dec(o.iv)} + ) + + local decrypted = aes_default:decrypt(dec(o.ciphertext), dec(o.tag)) + return decrypted +end + + +local function jwe_encrypt(o, consumer) + local secret = get_secret(consumer.auth_conf) + local enc = base64.encode_base64url + + local aes_default = aes:new( + secret, + nil, + cipher, + {iv = o.iv}) + + local encrypted = aes_default:encrypt(o.plaintext) + + o.ciphertext = encrypted[1] + o.tag = encrypted[2] + return o.header .. ".." .. enc(o.iv) .. "." .. enc(o.ciphertext) .. "." .. 
enc(o.tag) +end + + +local function get_consumer(key) + local consumer_conf = consumer_mod.plugin(plugin_name) + if not consumer_conf then + return nil + end + local consumers = consumer_mod.consumers_kv(plugin_name, consumer_conf, "key") + if not consumers then + return nil + end + core.log.info("consumers: ", core.json.delay_encode(consumers)) + return consumers[key] +end + + +local function fetch_jwe_token(conf, ctx) + local token = core.request.header(ctx, conf.header) + if token then + local prefix = sub_str(token, 1, 7) + if prefix == 'Bearer ' or prefix == 'bearer ' then + return sub_str(token, 8) + end + + return token + end +end + + +function _M.rewrite(conf, ctx) + -- fetch token and hide credentials if necessary + local jwe_token, err = fetch_jwe_token(conf, ctx) + if not jwe_token and conf.strict then + core.log.info("failed to fetch JWE token: ", err) + return 403, { message = "missing JWE token in request" } + end + + local jwe_obj = load_jwe_token(jwe_token) + if not jwe_obj.valid then + return 400, { message = "JWE token invalid" } + end + + if not jwe_obj.header_obj.kid then + return 400, { message = "missing kid in JWE token" } + end + + local consumer = get_consumer(jwe_obj.header_obj.kid) + if not consumer then + return 400, { message = "invalid kid in JWE token" } + end + + local plaintext, err = jwe_decrypt_with_obj(jwe_obj, consumer) + if err ~= nil then + return 400, { message = "failed to decrypt JWE token" } + end + core.request.set_header(ctx, conf.forward_header, plaintext) +end + + +local function gen_token() + local args = core.request.get_uri_args() + if not args or not args.key then + return core.response.exit(400) + end + + local key = args.key + local payload = args.payload + if payload then + payload = ngx.unescape_uri(payload) + end + + local consumer = get_consumer(key) + if not consumer then + return core.response.exit(404) + end + + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + local iv = args.iv + if not iv then + -- TODO: random bytes + iv = "123456789012" + end + + local obj = { + iv = iv, + plaintext = payload, + header_obj = { + kid = key, + alg = "dir", + enc = "A256GCM", + }, + } + obj.header = base64.encode_base64url(core.json.encode(obj.header_obj)) + local jwe_token = jwe_encrypt(obj, consumer) + if jwe_token then + return core.response.exit(200, jwe_token) + end + + return core.response.exit(404) +end + + +function _M.api() + return { + { + methods = { "GET" }, + uri = "/apisix/plugin/jwe/encrypt", + handler = gen_token, + } + } +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/jwt-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/jwt-auth.lua new file mode 100644 index 0000000..b61d82d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/jwt-auth.lua @@ -0,0 +1,331 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local jwt = require("resty.jwt") +local consumer_mod = require("apisix.consumer") +local resty_random = require("resty.random") +local new_tab = require ("table.new") +local auth_utils = require("apisix.utils.auth") + +local ngx_encode_base64 = ngx.encode_base64 +local ngx_decode_base64 = ngx.decode_base64 +local ngx = ngx +local sub_str = string.sub +local table_insert = table.insert +local table_concat = table.concat +local ngx_re_gmatch = ngx.re.gmatch +local plugin_name = "jwt-auth" +local schema_def = require("apisix.schema_def") + + +local schema = { + type = "object", + properties = { + header = { + type = "string", + default = "authorization" + }, + query = { + type = "string", + default = "jwt" + }, + cookie = { + type = "string", + default = "jwt" + }, + hide_credentials = { + type = "boolean", + default = false + }, + key_claim_name = { + type = "string", + default = "key", + minLength = 1, + }, + store_in_ctx = { + type = "boolean", + default = false + }, + anonymous_consumer = schema_def.anonymous_consumer_schema, + }, +} + +local consumer_schema = { + type = "object", + -- can't use additionalProperties with dependencies + properties = { + key = { + type = "string", + minLength = 1, + }, + secret = { + type = "string", + minLength = 1, + }, + algorithm = { + type = "string", + enum = {"HS256", "HS512", "RS256", "ES256"}, + default = "HS256" + }, + exp = {type = "integer", minimum = 1, default = 86400}, + base64_secret = { + type = "boolean", + default = false + }, + lifetime_grace_period = { + type = "integer", + minimum = 0, + default = 0 + } + }, + dependencies = { + algorithm = { + oneOf = { + { + properties = { + algorithm = { + enum = {"HS256", "HS512"}, + default = "HS256" + }, + }, + }, + { + properties = { + public_key = {type = "string"}, + algorithm = { + enum = {"RS256", "ES256"}, + }, + }, + required = {"public_key"}, + }, + } + } + }, + encrypt_fields = {"secret"}, + required = {"key"}, +} + + +local _M = { + version = 0.1, + priority = 2510, + type = 'auth', + name = plugin_name, + schema = schema, + consumer_schema = consumer_schema +} + + +function _M.check_schema(conf, schema_type) + core.log.info("input conf: ", core.json.delay_encode(conf)) + + local ok, err + if schema_type == core.schema.TYPE_CONSUMER then + ok, err = core.schema.check(consumer_schema, conf) + else + return core.schema.check(schema, conf) + end + + if not ok then + return false, err + end + + if conf.algorithm ~= "RS256" and conf.algorithm ~= "ES256" and not conf.secret then + conf.secret = ngx_encode_base64(resty_random.bytes(32, true)) + elseif conf.base64_secret then + if ngx_decode_base64(conf.secret) == nil then + return false, "base64_secret required but the secret is not in base64 format" + end + end + + return true +end + +local function remove_specified_cookie(src, key) + local cookie_key_pattern = "([a-zA-Z0-9-_]*)" + local cookie_val_pattern = "([a-zA-Z0-9-._]*)" + local t = new_tab(1, 0) + + local it, err = ngx_re_gmatch(src, cookie_key_pattern .. "=" .. 
cookie_val_pattern, "jo") + if not it then + core.log.error("match origins failed: ", err) + return src + end + while true do + local m, err = it() + if err then + core.log.error("iterate origins failed: ", err) + return src + end + if not m then + break + end + if m[1] ~= key then + table_insert(t, m[0]) + end + end + + return table_concat(t, "; ") +end + +local function fetch_jwt_token(conf, ctx) + local token = core.request.header(ctx, conf.header) + if token then + if conf.hide_credentials then + -- hide for header + core.request.set_header(ctx, conf.header, nil) + end + + local prefix = sub_str(token, 1, 7) + if prefix == 'Bearer ' or prefix == 'bearer ' then + return sub_str(token, 8) + end + + return token + end + + local uri_args = core.request.get_uri_args(ctx) or {} + token = uri_args[conf.query] + if token then + if conf.hide_credentials then + -- hide for query + uri_args[conf.query] = nil + core.request.set_uri_args(ctx, uri_args) + end + return token + end + + local val = ctx.var["cookie_" .. conf.cookie] + if not val then + return nil, "JWT not found in cookie" + end + + if conf.hide_credentials then + -- hide for cookie + local src = core.request.header(ctx, "Cookie") + local reset_val = remove_specified_cookie(src, conf.cookie) + core.request.set_header(ctx, "Cookie", reset_val) + end + + return val +end + +local function get_secret(conf) + local secret = conf.secret + + if conf.base64_secret then + return ngx_decode_base64(secret) + end + + return secret +end + +local function get_auth_secret(auth_conf) + if not auth_conf.algorithm or auth_conf.algorithm == "HS256" + or auth_conf.algorithm == "HS512" then + return get_secret(auth_conf) + elseif auth_conf.algorithm == "RS256" or auth_conf.algorithm == "ES256" then + return auth_conf.public_key + end +end + +local function find_consumer(conf, ctx) + -- fetch token and hide credentials if necessary + local jwt_token, err = fetch_jwt_token(conf, ctx) + if not jwt_token then + core.log.info("failed to fetch JWT token: ", err) + return nil, nil, "Missing JWT token in request" + end + + local jwt_obj = jwt:load_jwt(jwt_token) + core.log.info("jwt object: ", core.json.delay_encode(jwt_obj)) + if not jwt_obj.valid then + err = "JWT token invalid: " .. jwt_obj.reason + if auth_utils.is_running_under_multi_auth(ctx) then + return nil, nil, err + end + core.log.warn(err) + return nil, nil, "JWT token invalid" + end + + local key_claim_name = conf.key_claim_name + local user_key = jwt_obj.payload and jwt_obj.payload[key_claim_name] + if not user_key then + return nil, nil, "missing user key in JWT token" + end + + local consumer, consumer_conf, err = consumer_mod.find_consumer(plugin_name, "key", user_key) + if not consumer then + core.log.warn("failed to find consumer: ", err or "invalid user key") + return nil, nil, "Invalid user key in JWT token" + end + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + local auth_secret, err = get_auth_secret(consumer.auth_conf) + if not auth_secret then + err = "failed to retrieve secrets, err: " .. 
err + if auth_utils.is_running_under_multi_auth(ctx) then + return nil, nil, err + end + core.log.error(err) + return nil, nil, "failed to verify jwt" + end + local claim_specs = jwt:get_default_validation_options(jwt_obj) + claim_specs.lifetime_grace_period = consumer.auth_conf.lifetime_grace_period + + jwt_obj = jwt:verify_jwt_obj(auth_secret, jwt_obj, claim_specs) + core.log.info("jwt object: ", core.json.delay_encode(jwt_obj)) + + if not jwt_obj.verified then + err = "failed to verify jwt: " .. jwt_obj.reason + if auth_utils.is_running_under_multi_auth(ctx) then + return nil, nil, err + end + core.log.warn(err) + return nil, nil, "failed to verify jwt" + end + + if conf.store_in_ctx then + ctx.jwt_auth_payload = jwt_obj.payload + end + + return consumer, consumer_conf +end + + +function _M.rewrite(conf, ctx) + local consumer, consumer_conf, err = find_consumer(conf, ctx) + if not consumer then + if not conf.anonymous_consumer then + return 401, { message = err } + end + consumer, consumer_conf, err = consumer_mod.get_anonymous_consumer(conf.anonymous_consumer) + if not consumer then + err = "jwt-auth failed to authenticate the request, code: 401. error: " .. err + core.log.error(err) + return 401, { message = "Invalid user authorization"} + end + end + + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + consumer_mod.attach_consumer(ctx, consumer, consumer_conf) + core.log.info("hit jwt-auth rewrite") +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-logger.lua new file mode 100644 index 0000000..75510f5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-logger.lua @@ -0,0 +1,327 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local expr = require("resty.expr.v1") +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local producer = require ("resty.kafka.producer") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local plugin = require("apisix.plugin") + +local math = math +local pairs = pairs +local type = type +local req_read_body = ngx.req.read_body +local plugin_name = "kafka-logger" +local batch_processor_manager = bp_manager_mod.new("kafka logger") + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local schema = { + type = "object", + properties = { + meta_format = { + type = "string", + default = "default", + enum = {"default", "origin"}, + }, + log_format = {type = "object"}, + -- deprecated, use "brokers" instead + broker_list = { + type = "object", + minProperties = 1, + patternProperties = { + [".*"] = { + description = "the port of kafka broker", + type = "integer", + minimum = 1, + maximum = 65535, + }, + }, + }, + brokers = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + host = { + type = "string", + description = "the host of kafka broker", + }, + port = { + type = "integer", + minimum = 1, + maximum = 65535, + description = "the port of kafka broker", + }, + sasl_config = { + type = "object", + description = "sasl config", + properties = { + mechanism = { + type = "string", + default = "PLAIN", + enum = {"PLAIN"}, + }, + user = { type = "string", description = "user" }, + password = { type = "string", description = "password" }, + }, + required = {"user", "password"}, + }, + }, + required = {"host", "port"}, + }, + uniqueItems = true, + }, + kafka_topic = {type = "string"}, + producer_type = { + type = "string", + default = "async", + enum = {"async", "sync"}, + }, + required_acks = { + type = "integer", + default = 1, + enum = { 1, -1 }, + }, + key = {type = "string"}, + timeout = {type = "integer", minimum = 1, default = 3}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + max_req_body_bytes = {type = "integer", minimum = 1, default = 524288}, + max_resp_body_bytes = {type = "integer", minimum = 1, default = 524288}, + -- in lua-resty-kafka, cluster_name is defined as number + -- see https://github.com/doujiang24/lua-resty-kafka#new-1 + cluster_name = {type = "integer", minimum = 1, default = 1}, + -- config for lua-resty-kafka, default value is same as lua-resty-kafka + producer_batch_num = {type = "integer", minimum = 1, default = 200}, + producer_batch_size = {type = "integer", minimum = 0, default = 1048576}, + producer_max_buffering = {type = "integer", minimum = 1, default = 50000}, + producer_time_linger = {type = "integer", minimum = 1, default = 1}, + meta_refresh_interval = {type = "integer", minimum = 1, default = 30}, + }, + oneOf = { + { required = {"broker_list", "kafka_topic"},}, + { required = {"brokers", "kafka_topic"},}, + } +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + }, + max_pending_entries = { + type = "integer", + description = "maximum number of pending entries in the batch processor", + minimum = 1, + }, + }, +} + +local _M = { + version = 0.1, + priority = 403, + name = plugin_name, + schema = 
batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, err + end + return log_util.check_log_schema(conf) +end + + +local function get_partition_id(prod, topic, log_message) + if prod.async then + local ringbuffer = prod.ringbuffer + for i = 1, ringbuffer.size, 3 do + if ringbuffer.queue[i] == topic and + ringbuffer.queue[i+2] == log_message then + return math.floor(i / 3) + end + end + core.log.info("current topic in ringbuffer has no message") + return nil + end + + -- sync mode + local sendbuffer = prod.sendbuffer + if not sendbuffer.topics[topic] then + core.log.info("current topic in sendbuffer has no message") + return nil + end + for i, message in pairs(sendbuffer.topics[topic]) do + if log_message == message.queue[2] then + return i + end + end +end + + +local function create_producer(broker_list, broker_config, cluster_name) + core.log.info("create new kafka producer instance") + return producer:new(broker_list, broker_config, cluster_name) +end + + +local function send_kafka_data(conf, log_message, prod) + local ok, err = prod:send(conf.kafka_topic, conf.key, log_message) + core.log.info("partition_id: ", + core.log.delay_exec(get_partition_id, + prod, conf.kafka_topic, log_message)) + + if not ok then + return false, "failed to send data to Kafka topic: " .. err .. + ", brokers: " .. core.json.encode(conf.broker_list) + end + + return true +end + + +function _M.access(conf, ctx) + if conf.include_req_body then + local should_read_body = true + if conf.include_req_body_expr then + if not conf.request_expr then + local request_expr, err = expr.new(conf.include_req_body_expr) + if not request_expr then + core.log.error('generate request expr err ', err) + return + end + conf.request_expr = request_expr + end + + local result = conf.request_expr:eval(ctx.var) + + if not result then + should_read_body = false + end + end + if should_read_body then + req_read_body() + end + end +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local metadata = plugin.plugin_metadata(plugin_name) + local max_pending_entries = metadata and metadata.value and + metadata.value.max_pending_entries or nil + local entry + if conf.meta_format == "origin" then + entry = log_util.get_req_original(ctx, conf) + -- core.log.info("origin entry: ", entry) + + else + entry = log_util.get_log_entry(plugin_name, conf, ctx) + end + + if batch_processor_manager:add_entry(conf, entry, max_pending_entries) then + return + end + + -- reuse producer via lrucache to avoid unbalanced partitions of messages in kafka + local broker_list = core.table.clone(conf.brokers or {}) + local broker_config = {} + + if conf.broker_list then + for host, port in pairs(conf.broker_list) do + local broker = { + host = host, + port = port + } + core.table.insert(broker_list, broker) + end + end + + broker_config["request_timeout"] = conf.timeout * 1000 + broker_config["producer_type"] = conf.producer_type + broker_config["required_acks"] = conf.required_acks + broker_config["batch_num"] = conf.producer_batch_num + broker_config["batch_size"] = conf.producer_batch_size + broker_config["max_buffering"] = conf.producer_max_buffering + broker_config["flush_time"] = conf.producer_time_linger * 1000 + 
broker_config["refresh_interval"] = conf.meta_refresh_interval * 1000 + + local prod, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, create_producer, + broker_list, broker_config, conf.cluster_name) + core.log.info("kafka cluster name ", conf.cluster_name, ", broker_list[1] port ", + prod.client.broker_list[1].port) + if err then + return nil, "failed to identify the broker specified: " .. err + end + + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err + if batch_max_size == 1 then + data = entries[1] + if type(data) ~= "string" then + data, err = core.json.encode(data) -- encode as single {} + end + else + data, err = core.json.encode(entries) -- encode as array [{}] + end + + if not data then + return false, 'error occurred while encoding the data: ' .. err + end + + core.log.info("send data to kafka: ", data) + + return send_kafka_data(conf, data, prod) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func, max_pending_entries) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-proxy.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-proxy.lua new file mode 100644 index 0000000..0882692 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/kafka-proxy.lua @@ -0,0 +1,62 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") + + +local schema = { + type = "object", + properties = { + sasl = { + type = "object", + properties = { + username = { + type = "string", + }, + password = { + type = "string", + }, + }, + required = {"username", "password"}, + }, + }, + encrypt_fields = {"sasl.password"}, +} + + +local _M = { + version = 0.1, + priority = 508, + name = "kafka-proxy", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + if conf.sasl then + ctx.kafka_consumer_enable_sasl = true + ctx.kafka_consumer_sasl_username = conf.sasl.username + ctx.kafka_consumer_sasl_password = conf.sasl.password + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/key-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/key-auth.lua new file mode 100644 index 0000000..539a489 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/key-auth.lua @@ -0,0 +1,124 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local consumer_mod = require("apisix.consumer") +local plugin_name = "key-auth" +local schema_def = require("apisix.schema_def") + +local schema = { + type = "object", + properties = { + header = { + type = "string", + default = "apikey", + }, + query = { + type = "string", + default = "apikey", + }, + hide_credentials = { + type = "boolean", + default = false, + }, + anonymous_consumer = schema_def.anonymous_consumer_schema, + }, +} + +local consumer_schema = { + type = "object", + properties = { + key = { type = "string" }, + }, + encrypt_fields = {"key"}, + required = {"key"}, +} + + +local _M = { + version = 0.1, + priority = 2500, + type = 'auth', + name = plugin_name, + schema = schema, + consumer_schema = consumer_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_CONSUMER then + return core.schema.check(consumer_schema, conf) + else + return core.schema.check(schema, conf) + end +end + +local function find_consumer(ctx, conf) + local from_header = true + local key = core.request.header(ctx, conf.header) + + if not key then + local uri_args = core.request.get_uri_args(ctx) or {} + key = uri_args[conf.query] + from_header = false + end + + if not key then + return nil, nil, "Missing API key in request" + end + + local consumer, consumer_conf, err = consumer_mod.find_consumer(plugin_name, "key", key) + if not consumer then + core.log.warn("failed to find consumer: ", err or "invalid api key") + return nil, nil, "Invalid API key in request" + end + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + if conf.hide_credentials then + if from_header then + core.request.set_header(ctx, conf.header, nil) + else + local args = core.request.get_uri_args(ctx) + args[conf.query] = nil + core.request.set_uri_args(ctx, args) + end + end + + return consumer, consumer_conf +end + + +function _M.rewrite(conf, ctx) + local consumer, consumer_conf, err = find_consumer(ctx, conf) + if not consumer then + if not conf.anonymous_consumer then + return 401, { message = err} + end + consumer, consumer_conf, err = consumer_mod.get_anonymous_consumer(conf.anonymous_consumer) + if not consumer then + err = "key-auth failed to authenticate the request, code: 401. error: " .. err + core.log.error(err) + return 401, { message = "Invalid user authorization"} + end + end + + core.log.info("consumer: ", core.json.delay_encode(consumer)) + consumer_mod.attach_consumer(ctx, consumer, consumer_conf) + core.log.info("hit key-auth rewrite") +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/lago.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/lago.lua new file mode 100644 index 0000000..3c5b1f1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/lago.lua @@ -0,0 +1,229 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local type = type +local pairs = pairs +local math_random = math.random +local ngx = ngx + +local http = require("resty.http") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local core = require("apisix.core") +local str_format = core.string.format + +local plugin_name = "lago" +local batch_processor_manager = bp_manager_mod.new("lago logger") + +local schema = { + type = "object", + properties = { + -- core configurations + endpoint_addrs = { + type = "array", + minItems = 1, + items = core.schema.uri_def, + description = "Lago API address, like http://127.0.0.1:3000, " + .. "it supports both self-hosted and cloud. If multiple endpoints are" + .. " configured, the log will be pushed to a randomly determined" + .. " endpoint from the list.", + }, + endpoint_uri = { + type = "string", + minLength = 1, + default = "/api/v1/events/batch", + description = "Lago API endpoint, it needs to be set to the batch send endpoint.", + }, + token = { + type = "string", + description = "Lago API key, create one for your organization on dashboard." + }, + event_transaction_id = { + type = "string", + description = "Event's transaction ID, it is used to identify and de-duplicate" + .. " the event, it supports string templates containing APISIX and" + .. " NGINX variables, like \"req_${request_id}\", which allows you" + .. " to use values returned by upstream services or request-id" + .. " plugin integration", + }, + event_subscription_id = { + type = "string", + description = "Event's subscription ID, which is automatically generated or" + .. " specified by you when you assign the plan to the customer on" + .. " Lago, used to associate API consumption to a customer subscription," + .. " it supports string templates containing APISIX and NGINX variables," + .. " like \"cus_${consumer_name}\", which allows you to use values" + .. " returned by upstream services or APISIX consumer", + }, + event_code = { + type = "string", + description = "Lago billable metric's code for associating an event to a specified" + .. " billable item", + }, + event_properties = { + type = "object", + patternProperties = { + [".*"] = { + type = "string", + minLength = 1, + }, + }, + description = "Event's properties, used to attach information to an event, this" + .. " allows you to send certain information on an event to Lago, such" + .. " as sending HTTP status to take a failed request off the bill, or" + .. " sending the AI token consumption in the response body for accurate" + .. " billing, its keys are fixed strings and its values can be string" + .. 
" templates containing APISIX and NGINX variables, like \"${status}\"" + }, + + -- connection layer configurations + ssl_verify = {type = "boolean", default = true}, + timeout = { + type = "integer", + minimum = 1, + maximum = 60000, + default = 3000, + description = "timeout in milliseconds", + }, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = { + type = "integer", + minimum = 1000, + default = 60000, + description = "keepalive timeout in milliseconds", + }, + keepalive_pool = {type = "integer", minimum = 1, default = 5}, + }, + required = {"endpoint_addrs", "token", "event_transaction_id", "event_subscription_id", + "event_code"}, + encrypt_fields = {"token"}, +} +schema = batch_processor_manager:wrap_schema(schema) + +-- According to https://getlago.com/docs/api-reference/events/batch, the maximum batch size is 100, +-- so we have to override the default batch size to make it work out of the box. The plugin does +-- not set a maximum limit, so if Lago relaxes the limit, the user can modify it +-- to a larger batch size. +-- This does not affect other plugins; the schema is appended after a deep copy. +schema.properties.batch_max_size.default = 100 + + +local _M = { + version = 0.1, + priority = 415, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf, schema_type) + local check = {"endpoint_addrs"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + + return core.schema.check(schema, conf) +end + + +local function send_http_data(conf, data) + local body, err = core.json.encode(data) + if not body then + return false, str_format("failed to encode json: %s", err) + end + local params = { + headers = { + ["Content-Type"] = "application/json", + ["Authorization"] = "Bearer " .. 
conf.token, + }, + keepalive = conf.keepalive, + ssl_verify = conf.ssl_verify, + method = "POST", + body = body, + } + + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + end + + local httpc, err = http.new() + if not httpc then + return false, str_format("create http client error: %s", err) + end + httpc:set_timeout(conf.timeout) + + -- select a random endpoint and build the URL + local endpoint_url = conf.endpoint_addrs[math_random(#conf.endpoint_addrs)]..conf.endpoint_uri + local res, err = httpc:request_uri(endpoint_url, params) + if not res then + return false, err + end + + if res.status >= 300 then + return false, str_format("lago api returned status: %d, body: %s", + res.status, res.body or "") + end + + return true +end + + +function _M.log(conf, ctx) + -- build usage event + local event_transaction_id, err = core.utils.resolve_var(conf.event_transaction_id, ctx.var) + if err then + core.log.error("failed to resolve event_transaction_id, event dropped: ", err) + return + end + + local event_subscription_id, err = core.utils.resolve_var(conf.event_subscription_id, ctx.var) + if err then + core.log.error("failed to resolve event_subscription_id, event dropped: ", err) + return + end + + local entry = { + transaction_id = event_transaction_id, + external_subscription_id = event_subscription_id, + code = conf.event_code, + timestamp = ngx.req.start_time(), + } + + if conf.event_properties and type(conf.event_properties) == "table" then + entry.properties = core.table.deepcopy(conf.event_properties) + for key, value in pairs(entry.properties) do + local new_val, err, n_resolved = core.utils.resolve_var(value, ctx.var) + if not err and n_resolved > 0 then + entry.properties[key] = new_val + end + end + end + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- generate a function to be executed by the batch processor + local func = function(entries) + return send_http_data(conf, { + events = entries, + }) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ldap-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ldap-auth.lua new file mode 100644 index 0000000..592d2d5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ldap-auth.lua @@ -0,0 +1,160 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local ngx = ngx +local ngx_re = require("ngx.re") +local consumer_mod = require("apisix.consumer") +local ldap = require("resty.ldap") + +local schema = { + type = "object", + title = "work with route or service object", + properties = { + base_dn = { type = "string" }, + ldap_uri = { type = "string" }, + use_tls = { type = "boolean", default = false }, + tls_verify = { type = "boolean", default = false }, + uid = { type = "string", default = "cn" } + }, + required = {"base_dn","ldap_uri"}, +} + +local consumer_schema = { + type = "object", + title = "work with consumer object", + properties = { + user_dn = { type = "string" }, + }, + required = {"user_dn"}, +} + +local plugin_name = "ldap-auth" + + +local _M = { + version = 0.1, + priority = 2540, + type = 'auth', + name = plugin_name, + schema = schema, + consumer_schema = consumer_schema +} + +function _M.check_schema(conf, schema_type) + local ok, err + if schema_type == core.schema.TYPE_CONSUMER then + ok, err = core.schema.check(consumer_schema, conf) + else + core.utils.check_tls_bool({"use_tls", "tls_verify"}, conf, plugin_name) + ok, err = core.schema.check(schema, conf) + end + + return ok, err +end + +local function extract_auth_header(authorization) + local obj = { username = "", password = "" } + + local m, err = ngx.re.match(authorization, "Basic\\s(.+)", "jo") + if err then + -- error authorization + return nil, err + end + + if not m then + return nil, "Invalid authorization header format" + end + + local decoded = ngx.decode_base64(m[1]) + + if not decoded then + return nil, "Failed to decode authentication header: " .. m[1] + end + + local res + res, err = ngx_re.split(decoded, ":") + if err then + return nil, "Split authorization err:" .. err + end + if #res < 2 then + return nil, "Split authorization err: invalid decoded data: " .. decoded + end + + obj.username = ngx.re.gsub(res[1], "\\s+", "", "jo") + obj.password = ngx.re.gsub(res[2], "\\s+", "", "jo") + + return obj, nil +end + +function _M.rewrite(conf, ctx) + core.log.info("plugin rewrite phase, conf: ", core.json.delay_encode(conf)) + + -- 1. extract authorization from header + local auth_header = core.request.header(ctx, "Authorization") + if not auth_header then + core.response.set_header("WWW-Authenticate", "Basic realm='.'") + return 401, { message = "Missing authorization in request" } + end + + local user, err = extract_auth_header(auth_header) + if err or not user then + if err then + core.log.warn(err) + else + core.log.warn("nil user") + end + return 401, { message = "Invalid authorization in request" } + end + + -- 2. try authenticate the user against the ldap server + local ldap_host, ldap_port = core.utils.parse_addr(conf.ldap_uri) + local ldapconf = { + timeout = 10000, + start_tls = false, + ldap_host = ldap_host, + ldap_port = ldap_port or 389, + ldaps = conf.use_tls, + tls_verify = conf.tls_verify, + base_dn = conf.base_dn, + attribute = conf.uid, + keepalive = 60000, + } + local res, err = ldap.ldap_authenticate(user.username, user.password, ldapconf) + if not res then + core.log.warn("ldap-auth failed: ", err) + return 401, { message = "Invalid user authorization" } + end + + local user_dn = conf.uid .. "=" .. user.username .. "," .. conf.base_dn + + -- 3. 
Retrieve consumer for authorization plugin + local consumer_conf = consumer_mod.plugin(plugin_name) + if not consumer_conf then + return 401, { message = "Missing related consumer" } + end + + local consumers = consumer_mod.consumers_kv(plugin_name, consumer_conf, "user_dn") + local consumer = consumers[user_dn] + if not consumer then + return 401, {message = "Invalid user authorization"} + end + consumer_mod.attach_consumer(ctx, consumer, consumer_conf) + + core.log.info("hit basic-auth access") +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn.lua new file mode 100644 index 0000000..31a2919 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn.lua @@ -0,0 +1,94 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local limit_conn = require("apisix.plugins.limit-conn.init") +local redis_schema = require("apisix.utils.redis-schema") +local policy_to_additional_properties = redis_schema.schema +local plugin_name = "limit-conn" + + + +local schema = { + type = "object", + properties = { + conn = {type = "integer", exclusiveMinimum = 0}, -- limit.conn max + burst = {type = "integer", minimum = 0}, + default_conn_delay = {type = "number", exclusiveMinimum = 0}, + only_use_default_delay = {type = "boolean", default = false}, + key = {type = "string"}, + key_type = {type = "string", + enum = {"var", "var_combination"}, + default = "var", + }, + policy = { + type = "string", + enum = {"redis", "redis-cluster", "local"}, + default = "local", + }, + rejected_code = { + type = "integer", minimum = 200, maximum = 599, default = 503 + }, + rejected_msg = { + type = "string", minLength = 1 + }, + allow_degradation = {type = "boolean", default = false} + }, + required = {"conn", "burst", "default_conn_delay", "key"}, + ["if"] = { + properties = { + policy = { + enum = {"redis"}, + }, + }, + }, + ["then"] = policy_to_additional_properties.redis, + ["else"] = { + ["if"] = { + properties = { + policy = { + enum = {"redis-cluster"}, + }, + }, + }, + ["then"] = policy_to_additional_properties["redis-cluster"], + } +} + +local _M = { + version = 0.1, + priority = 1003, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + return limit_conn.increase(conf, ctx) +end + + +function _M.log(conf, ctx) + return limit_conn.decrease(conf, ctx) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/init.lua new file mode 100644 index 0000000..d7401df --- /dev/null +++ 
b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/init.lua @@ -0,0 +1,171 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local limit_conn_new = require("resty.limit.conn").new +local core = require("apisix.core") +local is_http = ngx.config.subsystem == "http" +local sleep = core.sleep +local shdict_name = "plugin-limit-conn" +if ngx.config.subsystem == "stream" then + shdict_name = shdict_name .. "-stream" +end + +local redis_single_new +local redis_cluster_new +do + local redis_src = "apisix.plugins.limit-conn.limit-conn-redis" + redis_single_new = require(redis_src).new + + local cluster_src = "apisix.plugins.limit-conn.limit-conn-redis-cluster" + redis_cluster_new = require(cluster_src).new +end + + +local lrucache = core.lrucache.new({ + type = "plugin", +}) +local _M = {} + + +local function create_limit_obj(conf) + if conf.policy == "local" then + core.log.info("create new limit-conn plugin instance") + return limit_conn_new(shdict_name, conf.conn, conf.burst, + conf.default_conn_delay) + elseif conf.policy == "redis" then + + core.log.info("create new limit-conn redis plugin instance") + + return redis_single_new("plugin-limit-conn", conf, conf.conn, conf.burst, + conf.default_conn_delay) + + elseif conf.policy == "redis-cluster" then + + core.log.info("create new limit-conn redis-cluster plugin instance") + + return redis_cluster_new("plugin-limit-conn", conf, conf.conn, conf.burst, + conf.default_conn_delay) + else + return nil, "policy enum not match" + end +end + + +function _M.increase(conf, ctx) + core.log.info("ver: ", ctx.conf_version) + local lim, err = lrucache(conf, nil, create_limit_obj, conf) + if not lim then + core.log.error("failed to instantiate a resty.limit.conn object: ", err) + if conf.allow_degradation then + return + end + return 500 + end + + local conf_key = conf.key + local key + if conf.key_type == "var_combination" then + local err, n_resolved + key, err, n_resolved = core.utils.resolve_var(conf_key, ctx.var) + if err then + core.log.error("could not resolve vars in ", conf_key, " error: ", err) + end + + if n_resolved == 0 then + key = nil + end + else + key = ctx.var[conf_key] + end + + if key == nil then + core.log.info("The value of the configured key is empty, use client IP instead") + -- When the value of key is empty, use client IP instead + key = ctx.var["remote_addr"] + end + + key = key .. ctx.conf_type .. 
ctx.conf_version + core.log.info("limit key: ", key) + + local delay, err = lim:incoming(key, true) + if not delay then + if err == "rejected" then + if conf.rejected_msg then + return conf.rejected_code, { error_msg = conf.rejected_msg } + end + return conf.rejected_code or 503 + end + + core.log.error("failed to limit conn: ", err) + if conf.allow_degradation then + return + end + return 500 + end + + if lim:is_committed() then + if not ctx.limit_conn then + ctx.limit_conn = core.tablepool.fetch("plugin#limit-conn", 0, 6) + end + + core.table.insert_tail(ctx.limit_conn, lim, key, delay, conf.only_use_default_delay) + end + + if delay >= 0.001 then + sleep(delay) + end +end + + +function _M.decrease(conf, ctx) + local limit_conn = ctx.limit_conn + if not limit_conn then + return + end + + for i = 1, #limit_conn, 4 do + local lim = limit_conn[i] + local key = limit_conn[i + 1] + local delay = limit_conn[i + 2] + local use_delay = limit_conn[i + 3] + + local latency + if is_http then + if not use_delay then + if ctx.proxy_passed then + latency = ctx.var.upstream_response_time + else + latency = ctx.var.request_time - delay + end + end + end + core.log.debug("request latency is ", latency) -- for test + + local conn, err = lim:leaving(key, latency) + if not conn then + core.log.error("failed to record the connection leaving request: ", + err) + break + end + end + + core.tablepool.release("plugin#limit-conn", limit_conn) + ctx.limit_conn = nil + return +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis-cluster.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis-cluster.lua new file mode 100644 index 0000000..9e46a04 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis-cluster.lua @@ -0,0 +1,78 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local redis_cluster = require("apisix.utils.rediscluster") +local core = require("apisix.core") +local util = require("apisix.plugins.limit-conn.util") +local setmetatable = setmetatable +local ngx_timer_at = ngx.timer.at + +local _M = {version = 0.1} + + +local mt = { + __index = _M +} + + +function _M.new(plugin_name, conf, max, burst, default_conn_delay) + + local red_cli, err = redis_cluster.new(conf, "plugin-limit-conn-redis-cluster-slot-lock") + if not red_cli then + return nil, err + end + local self = { + conf = conf, + plugin_name = plugin_name, + burst = burst, + max = max + 0, -- just to ensure the param is good + unit_delay = default_conn_delay, + red_cli = red_cli, + } + return setmetatable(self, mt) +end + + +function _M.incoming(self, key, commit) + return util.incoming(self, self.red_cli, key, commit) +end + + +function _M.is_committed(self) + return self.committed +end + + +local function leaving_thread(premature, self, key, req_latency) + return util.leaving(self, self.red_cli, key, req_latency) +end + + +function _M.leaving(self, key, req_latency) + -- log_by_lua can't use cosocket + local ok, err = ngx_timer_at(0, leaving_thread, self, key, req_latency) + if not ok then + core.log.error("failed to create timer: ", err) + return nil, err + end + + return ok + +end + + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis.lua new file mode 100644 index 0000000..4de7a27 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/limit-conn-redis.lua @@ -0,0 +1,85 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local redis = require("apisix.utils.redis") +local core = require("apisix.core") +local util = require("apisix.plugins.limit-conn.util") +local ngx_timer_at = ngx.timer.at + +local setmetatable = setmetatable + + +local _M = {version = 0.1} + + +local mt = { + __index = _M +} + +function _M.new(plugin_name, conf, max, burst, default_conn_delay) + + local self = { + conf = conf, + plugin_name = plugin_name, + burst = burst, + max = max + 0, -- just to ensure the param is good + unit_delay = default_conn_delay, + } + return setmetatable(self, mt) +end + + +function _M.incoming(self, key, commit) + local conf = self.conf + local red, err = redis.new(conf) + if not red then + return red, err + end + return util.incoming(self, red, key, commit) +end + + +function _M.is_committed(self) + return self.committed +end + + +local function leaving_thread(premature, self, key, req_latency) + + local conf = self.conf + local red, err = redis.new(conf) + if not red then + return red, err + end + return util.leaving(self, red, key, req_latency) +end + + +function _M.leaving(self, key, req_latency) + -- log_by_lua can't use cosocket + local ok, err = ngx_timer_at(0, leaving_thread, self, key, req_latency) + if not ok then + core.log.error("failed to create timer: ", err) + return nil, err + end + + return ok + +end + + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/util.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/util.lua new file mode 100644 index 0000000..f3ba5bd --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-conn/util.lua @@ -0,0 +1,81 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local assert = assert +local math = require "math" +local floor = math.floor +local _M = {version = 0.3} + + +function _M.incoming(self, red, key, commit) + local max = self.max + self.committed = false + key = "limit_conn" .. ":" .. key + + local conn, err + if commit then + conn, err = red:incrby(key, 1) + if not conn then + return nil, err + end + + if conn > max + self.burst then + conn, err = red:incrby(key, -1) + if not conn then + return nil, err + end + return nil, "rejected" + end + self.committed = true + + else + local conn_from_red, err = red:get(key) + if err then + return nil, err + end + conn = (conn_from_red or 0) + 1 + end + + if conn > max then + -- make the excessive connections wait + return self.unit_delay * floor((conn - 1) / max), conn + end + + -- we return a 0 delay by default + return 0, conn +end + + +function _M.leaving(self, red, key, req_latency) + assert(key) + key = "limit_conn" .. ":" .. 
key + + local conn, err = red:incrby(key, -1) + if not conn then + return nil, err + end + + if req_latency then + local unit_delay = self.unit_delay + self.unit_delay = (req_latency + unit_delay) / 2 + end + + return conn +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count.lua new file mode 100644 index 0000000..1472a6d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count.lua @@ -0,0 +1,51 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local fetch_secrets = require("apisix.secret").fetch_secrets +local limit_count = require("apisix.plugins.limit-count.init") +local workflow = require("apisix.plugins.workflow") + +local plugin_name = "limit-count" +local _M = { + version = 0.5, + priority = 1002, + name = plugin_name, + schema = limit_count.schema, + metadata_schema = limit_count.metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + return limit_count.check_schema(conf, schema_type) +end + + +function _M.access(conf, ctx) + conf = fetch_secrets(conf, true, conf, "") + return limit_count.rate_limit(conf, ctx, plugin_name, 1) +end + +function _M.workflow_handler() + workflow.register(plugin_name, + function (conf, ctx) + return limit_count.rate_limit(conf, ctx, plugin_name, 1) + end, + function (conf) + return limit_count.check_schema(conf) + end) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/init.lua new file mode 100644 index 0000000..1f37965 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/init.lua @@ -0,0 +1,332 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local apisix_plugin = require("apisix.plugin") +local tab_insert = table.insert +local ipairs = ipairs +local pairs = pairs +local redis_schema = require("apisix.utils.redis-schema") +local policy_to_additional_properties = redis_schema.schema +local get_phase = ngx.get_phase + +local limit_redis_cluster_new +local limit_redis_new +local limit_local_new +do + local local_src = "apisix.plugins.limit-count.limit-count-local" + limit_local_new = require(local_src).new + + local redis_src = "apisix.plugins.limit-count.limit-count-redis" + limit_redis_new = require(redis_src).new + + local cluster_src = "apisix.plugins.limit-count.limit-count-redis-cluster" + limit_redis_cluster_new = require(cluster_src).new +end +local lrucache = core.lrucache.new({ + type = 'plugin', serial_creating = true, +}) +local group_conf_lru = core.lrucache.new({ + type = 'plugin', +}) + +local metadata_defaults = { + limit_header = "X-RateLimit-Limit", + remaining_header = "X-RateLimit-Remaining", + reset_header = "X-RateLimit-Reset", +} + +local metadata_schema = { + type = "object", + properties = { + limit_header = { + type = "string", + default = metadata_defaults.limit_header, + }, + remaining_header = { + type = "string", + default = metadata_defaults.remaining_header, + }, + reset_header = { + type = "string", + default = metadata_defaults.reset_header, + }, + }, +} + +local schema = { + type = "object", + properties = { + count = {type = "integer", exclusiveMinimum = 0}, + time_window = {type = "integer", exclusiveMinimum = 0}, + group = {type = "string"}, + key = {type = "string", default = "remote_addr"}, + key_type = {type = "string", + enum = {"var", "var_combination", "constant"}, + default = "var", + }, + rejected_code = { + type = "integer", minimum = 200, maximum = 599, default = 503 + }, + rejected_msg = { + type = "string", minLength = 1 + }, + policy = { + type = "string", + enum = {"local", "redis", "redis-cluster"}, + default = "local", + }, + allow_degradation = {type = "boolean", default = false}, + show_limit_quota_header = {type = "boolean", default = true} + }, + required = {"count", "time_window"}, + ["if"] = { + properties = { + policy = { + enum = {"redis"}, + }, + }, + }, + ["then"] = policy_to_additional_properties.redis, + ["else"] = { + ["if"] = { + properties = { + policy = { + enum = {"redis-cluster"}, + }, + }, + }, + ["then"] = policy_to_additional_properties["redis-cluster"], + } +} + +local schema_copy = core.table.deepcopy(schema) + +local _M = { + schema = schema, + metadata_schema = metadata_schema, +} + + +local function group_conf(conf) + return conf +end + + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + if conf.group then + -- means that call by some plugin not support + if conf._vid then + return false, "group is not supported" + end + + local fields = {} + -- When the goup field is configured, + -- we will use schema_copy to get the whitelist of properties, + -- so that we can avoid getting injected properties. 
+ for k in pairs(schema_copy.properties) do + tab_insert(fields, k) + end + local extra = policy_to_additional_properties[conf.policy] + if extra then + for k in pairs(extra.properties) do + tab_insert(fields, k) + end + end + + local prev_conf = group_conf_lru(conf.group, "", group_conf, conf) + + for _, field in ipairs(fields) do + if not core.table.deep_eq(prev_conf[field], conf[field]) then + core.log.error("previous limit-conn group ", prev_conf.group, + " conf: ", core.json.encode(prev_conf)) + core.log.error("current limit-conn group ", conf.group, + " conf: ", core.json.encode(conf)) + return false, "group conf mismatched" + end + end + end + + return true +end + + +local function create_limit_obj(conf, plugin_name) + core.log.info("create new " .. plugin_name .. " plugin instance") + + if not conf.policy or conf.policy == "local" then + return limit_local_new("plugin-" .. plugin_name, conf.count, + conf.time_window) + end + + if conf.policy == "redis" then + return limit_redis_new("plugin-" .. plugin_name, + conf.count, conf.time_window, conf) + end + + if conf.policy == "redis-cluster" then + return limit_redis_cluster_new("plugin-" .. plugin_name, conf.count, + conf.time_window, conf) + end + + return nil +end + + +local function gen_limit_key(conf, ctx, key) + if conf.group then + return conf.group .. ':' .. key + end + + -- here we add a separator ':' to mark the boundary of the prefix and the key itself + -- Here we use plugin-level conf version to prevent the counter from being resetting + -- because of the change elsewhere. + -- A route which reuses a previous route's ID will inherits its counter. + local conf_type = ctx.conf_type_without_consumer or ctx.conf_type + local conf_id = ctx.conf_id_without_consumer or ctx.conf_id + local new_key = conf_type .. conf_id .. ':' .. apisix_plugin.conf_version(conf) + .. ':' .. key + if conf._vid then + -- conf has _vid means it's from workflow plugin, add _vid to the key + -- so that the counter is unique per action. + return new_key .. ':' .. conf._vid + end + + return new_key +end + + +local function gen_limit_obj(conf, ctx, plugin_name) + if conf.group then + return lrucache(conf.group, "", create_limit_obj, conf, plugin_name) + end + + local extra_key + if conf._vid then + extra_key = conf.policy .. '#' .. 
conf._vid + else + extra_key = conf.policy + end + + return core.lrucache.plugin_ctx(lrucache, ctx, extra_key, create_limit_obj, conf, plugin_name) +end + +function _M.rate_limit(conf, ctx, name, cost, dry_run) + core.log.info("ver: ", ctx.conf_version) + core.log.info("conf: ", core.json.delay_encode(conf, true)) + + local lim, err = gen_limit_obj(conf, ctx, name) + + if not lim then + core.log.error("failed to fetch limit.count object: ", err) + if conf.allow_degradation then + return + end + return 500 + end + + local conf_key = conf.key + local key + if conf.key_type == "var_combination" then + local err, n_resolved + key, err, n_resolved = core.utils.resolve_var(conf_key, ctx.var) + if err then + core.log.error("could not resolve vars in ", conf_key, " error: ", err) + end + + if n_resolved == 0 then + key = nil + end + elseif conf.key_type == "constant" then + key = conf_key + else + key = ctx.var[conf_key] + end + + if key == nil then + core.log.info("The value of the configured key is empty, use client IP instead") + -- When the value of key is empty, use client IP instead + key = ctx.var["remote_addr"] + end + + key = gen_limit_key(conf, ctx, key) + core.log.info("limit key: ", key) + + local delay, remaining, reset + if not conf.policy or conf.policy == "local" then + delay, remaining, reset = lim:incoming(key, not dry_run, conf, cost) + else + delay, remaining, reset = lim:incoming(key, cost) + end + + local metadata = apisix_plugin.plugin_metadata("limit-count") + if metadata then + metadata = metadata.value + else + metadata = metadata_defaults + end + core.log.info("limit-count plugin-metadata: ", core.json.delay_encode(metadata)) + + local set_limit_headers = { + limit_header = conf.limit_header or metadata.limit_header, + remaining_header = conf.remaining_header or metadata.remaining_header, + reset_header = conf.reset_header or metadata.reset_header, + } + local phase = get_phase() + local set_header = phase ~= "log" + + if not delay then + local err = remaining + if err == "rejected" then + -- show count limit header when rejected + if conf.show_limit_quota_header and set_header then + core.response.set_header(set_limit_headers.limit_header, conf.count, + set_limit_headers.remaining_header, 0, + set_limit_headers.reset_header, reset) + end + + if conf.rejected_msg then + return conf.rejected_code, { error_msg = conf.rejected_msg } + end + return conf.rejected_code + end + + core.log.error("failed to limit count: ", err) + if conf.allow_degradation then + return + end + return 500, {error_msg = "failed to limit count"} + end + + if conf.show_limit_quota_header and set_header then + core.response.set_header(set_limit_headers.limit_header, conf.count, + set_limit_headers.remaining_header, remaining, + set_limit_headers.reset_header, reset) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-local.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-local.lua new file mode 100644 index 0000000..b6f319a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-local.lua @@ -0,0 +1,79 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local limit_count = require("resty.limit.count") + +local ngx = ngx +local ngx_time = ngx.time +local assert = assert +local setmetatable = setmetatable +local core = require("apisix.core") + +local _M = {} + +local mt = { + __index = _M +} + +local function set_endtime(self, key, time_window) + -- set an end time + local end_time = ngx_time() + time_window + -- save to dict by key + local success, err = self.dict:set(key, end_time, time_window) + + if not success then + core.log.error("dict set key ", key, " error: ", err) + end + + local reset = time_window + return reset +end + +local function read_reset(self, key) + -- read from dict + local end_time = (self.dict:get(key) or 0) + local reset = end_time - ngx_time() + if reset < 0 then + reset = 0 + end + return reset +end + +function _M.new(plugin_name, limit, window) + assert(limit > 0 and window > 0) + + local self = { + limit_count = limit_count.new(plugin_name, limit, window), + dict = ngx.shared[plugin_name .. "-reset-header"] + } + + return setmetatable(self, mt) +end + +function _M.incoming(self, key, commit, conf, cost) + local delay, remaining = self.limit_count:incoming(key, commit, cost) + local reset + + if remaining == conf.count - cost then + reset = set_endtime(self, key, conf.time_window) + else + reset = read_reset(self, key) + end + + return delay, remaining, reset +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis-cluster.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis-cluster.lua new file mode 100644 index 0000000..be7029b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis-cluster.lua @@ -0,0 +1,83 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local redis_cluster = require("apisix.utils.rediscluster") +local core = require("apisix.core") +local setmetatable = setmetatable +local tostring = tostring + +local _M = {} + + +local mt = { + __index = _M +} + + +local script = core.string.compress_script([=[ + assert(tonumber(ARGV[3]) >= 1, "cost must be at least 1") + local ttl = redis.call('ttl', KEYS[1]) + if ttl < 0 then + redis.call('set', KEYS[1], ARGV[1] - ARGV[3], 'EX', ARGV[2]) + return {ARGV[1] - ARGV[3], ARGV[2]} + end + return {redis.call('incrby', KEYS[1], 0 - ARGV[3]), ttl} +]=]) + + +function _M.new(plugin_name, limit, window, conf) + local red_cli, err = redis_cluster.new(conf, "plugin-limit-count-redis-cluster-slot-lock") + if not red_cli then + return nil, err + end + + local self = { + limit = limit, + window = window, + conf = conf, + plugin_name = plugin_name, + red_cli = red_cli, + } + + return setmetatable(self, mt) +end + + +function _M.incoming(self, key, cost) + local red = self.red_cli + local limit = self.limit + local window = self.window + key = self.plugin_name .. tostring(key) + + local ttl = 0 + local res, err = red:eval(script, 1, key, limit, window, cost or 1) + + if err then + return nil, err, ttl + end + + local remaining = res[1] + ttl = res[2] + + if remaining < 0 then + return nil, "rejected", ttl + end + return 0, remaining, ttl +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis.lua new file mode 100644 index 0000000..c40ed43 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-count/limit-count-redis.lua @@ -0,0 +1,89 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local redis = require("apisix.utils.redis") +local core = require("apisix.core") +local assert = assert +local setmetatable = setmetatable +local tostring = tostring + + +local _M = {version = 0.3} + + +local mt = { + __index = _M +} + + +local script = core.string.compress_script([=[ + assert(tonumber(ARGV[3]) >= 1, "cost must be at least 1") + local ttl = redis.call('ttl', KEYS[1]) + if ttl < 0 then + redis.call('set', KEYS[1], ARGV[1] - ARGV[3], 'EX', ARGV[2]) + return {ARGV[1] - ARGV[3], ARGV[2]} + end + return {redis.call('incrby', KEYS[1], 0 - ARGV[3]), ttl} +]=]) + + +function _M.new(plugin_name, limit, window, conf) + assert(limit > 0 and window > 0) + + local self = { + limit = limit, + window = window, + conf = conf, + plugin_name = plugin_name, + } + return setmetatable(self, mt) +end + +function _M.incoming(self, key, cost) + local conf = self.conf + local red, err = redis.new(conf) + if not red then + return red, err, 0 + end + + local limit = self.limit + local window = self.window + local res + key = self.plugin_name .. tostring(key) + + local ttl = 0 + res, err = red:eval(script, 1, key, limit, window, cost or 1) + + if err then + return nil, err, ttl + end + + local remaining = res[1] + ttl = res[2] + + local ok, err = red:set_keepalive(10000, 100) + if not ok then + return nil, err, ttl + end + + if remaining < 0 then + return nil, "rejected", ttl + end + return 0, remaining, ttl +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req.lua new file mode 100644 index 0000000..641eed4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req.lua @@ -0,0 +1,183 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local limit_req_new = require("resty.limit.req").new +local core = require("apisix.core") +local redis_schema = require("apisix.utils.redis-schema") +local policy_to_additional_properties = redis_schema.schema +local plugin_name = "limit-req" +local sleep = core.sleep + +local redis_single_new +local redis_cluster_new +do + local redis_src = "apisix.plugins.limit-req.limit-req-redis" + redis_single_new = require(redis_src).new + + local cluster_src = "apisix.plugins.limit-req.limit-req-redis-cluster" + redis_cluster_new = require(cluster_src).new +end + + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + + +local schema = { + type = "object", + properties = { + rate = {type = "number", exclusiveMinimum = 0}, + burst = {type = "number", minimum = 0}, + key = {type = "string"}, + key_type = {type = "string", + enum = {"var", "var_combination"}, + default = "var", + }, + policy = { + type = "string", + enum = {"redis", "redis-cluster", "local"}, + default = "local", + }, + rejected_code = { + type = "integer", minimum = 200, maximum = 599, default = 503 + }, + rejected_msg = { + type = "string", minLength = 1 + }, + nodelay = { + type = "boolean", default = false + }, + allow_degradation = {type = "boolean", default = false} + }, + required = {"rate", "burst", "key"}, + ["if"] = { + properties = { + policy = { + enum = {"redis"}, + }, + }, + }, + ["then"] = policy_to_additional_properties.redis, + ["else"] = { + ["if"] = { + properties = { + policy = { + enum = {"redis-cluster"}, + }, + }, + }, + ["then"] = policy_to_additional_properties["redis-cluster"], + } +} + + +local _M = { + version = 0.1, + priority = 1001, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +local function create_limit_obj(conf) + if conf.policy == "local" then + core.log.info("create new limit-req plugin instance") + return limit_req_new("plugin-limit-req", conf.rate, conf.burst) + + elseif conf.policy == "redis" then + core.log.info("create new limit-req redis plugin instance") + return redis_single_new("plugin-limit-req", conf, conf.rate, conf.burst) + + elseif conf.policy == "redis-cluster" then + core.log.info("create new limit-req redis-cluster plugin instance") + return redis_cluster_new("plugin-limit-req", conf, conf.rate, conf.burst) + + else + return nil, "policy enum not match" + end +end + + +function _M.access(conf, ctx) + local lim, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, + create_limit_obj, conf) + if not lim then + core.log.error("failed to instantiate a resty.limit.req object: ", err) + if conf.allow_degradation then + return + end + return 500 + end + + local conf_key = conf.key + local key + if conf.key_type == "var_combination" then + local err, n_resolved + key, err, n_resolved = core.utils.resolve_var(conf_key, ctx.var) + if err then + core.log.error("could not resolve vars in ", conf_key, " error: ", err) + end + + if n_resolved == 0 then + key = nil + end + + else + key = ctx.var[conf_key] + end + + if key == nil then + core.log.info("The value of the configured key is empty, use client IP instead") + -- When the value of key is empty, use client IP instead + key = ctx.var["remote_addr"] + end + + key = key .. ctx.conf_type .. 
ctx.conf_version + core.log.info("limit key: ", key) + + local delay, err = lim:incoming(key, true) + if not delay then + if err == "rejected" then + if conf.rejected_msg then + return conf.rejected_code, { error_msg = conf.rejected_msg } + end + return conf.rejected_code + end + + core.log.error("failed to limit req: ", err) + if conf.allow_degradation then + return + end + return 500 + end + + if delay >= 0.001 and not conf.nodelay then + sleep(delay) + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis-cluster.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis-cluster.lua new file mode 100644 index 0000000..21ae635 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis-cluster.lua @@ -0,0 +1,50 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local redis_cluster = require("apisix.utils.rediscluster") +local setmetatable = setmetatable +local util = require("apisix.plugins.limit-req.util") + +local _M = {version = 0.1} + + +local mt = { + __index = _M +} + + +function _M.new(plugin_name, conf, rate, burst) + local red_cli, err = redis_cluster.new(conf, "plugin-limit-req-redis-cluster-slot-lock") + if not red_cli then + return nil, err + end + local self = { + conf = conf, + plugin_name = plugin_name, + burst = burst * 1000, + rate = rate * 1000, + red_cli = red_cli, + } + return setmetatable(self, mt) +end + + +function _M.incoming(self, key, commit) + return util.incoming(self, self.red_cli, key, commit) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis.lua new file mode 100644 index 0000000..e097800 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/limit-req-redis.lua @@ -0,0 +1,54 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
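-- A typical limit-req configuration, expressed as the Lua table that check_schema()
-- and access() above receive (equivalent to the JSON sent to the Admin API under
-- "plugins": {"limit-req": {...}}); the values are illustrative:
--
--   local conf = {
--       rate = 5,                 -- steady rate, requests per second
--       burst = 10,               -- excess requests tolerated before rejection
--       key = "remote_addr",      -- with key_type "var", the name of an APISIX/NGINX variable
--       key_type = "var",
--       policy = "local",         -- or "redis" / "redis-cluster" for state shared across nodes
--       rejected_code = 503,
--       nodelay = false,
--   }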
+-- +local redis = require("apisix.utils.redis") +local setmetatable = setmetatable +local util = require("apisix.plugins.limit-req.util") + +local setmetatable = setmetatable + + +local _M = {version = 0.1} + + +local mt = { + __index = _M +} + + +function _M.new(plugin_name, conf, rate, burst) + local self = { + conf = conf, + plugin_name = plugin_name, + burst = burst * 1000, + rate = rate * 1000, + } + return setmetatable(self, mt) +end + + +function _M.incoming(self, key, commit) + local conf = self.conf + local red, err = redis.new(conf) + if not red then + return red, err + end + + return util.incoming(self, red, key, commit) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/util.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/util.lua new file mode 100644 index 0000000..282c04c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/limit-req/util.lua @@ -0,0 +1,78 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local math = require "math" +local abs = math.abs +local max = math.max +local ngx_now = ngx.now +local ngx_null = ngx.null +local tonumber = tonumber + + +local _M = {version = 0.1} + + +-- the "commit" argument controls whether should we record the event in shm. +function _M.incoming(self, red, key, commit) + local rate = self.rate + local now = ngx_now() * 1000 + + key = "limit_req" .. ":" .. key + local excess_key = key .. "excess" + local last_key = key .. "last" + + local excess, err = red:get(excess_key) + if err then + return nil, err + end + local last, err = red:get(last_key) + if err then + return nil, err + end + + if excess ~= ngx_null and last ~= ngx_null then + excess = tonumber(excess) + last = tonumber(last) + local elapsed = now - last + excess = max(excess - rate * abs(elapsed) / 1000 + 1000, 0) + + if excess > self.burst then + return nil, "rejected" + end + else + excess = 0 + end + + if commit then + local ok + local err + ok, err = red:set(excess_key, excess) + if not ok then + return nil, err + end + + ok, err = red:set(last_key, now) + if not ok then + return nil, err + end + end + + -- return the delay in seconds, as well as excess + return excess / rate, excess / 1000 +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/log-rotate.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/log-rotate.lua new file mode 100644 index 0000000..4b0f327 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/log-rotate.lua @@ -0,0 +1,327 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
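-- In util.incoming() above, rate and burst are stored in thousandths of a request
-- (the redis constructors multiply both by 1000), so each arrival adds 1000 to
-- `excess` while elapsed time drains it at `rate` per second:
--
--     excess = max(excess - rate * abs(elapsed_ms) / 1000 + 1000, 0)
--
-- If excess exceeds burst the request is rejected; otherwise the returned delay is
-- excess / rate seconds. For example, with rate = 2000 (2 req/s) and burst = 4000,
-- a third request arriving at effectively the same instant sees excess = 2000 and is
-- delayed about one second rather than rejected.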
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local timers = require("apisix.timers") +local plugin = require("apisix.plugin") +local process = require("ngx.process") +local signal = require("resty.signal") +local shell = require("resty.shell") +local ipairs = ipairs +local ngx = ngx +local ngx_time = ngx.time +local ngx_update_time = ngx.update_time +local lfs = require("lfs") +local type = type +local io_open = io.open +local os_date = os.date +local os_remove = os.remove +local os_rename = os.rename +local str_sub = string.sub +local str_format = string.format +local str_byte = string.byte +local ngx_sleep = require("apisix.core.utils").sleep +local string_rfind = require("pl.stringx").rfind +local local_conf + + +local plugin_name = "log-rotate" +local INTERVAL = 60 * 60 -- rotate interval (unit: second) +local MAX_KEPT = 24 * 7 -- max number of log files will be kept +local MAX_SIZE = -1 -- max size of file will be rotated +local COMPRESSION_FILE_SUFFIX = ".tar.gz" -- compression file suffix +local rotate_time +local default_logs +local enable_compression = false +local DEFAULT_ACCESS_LOG_FILENAME = "access.log" +local DEFAULT_ERROR_LOG_FILENAME = "error.log" +local SLASH_BYTE = str_byte("/") + +local schema = { + type = "object", + properties = {}, +} + + +local _M = { + version = 0.1, + priority = 100, + name = plugin_name, + schema = schema, + scope = "global", +} + + +local function file_exists(path) + local file = io_open(path, "r") + if file then + file:close() + end + return file ~= nil +end + + +local function get_log_path_info(file_type) + local_conf = core.config.local_conf() + local conf_path + if file_type == "error.log" then + conf_path = local_conf and local_conf.nginx_config and + local_conf.nginx_config.error_log + else + conf_path = local_conf and local_conf.nginx_config and + local_conf.nginx_config.http and + local_conf.nginx_config.http.access_log + end + + local prefix = ngx.config.prefix() + + if conf_path then + -- relative path + if str_byte(conf_path) ~= SLASH_BYTE then + conf_path = prefix .. conf_path + end + local n = string_rfind(conf_path, "/") + if n ~= nil and n ~= #conf_path then + local dir = str_sub(conf_path, 1, n) + local name = str_sub(conf_path, n + 1) + return dir, name + end + end + + return prefix .. "logs/", file_type +end + + +local function tab_sort_comp(a, b) + return a > b +end + + +local function scan_log_folder(log_file_name) + local t = {} + + local log_dir, log_name = get_log_path_info(log_file_name) + + local compression_log_type = log_name .. 
COMPRESSION_FILE_SUFFIX + for file in lfs.dir(log_dir) do + local n = string_rfind(file, "__") + if n ~= nil then + local log_type = file:sub(n + 2) + if log_type == log_name or log_type == compression_log_type then + core.table.insert(t, file) + end + end + end + + core.table.sort(t, tab_sort_comp) + return t, log_dir +end + + +local function rename_file(log, date_str) + local new_file + if not log.new_file then + core.log.warn(log.type, " is off") + return + end + + new_file = str_format(log.new_file, date_str) + if file_exists(new_file) then + core.log.info("file exist: ", new_file) + return new_file + end + + local ok, err = os_rename(log.file, new_file) + if not ok then + core.log.error("move file from ", log.file, " to ", new_file, + " res:", ok, " msg:", err) + return + end + + return new_file +end + + +local function compression_file(new_file, timeout) + if not new_file or type(new_file) ~= "string" then + core.log.info("compression file: ", new_file, " invalid") + return + end + + local n = string_rfind(new_file, "/") + local new_filepath = str_sub(new_file, 1, n) + local new_filename = str_sub(new_file, n + 1) + local com_filename = new_filename .. COMPRESSION_FILE_SUFFIX + local cmd = str_format("cd %s && tar -zcf %s %s", new_filepath, + com_filename, new_filename) + core.log.info("log file compress command: " .. cmd) + + local ok, stdout, stderr, reason, status = shell.run(cmd, nil, timeout, nil) + if not ok then + core.log.error("compress log file from ", new_filename, " to ", com_filename, + " fail, stdout: ", stdout, " stderr: ", stderr, " reason: ", reason, + " status: ", status) + return + end + + ok, stderr = os_remove(new_file) + if stderr then + core.log.error("remove uncompressed log file: ", new_file, + " fail, err: ", stderr, " res:", ok) + end +end + + +local function init_default_logs(logs_info, log_type) + local filepath, filename = get_log_path_info(log_type) + logs_info[log_type] = { type = log_type } + if filename ~= "off" then + logs_info[log_type].file = filepath .. filename + logs_info[log_type].new_file = filepath .. "/%s__" .. filename + end +end + + +local function file_size(file) + local attr = lfs.attributes(file) + if attr then + return attr.size + end + return 0 +end + + +local function rotate_file(files, now_time, max_kept, timeout) + if core.table.isempty(files) then + return + end + + local new_files = core.table.new(2, 0) + -- rename the log files + for _, file in ipairs(files) do + local now_date = os_date("%Y-%m-%d_%H-%M-%S", now_time) + local new_file = rename_file(default_logs[file], now_date) + if not new_file then + return + end + + core.table.insert(new_files, new_file) + end + + -- send signal to reopen log files + local pid = process.get_master_pid() + core.log.warn("send USR1 signal to master process [", pid, "] for reopening log file") + local ok, err = signal.kill(pid, signal.signum("USR1")) + if not ok then + core.log.error("failed to send USR1 signal for reopening log file: ", err) + end + + if enable_compression then + -- Waiting for nginx reopen files + -- to avoid losing logs during compression + ngx_sleep(0.5) + + for _, new_file in ipairs(new_files) do + compression_file(new_file, timeout) + end + end + + for _, file in ipairs(files) do + -- clean the oldest file + local log_list, log_dir = scan_log_folder(file) + for i = max_kept + 1, #log_list do + local path = log_dir .. 
log_list[i] + local ok, err = os_remove(path) + if err then + core.log.error("remove old log file: ", path, " err: ", err, " res:", ok) + end + end + end +end + + +local function rotate() + local interval = INTERVAL + local max_kept = MAX_KEPT + local max_size = MAX_SIZE + local attr = plugin.plugin_attr(plugin_name) + local timeout = 10000 -- default timeout 10 seconds + if attr then + interval = attr.interval or interval + max_kept = attr.max_kept or max_kept + max_size = attr.max_size or max_size + timeout = attr.timeout or timeout + enable_compression = attr.enable_compression or enable_compression + end + + core.log.info("rotate interval:", interval) + core.log.info("rotate max keep:", max_kept) + core.log.info("rotate max size:", max_size) + core.log.info("rotate timeout:", timeout) + + if not default_logs then + -- first init default log filepath and filename + default_logs = {} + init_default_logs(default_logs, DEFAULT_ACCESS_LOG_FILENAME) + init_default_logs(default_logs, DEFAULT_ERROR_LOG_FILENAME) + end + + ngx_update_time() + local now_time = ngx_time() + if not rotate_time then + -- first init rotate time + rotate_time = now_time + interval - (now_time % interval) + core.log.info("first init rotate time is: ", rotate_time) + return + end + + if now_time >= rotate_time then + local files = {DEFAULT_ACCESS_LOG_FILENAME, DEFAULT_ERROR_LOG_FILENAME} + rotate_file(files, now_time, max_kept, timeout) + + -- reset rotate time + rotate_time = rotate_time + interval + + elseif max_size > 0 then + local access_log_file_size = file_size(default_logs[DEFAULT_ACCESS_LOG_FILENAME].file) + local error_log_file_size = file_size(default_logs[DEFAULT_ERROR_LOG_FILENAME].file) + local files = core.table.new(2, 0) + + if access_log_file_size >= max_size then + core.table.insert(files, DEFAULT_ACCESS_LOG_FILENAME) + end + + if error_log_file_size >= max_size then + core.table.insert(files, DEFAULT_ERROR_LOG_FILENAME) + end + + rotate_file(files, now_time, max_kept, timeout) + end +end + + +function _M.init() + timers.register_timer("plugin#log-rotate", rotate, true) +end + + +function _M.destroy() + timers.unregister_timer("plugin#log-rotate", true) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/loggly.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/loggly.lua new file mode 100644 index 0000000..16dc9b4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/loggly.lua @@ -0,0 +1,351 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
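-- The log-rotate plugin above reads its settings from plugin_attr rather than from a
-- route. A sketch of the corresponding config.yaml fragment (values illustrative; the
-- defaults in the code are interval 3600 s, max_kept 168, size-based rotation and
-- compression disabled):
--
--   plugin_attr:
--     log-rotate:
--       interval: 3600             # rotate every hour
--       max_kept: 168              # keep one week of hourly files
--       max_size: 104857600        # also rotate once a file exceeds ~100 MB
--       enable_compression: true   # tar -zcf the rotated file before cleanup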
+-- +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local log_util = require("apisix.utils.log-util") +local path = require("pl.path") +local http = require("resty.http") +local ngx = ngx +local tostring = tostring +local pairs = pairs +local tab_concat = table.concat +local udp = ngx.socket.udp + +local plugin_name = "loggly" +local batch_processor_manager = bp_manager_mod.new(plugin_name) + + +local severity = { + EMEGR = 0, -- system is unusable + ALERT = 1, -- action must be taken immediately + CRIT = 2, -- critical conditions + ERR = 3, -- error conditions + WARNING = 4, -- warning conditions + NOTICE = 5, -- normal but significant condition + INFO = 6, -- informational + DEBUG = 7, -- debug-level messages +} + + +local severity_enums = {} +do + for k, _ in pairs(severity) do + severity_enums[#severity_enums+1] = k + severity_enums[#severity_enums+1] = k:lower() + end +end + + +local schema = { + type = "object", + properties = { + customer_token = {type = "string"}, + severity = { + type = "string", + default = "INFO", + enum = severity_enums, + description = "base severity log level", + }, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + tags = { + type = "array", + minItems = 1, + items = { + type = "string", + -- we prevent of having `tag=` prefix + pattern = "^(?!tag=)[ -~]*", + }, + default = {"apisix"} + }, + ssl_verify = { + -- applicable for https protocol + type = "boolean", + default = true + }, + log_format = {type = "object"}, + severity_map = { + type = "object", + description = "upstream response code vs syslog severity mapping", + patternProperties = { + ["^[1-5][0-9]{2}$"] = { + description = "keys are HTTP status code, values are severity", + type = "string", + enum = severity_enums + }, + }, + additionalProperties = false + } + }, + required = {"customer_token"} +} + + +local defaults = { + host = "logs-01.loggly.com", + port = 514, + protocol = "syslog", + timeout = 5000 +} + + +local metadata_schema = { + type = "object", + properties = { + host = { + type = "string", + default = defaults.host + }, + port = { + type = "integer", + default = defaults.port + }, + protocol = { + type = "string", + default = defaults.protocol, + -- in case of http and https, we use bulk endpoints + enum = {"syslog", "http", "https"} + }, + timeout = { + type = "integer", + minimum = 1, + default= defaults.timeout + }, + log_format = { + type = "object", + } + } +} + + +local _M = { + version = 0.1, + priority = 411, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, err + end + + if conf.severity_map then + local cache = {} + for k, v in pairs(conf.severity_map) do + cache[k] = severity[v:upper()] + end + conf._severity_cache = cache + end + return log_util.check_log_schema(conf) +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +local function generate_log_message(conf, ctx) + 
local entry = log_util.get_log_entry(plugin_name, conf, ctx) + local json_str, err = core.json.encode(entry) + if not json_str then + core.log.error('error occurred while encoding the data: ', err) + return nil + end + + local metadata = plugin.plugin_metadata(plugin_name) + if metadata and metadata.value.protocol ~= "syslog" then + return json_str + end + + -- generate rfc5424 compliant syslog event + local timestamp = log_util.get_rfc3339_zulu_timestamp() + local taglist = {} + if conf.tags then + for i = 1, #conf.tags do + core.table.insert(taglist, "tag=\"" .. conf.tags[i] .. "\"") + end + end + + local message_severity = severity[conf.severity:upper()] + if conf._severity_cache and conf._severity_cache[tostring(ngx.status)] then + message_severity = conf._severity_cache[tostring(ngx.status)] + end + + local message = { + -- facility LOG_USER - random user level message + "<".. tostring(8 + message_severity) .. ">1",-- 1 + timestamp, -- timestamp + ctx.var.host or "-", -- hostname + "apisix", -- appname + ctx.var.pid, -- proc-id + "-", -- msgid + "[" .. conf.customer_token .. "@41058 " .. tab_concat(taglist, " ") .. "]", + json_str + } + + return tab_concat(message, " ") +end + + +local function send_data_over_udp(message, metadata) + local err_msg + local res = true + local sock = udp() + local host, port = metadata.value.host, metadata.value.port + sock:settimeout(metadata.value.timeout) + + local ok, err = sock:setpeername(host, port) + + if not ok then + core.log.error("failed to send log: ", err) + return false, "failed to connect to UDP server: host[" .. host + .. "] port[" .. tostring(port) .. "] err: " .. err + end + + ok, err = sock:send(message) + if not ok then + res = false + core.log.error("failed to send log: ", err) + err_msg = "failed to send data to UDP server: host[" .. host + .. "] port[" .. tostring(port) .. "] err:" .. err + end + + ok, err = sock:close() + if not ok then + core.log.error("failed to close the UDP connection, host[", + host, "] port[", port, "] ", err) + end + + return res, err_msg +end + + +local function send_bulk_over_http(message, metadata, conf) + local endpoint = path.join(metadata.value.host, "bulk", conf.customer_token, "tag", "bulk") + local has_prefix = core.string.has_prefix(metadata.value.host, "http") + if not has_prefix then + if metadata.value.protocol == "http" then + endpoint = "http://" .. endpoint + else + endpoint = "https://" .. endpoint + end + end + + local httpc = http.new() + httpc:set_timeout(metadata.value.timeout) + local res, err = httpc:request_uri(endpoint, { + ssl_verify = conf.ssl_verify, + method = "POST", + body = message, + headers = { + ["Content-Type"] = "application/json", + ["X-LOGGLY-TAG"] = conf.tags + }, + }) + + if not res then + return false, "failed to write log to loggly, " .. err + end + + if res.status ~= 200 then + local body = core.json.decode(res.body) + if not body then + return false, "failed to send log to loggly, http status code: " .. res.status + else + return false, "failed to send log to loggly, http status code: " .. res.status + .. " response body: ".. 
res.body + end + end + + return true +end + + +local handle_http_payload + +local function handle_log(entries) + local metadata = plugin.plugin_metadata(plugin_name) + core.log.info("metadata: ", core.json.delay_encode(metadata)) + + if not metadata then + core.log.info("received nil metadata: using metadata defaults: ", + core.json.delay_encode(defaults, true)) + metadata = {} + metadata.value = defaults + end + core.log.info("sending a batch logs to ", metadata.value.host) + + if metadata.value.protocol == "syslog" then + for i = 1, #entries do + local ok, err = send_data_over_udp(entries[i], metadata) + if not ok then + return false, err, i + end + end + else + return handle_http_payload(entries, metadata) + end + + return true +end + + +function _M.log(conf, ctx) + local log_data = generate_log_message(conf, ctx) + if not log_data then + return + end + + handle_http_payload = function (entries, metadata) + -- loggly bulk endpoint expects entries concatenated in newline("\n") + local message = tab_concat(entries, "\n") + return send_bulk_over_http(message, metadata, conf) + end + + if batch_processor_manager:add_entry(conf, log_data) then + return + end + + batch_processor_manager:add_entry_to_new_processor(conf, log_data, ctx, handle_log) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/loki-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/loki-logger.lua new file mode 100644 index 0000000..6ff5311 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/loki-logger.lua @@ -0,0 +1,251 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
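-- For the loggly plugin above, only customer_token is required on the route; transport
-- details live in plugin metadata and default to syslog over UDP to
-- logs-01.loggly.com:514. An illustrative per-route configuration as a Lua table:
--
--   local conf = {
--       customer_token = "xxxx-xxxx",   -- placeholder token
--       severity = "INFO",
--       tags = { "apisix", "gateway" },
--       batch_max_size = 1,             -- from the wrapped batch-processor schema
--   }
--
-- Switching the metadata protocol to "http" or "https" makes the plugin POST batched
-- JSON to Loggly's bulk endpoint instead of sending rfc5424 syslog datagrams.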
+-- + +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local log_util = require("apisix.utils.log-util") +local core = require("apisix.core") +local http = require("resty.http") +local new_tab = require("table.new") + +local pairs = pairs +local ipairs = ipairs +local tostring = tostring +local math_random = math.random +local table_insert = table.insert +local ngx = ngx +local str_format = core.string.format + +local plugin_name = "loki-logger" +local batch_processor_manager = bp_manager_mod.new("loki logger") + +local schema = { + type = "object", + properties = { + -- core configurations + endpoint_addrs = { + type = "array", + minItems = 1, + items = core.schema.uri_def, + }, + endpoint_uri = { + type = "string", + minLength = 1, + default = "/loki/api/v1/push" + }, + tenant_id = {type = "string", default = "fake"}, + headers = { + type = "object", + patternProperties = { + [".*"] = { + type = "string", + minLength = 1, + }, + }, + }, + log_labels = { + type = "object", + patternProperties = { + [".*"] = { + type = "string", + minLength = 1, + }, + }, + default = { + job = "apisix", + }, + }, + + -- connection layer configurations + ssl_verify = {type = "boolean", default = false}, + timeout = { + type = "integer", + minimum = 1, + maximum = 60000, + default = 3000, + description = "timeout in milliseconds", + }, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = { + type = "integer", + minimum = 1000, + default = 60000, + description = "keepalive timeout in milliseconds", + }, + keepalive_pool = {type = "integer", minimum = 1, default = 5}, + + -- logger related configurations + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + required = {"endpoint_addrs"} +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local _M = { + version = 0.1, + priority = 414, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local check = {"endpoint_addrs"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, err + end + return log_util.check_log_schema(conf) +end + + +local function send_http_data(conf, log) + local headers = conf.headers or {} + headers = core.table.clone(headers) + headers["X-Scope-OrgID"] = conf.tenant_id + headers["Content-Type"] = "application/json" + + local params = { + headers = headers, + keepalive = conf.keepalive, + ssl_verify = conf.ssl_verify, + method = "POST", + body = core.json.encode(log) + } + + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + end + + local httpc, err = http.new() + if not httpc then + return false, str_format("create http client error: %s", err) + end + httpc:set_timeout(conf.timeout) + + -- select an random endpoint and build URL + local endpoint_url = 
conf.endpoint_addrs[math_random(#conf.endpoint_addrs)] .. conf.endpoint_uri + local res, err = httpc:request_uri(endpoint_url, params) + if not res then + return false, err + end + + if res.status >= 300 then + return false, str_format("loki server returned status: %d, body: %s", + res.status, res.body or "") + end + + return true +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + + if not entry.route_id then + entry.route_id = "no-matched" + end + + -- insert start time as log time, multiply to nanoseconds + -- use string concat to circumvent 64bit integers that LuaVM cannot handle + -- that is, first process the decimal part of the millisecond value + -- and then add 6 zeros by string concatenation + entry.loki_log_time = tostring(ngx.req.start_time() * 1000) .. "000000" + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + local labels = conf.log_labels + + -- parsing possible variables in label value + for key, value in pairs(labels) do + local new_val, err, n_resolved = core.utils.resolve_var(value, ctx.var) + if not err and n_resolved > 0 then + labels[key] = new_val + end + end + + -- generate a function to be executed by the batch processor + local func = function(entries) + -- build loki request data + local data = { + streams = { + { + stream = labels, + values = new_tab(1, 0), + } + } + } + + -- add all entries to the batch + for _, entry in ipairs(entries) do + local log_time = entry.loki_log_time + entry.loki_log_time = nil -- clean logger internal field + + table_insert(data.streams[1].values, { + log_time, core.json.encode(entry) + }) + end + + return send_http_data(conf, data) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp-bridge.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp-bridge.lua new file mode 100644 index 0000000..a73d943 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp-bridge.lua @@ -0,0 +1,173 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
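-- A minimal loki-logger configuration matching the schema above; only endpoint_addrs
-- is required, and label values may reference APISIX variables, which are resolved per
-- batch (values illustrative):
--
--   local conf = {
--       endpoint_addrs = { "http://127.0.0.1:3100" },
--       -- endpoint_uri defaults to "/loki/api/v1/push", tenant_id to "fake"
--       log_labels = { job = "apisix", route = "$route_name" },
--   }
--
-- Each entry carries ngx.req.start_time() in nanoseconds, built by string
-- concatenation so the value survives LuaVM's lack of native 64-bit integers.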
+-- +local unpack = unpack +local ngx = ngx +local thread_spawn = ngx.thread.spawn +local thread_kill = ngx.thread.kill +local worker_exiting = ngx.worker.exiting +local resty_signal = require("resty.signal") +local core = require("apisix.core") +local pipe = require("ngx.pipe") + +local mcp_server_wrapper = require("apisix.plugins.mcp.server_wrapper") + +local schema = { + type = "object", + properties = { + base_uri = { + type = "string", + minLength = 1, + default = "", + }, + command = { + type = "string", + minLength = 1, + }, + args = { + type = "array", + items = { + type = "string", + }, + minItems = 0, + }, + }, + required = { + "command" + }, +} + +local plugin_name = "mcp-bridge" + +local _M = { + version = 0.1, + priority = 510, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf, schema_type) + return core.schema.check(schema, conf) +end + + +local function on_connect(conf, ctx) + return function(additional) + local proc, err = pipe.spawn({conf.command, unpack(conf.args or {})}) + if not proc then + core.log.error("failed to spawn mcp process: ", err) + return 500 + end + proc:set_timeouts(nil, 100, 100) + ctx.mcp_bridge_proc = proc + + local server = additional.server + + -- ngx_pipe is a yield operation, so we no longer need + -- to explicitly yield to other threads by ngx_sleep + ctx.mcp_bridge_proc_event_loop = thread_spawn(function () + local stdout_partial, stderr_partial, need_exit + while not worker_exiting() do + -- read all the messages in stdout's pipe, line by line + -- if there is an incomplete message it is buffered and + -- spliced before the next message + repeat + local line, _ + line, _, stdout_partial = proc:stdout_read_line() + if line then + local ok, err = server.transport:send( + stdout_partial and stdout_partial .. line or line + ) + if not ok then + core.log.info("session ", server.session_id, + " exit, failed to send response message: ", err) + need_exit = true + break + end + stdout_partial = nil -- luacheck: ignore + end + until not line + if need_exit then + break + end + + repeat + local line, _ + line, _, stderr_partial = proc:stderr_read_line() + if line then + local ok, err = server.transport:send( + '{"jsonrpc":"2.0","method":"notifications/stderr","params":{"content":"' + .. (stderr_partial and stderr_partial .. line or line) .. '"}}') + if not ok then + core.log.info("session ", server.session_id, + " exit, failed to send response message: ", err) + need_exit = true + break + end + stderr_partial = "" -- luacheck: ignore + end + until not line + if need_exit then + break + end + end + end) + end +end + + +local function on_client_message(conf, ctx) + return function(message, additional) + core.log.info("session ", additional.server.session_id, + " send message to mcp server: ", additional.raw) + ctx.mcp_bridge_proc:write(additional.raw .. 
"\n") + end +end + + +local function on_disconnect(conf, ctx) + return function() + if ctx.mcp_bridge_proc_event_loop then + thread_kill(ctx.mcp_bridge_proc_event_loop) + ctx.mcp_bridge_proc_event_loop = nil + end + + local proc = ctx.mcp_bridge_proc + if proc then + proc:shutdown("stdin") + proc:wait() + local _, err = proc:wait() -- check if process not exited then kill it + if err ~= "exited" then + proc:kill(resty_signal.signum("KILL") or 9) + end + end + end +end + + +function _M.access(conf, ctx) + return mcp_server_wrapper.access(conf, ctx, { + event_handler = { + on_connect = on_connect(conf, ctx), + on_client_message = on_client_message(conf, ctx), + on_disconnect = on_disconnect(conf, ctx), + }, + }) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/shared_dict.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/shared_dict.lua new file mode 100644 index 0000000..83e3d86 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/shared_dict.lua @@ -0,0 +1,90 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local type = type +local setmetatable = setmetatable +local ngx = ngx +local ngx_sleep = ngx.sleep +local thread_spawn = ngx.thread.spawn +local thread_kill = ngx.thread.kill +local worker_exiting = ngx.worker.exiting +local shared_dict = ngx.shared["mcp-session"] -- TODO: rename to something like mcp-broker +local core = require("apisix.core") +local broker_utils = require("apisix.plugins.mcp.broker.utils") + +local _M = {} +local mt = { __index = _M } + + +local STORAGE_SUFFIX_QUEUE = ":queue" + + +function _M.new(opts) + return setmetatable({ + session_id = opts.session_id, + event_handler = {} + }, mt) +end + + +function _M.on(self, event, cb) + self.event_handler[event] = cb +end + + +function _M.push(self, message) + if not message then + return nil, "message is nil" + end + local ok, err = shared_dict:rpush(self.session_id .. STORAGE_SUFFIX_QUEUE, message) + if not ok then + return nil, "failed to push message to queue: " .. err + end + return true +end + + +function _M.start(self) + self.thread = thread_spawn(function() + while not worker_exiting() do + local item, err = shared_dict:lpop(self.session_id .. 
STORAGE_SUFFIX_QUEUE) + if err then + core.log.info("session ", self.session_id, + " exit, failed to pop message from queue: ", err) + break + end + if item and type(item) == "string" + and type(self.event_handler[broker_utils.EVENT_MESSAGE]) == "function" then + self.event_handler[broker_utils.EVENT_MESSAGE]( + core.json.decode(item), { raw = item } + ) + end + + ngx_sleep(0.1) -- yield to other light threads + end + end) +end + + +function _M.close(self) + if self.thread then + thread_kill(self.thread) + self.thread = nil + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/utils.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/utils.lua new file mode 100644 index 0000000..ded12ae --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/broker/utils.lua @@ -0,0 +1,21 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local _M = {} + +_M.EVENT_MESSAGE = "message" + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server.lua new file mode 100644 index 0000000..11a41b9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server.lua @@ -0,0 +1,116 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
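-- The mcp-bridge plugin above proxies an MCP (Model Context Protocol) server that
-- speaks JSON-RPC over stdio. A sketch of a route-level configuration, where the
-- command and arguments are purely illustrative:
--
--   local conf = {
--       base_uri = "/mcp",        -- prefix for the /sse and /message endpoints
--       command  = "npx",
--       args     = { "-y", "@modelcontextprotocol/server-filesystem", "/tmp" },
--   }
--
-- stdout lines from the child process are forwarded to the client over the transport,
-- while stderr lines are wrapped in "notifications/stderr" JSON-RPC notifications.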
+-- +local require = require +local setmetatable = setmetatable +local ngx = ngx +local ngx_sleep = ngx.sleep +local thread_spwan = ngx.thread.spawn +local thread_wait = ngx.thread.wait +local thread_kill = ngx.thread.kill +local worker_exiting = ngx.worker.exiting +local core = require("apisix.core") +local broker_utils = require("apisix.plugins.mcp.broker.utils") + + +local _M = {} +local mt = { __index = _M } + + +_M.EVENT_CLIENT_MESSAGE = "event:client_message" + + +-- TODO: ping requester and handler +function _M.new(opts) + local session_id = opts.session_id or core.id.gen_uuid_v4() + + -- TODO: configurable broker type + local message_broker = require("apisix.plugins.mcp.broker.shared_dict").new({ + session_id = session_id, + }) + + -- TODO: configurable transport type + local transport = require("apisix.plugins.mcp.transport.sse").new() + + local obj = setmetatable({ + opts = opts, + session_id = session_id, + next_ping_id = 0, + transport = transport, + message_broker = message_broker, + event_handler = {}, + need_exit = false, + }, mt) + + message_broker:on(broker_utils.EVENT_MESSAGE, function (message, additional) + if obj.event_handler[_M.EVENT_CLIENT_MESSAGE] then + obj.event_handler[_M.EVENT_CLIENT_MESSAGE](message, additional) + end + end) + + return obj +end + + +function _M.on(self, event, cb) + self.event_handler[event] = cb +end + + +function _M.start(self) + self.message_broker:start() + + -- ping loop + local ping = thread_spwan(function() + while not worker_exiting() do + if self.need_exit then + break + end + + self.next_ping_id = self.next_ping_id + 1 + local ok, err = self.transport:send( + '{"jsonrpc": "2.0","method": "ping","id":"ping:' .. self.next_ping_id .. '"}') + if not ok then + core.log.info("session ", self.session_id, + " exit, failed to send ping message: ", err) + self.need_exit = true + break + end + ngx_sleep(30) + end + end) + thread_wait(ping) + thread_kill(ping) +end + + +function _M.close(self) + if self.message_broker then + self.message_broker:close() + end +end + + +function _M.push_message(self, message) + local ok, err = self.message_broker:push(message) + if not ok then + return nil, "failed to push message to broker: " .. err + end + return true +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server_wrapper.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server_wrapper.lua new file mode 100644 index 0000000..5b0ed88 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/server_wrapper.lua @@ -0,0 +1,106 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
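-- Session flow implemented by the wrapper below together with the server above
-- (the V241105_* endpoints):
--   1. GET  {base_uri}/sse      -> the response becomes a text/event-stream; the first
--                                  "endpoint" event advertises {base_uri}/message?sessionId=<uuid>
--   2. POST {base_uri}/message  -> the body is pushed onto the session's shared-dict
--                                  queue and the handler answers 202 Accepted
--   3. the SSE side drains that queue, forwards each message to the bridged process,
--      and sends a JSON-RPC "ping" roughly every 30 seconds to detect dead clients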
+-- +local ngx = ngx +local ngx_exit = ngx.exit +local re_match = ngx.re.match +local core = require("apisix.core") +local mcp_server = require("apisix.plugins.mcp.server") + +local _M = {} + +local V241105_ENDPOINT_SSE = "sse" +local V241105_ENDPOINT_MESSAGE = "message" + + +local function sse_handler(conf, ctx, opts) + -- send SSE headers and first chunk + core.response.set_header("Content-Type", "text/event-stream") + core.response.set_header("Cache-Control", "no-cache") + + local server = opts.server + + -- send endpoint event to advertise the message endpoint + server.transport:send(conf.base_uri .. "/message?sessionId=" .. server.session_id, "endpoint") + + if opts.event_handler and opts.event_handler.on_client_message then + server:on(mcp_server.EVENT_CLIENT_MESSAGE, function(message, additional) + additional.server = server + opts.event_handler.on_client_message(message, additional) + end) + end + + if opts.event_handler and opts.event_handler.on_connect then + local code, body = opts.event_handler.on_connect({ server = server }) + if code then + return code, body + end + server:start() -- this is a sync call that only returns when the client disconnects + end + + if opts.event_handler.on_disconnect then + opts.event_handler.on_disconnect({ server = server }) + server:close() + end + + ngx_exit(0) -- exit current phase, skip the upstream module +end + + +local function message_handler(conf, ctx, opts) + local body = core.request.get_body(nil, ctx) + if not body then + return 400 + end + + local ok, err = opts.server:push_message(body) + if not ok then + core.log.error("failed to add task to queue: ", err) + return 500 + end + + return 202 +end + + +function _M.access(conf, ctx, opts) + local m, err = re_match(ctx.var.uri, "^" .. conf.base_uri .. "/(.*)", "jo") + if err then + core.log.info("failed to mcp base uri: ", err) + return core.response.exit(404) + end + local action = m and m[1] or false + if not action then + return core.response.exit(404) + end + + if action == V241105_ENDPOINT_SSE and core.request.get_method() == "GET" then + opts.server = mcp_server.new({}) + return sse_handler(conf, ctx, opts) + end + + if action == V241105_ENDPOINT_MESSAGE and core.request.get_method() == "POST" then + -- TODO: check ctx.var.arg_sessionId + -- recover server instead of create + opts.server = mcp_server.new({ session_id = ctx.var.arg_sessionId }) + return core.response.exit(message_handler(conf, ctx, opts)) + end + + return core.response.exit(404) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/transport/sse.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/transport/sse.lua new file mode 100644 index 0000000..83d72a1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mcp/transport/sse.lua @@ -0,0 +1,44 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local setmetatable = setmetatable +local type = type +local ngx = ngx +local ngx_print = ngx.print +local ngx_flush = ngx.flush +local core = require("apisix.core") + +local _M = {} +local mt = { __index = _M } + + +function _M.new() + return setmetatable({}, mt) +end + + +function _M.send(self, message, event_type) + local data = type(message) == "table" and core.json.encode(message) or message + local ok, err = ngx_print("event: " .. (event_type or "message") .. + "\ndata: " .. data .. "\n\n") + if not ok then + return ok, "failed to write buffer: " .. err + end + return ngx_flush(true) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/mocking.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mocking.lua new file mode 100644 index 0000000..51c4bff --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/mocking.lua @@ -0,0 +1,243 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local xml2lua = require("xml2lua") + +local json = core.json +local math = math +local ngx = ngx +local ngx_re = ngx.re +local pairs = pairs +local string = string +local table = table +local type = type + +local support_content_type = { + ["application/xml"] = true, + ["application/json"] = true, + ["text/plain"] = true, + ["text/html"] = true, + ["text/xml"] = true +} + +local schema = { + type = "object", + properties = { + -- specify response delay time,default 0ms + delay = { type = "integer", default = 0 }, + -- specify response status,default 200 + response_status = { type = "integer", default = 200, minimum = 100 }, + -- specify response content type, support application/xml, text/plain + -- and application/json, default application/json + content_type = { type = "string", default = "application/json;charset=utf8" }, + -- specify response body. + response_example = { type = "string" }, + -- specify response json schema, if response_example is not nil, this conf will be ignore. + -- generate random response by json schema. 
+ response_schema = { type = "object" }, + with_mock_header = { type = "boolean", default = true }, + response_headers = { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + { type = "string" }, + { type = "number" } + } + } + }, + } + }, + anyOf = { + { required = { "response_example" } }, + { required = { "response_schema" } } + } +} + +local _M = { + version = 0.1, + priority = 10900, + name = "mocking", + schema = schema, +} + +local function parse_content_type(content_type) + if not content_type then + return "" + end + local m = ngx_re.match(content_type, "([ -~]*);([ -~]*)", "jo") + if m and #m == 2 then + return m[1], m[2] + end + return content_type +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + local typ = parse_content_type(conf.content_type) + if not support_content_type[typ] then + return false, "unsupported content type!" + end + return true +end + + +local function gen_string(example) + if example and type(example) == "string" then + return example + end + local n = math.random(1, 10) + local list = {} + for i = 1, n do + table.insert(list, string.char(math.random(97, 122))) + end + return table.concat(list) +end + + +local function gen_number(example) + if example and type(example) == "number" then + return example + end + return math.random() * 10000 +end + + +local function gen_integer(example) + if example and type(example) == "number" then + return math.floor(example) + end + return math.random(1, 10000) +end + + +local function gen_boolean(example) + if example and type(example) == "boolean" then + return example + end + local r = math.random(0, 1) + if r == 0 then + return false + end + return true +end + + +local gen_array, gen_object, gen_by_property + +function gen_array(property) + local output = {} + if property.items == nil then + return nil + end + local v = property.items + local n = math.random(1, 3) + for i = 1, n do + table.insert(output, gen_by_property(v)) + end + return output +end + + +function gen_object(property) + local output = {} + if not property.properties then + return output + end + for k, v in pairs(property.properties) do + output[k] = gen_by_property(v) + end + return output +end + + +function gen_by_property(property) + local typ = string.lower(property.type) + local example = property.example + + if typ == "array" then + return gen_array(property) + end + + if typ == "object" then + return gen_object(property) + end + + if typ == "string" then + return gen_string(example) + end + + if typ == "number" then + return gen_number(example) + end + + if typ == "integer" then + return gen_integer(example) + end + + if typ == "boolean" then + return gen_boolean(example) + end + + return nil +end + + +function _M.access(conf, ctx) + local response_content = "" + + if conf.response_example then + response_content = conf.response_example + else + local output = gen_object(conf.response_schema) + local typ = parse_content_type(conf.content_type) + if typ == "application/xml" or typ == "text/xml" then + response_content = xml2lua.toXml(output, "data") + + elseif typ == "application/json" or typ == "text/plain" then + response_content = json.encode(output) + + else + core.log.error("json schema body only support xml and json content type") + end + end + + ngx.header["Content-Type"] = conf.content_type + if conf.with_mock_header then + ngx.header["x-mock-by"] = "APISIX/" .. 
core.version.VERSION + end + + if conf.response_headers then + for key, value in pairs(conf.response_headers) do + value = core.utils.resolve_var(value, ctx.var) + core.response.add_header(key, value) + end + end + + if conf.delay > 0 then + ngx.sleep(conf.delay) + end + return conf.response_status, core.utils.resolve_var(response_content, ctx.var) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/multi-auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/multi-auth.lua new file mode 100644 index 0000000..7d34ffb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/multi-auth.lua @@ -0,0 +1,105 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local require = require +local pairs = pairs +local type = type +local plugin = require("apisix.plugin") + +local schema = { + type = "object", + title = "work with route or service object", + properties = { + auth_plugins = { type = "array", minItems = 2 } + }, + required = { "auth_plugins" }, +} + + +local plugin_name = "multi-auth" + +local _M = { + version = 0.1, + priority = 2600, + type = 'auth', + name = plugin_name, + schema = schema +} + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + local auth_plugins = conf.auth_plugins + for k, auth_plugin in pairs(auth_plugins) do + for auth_plugin_name, auth_plugin_conf in pairs(auth_plugin) do + local auth = plugin.get(auth_plugin_name) + if auth == nil then + return false, auth_plugin_name .. " plugin did not found" + else + if auth.type ~= 'auth' then + return false, auth_plugin_name .. " plugin is not supported" + end + local ok, err = auth.check_schema(auth_plugin_conf, auth.schema) + if not ok then + return false, "plugin " .. auth_plugin_name .. " check schema failed: " .. err + end + end + end + end + + return true +end + +function _M.rewrite(conf, ctx) + local auth_plugins = conf.auth_plugins + local status_code + local errors = {} + + for k, auth_plugin in pairs(auth_plugins) do + for auth_plugin_name, auth_plugin_conf in pairs(auth_plugin) do + local auth = plugin.get(auth_plugin_name) + -- returns 401 HTTP status code if authentication failed, otherwise returns nothing. + local auth_code, err = auth.rewrite(auth_plugin_conf, ctx) + if type(err) == "table" then + err = err.message -- compat + end + + status_code = auth_code + if auth_code == nil then + core.log.debug(auth_plugin_name .. " succeed to authenticate the request") + goto authenticated + else + core.table.insert(errors, auth_plugin_name .. + " failed to authenticate the request, code: " + .. auth_code .. ". error: " .. 
err) + end + end + end + + :: authenticated :: + if status_code ~= nil then + for _, error in pairs(errors) do + core.log.warn(error) + end + return 401, { message = "Authorization Failed" } + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/node-status.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/node-status.lua new file mode 100644 index 0000000..b80007b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/node-status.lua @@ -0,0 +1,98 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx = ngx +local re_gmatch = ngx.re.gmatch +local ngx_capture = ngx.location.capture +local plugin_name = "node-status" +local apisix_id = core.id.get() +local ipairs = ipairs + + +local schema = { + type = "object", +} + + +local _M = { + version = 0.1, + priority = 1000, + name = plugin_name, + schema = schema, + scope = "global", +} + + +local ngx_status = {} +local ngx_status_items = { + "active", "accepted", "handled", "total", + "reading", "writing", "waiting" +} + + +local function collect() + local res = ngx_capture("/apisix/nginx_status") + if res.status ~= 200 then + return res.status + end + + -- Active connections: 2 + -- server accepts handled requests + -- 26 26 84 + -- Reading: 0 Writing: 1 Waiting: 1 + + local iterator, err = re_gmatch(res.body, [[(\d+)]], "jmo") + if not iterator then + return 500, "failed to re.gmatch Nginx status: " .. err + end + + core.table.clear(ngx_status) + for _, name in ipairs(ngx_status_items) do + local val = iterator() + if not val then + break + end + + ngx_status[name] = val[0] + end + + return 200, core.json.encode({id = apisix_id, status = ngx_status}) +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +function _M.api() + return { + { + methods = {"GET"}, + uri = "/apisix/status", + handler = collect, + } + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ocsp-stapling.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ocsp-stapling.lua new file mode 100644 index 0000000..cbe2bb1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ocsp-stapling.lua @@ -0,0 +1,220 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. +-- + +local require = require +local http = require("resty.http") +local ngx = ngx +local ngx_ocsp = require("ngx.ocsp") +local ngx_ssl = require("ngx.ssl") +local radixtree_sni = require("apisix.ssl.router.radixtree_sni") +local core = require("apisix.core") + +local plugin_name = "ocsp-stapling" +local ocsp_resp_cache = ngx.shared[plugin_name] + +local plugin_schema = { + type = "object", + properties = {}, +} + +local _M = { + name = plugin_name, + schema = plugin_schema, + version = 0.1, + priority = -44, +} + + +function _M.check_schema(conf) + return core.schema.check(plugin_schema, conf) +end + + +local function fetch_ocsp_resp(der_cert_chain) + core.log.info("fetch ocsp response from remote") + local ocsp_url, err = ngx_ocsp.get_ocsp_responder_from_der_chain(der_cert_chain) + + if not ocsp_url then + -- if the cert does not support ocsp, the reported error is nil + if not err then + err = "cert does not contain authority_information_access extension" + end + return nil, "failed to get ocsp url: " .. err + end + + local ocsp_req, err = ngx_ocsp.create_ocsp_request(der_cert_chain) + if not ocsp_req then + return nil, "failed to create ocsp request: " .. err + end + + local httpc = http.new() + local res, err = httpc:request_uri(ocsp_url, { + method = "POST", + headers = { + ["Content-Type"] = "application/ocsp-request", + }, + body = ocsp_req + }) + + if not res then + return nil, "ocsp responder query failed: " .. err + end + + local http_status = res.status + if http_status ~= 200 then + return nil, "ocsp responder returns bad http status code: " + .. http_status + end + + if res.body and #res.body > 0 then + return res.body, nil + end + + return nil, "ocsp responder returns empty body" +end + + +local function set_ocsp_resp(full_chain_pem_cert, skip_verify, cache_ttl) + local der_cert_chain, err = ngx_ssl.cert_pem_to_der(full_chain_pem_cert) + if not der_cert_chain then + return false, "failed to convert certificate chain from PEM to DER: " .. err + end + + local ocsp_resp = ocsp_resp_cache:get(der_cert_chain) + if ocsp_resp == nil then + core.log.info("no cached ocsp resp found, fetch from ocsp responder") + ocsp_resp, err = fetch_ocsp_resp(der_cert_chain) + if ocsp_resp == nil then + return false, err + end + core.log.info("fetch ocsp resp ok, cache it") + ocsp_resp_cache:set(der_cert_chain, ocsp_resp, cache_ttl) + end + + if not skip_verify then + local ok, err = ngx_ocsp.validate_ocsp_response(ocsp_resp, der_cert_chain) + if not ok then + return false, "failed to validate ocsp response: " .. err + end + end + + -- set the OCSP stapling + local ok, err = ngx_ocsp.set_ocsp_status_resp(ocsp_resp) + if not ok then + return false, "failed to set ocsp status response: " ..
err + end + + return true +end + + +local original_set_cert_and_key +local function set_cert_and_key(sni, value) + if value.gm then + -- should not run with gm plugin + core.log.warn("gm plugin enabled, no need to run ocsp-stapling plugin") + return original_set_cert_and_key(sni, value) + end + + if not value.ocsp_stapling then + core.log.info("no 'ocsp_stapling' field found, no need to run ocsp-stapling plugin") + return original_set_cert_and_key(sni, value) + end + + if not value.ocsp_stapling.enabled then + return original_set_cert_and_key(sni, value) + end + + if not ngx.ctx.tls_ext_status_req then + core.log.info("no status request required, no need to send ocsp response") + return original_set_cert_and_key(sni, value) + end + + local ok, err = radixtree_sni.set_pem_ssl_key(sni, value.cert, value.key) + if not ok then + return false, err + end + local fin_pem_cert = value.cert + + -- multiple certificates support. + if value.certs then + for i = 1, #value.certs do + local cert = value.certs[i] + local key = value.keys[i] + ok, err = radixtree_sni.set_pem_ssl_key(sni, cert, key) + if not ok then + return false, err + end + fin_pem_cert = cert + end + end + + local ok, err = set_ocsp_resp(fin_pem_cert, + value.ocsp_stapling.skip_verify, + value.ocsp_stapling.cache_ttl) + if not ok then + core.log.error("no ocsp response send: ", err) + end + + return true +end + + +function _M.init() + if core.schema.ssl.properties.gm ~= nil then + core.log.error("ocsp-stapling plugin should not run with gm plugin") + end + + original_set_cert_and_key = radixtree_sni.set_cert_and_key + radixtree_sni.set_cert_and_key = set_cert_and_key + + if core.schema.ssl.properties.ocsp_stapling ~= nil then + core.log.error("Field 'ocsp_stapling' is occupied") + end + + core.schema.ssl.properties.ocsp_stapling = { + type = "object", + properties = { + enabled = { + type = "boolean", + default = false, + }, + skip_verify = { + type = "boolean", + default = false, + }, + cache_ttl = { + type = "integer", + minimum = 60, + default = 3600, + }, + } + } + +end + + +function _M.destroy() + radixtree_sni.set_cert_and_key = original_set_cert_and_key + core.schema.ssl.properties.ocsp_stapling = nil + ocsp_resp_cache:flush_all() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/opa.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/opa.lua new file mode 100644 index 0000000..0475529 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/opa.lua @@ -0,0 +1,152 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local http = require("resty.http") +local helper = require("apisix.plugins.opa.helper") +local type = type +local ipairs = ipairs + +local schema = { + type = "object", + properties = { + host = {type = "string"}, + ssl_verify = { + type = "boolean", + default = true, + }, + policy = {type = "string"}, + timeout = { + type = "integer", + minimum = 1, + maximum = 60000, + default = 3000, + description = "timeout in milliseconds", + }, + keepalive = {type = "boolean", default = true}, + send_headers_upstream = { + type = "array", + minItems = 1, + items = { + type = "string" + }, + description = "list of headers to pass to upstream in request" + }, + keepalive_timeout = {type = "integer", minimum = 1000, default = 60000}, + keepalive_pool = {type = "integer", minimum = 1, default = 5}, + with_route = {type = "boolean", default = false}, + with_service = {type = "boolean", default = false}, + with_consumer = {type = "boolean", default = false}, + }, + required = {"host", "policy"} +} + + +local _M = { + version = 0.1, + priority = 2001, + name = "opa", + schema = schema, +} + + +function _M.check_schema(conf) + local check = {"host"} + core.utils.check_https(check, conf, _M.name) + core.utils.check_tls_bool({"ssl_verify"}, conf, _M.name) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + local body = helper.build_opa_input(conf, ctx, "http") + + local params = { + method = "POST", + body = core.json.encode(body), + headers = { + ["Content-Type"] = "application/json", + }, + keepalive = conf.keepalive, + ssl_verify = conf.ssl_verify + } + + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + end + + local endpoint = conf.host .. "/v1/data/" .. conf.policy + + local httpc = http.new() + httpc:set_timeout(conf.timeout) + + local res, err = httpc:request_uri(endpoint, params) + + -- block by default when decision is unavailable + if not res then + core.log.error("failed to process OPA decision, err: ", err) + return 403 + end + + -- parse the results of the decision + local data, err = core.json.decode(res.body) + + if not data then + core.log.error("invalid response body: ", res.body, " err: ", err) + return 503 + end + + if not data.result then + core.log.error("invalid OPA decision format: ", res.body, + " err: `result` field does not exist") + return 503 + end + + local result = data.result + + if not result.allow then + if result.headers then + core.response.set_header(result.headers) + end + + local status_code = 403 + if result.status_code then + status_code = result.status_code + end + + local reason = nil + if result.reason then + reason = type(result.reason) == "table" + and core.json.encode(result.reason) + or result.reason + end + + return status_code, reason + else if result.headers and conf.send_headers_upstream then + for _, name in ipairs(conf.send_headers_upstream) do + local value = result.headers[name] + if value then + core.request.set_header(ctx, name, value) + end + end + end + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/opa/helper.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/opa/helper.lua new file mode 100644 index 0000000..638adcf --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/opa/helper.lua @@ -0,0 +1,117 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local get_service = require("apisix.http.service").get +local ngx_time = ngx.time + +local _M = {} + + +-- build a table of Nginx variables with some generality +-- between http subsystem and stream subsystem +local function build_var(conf, ctx) + return { + server_addr = ctx.var.server_addr, + server_port = ctx.var.server_port, + remote_addr = ctx.var.remote_addr, + remote_port = ctx.var.remote_port, + timestamp = ngx_time(), + } +end + + +local function build_http_request(conf, ctx) + return { + scheme = core.request.get_scheme(ctx), + method = core.request.get_method(), + host = core.request.get_host(ctx), + port = core.request.get_port(ctx), + path = ctx.var.uri, + headers = core.request.headers(ctx), + query = core.request.get_uri_args(ctx), + } +end + + +local function build_http_route(conf, ctx, remove_upstream) + local route = core.table.deepcopy(ctx.matched_route).value + + if remove_upstream and route and route.upstream then + -- unimportant to send upstream info to OPA + route.upstream = nil + end + + return route +end + + +local function build_http_service(conf, ctx) + local service_id = ctx.service_id + + -- possible that there is no service bound to the route + if service_id then + local service = core.table.clone(get_service(service_id)).value + + if service then + if service.upstream then + service.upstream = nil + end + return service + end + end + + return nil +end + + +local function build_http_consumer(conf, ctx) + -- possible that there is no consumer bound to the route + if ctx.consumer then + return core.table.clone(ctx.consumer) + end + + return nil +end + + +function _M.build_opa_input(conf, ctx, subsystem) + local data = { + type = subsystem, + request = build_http_request(conf, ctx), + var = build_var(conf, ctx) + } + + if conf.with_route then + data.route = build_http_route(conf, ctx, true) + end + + if conf.with_consumer then + data.consumer = build_http_consumer(conf, ctx) + end + + if conf.with_service then + data.service = build_http_service(conf, ctx) + end + + return { + input = data, + } +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/openfunction.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/openfunction.lua new file mode 100644 index 0000000..935d6eb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/openfunction.lua @@ -0,0 +1,35 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ngx_encode_base64 = ngx.encode_base64 +local plugin_name, plugin_version, priority = "openfunction", 0.1, -1902 + +local openfunction_authz_schema = { + service_token = {type = "string"} +} + +local function request_processor(conf, ctx, params) + local headers = params.headers or {} + -- setting authorization headers if authorization.service_token exists + if conf.authorization and conf.authorization.service_token then + headers["authorization"] = "Basic " .. ngx_encode_base64(conf.authorization.service_token) + end + + params.headers = headers +end + +return require("apisix.plugins.serverless.generic-upstream")(plugin_name, + plugin_version, priority, request_processor, openfunction_authz_schema) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/openid-connect.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/openid-connect.lua new file mode 100644 index 0000000..6c94ea5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/openid-connect.lua @@ -0,0 +1,717 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local ngx_re = require("ngx.re") +local openidc = require("resty.openidc") +local random = require("resty.random") +local string = string +local ngx = ngx +local ipairs = ipairs +local type = type +local concat = table.concat + +local ngx_encode_base64 = ngx.encode_base64 + +local plugin_name = "openid-connect" + + +local schema = { + type = "object", + properties = { + client_id = {type = "string"}, + client_secret = {type = "string"}, + discovery = {type = "string"}, + scope = { + type = "string", + default = "openid", + }, + ssl_verify = { + type = "boolean", + default = false, + }, + timeout = { + type = "integer", + minimum = 1, + default = 3, + description = "timeout in seconds", + }, + introspection_endpoint = { + type = "string" + }, + introspection_endpoint_auth_method = { + type = "string", + default = "client_secret_basic" + }, + token_endpoint_auth_method = { + type = "string", + default = "client_secret_basic" + }, + bearer_only = { + type = "boolean", + default = false, + }, + session = { + type = "object", + properties = { + secret = { + type = "string", + description = "the key used for the encrypt and HMAC calculation", + minLength = 16, + }, + cookie = { + type = "object", + properties = { + lifetime = { + type = "integer", + description = "it holds the cookie lifetime in seconds in the future", + } + } + } + }, + required = {"secret"}, + additionalProperties = false, + }, + realm = { + type = "string", + default = "apisix", + }, + claim_validator = { + type = "object", + properties = { + issuer = { + description = [[Whitelist the vetted issuers of the jwt. + When not passed by the user, the issuer returned by + discovery endpoint will be used. In case both are missing, + the issuer will not be validated.]], + type = "object", + properties = { + valid_issuers = { + type = "array", + items = { + type = "string" + } + } + } + }, + audience = { + type = "object", + description = "audience claim value to validate", + properties = { + claim = { + type = "string", + description = "custom claim name", + default = "aud", + }, + required = { + type = "boolean", + description = "audience claim is required", + default = false, + }, + match_with_client_id = { + type = "boolean", + description = "audience must euqal to or includes client_id", + default = false, + } + }, + }, + }, + }, + logout_path = { + type = "string", + default = "/logout", + }, + redirect_uri = { + type = "string", + description = "auto append '.apisix/redirect' to ngx.var.uri if not configured" + }, + post_logout_redirect_uri = { + type = "string", + description = "the URI will be redirect when request logout_path", + }, + unauth_action = { + type = "string", + default = "auth", + enum = {"auth", "deny", "pass"}, + description = "The action performed when client is not authorized. Use auth to " .. + "redirect user to identity provider, deny to respond with 401 Unauthorized, and " .. + "pass to allow the request regardless." + }, + public_key = {type = "string"}, + token_signing_alg_values_expected = {type = "string"}, + use_pkce = { + description = "when set to true the PKCE(Proof Key for Code Exchange) will be used.", + type = "boolean", + default = false + }, + set_access_token_header = { + description = "Whether the access token should be added as a header to the request " .. + "for downstream", + type = "boolean", + default = true + }, + access_token_in_authorization_header = { + description = "Whether the access token should be added in the Authorization " .. 
+ "header as opposed to the X-Access-Token header.", + type = "boolean", + default = false + }, + set_id_token_header = { + description = "Whether the ID token should be added in the X-ID-Token header to " .. + "the request for downstream.", + type = "boolean", + default = true + }, + set_userinfo_header = { + description = "Whether the user info token should be added in the X-Userinfo " .. + "header to the request for downstream.", + type = "boolean", + default = true + }, + set_refresh_token_header = { + description = "Whether the refresh token should be added in the X-Refresh-Token " .. + "header to the request for downstream.", + type = "boolean", + default = false + }, + proxy_opts = { + description = "HTTP proxy server be used to access identity server.", + type = "object", + properties = { + http_proxy = { + type = "string", + description = "HTTP proxy like: http://proxy-server:80.", + }, + https_proxy = { + type = "string", + description = "HTTPS proxy like: http://proxy-server:80.", + }, + http_proxy_authorization = { + type = "string", + description = "Basic [base64 username:password].", + }, + https_proxy_authorization = { + type = "string", + description = "Basic [base64 username:password].", + }, + no_proxy = { + type = "string", + description = "Comma separated list of hosts that should not be proxied.", + } + }, + }, + authorization_params = { + description = "Extra authorization params to the authorize endpoint", + type = "object" + }, + client_rsa_private_key = { + description = "Client RSA private key used to sign JWT.", + type = "string" + }, + client_rsa_private_key_id = { + description = "Client RSA private key ID used to compute a signed JWT.", + type = "string" + }, + client_jwt_assertion_expires_in = { + description = "Life duration of the signed JWT in seconds.", + type = "integer", + default = 60 + }, + renew_access_token_on_expiry = { + description = "Whether to attempt silently renewing the access token.", + type = "boolean", + default = true + }, + access_token_expires_in = { + description = "Lifetime of the access token in seconds if expires_in is not present.", + type = "integer" + }, + refresh_session_interval = { + description = "Time interval to refresh user ID token without re-authentication.", + type = "integer" + }, + iat_slack = { + description = "Tolerance of clock skew in seconds with the iat claim in an ID token.", + type = "integer", + default = 120 + }, + accept_none_alg = { + description = "Set to true if the OpenID provider does not sign its ID token.", + type = "boolean", + default = false + }, + accept_unsupported_alg = { + description = "Ignore ID token signature to accept unsupported signature algorithm.", + type = "boolean", + default = true + }, + access_token_expires_leeway = { + description = "Expiration leeway in seconds for access token renewal.", + type = "integer", + default = 0 + }, + force_reauthorize = { + description = "Whether to execute the authorization flow when a token has been cached.", + type = "boolean", + default = false + }, + use_nonce = { + description = "Whether to include nonce parameter in authorization request.", + type = "boolean", + default = false + }, + revoke_tokens_on_logout = { + description = "Notify authorization server a previous token is no longer needed.", + type = "boolean", + default = false + }, + jwk_expires_in = { + description = "Expiration time for JWK cache in seconds.", + type = "integer", + default = 86400 + }, + jwt_verification_cache_ignore = { + description = "Whether to ignore cached 
verification and re-verify.", + type = "boolean", + default = false + }, + cache_segment = { + description = "Name of a cache segment to differentiate caches.", + type = "string" + }, + introspection_interval = { + description = "TTL of the cached and introspected access token in seconds.", + type = "integer", + default = 0 + }, + introspection_expiry_claim = { + description = "Name of the expiry claim that controls the cached access token TTL.", + type = "string" + }, + introspection_addon_headers = { + description = "Extra http headers in introspection", + type = "array", + minItems = 1, + items = { + type = "string", + pattern = "^[^:]+$" + } + }, + required_scopes = { + description = "List of scopes that are required to be granted to the access token", + type = "array", + items = { + type = "string" + } + } + }, + encrypt_fields = {"client_secret", "client_rsa_private_key"}, + required = {"client_id", "client_secret", "discovery"} +} + + +local _M = { + version = 0.2, + priority = 2599, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + if conf.ssl_verify == "no" then + -- we used to set 'ssl_verify' to "no" + conf.ssl_verify = false + end + + if not conf.bearer_only and not conf.session then + core.log.warn("when bearer_only = false, " .. + "you'd better complete the session configuration manually") + conf.session = { + -- generate a secret when bearer_only = false and no secret is configured + secret = ngx_encode_base64(random.bytes(32, true) or random.bytes(32)) + } + end + + local check = {"discovery", "introspection_endpoint", "redirect_uri", + "post_logout_redirect_uri", "proxy_opts.http_proxy", "proxy_opts.https_proxy"} + core.utils.check_https(check, conf, plugin_name) + core.utils.check_tls_bool({"ssl_verify"}, conf, plugin_name) + + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +local function get_bearer_access_token(ctx) + -- Get Authorization header, maybe. + local auth_header = core.request.header(ctx, "Authorization") + if not auth_header then + -- No Authorization header, get X-Access-Token header, maybe. + local access_token_header = core.request.header(ctx, "X-Access-Token") + if not access_token_header then + -- No X-Access-Token header neither. + return false, nil, nil + end + + -- Return extracted header value. + return true, access_token_header, nil + end + + -- Check format of Authorization header. + local res, err = ngx_re.split(auth_header, " ", nil, nil, 2) + + if not res then + -- No result was returned. + return false, nil, err + elseif #res < 2 then + -- Header doesn't split into enough tokens. + return false, nil, "Invalid Authorization header format." + end + + if string.lower(res[1]) == "bearer" then + -- Return extracted token. + return true, res[2], nil + end + + return false, nil, nil +end + + +local function introspect(ctx, conf) + -- Extract token, maybe. + local has_token, token, err = get_bearer_access_token(ctx) + + if err then + return ngx.HTTP_BAD_REQUEST, err, nil, nil + end + + if not has_token then + -- Could not find token. + + if conf.bearer_only then + -- Token strictly required in request. + ngx.header["WWW-Authenticate"] = 'Bearer realm="' .. conf.realm .. '"' + return ngx.HTTP_UNAUTHORIZED, "No bearer token found in request.", nil, nil + else + -- Return empty result. 
+ return nil, nil, nil, nil + end + end + + if conf.public_key or conf.use_jwks then + local opts = {} + -- Validate token against public key or jwks document of the oidc provider. + -- TODO: In the called method, the openidc module will try to extract + -- the token by itself again -- from a request header or session cookie. + -- It is inefficient that we also need to extract it (just from headers) + -- so we can add it in the configured header. Find a way to use openidc + -- module's internal methods to extract the token. + local valid_issuers + if conf.claim_validator and conf.claim_validator.issuer then + valid_issuers = conf.claim_validator.issuer.valid_issuers + end + if not valid_issuers then + local discovery, discovery_err = openidc.get_discovery_doc(conf) + if discovery_err then + core.log.warn("OIDC access discovery url failed : ", discovery_err) + else + core.log.info("valid_issuers not provided explicitly," .. + " using issuer from discovery doc: ", + discovery.issuer) + valid_issuers = {discovery.issuer} + end + end + if valid_issuers then + opts.valid_issuers = valid_issuers + end + local res, err = openidc.bearer_jwt_verify(conf, opts) + if err then + -- Error while validating or token invalid. + ngx.header["WWW-Authenticate"] = 'Bearer realm="' .. conf.realm .. + '", error="invalid_token", error_description="' .. err .. '"' + return ngx.HTTP_UNAUTHORIZED, err, nil, nil + end + + -- Token successfully validated. + local method = (conf.public_key and "public_key") or (conf.use_jwks and "jwks") + core.log.debug("token validate successfully by ", method) + return res, err, token, res + else + -- Validate token against introspection endpoint. + -- TODO: Same as above for public key validation. + if conf.introspection_addon_headers then + -- http_request_decorator option provided by lua-resty-openidc + conf.http_request_decorator = function(req) + local h = req.headers or {} + for _, name in ipairs(conf.introspection_addon_headers) do + local value = core.request.header(ctx, name) + if value then + h[name] = value + end + end + req.headers = h + return req + end + end + + local res, err = openidc.introspect(conf) + conf.http_request_decorator = nil + + if err then + ngx.header["WWW-Authenticate"] = 'Bearer realm="' .. conf.realm .. + '", error="invalid_token", error_description="' .. err .. '"' + return ngx.HTTP_UNAUTHORIZED, err, nil, nil + end + + -- Token successfully validated and response from the introspection + -- endpoint contains the userinfo. + core.log.debug("token validate successfully by introspection") + return res, err, token, res + end +end + + +local function add_access_token_header(ctx, conf, token) + if token then + -- Add Authorization or X-Access-Token header, respectively, if not already set. + if conf.set_access_token_header then + if conf.access_token_in_authorization_header then + if not core.request.header(ctx, "Authorization") then + -- Add Authorization header. + core.request.set_header(ctx, "Authorization", "Bearer " .. token) + end + else + if not core.request.header(ctx, "X-Access-Token") then + -- Add X-Access-Token header. 
+ core.request.set_header(ctx, "X-Access-Token", token) + end + end + end + end +end + +-- Function to split the scope string into a table +local function split_scopes_by_space(scope_string) + local scopes = {} + for scope in string.gmatch(scope_string, "%S+") do + scopes[scope] = true + end + return scopes +end + +-- Function to check if all required scopes are present +local function required_scopes_present(required_scopes, http_scopes) + for _, scope in ipairs(required_scopes) do + if not http_scopes[scope] then + return false + end + end + return true +end + +function _M.rewrite(plugin_conf, ctx) + local conf = core.table.clone(plugin_conf) + + -- Previously, we multiply conf.timeout before storing it in etcd. + -- If the timeout is too large, we should not multiply it again. + if not (conf.timeout >= 1000 and conf.timeout % 1000 == 0) then + conf.timeout = conf.timeout * 1000 + end + + local path = ctx.var.request_uri + + if not conf.redirect_uri then + -- NOTE: 'lua-resty-openidc' requires that 'redirect_uri' be + -- different from 'uri'. So default to append the + -- '.apisix/redirect' suffix if not configured. + local suffix = "/.apisix/redirect" + local uri = ctx.var.uri + if core.string.has_suffix(uri, suffix) then + -- This is the redirection response from the OIDC provider. + conf.redirect_uri = uri + else + if string.sub(uri, -1, -1) == "/" then + conf.redirect_uri = string.sub(uri, 1, -2) .. suffix + else + conf.redirect_uri = uri .. suffix + end + end + core.log.debug("auto set redirect_uri: ", conf.redirect_uri) + end + + if not conf.ssl_verify then + -- openidc use "no" to disable ssl verification + conf.ssl_verify = "no" + end + + if path == (conf.logout_path or "/logout") then + local discovery, discovery_err = openidc.get_discovery_doc(conf) + if discovery_err then + core.log.error("OIDC access discovery url failed : ", discovery_err) + return 503 + end + if conf.post_logout_redirect_uri and not discovery.end_session_endpoint then + -- If the end_session_endpoint field does not exist in the OpenID Provider Discovery + -- Metadata, the redirect_after_logout_uri field is used for redirection. + conf.redirect_after_logout_uri = conf.post_logout_redirect_uri + end + end + + local response, err, session, _ + + if conf.bearer_only or conf.introspection_endpoint or conf.public_key or conf.use_jwks then + -- An introspection endpoint or a public key has been configured. Try to + -- validate the access token from the request, if it is present in a + -- request header. Otherwise, return a nil response. See below for + -- handling of the case where the access token is stored in a session cookie. + local access_token, userinfo + response, err, access_token, userinfo = introspect(ctx, conf) + + if err then + -- Error while validating token or invalid token. + core.log.error("OIDC introspection failed: ", err) + return response + end + + if response then + if conf.required_scopes then + local http_scopes = response.scope and split_scopes_by_space(response.scope) or {} + local is_authorized = required_scopes_present(conf.required_scopes, http_scopes) + if not is_authorized then + core.log.error("OIDC introspection failed: ", "required scopes not present") + local error_response = { + error = "required scopes " .. concat(conf.required_scopes, ", ") .. 
+ " not present" + } + return 403, core.json.encode(error_response) + end + end + + -- jwt audience claim validator + local audience_claim = core.table.try_read_attr(conf, "claim_validator", + "audience", "claim") or "aud" + local audience_value = response[audience_claim] + if core.table.try_read_attr(conf, "claim_validator", "audience", "required") + and not audience_value then + core.log.error("OIDC introspection failed: required audience (", + audience_claim, ") not present") + local error_response = { error = "required audience claim not present" } + return 403, core.json.encode(error_response) + end + if core.table.try_read_attr(conf, "claim_validator", "audience", "match_with_client_id") + and audience_value ~= nil then + local error_response = { error = "mismatched audience" } + local matched = false + if type(audience_value) == "table" then + for _, v in ipairs(audience_value) do + if conf.client_id == v then + matched = true + end + end + if not matched then + core.log.error("OIDC introspection failed: ", + "audience list does not contain the client id") + return 403, core.json.encode(error_response) + end + elseif conf.client_id ~= audience_value then + core.log.error("OIDC introspection failed: ", + "audience does not match the client id") + return 403, core.json.encode(error_response) + end + end + + -- Add configured access token header, maybe. + add_access_token_header(ctx, conf, access_token) + + if userinfo and conf.set_userinfo_header then + -- Set X-Userinfo header to introspection endpoint response. + core.request.set_header(ctx, "X-Userinfo", + ngx_encode_base64(core.json.encode(userinfo))) + end + end + end + + if not response then + -- Either token validation via introspection endpoint or public key is + -- not configured, and/or token could not be extracted from the request. + + local unauth_action = conf.unauth_action + if unauth_action ~= "auth" then + unauth_action = "deny" + end + + -- Authenticate the request. This will validate the access token if it + -- is stored in a session cookie, and also renew the token if required. + -- If no token can be extracted, the response will redirect to the ID + -- provider's authorization endpoint to initiate the Relying Party flow. + -- This code path also handles when the ID provider then redirects to + -- the configured redirect URI after successful authentication. + response, err, _, session = openidc.authenticate(conf, nil, unauth_action, conf.session) + + if err then + if session then + session:close() + end + if err == "unauthorized request" then + if conf.unauth_action == "pass" then + return nil + end + return 401 + end + core.log.error("OIDC authentication failed: ", err) + return 500 + end + + if response then + -- If the openidc module has returned a response, it may contain, + -- respectively, the access token, the ID token, the refresh token, + -- and the userinfo. + -- Add respective headers to the request, if so configured. + + -- Add configured access token header, maybe. + add_access_token_header(ctx, conf, response.access_token) + + -- Add X-ID-Token header, maybe. + if response.id_token and conf.set_id_token_header then + local token = core.json.encode(response.id_token) + core.request.set_header(ctx, "X-ID-Token", ngx.encode_base64(token)) + end + + -- Add X-Userinfo header, maybe. + if response.user and conf.set_userinfo_header then + core.request.set_header(ctx, "X-Userinfo", + ngx_encode_base64(core.json.encode(response.user))) + end + + -- Add X-Refresh-Token header, maybe. 
+ if session.data.refresh_token and conf.set_refresh_token_header then + core.request.set_header(ctx, "X-Refresh-Token", session.data.refresh_token) + end + end + end + if session then + session:close() + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/opentelemetry.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/opentelemetry.lua new file mode 100644 index 0000000..d98ac44 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/opentelemetry.lua @@ -0,0 +1,426 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local plugin_name = "opentelemetry" +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local process = require("ngx.process") + +local always_off_sampler_new = require("opentelemetry.trace.sampling.always_off_sampler").new +local always_on_sampler_new = require("opentelemetry.trace.sampling.always_on_sampler").new +local parent_base_sampler_new = require("opentelemetry.trace.sampling.parent_base_sampler").new +local trace_id_ratio_sampler_new = + require("opentelemetry.trace.sampling.trace_id_ratio_sampler").new + +local exporter_client_new = require("opentelemetry.trace.exporter.http_client").new +local otlp_exporter_new = require("opentelemetry.trace.exporter.otlp").new +local batch_span_processor_new = require("opentelemetry.trace.batch_span_processor").new +local id_generator = require("opentelemetry.trace.id_generator") +local tracer_provider_new = require("opentelemetry.trace.tracer_provider").new + +local span_kind = require("opentelemetry.trace.span_kind") +local span_status = require("opentelemetry.trace.span_status") +local resource_new = require("opentelemetry.resource").new +local attr = require("opentelemetry.attribute") + +local context = require("opentelemetry.context").new() +local trace_context_propagator = + require("opentelemetry.trace.propagation.text_map.trace_context_propagator").new() + +local ngx = ngx +local ngx_var = ngx.var +local table = table +local type = type +local pairs = pairs +local ipairs = ipairs +local unpack = unpack +local string_format = string.format + +local lrucache = core.lrucache.new({ + type = 'plugin', count = 128, ttl = 24 * 60 * 60, +}) + +local asterisk = string.byte("*", 1) + +local metadata_schema = { + type = "object", + properties = { + trace_id_source = { + type = "string", + enum = {"x-request-id", "random"}, + description = "the source of trace id", + default = "random", + }, + resource = { + type = "object", + description = "additional resource", + additionalProperties = {{type = "boolean"}, {type = "number"}, {type = "string"}}, + }, + collector = { + type = "object", + description = "opentelemetry collector", + properties = { + address = {type = "string", description = 
"host:port", default = "127.0.0.1:4318"}, + request_timeout = {type = "integer", description = "second uint", default = 3}, + request_headers = { + type = "object", + description = "http headers", + additionalProperties = { + one_of = {{type = "boolean"},{type = "number"}, {type = "string"}}, + }, + } + }, + default = {address = "127.0.0.1:4318", request_timeout = 3} + }, + batch_span_processor = { + type = "object", + description = "batch span processor", + properties = { + drop_on_queue_full = { + type = "boolean", + description = "if true, drop span when queue is full," + .. " otherwise force process batches", + }, + max_queue_size = { + type = "integer", + description = "maximum queue size to buffer spans for delayed processing", + }, + batch_timeout = { + type = "number", + description = "maximum duration for constructing a batch", + }, + inactive_timeout = { + type = "number", + description = "maximum duration for processing batches", + }, + max_export_batch_size = { + type = "integer", + description = "maximum number of spans to process in a single batch", + } + }, + default = {}, + }, + set_ngx_var = { + type = "boolean", + description = "set nginx variables", + default = false, + }, + }, +} + +local schema = { + type = "object", + properties = { + sampler = { + type = "object", + properties = { + name = { + type = "string", + enum = {"always_on", "always_off", "trace_id_ratio", "parent_base"}, + title = "sampling strategy", + default = "always_off" + }, + options = { + type = "object", + properties = { + fraction = { + type = "number", title = "trace_id_ratio fraction", default = 0 + }, + root = { + type = "object", + title = "parent_base root sampler", + properties = { + name = { + type = "string", + enum = {"always_on", "always_off", "trace_id_ratio"}, + title = "sampling strategy", + default = "always_off" + }, + options = { + type = "object", + properties = { + fraction = { + type = "number", + title = "trace_id_ratio fraction parameter", + default = 0, + }, + }, + default = {fraction = 0} + } + }, + default = {name = "always_off", options = {fraction = 0}} + }, + }, + default = {fraction = 0, root = {name = "always_off"}} + } + }, + default = {name = "always_off", options = {fraction = 0, root = {name = "always_off"}}} + }, + additional_attributes = { + type = "array", + items = { + type = "string", + minLength = 1, + } + }, + additional_header_prefix_attributes = { + type = "array", + items = { + type = "string", + minLength = 1, + } + } + } +} + + +local _M = { + version = 0.1, + priority = 12009, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + local ok, err = core.schema.check(metadata_schema, conf) + if not ok then + return ok, err + end + local check = {"collector.address"} + core.utils.check_https(check, conf, plugin_name) + return true + end + return core.schema.check(schema, conf) +end + + +local hostname +local sampler_factory + +function _M.init() + if process.type() ~= "worker" then + return + end + + sampler_factory = { + always_off = always_off_sampler_new, + always_on = always_on_sampler_new, + parent_base = parent_base_sampler_new, + trace_id_ratio = trace_id_ratio_sampler_new, + } + hostname = core.utils.gethostname() +end + + +local function create_tracer_obj(conf, plugin_info) + if plugin_info.trace_id_source == "x-request-id" then + id_generator.new_ids = function() + local trace_id = core.request.headers()["x-request-id"] 
or ngx_var.request_id + return trace_id, id_generator.new_span_id() + end + end + -- create exporter + local exporter = otlp_exporter_new(exporter_client_new(plugin_info.collector.address, + plugin_info.collector.request_timeout, + plugin_info.collector.request_headers)) + -- create span processor + local batch_span_processor = batch_span_processor_new(exporter, + plugin_info.batch_span_processor) + -- create sampler + local sampler + local sampler_name = conf.sampler.name + local sampler_options = conf.sampler.options + if sampler_name == "parent_base" then + local root_sampler + if sampler_options.root then + local name, fraction = sampler_options.root.name, sampler_options.root.options.fraction + root_sampler = sampler_factory[name](fraction) + else + root_sampler = always_off_sampler_new() + end + sampler = sampler_factory[sampler_name](root_sampler) + else + sampler = sampler_factory[sampler_name](sampler_options.fraction) + end + local resource_attrs = {attr.string("hostname", hostname)} + if plugin_info.resource then + if not plugin_info.resource["service.name"] then + table.insert(resource_attrs, attr.string("service.name", "APISIX")) + end + for k, v in pairs(plugin_info.resource) do + if type(v) == "string" then + table.insert(resource_attrs, attr.string(k, v)) + end + if type(v) == "number" then + table.insert(resource_attrs, attr.double(k, v)) + end + if type(v) == "boolean" then + table.insert(resource_attrs, attr.bool(k, v)) + end + end + end + -- create tracer provider + local tp = tracer_provider_new(batch_span_processor, { + resource = resource_new(unpack(resource_attrs)), + sampler = sampler, + }) + -- create tracer + return tp:tracer("opentelemetry-lua") +end + + +local function inject_attributes(attributes, wanted_attributes, source, with_prefix) + for _, key in ipairs(wanted_attributes) do + local is_key_a_match = #key >= 2 and key:byte(-1) == asterisk and with_prefix + + if is_key_a_match then + local prefix = key:sub(0, -2) + for possible_key, value in pairs(source) do + if core.string.has_prefix(possible_key, prefix) then + core.table.insert(attributes, attr.string(possible_key, value)) + end + end + else + local val = source[key] + if val then + core.table.insert(attributes, attr.string(key, val)) + end + end + end +end + + +function _M.rewrite(conf, api_ctx) + local metadata = plugin.plugin_metadata(plugin_name) + if metadata == nil then + core.log.warn("plugin_metadata is required for opentelemetry plugin to working properly") + return + end + core.log.info("metadata: ", core.json.delay_encode(metadata)) + local plugin_info = metadata.value + local vars = api_ctx.var + + local tracer, err = core.lrucache.plugin_ctx(lrucache, api_ctx, nil, + create_tracer_obj, conf, plugin_info) + if not tracer then + core.log.error("failed to fetch tracer object: ", err) + return + end + + local span_name = vars.method + + local attributes = { + attr.string("net.host.name", vars.host), + attr.string("http.method", vars.method), + attr.string("http.scheme", vars.scheme), + attr.string("http.target", vars.request_uri), + attr.string("http.user_agent", vars.http_user_agent), + } + + if api_ctx.curr_req_matched then + table.insert(attributes, attr.string("apisix.route_id", api_ctx.route_id)) + table.insert(attributes, attr.string("apisix.route_name", api_ctx.route_name)) + table.insert(attributes, attr.string("http.route", api_ctx.curr_req_matched._path)) + span_name = span_name .. " " .. 
api_ctx.curr_req_matched._path + end + + if api_ctx.service_id then + table.insert(attributes, attr.string("apisix.service_id", api_ctx.service_id)) + table.insert(attributes, attr.string("apisix.service_name", api_ctx.service_name)) + end + + if conf.additional_attributes then + inject_attributes(attributes, conf.additional_attributes, api_ctx.var, false) + end + + if conf.additional_header_prefix_attributes then + inject_attributes( + attributes, + conf.additional_header_prefix_attributes, + core.request.headers(api_ctx), + true + ) + end + + -- extract trace context from the headers of downstream HTTP request + local upstream_context = trace_context_propagator:extract(context, ngx.req) + + local ctx = tracer:start(upstream_context, span_name, { + kind = span_kind.server, + attributes = attributes, + }) + + if plugin_info.set_ngx_var then + local span_context = ctx:span():context() + ngx_var.opentelemetry_context_traceparent = string_format("00-%s-%s-%02x", + span_context.trace_id, + span_context.span_id, + span_context.trace_flags) + ngx_var.opentelemetry_trace_id = span_context.trace_id + ngx_var.opentelemetry_span_id = span_context.span_id + end + + api_ctx.otel_context_token = ctx:attach() + + -- inject trace context into the headers of upstream HTTP request + trace_context_propagator:inject(ctx, ngx.req) +end + + +function _M.delayed_body_filter(conf, api_ctx) + if api_ctx.otel_context_token and ngx.arg[2] then + local ctx = context:current() + ctx:detach(api_ctx.otel_context_token) + api_ctx.otel_context_token = nil + + -- get span from current context + local span = ctx:span() + local upstream_status = core.response.get_upstream_status(api_ctx) + if upstream_status and upstream_status >= 500 then + span:set_status(span_status.ERROR, + "upstream response status: " .. upstream_status) + end + + span:set_attributes(attr.int("http.status_code", upstream_status)) + + span:finish() + end +end + + +-- body_filter maybe not called because of empty http body response +-- so we need to check if the span has finished in log phase +function _M.log(conf, api_ctx) + if api_ctx.otel_context_token then + -- ctx:detach() is not necessary, because of ctx is stored in ngx.ctx + local upstream_status = core.response.get_upstream_status(api_ctx) + + -- get span from current context + local span = context:current():span() + if upstream_status and upstream_status >= 500 then + span:set_status(span_status.ERROR, + "upstream response status: " .. upstream_status) + end + + span:finish() + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/openwhisk.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/openwhisk.lua new file mode 100644 index 0000000..f139925 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/openwhisk.lua @@ -0,0 +1,143 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local http = require("resty.http") +local ngx_encode_base64 = ngx.encode_base64 +local tostring = tostring + +local name_pattern = [[\A([\w]|[\w][\w@ .-]*[\w@.-]+)\z]] + +local schema = { + type = "object", + properties = { + api_host = {type = "string"}, + ssl_verify = { + type = "boolean", + default = true, + }, + service_token = {type = "string"}, + namespace = {type = "string", maxLength = 256, pattern = name_pattern}, + package = {type = "string", maxLength = 256, pattern = name_pattern}, + action = {type = "string", maxLength = 256, pattern = name_pattern}, + result = { + type = "boolean", + default = true, + }, + timeout = { + type = "integer", + minimum = 1, + maximum = 60000, + default = 3000, + description = "timeout in milliseconds", + }, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = {type = "integer", minimum = 1000, default = 60000}, + keepalive_pool = {type = "integer", minimum = 1, default = 5} + }, + required = {"api_host", "service_token", "namespace", "action"}, + encrypt_fields = {"service_token"} +} + + +local _M = { + version = 0.1, + priority = -1901, + name = "openwhisk", + schema = schema, +} + + +function _M.check_schema(conf) + local check = {"api_host"} + core.utils.check_https(check, conf, _M.name) + core.utils.check_tls_bool({"ssl_verify"}, conf, _M.name) + + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +function _M.access(conf, ctx) + local params = { + method = "POST", + body = core.request.get_body(), + query = { + blocking = "true", + result = tostring(conf.result), + timeout = conf.timeout + }, + headers = { + ["Authorization"] = "Basic " .. ngx_encode_base64(conf.service_token), + ["Content-Type"] = "application/json", + }, + keepalive = conf.keepalive, + ssl_verify = conf.ssl_verify + } + + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + end + + -- OpenWhisk action endpoint + local package = conf.package and conf.package .. "/" or "" + local endpoint = conf.api_host .. "/api/v1/namespaces/" .. conf.namespace .. + "/actions/" .. package .. conf.action + + local httpc = http.new() + httpc:set_timeout(conf.timeout) + + local res, err = httpc:request_uri(endpoint, params) + + if not res then + core.log.error("failed to process openwhisk action, err: ", err) + return 503 + end + + -- check if res.body is nil + if res.body == nil then + return res.status, res.body + end + + -- parse OpenWhisk JSON response + -- OpenWhisk supports two types of responses, the user can return only + -- the response body, or set the status code and header. 
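+ -- For example, an action may return just a bare body such as {"hello": "world"}, + -- or a wrapped response like {"statusCode": 200, "headers": {...}, "body": "..."} + -- (illustrative payloads; see the statusCode/headers/body handling below).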
+ local result, err = core.json.decode(res.body) + + if not result then + core.log.error("failed to parse openwhisk response data: ", err) + return 503 + end + + -- setting response headers + if result.headers ~= nil then + core.response.set_header(result.headers) + end + + local code = result.statusCode or res.status + local body = result.body or res.body + return code, body + +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus.lua new file mode 100644 index 0000000..b154697 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus.lua @@ -0,0 +1,58 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local exporter = require("apisix.plugins.prometheus.exporter") + + +local plugin_name = "prometheus" +local schema = { + type = "object", + properties = { + prefer_name = { + type = "boolean", + default = false + } + }, +} + + +local _M = { + version = 0.2, + priority = 500, + name = plugin_name, + log = exporter.http_log, + schema = schema, + run_policy = "prefer_route", +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +function _M.api() + return exporter.get_api(true) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus/exporter.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus/exporter.lua new file mode 100644 index 0000000..d34ab87 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/prometheus/exporter.lua @@ -0,0 +1,569 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local base_prometheus = require("prometheus") +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local control = require("apisix.control.v1") +local ipairs = ipairs +local pairs = pairs +local ngx = ngx +local re_gmatch = ngx.re.gmatch +local ffi = require("ffi") +local C = ffi.C +local pcall = pcall +local select = select +local type = type +local prometheus +local prometheus_bkp +local router = require("apisix.router") +local get_routes = router.http_routes +local get_ssls = router.ssls +local get_services = require("apisix.http.service").services +local get_consumers = require("apisix.consumer").consumers +local get_upstreams = require("apisix.upstream").upstreams +local get_global_rules = require("apisix.global_rules").global_rules +local get_global_rules_prev_index = require("apisix.global_rules").get_pre_index +local clear_tab = core.table.clear +local get_stream_routes = router.stream_routes +local get_protos = require("apisix.plugins.grpc-transcode.proto").protos +local service_fetch = require("apisix.http.service").get +local latency_details = require("apisix.utils.log-util").latency_details_in_ms +local xrpc = require("apisix.stream.xrpc") +local unpack = unpack +local next = next + + +local ngx_capture +if ngx.config.subsystem == "http" then + ngx_capture = ngx.location.capture +end + + +local plugin_name = "prometheus" +local default_export_uri = "/apisix/prometheus/metrics" +-- Default set of latency buckets, 1ms to 60s: +local DEFAULT_BUCKETS = {1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 30000, 60000} + +local metrics = {} + +local inner_tab_arr = {} + +local function gen_arr(...) + clear_tab(inner_tab_arr) + for i = 1, select('#', ...) do + inner_tab_arr[i] = select(i, ...) + end + + return inner_tab_arr +end + +local extra_labels_tbl = {} + +local function extra_labels(name, ctx) + clear_tab(extra_labels_tbl) + + local attr = plugin.plugin_attr("prometheus") + local metrics = attr.metrics + + if metrics and metrics[name] and metrics[name].extra_labels then + local labels = metrics[name].extra_labels + for _, kv in ipairs(labels) do + local val, v = next(kv) + if ctx then + val = ctx.var[v:sub(2)] + if val == nil then + val = "" + end + end + core.table.insert(extra_labels_tbl, val) + end + end + + return extra_labels_tbl +end + + +local _M = {} + + +local function init_stream_metrics() + metrics.stream_connection_total = prometheus:counter("stream_connection_total", + "Total number of connections handled per stream route in APISIX", + {"route"}) + + xrpc.init_metrics(prometheus) +end + + +function _M.http_init(prometheus_enabled_in_stream) + -- todo: support hot reload, we may need to update the lua-prometheus + -- library + if ngx.get_phase() ~= "init" and ngx.get_phase() ~= "init_worker" then + if prometheus_bkp then + prometheus = prometheus_bkp + end + return + end + + clear_tab(metrics) + + -- Newly added metrics should follow the naming best practices described in + -- https://prometheus.io/docs/practices/naming/#metric-names + -- For example, + -- 1. Add unit as the suffix + -- 2. Add `_total` as the suffix if the metric type is counter + -- 3. Use base unit + -- We keep the old metric names for the compatibility. 
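+    -- For instance, a newly added byte counter would ideally be named
+    -- "apisix_bandwidth_bytes_total" (illustrative name only), while the legacy
+    -- "apisix_bandwidth" defined further down is kept unchanged. The "apisix_"
+    -- part is the metric_prefix resolved just below and can be overridden via
+    -- the prometheus plugin_attr ("metric_prefix").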
+ + -- across all services + local metric_prefix = "apisix_" + local attr = plugin.plugin_attr("prometheus") + if attr and attr.metric_prefix then + metric_prefix = attr.metric_prefix + end + + local status_metrics_exptime = core.table.try_read_attr(attr, "metrics", + "http_status", "expire") + local latency_metrics_exptime = core.table.try_read_attr(attr, "metrics", + "http_latency", "expire") + local bandwidth_metrics_exptime = core.table.try_read_attr(attr, "metrics", + "bandwidth", "expire") + local upstream_status_exptime = core.table.try_read_attr(attr, "metrics", + "upstream_status", "expire") + + prometheus = base_prometheus.init("prometheus-metrics", metric_prefix) + + metrics.connections = prometheus:gauge("nginx_http_current_connections", + "Number of HTTP connections", + {"state"}) + + metrics.requests = prometheus:gauge("http_requests_total", + "The total number of client requests since APISIX started") + + metrics.etcd_reachable = prometheus:gauge("etcd_reachable", + "Config server etcd reachable from APISIX, 0 is unreachable") + + metrics.node_info = prometheus:gauge("node_info", + "Info of APISIX node", + {"hostname", "version"}) + + metrics.etcd_modify_indexes = prometheus:gauge("etcd_modify_indexes", + "Etcd modify index for APISIX keys", + {"key"}) + + metrics.shared_dict_capacity_bytes = prometheus:gauge("shared_dict_capacity_bytes", + "The capacity of each nginx shared DICT since APISIX start", + {"name"}) + + metrics.shared_dict_free_space_bytes = prometheus:gauge("shared_dict_free_space_bytes", + "The free space of each nginx shared DICT since APISIX start", + {"name"}) + + metrics.upstream_status = prometheus:gauge("upstream_status", + "Upstream status from health check", + {"name", "ip", "port"}, + upstream_status_exptime) + + -- per service + + -- The consumer label indicates the name of consumer corresponds to the + -- request to the route/service, it will be an empty string if there is + -- no consumer in request. 
+ metrics.status = prometheus:counter("http_status", + "HTTP status codes per service in APISIX", + {"code", "route", "matched_uri", "matched_host", "service", "consumer", "node", + unpack(extra_labels("http_status"))}, + status_metrics_exptime) + + local buckets = DEFAULT_BUCKETS + if attr and attr.default_buckets then + buckets = attr.default_buckets + end + + metrics.latency = prometheus:histogram("http_latency", + "HTTP request latency in milliseconds per service in APISIX", + {"type", "route", "service", "consumer", "node", unpack(extra_labels("http_latency"))}, + buckets, latency_metrics_exptime) + + metrics.bandwidth = prometheus:counter("bandwidth", + "Total bandwidth in bytes consumed per service in APISIX", + {"type", "route", "service", "consumer", "node", unpack(extra_labels("bandwidth"))}, + bandwidth_metrics_exptime) + + if prometheus_enabled_in_stream then + init_stream_metrics() + end +end + + +function _M.stream_init() + if ngx.get_phase() ~= "init" and ngx.get_phase() ~= "init_worker" then + return + end + + if not pcall(function() return C.ngx_meta_lua_ffi_shdict_udata_to_zone end) then + core.log.error("need to build APISIX-Runtime to support L4 metrics") + return + end + + clear_tab(metrics) + + local metric_prefix = "apisix_" + local attr = plugin.plugin_attr("prometheus") + if attr and attr.metric_prefix then + metric_prefix = attr.metric_prefix + end + + prometheus = base_prometheus.init("prometheus-metrics", metric_prefix) + + init_stream_metrics() +end + + +function _M.http_log(conf, ctx) + local vars = ctx.var + + local route_id = "" + local balancer_ip = ctx.balancer_ip or "" + local service_id = "" + local consumer_name = ctx.consumer_name or "" + + local matched_route = ctx.matched_route and ctx.matched_route.value + if matched_route then + route_id = matched_route.id + service_id = matched_route.service_id or "" + if conf.prefer_name == true then + route_id = matched_route.name or route_id + if service_id ~= "" then + local service = service_fetch(service_id) + service_id = service and service.value.name or service_id + end + end + end + + local matched_uri = "" + local matched_host = "" + if ctx.curr_req_matched then + matched_uri = ctx.curr_req_matched._path or "" + matched_host = ctx.curr_req_matched._host or "" + end + + metrics.status:inc(1, + gen_arr(vars.status, route_id, matched_uri, matched_host, + service_id, consumer_name, balancer_ip, + unpack(extra_labels("http_status", ctx)))) + + local latency, upstream_latency, apisix_latency = latency_details(ctx) + local latency_extra_label_values = extra_labels("http_latency", ctx) + + metrics.latency:observe(latency, + gen_arr("request", route_id, service_id, consumer_name, balancer_ip, + unpack(latency_extra_label_values))) + + if upstream_latency then + metrics.latency:observe(upstream_latency, + gen_arr("upstream", route_id, service_id, consumer_name, balancer_ip, + unpack(latency_extra_label_values))) + end + + metrics.latency:observe(apisix_latency, + gen_arr("apisix", route_id, service_id, consumer_name, balancer_ip, + unpack(latency_extra_label_values))) + + local bandwidth_extra_label_values = extra_labels("bandwidth", ctx) + + metrics.bandwidth:inc(vars.request_length, + gen_arr("ingress", route_id, service_id, consumer_name, balancer_ip, + unpack(bandwidth_extra_label_values))) + + metrics.bandwidth:inc(vars.bytes_sent, + gen_arr("egress", route_id, service_id, consumer_name, balancer_ip, + unpack(bandwidth_extra_label_values))) +end + + +function _M.stream_log(conf, ctx) + local route_id = "" + 
local matched_route = ctx.matched_route and ctx.matched_route.value + if matched_route then + route_id = matched_route.id + if conf.prefer_name == true then + route_id = matched_route.name or route_id + end + end + + metrics.stream_connection_total:inc(1, gen_arr(route_id)) +end + + +local ngx_status_items = {"active", "accepted", "handled", "total", + "reading", "writing", "waiting"} +local label_values = {} + +local function nginx_status() + local res = ngx_capture("/apisix/nginx_status") + if not res or res.status ~= 200 then + core.log.error("failed to fetch Nginx status") + return + end + + -- Active connections: 2 + -- server accepts handled requests + -- 26 26 84 + -- Reading: 0 Writing: 1 Waiting: 1 + + local iterator, err = re_gmatch(res.body, [[(\d+)]], "jmo") + if not iterator then + core.log.error("failed to re.gmatch Nginx status: ", err) + return + end + + for _, name in ipairs(ngx_status_items) do + local val = iterator() + if not val then + break + end + + if name == "total" then + metrics.requests:set(val[0]) + else + label_values[1] = name + metrics.connections:set(val[0], label_values) + end + end +end + + +local key_values = {} +local function set_modify_index(key, items, items_ver, global_max_index) + clear_tab(key_values) + local max_idx = 0 + if items_ver and items then + for _, item in ipairs(items) do + if type(item) == "table" then + local modify_index = item.orig_modifiedIndex or item.modifiedIndex + if modify_index > max_idx then + max_idx = modify_index + end + end + end + end + + key_values[1] = key + metrics.etcd_modify_indexes:set(max_idx, key_values) + + + global_max_index = max_idx > global_max_index and max_idx or global_max_index + + return global_max_index +end + + +local function etcd_modify_index() + clear_tab(key_values) + local global_max_idx = 0 + + -- routes + local routes, routes_ver = get_routes() + global_max_idx = set_modify_index("routes", routes, routes_ver, global_max_idx) + + -- services + local services, services_ver = get_services() + global_max_idx = set_modify_index("services", services, services_ver, global_max_idx) + + -- ssls + local ssls, ssls_ver = get_ssls() + global_max_idx = set_modify_index("ssls", ssls, ssls_ver, global_max_idx) + + -- consumers + local consumers, consumers_ver = get_consumers() + global_max_idx = set_modify_index("consumers", consumers, consumers_ver, global_max_idx) + + -- global_rules + local global_rules, global_rules_ver = get_global_rules() + if global_rules then + global_max_idx = set_modify_index("global_rules", global_rules, + global_rules_ver, global_max_idx) + + -- prev_index + key_values[1] = "prev_index" + local prev_index = get_global_rules_prev_index() + metrics.etcd_modify_indexes:set(prev_index, key_values) + + else + global_max_idx = set_modify_index("global_rules", nil, nil, global_max_idx) + end + + -- upstreams + local upstreams, upstreams_ver = get_upstreams() + global_max_idx = set_modify_index("upstreams", upstreams, upstreams_ver, global_max_idx) + + -- stream_routes + local stream_routes, stream_routes_ver = get_stream_routes() + global_max_idx = set_modify_index("stream_routes", stream_routes, + stream_routes_ver, global_max_idx) + + -- proto + local protos, protos_ver = get_protos() + global_max_idx = set_modify_index("protos", protos, protos_ver, global_max_idx) + + -- global max + key_values[1] = "max_modify_index" + metrics.etcd_modify_indexes:set(global_max_idx, key_values) + +end + + +local function shared_dict_status() + local name = {} + for shared_dict_name, shared_dict in 
pairs(ngx.shared) do + name[1] = shared_dict_name + metrics.shared_dict_capacity_bytes:set(shared_dict:capacity(), name) + metrics.shared_dict_free_space_bytes:set(shared_dict:free_space(), name) + end +end + + +local function collect(ctx, stream_only) + if not prometheus or not metrics then + core.log.error("prometheus: plugin is not initialized, please make sure ", + " 'prometheus_metrics' shared dict is present in nginx template") + return 500, {message = "An unexpected error occurred"} + end + + -- collect ngx.shared.DICT status + shared_dict_status() + + -- across all services + nginx_status() + + local config = core.config.new() + + -- config server status + local vars = ngx.var or {} + local hostname = vars.hostname or "" + local version = core.version.VERSION or "" + + -- we can't get etcd index in metric server if only stream subsystem is enabled + if config.type == "etcd" and not stream_only then + -- etcd modify index + etcd_modify_index() + + local version, err = config:server_version() + if version then + metrics.etcd_reachable:set(1) + + else + metrics.etcd_reachable:set(0) + core.log.error("prometheus: failed to reach config server while ", + "processing metrics endpoint: ", err) + end + + -- Because request any key from etcd will return the "X-Etcd-Index". + -- A non-existed key is preferred because it doesn't return too much data. + -- So use phantom key to get etcd index. + local res, _ = config:getkey("/phantomkey") + if res and res.headers then + clear_tab(key_values) + -- global max + key_values[1] = "x_etcd_index" + metrics.etcd_modify_indexes:set(res.headers["X-Etcd-Index"], key_values) + end + end + + metrics.node_info:set(1, gen_arr(hostname, version)) + + -- update upstream_status metrics + local stats = control.get_health_checkers() + for _, stat in ipairs(stats) do + for _, node in ipairs(stat.nodes) do + metrics.upstream_status:set( + (node.status == "healthy" or node.status == "mostly_healthy") and 1 or 0, + gen_arr(stat.name, node.ip, node.port) + ) + end + end + + core.response.set_header("content_type", "text/plain") + return 200, core.table.concat(prometheus:metric_data()) +end +_M.collect = collect + + +local function get_api(called_by_api_router) + local export_uri = default_export_uri + local attr = plugin.plugin_attr(plugin_name) + if attr and attr.export_uri then + export_uri = attr.export_uri + end + + local api = { + methods = {"GET"}, + uri = export_uri, + handler = collect + } + + if not called_by_api_router then + return api + end + + if attr.enable_export_server then + return {} + end + + return {api} +end +_M.get_api = get_api + + +function _M.export_metrics(stream_only) + if not prometheus then + core.response.exit(200, "{}") + end + local api = get_api(false) + local uri = ngx.var.uri + local method = ngx.req.get_method() + + if uri == api.uri and method == api.methods[1] then + local code, body = api.handler(nil, stream_only) + if code or body then + core.response.exit(code, body) + end + end + + return core.response.exit(404) +end + + +function _M.metric_data() + return prometheus:metric_data() +end + +function _M.get_prometheus() + return prometheus +end + + +function _M.destroy() + if prometheus ~= nil then + prometheus_bkp = core.table.deepcopy(prometheus) + prometheus = nil + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/disk_handler.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/disk_handler.lua new file mode 100644 index 0000000..70d3532 --- /dev/null +++ 
b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/disk_handler.lua @@ -0,0 +1,102 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local os = os +local ngx_re = require("ngx.re") +local core = require("apisix.core") +local util = require("apisix.plugins.proxy-cache.util") + +local _M = {} + + +local function disk_cache_purge(conf, ctx) + local cache_zone_info = ngx_re.split(ctx.var.upstream_cache_zone_info, ",") + + local filename = util.generate_cache_filename(cache_zone_info[1], cache_zone_info[2], + ctx.var.upstream_cache_key) + + if util.file_exists(filename) then + os.remove(filename) + return nil + end + + return "Not found" +end + + +function _M.access(conf, ctx) + ctx.var.upstream_cache_zone = conf.cache_zone + + if ctx.var.request_method == "PURGE" then + local err = disk_cache_purge(conf, ctx) + if err ~= nil then + return 404 + end + + return 200 + end + + if conf.cache_bypass ~= nil then + local value = util.generate_complex_value(conf.cache_bypass, ctx) + ctx.var.upstream_cache_bypass = value + core.log.info("proxy-cache cache bypass value:", value) + end + + if not util.match_method(conf, ctx) then + ctx.var.upstream_cache_bypass = "1" + core.log.info("proxy-cache cache bypass method: ", ctx.var.request_method) + end +end + + +function _M.header_filter(conf, ctx) + local no_cache = "1" + + if util.match_method(conf, ctx) and util.match_status(conf, ctx) then + no_cache = "0" + end + + if conf.no_cache ~= nil then + local value = util.generate_complex_value(conf.no_cache, ctx) + core.log.info("proxy-cache no-cache value:", value) + + if value ~= nil and value ~= "" and value ~= "0" then + no_cache = "1" + end + end + + local upstream_hdr_cache_control + local upstream_hdr_expires + + if conf.hide_cache_headers == true then + upstream_hdr_cache_control = "" + upstream_hdr_expires = "" + else + upstream_hdr_cache_control = ctx.var.upstream_http_cache_control + upstream_hdr_expires = ctx.var.upstream_http_expires + end + + core.response.set_header("Cache-Control", upstream_hdr_cache_control, + "Expires", upstream_hdr_expires, + "Apisix-Cache-Status", ctx.var.upstream_cache_status) + + ctx.var.upstream_no_cache = no_cache + core.log.info("proxy-cache no cache:", no_cache) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/init.lua new file mode 100644 index 0000000..918f755 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/init.lua @@ -0,0 +1,198 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local memory_handler = require("apisix.plugins.proxy-cache.memory_handler") +local disk_handler = require("apisix.plugins.proxy-cache.disk_handler") +local util = require("apisix.plugins.proxy-cache.util") +local core = require("apisix.core") +local ipairs = ipairs + +local plugin_name = "proxy-cache" + +local STRATEGY_DISK = "disk" +local STRATEGY_MEMORY = "memory" +local DEFAULT_CACHE_ZONE = "disk_cache_one" + +local schema = { + type = "object", + properties = { + cache_zone = { + type = "string", + minLength = 1, + maxLength = 100, + default = DEFAULT_CACHE_ZONE, + }, + cache_strategy = { + type = "string", + enum = {STRATEGY_DISK, STRATEGY_MEMORY}, + default = STRATEGY_DISK, + }, + cache_key = { + type = "array", + minItems = 1, + items = { + description = "a key for caching", + type = "string", + pattern = [[(^[^\$].+$|^\$[0-9a-zA-Z_]+$)]], + }, + default = {"$host", "$request_uri"} + }, + cache_http_status = { + type = "array", + minItems = 1, + items = { + description = "http response status", + type = "integer", + minimum = 200, + maximum = 599, + }, + uniqueItems = true, + default = {200, 301, 404}, + }, + cache_method = { + type = "array", + minItems = 1, + items = { + description = "supported http method", + type = "string", + enum = {"GET", "POST", "HEAD"}, + }, + uniqueItems = true, + default = {"GET", "HEAD"}, + }, + hide_cache_headers = { + type = "boolean", + default = false, + }, + cache_control = { + type = "boolean", + default = false, + }, + cache_bypass = { + type = "array", + minItems = 1, + items = { + type = "string", + pattern = [[(^[^\$].+$|^\$[0-9a-zA-Z_]+$)]] + }, + }, + no_cache = { + type = "array", + minItems = 1, + items = { + type = "string", + pattern = [[(^[^\$].+$|^\$[0-9a-zA-Z_]+$)]] + }, + }, + cache_ttl = { + type = "integer", + minimum = 1, + default = 300, + }, + }, +} + + +local _M = { + version = 0.2, + priority = 1085, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + for _, key in ipairs(conf.cache_key) do + if key == "$request_method" then + return false, "cache_key variable " .. key .. " unsupported" + end + end + + local found = false + local local_conf = core.config.local_conf() + if local_conf.apisix.proxy_cache then + local err = "cache_zone " .. conf.cache_zone .. 
" not found" + for _, cache in ipairs(local_conf.apisix.proxy_cache.zones) do + -- cache_zone passed in plugin config matched one of the proxy_cache zones + if cache.name == conf.cache_zone then + -- check for the mismatch between cache_strategy and corresponding cache zone + if (conf.cache_strategy == STRATEGY_MEMORY and cache.disk_path) or + (conf.cache_strategy == STRATEGY_DISK and not cache.disk_path) then + err = "invalid or empty cache_zone for cache_strategy: "..conf.cache_strategy + else + found = true + end + break + end + end + + if found == false then + return false, err + end + end + + return true +end + + +function _M.access(conf, ctx) + core.log.info("proxy-cache plugin access phase, conf: ", core.json.delay_encode(conf)) + + local value = util.generate_complex_value(conf.cache_key, ctx) + ctx.var.upstream_cache_key = value + core.log.info("proxy-cache cache key value:", value) + + local handler + if conf.cache_strategy == STRATEGY_MEMORY then + handler = memory_handler + else + handler = disk_handler + end + + return handler.access(conf, ctx) +end + + +function _M.header_filter(conf, ctx) + core.log.info("proxy-cache plugin header filter phase, conf: ", core.json.delay_encode(conf)) + + local handler + if conf.cache_strategy == STRATEGY_MEMORY then + handler = memory_handler + else + handler = disk_handler + end + + handler.header_filter(conf, ctx) +end + + +function _M.body_filter(conf, ctx) + core.log.info("proxy-cache plugin body filter phase, conf: ", core.json.delay_encode(conf)) + + if conf.cache_strategy == STRATEGY_MEMORY then + memory_handler.body_filter(conf, ctx) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory.lua new file mode 100644 index 0000000..6d8d804 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory.lua @@ -0,0 +1,84 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local ngx = ngx +local ngx_shared = ngx.shared +local setmetatable = setmetatable +local core = require("apisix.core") + +local _M = {} +local mt = { __index = _M } + + +function _M.new(opts) + return setmetatable({ + dict = ngx_shared[opts.shdict_name], + }, mt) +end + + +function _M:set(key, obj, ttl) + if self.dict == nil then + return nil, "invalid cache_zone provided" + end + + local obj_json = core.json.encode(obj) + if not obj_json then + return nil, "could not encode object" + end + + local succ, err = self.dict:set(key, obj_json, ttl) + return succ and obj_json or nil, err +end + + +function _M:get(key) + if self.dict == nil then + return nil, "invalid cache_zone provided" + end + + -- If the key does not exist or has expired, then res_json will be nil. 
+ local res_json, err, stale = self.dict:get_stale(key) + if not res_json then + if not err then + return nil, "not found" + else + return nil, err + end + end + if stale then + return nil, "expired" + end + + local res_obj, err = core.json.decode(res_json) + if not res_obj then + return nil, err + end + + return res_obj, nil +end + + +function _M:purge(key) + if self.dict == nil then + return nil, "invalid cache_zone provided" + end + self.dict:delete(key) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory_handler.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory_handler.lua new file mode 100644 index 0000000..e41cb72 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/memory_handler.lua @@ -0,0 +1,332 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local memory_strategy = require("apisix.plugins.proxy-cache.memory").new +local util = require("apisix.plugins.proxy-cache.util") +local core = require("apisix.core") +local tab_new = require("table.new") +local ngx_re_gmatch = ngx.re.gmatch +local ngx_re_match = ngx.re.match +local parse_http_time = ngx.parse_http_time +local concat = table.concat +local lower = string.lower +local floor = math.floor +local tostring = tostring +local tonumber = tonumber +local ngx = ngx +local type = type +local pairs = pairs +local time = ngx.now +local max = math.max + +local CACHE_VERSION = 1 + +local _M = {} + +-- http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.5.1 +-- note content-length & apisix-cache-status are not strictly +-- hop-by-hop but we will be adjusting it here anyhow +local hop_by_hop_headers = { + ["connection"] = true, + ["keep-alive"] = true, + ["proxy-authenticate"] = true, + ["proxy-authorization"] = true, + ["te"] = true, + ["trailers"] = true, + ["transfer-encoding"] = true, + ["upgrade"] = true, + ["content-length"] = true, + ["apisix-cache-status"] = true, +} + + +local function include_cache_header(header) + local n_header = lower(header) + if n_header == "expires" or n_header == "cache-control" then + return true + end + + return false +end + + +local function overwritable_header(header) + local n_header = lower(header) + + return not hop_by_hop_headers[n_header] + and not ngx_re_match(n_header, "ratelimit-remaining") +end + + +-- The following format can accept: +-- Cache-Control: no-cache +-- Cache-Control: no-store +-- Cache-Control: max-age=3600 +-- Cache-Control: max-stale=3600 +-- Cache-Control: min-fresh=3600 +-- Cache-Control: private, max-age=600 +-- Cache-Control: public, max-age=31536000 +-- Refer to: https://www.holisticseo.digital/pagespeed/cache-control/ +local function parse_directive_header(h) + if not h then + return {} + end + + if 
type(h) == "table" then + h = concat(h, ", ") + end + + local t = {} + local res = tab_new(3, 0) + local iter = ngx_re_gmatch(h, "([^,]+)", "oj") + + local m = iter() + while m do + local _, err = ngx_re_match(m[0], [[^\s*([^=]+)(?:=(.+))?]], + "oj", nil, res) + if err then + core.log.error(err) + end + + -- store the directive token as a numeric value if it looks like a number; + -- otherwise, store the string value. for directives without token, we just + -- set the key to true + t[lower(res[1])] = tonumber(res[2]) or res[2] or true + + m = iter() + end + + return t +end + + +local function parse_resource_ttl(ctx, cc) + local max_age = cc["s-maxage"] or cc["max-age"] + + if not max_age then + local expires = ctx.var.upstream_http_expires + + -- if multiple Expires headers are present, last one wins + if type(expires) == "table" then + expires = expires[#expires] + end + + local exp_time = parse_http_time(tostring(expires)) + if exp_time then + max_age = exp_time - time() + end + end + + return max_age and max(max_age, 0) or 0 +end + + +local function cacheable_request(conf, ctx, cc) + if not util.match_method(conf, ctx) then + return false, "MISS" + end + + if conf.cache_bypass ~= nil then + local value = util.generate_complex_value(conf.cache_bypass, ctx) + core.log.info("proxy-cache cache bypass value:", value) + if value ~= nil and value ~= "" and value ~= "0" then + return false, "BYPASS" + end + end + + if conf.cache_control and (cc["no-store"] or cc["no-cache"]) then + return false, "BYPASS" + end + + return true, "" +end + + +local function cacheable_response(conf, ctx, cc) + if not util.match_status(conf, ctx) then + return false + end + + if conf.no_cache ~= nil then + local value = util.generate_complex_value(conf.no_cache, ctx) + core.log.info("proxy-cache no-cache value:", value) + + if value ~= nil and value ~= "" and value ~= "0" then + return false + end + end + + if conf.cache_control and (cc["private"] or cc["no-store"] or cc["no-cache"]) then + return false + end + + if conf.cache_control and parse_resource_ttl(ctx, cc) <= 0 then + return false + end + + return true +end + + +function _M.access(conf, ctx) + local cc = parse_directive_header(ctx.var.http_cache_control) + + if ctx.var.request_method ~= "PURGE" then + local ret, msg = cacheable_request(conf, ctx, cc) + if not ret then + core.response.set_header("Apisix-Cache-Status", msg) + return + end + end + + if not ctx.cache then + ctx.cache = { + memory = memory_strategy({shdict_name = conf.cache_zone}), + hit = false, + ttl = 0, + } + end + + local res, err = ctx.cache.memory:get(ctx.var.upstream_cache_key) + + if ctx.var.request_method == "PURGE" then + if err == "not found" then + return 404 + end + ctx.cache.memory:purge(ctx.var.upstream_cache_key) + ctx.cache = nil + return 200 + end + + if err then + if err == "expired" then + core.response.set_header("Apisix-Cache-Status", "EXPIRED") + + elseif err ~= "not found" then + core.response.set_header("Apisix-Cache-Status", "MISS") + core.log.error("failed to get from cache, err: ", err) + + elseif conf.cache_control and cc["only-if-cached"] then + core.response.set_header("Apisix-Cache-Status", "MISS") + return 504 + + else + core.response.set_header("Apisix-Cache-Status", "MISS") + end + return + end + + if res.version ~= CACHE_VERSION then + core.log.warn("cache format mismatch, purging ", ctx.var.upstream_cache_key) + core.response.set_header("Apisix-Cache-Status", "BYPASS") + ctx.cache.memory:purge(ctx.var.upstream_cache_key) + return + end + + if 
conf.cache_control then + if cc["max-age"] and time() - res.timestamp > cc["max-age"] then + core.response.set_header("Apisix-Cache-Status", "STALE") + return + end + + if cc["max-stale"] and time() - res.timestamp - res.ttl > cc["max-stale"] then + core.response.set_header("Apisix-Cache-Status", "STALE") + return + end + + if cc["min-fresh"] and res.ttl - (time() - res.timestamp) < cc["min-fresh"] then + core.response.set_header("Apisix-Cache-Status", "STALE") + return + end + else + if time() - res.timestamp > res.ttl then + core.response.set_header("Apisix-Cache-Status", "STALE") + return + end + end + + ctx.cache.hit = true + + for key, value in pairs(res.headers) do + if conf.hide_cache_headers == true and include_cache_header(key) then + core.response.set_header(key, "") + elseif overwritable_header(key) then + core.response.set_header(key, value) + end + end + + core.response.set_header("Age", floor(time() - res.timestamp)) + core.response.set_header("Apisix-Cache-Status", "HIT") + + return res.status, res.body +end + + +function _M.header_filter(conf, ctx) + local cache = ctx.cache + if not cache or cache.hit then + return + end + + local res_headers = ngx.resp.get_headers(0, true) + + for key in pairs(res_headers) do + if conf.hide_cache_headers == true and include_cache_header(key) then + core.response.set_header(key, "") + end + end + + local cc = parse_directive_header(ctx.var.upstream_http_cache_control) + + if cacheable_response(conf, ctx, cc) then + cache.res_headers = res_headers + cache.ttl = conf.cache_control and parse_resource_ttl(ctx, cc) or conf.cache_ttl + else + ctx.cache = nil + end +end + + +function _M.body_filter(conf, ctx) + local cache = ctx.cache + if not cache or cache.hit then + return + end + + local res_body = core.response.hold_body_chunk(ctx, true) + if not res_body then + return + end + + local res = { + status = ngx.status, + body = res_body, + body_len = #res_body, + headers = cache.res_headers, + ttl = cache.ttl, + timestamp = time(), + version = CACHE_VERSION, + } + + local res, err = cache.memory:set(ctx.var.upstream_cache_key, res, cache.ttl) + if not res then + core.log.error("failed to set cache, err: ", err) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/util.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/util.lua new file mode 100644 index 0000000..26c6e81 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-cache/util.lua @@ -0,0 +1,102 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local ngx_re = require("ngx.re") +local tab_concat = table.concat +local string = string +local io_open = io.open +local io_close = io.close +local ngx = ngx +local ipairs = ipairs +local pairs = pairs +local tonumber = tonumber + +local _M = {} + +local tmp = {} +function _M.generate_complex_value(data, ctx) + core.table.clear(tmp) + + core.log.info("proxy-cache complex value: ", core.json.delay_encode(data)) + for i, value in ipairs(data) do + core.log.info("proxy-cache complex value index-", i, ": ", value) + + if string.byte(value, 1, 1) == string.byte('$') then + tmp[i] = ctx.var[string.sub(value, 2)] or "" + else + tmp[i] = value + end + end + + return tab_concat(tmp, "") +end + + +-- check whether the request method match the user defined. +function _M.match_method(conf, ctx) + for _, method in ipairs(conf.cache_method) do + if method == ctx.var.request_method then + return true + end + end + + return false +end + + +-- check whether the response status match the user defined. +function _M.match_status(conf, ctx) + for _, status in ipairs(conf.cache_http_status) do + if status == ngx.status then + return true + end + end + + return false +end + + +function _M.file_exists(name) + local f = io_open(name, "r") + if f ~= nil then + io_close(f) + return true + end + return false +end + + +function _M.generate_cache_filename(cache_path, cache_levels, cache_key) + local md5sum = ngx.md5(cache_key) + local levels = ngx_re.split(cache_levels, ":") + local filename = "" + + local index = #md5sum + for k, v in pairs(levels) do + local length = tonumber(v) + index = index - length + filename = filename .. md5sum:sub(index+1, index+length) .. "/" + end + if cache_path:sub(-1) ~= "/" then + cache_path = cache_path .. "/" + end + filename = cache_path .. filename .. md5sum + return filename +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-control.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-control.lua new file mode 100644 index 0000000..fc87e45 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-control.lua @@ -0,0 +1,65 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local core = require("apisix.core") +local ok, apisix_ngx_client = pcall(require, "resty.apisix.client") + + +local schema = { + type = "object", + properties = { + request_buffering = { + type = "boolean", + default = true, + }, + }, +} + + +local plugin_name = "proxy-control" +local _M = { + version = 0.1, + priority = 21990, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +-- we want to control proxy behavior before auth, so put the code under rewrite method +function _M.rewrite(conf, ctx) + if not ok then + core.log.error("need to build APISIX-Runtime to support proxy control") + return 501 + end + + local request_buffering = conf.request_buffering + if request_buffering ~= nil then + local ok, err = apisix_ngx_client.set_proxy_request_buffering(request_buffering) + if not ok then + core.log.error("failed to set request_buffering: ", err) + return 503 + end + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-mirror.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-mirror.lua new file mode 100644 index 0000000..d6cede6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-mirror.lua @@ -0,0 +1,133 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local url = require("net.url") + +local math_random = math.random +local has_mod, apisix_ngx_client = pcall(require, "resty.apisix.client") + + +local plugin_name = "proxy-mirror" +local schema = { + type = "object", + properties = { + host = { + type = "string", + pattern = [=[^(http(s)?|grpc(s)?):\/\/([\da-zA-Z.-]+|\[[\da-fA-F:]+\])(:\d+)?$]=], + }, + path = { + type = "string", + pattern = [[^/[^?&]+$]], + }, + path_concat_mode = { + type = "string", + default = "replace", + enum = {"replace", "prefix"}, + description = "the concatenation mode for custom path" + }, + sample_ratio = { + type = "number", + minimum = 0.00001, + maximum = 1, + default = 1, + }, + }, + required = {"host"}, +} + +local _M = { + version = 0.1, + priority = 1010, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +local function resolver_host(prop_host) + local url_decoded = url.parse(prop_host) + local decoded_host = url_decoded.host + if not core.utils.parse_ipv4(decoded_host) and not core.utils.parse_ipv6(decoded_host) then + local ip, err = core.resolver.parse_domain(decoded_host) + + if not ip then + core.log.error("dns resolver resolves domain: ", decoded_host," error: ", err, + " will continue to use the host: ", decoded_host) + return url_decoded.scheme, prop_host + end + + local host = url_decoded.scheme .. '://' .. ip .. + (url_decoded.port and ':' .. url_decoded.port or '') + core.log.info(prop_host, " is resolved to: ", host) + return url_decoded.scheme, host + end + return url_decoded.scheme, prop_host +end + + +local function enable_mirror(ctx, conf) + local uri = (ctx.var.upstream_uri and ctx.var.upstream_uri ~= "") and + ctx.var.upstream_uri or + ctx.var.uri .. ctx.var.is_args .. (ctx.var.args or '') + + if conf.path then + if conf.path_concat_mode == "prefix" then + uri = conf.path .. uri + else + uri = conf.path .. ctx.var.is_args .. (ctx.var.args or '') + end + end + + local _, mirror_host = resolver_host(conf.host) + ctx.var.upstream_mirror_host = mirror_host + ctx.var.upstream_mirror_uri = mirror_host .. uri + + if has_mod then + apisix_ngx_client.enable_mirror() + end +end + + +function _M.rewrite(conf, ctx) + core.log.info("proxy mirror plugin rewrite phase, conf: ", core.json.delay_encode(conf)) + + if conf.sample_ratio == 1 then + enable_mirror(ctx, conf) + ctx.enable_mirror = true + else + local val = math_random() + core.log.info("mirror request sample_ratio conf: ", conf.sample_ratio, + ", random value: ", val) + if val < conf.sample_ratio then + enable_mirror(ctx, conf) + ctx.enable_mirror = true + end + end + +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-rewrite.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-rewrite.lua new file mode 100644 index 0000000..21f44bc --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/proxy-rewrite.lua @@ -0,0 +1,398 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugin_name = "proxy-rewrite" +local pairs = pairs +local ipairs = ipairs +local ngx = ngx +local type = type +local re_sub = ngx.re.sub +local re_match = ngx.re.match +local req_set_uri = ngx.req.set_uri +local sub_str = string.sub +local str_find = core.string.find + +local switch_map = {GET = ngx.HTTP_GET, POST = ngx.HTTP_POST, PUT = ngx.HTTP_PUT, + HEAD = ngx.HTTP_HEAD, DELETE = ngx.HTTP_DELETE, + OPTIONS = ngx.HTTP_OPTIONS, MKCOL = ngx.HTTP_MKCOL, + COPY = ngx.HTTP_COPY, MOVE = ngx.HTTP_MOVE, + PROPFIND = ngx.HTTP_PROPFIND, LOCK = ngx.HTTP_LOCK, + UNLOCK = ngx.HTTP_UNLOCK, PATCH = ngx.HTTP_PATCH, + TRACE = ngx.HTTP_TRACE, + } +local schema_method_enum = {} +for key in pairs(switch_map) do + core.table.insert(schema_method_enum, key) +end + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +core.ctx.register_var("proxy_rewrite_regex_uri_captures", function(ctx) + return ctx.proxy_rewrite_regex_uri_captures +end) + +local schema = { + type = "object", + properties = { + uri = { + description = "new uri for upstream", + type = "string", + minLength = 1, + maxLength = 4096, + pattern = [[^\/.*]], + }, + method = { + description = "proxy route method", + type = "string", + enum = schema_method_enum + }, + regex_uri = { + description = "new uri that substitute from client uri " .. 
+ "for upstream, lower priority than uri property", + type = "array", + minItems = 2, + items = { + description = "regex uri", + type = "string", + } + }, + host = { + description = "new host for upstream", + type = "string", + pattern = [[^[0-9a-zA-Z-.]+(:\d{1,5})?$]], + }, + headers = { + description = "new headers for request", + oneOf = { + { + type = "object", + minProperties = 1, + additionalProperties = false, + properties = { + add = { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + { type = "string" }, + { type = "number" } + } + } + }, + }, + set = { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + { type = "string" }, + { type = "number" }, + } + } + }, + }, + remove = { + type = "array", + minItems = 1, + items = { + type = "string", + -- "Referer" + pattern = "^[^:]+$" + } + }, + }, + }, + { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + { type = "string" }, + { type = "number" } + } + } + }, + } + }, + + }, + use_real_request_uri_unsafe = { + description = "use real_request_uri instead, THIS IS VERY UNSAFE.", + type = "boolean", + default = false, + }, + }, + minProperties = 1, +} + + +local _M = { + version = 0.1, + priority = 1008, + name = plugin_name, + schema = schema, +} + +local function is_new_headers_conf(headers) + return (headers.add and type(headers.add) == "table") or + (headers.set and type(headers.set) == "table") or + (headers.remove and type(headers.remove) == "table") +end + +local function check_set_headers(headers) + for field, value in pairs(headers) do + if type(field) ~= 'string' then + return false, 'invalid type as header field' + end + + if type(value) ~= 'string' and type(value) ~= 'number' then + return false, 'invalid type as header value' + end + + if #field == 0 then + return false, 'invalid field length in header' + end + + core.log.info("header field: ", field) + if not core.utils.validate_header_field(field) then + return false, 'invalid field character in header' + end + if not core.utils.validate_header_value(value) then + return false, 'invalid value character in header' + end + end + + return true +end + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + if conf.regex_uri and #conf.regex_uri > 0 then + if (#conf.regex_uri % 2 ~= 0) then + return false, "The length of regex_uri should be an even number" + end + for i = 1, #conf.regex_uri, 2 do + local _, _, err = re_sub("/fake_uri", conf.regex_uri[i], + conf.regex_uri[i + 1], "jo") + if err then + return false, "invalid regex_uri(" .. conf.regex_uri[i] .. + ", " .. conf.regex_uri[i + 1] .. "): " .. 
err + end + end + end + + -- check headers + if not conf.headers then + return true + end + + if conf.headers then + if not is_new_headers_conf(conf.headers) then + ok, err = check_set_headers(conf.headers) + if not ok then + return false, err + end + end + end + + return true +end + + +do + local upstream_vars = { + host = "upstream_host", + upgrade = "upstream_upgrade", + connection = "upstream_connection", + } + local upstream_names = {} + for name, _ in pairs(upstream_vars) do + core.table.insert(upstream_names, name) + end + + local function create_header_operation(hdr_conf) + local set = {} + local add = {} + + if is_new_headers_conf(hdr_conf) then + if hdr_conf.add then + for field, value in pairs(hdr_conf.add) do + core.table.insert_tail(add, field, value) + end + end + if hdr_conf.set then + for field, value in pairs(hdr_conf.set) do + core.table.insert_tail(set, field, value) + end + end + + else + for field, value in pairs(hdr_conf) do + core.table.insert_tail(set, field, value) + end + end + + return { + add = add, + set = set, + remove = hdr_conf.remove or {}, + } + end + + + local function escape_separator(s) + return re_sub(s, [[\?]], "%3F", "jo") + end + + +function _M.rewrite(conf, ctx) + for _, name in ipairs(upstream_names) do + if conf[name] then + ctx.var[upstream_vars[name]] = conf[name] + end + end + + local upstream_uri = ctx.var.uri + local separator_escaped = false + if conf.use_real_request_uri_unsafe then + upstream_uri = ctx.var.real_request_uri + end + + if conf.uri ~= nil then + separator_escaped = true + upstream_uri = core.utils.resolve_var(conf.uri, ctx.var, escape_separator) + + elseif conf.regex_uri ~= nil then + if not str_find(upstream_uri, "?") then + separator_escaped = true + end + + local error_msg + for i = 1, #conf.regex_uri, 2 do + local captures, err = re_match(upstream_uri, conf.regex_uri[i], "jo") + if err then + error_msg = "failed to match the uri " .. ctx.var.uri .. + " (" .. conf.regex_uri[i] .. ") " .. " : " .. err + break + end + + if captures then + ctx.proxy_rewrite_regex_uri_captures = captures + + local uri, _, err = re_sub(upstream_uri, + conf.regex_uri[i], conf.regex_uri[i + 1], "jo") + if uri then + upstream_uri = uri + else + error_msg = "failed to substitute the uri " .. ngx.var.uri .. + " (" .. conf.regex_uri[i] .. ") with " .. + conf.regex_uri[i + 1] .. " : " .. err + end + + break + end + end + + if error_msg ~= nil then + core.log.error(error_msg) + return 500, { error_msg = error_msg } + end + end + + if not conf.use_real_request_uri_unsafe then + local index + if separator_escaped then + index = str_find(upstream_uri, "?") + end + + if index then + upstream_uri = core.utils.uri_safe_encode(sub_str(upstream_uri, 1, index - 1)) .. + sub_str(upstream_uri, index) + else + -- The '?' may come from client request '%3f' when we use ngx.var.uri directly or + -- via regex_uri + upstream_uri = core.utils.uri_safe_encode(upstream_uri) + end + + req_set_uri(upstream_uri) + + if ctx.var.is_args == "?" then + if index then + ctx.var.upstream_uri = upstream_uri .. "&" .. (ctx.var.args or "") + else + ctx.var.upstream_uri = upstream_uri .. "?" .. 
(ctx.var.args or "") + end + else + ctx.var.upstream_uri = upstream_uri + end + else + ctx.var.upstream_uri = upstream_uri + end + + if conf.headers then + local hdr_op, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, + create_header_operation, conf.headers) + if not hdr_op then + core.log.error("failed to create header operation: ", err) + return + end + + local field_cnt = #hdr_op.add + for i = 1, field_cnt, 2 do + local val = core.utils.resolve_var_with_captures(hdr_op.add[i + 1], + ctx.proxy_rewrite_regex_uri_captures) + val = core.utils.resolve_var(val, ctx.var) + -- A nil or empty table value will cause add_header function to throw an error. + if val then + local header = hdr_op.add[i] + core.request.add_header(ctx, header, val) + end + end + + local field_cnt = #hdr_op.set + for i = 1, field_cnt, 2 do + local val = core.utils.resolve_var_with_captures(hdr_op.set[i + 1], + ctx.proxy_rewrite_regex_uri_captures) + val = core.utils.resolve_var(val, ctx.var) + core.request.set_header(ctx, hdr_op.set[i], val) + end + + local field_cnt = #hdr_op.remove + for i = 1, field_cnt do + core.request.set_header(ctx, hdr_op.remove[i], nil) + end + + end + + if conf.method then + ngx.req.set_method(switch_map[conf.method]) + end +end + +end -- do + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/public-api.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/public-api.lua new file mode 100644 index 0000000..ad3f9dc --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/public-api.lua @@ -0,0 +1,55 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local router = require("apisix.router") + +local schema = { + type = "object", + properties = { + uri = {type = "string"}, + }, +} + + +local _M = { + version = 0.1, + priority = 501, + name = "public-api", + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.access(conf, ctx) + -- overwrite the uri in the ctx when the user has set the target uri + ctx.var.uri = conf.uri or ctx.var.uri + + -- perform route matching + if router.api.match(ctx) then + return + end + + return 404 +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/real-ip.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/real-ip.lua new file mode 100644 index 0000000..2121996 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/real-ip.lua @@ -0,0 +1,185 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local ngx_re_split = require("ngx.re").split +local is_apisix_or, client = pcall(require, "resty.apisix.client") +local str_byte = string.byte +local str_sub = string.sub +local ipairs = ipairs +local type = type + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local schema = { + type = "object", + properties = { + trusted_addresses = { + type = "array", + items = {anyOf = core.schema.ip_def}, + minItems = 1 + }, + source = { + type = "string", + minLength = 1 + }, + recursive = { + type = "boolean", + default = false + } + }, + required = {"source"}, +} + + +local plugin_name = "real-ip" + + +local _M = { + version = 0.1, + priority = 23000, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + if conf.trusted_addresses then + for _, cidr in ipairs(conf.trusted_addresses) do + if not core.ip.validate_cidr_or_ip(cidr) then + return false, "invalid ip address: " .. cidr + end + end + end + return true +end + + +local function addr_match(conf, ctx, addr) + local matcher, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, + core.ip.create_ip_matcher, conf.trusted_addresses) + if not matcher then + core.log.error("failed to create ip matcher: ", err) + return false + end + + return matcher:match(addr) +end + + +local function get_addr(conf, ctx) + if conf.source == "http_x_forwarded_for" then + -- use the last address from X-Forwarded-For header + -- after core.request.header function changed + -- we need to get original header value by using core.request.headers + local addrs = core.request.headers(ctx)["X-Forwarded-For"] + if not addrs then + return nil + end + + if type(addrs) == "table" then + addrs = addrs[#addrs] + end + + local idx = core.string.rfind_char(addrs, ",") + if not idx then + return addrs + end + + if conf.recursive and conf.trusted_addresses then + local split_addrs = ngx_re_split(addrs, ",\\s*", "jo") + for i = #split_addrs, 2, -1 do + if not addr_match(conf, ctx, split_addrs[i]) then + return split_addrs[i] + end + end + + return split_addrs[1] + end + + for i = idx + 1, #addrs do + if str_byte(addrs, i) == str_byte(" ") then + idx = idx + 1 + else + break + end + end + + return str_sub(addrs, idx + 1) + end + return ctx.var[conf.source] +end + + +function _M.rewrite(conf, ctx) + if not is_apisix_or then + core.log.error("need to build APISIX-Runtime to support setting real ip") + return 501 + end + + if conf.trusted_addresses then + local remote_addr = ctx.var.remote_addr + if not addr_match(conf, ctx, remote_addr) then + return + end + end + + local addr = get_addr(conf, ctx) + if not addr then + core.log.warn("missing real address") + return + end + + local ip, port = core.utils.parse_addr(addr) + if not ip or (not core.utils.parse_ipv4(ip) and not core.utils.parse_ipv6(ip)) then + core.log.warn("bad address: ", 
addr) + return + end + + if str_byte(ip, 1, 1) == str_byte("[") then + -- For IPv6, the `set_real_ip` accepts '::1' but not '[::1]' + ip = str_sub(ip, 2, #ip - 1) + end + + if port ~= nil and (port < 1 or port > 65535) then + core.log.warn("bad port: ", port) + return + end + + core.log.info("set real ip: ", ip, ", port: ", port) + + local ok, err = client.set_real_ip(ip, port) + if not ok then + core.log.error("failed to set real ip: ", err) + return + end + + -- flush cached vars in APISIX + ctx.var.remote_addr = nil + ctx.var.remote_port = nil + ctx.var.realip_remote_addr = nil + ctx.var.realip_remote_port = nil +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/redirect.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/redirect.lua new file mode 100644 index 0000000..421007d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/redirect.lua @@ -0,0 +1,264 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local tab_insert = table.insert +local tab_concat = table.concat +local string_format = string.format +local re_gmatch = ngx.re.gmatch +local re_sub = ngx.re.sub +local ipairs = ipairs +local ngx = ngx +local str_find = core.string.find +local str_sub = string.sub +local type = type +local math_random = math.random + +local lrucache = core.lrucache.new({ + ttl = 300, count = 100 +}) + + +local reg = [[(\\\$[0-9a-zA-Z_]+)|]] -- \$host + .. [[\$\{([0-9a-zA-Z_]+)\}|]] -- ${host} + .. [[\$([0-9a-zA-Z_]+)|]] -- $host + .. [[(\$|[^$\\]+)]] -- $ or others +local schema = { + type = "object", + properties = { + ret_code = {type = "integer", minimum = 200, default = 302}, + uri = {type = "string", minLength = 2, pattern = reg}, + regex_uri = { + description = "params for generating new uri that substitute from client uri, " .. 
+ "first param is regular expression, the second one is uri template", + type = "array", + maxItems = 2, + minItems = 2, + items = { + description = "regex uri", + type = "string", + } + }, + http_to_https = {type = "boolean"}, + encode_uri = {type = "boolean", default = false}, + append_query_string = {type = "boolean", default = false}, + }, + oneOf = { + {required = {"uri"}}, + {required = {"regex_uri"}}, + {required = {"http_to_https"}} + } +} + + +local plugin_name = "redirect" + +local _M = { + version = 0.1, + priority = 900, + name = plugin_name, + schema = schema, +} + + +local function parse_uri(uri) + local iterator, err = re_gmatch(uri, reg, "jiox") + if not iterator then + return nil, err + end + + local t = {} + while true do + local m, err = iterator() + if err then + return nil, err + end + + if not m then + break + end + + tab_insert(t, m) + end + + return t +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + + if not ok then + return false, err + end + + if conf.regex_uri and #conf.regex_uri > 0 then + local _, _, err = re_sub("/fake_uri", conf.regex_uri[1], + conf.regex_uri[2], "jo") + if err then + local msg = string_format("invalid regex_uri (%s, %s), err:%s", + conf.regex_uri[1], conf.regex_uri[2], err) + return false, msg + end + end + + if conf.http_to_https and conf.append_query_string then + return false, "only one of `http_to_https` and `append_query_string` can be configured." + end + + return true +end + + + local tmp = {} +local function concat_new_uri(uri, ctx) + local passed_uri_segs, err = lrucache(uri, nil, parse_uri, uri) + if not passed_uri_segs then + return nil, err + end + + core.table.clear(tmp) + + for _, uri_segs in ipairs(passed_uri_segs) do + local pat1 = uri_segs[1] -- \$host + local pat2 = uri_segs[2] -- ${host} + local pat3 = uri_segs[3] -- $host + local pat4 = uri_segs[4] -- $ or others + core.log.info("parsed uri segs: ", core.json.delay_encode(uri_segs)) + + if pat2 or pat3 then + tab_insert(tmp, ctx.var[pat2 or pat3]) + else + tab_insert(tmp, pat1 or pat4) + end + end + + return tab_concat(tmp, "") +end + +local function get_port(attr) + local port + if attr then + port = attr.https_port + end + + if port then + return port + end + + local local_conf = core.config.local_conf() + local ssl = core.table.try_read_attr(local_conf, "apisix", "ssl") + if not ssl or not ssl["enable"] then + return port + end + + local ports = ssl["listen"] + if ports and #ports > 0 then + local idx = math_random(1, #ports) + port = ports[idx] + if type(port) == "table" then + port = port.port + end + end + + return port +end + +function _M.rewrite(conf, ctx) + core.log.info("plugin rewrite phase, conf: ", core.json.delay_encode(conf)) + + local ret_code = conf.ret_code + + local attr = plugin.plugin_attr(plugin_name) + local ret_port = get_port(attr) + + local uri = conf.uri + local regex_uri = conf.regex_uri + + local proxy_proto = core.request.header(ctx, "X-Forwarded-Proto") + local _scheme = proxy_proto or core.request.get_scheme(ctx) + if conf.http_to_https and _scheme == "http" then + if ret_port == nil or ret_port == 443 or ret_port <= 0 or ret_port > 65535 then + uri = "https://$host$request_uri" + else + uri = "https://$host:" .. ret_port .. 
"$request_uri" + end + + local method_name = ngx.req.get_method() + if method_name == "GET" or method_name == "HEAD" then + ret_code = 301 + else + -- https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308 + ret_code = 308 + end + end + + if ret_code then + local new_uri + if uri then + local err + new_uri, err = concat_new_uri(uri, ctx) + if not new_uri then + core.log.error("failed to generate new uri by: " .. uri .. err) + return 500 + end + elseif regex_uri then + local n, err + new_uri, n, err = re_sub(ctx.var.uri, regex_uri[1], + regex_uri[2], "jo") + if not new_uri then + local msg = string_format("failed to substitute the uri:%s (%s) with %s, error:%s", + ctx.var.uri, regex_uri[1], regex_uri[2], err) + core.log.error(msg) + return 500 + end + + if n < 1 then + return + end + end + + if not new_uri then + return + end + + local index = str_find(new_uri, "?") + if conf.encode_uri then + if index then + new_uri = core.utils.uri_safe_encode(str_sub(new_uri, 1, index-1)) .. + str_sub(new_uri, index) + else + new_uri = core.utils.uri_safe_encode(new_uri) + end + end + + if conf.append_query_string and ctx.var.is_args == "?" then + if index then + new_uri = new_uri .. "&" .. (ctx.var.args or "") + else + new_uri = new_uri .. "?" .. (ctx.var.args or "") + end + end + + core.response.set_header("Location", new_uri) + return ret_code + end + +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/referer-restriction.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/referer-restriction.lua new file mode 100644 index 0000000..85e8ea3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/referer-restriction.lua @@ -0,0 +1,141 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local ipairs = ipairs +local core = require("apisix.core") +local http = require "resty.http" +local lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) + + +local schema = { + type = "object", + properties = { + bypass_missing = { + type = "boolean", + default = false, + }, + whitelist = { + type = "array", + items = core.schema.host_def, + minItems = 1, + }, + blacklist = { + type = "array", + items = core.schema.host_def, + minItems = 1, + }, + message = { + type = "string", + minLength = 1, + maxLength = 1024, + default = "Your referer host is not allowed", + }, + }, + oneOf = { + {required = {"whitelist"}}, + {required = {"blacklist"}}, + }, +} + + +local plugin_name = "referer-restriction" + + +local _M = { + version = 0.1, + priority = 2990, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function match_host(matcher, host) + if matcher.map[host] then + return true + end + for _, h in ipairs(matcher.suffixes) do + if core.string.has_suffix(host, h) then + return true + end + end + return false +end + + +local function create_host_matcher(hosts) + local hosts_suffix = {} + local hosts_map = {} + + for _, h in ipairs(hosts) do + if h:byte(1) == 42 then -- start with '*' + core.table.insert(hosts_suffix, h:sub(2)) + else + hosts_map[h] = true + end + end + + return { + suffixes = hosts_suffix, + map = hosts_map, + } +end + + +function _M.access(conf, ctx) + local block = false + local referer = ctx.var.http_referer + if referer then + -- parse_uri doesn't support IPv6 literal, it is OK since we only + -- expect hostname in the whitelist. + -- See https://github.com/ledgetech/lua-resty-http/pull/104 + local uri = http.parse_uri(nil, referer) + if not uri then + -- malformed Referer + referer = nil + else + -- take host part only + referer = uri[2] + end + end + + + if not referer then + block = not conf.bypass_missing + + elseif conf.whitelist then + local matcher = lrucache(conf.whitelist, nil, + create_host_matcher, conf.whitelist) + block = not match_host(matcher, referer) + elseif conf.blacklist then + local matcher = lrucache(conf.blacklist, nil, + create_host_matcher, conf.blacklist) + block = match_host(matcher, referer) + end + + if block then + return 403, { message = conf.message } + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/request-id.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/request-id.lua new file mode 100644 index 0000000..dac3162 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/request-id.lua @@ -0,0 +1,120 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local ngx = ngx +local core = require("apisix.core") +local uuid = require("resty.jit-uuid") +local nanoid = require("nanoid") +local math_random = math.random +local str_byte = string.byte +local ffi = require "ffi" + +local plugin_name = "request-id" + +local schema = { + type = "object", + properties = { + header_name = {type = "string", default = "X-Request-Id"}, + include_in_response = {type = "boolean", default = true}, + algorithm = { + type = "string", + enum = {"uuid", "nanoid", "range_id"}, + default = "uuid" + }, + range_id = { + type = "object", + properties = { + length = { + type = "integer", + minimum = 6, + default = 16 + }, + char_set = { + type = "string", + -- The Length is set to 6 just avoid too short length, it may repeat + minLength = 6, + default = "abcdefghijklmnopqrstuvwxyzABCDEFGHIGKLMNOPQRSTUVWXYZ0123456789" + } + }, + default = {} + } + } +} + +local _M = { + version = 0.1, + priority = 12015, + name = plugin_name, + schema = schema +} + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + +-- generate range_id +local function get_range_id(range_id) + local res = ffi.new("unsigned char[?]", range_id.length) + for i = 0, range_id.length - 1 do + res[i] = str_byte(range_id.char_set, math_random(#range_id.char_set)) + end + return ffi.string(res, range_id.length) +end + +local function get_request_id(conf) + if conf.algorithm == "uuid" then + return uuid() + end + if conf.algorithm == "nanoid" then + return nanoid.safe_simple() + end + + if conf.algorithm == "range_id" then + return get_range_id(conf.range_id) + end + + return uuid() +end + + +function _M.rewrite(conf, ctx) + local headers = ngx.req.get_headers() + local uuid_val + if not headers[conf.header_name] then + uuid_val = get_request_id(conf) + core.request.set_header(ctx, conf.header_name, uuid_val) + else + uuid_val = headers[conf.header_name] + end + + if conf.include_in_response then + ctx["request-id-" .. conf.header_name] = uuid_val + end +end + +function _M.header_filter(conf, ctx) + if not conf.include_in_response then + return + end + + local headers = ngx.resp.get_headers() + if not headers[conf.header_name] then + core.response.set_header(conf.header_name, ctx["request-id-" .. conf.header_name]) + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/request-validation.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/request-validation.lua new file mode 100644 index 0000000..0e6d36d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/request-validation.lua @@ -0,0 +1,120 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local plugin_name = "request-validation" +local ngx = ngx + +local schema = { + type = "object", + properties = { + header_schema = {type = "object"}, + body_schema = {type = "object"}, + rejected_code = {type = "integer", minimum = 200, maximum = 599, default = 400}, + rejected_msg = {type = "string", minLength = 1, maxLength = 256} + }, + anyOf = { + {required = {"header_schema"}}, + {required = {"body_schema"}} + } +} + + +local _M = { + version = 0.1, + priority = 2800, + type = 'validation', + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + if conf.body_schema then + ok, err = core.schema.valid(conf.body_schema) + if not ok then + return false, err + end + end + + if conf.header_schema then + ok, err = core.schema.valid(conf.header_schema) + if not ok then + return false, err + end + end + + return true, nil +end + + +function _M.rewrite(conf, ctx) + local headers = core.request.headers(ctx) + + if conf.header_schema then + local ok, err = core.schema.check(conf.header_schema, headers) + if not ok then + core.log.error("req schema validation failed", err) + return conf.rejected_code, conf.rejected_msg or err + end + end + + if conf.body_schema then + local req_body + local body, err = core.request.get_body() + if not body then + if err then + core.log.error("failed to get body: ", err) + end + return conf.rejected_code, conf.rejected_msg + end + + local body_is_json = true + if headers["content-type"] == "application/x-www-form-urlencoded" then + -- use 0 to avoid truncated result and keep the behavior as the + -- same as other platforms + req_body, err = ngx.decode_args(body, 0) + body_is_json = false + else -- JSON as default + req_body, err = core.json.decode(body) + end + + if not req_body then + core.log.error('failed to decode the req body: ', err) + return conf.rejected_code, conf.rejected_msg or err + end + + local ok, err = core.schema.check(conf.body_schema, req_body) + if not ok then + core.log.error("req schema validation failed: ", err) + return conf.rejected_code, conf.rejected_msg or err + end + + if body_is_json then + -- ensure the JSON we check is the JSON we pass to the upstream, + -- see https://bishopfox.com/blog/json-interoperability-vulnerabilities + req_body = core.json.encode(req_body) + ngx.req.set_body_data(req_body) + end + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/response-rewrite.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/response-rewrite.lua new file mode 100644 index 0000000..adf630f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/response-rewrite.lua @@ -0,0 +1,390 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local expr = require("resty.expr.v1") +local re_compile = require("resty.core.regex").re_match_compile +local plugin_name = "response-rewrite" +local ngx = ngx +local ngx_header = ngx.header +local re_match = ngx.re.match +local re_sub = ngx.re.sub +local re_gsub = ngx.re.gsub +local pairs = pairs +local ipairs = ipairs +local type = type +local pcall = pcall +local content_decode = require("apisix.utils.content-decode") + + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local schema = { + type = "object", + properties = { + headers = { + description = "new headers for response", + anyOf = { + { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + {type = "string"}, + {type = "number"}, + } + } + }, + }, + { + properties = { + add = { + type = "array", + minItems = 1, + items = { + type = "string", + -- "Set-Cookie: =; Max-Age=" + pattern = "^[^:]+:[^:]*[^/]$" + } + }, + set = { + type = "object", + minProperties = 1, + patternProperties = { + ["^[^:]+$"] = { + oneOf = { + {type = "string"}, + {type = "number"}, + } + } + }, + }, + remove = { + type = "array", + minItems = 1, + items = { + type = "string", + -- "Set-Cookie" + pattern = "^[^:]+$" + } + }, + }, + } + } + }, + body = { + description = "new body for response", + type = "string", + }, + body_base64 = { + description = "whether new body for response need base64 decode before return", + type = "boolean", + default = false, + }, + status_code = { + description = "new status code for response", + type = "integer", + minimum = 200, + maximum = 598, + }, + vars = { + type = "array", + }, + filters = { + description = "a group of filters that modify response body" .. 
+ "by replacing one specified string by another", + type = "array", + minItems = 1, + items = { + description = "filter that modifies response body", + type = "object", + required = {"regex", "replace"}, + properties = { + regex = { + description = "match pattern on response body", + type = "string", + minLength = 1, + }, + scope = { + description = "regex substitution range", + type = "string", + enum = {"once", "global"}, + default = "once", + }, + replace = { + description = "regex substitution content", + type = "string", + }, + options = { + description = "regex options", + type = "string", + default = "jo", + } + }, + }, + }, + }, + dependencies = { + body = { + ["not"] = {required = {"filters"}} + }, + filters = { + ["not"] = {required = {"body"}} + } + } +} + + +local _M = { + version = 0.1, + priority = 899, + name = plugin_name, + schema = schema, +} + +local function vars_matched(conf, ctx) + if not conf.vars then + return true + end + + if not conf.response_expr then + local response_expr, _ = expr.new(conf.vars) + conf.response_expr = response_expr + end + + local match_result = conf.response_expr:eval(ctx.var) + + return match_result +end + + +local function is_new_headers_conf(headers) + return + (headers.add and type(headers.add) == "table") or + (headers.set and type(headers.set) == "table") or + (headers.remove and type(headers.remove) == "table") +end + + +local function check_set_headers(headers) + for field, value in pairs(headers) do + if type(field) ~= 'string' then + return false, 'invalid type as header field' + end + + if type(value) ~= 'string' and type(value) ~= 'number' then + return false, 'invalid type as header value' + end + + if #field == 0 then + return false, 'invalid field length in header' + end + end + + return true +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + if conf.headers then + if not is_new_headers_conf(conf.headers) then + ok, err = check_set_headers(conf.headers) + if not ok then + return false, err + end + end + end + + if conf.body_base64 then + if not conf.body or #conf.body == 0 then + return false, 'invalid base64 content' + end + local body = ngx.decode_base64(conf.body) + if not body then + return false, 'invalid base64 content' + end + end + + if conf.vars then + local ok, err = expr.new(conf.vars) + if not ok then + return false, "failed to validate the 'vars' expression: " .. err + end + end + + if conf.filters then + for _, filter in ipairs(conf.filters) do + local ok, err = pcall(re_compile, filter.regex, filter.options) + if not ok then + return false, "regex \"" .. filter.regex .. + "\" validation failed: " .. 
err + end + end + end + + return true +end + + +do + +function _M.body_filter(conf, ctx) + if not ctx.response_rewrite_matched then + return + end + + if conf.filters then + + local body = core.response.hold_body_chunk(ctx) + if not body then + return + end + + local err + if ctx.response_encoding ~= nil then + local decoder = content_decode.dispatch_decoder(ctx.response_encoding) + if not decoder then + core.log.error("filters may not work as expected ", + "due to unsupported compression encoding type: ", + ctx.response_encoding) + return + end + body, err = decoder(body) + if err ~= nil then + core.log.error("filters may not work as expected: ", err) + return + end + end + + for _, filter in ipairs(conf.filters) do + if filter.scope == "once" then + body, _, err = re_sub(body, filter.regex, filter.replace, filter.options) + else + body, _, err = re_gsub(body, filter.regex, filter.replace, filter.options) + end + if err ~= nil then + core.log.error("regex \"" .. filter.regex .. "\" substitutes failed:" .. err) + end + end + + ngx.arg[1] = body + return + end + + if conf.body then + ngx.arg[2] = true + if conf.body_base64 then + ngx.arg[1] = ngx.decode_base64(conf.body) + else + ngx.arg[1] = conf.body + end + end +end + + +local function create_header_operation(hdr_conf) + local set = {} + local add = {} + if is_new_headers_conf(hdr_conf) then + if hdr_conf.add then + for _, value in ipairs(hdr_conf.add) do + local m, err = re_match(value, [[^([^:\s]+)\s*:\s*([^:]+)$]], "jo") + if not m then + return nil, err + end + core.table.insert_tail(add, m[1], m[2]) + end + end + + if hdr_conf.set then + for field, value in pairs(hdr_conf.set) do + --reform header from object into array, so can avoid use pairs, which is NYI + core.table.insert_tail(set, field, value) + end + end + + else + for field, value in pairs(hdr_conf) do + core.table.insert_tail(set, field, value) + end + end + + return { + add = add, + set = set, + remove = hdr_conf.remove or {}, + } +end + + +function _M.header_filter(conf, ctx) + ctx.response_rewrite_matched = vars_matched(conf, ctx) + if not ctx.response_rewrite_matched then + return + end + + if conf.status_code then + ngx.status = conf.status_code + end + + -- if filters have no any match, response body won't be modified. 
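+    -- When a body rewrite is configured, remember the original Content-Encoding for
+    -- the body filter's decoder and clear the response headers that describe the
+    -- original body, since its length (and encoding) may change.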
+ if conf.filters or conf.body then + local response_encoding = ngx_header["Content-Encoding"] + core.response.clear_header_as_body_modified() + ctx.response_encoding = response_encoding + end + + if not conf.headers then + return + end + + local hdr_op, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, + create_header_operation, conf.headers) + if not hdr_op then + core.log.error("failed to create header operation: ", err) + return + end + + local field_cnt = #hdr_op.add + for i = 1, field_cnt, 2 do + local val = core.utils.resolve_var(hdr_op.add[i+1], ctx.var) + core.response.add_header(hdr_op.add[i], val) + end + + local field_cnt = #hdr_op.set + for i = 1, field_cnt, 2 do + local val = core.utils.resolve_var(hdr_op.set[i+1], ctx.var) + core.response.set_header(hdr_op.set[i], val) + end + + local field_cnt = #hdr_op.remove + for i = 1, field_cnt do + core.response.set_header(hdr_op.remove[i], nil) + end +end + +end -- do + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/rocketmq-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/rocketmq-logger.lua new file mode 100644 index 0000000..2f0cd5b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/rocketmq-logger.lua @@ -0,0 +1,191 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local producer = require ("resty.rocketmq.producer") +local acl_rpchook = require("resty.rocketmq.acl_rpchook") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") + +local type = type +local plugin_name = "rocketmq-logger" +local batch_processor_manager = bp_manager_mod.new("rocketmq logger") + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local schema = { + type = "object", + properties = { + meta_format = { + type = "string", + default = "default", + enum = {"default", "origin"}, + }, + nameserver_list = { + type = "array", + minItems = 1, + items = { + type = "string" + } + }, + topic = {type = "string"}, + key = {type = "string"}, + tag = {type = "string"}, + log_format = {type = "object"}, + timeout = {type = "integer", minimum = 1, default = 3}, + use_tls = {type = "boolean", default = false}, + access_key = {type = "string", default = ""}, + secret_key = {type = "string", default = ""}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = {type = "boolean", default = false}, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + encrypt_fields = {"secret_key"}, + required = {"nameserver_list", "topic"} +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 402, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, err + end + core.utils.check_tls_bool({"use_tls"}, conf, plugin_name) + return log_util.check_log_schema(conf) +end + + +local function create_producer(nameserver_list, producer_config) + core.log.info("create new rocketmq producer instance") + local prod = producer.new(nameserver_list, "apisixLogProducer") + if producer_config.use_tls then + prod:setUseTLS(true) + end + if producer_config.access_key ~= '' then + local aclHook = acl_rpchook.new(producer_config.access_key, producer_config.secret_key) + prod:addRPCHook(aclHook) + end + prod:setTimeout(producer_config.timeout) + return prod +end + + +local function send_rocketmq_data(conf, log_message, prod) + local result, err = prod:send(conf.topic, log_message, conf.tag, conf.key) + if not result then + return false, "failed to send data to rocketmq topic: " .. err .. + ", nameserver_list: " .. 
core.json.encode(conf.nameserver_list) + end + + core.log.info("queue: ", result.sendResult.messageQueue.queueId) + + return true +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry + if conf.meta_format == "origin" then + entry = log_util.get_req_original(ctx, conf) + else + entry = log_util.get_log_entry(plugin_name, conf, ctx) + end + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- reuse producer via lrucache to avoid unbalanced partitions of messages in rocketmq + local producer_config = { + timeout = conf.timeout * 1000, + use_tls = conf.use_tls, + access_key = conf.access_key, + secret_key = conf.secret_key, + } + + local prod, err = core.lrucache.plugin_ctx(lrucache, ctx, nil, create_producer, + conf.nameserver_list, producer_config) + if err then + return nil, "failed to create the rocketmq producer: " .. err + end + core.log.info("rocketmq nameserver_list[1] port ", + prod.client.nameservers[1].port) + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err + if batch_max_size == 1 then + data = entries[1] + if type(data) ~= "string" then + data, err = core.json.encode(data) -- encode as single {} + end + else + data, err = core.json.encode(entries) -- encode as array [{}] + end + + if not data then + return false, 'error occurred while encoding the data: ' .. err + end + + core.log.info("send data to rocketmq: ", data) + return send_rocketmq_data(conf, data, prod) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/server-info.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/server-info.lua new file mode 100644 index 0000000..441b2ae --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/server-info.lua @@ -0,0 +1,316 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local core = require("apisix.core") +local timers = require("apisix.timers") +local plugin = require("apisix.plugin") + +local ngx_time = ngx.time +local ngx_timer_at = ngx.timer.at +local ngx_worker_id = ngx.worker.id +local type = type + +local load_time = os.time() +local plugin_name = "server-info" +local default_report_ttl = 60 +local lease_id + +local schema = { + type = "object", +} +local attr_schema = { + type = "object", + properties = { + report_ttl = { + type = "integer", + description = "live time for server info in etcd", + default = default_report_ttl, + minimum = 3, + maximum = 86400, + } + } +} + +local internal_status = ngx.shared["internal-status"] +if not internal_status then + error("lua_shared_dict \"internal-status\" not configured") +end + + +local _M = { + version = 0.1, + priority = 990, + name = plugin_name, + schema = schema, + scope = "global", +} + + +local function get_boot_time() + local time, err = internal_status:get("server_info:boot_time") + if err ~= nil then + core.log.error("failed to get boot_time from shdict: ", err) + return load_time + end + + if time ~= nil then + return time + end + + local _, err = internal_status:set("server_info:boot_time", load_time) + if err ~= nil then + core.log.error("failed to save boot_time to shdict: ", err) + end + + return load_time +end + + +local function uninitialized_server_info() + local boot_time = get_boot_time() + return { + etcd_version = "unknown", + hostname = core.utils.gethostname(), + id = core.id.get(), + version = core.version.VERSION, + boot_time = boot_time, + } +end + + +local function get() + local data, err = internal_status:get("server_info") + if err ~= nil then + core.log.error("get error: ", err) + return nil, err + end + + if not data then + return uninitialized_server_info() + end + + local server_info, err = core.json.decode(data) + if not server_info then + core.log.error("failed to decode server_info: ", err) + return nil, err + end + + return server_info +end + + +local function get_server_info() + local info, err = get() + if not info then + core.log.error("failed to get server_info: ", err) + return 500 + end + + return 200, info +end + + +local function set(key, value, ttl) + local res_new, err = core.etcd.set(key, value, ttl) + if not res_new then + core.log.error("failed to set server_info: ", err) + return nil, err + end + + if not res_new.body.lease_id then + core.log.error("failed to get lease_id: ", err) + return nil, err + end + + lease_id = res_new.body.lease_id + + -- set or update lease_id + local ok, err = internal_status:set("lease_id", lease_id) + if not ok then + core.log.error("failed to set lease_id to shdict: ", err) + return nil, err + end + + return true +end + + +local function report(premature, report_ttl) + if premature then + return + end + + -- get apisix node info + local server_info, err = get() + if not server_info then + core.log.error("failed to get server_info: ", err) + return + end + + if server_info.etcd_version == "unknown" then + local res, err = core.etcd.server_version() + if not res then + core.log.error("failed to fetch etcd version: ", err) + return + + elseif type(res.body) ~= "table" then + core.log.error("failed to fetch etcd version: bad version info") + return + + else + if res.body.etcdcluster == "" then + server_info.etcd_version = res.body.etcdserver + else + server_info.etcd_version = res.body.etcdcluster + end + end + end + + -- get inside etcd data, if not exist, create it + local key = 
"/data_plane/server_info/" .. server_info.id + local res, err = core.etcd.get(key) + if not res or (res.status ~= 200 and res.status ~= 404) then + core.log.error("failed to get server_info from etcd: ", err) + return + end + + if not res.body.node then + local ok, err = set(key, server_info, report_ttl) + if not ok then + core.log.error("failed to set server_info to etcd: ", err) + return + end + + return + end + + local ok = core.table.deep_eq(server_info, res.body.node.value) + -- not equal, update it + if not ok then + local ok, err = set(key, server_info, report_ttl) + if not ok then + core.log.error("failed to set server_info to etcd: ", err) + return + end + + return + end + + -- get lease_id from ngx dict + lease_id, err = internal_status:get("lease_id") + if not lease_id then + core.log.error("failed to get lease_id from shdict: ", err) + return + end + + -- call keepalive + local res, err = core.etcd.keepalive(lease_id) + if not res then + core.log.error("send heartbeat failed: ", err) + return + end + + local data, err = core.json.encode(server_info) + if not data then + core.log.error("failed to encode server_info: ", err) + return + end + + local ok, err = internal_status:set("server_info", data) + if not ok then + core.log.error("failed to encode and save server info: ", err) + return + end +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +function _M.control_api() + return { + { + methods = {"GET"}, + uris ={"/v1/server_info"}, + handler = get_server_info, + } + } +end + + +function _M.init() + core.log.warn("The server-info plugin is deprecated and will be removed in a future release.") + if core.config ~= require("apisix.core.config_etcd") then + -- we don't need to report server info if etcd is not in use. + return + end + + + local local_conf = core.config.local_conf() + local deployment_role = core.table.try_read_attr( + local_conf, "deployment", "role") + if deployment_role == "data_plane" then + -- data_plane should not write to etcd + return + end + + local attr = plugin.plugin_attr(plugin_name) + local ok, err = core.schema.check(attr_schema, attr) + if not ok then + core.log.error("failed to check plugin_attr: ", err) + return + end + + local report_ttl = attr and attr.report_ttl or default_report_ttl + local start_at = ngx_time() + + local fn = function() + local now = ngx_time() + -- If ttl remaining time is less than half, then flush the ttl + if now - start_at >= (report_ttl / 2) then + start_at = now + report(nil, report_ttl) + end + end + + if ngx_worker_id() == 0 then + local ok, err = ngx_timer_at(0, report, report_ttl) + if not ok then + core.log.error("failed to create initial timer to report server info: ", err) + return + end + end + + timers.register_timer("plugin#server-info", fn, true) + + core.log.info("timer update the server info ttl, current ttl: ", report_ttl) +end + + +function _M.destroy() + timers.unregister_timer("plugin#server-info", true) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-post-function.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-post-function.lua new file mode 100644 index 0000000..cd3a3f9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-post-function.lua @@ -0,0 +1,17 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +return require("apisix.plugins.serverless.init")("serverless-post-function", -2000) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-pre-function.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-pre-function.lua new file mode 100644 index 0000000..40c40ad --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless-pre-function.lua @@ -0,0 +1,17 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +return require("apisix.plugins.serverless.init")("serverless-pre-function", 10000) diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/generic-upstream.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/generic-upstream.lua new file mode 100644 index 0000000..52a0cb3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/generic-upstream.lua @@ -0,0 +1,136 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +local ngx = ngx +local require = require +local type = type +local string = string + +return function(plugin_name, version, priority, request_processor, authz_schema, metadata_schema) + local core = require("apisix.core") + local http = require("resty.http") + local url = require("net.url") + + if request_processor and type(request_processor) ~= "function" then + return "Failed to generate plugin due to invalid header processor type, " .. + "expected: function, received: " .. type(request_processor) + end + + local schema = { + type = "object", + properties = { + function_uri = {type = "string"}, + authorization = authz_schema, + timeout = {type = "integer", minimum = 100, default = 3000}, + ssl_verify = {type = "boolean", default = true}, + keepalive = {type = "boolean", default = true}, + keepalive_timeout = {type = "integer", minimum = 1000, default = 60000}, + keepalive_pool = {type = "integer", minimum = 1, default = 5} + }, + required = {"function_uri"} + } + + local _M = { + version = version, + priority = priority, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema + } + + function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) + end + + function _M.access(conf, ctx) + local uri_args = core.request.get_uri_args(ctx) + local headers = core.request.headers(ctx) or {} + + local req_body, err = core.request.get_body() + + if err then + core.log.error("error while reading request body: ", err) + return 400 + end + + -- forward the url path came through the matched uri + local url_decoded = url.parse(conf.function_uri) + local path = url_decoded.path or "/" + + if ctx.curr_req_matched and ctx.curr_req_matched[":ext"] then + local end_path = ctx.curr_req_matched[":ext"] + + if path:byte(-1) == string.byte("/") or end_path:byte(1) == string.byte("/") then + path = path .. end_path + else + path = path .. "/" .. end_path + end + end + + + headers["host"] = url_decoded.host + local params = { + method = ngx.req.get_method(), + body = req_body, + query = uri_args, + headers = headers, + path = path, + keepalive = conf.keepalive, + ssl_verify = conf.ssl_verify + } + + -- Keepalive options + if conf.keepalive then + params.keepalive_timeout = conf.keepalive_timeout + params.keepalive_pool = conf.keepalive_pool + end + + -- modify request info (if required) + request_processor(conf, ctx, params) + + local httpc = http.new() + httpc:set_timeout(conf.timeout) + + local res + res, err = httpc:request_uri(conf.function_uri, params) + + if not res then + core.log.error("failed to process ", plugin_name, ", err: ", err) + return 503 + end + + -- According to RFC7540 https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2, + -- endpoint must not generate any connection specific headers for HTTP/2 requests. 
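+        -- So, when the client connection is HTTP/2, drop the connection-specific
+        -- (hop-by-hop) headers from the serverless backend's response before
+        -- copying the remaining headers to the downstream response.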
+ local response_headers = res.headers + if ngx.var.http2 then + response_headers["Connection"] = nil + response_headers["Keep-Alive"] = nil + response_headers["Proxy-Connection"] = nil + response_headers["Upgrade"] = nil + response_headers["Transfer-Encoding"] = nil + end + + -- setting response headers + core.response.set_header(response_headers) + + return res.status, res.body + end + + return _M +end diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/init.lua new file mode 100644 index 0000000..6ed8c96 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/serverless/init.lua @@ -0,0 +1,124 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ipairs = ipairs +local pcall = pcall +local loadstring = loadstring +local require = require +local type = type + + +local phases = { + "rewrite", "access", "header_filter", "body_filter", + "log", "before_proxy" +} + + +return function(plugin_name, priority) + local core = require("apisix.core") + + + local lrucache = core.lrucache.new({ + type = "plugin", + }) + + local schema = { + type = "object", + properties = { + phase = { + type = "string", + default = "access", + enum = phases, + }, + functions = { + type = "array", + items = {type = "string"}, + minItems = 1 + }, + }, + required = {"functions"} + } + + local _M = { + version = 0.1, + priority = priority, + name = plugin_name, + schema = schema, + } + + local function load_funcs(functions) + local funcs = core.table.new(#functions, 0) + + local index = 1 + for _, func_str in ipairs(functions) do + local _, func = pcall(loadstring(func_str)) + funcs[index] = func + index = index + 1 + end + + return funcs + end + + local function call_funcs(phase, conf, ctx) + if phase ~= conf.phase then + return + end + + local functions = core.lrucache.plugin_ctx(lrucache, ctx, nil, + load_funcs, conf.functions) + + for _, func in ipairs(functions) do + local code, body = func(conf, ctx) + if code or body then + return code, body + end + end + end + + function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + local functions = conf.functions + for _, func_str in ipairs(functions) do + local func, err = loadstring(func_str) + if err then + return false, 'failed to loadstring: ' .. err + end + + local ok, ret = pcall(func) + if not ok then + return false, 'pcall error: ' .. ret + end + if type(ret) ~= 'function' then + return false, 'only accept Lua function,' + .. ' the input code type is ' .. 
type(ret) + end + end + + return true + end + + for _, phase in ipairs(phases) do + _M[phase] = function (conf, ctx) + return call_funcs(phase, conf, ctx) + end + end + + return _M +end diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking-logger.lua new file mode 100644 index 0000000..8a7e309 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking-logger.lua @@ -0,0 +1,194 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local log_util = require("apisix.utils.log-util") +local core = require("apisix.core") +local http = require("resty.http") +local url = require("net.url") + +local base64 = require("ngx.base64") +local ngx_re = require("ngx.re") + +local ngx = ngx +local tostring = tostring +local tonumber = tonumber + +local plugin_name = "skywalking-logger" +local batch_processor_manager = bp_manager_mod.new("skywalking logger") +local schema = { + type = "object", + properties = { + endpoint_addr = core.schema.uri_def, + service_name = {type = "string", default = "APISIX"}, + service_instance_name = {type = "string", default = "APISIX Instance Name"}, + log_format = {type = "object"}, + timeout = {type = "integer", minimum = 1, default = 3}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + required = {"endpoint_addr"}, +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local _M = { + version = 0.1, + priority = 408, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + local check = {"endpoint_addr"} + core.utils.check_https(check, conf, plugin_name) + return core.schema.check(schema, conf) +end + + +local function send_http_data(conf, log_message) + local err_msg + local res = true + local url_decoded = url.parse(conf.endpoint_addr) + local host = url_decoded.host + local port = url_decoded.port + + core.log.info("sending a batch logs to ", conf.endpoint_addr) + + local httpc = http.new() + httpc:set_timeout(conf.timeout * 1000) + local ok, err = httpc:connect(host, port) + + if not ok then + return false, "failed to connect to host[" .. host .. "] port[" + .. 
tostring(port) .. "] " .. err + end + + local httpc_res, httpc_err = httpc:request({ + method = "POST", + path = "/v3/logs", + body = log_message, + headers = { + ["Host"] = url_decoded.host, + ["Content-Type"] = "application/json", + } + }) + + if not httpc_res then + return false, "error while sending data to [" .. host .. "] port[" + .. tostring(port) .. "] " .. httpc_err + end + + -- some error occurred in the server + if httpc_res.status >= 400 then + res = false + err_msg = "server returned status code[" .. httpc_res.status .. "] host[" + .. host .. "] port[" .. tostring(port) .. "] " + .. "body[" .. httpc_res:read_body() .. "]" + end + + return res, err_msg +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local log_body = log_util.get_log_entry(plugin_name, conf, ctx) + local trace_context + local sw_header = ngx.req.get_headers()["sw8"] + if sw_header then + -- 1-TRACEID-SEGMENTID-SPANID-PARENT_SERVICE-PARENT_INSTANCE-PARENT_ENDPOINT-IPPORT + local ids = ngx_re.split(sw_header, '-') + if #ids == 8 then + trace_context = { + traceId = base64.decode_base64url(ids[2]), + traceSegmentId = base64.decode_base64url(ids[3]), + spanId = tonumber(ids[4]) + } + else + core.log.warn("failed to parse trace_context header: ", sw_header) + end + end + + local service_instance_name = conf.service_instance_name + if service_instance_name == "$hostname" then + service_instance_name = core.utils.gethostname() + end + + local entry = { + traceContext = trace_context, + body = { + json = { + json = core.json.encode(log_body, true) + } + }, + service = conf.service_name, + serviceInstance = service_instance_name, + endpoint = ctx.var.uri, + } + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err = core.json.encode(entries) + if not data then + return false, 'error occurred while encoding the data: ' .. err + end + + return send_http_data(conf, data) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking.lua new file mode 100644 index 0000000..2ef435b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/skywalking.lua @@ -0,0 +1,158 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local process = require("ngx.process") +local sw_tracer = require("skywalking.tracer") +local Span = require("skywalking.span") +local ngx = ngx +local math = math + +local plugin_name = "skywalking" +local attr_schema = { + type = "object", + properties = { + service_name = { + type = "string", + description = "service name for skywalking", + default = "APISIX", + }, + service_instance_name = { + type = "string", + description = "User Service Instance Name", + default = "APISIX Instance Name", + }, + endpoint_addr = { + type = "string", + default = "http://127.0.0.1:12800", + }, + report_interval = { + type = "integer", + }, + }, +} + +local schema = { + type = "object", + properties = { + sample_ratio = { + type = "number", + minimum = 0.00001, + maximum = 1, + default = 1 + } + }, +} + + +local _M = { + version = 0.1, + priority = 12010, + name = plugin_name, + schema = schema, + attr_schema = attr_schema, + run_policy = "prefer_route", +} + + +function _M.check_schema(conf) + local check = {"endpoint_addr"} + core.utils.check_https(check, conf, plugin_name) + return core.schema.check(schema, conf) +end + + +function _M.rewrite(conf, ctx) + core.log.debug("rewrite phase of skywalking plugin") + ctx.skywalking_sample = false + if conf.sample_ratio == 1 or math.random() < conf.sample_ratio then + ctx.skywalking_sample = true + sw_tracer:start("upstream service") + core.log.info("tracer start") + return + end + + core.log.info("miss sampling, ignore") +end + + +function _M.delayed_body_filter(conf, ctx) + if ctx.skywalking_sample and ngx.arg[2] then + Span.setComponentId(ngx.ctx.exitSpan, 6002) + Span.setComponentId(ngx.ctx.entrySpan, 6002) + sw_tracer:finish() + core.log.info("tracer finish") + end +end + + +function _M.log(conf, ctx) + if ctx.skywalking_sample then + sw_tracer:prepareForReport() + core.log.info("tracer prepare for report") + end +end + + +function _M.init() + if process.type() ~= "worker" then + return + end + + local local_plugin_info = plugin.plugin_attr(plugin_name) + local_plugin_info = local_plugin_info and core.table.clone(local_plugin_info) or {} + local ok, err = core.schema.check(attr_schema, local_plugin_info) + if not ok then + core.log.error("failed to check the plugin_attr[", plugin_name, "]", + ": ", err) + return + end + + core.log.info("plugin attribute: ", + core.json.delay_encode(local_plugin_info)) + + -- TODO: maybe need to fetch them from plugin-metadata + local metadata_shdict = ngx.shared.tracing_buffer + + if local_plugin_info.service_instance_name == "$hostname" then + local_plugin_info.service_instance_name = core.utils.gethostname() + end + + metadata_shdict:set('serviceName', local_plugin_info.service_name) + metadata_shdict:set('serviceInstanceName', local_plugin_info.service_instance_name) + + local sk_cli = require("skywalking.client") + if local_plugin_info.report_interval then + sk_cli.backendTimerDelay = local_plugin_info.report_interval + end + + sk_cli:startBackendTimer(local_plugin_info.endpoint_addr) +end + + +function _M.destroy() + if process.type() ~= "worker" then + return + end + + local sk_cli = require("skywalking.client") + sk_cli:destroyBackendTimer() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/sls-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/sls-logger.lua new file mode 100644 index 0000000..819f084 --- /dev/null +++ 
b/CloudronPackages/APISIX/apisix-source/apisix/plugins/sls-logger.lua @@ -0,0 +1,197 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") + + +local plugin_name = "sls-logger" +local ngx = ngx +local rf5424 = require("apisix.utils.rfc5424") +local tcp = ngx.socket.tcp +local tostring = tostring +local ipairs = ipairs +local table = table + + +local batch_processor_manager = bp_manager_mod.new(plugin_name) +local schema = { + type = "object", + properties = { + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + timeout = {type = "integer", minimum = 1, default= 5000}, + log_format = {type = "object"}, + host = {type = "string"}, + port = {type = "integer"}, + project = {type = "string"}, + logstore = {type = "string"}, + access_key_id = {type = "string"}, + access_key_secret = {type ="string"} + }, + encrypt_fields = {"access_key_secret"}, + required = {"host", "port", "project", "logstore", "access_key_id", "access_key_secret"} +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 406, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + +function _M.check_schema(conf,schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) +end + +local function send_tcp_data(route_conf, log_message) + local err_msg + local res = true + local sock, soc_err = tcp() + local can_close + + if not sock then + return false, "failed to init the socket" .. soc_err + end + + sock:settimeout(route_conf.timeout) + local ok, err = sock:connect(route_conf.host, route_conf.port) + if not ok then + return false, "failed to connect to TCP server: host[" .. route_conf.host + .. "] port[" .. tostring(route_conf.port) .. "] err: " .. err + end + + ok, err = sock:sslhandshake(true, nil, false) + if not ok then + return false, "failed to perform TLS handshake to TCP server: host[" + .. route_conf.host .. "] port[" .. tostring(route_conf.port) + .. "] err: " .. err + end + + core.log.debug("sls logger send data ", log_message) + ok, err = sock:send(log_message) + if not ok then + res = false + can_close = true + err_msg = "failed to send data to TCP server: host[" .. route_conf.host + .. 
"] port[" .. tostring(route_conf.port) .. "] err: " .. err + else + ok, err = sock:setkeepalive(120 * 1000, 20) + if not ok then + can_close = true + core.log.warn("failed to set socket keepalive: host[", route_conf.host, + "] port[", tostring(route_conf.port), "] err: ", err) + end + end + + if can_close then + ok, err = sock:close() + if not ok then + core.log.warn("failed to close the TCP connection, host[", + route_conf.host, "] port[", route_conf.port, "] ", err) + end + end + + return res, err_msg +end + +local function combine_syslog(entries) + local items = {} + for _, entry in ipairs(entries) do + table.insert(items, entry.data) + core.log.info("buffered logs:", entry.data) + end + + return table.concat(items) +end + +_M.combine_syslog = combine_syslog + +local function handle_log(entries) + local data = combine_syslog(entries) + if not data then + return true + end + + return send_tcp_data(entries[1].route_conf, data) +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +-- log phase in APISIX +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + local json_str, err = core.json.encode(entry) + if not json_str then + core.log.error('error occurred while encoding the data: ', err) + return + end + + local structured_data = { + {name = "project", value = conf.project}, + {name = "logstore", value = conf.logstore}, + {name = "access-key-id", value = conf.access_key_id}, + {name = "access-key-secret", value = conf.access_key_secret}, + } + local rf5424_data = rf5424.encode("SYSLOG", "INFO", ctx.var.host, "apisix", + ctx.var.pid, json_str, structured_data) + core.log.info("collect_data:" .. rf5424_data) + local process_context = { + data = rf5424_data, + route_conf = conf + } + + if batch_processor_manager:add_entry(conf, process_context) then + return + end + + batch_processor_manager:add_entry_to_new_processor(conf, process_context, ctx, handle_log) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/splunk-hec-logging.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/splunk-hec-logging.lua new file mode 100644 index 0000000..c93b273 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/splunk-hec-logging.lua @@ -0,0 +1,186 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local ngx = ngx +local ngx_now = ngx.now +local http = require("resty.http") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local table_insert = core.table.insert +local table_concat = core.table.concat +local ipairs = ipairs + + +local DEFAULT_SPLUNK_HEC_ENTRY_SOURCE = "apache-apisix-splunk-hec-logging" +local DEFAULT_SPLUNK_HEC_ENTRY_TYPE = "_json" + + +local plugin_name = "splunk-hec-logging" +local batch_processor_manager = bp_manager_mod.new(plugin_name) + + +local schema = { + type = "object", + properties = { + endpoint = { + type = "object", + properties = { + uri = core.schema.uri_def, + token = { + type = "string", + }, + channel = { + type = "string", + }, + timeout = { + type = "integer", + minimum = 1, + default = 10 + }, + keepalive_timeout = { + type = "integer", + minimum = 1000, + default = 60000, + description = "keepalive timeout in milliseconds", + } + }, + required = { "uri", "token" } + }, + ssl_verify = { + type = "boolean", + default = true + }, + log_format = {type = "object"}, + }, + required = { "endpoint" }, +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 409, + name = plugin_name, + metadata_schema = metadata_schema, + schema = batch_processor_manager:wrap_schema(schema), +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + return core.schema.check(schema, conf) +end + + +local function get_logger_entry(conf, ctx) + local entry, customized = log_util.get_log_entry(plugin_name, conf, ctx) + local splunk_entry = { + time = ngx_now(), + source = DEFAULT_SPLUNK_HEC_ENTRY_SOURCE, + sourcetype = DEFAULT_SPLUNK_HEC_ENTRY_TYPE, + } + + if not customized then + splunk_entry.host = entry.server.hostname + splunk_entry.event = { + request_url = entry.request.url, + request_method = entry.request.method, + request_headers = entry.request.headers, + request_query = entry.request.querystring, + request_size = entry.request.size, + response_headers = entry.response.headers, + response_status = entry.response.status, + response_size = entry.response.size, + latency = entry.latency, + upstream = entry.upstream, + } + else + splunk_entry.host = core.utils.gethostname() + splunk_entry.event = entry + end + + return splunk_entry +end + + +local function send_to_splunk(conf, entries) + local request_headers = {} + request_headers["Content-Type"] = "application/json" + request_headers["Authorization"] = "Splunk " .. conf.endpoint.token + if conf.endpoint.channel then + request_headers["X-Splunk-Request-Channel"] = conf.endpoint.channel + end + + local http_new = http.new() + http_new:set_timeout(conf.endpoint.timeout * 1000) + local t = {} + for _, e in ipairs(entries) do + table_insert(t, core.json.encode(e)) + end + + local res, err = http_new:request_uri(conf.endpoint.uri, { + ssl_verify = conf.ssl_verify, + method = "POST", + body = table_concat(t), + headers = request_headers, + keepalive_timeout = conf.endpoint.keepalive_timeout + }) + + if not res then + return false, "failed to write log to splunk, " .. err + end + + if res.status ~= 200 then + local body = core.json.decode(res.body) + if not body then + return false, "failed to send splunk, http status code: " .. res.status + else + return false, "failed to send splunk, " .. 
body.text + end + end + + return true +end + + +function _M.log(conf, ctx) + local entry = get_logger_entry(conf, ctx) + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + local process = function(entries) + return send_to_splunk(conf, entries) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, process) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog.lua new file mode 100644 index 0000000..1f35395 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog.lua @@ -0,0 +1,99 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local syslog = require("apisix.plugins.syslog.init") +local plugin_name = "syslog" + +local batch_processor_manager = bp_manager_mod.new("sys logger") +local schema = { + type = "object", + properties = { + host = {type = "string"}, + port = {type = "integer"}, + flush_limit = {type = "integer", minimum = 1, default = 4096}, + drop_limit = {type = "integer", default = 1048576}, + timeout = {type = "integer", minimum = 1, default = 3000}, + sock_type = {type = "string", default = "tcp", enum = {"tcp", "udp"}}, + pool_size = {type = "integer", minimum = 5, default = 5}, + tls = {type = "boolean", default = false}, + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + required = {"host", "port"} +} + + +local schema = batch_processor_manager:wrap_schema(schema) + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 401, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, + flush_syslog = syslog.flush_syslog, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + core.utils.check_tls_bool({"tls"}, conf, plugin_name) + return core.schema.check(schema, conf) +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + syslog.push_entry(conf, ctx, entry) +end + + +return _M diff --git 
a/CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog/init.lua new file mode 100644 index 0000000..8a3d90e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/syslog/init.lua @@ -0,0 +1,112 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local logger_socket = require("resty.logger.socket") +local rfc5424 = require("apisix.utils.rfc5424") +local ipairs = ipairs +local table_insert = core.table.insert +local table_concat = core.table.concat + +local batch_processor_manager = bp_manager_mod.new("sys logger") + +local lrucache = core.lrucache.new({ + ttl = 300, count = 512, serial_creating = true, +}) + +local _M = {} + +function _M.flush_syslog(logger) + local ok, err = logger:flush(logger) + if not ok then + core.log.error("failed to flush message:", err) + end + + return ok +end + + +local function send_syslog_data(conf, log_message, api_ctx) + local err_msg + local res = true + + core.log.info("sending a batch logs to ", conf.host, ":", conf.port) + + -- fetch it from lrucache + local logger, err = core.lrucache.plugin_ctx( + lrucache, api_ctx, nil, logger_socket.new, logger_socket, { + host = conf.host, + port = conf.port, + flush_limit = conf.flush_limit, + drop_limit = conf.drop_limit, + timeout = conf.timeout, + sock_type = conf.sock_type, + pool_size = conf.pool_size, + tls = conf.tls, + } + ) + + if not logger then + res = false + err_msg = "failed when initiating the sys logger processor".. err + end + + -- reuse the logger object + local ok, err = logger:log(log_message) + + if not ok then + res = false + err_msg = "failed to log message" .. err + end + + return res, err_msg +end + + +-- called in log phase of APISIX +function _M.push_entry(conf, ctx, entry) + local json_str, err = core.json.encode(entry) + if not json_str then + core.log.error('error occurred while encoding the data: ', err) + return + end + + local rfc5424_data = rfc5424.encode("SYSLOG", "INFO", ctx.var.host, + "apisix", ctx.var.pid, json_str) + core.log.info("collect_data:" .. 
rfc5424_data) + if batch_processor_manager:add_entry(conf, rfc5424_data) then + return + end + + -- Generate a function to be executed by the batch processor + local cp_ctx = core.table.clone(ctx) + local func = function(entries) + local items = {} + for _, e in ipairs(entries) do + table_insert(items, e) + core.log.debug("buffered logs:", e) + end + + return send_syslog_data(conf, table_concat(items), cp_ctx) + end + + batch_processor_manager:add_entry_to_new_processor(conf, rfc5424_data, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/tcp-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/tcp-logger.lua new file mode 100644 index 0000000..7482fe5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/tcp-logger.lua @@ -0,0 +1,161 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local plugin_name = "tcp-logger" +local tostring = tostring +local ngx = ngx +local tcp = ngx.socket.tcp + + +local batch_processor_manager = bp_manager_mod.new("tcp logger") +local schema = { + type = "object", + properties = { + host = {type = "string"}, + port = {type = "integer", minimum = 0}, + tls = {type = "boolean", default = false}, + tls_options = {type = "string"}, + timeout = {type = "integer", minimum = 1, default= 1000}, + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + required = {"host", "port"} +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 405, + name = plugin_name, + metadata_schema = metadata_schema, + schema = batch_processor_manager:wrap_schema(schema), +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + core.utils.check_tls_bool({"tls"}, conf, plugin_name) + return core.schema.check(schema, conf) +end + + +local function send_tcp_data(conf, log_message) + local err_msg + local res = true + local sock, soc_err = tcp() + + if not sock then + return false, "failed to init the socket" .. 
soc_err + end + + sock:settimeout(conf.timeout) + + core.log.info("sending a batch logs to ", conf.host, ":", conf.port) + core.log.info("sending log_message: ", log_message) + + local ok, err = sock:connect(conf.host, conf.port) + if not ok then + return false, "failed to connect to TCP server: host[" .. conf.host + .. "] port[" .. tostring(conf.port) .. "] err: " .. err + end + + if conf.tls then + ok, err = sock:sslhandshake(true, conf.tls_options, false) + if not ok then + return false, "failed to perform TLS handshake to TCP server: host[" + .. conf.host .. "] port[" .. tostring(conf.port) .. "] err: " .. err + end + end + + ok, err = sock:send(log_message) + if not ok then + res = false + err_msg = "failed to send data to TCP server: host[" .. conf.host + .. "] port[" .. tostring(conf.port) .. "] err: " .. err + end + + ok, err = sock:close() + if not ok then + core.log.error("failed to close the TCP connection, host[", + conf.host, "] port[", conf.port, "] ", err) + end + + return res, err_msg +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err + if batch_max_size == 1 then + data, err = core.json.encode(entries[1]) -- encode as single {} + else + data, err = core.json.encode(entries) -- encode as array [{}] + end + + if not data then + core.log.error('error occurred while encoding the data: ', err) + end + + return send_tcp_data(conf, data) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls.lua new file mode 100644 index 0000000..38fe565 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls.lua @@ -0,0 +1,146 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
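+-- Packaging note (illustrative only, not part of upstream APISIX): this plugin samples
+-- requests and forwards access logs to Tencent Cloud CLS through the bundled cls-sdk.
+-- A minimal conf table that would satisfy the schema defined below; the host, topic
+-- and credentials are placeholders only.
+local example_conf = {
+    cls_host = "ap-guangzhou.cls.tencentyun.com",   -- placeholder regional CLS endpoint
+    cls_topic = "REPLACE-WITH-TOPIC-ID",
+    secret_id = "REPLACE-WITH-SECRET-ID",
+    secret_key = "REPLACE-WITH-SECRET-KEY",
+    sample_ratio = 1,                               -- log every request
+    global_tag = { app = "cloudron-apisix" },       -- merged into every log entry
+}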
+-- + +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local cls_sdk = require("apisix.plugins.tencent-cloud-cls.cls-sdk") +local math = math +local pairs = pairs + + +local plugin_name = "tencent-cloud-cls" +local batch_processor_manager = bp_manager_mod.new(plugin_name) +local schema = { + type = "object", + properties = { + cls_host = { type = "string" }, + cls_topic = { type = "string" }, + secret_id = { type = "string" }, + secret_key = { type = "string" }, + sample_ratio = { + type = "number", + minimum = 0.00001, + maximum = 1, + default = 1 + }, + include_req_body = { type = "boolean", default = false }, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + global_tag = { type = "object" }, + log_format = {type = "object"}, + }, + encrypt_fields = {"secret_key"}, + required = { "cls_host", "cls_topic", "secret_id", "secret_key" } +} + + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + + +local _M = { + version = 0.1, + priority = 397, + name = plugin_name, + schema = batch_processor_manager:wrap_schema(schema), + metadata_schema = metadata_schema, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + local ok, err = core.schema.check(schema, conf) + if not ok then + return nil, err + end + return log_util.check_log_schema(conf) +end + + +function _M.access(conf, ctx) + ctx.cls_sample = false + if conf.sample_ratio == 1 or math.random() < conf.sample_ratio then + core.log.debug("cls sampled") + ctx.cls_sample = true + return + end +end + + +function _M.body_filter(conf, ctx) + if ctx.cls_sample then + log_util.collect_body(conf, ctx) + end +end + + +function _M.log(conf, ctx) + -- sample if set + if not ctx.cls_sample then + core.log.debug("cls not sampled, skip log") + return + end + + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + + if conf.global_tag then + for k, v in pairs(conf.global_tag) do + entry[k] = v + end + end + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + local process = function(entries) + local sdk, err = cls_sdk.new(conf.cls_host, conf.cls_topic, conf.secret_id, conf.secret_key) + if err then + core.log.error("init sdk failed err:", err) + return false, err + end + return sdk:send_to_cls(entries) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, process) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls/cls-sdk.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls/cls-sdk.lua new file mode 100644 index 0000000..650d4ab --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/tencent-cloud-cls/cls-sdk.lua @@ -0,0 +1,329 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local pb = require "pb" +local protoc = require("protoc").new() +local http = require("resty.http") +local socket = require("socket") +local str_util = require("resty.string") +local core = require("apisix.core") +local core_gethostname = require("apisix.core.utils").gethostname +local json = core.json +local json_encode = json.encode +local ngx = ngx +local ngx_time = ngx.time +local ngx_now = ngx.now +local ngx_sha1_bin = ngx.sha1_bin +local ngx_hmac_sha1 = ngx.hmac_sha1 +local fmt = string.format +local table = table +local concat_tab = table.concat +local clear_tab = table.clear +local new_tab = table.new +local insert_tab = table.insert +local ipairs = ipairs +local pairs = pairs +local type = type +local tostring = tostring +local setmetatable = setmetatable +local pcall = pcall +local unpack = unpack + +-- api doc https://www.tencentcloud.com/document/product/614/16873 +local MAX_SINGLE_VALUE_SIZE = 1 * 1024 * 1024 +local MAX_LOG_GROUP_VALUE_SIZE = 5 * 1024 * 1024 -- 5MB + +local cls_api_path = "/structuredlog" +local auth_expire_time = 60 +local cls_conn_timeout = 1000 +local cls_read_timeout = 10000 +local cls_send_timeout = 10000 + +local headers_cache = {} +local params_cache = { + ssl_verify = false, + headers = headers_cache, +} + + +local function get_ip(hostname) + local _, resolved = socket.dns.toip(hostname) + local ip_list = {} + if not resolved.ip then + -- DNS parsing failure + local err = resolved + core.log.error("resolve ip failed, hostname: " .. hostname .. ", error: " .. err) + return nil, err + else + for _, v in ipairs(resolved.ip) do + insert_tab(ip_list, v) + end + end + return ip_list +end + +local host_ip +local log_group_list = {} +local log_group_list_pb = { + logGroupList = log_group_list, +} + + +local function sha1(msg) + return str_util.to_hex(ngx_sha1_bin(msg)) +end + + +local function sha1_hmac(key, msg) + return str_util.to_hex(ngx_hmac_sha1(key, msg)) +end + + +-- sign algorithm https://cloud.tencent.com/document/product/614/12445 +local function sign(secret_id, secret_key) + local method = "post" + local format_params = "" + local format_headers = "" + local sign_algorithm = "sha1" + local http_request_info = fmt("%s\n%s\n%s\n%s\n", + method, cls_api_path, format_params, format_headers) + local cur_time = ngx_time() + local sign_time = fmt("%d;%d", cur_time, cur_time + auth_expire_time) + local string_to_sign = fmt("%s\n%s\n%s\n", sign_algorithm, sign_time, sha1(http_request_info)) + + local sign_key = sha1_hmac(secret_key, sign_time) + local signature = sha1_hmac(sign_key, string_to_sign) + + local arr = { + "q-sign-algorithm=sha1", + "q-ak=" .. secret_id, + "q-sign-time=" .. sign_time, + "q-key-time=" .. sign_time, + "q-header-list=", + "q-url-param-list=", + "q-signature=" .. 
signature, + } + + return concat_tab(arr, '&') +end + + +-- normalized log data for CLS API +local function normalize_log(log) + local normalized_log = {} + local log_size = 4 -- empty obj alignment + for k, v in pairs(log) do + local v_type = type(v) + local field = { key = k, value = "" } + if v_type == "string" then + field["value"] = v + elseif v_type == "number" then + field["value"] = tostring(v) + elseif v_type == "table" then + field["value"] = json_encode(v) + else + field["value"] = tostring(v) + core.log.warn("unexpected type " .. v_type .. " for field " .. k) + end + if #field.value > MAX_SINGLE_VALUE_SIZE then + core.log.warn(field.key, " value size over ", MAX_SINGLE_VALUE_SIZE, " , truncated") + field.value = field.value:sub(1, MAX_SINGLE_VALUE_SIZE) + end + insert_tab(normalized_log, field) + log_size = log_size + #field.key + #field.value + end + return normalized_log, log_size +end + + +local _M = { version = 0.1 } +local mt = { __index = _M } + +local pb_state +local function init_pb_state() + local old_pb_state = pb.state(nil) + protoc.reload() + local cls_sdk_protoc = protoc.new() + -- proto file in https://www.tencentcloud.com/document/product/614/42787 + local ok, err = pcall(cls_sdk_protoc.load, cls_sdk_protoc, [[ +package cls; + +message Log +{ + message Content + { + required string key = 1; // Key of each field group + required string value = 2; // Value of each field group + } + required int64 time = 1; // Unix timestamp + repeated Content contents = 2; // Multiple key-value pairs in one log +} + +message LogTag +{ + required string key = 1; + required string value = 2; +} + +message LogGroup +{ + repeated Log logs = 1; // Log array consisting of multiple logs + optional string contextFlow = 2; // This parameter does not take effect currently + optional string filename = 3; // Log filename + optional string source = 4; // Log source, which is generally the machine IP + repeated LogTag logTags = 5; +} + +message LogGroupList +{ + repeated LogGroup logGroupList = 1; // Log group list +} + ]], "tencent-cloud-cls/cls.proto") + if not ok then + cls_sdk_protoc:reset() + pb.state(old_pb_state) + return "failed to load cls.proto: ".. err + end + pb_state = pb.state(old_pb_state) +end + + +function _M.new(host, topic, secret_id, secret_key) + if not pb_state then + local err = init_pb_state() + if err then + return nil, err + end + end + local self = { + host = host, + topic = topic, + secret_id = secret_id, + secret_key = secret_key, + } + return setmetatable(self, mt) +end + + +local function do_request_uri(uri, params) + local client = http:new() + client:set_timeouts(cls_conn_timeout, cls_send_timeout, cls_read_timeout) + local res, err = client:request_uri(uri, params) + client:close() + return res, err +end + + +function _M.send_cls_request(self, pb_obj) + -- recovery of stored pb_store + local old_pb_state = pb.state(pb_state) + local ok, pb_data = pcall(pb.encode, "cls.LogGroupList", pb_obj) + pb_state = pb.state(old_pb_state) + if not ok or not pb_data then + core.log.error("failed to encode LogGroupList, err: ", pb_data) + return false, pb_data + end + + clear_tab(headers_cache) + headers_cache["Host"] = self.host + headers_cache["Content-Type"] = "application/x-protobuf" + headers_cache["Authorization"] = sign(self.secret_id, self.secret_key, cls_api_path) + + -- TODO: support lz4/zstd compress + params_cache.method = "POST" + params_cache.body = pb_data + + local cls_url = "http://" .. self.host .. cls_api_path .. "?topic_id=" .. 
self.topic + core.log.debug("CLS request URL: ", cls_url) + + local res, err = do_request_uri(cls_url, params_cache) + if not res then + return false, err + end + + if res.status ~= 200 then + err = fmt("got wrong status: %s, headers: %s, body, %s", + res.status, json.encode(res.headers), res.body) + -- 413, 404, 401, 403 are not retryable + if res.status == 413 or res.status == 404 or res.status == 401 or res.status == 403 then + core.log.error(err, ", not retryable") + return true + end + + return false, err + end + + core.log.debug("CLS report success") + return true +end + + +function _M.send_to_cls(self, logs) + clear_tab(log_group_list) + local now = ngx_now() * 1000 + + local total_size = 0 + local format_logs = new_tab(#logs, 0) + -- sums of all value in all LogGroup should be no more than 5MB + -- so send whenever size exceed max size + local group_list_start = 1 + + if not host_ip then + local host_ip_list, err = get_ip(core_gethostname()) + if not host_ip_list then + return false, err + end + host_ip = tostring(unpack(host_ip_list)) + end + + for i = 1, #logs, 1 do + local contents, log_size = normalize_log(logs[i]) + if log_size > MAX_LOG_GROUP_VALUE_SIZE then + core.log.error("size of log is over 5MB, dropped") + goto continue + end + total_size = total_size + log_size + if total_size > MAX_LOG_GROUP_VALUE_SIZE then + insert_tab(log_group_list, { + logs = format_logs, + source = host_ip, + }) + local ok, err = self:send_cls_request(log_group_list_pb) + if not ok then + return false, err, group_list_start + end + group_list_start = i + format_logs = new_tab(#logs - i, 0) + total_size = 0 + clear_tab(log_group_list) + end + insert_tab(format_logs, { + time = now, + contents = contents, + }) + :: continue :: + end + + insert_tab(log_group_list, { + logs = format_logs, + source = host_ip, + }) + local ok, err = self:send_cls_request(log_group_list_pb) + return ok, err, group_list_start +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/traffic-split.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/traffic-split.lua new file mode 100644 index 0000000..da6014e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/traffic-split.lua @@ -0,0 +1,305 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
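+-- Packaging note (illustrative only, not part of upstream APISIX): this plugin splits
+-- traffic between the route's own upstream and alternative upstreams, optionally gated
+-- by `vars` match expressions. A minimal conf table that would satisfy the schema
+-- defined below; the upstream id and the request header used in the match rule are
+-- assumptions for illustration.
+local example_conf = {
+    rules = {
+        {
+            match = {
+                { vars = { { "http_x_canary", "==", "true" } } },
+            },
+            weighted_upstreams = {
+                { upstream_id = "canary-upstream", weight = 1 },
+                { weight = 4 },   -- weight-only entry: traffic stays on the route's upstream
+            },
+        },
+    },
+}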
+-- +local core = require("apisix.core") +local upstream = require("apisix.upstream") +local schema_def = require("apisix.schema_def") +local roundrobin = require("resty.roundrobin") +local ipmatcher = require("resty.ipmatcher") +local expr = require("resty.expr.v1") +local pairs = pairs +local ipairs = ipairs +local type = type +local table_insert = table.insert +local tostring = tostring + +local lrucache = core.lrucache.new({ + ttl = 0, count = 512 +}) + + +local vars_schema = { + type = "array", +} + + +local match_schema = { + type = "array", + items = { + type = "object", + properties = { + vars = vars_schema + } + }, +} + + +local upstreams_schema = { + type = "array", + items = { + type = "object", + properties = { + upstream_id = schema_def.id_schema, + upstream = schema_def.upstream, + weight = { + description = "used to split traffic between different" .. + "upstreams for plugin configuration", + type = "integer", + default = 1, + minimum = 0 + } + } + }, + -- When the upstream configuration of the plugin is missing, + -- the upstream of `route` is used by default. + default = { + { + weight = 1 + } + }, + minItems = 1, + maxItems = 20 +} + + +local schema = { + type = "object", + properties = { + rules = { + type = "array", + items = { + type = "object", + properties = { + match = match_schema, + weighted_upstreams = upstreams_schema + }, + } + } + }, +} + +local plugin_name = "traffic-split" + +local _M = { + version = 0.1, + priority = 966, + name = plugin_name, + schema = schema +} + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + + if not ok then + return false, err + end + + if conf.rules then + for _, rule in ipairs(conf.rules) do + if rule.match then + for _, m in ipairs(rule.match) do + local ok, err = expr.new(m.vars) + if not ok then + return false, "failed to validate the 'vars' expression: " .. err + end + end + end + end + end + + return true +end + + +local function parse_domain_for_node(node) + local host = node.domain or node.host + if not ipmatcher.parse_ipv4(host) + and not ipmatcher.parse_ipv6(host) + then + node.domain = host + + local ip, err = core.resolver.parse_domain(host) + if ip then + node.host = ip + end + + if err then + core.log.error("dns resolver domain: ", host, " error: ", err) + end + end +end + + +local function set_upstream(upstream_info, ctx) + local nodes = upstream_info.nodes + local new_nodes = {} + if core.table.isarray(nodes) then + for _, node in ipairs(nodes) do + parse_domain_for_node(node) + table_insert(new_nodes, node) + end + else + for addr, weight in pairs(nodes) do + local node = {} + local port, host + host, port = core.utils.parse_addr(addr) + node.host = host + parse_domain_for_node(node) + node.port = port + node.weight = weight + table_insert(new_nodes, node) + end + end + + local up_conf = { + name = upstream_info.name, + type = upstream_info.type, + hash_on = upstream_info.hash_on, + pass_host = upstream_info.pass_host, + upstream_host = upstream_info.upstream_host, + key = upstream_info.key, + nodes = new_nodes, + timeout = upstream_info.timeout, + scheme = upstream_info.scheme + } + + local ok, err = upstream.check_schema(up_conf) + if not ok then + core.log.error("failed to validate generated upstream: ", err) + return 500, err + end + + local matched_route = ctx.matched_route + up_conf.parent = matched_route + local upstream_key = up_conf.type .. "#route_" .. + matched_route.value.id .. "_" .. upstream_info.vid + if upstream_info.node_tid then + upstream_key = upstream_key .. 
"_" .. upstream_info.node_tid + end + core.log.info("upstream_key: ", upstream_key) + upstream.set(ctx, upstream_key, ctx.conf_version, up_conf) + if upstream_info.scheme == "https" then + upstream.set_scheme(ctx, up_conf) + end + return +end + + +local function new_rr_obj(weighted_upstreams) + local server_list = {} + for i, upstream_obj in ipairs(weighted_upstreams) do + if upstream_obj.upstream_id then + server_list[upstream_obj.upstream_id] = upstream_obj.weight + elseif upstream_obj.upstream then + -- Add a virtual id field to uniquely identify the upstream key. + upstream_obj.upstream.vid = i + -- Get the table id of the nodes as part of the upstream_key, + -- avoid upstream_key duplicate because vid is the same in the loop + -- when multiple rules with multiple weighted_upstreams under each rule. + -- see https://github.com/apache/apisix/issues/5276 + local node_tid = tostring(upstream_obj.upstream.nodes):sub(#"table: " + 1) + upstream_obj.upstream.node_tid = node_tid + server_list[upstream_obj.upstream] = upstream_obj.weight + else + -- If the upstream object has only the weight value, it means + -- that the upstream weight value on the default route has been reached. + -- Mark empty upstream services in the plugin. + server_list["plugin#upstream#is#empty"] = upstream_obj.weight + + end + end + + return roundrobin:new(server_list) +end + + +function _M.access(conf, ctx) + if not conf or not conf.rules then + return + end + + local weighted_upstreams + local match_passed = true + + for _, rule in ipairs(conf.rules) do + -- check if all upstream_ids are valid + if rule.weighted_upstreams then + for _, wupstream in ipairs(rule.weighted_upstreams) do + local ups_id = wupstream.upstream_id + if ups_id then + local ups = upstream.get_by_id(ups_id) + if not ups then + return 500, "failed to fetch upstream info by " + .. "upstream id: " .. ups_id + end + end + end + end + + if not rule.match then + match_passed = true + weighted_upstreams = rule.weighted_upstreams + break + end + + for _, single_match in ipairs(rule.match) do + local expr, err = expr.new(single_match.vars) + if err then + core.log.error("vars expression does not match: ", err) + return 500, err + end + + match_passed = expr:eval(ctx.var) + if match_passed then + break + end + end + + if match_passed then + weighted_upstreams = rule.weighted_upstreams + break + end + end + + core.log.info("match_passed: ", match_passed) + + if not match_passed then + return + end + + local rr_up, err = lrucache(weighted_upstreams, nil, new_rr_obj, weighted_upstreams) + if not rr_up then + core.log.error("lrucache roundrobin failed: ", err) + return 500 + end + + local upstream = rr_up:find() + if upstream and type(upstream) == "table" then + core.log.info("upstream: ", core.json.encode(upstream)) + return set_upstream(upstream, ctx) + elseif upstream and upstream ~= "plugin#upstream#is#empty" then + ctx.upstream_id = upstream + core.log.info("upstream_id: ", upstream) + return + end + + ctx.upstream_id = nil + core.log.info("route_up: ", upstream) + return +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/ua-restriction.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ua-restriction.lua new file mode 100644 index 0000000..bf28685 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/ua-restriction.lua @@ -0,0 +1,178 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. 
See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ipairs = ipairs +local core = require("apisix.core") +local re_compile = require("resty.core.regex").re_match_compile +local stringx = require('pl.stringx') +local type = type +local str_strip = stringx.strip +local re_find = ngx.re.find + +local lrucache_allow = core.lrucache.new({ ttl = 300, count = 4096 }) +local lrucache_deny = core.lrucache.new({ ttl = 300, count = 4096 }) + +local schema = { + type = "object", + properties = { + bypass_missing = { + type = "boolean", + default = false, + }, + allowlist = { + type = "array", + minItems = 1, + items = { + type = "string", + minLength = 1, + } + }, + denylist = { + type = "array", + minItems = 1, + items = { + type = "string", + minLength = 1, + } + }, + message = { + type = "string", + minLength = 1, + maxLength = 1024, + default = "Not allowed" + }, + }, + oneOf = { + {required = {"allowlist"}}, + {required = {"denylist"}} + } +} + +local plugin_name = "ua-restriction" + +local _M = { + version = 0.1, + priority = 2999, + name = plugin_name, + schema = schema, +} + +local function check_with_allow_list(user_agents, allowlist) + local check = function (user_agent) + user_agent = str_strip(user_agent) + + for _, rule in ipairs(allowlist) do + if re_find(user_agent, rule, "jo") then + return true + end + end + return false + end + + if type(user_agents) == "table" then + for _, v in ipairs(user_agents) do + if lrucache_allow(v, allowlist, check, v) then + return true + end + end + return false + else + return lrucache_allow(user_agents, allowlist, check, user_agents) + end +end + + +local function check_with_deny_list(user_agents, denylist) + local check = function (user_agent) + user_agent = str_strip(user_agent) + + for _, rule in ipairs(denylist) do + if re_find(user_agent, rule, "jo") then + return false + end + end + return true + end + + if type(user_agents) == "table" then + for _, v in ipairs(user_agents) do + if lrucache_deny(v, denylist, check, v) then + return false + end + end + return true + else + return lrucache_deny(user_agents, denylist, check, user_agents) + end +end + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + + if not ok then + return false, err + end + + if conf.allowlist then + for _, re_rule in ipairs(conf.allowlist) do + ok, err = re_compile(re_rule, "j") + if not ok then + return false, err + end + end + end + + if conf.denylist then + for _, re_rule in ipairs(conf.denylist) do + ok, err = re_compile(re_rule, "j") + if not ok then + return false, err + end + end + end + + return true +end + + +function _M.access(conf, ctx) + -- after core.request.header function changed + -- we need to get original header value by using core.request.headers + local user_agent = core.request.headers(ctx)["User-Agent"] + + if not user_agent then + if conf.bypass_missing then + return + else + 
return 403, { message = conf.message } + end + end + + local is_passed + + if conf.allowlist then + is_passed = check_with_allow_list(user_agent, conf.allowlist) + else + is_passed = check_with_deny_list(user_agent, conf.denylist) + end + + if not is_passed then + return 403, { message = conf.message } + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/udp-logger.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/udp-logger.lua new file mode 100644 index 0000000..75e8bba --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/udp-logger.lua @@ -0,0 +1,145 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local plugin_name = "udp-logger" +local tostring = tostring +local ngx = ngx +local udp = ngx.socket.udp + + +local batch_processor_manager = bp_manager_mod.new("udp logger") +local schema = { + type = "object", + properties = { + host = {type = "string"}, + port = {type = "integer", minimum = 0}, + timeout = {type = "integer", minimum = 1, default = 3}, + log_format = {type = "object"}, + include_req_body = {type = "boolean", default = false}, + include_req_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + include_resp_body = { type = "boolean", default = false }, + include_resp_body_expr = { + type = "array", + minItems = 1, + items = { + type = "array" + } + }, + }, + required = {"host", "port"} +} + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 400, + name = plugin_name, + metadata_schema = metadata_schema, + schema = batch_processor_manager:wrap_schema(schema), +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + + return core.schema.check(schema, conf) +end + + +local function send_udp_data(conf, log_message) + local err_msg + local res = true + local sock = udp() + sock:settimeout(conf.timeout * 1000) + + core.log.info("sending a batch logs to ", conf.host, ":", conf.port) + core.log.info("sending log_message: ", log_message) + + local ok, err = sock:setpeername(conf.host, conf.port) + + if not ok then + return false, "failed to connect to UDP server: host[" .. conf.host + .. "] port[" .. tostring(conf.port) .. "] err: " .. err + end + + ok, err = sock:send(log_message) + if not ok then + res = false + err_msg = "failed to send data to UDP server: host[" .. conf.host + .. "] port[" .. tostring(conf.port) .. "] err:" .. 
err + end + + ok, err = sock:close() + if not ok then + core.log.error("failed to close the UDP connection, host[", + conf.host, "] port[", conf.port, "] ", err) + end + + return res, err_msg +end + + +function _M.body_filter(conf, ctx) + log_util.collect_body(conf, ctx) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + + if batch_processor_manager:add_entry(conf, entry) then + return + end + + -- Generate a function to be executed by the batch processor + local func = function(entries, batch_max_size) + local data, err + if batch_max_size == 1 then + data, err = core.json.encode(entries[1]) -- encode as single {} + else + data, err = core.json.encode(entries) -- encode as array [{}] + end + + if not data then + return false, 'error occurred while encoding the data: ' .. err + end + + return send_udp_data(conf, data) + end + + batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, func) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/uri-blocker.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/uri-blocker.lua new file mode 100644 index 0000000..4612532 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/uri-blocker.lua @@ -0,0 +1,108 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
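+-- Packaging note (illustrative only, not part of upstream APISIX): this plugin rejects
+-- requests whose URI matches any of the configured regular expressions. A minimal conf
+-- table that would satisfy the schema defined below; the rules and message are examples.
+local example_conf = {
+    block_rules = { "root\\.exe", "admin/config\\.php" },
+    rejected_code = 403,
+    rejected_msg = "access to this URI is blocked",
+    case_insensitive = true,
+}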
+-- +local core = require("apisix.core") +local re_compile = require("resty.core.regex").re_match_compile +local re_find = ngx.re.find +local ipairs = ipairs + +local schema = { + type = "object", + properties = { + block_rules = { + type = "array", + items = { + type = "string", + minLength = 1, + maxLength = 4096, + }, + uniqueItems = true + }, + rejected_code = { + type = "integer", + minimum = 200, + default = 403 + }, + rejected_msg = { + type = "string", + minLength = 1 + }, + case_insensitive = { + type = "boolean", + default = false + }, + }, + required = {"block_rules"}, +} + + +local plugin_name = "uri-blocker" + +local _M = { + version = 0.1, + priority = 2900, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + for i, re_rule in ipairs(conf.block_rules) do + local ok, err = re_compile(re_rule, "j") + -- core.log.warn("ok: ", tostring(ok), " err: ", tostring(err), + -- " re_rule: ", re_rule) + if not ok then + return false, err + end + end + + return true +end + + +function _M.rewrite(conf, ctx) + core.log.info("uri: ", ctx.var.request_uri) + core.log.info("block uri rules: ", conf.block_rules_concat) + + if not conf.block_rules_concat then + local block_rules = {} + for i, re_rule in ipairs(conf.block_rules) do + block_rules[i] = re_rule + end + + conf.block_rules_concat = core.table.concat(block_rules, "|") + if conf.case_insensitive then + conf.block_rules_concat = "(?i)" .. conf.block_rules_concat + end + core.log.info("concat block_rules: ", conf.block_rules_concat) + end + + local from = re_find(ctx.var.request_uri, conf.block_rules_concat, "jo") + if from then + if conf.rejected_msg then + return conf.rejected_code, { error_msg = conf.rejected_msg } + end + return conf.rejected_code + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/wolf-rbac.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/wolf-rbac.lua new file mode 100644 index 0000000..22a90c2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/wolf-rbac.lua @@ -0,0 +1,492 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
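+-- Packaging note (illustrative only, not part of upstream APISIX): wolf-rbac is an auth
+-- plugin; a conf like the one sketched here is attached to a consumer, and the rewrite
+-- phase validates the caller's rbac token against the configured wolf-server. The appid
+-- is a placeholder, the other values restate the schema defaults.
+local example_consumer_conf = {
+    appid = "cloudron-app",
+    server = "http://127.0.0.1:12180",
+    header_prefix = "X-",
+}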
+-- + +local core = require("apisix.core") +local consumer = require("apisix.consumer") +local json = require("apisix.core.json") +local sleep = core.sleep +local ngx_re = require("ngx.re") +local http = require("resty.http") +local ngx = ngx +local rawget = rawget +local rawset = rawset +local setmetatable = setmetatable +local type = type +local string = string +local req_read_body = ngx.req.read_body +local req_get_body_data = ngx.req.get_body_data + +local plugin_name = "wolf-rbac" + + +local schema = { + type = "object", + properties = { + appid = { + type = "string", + default = "unset" + }, + server = { + type = "string", + default = "http://127.0.0.1:12180" + }, + header_prefix = { + type = "string", + default = "X-" + }, + } +} + +local _M = { + version = 0.1, + priority = 2555, + type = 'auth', + name = plugin_name, + schema = schema, +} + + +local token_version = 'V1' +local function create_rbac_token(appid, wolf_token) + return token_version .. "#" .. appid .. "#" .. wolf_token +end + +local function fail_response(message, init_values) + local response = init_values or {} + response.message = message + return response +end + +local function success_response(message, init_values) + local response = init_values or {} + response.message = message + return response +end + +local function parse_rbac_token(rbac_token) + local res, err = ngx_re.split(rbac_token, "#", nil, nil, 3) + if not res then + return nil, err + end + + if #res ~= 3 or res[1] ~= token_version then + return nil, 'invalid rbac token: version' + end + local appid = res[2] + local wolf_token = res[3] + + return {appid = appid, wolf_token = wolf_token} +end + +local function new_headers() + local t = {} + local lt = {} + local _mt = { + __index = function(t, k) + return rawget(lt, string.lower(k)) + end, + __newindex = function(t, k, v) + rawset(t, k, v) + rawset(lt, string.lower(k), v) + end, + } + return setmetatable(t, _mt) +end + +-- timeout in ms +local function http_req(method, uri, body, myheaders, timeout) + if not myheaders then + myheaders = new_headers() + end + + local httpc = http.new() + if timeout then + httpc:set_timeout(timeout) + end + + local res, err = httpc:request_uri(uri, { + method = method, + headers = myheaders, + body = body, + ssl_verify = false + }) + + if not res then + core.log.error("FAIL REQUEST [ ",core.json.delay_encode( + {method = method, uri = uri, body = body, headers = myheaders}), + " ] failed! res is nil, err:", err) + return nil, err + end + + return res +end + +local function http_get(uri, myheaders, timeout) + return http_req("GET", uri, nil, myheaders, timeout) +end + + +function _M.check_schema(conf) + local check = {"server"} + core.utils.check_https(check, conf, plugin_name) + core.log.info("input conf: ", core.json.delay_encode(conf)) + + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + return true +end + + +local function fetch_rbac_token(ctx) + if ctx.var.arg_rbac_token then + return ngx.unescape_uri(ctx.var.arg_rbac_token) + end + + if ctx.var.http_authorization then + return ctx.var.http_authorization + end + + if ctx.var.http_x_rbac_token then + return ctx.var.http_x_rbac_token + end + + return ctx.var['cookie_x-rbac-token'] +end + + +local function check_url_permission(server, appid, action, resName, client_ip, wolf_token) + local retry_max = 3 + local errmsg + local userInfo + local res + local err + local access_check_url = server .. 
"/wolf/rbac/access_check" + local headers = new_headers() + headers["x-rbac-token"] = wolf_token + headers["Content-Type"] = "application/json; charset=utf-8" + local args = { appID = appid, resName = resName, action = action, clientIP = client_ip} + local url = access_check_url .. "?" .. ngx.encode_args(args) + local timeout = 1000 * 10 + + for i = 1, retry_max do + -- TODO: read apisix info. + res, err = http_get(url, headers, timeout) + if err then + break + else + core.log.info("check permission request:", url, ", status:", res.status, + ",body:", core.json.delay_encode(res.body)) + if res.status < 500 then + break + else + core.log.info("request [curl -v ", url, "] failed! status:", res.status) + if i < retry_max then + sleep(0.1) + end + end + end + end + + if err then + core.log.error("fail request: ", url, ", err:", err) + return { + status = 500, + err = "request to wolf-server failed, err:" .. err + } + end + + if res.status ~= 200 and res.status >= 500 then + return { + status = 500, + err = 'request to wolf-server failed, status:' .. res.status + } + end + + local body, err = json.decode(res.body) + if not body then + errmsg = 'check permission failed! parse response json failed!' + core.log.error( "json.decode(", res.body, ") failed! err:", err) + return {status = res.status, err = errmsg} + else + if body.data then + userInfo = body.data.userInfo + end + errmsg = body.reason + return {status = res.status, err = errmsg, userInfo = userInfo} + end +end + + +function _M.rewrite(conf, ctx) + local url = ctx.var.uri + local action = ctx.var.request_method + local client_ip = ctx.var.http_x_real_ip or core.request.get_ip(ctx) + local perm_item = {action = action, url = url, clientIP = client_ip} + core.log.info("hit wolf-rbac rewrite") + + local rbac_token = fetch_rbac_token(ctx) + if rbac_token == nil then + core.log.info("no permission to access ", + core.json.delay_encode(perm_item), ", need login!") + return 401, fail_response("Missing rbac token in request") + end + + local tokenInfo, err = parse_rbac_token(rbac_token) + core.log.info("token info: ", core.json.delay_encode(tokenInfo), + ", err: ", err) + if err then + return 401, fail_response('invalid rbac token: parse failed') + end + + local appid = tokenInfo.appid + local wolf_token = tokenInfo.wolf_token + perm_item.appid = appid + perm_item.wolf_token = wolf_token + + local consumer_conf = consumer.plugin(plugin_name) + if not consumer_conf then + return 401, fail_response("Missing related consumer") + end + + local consumers = consumer.consumers_kv(plugin_name, consumer_conf, "appid") + + core.log.info("------ consumers: ", core.json.delay_encode(consumers)) + local cur_consumer = consumers[appid] + if not cur_consumer then + core.log.error("consumer [", appid, "] not found") + return 401, fail_response("Invalid appid in rbac token") + end + core.log.info("consumer: ", core.json.delay_encode(cur_consumer)) + local server = cur_consumer.auth_conf.server + + local res = check_url_permission(server, appid, action, url, + client_ip, wolf_token) + core.log.info(" check_url_permission(", core.json.delay_encode(perm_item), + ") res: ",core.json.delay_encode(res)) + + local username = nil + local nickname = nil + if type(res.userInfo) == 'table' then + local userInfo = res.userInfo + ctx.userInfo = userInfo + local userId = userInfo.id + username = userInfo.username + nickname = userInfo.nickname or userInfo.username + local prefix = cur_consumer.auth_conf.header_prefix or '' + core.response.set_header(prefix .. 
"UserId", userId) + core.response.set_header(prefix .. "Username", username) + core.response.set_header(prefix .. "Nickname", ngx.escape_uri(nickname)) + core.request.set_header(ctx, prefix .. "UserId", userId) + core.request.set_header(ctx, prefix .. "Username", username) + core.request.set_header(ctx, prefix .. "Nickname", ngx.escape_uri(nickname)) + end + + if res.status ~= 200 then + -- no permission. + core.log.error(" check_url_permission(", + core.json.delay_encode(perm_item), + ") failed, res: ",core.json.delay_encode(res)) + return res.status, fail_response(res.err, { username = username, nickname = nickname }) + end + consumer.attach_consumer(ctx, cur_consumer, consumer_conf) + core.log.info("wolf-rbac check permission passed") +end + +local function get_args() + local ctx = ngx.ctx.api_ctx + local args, err + req_read_body() + if string.find(ctx.var.http_content_type or "","application/json", + 1, true) then + local req_body = req_get_body_data() + args, err = json.decode(req_body) + if not args then + core.log.error("json.decode(", req_body, ") failed! ", err) + end + else + args = core.request.get_post_args(ctx) + end + + return args +end + +local function get_consumer(appid) + local consumer_conf = consumer.plugin(plugin_name) + if not consumer_conf then + core.response.exit(500) + end + + local consumers = consumer.consumers_kv(plugin_name, consumer_conf, "appid") + + core.log.info("------ consumers: ", core.json.delay_encode(consumers)) + local consumer = consumers[appid] + if not consumer then + core.log.info("request appid [", appid, "] not found") + core.response.exit(400, + fail_response("appid not found") + ) + end + return consumer +end + +local function request_to_wolf_server(method, uri, headers, body) + headers["Content-Type"] = "application/json; charset=utf-8" + local timeout = 1000 * 5 + local request_debug = core.json.delay_encode( + { + method = method, uri = uri, body = body, + headers = headers,timeout = timeout + } + ) + + core.log.info("request [", request_debug, "] ....") + local res, err = http_req(method, uri, core.json.encode(body), headers, timeout) + if not res then + core.log.error("request [", request_debug, "] failed! err: ", err) + return core.response.exit(500, + fail_response("request to wolf-server failed!") + ) + end + core.log.info("request [", request_debug, "] status: ", res.status, + ", body: ", res.body) + + if res.status ~= 200 then + core.log.error("request [", request_debug, "] failed! status: ", + res.status) + return core.response.exit(500, + fail_response("request to wolf-server failed!") + ) + end + local body, err = json.decode(res.body) + if not body then + core.log.error("request [", request_debug, "] failed! err:", err) + return core.response.exit(500, fail_response("request to wolf-server failed!")) + end + if not body.ok then + core.log.error("request [", request_debug, "] failed! response body:", + core.json.delay_encode(body)) + return core.response.exit(200, fail_response("request to wolf-server failed!")) + end + + core.log.info("request [", request_debug, "] success! 
response body:", + core.json.delay_encode(body)) + return body +end + +local function wolf_rbac_login() + local args = get_args() + if not args then + return core.response.exit(400, fail_response("invalid request")) + end + if not args.appid then + return core.response.exit(400, fail_response("appid is missing")) + end + + local appid = args.appid + local consumer = get_consumer(appid) + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + local uri = consumer.auth_conf.server .. '/wolf/rbac/login.rest' + local headers = new_headers() + local body = request_to_wolf_server('POST', uri, headers, args) + + local userInfo = body.data.userInfo + local wolf_token = body.data.token + + local rbac_token = create_rbac_token(appid, wolf_token) + core.response.exit(200, success_response(nil, {rbac_token = rbac_token, user_info = userInfo})) +end + +local function get_wolf_token(ctx) + core.log.info("hit wolf-rbac change_password api") + local rbac_token = fetch_rbac_token(ctx) + if rbac_token == nil then + local url = ctx.var.uri + local action = ctx.var.request_method + local client_ip = core.request.get_ip(ctx) + local perm_item = {action = action, url = url, clientIP = client_ip} + core.log.info("no permission to access ", + core.json.delay_encode(perm_item), ", need login!") + return core.response.exit(401, fail_response("Missing rbac token in request")) + end + + local tokenInfo, err = parse_rbac_token(rbac_token) + core.log.info("token info: ", core.json.delay_encode(tokenInfo), + ", err: ", err) + if err then + return core.response.exit(401, fail_response('invalid rbac token: parse failed')) + end + return tokenInfo +end + +local function wolf_rbac_change_pwd() + local args = get_args() + + local ctx = ngx.ctx.api_ctx + local tokenInfo = get_wolf_token(ctx) + local appid = tokenInfo.appid + local wolf_token = tokenInfo.wolf_token + local consumer = get_consumer(appid) + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + local uri = consumer.auth_conf.server .. '/wolf/rbac/change_pwd' + local headers = new_headers() + headers['x-rbac-token'] = wolf_token + request_to_wolf_server('POST', uri, headers, args) + core.response.exit(200, success_response('success to change password', { })) +end + +local function wolf_rbac_user_info() + local ctx = ngx.ctx.api_ctx + local tokenInfo = get_wolf_token(ctx) + local appid = tokenInfo.appid + local wolf_token = tokenInfo.wolf_token + local consumer = get_consumer(appid) + core.log.info("consumer: ", core.json.delay_encode(consumer)) + + local uri = consumer.auth_conf.server .. 
'/wolf/rbac/user_info' + local headers = new_headers() + headers['x-rbac-token'] = wolf_token + local body = request_to_wolf_server('GET', uri, headers, {}) + local userInfo = body.data.userInfo + core.response.exit(200, success_response(nil, {user_info = userInfo})) +end + +function _M.api() + return { + { + methods = {"POST"}, + uri = "/apisix/plugin/wolf-rbac/login", + handler = wolf_rbac_login, + }, + { + methods = {"PUT"}, + uri = "/apisix/plugin/wolf-rbac/change_pwd", + handler = wolf_rbac_change_pwd, + }, + { + methods = {"GET"}, + uri = "/apisix/plugin/wolf-rbac/user_info", + handler = wolf_rbac_user_info, + }, + } +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/workflow.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/workflow.lua new file mode 100644 index 0000000..e41679b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/workflow.lua @@ -0,0 +1,161 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local expr = require("resty.expr.v1") +local ipairs = ipairs + +local schema = { + type = "object", + properties = { + rules = { + type = "array", + items = { + type = "object", + properties = { + case = { + type = "array", + items = { + anyOf = { + { + type = "array", + }, + { + type = "string", + }, + } + }, + minItems = 1, + }, + actions = { + type = "array", + items = { + type = "array", + minItems = 1 + } + } + }, + required = {"actions"} + } + } + }, + required = {"rules"} +} + +local plugin_name = "workflow" + +local _M = { + version = 0.1, + priority = 1006, + name = plugin_name, + schema = schema +} + + +local return_schema = { + type = "object", + properties = { + code = { + type = "integer", + minimum = 100, + maximum = 599 + } + }, + required = {"code"} +} + + +local function check_return_schema(conf) + local ok, err = core.schema.check(return_schema, conf) + if not ok then + return false, err + end + return true +end + + +local function exit(conf) + return conf.code, {error_msg = "rejected by workflow"} +end + + + +local support_action = { + ["return"] = { + handler = exit, + check_schema = check_return_schema, + } +} + + +function _M.register(plugin_name, handler, check_schema) + support_action[plugin_name] = { + handler = handler, + check_schema = check_schema + } +end + +function _M.check_schema(conf) + local ok, err = core.schema.check(schema, conf) + if not ok then + return false, err + end + + for idx, rule in ipairs(conf.rules) do + if rule.case then + local ok, err = expr.new(rule.case) + if not ok then + return false, "failed to validate the 'case' expression: " .. 
err + end + end + + local actions = rule.actions + for _, action in ipairs(actions) do + + if not support_action[action[1]] then + return false, "unsupported action: " .. action[1] + end + + -- use the action's idx as an identifier to isolate between confs + action[2]["_vid"] = idx + local ok, err = support_action[action[1]].check_schema(action[2], plugin_name) + if not ok then + return false, "failed to validate the '" .. action[1] .. "' action: " .. err + end + end + end + + return true +end + + +function _M.access(conf, ctx) + for _, rule in ipairs(conf.rules) do + local match_result = true + if rule.case then + local expr, _ = expr.new(rule.case) + match_result = expr:eval(ctx.var) + end + if match_result then + -- only one action is currently supported + local action = rule.actions[1] + return support_action[action[1]].handler(action[2], ctx) + end + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin.lua new file mode 100644 index 0000000..dc814f1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin.lua @@ -0,0 +1,318 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local new_tracer = require("opentracing.tracer").new +local zipkin_codec = require("apisix.plugins.zipkin.codec") +local new_random_sampler = require("apisix.plugins.zipkin.random_sampler").new +local new_reporter = require("apisix.plugins.zipkin.reporter").new +local ngx = ngx +local ngx_var = ngx.var +local ngx_re = require("ngx.re") +local pairs = pairs +local tonumber = tonumber +local to_hex = require "resty.string".to_hex + +local plugin_name = "zipkin" +local ZIPKIN_SPAN_VER_1 = 1 +local ZIPKIN_SPAN_VER_2 = 2 +local plugin = require("apisix.plugin") +local string_format = string.format + + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + +local schema = { + type = "object", + properties = { + endpoint = {type = "string"}, + sample_ratio = {type = "number", minimum = 0.00001, maximum = 1}, + service_name = { + type = "string", + description = "service name for zipkin reporter", + default = "APISIX", + }, + server_addr = { + type = "string", + description = "default is $server_addr, you can specify your external ip address", + pattern = "^[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}$" + }, + span_version = { + enum = {ZIPKIN_SPAN_VER_1, ZIPKIN_SPAN_VER_2}, + default = ZIPKIN_SPAN_VER_2, + }, + }, + required = {"endpoint", "sample_ratio"} +} + + +local _M = { + version = 0.1, + priority = 12011, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + local check = {"endpoint"} + core.utils.check_https(check, conf, plugin_name) + return core.schema.check(schema, conf) +end + +local plugin_info = plugin.plugin_attr(plugin_name) or {} + + +local function create_tracer(conf,ctx) + conf.route_id = ctx.route_id + local reporter = new_reporter(conf) + reporter:init_processor() + local tracer = new_tracer(reporter, new_random_sampler(conf)) + tracer:register_injector("http_headers", zipkin_codec.new_injector()) + tracer:register_extractor("http_headers", zipkin_codec.new_extractor()) + return tracer +end + + +local function parse_b3(b3) + -- See https://github.com/openzipkin/b3-propagation#single-header + if b3 == "0" then + return nil, nil, nil, "0", nil + end + + local pieces, err = ngx_re.split(b3, "-", nil, nil, 4) + if not pieces then + return err + end + if not pieces[1] then + return "missing trace_id" + end + if not pieces[2] then + return "missing span_id" + end + return nil, pieces[1], pieces[2], pieces[3], pieces[4] +end + +local function inject_header(ctx) + local opentracing = ctx.opentracing + local tracer = opentracing.tracer + local outgoing_headers = {} + + local span = opentracing.request_span + if ctx.opentracing_sample then + span = opentracing.proxy_span + end + tracer:inject(span, "http_headers", outgoing_headers) + + for k, v in pairs(outgoing_headers) do + core.request.set_header(ctx, k, v) + end +end + +function _M.rewrite(plugin_conf, ctx) + local conf = core.table.clone(plugin_conf) + -- once the server started, server_addr and server_port won't change, so we can cache it. + conf.server_port = tonumber(ctx.var['server_port']) + + if not conf.server_addr or conf.server_addr == '' then + conf.server_addr = ctx.var["server_addr"] + end + + local tracer = core.lrucache.plugin_ctx(lrucache, ctx, conf.server_addr .. 
conf.server_port, + create_tracer, conf, ctx) + + local headers = core.request.headers(ctx) + local per_req_sample_ratio + + -- X-B3-Flags: if it equals '1' then it overrides sampling policy + -- We still want to warn on invalid sampled header, so do this after the above + local debug = headers["x-b3-flags"] + if debug == "1" then + per_req_sample_ratio = 1 + end + + local trace_id, request_span_id, sampled, parent_span_id + local b3 = headers["b3"] + if b3 then + -- don't pass b3 header by default + -- TODO: add an option like 'single_b3_header' so we can adapt to the upstream + -- which doesn't support b3 header without always breaking down the header + core.request.set_header(ctx, "b3", nil) + + local err + err, trace_id, request_span_id, sampled, parent_span_id = parse_b3(b3) + + if err then + core.log.error("invalid b3 header: ", b3, ", ignored: ", err) + return 400 + end + + if sampled == "d" then + core.request.set_header(ctx, "x-b3-flags", "1") + sampled = "1" + end + else + -- X-B3-Sampled: if the client decided to sample this request, we do too. + sampled = headers["x-b3-sampled"] + trace_id = headers["x-b3-traceid"] + parent_span_id = headers["x-b3-parentspanid"] + request_span_id = headers["x-b3-spanid"] + end + + local zipkin_ctx = core.tablepool.fetch("zipkin_ctx", 0, 3) + zipkin_ctx.trace_id = trace_id + zipkin_ctx.parent_span_id = parent_span_id + zipkin_ctx.request_span_id = request_span_id + ctx.zipkin = zipkin_ctx + + local wire_context = tracer:extract("http_headers", ctx) + + local start_timestamp = ngx.req.start_time() + local request_span = tracer:start_span("apisix.request", { + child_of = wire_context, + start_timestamp = start_timestamp, + tags = { + component = "apisix", + ["span.kind"] = "server", + ["http.method"] = ctx.var.request_method, + ["http.url"] = ctx.var.request_uri, + -- TODO: support ipv6 + ["peer.ipv4"] = core.request.get_remote_client_ip(ctx), + ["peer.port"] = core.request.get_remote_client_port(ctx), + } + }) + + ctx.opentracing = { + tracer = tracer, + wire_context = wire_context, + request_span = request_span, + } + + -- Process sampled + if sampled == "1" or sampled == "true" then + per_req_sample_ratio = 1 + elseif sampled == "0" or sampled == "false" then + per_req_sample_ratio = 0 + end + + ctx.opentracing_sample = tracer.sampler:sample(per_req_sample_ratio or conf.sample_ratio) + if not ctx.opentracing_sample then + request_span:set_baggage_item("x-b3-sampled","0") + else + request_span:set_baggage_item("x-b3-sampled","1") + end + + if plugin_info.set_ngx_var then + local span_context = request_span:context() + ngx_var.zipkin_context_traceparent = string_format("00-%s-%s-%02x", + to_hex(span_context.trace_id), + to_hex(span_context.span_id), + span_context:get_baggage_item("x-b3-sampled")) + ngx_var.zipkin_trace_id = span_context.trace_id + ngx_var.zipkin_span_id = span_context.span_id + end + + if not ctx.opentracing_sample then + return + end + + local request_span = ctx.opentracing.request_span + if conf.span_version == ZIPKIN_SPAN_VER_1 then + ctx.opentracing.rewrite_span = request_span:start_child_span("apisix.rewrite", + start_timestamp) + ctx.REWRITE_END_TIME = tracer:time() + ctx.opentracing.rewrite_span:finish(ctx.REWRITE_END_TIME) + else + ctx.opentracing.proxy_span = request_span:start_child_span("apisix.proxy", + start_timestamp) + end +end + +function _M.access(conf, ctx) + local opentracing = ctx.opentracing + local tracer = opentracing.tracer + + if conf.span_version == ZIPKIN_SPAN_VER_1 then + opentracing.access_span = 
opentracing.request_span:start_child_span( + "apisix.access", ctx.REWRITE_END_TIME) + + ctx.ACCESS_END_TIME = tracer:time() + opentracing.access_span:finish(ctx.ACCESS_END_TIME) + + opentracing.proxy_span = opentracing.request_span:start_child_span( + "apisix.proxy", ctx.ACCESS_END_TIME) + end + + -- send headers to upstream + inject_header(ctx) +end + + +function _M.header_filter(conf, ctx) + if not ctx.opentracing_sample then + return + end + + local opentracing = ctx.opentracing + local end_time = opentracing.tracer:time() + + if conf.span_version == ZIPKIN_SPAN_VER_1 then + if opentracing.proxy_span then + opentracing.body_filter_span = opentracing.proxy_span:start_child_span( + "apisix.body_filter", end_time) + end + else + opentracing.proxy_span:finish(end_time) + opentracing.response_span = opentracing.request_span:start_child_span( + "apisix.response_span", end_time) + end +end + + +function _M.log(conf, ctx) + if ctx.zipkin then + core.tablepool.release("zipkin_ctx", ctx.zipkin) + ctx.zipkin = nil + end + + if not ctx.opentracing_sample then + return + end + + local opentracing = ctx.opentracing + + local log_end_time = opentracing.tracer:time() + + if conf.span_version == ZIPKIN_SPAN_VER_1 then + if opentracing.body_filter_span then + opentracing.body_filter_span:finish(log_end_time) + end + if opentracing.proxy_span then + opentracing.proxy_span:finish(log_end_time) + end + elseif opentracing.response_span then + opentracing.response_span:finish(log_end_time) + end + + local upstream_status = core.response.get_upstream_status(ctx) + opentracing.request_span:set_tag("http.status_code", upstream_status) + + opentracing.request_span:finish(log_end_time) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/codec.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/codec.lua new file mode 100644 index 0000000..917c492 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/codec.lua @@ -0,0 +1,114 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local to_hex = require "resty.string".to_hex +local new_span_context = require("opentracing.span_context").new +local ngx = ngx +local string = string +local pairs = pairs +local tonumber = tonumber + +local function hex_to_char(c) + return string.char(tonumber(c, 16)) +end + +local function from_hex(str) + if str ~= nil then -- allow nil to pass through + str = str:gsub("%x%x", hex_to_char) + end + return str +end + +local function new_extractor() + return function(ctx) + local had_invalid_id = false + + local zipkin_ctx = ctx.zipkin + local trace_id = zipkin_ctx.trace_id + local parent_span_id = zipkin_ctx.parent_span_id + local request_span_id = zipkin_ctx.request_span_id + + -- Validate trace id + if trace_id and + ((#trace_id ~= 16 and #trace_id ~= 32) or trace_id:match("%X")) then + core.log.warn("x-b3-traceid header invalid; ignoring.") + had_invalid_id = true + end + + -- Validate parent_span_id + if parent_span_id and + (#parent_span_id ~= 16 or parent_span_id:match("%X")) then + core.log.warn("x-b3-parentspanid header invalid; ignoring.") + had_invalid_id = true + end + + -- Validate request_span_id + if request_span_id and + (#request_span_id ~= 16 or request_span_id:match("%X")) then + core.log.warn("x-b3-spanid header invalid; ignoring.") + had_invalid_id = true + end + + if trace_id == nil or had_invalid_id then + return nil + end + + -- Process jaegar baggage header + local baggage = {} + local headers = core.request.headers(ctx) + for k, v in pairs(headers) do + local baggage_key = k:match("^uberctx%-(.*)$") + if baggage_key then + baggage[baggage_key] = ngx.unescape_uri(v) + end + end + + core.log.info("new span context: trace id: ", trace_id, + ", span id: ", request_span_id, + ", parent span id: ", parent_span_id) + + trace_id = from_hex(trace_id) + parent_span_id = from_hex(parent_span_id) + request_span_id = from_hex(request_span_id) + + return new_span_context(trace_id, request_span_id, parent_span_id, + baggage) + end +end + +local function new_injector() + return function(span_context, headers) + -- We want to remove headers if already present + headers["x-b3-traceid"] = to_hex(span_context.trace_id) + headers["x-b3-parentspanid"] = span_context.parent_id + and to_hex(span_context.parent_id) or nil + headers["x-b3-spanid"] = to_hex(span_context.span_id) + headers["x-b3-sampled"] = span_context:get_baggage_item("x-b3-sampled") + for key, value in span_context:each_baggage_item() do + -- skip x-b3-sampled baggage + if key ~= "x-b3-sampled" then + -- XXX: https://github.com/opentracing/specification/issues/117 + headers["uberctx-"..key] = ngx.escape_uri(value) + end + end + end +end + +return { + new_extractor = new_extractor, + new_injector = new_injector, +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/random_sampler.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/random_sampler.lua new file mode 100644 index 0000000..d458bce --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/random_sampler.lua @@ -0,0 +1,37 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local assert = assert +local type = type +local setmetatable = setmetatable +local math = math + + +local _M = {} +local mt = { __index = _M } + +function _M.new(conf) + return setmetatable({}, mt) +end + +function _M.sample(self, sample_ratio) + assert(type(sample_ratio) == "number" and + sample_ratio >= 0 and sample_ratio <= 1, "invalid sample_ratio") + return math.random() < sample_ratio +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/reporter.lua b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/reporter.lua new file mode 100644 index 0000000..2edf1c1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/plugins/zipkin/reporter.lua @@ -0,0 +1,184 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local resty_http = require "resty.http" +local to_hex = require "resty.string".to_hex +local cjson = require "cjson.safe".new() +cjson.encode_number_precision(16) +local assert = assert +local type = type +local setmetatable = setmetatable +local math = math +local tostring = tostring +local batch_processor = require("apisix.utils.batch-processor") +local core = require("apisix.core") + +local _M = {} +local mt = { __index = _M } + + +local span_kind_map = { + client = "CLIENT", + server = "SERVER", + producer = "PRODUCER", + consumer = "CONSUMER", +} + + +function _M.new(conf) + local endpoint = conf.endpoint + local service_name = conf.service_name + local server_port = conf.server_port + local server_addr = conf.server_addr + assert(type(endpoint) == "string", "invalid http endpoint") + return setmetatable({ + endpoint = endpoint, + service_name = service_name, + server_addr = server_addr, + server_port = server_port, + pending_spans_n = 0, + route_id = conf.route_id + }, mt) +end + + +function _M.report(self, span) + if span:get_baggage_item("x-b3-sampled") == "0" then + return + end + local span_context = span:context() + + local zipkin_tags = {} + for k, v in span:each_tag() do + -- Zipkin tag values should be strings + zipkin_tags[k] = tostring(v) + end + + local span_kind = zipkin_tags["span.kind"] + zipkin_tags["span.kind"] = nil + + local localEndpoint = { + serviceName = self.service_name, + ipv4 = self.server_addr, + port = self.server_port, + -- TODO: ip/port from ngx.var.server_name/ngx.var.server_port? 
+ } + + local remoteEndpoint do + local peer_port = span:get_tag "peer.port" -- get as number + if peer_port then + zipkin_tags["peer.port"] = nil + remoteEndpoint = { + ipv4 = zipkin_tags["peer.ipv4"], + -- ipv6 = zipkin_tags["peer.ipv6"], + port = peer_port, -- port is *not* optional + } + zipkin_tags["peer.ipv4"] = nil + zipkin_tags["peer.ipv6"] = nil + else + remoteEndpoint = cjson.null + end + end + + local zipkin_span = { + traceId = to_hex(span_context.trace_id), + name = span.name, + parentId = span_context.parent_id and + to_hex(span_context.parent_id) or nil, + id = to_hex(span_context.span_id), + kind = span_kind_map[span_kind], + timestamp = span.timestamp * 1000000, + duration = math.floor(span.duration * 1000000), -- zipkin wants integer + -- TODO: debug? + localEndpoint = localEndpoint, + remoteEndpoint = remoteEndpoint, + tags = zipkin_tags, + annotations = span.logs + } + + self.pending_spans_n = self.pending_spans_n + 1 + if self.processor then + self.processor:push(zipkin_span) + end +end + + +local function send_span(pending_spans, report) + local httpc = resty_http.new() + local res, err = httpc:request_uri(report.endpoint, { + method = "POST", + headers = { + ["content-type"] = "application/json", + }, + body = pending_spans, + keepalive = 5000, + keepalive_pool = 5 + }) + + if not res then + -- for zipkin test + core.log.error("report zipkin span failed") + return nil, "failed: " .. err .. ", url: " .. report.endpoint + elseif res.status < 200 or res.status >= 300 then + return nil, "failed: " .. report.endpoint .. " " + .. res.status .. " " .. res.reason + end + + return true +end + + +function _M.init_processor(self) + local process_conf = { + name = "zipkin_report", + retry_delay = 1, + batch_max_size = 1000, + max_retry_count = 0, + buffer_duration = 60, + inactive_timeout = 5, + route_id = self.route_id, + server_addr = self.server_addr, + } + + local flush = function (entries, batch_max_size) + if not entries then + return true + end + + local pending_spans, err + if batch_max_size == 1 then + pending_spans, err = cjson.encode(entries[1]) + else + pending_spans, err = cjson.encode(entries) + end + + if not pending_spans then + return false, 'error occurred while encoding the data: ' .. err + end + + return send_span(pending_spans, self) + end + + local processor, err = batch_processor:new(flush, process_conf) + if not processor then + return false, "create processor error: " .. err + end + + self.processor = processor +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/pubsub/kafka.lua b/CloudronPackages/APISIX/apisix-source/apisix/pubsub/kafka.lua new file mode 100644 index 0000000..2cce1a0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/pubsub/kafka.lua @@ -0,0 +1,149 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local bconsumer = require("resty.kafka.basic-consumer") +local ffi = require("ffi") +local C = ffi.C +local tostring = tostring +local type = type +local ipairs = ipairs +local str_sub = string.sub + +ffi.cdef[[ + int64_t atoll(const char *num); +]] + + +local _M = {} + + +-- Handles the conversion of 64-bit integers in the lua-protobuf. +-- +-- Because of the limitations of luajit, we cannot use native 64-bit +-- numbers, so pb decode converts int64 to a string in #xxx format +-- to avoid loss of precision, by this function, we convert this +-- string to int64 cdata numbers. +local function pb_convert_to_int64(src) + if type(src) == "string" then + -- the format is #1234, so there is a small minimum length of 2 + if #src < 2 then + return 0 + end + return C.atoll(ffi.cast("char *", src) + 1) + else + return src + end +end + + +-- Takes over requests of type kafka upstream in the http_access phase. +function _M.access(api_ctx) + local pubsub, err = core.pubsub.new() + if not pubsub then + core.log.error("failed to initialize pubsub module, err: ", err) + core.response.exit(400) + return + end + + local up_nodes = api_ctx.matched_upstream.nodes + + -- kafka client broker-related configuration + local broker_list = {} + for i, node in ipairs(up_nodes) do + broker_list[i] = { + host = node.host, + port = node.port, + } + + if api_ctx.kafka_consumer_enable_sasl then + broker_list[i].sasl_config = { + mechanism = "PLAIN", + user = api_ctx.kafka_consumer_sasl_username, + password = api_ctx.kafka_consumer_sasl_password, + } + end + end + + local client_config = {refresh_interval = 30 * 60 * 1000} + if api_ctx.matched_upstream.tls then + client_config.ssl = true + client_config.ssl_verify = api_ctx.matched_upstream.tls.verify + end + + -- load and create the consumer instance when it is determined + -- that the websocket connection was created successfully + local consumer = bconsumer:new(broker_list, client_config) + + pubsub:on("cmd_kafka_list_offset", function (params) + -- The timestamp parameter uses a 64-bit integer, which is difficult + -- for luajit to handle well, so the int64_as_string option in + -- lua-protobuf is used here. Smaller numbers will be decoded as + -- lua number, while overly larger numbers will be decoded as strings + -- in the format #number, where the # symbol at the beginning of the + -- string will be removed and converted to int64_t with the atoll function. + local timestamp = pb_convert_to_int64(params.timestamp) + + local offset, err = consumer:list_offset(params.topic, params.partition, timestamp) + + if not offset then + return nil, "failed to list offset, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. err + end + + offset = tostring(offset) + return { + kafka_list_offset_resp = { + offset = str_sub(offset, 1, #offset - 2) + } + } + end) + + pubsub:on("cmd_kafka_fetch", function (params) + local offset = pb_convert_to_int64(params.offset) + + local ret, err = consumer:fetch(params.topic, params.partition, offset) + if not ret then + return nil, "failed to fetch message, topic: " .. params.topic .. + ", partition: " .. params.partition .. ", err: " .. 
err + end + + -- split into multiple messages when the amount of data in + -- a single batch is too large + local messages = ret.records + + -- special handling of int64 for luajit compatibility + for _, message in ipairs(messages) do + local timestamp = tostring(message.timestamp) + message.timestamp = str_sub(timestamp, 1, #timestamp - 2) + local offset = tostring(message.offset) + message.offset = str_sub(offset, 1, #offset - 2) + end + + return { + kafka_fetch_resp = { + messages = messages, + }, + } + end) + + -- start processing client commands + pubsub:wait() +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/router.lua b/CloudronPackages/APISIX/apisix-source/apisix/router.lua new file mode 100644 index 0000000..93b123e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/router.lua @@ -0,0 +1,131 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local http_route = require("apisix.http.route") +local apisix_upstream = require("apisix.upstream") +local core = require("apisix.core") +local str_lower = string.lower +local ipairs = ipairs + + +local _M = {version = 0.3} + + +local function filter(route) + route.orig_modifiedIndex = route.modifiedIndex + + route.has_domain = false + if not route.value then + return + end + + if route.value.host then + route.value.host = str_lower(route.value.host) + elseif route.value.hosts then + for i, v in ipairs(route.value.hosts) do + route.value.hosts[i] = str_lower(v) + end + end + + apisix_upstream.filter_upstream(route.value.upstream, route) + + core.log.info("filter route: ", core.json.delay_encode(route, true)) +end + + +-- attach common methods if the router doesn't provide its custom implementation +local function attach_http_router_common_methods(http_router) + if http_router.routes == nil then + http_router.routes = function () + if not http_router.user_routes then + return nil, nil + end + + local user_routes = http_router.user_routes + return user_routes.values, user_routes.conf_version + end + end + + if http_router.init_worker == nil then + http_router.init_worker = function (filter) + http_router.user_routes = http_route.init_worker(filter) + end + end +end + + +function _M.http_init_worker() + local conf = core.config.local_conf() + local router_http_name = "radixtree_uri" + local router_ssl_name = "radixtree_sni" + + if conf and conf.apisix and conf.apisix.router then + router_http_name = conf.apisix.router.http or router_http_name + router_ssl_name = conf.apisix.router.ssl or router_ssl_name + end + + local router_http = require("apisix.http.router." .. 
router_http_name) + attach_http_router_common_methods(router_http) + router_http.init_worker(filter) + _M.router_http = router_http + + local router_ssl = require("apisix.ssl.router." .. router_ssl_name) + router_ssl.init_worker() + _M.router_ssl = router_ssl + + _M.api = require("apisix.api_router") +end + + +function _M.stream_init_worker() + local router_ssl_name = "radixtree_sni" + + local router_stream = require("apisix.stream.router.ip_port") + router_stream.stream_init_worker(filter) + _M.router_stream = router_stream + + local router_ssl = require("apisix.ssl.router." .. router_ssl_name) + router_ssl.init_worker() + _M.router_ssl = router_ssl +end + + +function _M.ssls() + return _M.router_ssl.ssls() +end + +function _M.http_routes() + if not _M.router_http then + return nil, nil + end + return _M.router_http.routes() +end + +function _M.stream_routes() + -- maybe it's not inited. + if not _M.router_stream then + return nil, nil + end + return _M.router_stream.routes() +end + + +-- for test +_M.filter_test = filter + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/schema_def.lua b/CloudronPackages/APISIX/apisix-source/apisix/schema_def.lua new file mode 100644 index 0000000..d8b6208 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/schema_def.lua @@ -0,0 +1,1094 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local schema = require('apisix.core.schema') +local table_insert = table.insert +local table_concat = table.concat +local setmetatable = setmetatable +local error = error + +local _M = {version = 0.5} + + +local plugins_schema = { + type = "object" +} + +_M.anonymous_consumer_schema = { + type = "string", + minLength = "1" +} + +local id_schema = { + anyOf = { + { + type = "string", minLength = 1, maxLength = 64, + pattern = [[^[a-zA-Z0-9-_.]+$]] + }, + {type = "integer", minimum = 1} + } +} + +local host_def_pat = "^\\*?[0-9a-zA-Z-._\\[\\]:]+$" +local host_def = { + type = "string", + pattern = host_def_pat, +} +_M.host_def = host_def + + +local ipv4_seg = "([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])" +local ipv4_def_buf = {} +for i = 1, 4 do + table_insert(ipv4_def_buf, ipv4_seg) +end +local ipv4_def = table_concat(ipv4_def_buf, [[\.]]) +-- There is false negative for ipv6/cidr. For instance, `:/8` will be valid. +-- It is fine as the correct regex will be too complex. +local ipv6_def = "([a-fA-F0-9]{0,4}:){1,8}(:[a-fA-F0-9]{0,4}){0,8}" + .. "([a-fA-F0-9]{0,4})?" +local ip_def = { + {title = "IPv4", type = "string", format = "ipv4"}, + {title = "IPv4/CIDR", type = "string", pattern = "^" .. ipv4_def .. "/([12]?[0-9]|3[0-2])$"}, + {title = "IPv6", type = "string", format = "ipv6"}, + {title = "IPv6/CIDR", type = "string", pattern = "^" .. ipv6_def .. 
"/[0-9]{1,3}$"}, +} +_M.ip_def = ip_def + + +_M.uri_def = {type = "string", pattern = [=[^[^\/]+:\/\/([\da-zA-Z.-]+|\[[\da-fA-F:]+\])(:\d+)?]=]} + + +local timestamp_def = { + type = "integer", +} + +local remote_addr_def = { + description = "client IP", + type = "string", + anyOf = ip_def, +} + + +local label_value_def = { + description = "value of label", + type = "string", + pattern = [[^\S+$]], + maxLength = 256, + minLength = 1 +} +_M.label_value_def = label_value_def + + +local labels_def = { + description = "key/value pairs to specify attributes", + type = "object", + patternProperties = { + [".*"] = label_value_def + }, +} + + +local rule_name_def = { + type = "string", + maxLength = 100, + minLength = 1, +} + + +local desc_def = { + type = "string", + maxLength = 256, +} + + +local timeout_def = { + type = "object", + properties = { + connect = {type = "number", exclusiveMinimum = 0}, + send = {type = "number", exclusiveMinimum = 0}, + read = {type = "number", exclusiveMinimum = 0}, + }, + required = {"connect", "send", "read"}, +} + + +local health_checker = { + type = "object", + properties = { + active = { + type = "object", + properties = { + type = { + type = "string", + enum = {"http", "https", "tcp"}, + default = "http" + }, + timeout = {type = "number", default = 1}, + concurrency = {type = "integer", default = 10}, + host = host_def, + port = { + type = "integer", + minimum = 1, + maximum = 65535 + }, + http_path = {type = "string", default = "/"}, + https_verify_certificate = {type = "boolean", default = true}, + healthy = { + type = "object", + properties = { + interval = {type = "integer", minimum = 1, default = 1}, + http_statuses = { + type = "array", + minItems = 1, + items = { + type = "integer", + minimum = 200, + maximum = 599 + }, + uniqueItems = true, + default = {200, 302} + }, + successes = { + type = "integer", + minimum = 1, + maximum = 254, + default = 2 + } + } + }, + unhealthy = { + type = "object", + properties = { + interval = {type = "integer", minimum = 1, default = 1}, + http_statuses = { + type = "array", + minItems = 1, + items = { + type = "integer", + minimum = 200, + maximum = 599 + }, + uniqueItems = true, + default = {429, 404, 500, 501, 502, 503, 504, 505} + }, + http_failures = { + type = "integer", + minimum = 1, + maximum = 254, + default = 5 + }, + tcp_failures = { + type = "integer", + minimum = 1, + maximum = 254, + default = 2 + }, + timeouts = { + type = "integer", + minimum = 1, + maximum = 254, + default = 3 + } + } + }, + req_headers = { + type = "array", + minItems = 1, + items = { + type = "string", + uniqueItems = true, + }, + } + } + }, + passive = { + type = "object", + properties = { + type = { + type = "string", + enum = {"http", "https", "tcp"}, + default = "http" + }, + healthy = { + type = "object", + properties = { + http_statuses = { + type = "array", + minItems = 1, + items = { + type = "integer", + minimum = 200, + maximum = 599, + }, + uniqueItems = true, + default = {200, 201, 202, 203, 204, 205, 206, 207, + 208, 226, 300, 301, 302, 303, 304, 305, + 306, 307, 308} + }, + successes = { + type = "integer", + minimum = 0, + maximum = 254, + default = 5 + } + } + }, + unhealthy = { + type = "object", + properties = { + http_statuses = { + type = "array", + minItems = 1, + items = { + type = "integer", + minimum = 200, + maximum = 599, + }, + uniqueItems = true, + default = {429, 500, 503} + }, + tcp_failures = { + type = "integer", + minimum = 0, + maximum = 254, + default = 2 + }, + timeouts = { + type = "integer", + 
minimum = 0, + maximum = 254, + default = 7 + }, + http_failures = { + type = "integer", + minimum = 0, + maximum = 254, + default = 5 + }, + } + } + }, + } + }, + anyOf = { + {required = {"active"}}, + {required = {"active", "passive"}}, + }, + additionalProperties = false, +} + + +local nodes_schema = { + anyOf = { + { + type = "object", + patternProperties = { + [".*"] = { + description = "weight of node", + type = "integer", + minimum = 0, + } + }, + }, + { + type = "array", + items = { + type = "object", + properties = { + host = host_def, + port = { + description = "port of node", + type = "integer", + minimum = 1, + maximum = 65535 + }, + weight = { + description = "weight of node", + type = "integer", + minimum = 0, + }, + priority = { + description = "priority of node", + type = "integer", + default = 0, + }, + metadata = { + description = "metadata of node", + type = "object", + } + }, + required = {"host", "weight"}, + }, + } + } +} +_M.discovery_nodes = { + type = "array", + items = { + type = "object", + properties = { + host = { + description = "domain or ip", + }, + port = { + description = "port of node", + type = "integer", + minimum = 1, + maximum = 65535 + }, + weight = { + description = "weight of node", + type = "integer", + minimum = 0, + }, + priority = { + description = "priority of node", + type = "integer", + }, + metadata = { + description = "metadata of node", + type = "object", + } + }, + -- nodes from DNS discovery may not contain port + required = {"host", "weight"}, + }, +} + + +local certificate_scheme = { + type = "string", minLength = 128, maxLength = 64*1024 +} + + +local private_key_schema = { + type = "string", minLength = 128, maxLength = 64*1024 +} + + +local upstream_schema = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + nodes = nodes_schema, + retries = { + type = "integer", + minimum = 0, + }, + retry_timeout = { + type = "number", + minimum = 0, + }, + timeout = timeout_def, + tls = { + type = "object", + properties = { + client_cert_id = id_schema, + client_cert = certificate_scheme, + client_key = private_key_schema, + verify = { + type = "boolean", + description = "Turn on server certificate verification, ".. + "currently only kafka upstream is supported", + default = false, + }, + }, + dependencies = { + client_cert = {required = {"client_key"}}, + client_key = {required = {"client_cert"}}, + client_cert_id = { + ["not"] = {required = {"client_cert", "client_key"}} + } + } + }, + keepalive_pool = { + type = "object", + properties = { + size = { + type = "integer", + default = 320, + minimum = 1, + }, + idle_timeout = { + type = "number", + default = 60, + minimum = 0, + }, + requests = { + type = "integer", + default = 1000, + minimum = 1, + }, + }, + }, + type = { + description = "algorithms of load balancing", + type = "string", + default = "roundrobin", + }, + checks = health_checker, + hash_on = { + type = "string", + default = "vars", + enum = { + "vars", + "header", + "cookie", + "consumer", + "vars_combinations", + }, + }, + key = { + description = "the key of chash for dynamic load balancing", + type = "string", + }, + scheme = { + default = "http", + enum = {"grpc", "grpcs", "http", "https", "tcp", "tls", "udp", + "kafka"}, + description = "The scheme of the upstream." .. + " For L7 proxy, it can be one of grpc/grpcs/http/https." .. 
+ " For L4 proxy, it can be one of tcp/tls/udp." .. + " For specific protocols, it can be kafka." + }, + discovery_type = { + description = "discovery type", + type = "string", + }, + discovery_args = { + type = "object", + properties = { + namespace_id = { + description = "namespace id", + type = "string", + }, + group_name = { + description = "group name", + type = "string", + }, + } + }, + pass_host = { + description = "mod of host passing", + type = "string", + enum = {"pass", "node", "rewrite"}, + default = "pass" + }, + upstream_host = host_def, + service_name = { + type = "string", + maxLength = 256, + minLength = 1 + }, + }, + oneOf = { + {required = {"nodes"}}, + {required = {"service_name", "discovery_type"}}, + }, + additionalProperties = false +} + +-- TODO: add more nginx variable support +_M.upstream_hash_vars_schema = { + type = "string", + pattern = [[^((uri|server_name|server_addr|request_uri|remote_port]] + .. [[|remote_addr|query_string|host|hostname|mqtt_client_id)]] + .. [[|arg_[0-9a-zA-z_-]+)$]], +} + +-- validates header name, cookie name. +-- a-z, A-Z, 0-9, '_' and '-' are allowed. +-- when "underscores_in_headers on", header name allow '_'. +-- http://nginx.org/en/docs/http/ngx_http_core_module.html#underscores_in_headers +_M.upstream_hash_header_schema = { + type = "string", + pattern = [[^[a-zA-Z0-9-_]+$]] +} + +-- validates string only +_M.upstream_hash_vars_combinations_schema = { + type = "string" +} + + +local method_schema = { + description = "HTTP method", + type = "string", + enum = {"GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", + "OPTIONS", "CONNECT", "TRACE", "PURGE"}, +} +_M.method_schema = method_schema + + +_M.route = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + uri = {type = "string", minLength = 1, maxLength = 4096}, + uris = { + type = "array", + items = { + description = "HTTP uri", + type = "string", + }, + minItems = 1, + uniqueItems = true, + }, + priority = {type = "integer", default = 0}, + + methods = { + type = "array", + items = method_schema, + uniqueItems = true, + }, + host = host_def, + hosts = { + type = "array", + items = host_def, + minItems = 1, + uniqueItems = true, + }, + remote_addr = remote_addr_def, + remote_addrs = { + type = "array", + items = remote_addr_def, + minItems = 1, + uniqueItems = true, + }, + timeout = timeout_def, + vars = { + type = "array", + }, + filter_func = { + type = "string", + minLength = 10, + pattern = [[^function]], + }, + + -- The 'script' fields below are used by dashboard for plugin orchestration + script = {type = "string", minLength = 10, maxLength = 102400}, + script_id = id_schema, + + plugins = plugins_schema, + plugin_config_id = id_schema, + + upstream = upstream_schema, + + service_id = id_schema, + upstream_id = id_schema, + + enable_websocket = { + description = "enable websocket for request", + type = "boolean", + }, + + status = { + description = "route status, 1 to enable, 0 to disable", + type = "integer", + enum = {1, 0}, + default = 1 + }, + }, + allOf = { + { + oneOf = { + {required = {"uri"}}, + {required = {"uris"}}, + }, + }, + { + oneOf = { + {["not"] = { + anyOf = { + {required = {"host"}}, + {required = {"hosts"}}, + } + }}, + {required = {"host"}}, + {required = {"hosts"}} + }, + }, + { + oneOf = { + {["not"] = { + anyOf = { + {required = {"remote_addr"}}, + {required = {"remote_addrs"}}, + } + }}, + 
{required = {"remote_addr"}}, + {required = {"remote_addrs"}} + }, + }, + }, + anyOf = { + {required = {"plugins", "uri"}}, + {required = {"upstream", "uri"}}, + {required = {"upstream_id", "uri"}}, + {required = {"service_id", "uri"}}, + {required = {"plugins", "uris"}}, + {required = {"upstream", "uris"}}, + {required = {"upstream_id", "uris"}}, + {required = {"service_id", "uris"}}, + {required = {"script", "uri"}}, + {required = {"script", "uris"}}, + }, + ["not"] = { + anyOf = { + {required = {"script", "plugins"}}, + {required = {"script", "plugin_config_id"}}, + } + }, + additionalProperties = false, +} + + +_M.service = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + plugins = plugins_schema, + upstream = upstream_schema, + upstream_id = id_schema, + script = {type = "string", minLength = 10, maxLength = 102400}, + enable_websocket = { + description = "enable websocket for request", + type = "boolean", + }, + hosts = { + type = "array", + items = host_def, + minItems = 1, + uniqueItems = true, + }, + }, + additionalProperties = false, +} + + +_M.consumer = { + type = "object", + properties = { + -- metadata + username = { + type = "string", minLength = 1, maxLength = rule_name_def.maxLength, + pattern = [[^[a-zA-Z0-9_\-]+$]] + }, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + group_id = id_schema, + plugins = plugins_schema, + }, + required = {"username"}, + additionalProperties = false, +} + +_M.credential = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + plugins = { + type = "object", + maxProperties = 1, + }, + }, + additionalProperties = false, +} + +_M.upstream = upstream_schema + + +local secret_uri_schema = { + type = "string", + pattern = "^\\$(secret|env|ENV)://" +} + + +_M.ssl = { + type = "object", + properties = { + -- metadata + id = id_schema, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + type = { + description = "ssl certificate type, " .. + "server to server certificate, " .. 
+ "client to client certificate for upstream", + type = "string", + default = "server", + enum = {"server", "client"} + }, + cert = { + oneOf = { + certificate_scheme, + secret_uri_schema + } + }, + key = { + oneOf = { + private_key_schema, + secret_uri_schema + } + }, + sni = { + type = "string", + pattern = host_def_pat, + }, + snis = { + type = "array", + items = { + type = "string", + pattern = host_def_pat, + }, + minItems = 1, + }, + certs = { + type = "array", + items = { + oneOf = { + certificate_scheme, + secret_uri_schema + } + } + }, + keys = { + type = "array", + items = { + oneOf = { + private_key_schema, + secret_uri_schema + } + } + }, + client = { + type = "object", + properties = { + ca = certificate_scheme, + depth = { + type = "integer", + minimum = 0, + default = 1, + }, + skip_mtls_uri_regex = { + type = "array", + minItems = 1, + uniqueItems = true, + items = { + description = "uri regular expression to skip mtls", + type = "string", + } + }, + }, + required = {"ca"}, + }, + status = { + description = "ssl status, 1 to enable, 0 to disable", + type = "integer", + enum = {1, 0}, + default = 1 + }, + ssl_protocols = { + description = "set ssl protocols", + type = "array", + maxItems = 3, + uniqueItems = true, + items = { + enum = {"TLSv1.1", "TLSv1.2", "TLSv1.3"} + }, + }, + }, + ["if"] = { + properties = { + type = { + enum = {"server"}, + }, + }, + }, + ["then"] = { + oneOf = { + {required = {"sni", "key", "cert"}}, + {required = {"snis", "key", "cert"}} + } + }, + ["else"] = {required = {"key", "cert"}}, + additionalProperties = false, +} + + + +-- TODO: Design a plugin resource registration framework used by plugins and move the proto +-- resource to grpc-transcode plugin, which should not be an APISIX core resource +_M.proto = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + content = { + type = "string", minLength = 1, maxLength = 1024*1024 + } + }, + required = {"content"}, + additionalProperties = false, +} + + +_M.global_rule = { + type = "object", + properties = { + -- metadata + id = id_schema, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + plugins = plugins_schema, + }, + required = {"id", "plugins"}, + additionalProperties = false, +} + + +local xrpc_protocol_schema = { + type = "object", + properties = { + name = { + type = "string", + }, + superior_id = id_schema, + conf = { + description = "protocol-specific configuration", + type = "object", + }, + logger = { + type = "array", + items = { + properties = { + name = { + type = "string", + }, + filter = { + description = "logger filter rules", + type = "array", + }, + conf = { + description = "logger plugin configuration", + type = "object", + }, + }, + dependencies = { + name = {"conf"}, + }, + additionalProperties = false, + }, + }, + + }, + required = {"name"} +} + + +_M.stream_route = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + remote_addr = remote_addr_def, + server_addr = { + description = "server IP", + type = "string", + anyOf = ip_def, + }, + server_port = { + description = "server port", + type = "integer", + minimum = 1, + maximum = 65535 + }, + sni = { + description = "server name indication", + type = "string", + pattern = 
host_def_pat, + }, + upstream = upstream_schema, + upstream_id = id_schema, + service_id = id_schema, + plugins = plugins_schema, + protocol = xrpc_protocol_schema, + }, + additionalProperties = false, +} + + +_M.plugins = { + type = "array", + items = { + type = "object", + properties = { + name = { + type = "string", + minLength = 1, + }, + stream = { + type = "boolean" + }, + additionalProperties = false, + }, + required = {"name"} + } +} + + +_M.plugin_config = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = { + type = "string", + }, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + plugins = plugins_schema, + }, + required = {"id", "plugins"}, + additionalProperties = false, +} + + +_M.consumer_group = { + type = "object", + properties = { + -- metadata + id = id_schema, + name = rule_name_def, + desc = desc_def, + labels = labels_def, + create_time = timestamp_def, + update_time = timestamp_def, + + -- properties + plugins = plugins_schema, + }, + required = {"id", "plugins"}, + additionalProperties = false, +} + + +_M.id_schema = id_schema + + +_M.plugin_injected_schema = { + ["$comment"] = "this is a mark for our injected plugin schema", + _meta = { + type = "object", + properties = { + disable = { + type = "boolean", + }, + error_response = { + oneOf = { + { type = "string" }, + { type = "object" }, + } + }, + priority = { + description = "priority of plugins by customized order", + type = "integer", + }, + filter = { + description = "filter determines whether the plugin ".. + "needs to be executed at runtime", + type = "array", + }, + pre_function = { + description = "function to be executed in each phase " .. + "before execution of plugins. The pre_function will have access " .. + "to two arguments: `conf` and `ctx`.", + type = "string", + }, + }, + additionalProperties = false, + } +} + + +setmetatable(_M, { + __index = schema, + __newindex = function() error("no modification allowed") end, +}) + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/script.lua b/CloudronPackages/APISIX/apisix-source/apisix/script.lua new file mode 100644 index 0000000..49f13a9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/script.lua @@ -0,0 +1,59 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local loadstring = loadstring +local error = error + + +local _M = {} + + +function _M.load(route, api_ctx) + local script = route.value.script + if script == nil or script == "" then + error("missing valid script") + end + + local loadfun, err = loadstring(script, "route#" .. route.value.id) + if not loadfun then + error("failed to load script: " .. 
err .. " script: " .. script) + return nil + end + api_ctx.script_obj = loadfun() +end + + +function _M.run(phase, api_ctx) + local obj = api_ctx and api_ctx.script_obj + if not obj then + core.log.error("missing loaded script object") + return api_ctx + end + + core.log.info("loaded script_obj: ", core.json.delay_encode(obj, true)) + + local phase_func = obj[phase] + if phase_func then + phase_func(api_ctx) + end + + return api_ctx +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/secret.lua b/CloudronPackages/APISIX/apisix-source/apisix/secret.lua new file mode 100644 index 0000000..60e575b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/secret.lua @@ -0,0 +1,227 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local require = require +local core = require("apisix.core") +local string = require("apisix.core.string") + +local find = string.find +local sub = string.sub +local upper = string.upper +local byte = string.byte +local type = type +local pcall = pcall +local pairs = pairs + +local _M = {} + + +local PREFIX = "$secret://" +local secrets + +local function check_secret(conf) + local idx = find(conf.id or "", "/") + if not idx then + return false, "no secret id" + end + local manager = sub(conf.id, 1, idx - 1) + + local ok, secret_manager = pcall(require, "apisix.secret." .. manager) + if not ok then + return false, "secret manager not exits, manager: " .. manager + end + + return core.schema.check(secret_manager.schema, conf) +end + + + local function secret_kv(manager, confid) + local secret_values + secret_values = core.config.fetch_created_obj("/secrets") + if not secret_values or not secret_values.values then + return nil + end + + local secret = secret_values:get(manager .. "/" .. confid) + if not secret then + return nil + end + + return secret.value +end + + +function _M.secrets() + if not secrets then + return nil, nil + end + + return secrets.values, secrets.conf_version +end + + +function _M.init_worker() + local cfg = { + automatic = true, + checker = check_secret, + } + + secrets = core.config.new("/secrets", cfg) +end + + +local function check_secret_uri(secret_uri) + -- Avoid the error caused by has_prefix to cause a crash. + if type(secret_uri) ~= "string" then + return false, "error secret_uri type: " .. type(secret_uri) + end + + if not string.has_prefix(secret_uri, PREFIX) and + not string.has_prefix(upper(secret_uri), core.env.PREFIX) then + return false, "error secret_uri prefix: " .. 
secret_uri + end + + return true +end + +_M.check_secret_uri = check_secret_uri + + +local function parse_secret_uri(secret_uri) + local is_secret_uri, err = check_secret_uri(secret_uri) + if not is_secret_uri then + return is_secret_uri, err + end + + local path = sub(secret_uri, #PREFIX + 1) + local idx1 = find(path, "/") + if not idx1 then + return nil, "error format: no secret manager" + end + local manager = sub(path, 1, idx1 - 1) + + local idx2 = find(path, "/", idx1 + 1) + if not idx2 then + return nil, "error format: no secret conf id" + end + local confid = sub(path, idx1 + 1, idx2 - 1) + + local key = sub(path, idx2 + 1) + if key == "" then + return nil, "error format: no secret key id" + end + + local opts = { + manager = manager, + confid = confid, + key = key + } + return opts +end + + +local function fetch_by_uri(secret_uri) + core.log.info("fetching data from secret uri: ", secret_uri) + local opts, err = parse_secret_uri(secret_uri) + if not opts then + return nil, err + end + + local conf = secret_kv(opts.manager, opts.confid) + if not conf then + return nil, "no secret conf, secret_uri: " .. secret_uri + end + + local ok, sm = pcall(require, "apisix.secret." .. opts.manager) + if not ok then + return nil, "no secret manager: " .. opts.manager + end + + local value, err = sm.get(conf, opts.key) + if err then + return nil, err + end + + return value +end + +-- for test +_M.fetch_by_uri = fetch_by_uri + + +local function fetch(uri) + -- do a quick filter to improve retrieval speed + if byte(uri, 1, 1) ~= byte('$') then + return nil + end + + local val, err + if string.has_prefix(upper(uri), core.env.PREFIX) then + val, err = core.env.fetch_by_uri(uri) + elseif string.has_prefix(uri, PREFIX) then + val, err = fetch_by_uri(uri) + end + + if err then + core.log.error("failed to fetch secret value: ", err) + return + end + + return val +end + + +local secrets_lrucache = core.lrucache.new({ + ttl = 300, count = 512 +}) + +local fetch_secrets +do + local retrieve_refs + function retrieve_refs(refs) + for k, v in pairs(refs) do + local typ = type(v) + if typ == "string" then + refs[k] = fetch(v) or v + elseif typ == "table" then + retrieve_refs(v) + end + end + return refs + end + + local function retrieve(refs) + core.log.info("retrieve secrets refs") + + local new_refs = core.table.deepcopy(refs) + return retrieve_refs(new_refs) + end + + function fetch_secrets(refs, cache, key, version) + if not refs or type(refs) ~= "table" then + return nil + end + if not cache then + return retrieve(refs) + end + return secrets_lrucache(key, version, retrieve, refs) + end +end + +_M.fetch_secrets = fetch_secrets + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/secret/aws.lua b/CloudronPackages/APISIX/apisix-source/apisix/secret/aws.lua new file mode 100644 index 0000000..af2e045 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/secret/aws.lua @@ -0,0 +1,140 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- AWS Tools. +require("resty.aws.config") -- to read env vars before initing aws module + +local core = require("apisix.core") +local http = require("resty.http") +local aws = require("resty.aws") +local aws_instance + +local sub = core.string.sub +local find = core.string.find +local env = core.env +local unpack = unpack + +local schema = { + type = "object", + properties = { + access_key_id = { + type = "string", + }, + secret_access_key = { + type = "string", + }, + session_token = { + type = "string", + }, + region = { + type = "string", + default = "us-east-1", + }, + endpoint_url = core.schema.uri_def, + }, + required = {"access_key_id", "secret_access_key"}, +} + +local _M = { + schema = schema +} + +local function make_request_to_aws(conf, key) + if not aws_instance then + aws_instance = aws() + end + + local region = conf.region + + local access_key_id = env.fetch_by_uri(conf.access_key_id) or conf.access_key_id + + local secret_access_key = env.fetch_by_uri(conf.secret_access_key) or conf.secret_access_key + + local session_token = env.fetch_by_uri(conf.session_token) or conf.session_token + + local credentials = aws_instance:Credentials({ + accessKeyId = access_key_id, + secretAccessKey = secret_access_key, + sessionToken = session_token, + }) + + local default_endpoint = "https://secretsmanager." .. region .. ".amazonaws.com" + local scheme, host, port, _, _ = unpack(http:parse_uri(conf.endpoint_url or default_endpoint)) + local endpoint = scheme .. "://" .. host + + local sm = aws_instance:SecretsManager({ + credentials = credentials, + endpoint = endpoint, + region = region, + port = port, + }) + + local res, err = sm:getSecretValue({ + SecretId = key, + VersionStage = "AWSCURRENT", + }) + + if not res then + return nil, err + end + + if res.status ~= 200 then + local data = core.json.encode(res.body) + if data then + return nil, "invalid status code " .. res.status .. ", " .. data + end + + return nil, "invalid status code " .. res.status + end + + return res.body.SecretString +end + +-- key is the aws secretId +function _M.get(conf, key) + core.log.info("fetching data from aws for key: ", key) + + local idx = find(key, '/') + + local main_key = idx and sub(key, 1, idx - 1) or key + if main_key == "" then + return nil, "can't find main key, key: " .. key + end + + local sub_key = idx and sub(key, idx + 1) or nil + + core.log.info("main: ", main_key, sub_key and ", sub: " .. sub_key or "") + + local res, err = make_request_to_aws(conf, main_key) + if not res then + return nil, "failed to retrtive data from aws secret manager: " .. err + end + + if not sub_key then + return res + end + + local data, err = core.json.decode(res) + if not data then + return nil, "failed to decode result, res: " .. res .. ", err: " .. 
err + end + + return data[sub_key] +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/secret/gcp.lua b/CloudronPackages/APISIX/apisix-source/apisix/secret/gcp.lua new file mode 100644 index 0000000..6b6e661 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/secret/gcp.lua @@ -0,0 +1,202 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- GCP Tools. +local core = require("apisix.core") +local http = require("resty.http") +local google_oauth = require("apisix.utils.google-cloud-oauth") + +local str_sub = core.string.sub +local str_find = core.string.find +local decode_base64 = ngx.decode_base64 + +local lrucache = core.lrucache.new({ ttl = 300, count = 8 }) + +local schema = { + type = "object", + properties = { + auth_config = { + type = "object", + properties = { + client_email = { type = "string" }, + private_key = { type = "string" }, + project_id = { type = "string" }, + token_uri = { + type = "string", + default = "https://oauth2.googleapis.com/token" + }, + scope = { + type = "array", + items = { + type = "string" + }, + default = { + "https://www.googleapis.com/auth/cloud-platform" + } + }, + entries_uri = { + type = "string", + default = "https://secretmanager.googleapis.com/v1" + }, + }, + required = { "client_email", "private_key", "project_id" } + }, + ssl_verify = { + type = "boolean", + default = true + }, + auth_file = { type = "string" }, + }, + oneOf = { + { required = { "auth_config" } }, + { required = { "auth_file" } }, + }, +} + +local _M = { + schema = schema +} + +local function fetch_oauth_conf(conf) + if conf.auth_config then + return conf.auth_config + end + + local file_content, err = core.io.get_file(conf.auth_file) + if not file_content then + return nil, "failed to read configuration, file: " .. conf.auth_file .. ", err: " .. err + end + + local config_tab, err = core.json.decode(file_content) + if not config_tab then + return nil, "config parse failure, data: " .. file_content .. ", err: " .. err + end + + local config = { + auth_config = { + client_email = config_tab.client_email, + private_key = config_tab.private_key, + project_id = config_tab.project_id + } + } + + local ok, err = core.schema.check(schema, config) + if not ok then + return nil, "config parse failure, file: " .. conf.auth_file .. ", err: " .. err + end + + return config_tab +end + + +local function get_secret(oauth, secrets_id) + local httpc = http.new() + + local access_token = oauth:generate_access_token() + if not access_token then + return nil, "failed to get google oauth token" + end + + local entries_uri = oauth.entries_uri .. "/projects/" .. oauth.project_id + .. "/secrets/" .. secrets_id .. 
"/versions/latest:access" + + local res, err = httpc:request_uri(entries_uri, { + ssl_verify = oauth.ssl_verify, + method = "GET", + headers = { + ["Content-Type"] = "application/json", + ["Authorization"] = (oauth.access_token_type or "Bearer") .. " " .. access_token, + }, + }) + + if not res then + return nil, err + end + + if res.status ~= 200 then + return nil, res.body + end + + local body, err = core.json.decode(res.body) + if not body then + return nil, "failed to parse response data, " .. err + end + + local payload = body.payload + if not payload then + return nil, "invalid payload" + end + + return decode_base64(payload.data) +end + + +local function make_request_to_gcp(conf, secrets_id) + local auth_config, err = fetch_oauth_conf(conf) + if not auth_config then + return nil, err + end + + local lru_key = auth_config.client_email .. "#" .. auth_config.project_id + + local oauth, err = lrucache(lru_key, "gcp", google_oauth.new, auth_config, conf.ssl_verify) + if not oauth then + return nil, "failed to create oauth object, " .. err + end + + local secret, err = get_secret(oauth, secrets_id) + if not secret then + return nil, err + end + + return secret +end + + +function _M.get(conf, key) + core.log.info("fetching data from gcp for key: ", key) + + local idx = str_find(key, '/') + + local main_key = idx and str_sub(key, 1, idx - 1) or key + if main_key == "" then + return nil, "can't find main key, key: " .. key + end + + local sub_key = idx and str_sub(key, idx + 1) + + core.log.info("main: ", main_key, sub_key and ", sub: " .. sub_key or "") + + local res, err = make_request_to_gcp(conf, main_key) + if not res then + return nil, "failed to retrtive data from gcp secret manager: " .. err + end + + if not sub_key then + return res + end + + local data, err = core.json.decode(res) + if not data then + return nil, "failed to decode result, err: " .. err + end + + return data[sub_key] +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/secret/vault.lua b/CloudronPackages/APISIX/apisix-source/apisix/secret/vault.lua new file mode 100644 index 0000000..40b5d40 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/secret/vault.lua @@ -0,0 +1,122 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Vault Tools. +-- Vault is an identity-based secrets and encryption management system. 
+ +local core = require("apisix.core") +local http = require("resty.http") + +local norm_path = require("pl.path").normpath + +local sub = core.string.sub +local rfind_char = core.string.rfind_char +local env = core.env + +local schema = { + type = "object", + properties = { + uri = core.schema.uri_def, + prefix = { + type = "string", + }, + token = { + type = "string", + }, + namespace = { + type = "string", + }, + }, + required = {"uri", "prefix", "token"}, +} + +local _M = { + schema = schema +} + +local function make_request_to_vault(conf, method, key, data) + local httpc = http.new() + -- config timeout or default to 5000 ms + httpc:set_timeout((conf.timeout or 5)*1000) + + local req_addr = conf.uri .. norm_path("/v1/" + .. conf.prefix .. "/" .. key) + + local token, _ = env.fetch_by_uri(conf.token) + if not token then + token = conf.token + end + + local headers = { + ["X-Vault-Token"] = token + } + if conf.namespace then + -- The namespace rule is referenced in + -- https://developer.hashicorp.com/vault/docs/enterprise/namespaces#vault-api-and-namespaces + headers["X-Vault-Namespace"] = conf.namespace + end + + local res, err = httpc:request_uri(req_addr, { + method = method, + headers = headers, + body = core.json.encode(data or {}, true) + }) + + if not res then + return nil, err + end + + return res.body +end + +-- key is the vault kv engine path +local function get(conf, key) + core.log.info("fetching data from vault for key: ", key) + + local idx = rfind_char(key, '/') + if not idx then + return nil, "error key format, key: " .. key + end + + local main_key = sub(key, 1, idx - 1) + if main_key == "" then + return nil, "can't find main key, key: " .. key + end + local sub_key = sub(key, idx + 1) + if sub_key == "" then + return nil, "can't find sub key, key: " .. key + end + + core.log.info("main: ", main_key, " sub: ", sub_key) + + local res, err = make_request_to_vault(conf, "GET", main_key) + if not res then + return nil, "failed to retrtive data from vault kv engine: " .. err + end + + local ret = core.json.decode(res) + if not ret or not ret.data then + return nil, "failed to decode result, res: " .. res + end + + return ret.data[sub_key] +end + +_M.get = get + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/ssl.lua b/CloudronPackages/APISIX/apisix-source/apisix/ssl.lua new file mode 100644 index 0000000..2bd7570 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/ssl.lua @@ -0,0 +1,342 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local secret = require("apisix.secret") +local ngx_ssl = require("ngx.ssl") +local ngx_ssl_client = require("ngx.ssl.clienthello") +local ffi = require("ffi") + +local C = ffi.C +local ngx_encode_base64 = ngx.encode_base64 +local ngx_decode_base64 = ngx.decode_base64 +local aes = require("resty.aes") +local str_lower = string.lower +local str_byte = string.byte +local assert = assert +local type = type +local ipairs = ipairs +local ngx_sub = ngx.re.sub + +ffi.cdef[[ +unsigned long ERR_peek_error(void); +void ERR_clear_error(void); +]] + +local cert_cache = core.lrucache.new { + ttl = 3600, count = 1024, +} + +local pkey_cache = core.lrucache.new { + ttl = 3600, count = 1024, +} + + +local _M = {} + + +function _M.server_name(clienthello) + local sni, err + if clienthello then + sni, err = ngx_ssl_client.get_client_hello_server_name() + else + sni, err = ngx_ssl.server_name() + end + if err then + return nil, err + end + + if not sni then + local local_conf = core.config.local_conf() + sni = core.table.try_read_attr(local_conf, "apisix", "ssl", "fallback_sni") + if not sni then + return nil + end + end + + sni = ngx_sub(sni, "\\.$", "", "jo") + sni = str_lower(sni) + return sni +end + + +function _M.session_hostname() + return ngx_ssl.session_hostname() +end + + +function _M.set_protocols_by_clienthello(ssl_protocols) + if ssl_protocols then + return ngx_ssl_client.set_protocols(ssl_protocols) + end + return true +end + + +local function init_iv_tbl(ivs) + local _aes_128_cbc_with_iv_tbl = core.table.new(2, 0) + local type_ivs = type(ivs) + + if type_ivs == "table" then + for _, iv in ipairs(ivs) do + local aes_with_iv = assert(aes:new(iv, nil, aes.cipher(128, "cbc"), {iv = iv})) + core.table.insert(_aes_128_cbc_with_iv_tbl, aes_with_iv) + end + elseif type_ivs == "string" then + local aes_with_iv = assert(aes:new(ivs, nil, aes.cipher(128, "cbc"), {iv = ivs})) + core.table.insert(_aes_128_cbc_with_iv_tbl, aes_with_iv) + end + + return _aes_128_cbc_with_iv_tbl +end + + +local _aes_128_cbc_with_iv_tbl_gde +local function get_aes_128_cbc_with_iv_gde(local_conf) + if _aes_128_cbc_with_iv_tbl_gde == nil then + local ivs = core.table.try_read_attr(local_conf, "apisix", "data_encryption", "keyring") + _aes_128_cbc_with_iv_tbl_gde = init_iv_tbl(ivs) + end + + return _aes_128_cbc_with_iv_tbl_gde +end + + + +local function encrypt(aes_128_cbc_with_iv, origin) + local encrypted = aes_128_cbc_with_iv:encrypt(origin) + if encrypted == nil then + core.log.error("failed to encrypt key[", origin, "] ") + return origin + end + + return ngx_encode_base64(encrypted) +end + +function _M.aes_encrypt_pkey(origin, field) + local local_conf = core.config.local_conf() + local aes_128_cbc_with_iv_tbl_gde = get_aes_128_cbc_with_iv_gde(local_conf) + local aes_128_cbc_with_iv_gde = aes_128_cbc_with_iv_tbl_gde[1] + + if not field then + if aes_128_cbc_with_iv_gde ~= nil and core.string.has_prefix(origin, "---") then + return encrypt(aes_128_cbc_with_iv_gde, origin) + end + else + if field == "data_encrypt" then + if aes_128_cbc_with_iv_gde ~= nil then + return encrypt(aes_128_cbc_with_iv_gde, origin) + end + end + end + return origin +end + + +local function aes_decrypt_pkey(origin, field) + if not field and core.string.has_prefix(origin, "---") then + return origin + end + + local local_conf = core.config.local_conf() + local aes_128_cbc_with_iv_tbl = get_aes_128_cbc_with_iv_gde(local_conf) + if #aes_128_cbc_with_iv_tbl == 0 then + return origin + end + + local decoded_key = 
ngx_decode_base64(origin) + if not decoded_key then + core.log.error("base64 decode ssl key failed. key[", origin, "] ") + return nil + end + + for _, aes_128_cbc_with_iv in ipairs(aes_128_cbc_with_iv_tbl) do + local decrypted = aes_128_cbc_with_iv:decrypt(decoded_key) + if decrypted then + return decrypted + end + + if C.ERR_peek_error() then + -- clean up the error queue of OpenSSL to prevent + -- normal requests from being interfered with. + C.ERR_clear_error() + end + end + + return nil, "decrypt ssl key failed" +end +_M.aes_decrypt_pkey = aes_decrypt_pkey + + +local function validate(cert, key) + local parsed_cert, err = ngx_ssl.parse_pem_cert(cert) + if not parsed_cert then + return nil, "failed to parse cert: " .. err + end + + if key == nil then + -- sometimes we only need to validate the cert + return true + end + + local err + key, err = aes_decrypt_pkey(key) + if not key then + core.log.error(err) + return nil, "failed to decrypt previous encrypted key" + end + + local parsed_key, err = ngx_ssl.parse_pem_priv_key(key) + if not parsed_key then + return nil, "failed to parse key: " .. err + end + + -- TODO: check if key & cert match + return true +end +_M.validate = validate + + +local function parse_pem_cert(sni, cert) + core.log.debug("parsing cert for sni: ", sni) + + local parsed, err = ngx_ssl.parse_pem_cert(cert) + return parsed, err +end + + +function _M.fetch_cert(sni, cert) + local parsed_cert, err = cert_cache(cert, nil, parse_pem_cert, sni, cert) + if not parsed_cert then + return false, err + end + + return parsed_cert +end + + +local function parse_pem_priv_key(sni, pkey) + core.log.debug("parsing priv key for sni: ", sni) + + local key, err = aes_decrypt_pkey(pkey) + if not key then + core.log.error(err) + return nil, err + end + local parsed, err = ngx_ssl.parse_pem_priv_key(key) + return parsed, err +end + + +function _M.fetch_pkey(sni, pkey) + local parsed_pkey, err = pkey_cache(pkey, nil, parse_pem_priv_key, sni, pkey) + if not parsed_pkey then + return false, err + end + + return parsed_pkey +end + + +local function support_client_verification() + return ngx_ssl.verify_client ~= nil +end +_M.support_client_verification = support_client_verification + + +function _M.check_ssl_conf(in_dp, conf) + if not in_dp then + local ok, err = core.schema.check(core.schema.ssl, conf) + if not ok then + return nil, "invalid configuration: " .. err + end + end + + if not secret.check_secret_uri(conf.cert) and + not secret.check_secret_uri(conf.key) then + + local ok, err = validate(conf.cert, conf.key) + if not ok then + return nil, err + end + end + + if conf.type == "client" then + return true + end + + local numcerts = conf.certs and #conf.certs or 0 + local numkeys = conf.keys and #conf.keys or 0 + if numcerts ~= numkeys then + return nil, "mismatched number of certs and keys" + end + + for i = 1, numcerts do + if not secret.check_secret_uri(conf.certs[i]) and + not secret.check_secret_uri(conf.keys[i]) then + + local ok, err = validate(conf.certs[i], conf.keys[i]) + if not ok then + return nil, "failed to handle cert-key pair[" .. i .. "]: " .. err + end + end + end + + if conf.client then + if not support_client_verification() then + return nil, "client tls verify unsupported" + end + + local ok, err = validate(conf.client.ca, nil) + if not ok then + return nil, "failed to validate client_cert: " .. err + end + end + + return true +end + + +function _M.get_status_request_ext() + core.log.debug("parsing status request extension ... 
") + local ext = ngx_ssl_client.get_client_hello_ext(5) + if not ext then + core.log.debug("no contains status request extension") + return false + end + local total_len = #ext + -- 1-byte for CertificateStatusType + -- 2-byte for zero-length "responder_id_list" + -- 2-byte for zero-length "request_extensions" + if total_len < 5 then + core.log.error("bad ssl client hello extension: ", + "extension data error") + return false + end + + -- CertificateStatusType + local status_type = str_byte(ext, 1) + if status_type == 1 then + core.log.debug("parsing status request extension ok: ", + "status_type is ocsp(1)") + return true + end + + return false +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/ssl/router/radixtree_sni.lua b/CloudronPackages/APISIX/apisix-source/apisix/ssl/router/radixtree_sni.lua new file mode 100644 index 0000000..ae7e5b2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/ssl/router/radixtree_sni.lua @@ -0,0 +1,332 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local get_request = require("resty.core.base").get_request +local router_new = require("apisix.utils.router").new +local core = require("apisix.core") +local apisix_ssl = require("apisix.ssl") +local secret = require("apisix.secret") +local ngx_ssl = require("ngx.ssl") +local config_util = require("apisix.core.config_util") +local ngx = ngx +local ipairs = ipairs +local type = type +local error = error +local str_find = core.string.find +local str_gsub = string.gsub +local str_lower = string.lower +local tostring = tostring +local ssl_certificates +local radixtree_router +local radixtree_router_ver + + +local _M = { + version = 0.1, + server_name = ngx_ssl.server_name, +} + + +local function create_router(ssl_items) + local ssl_items = ssl_items or {} + + local route_items = core.table.new(#ssl_items, 0) + local idx = 0 + + for _, ssl in config_util.iterate_values(ssl_items) do + if ssl.value ~= nil and ssl.value.type == "server" and + (ssl.value.status == nil or ssl.value.status == 1) then -- compatible with old version + + local j = 0 + local sni + if type(ssl.value.snis) == "table" and #ssl.value.snis > 0 then + sni = core.table.new(0, #ssl.value.snis) + for _, s in ipairs(ssl.value.snis) do + j = j + 1 + sni[j] = s:reverse() + end + else + sni = ssl.value.sni:reverse() + end + + idx = idx + 1 + route_items[idx] = { + paths = sni, + handler = function (api_ctx) + if not api_ctx then + return + end + api_ctx.matched_ssl = ssl + api_ctx.matched_sni = sni + end + } + end + end + + core.log.info("route items: ", core.json.delay_encode(route_items, true)) + -- for testing + if idx > 1 then + core.log.info("we have more than 1 ssl certs now") + end + local router, err = router_new(route_items) + if not router then + return nil, err + end + + return router +end + + +local function set_pem_ssl_key(sni, cert, pkey) + local r = get_request() + if r == nil then + return false, "no request found" + end + + local parsed_cert, err = apisix_ssl.fetch_cert(sni, cert) + if not parsed_cert then + return false, "failed to parse PEM cert: " .. err + end + + local ok, err = ngx_ssl.set_cert(parsed_cert) + if not ok then + return false, "failed to set PEM cert: " .. err + end + + local parsed_pkey, err = apisix_ssl.fetch_pkey(sni, pkey) + if not parsed_pkey then + return false, "failed to parse PEM priv key: " .. err + end + + ok, err = ngx_ssl.set_priv_key(parsed_pkey) + if not ok then + return false, "failed to set PEM priv key: " .. err + end + + return true +end +_M.set_pem_ssl_key = set_pem_ssl_key + + +-- export the set cert/key process so we can hook it in the other plugins +function _M.set_cert_and_key(sni, value) + local ok, err = set_pem_ssl_key(sni, value.cert, value.key) + if not ok then + return false, err + end + + -- multiple certificates support. + if value.certs then + for i = 1, #value.certs do + local cert = value.certs[i] + local key = value.keys[i] + + ok, err = set_pem_ssl_key(sni, cert, key) + if not ok then + return false, err + end + end + end + + return true +end + + +function _M.match_and_set(api_ctx, match_only, alt_sni) + local err + if not radixtree_router or + radixtree_router_ver ~= ssl_certificates.conf_version then + radixtree_router, err = create_router(ssl_certificates.values) + if not radixtree_router then + return false, "failed to create radixtree router: " .. 
err + end + radixtree_router_ver = ssl_certificates.conf_version + end + + local sni = alt_sni + if not sni then + sni, err = apisix_ssl.server_name() + if type(sni) ~= "string" then + local advise = "please check if the client requests via IP or uses an outdated " .. + "protocol. If you need to report an issue, " .. + "provide a packet capture file of the TLS handshake." + return false, "failed to find SNI: " .. (err or advise) + end + end + + core.log.debug("sni: ", sni) + + local sni_rev = sni:reverse() + local ok = radixtree_router:dispatch(sni_rev, nil, api_ctx) + if not ok then + if not alt_sni then + -- it is expected that alternative SNI doesn't have a SSL certificate associated + -- with it sometimes + core.log.error("failed to find any SSL certificate by SNI: ", sni) + end + return false + end + + + if type(api_ctx.matched_sni) == "table" then + local matched = false + for _, msni in ipairs(api_ctx.matched_sni) do + if sni_rev == msni or not str_find(sni_rev, ".", #msni) then + matched = true + break + end + end + if not matched then + local log_snis = core.json.encode(api_ctx.matched_sni, true) + if log_snis ~= nil then + log_snis = str_gsub(log_snis:reverse(), "%[", "%]") + log_snis = str_gsub(log_snis, "%]", "%[", 1) + end + core.log.warn("failed to find any SSL certificate by SNI: ", + sni, " matched SNIs: ", log_snis) + return false + end + else + if str_find(sni_rev, ".", #api_ctx.matched_sni) then + core.log.warn("failed to find any SSL certificate by SNI: ", + sni, " matched SNI: ", api_ctx.matched_sni:reverse()) + return false + end + end + + core.log.info("debug - matched: ", core.json.delay_encode(api_ctx.matched_ssl, true)) + + if match_only then + return true + end + + ok, err = _M.set(api_ctx.matched_ssl, sni) + if not ok then + return false, err + end + + return true +end + + +function _M.set(matched_ssl, sni) + if not matched_ssl then + return false, "failed to match ssl certificate" + end + local ok, err + if not sni then + sni, err = apisix_ssl.server_name() + if type(sni) ~= "string" then + local advise = "please check if the client requests via IP or uses an outdated " .. + "protocol. If you need to report an issue, " .. + "provide a packet capture file of the TLS handshake." + return false, "failed to find SNI: " .. (err or advise) + end + end + ngx_ssl.clear_certs() + + local new_ssl_value = secret.fetch_secrets(matched_ssl.value, true, matched_ssl.value, "") + or matched_ssl.value + + ok, err = _M.set_cert_and_key(sni, new_ssl_value) + if not ok then + return false, err + end + + if matched_ssl.value.client then + local ca_cert = matched_ssl.value.client.ca + local depth = matched_ssl.value.client.depth + if apisix_ssl.support_client_verification() then + local parsed_cert, err = apisix_ssl.fetch_cert(sni, ca_cert) + if not parsed_cert then + return false, "failed to parse client cert: " .. 
err + end + + local reject_in_handshake = + (ngx.config.subsystem == "stream") or + (matched_ssl.value.client.skip_mtls_uri_regex == nil) + -- TODO: support passing `trusted_certs` (3rd arg, keep it nil for now) + local ok, err = ngx_ssl.verify_client(parsed_cert, depth, nil, + reject_in_handshake) + if not ok then + return false, err + end + end + end + + return true +end + + +function _M.ssls() + if not ssl_certificates then + return nil, nil + end + + return ssl_certificates.values, ssl_certificates.conf_version +end + + +local function ssl_filter(ssl) + if not ssl.value then + return + end + + if ssl.value.sni then + ssl.value.sni = ngx.re.sub(ssl.value.sni, "\\.$", "", "jo") + ssl.value.sni = str_lower(ssl.value.sni) + elseif ssl.value.snis then + for i, v in ipairs(ssl.value.snis) do + v = ngx.re.sub(v, "\\.$", "", "jo") + ssl.value.snis[i] = str_lower(v) + end + end +end + + +function _M.init_worker() + local err + ssl_certificates, err = core.config.new("/ssls", { + automatic = true, + item_schema = core.schema.ssl, + checker = function (item, schema_type) + return apisix_ssl.check_ssl_conf(true, item) + end, + filter = ssl_filter, + }) + if not ssl_certificates then + error("failed to create etcd instance for fetching ssl certificates: " + .. err) + end +end + + +function _M.get_by_id(ssl_id) + local ssl + local ssls = core.config.fetch_created_obj("/ssls") + if ssls then + ssl = ssls:get(tostring(ssl_id)) + end + + if not ssl then + return nil + end + + return ssl.value +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/ip-restriction.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/ip-restriction.lua new file mode 100644 index 0000000..66c6c11 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/ip-restriction.lua @@ -0,0 +1,26 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local base = require("apisix.plugins.ip-restriction.init") + + +-- avoid unexpected data sharing +local ip_restriction = core.table.clone(base) +ip_restriction.preread = base.restrict + + +return ip_restriction diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/limit-conn.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/limit-conn.lua new file mode 100644 index 0000000..1beb7c7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/limit-conn.lua @@ -0,0 +1,61 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local limit_conn = require("apisix.plugins.limit-conn.init") + + +local plugin_name = "limit-conn" +local schema = { + type = "object", + properties = { + conn = {type = "integer", exclusiveMinimum = 0}, + burst = {type = "integer", minimum = 0}, + default_conn_delay = {type = "number", exclusiveMinimum = 0}, + only_use_default_delay = {type = "boolean", default = false}, + key = {type = "string"}, + key_type = {type = "string", + enum = {"var", "var_combination"}, + default = "var", + }, + }, + required = {"conn", "burst", "default_conn_delay", "key"} +} + +local _M = { + version = 0.1, + priority = 1003, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +function _M.preread(conf, ctx) + return limit_conn.increase(conf, ctx) +end + + +function _M.log(conf, ctx) + return limit_conn.decrease(conf, ctx) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/mqtt-proxy.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/mqtt-proxy.lua new file mode 100644 index 0000000..f075e20 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/mqtt-proxy.lua @@ -0,0 +1,186 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local bit = require("bit") +local ngx = ngx +local str_byte = string.byte +local str_sub = string.sub + + +core.ctx.register_var("mqtt_client_id", function(ctx) + return ctx.mqtt_client_id +end) + + +local schema = { + type = "object", + properties = { + protocol_name = {type = "string"}, + protocol_level = {type = "integer"} + }, + required = {"protocol_name", "protocol_level"}, +} + + +local plugin_name = "mqtt-proxy" + + +local _M = { + version = 0.1, + priority = 1000, + name = plugin_name, + schema = schema, +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function decode_variable_byte_int(data, offset) + local multiplier = 1 + local len = 0 + local pos + for i = offset, offset + 3 do + pos = i + local byte = str_byte(data, i, i) + len = len + bit.band(byte, 127) * multiplier + multiplier = multiplier * 128 + if bit.band(byte, 128) == 0 then + break + end + end + + return len, pos +end + + +local function parse_msg_hdr(data) + local packet_type_flags_byte = str_byte(data, 1, 1) + if packet_type_flags_byte < 16 or packet_type_flags_byte > 32 then + return nil, nil, + "Received unexpected MQTT packet type+flags: " .. packet_type_flags_byte + end + + local len, pos = decode_variable_byte_int(data, 2) + return len, pos +end + + +local function parse_mqtt(data, parsed_pos) + local res = {} + + local protocol_len = str_byte(data, parsed_pos + 1, parsed_pos + 1) * 256 + + str_byte(data, parsed_pos + 2, parsed_pos + 2) + parsed_pos = parsed_pos + 2 + res.protocol = str_sub(data, parsed_pos + 1, parsed_pos + protocol_len) + parsed_pos = parsed_pos + protocol_len + + res.protocol_ver = str_byte(data, parsed_pos + 1, parsed_pos + 1) + parsed_pos = parsed_pos + 1 + + -- skip control flags & keepalive + parsed_pos = parsed_pos + 3 + + if res.protocol_ver == 5 then + -- skip properties + local property_len + property_len, parsed_pos = decode_variable_byte_int(data, parsed_pos + 1) + parsed_pos = parsed_pos + property_len + end + + local client_id_len = str_byte(data, parsed_pos + 1, parsed_pos + 1) * 256 + + str_byte(data, parsed_pos + 2, parsed_pos + 2) + parsed_pos = parsed_pos + 2 + + if parsed_pos + client_id_len > #data then + res.expect_len = parsed_pos + client_id_len + return res + end + + if client_id_len == 0 then + -- A Server MAY allow a Client to supply a ClientID that has a length of zero bytes + res.client_id = "" + else + res.client_id = str_sub(data, parsed_pos + 1, parsed_pos + client_id_len) + end + + parsed_pos = parsed_pos + client_id_len + + res.expect_len = parsed_pos + return res +end + + +function _M.preread(conf, ctx) + local sock = ngx.req.socket() + -- the header format of MQTT CONNECT can be found in + -- https://docs.oasis-open.org/mqtt/mqtt/v5.0/os/mqtt-v5.0-os.html#_Toc3901033 + local data, err = sock:peek(5) + if not data then + core.log.error("failed to read the msg header: ", err) + return 503 + end + + local remain_len, pos, err = parse_msg_hdr(data) + if not remain_len then + core.log.error("failed to parse the msg header: ", err) + return 503 + end + + local data, err = sock:peek(pos + remain_len) + if not data then + core.log.error("failed to read the Connect Command: ", err) + return 503 + end + + local res = parse_mqtt(data, pos) + if res.expect_len > #data then + core.log.error("failed to parse mqtt request, expect len: ", + res.expect_len, " but got ", #data) + return 503 + end + + if res.protocol and res.protocol ~= conf.protocol_name then + core.log.error("expect 
protocol name: ", conf.protocol_name, + ", but got ", res.protocol) + return 503 + end + + if res.protocol_ver and res.protocol_ver ~= conf.protocol_level then + core.log.error("expect protocol level: ", conf.protocol_level, + ", but got ", res.protocol_ver) + return 503 + end + + core.log.info("mqtt client id: ", res.client_id) + + -- when client id is missing, fallback to balance by client IP + if res.client_id ~= "" then + ctx.mqtt_client_id = res.client_id + end + return +end + + +function _M.log(conf, ctx) + core.log.info("plugin log phase, conf: ", core.json.encode(conf)) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/prometheus.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/prometheus.lua new file mode 100644 index 0000000..46222ec --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/prometheus.lua @@ -0,0 +1,48 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local exporter = require("apisix.plugins.prometheus.exporter") + + +local plugin_name = "prometheus" +local schema = { + type = "object", + properties = { + prefer_name = { + type = "boolean", + default = false -- stream route doesn't have name yet + } + }, +} + + +local _M = { + version = 0.1, + priority = 500, + name = plugin_name, + log = exporter.stream_log, + schema = schema, + run_policy = "prefer_route", +} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/syslog.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/syslog.lua new file mode 100644 index 0000000..5a44ce4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/plugins/syslog.lua @@ -0,0 +1,80 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +local core = require("apisix.core") +local log_util = require("apisix.utils.log-util") +local bp_manager_mod = require("apisix.utils.batch-processor-manager") +local syslog = require("apisix.plugins.syslog.init") +local plugin_name = "syslog" + +local batch_processor_manager = bp_manager_mod.new("stream sys logger") +local schema = { + type = "object", + properties = { + host = {type = "string"}, + port = {type = "integer"}, + flush_limit = {type = "integer", minimum = 1, default = 4096}, + drop_limit = {type = "integer", default = 1048576}, + timeout = {type = "integer", minimum = 1, default = 3000}, + log_format = {type = "object"}, + sock_type = {type = "string", default = "tcp", enum = {"tcp", "udp"}}, + pool_size = {type = "integer", minimum = 5, default = 5}, + tls = {type = "boolean", default = false} + }, + required = {"host", "port"} +} + +local schema = batch_processor_manager:wrap_schema(schema) + +local metadata_schema = { + type = "object", + properties = { + log_format = { + type = "object" + } + }, +} + +local _M = { + version = 0.1, + priority = 401, + name = plugin_name, + schema = schema, + metadata_schema = metadata_schema, + flush_syslog = syslog.flush_syslog, +} + + +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end + return core.schema.check(schema, conf) +end + + +function _M.log(conf, ctx) + local entry = log_util.get_log_entry(plugin_name, conf, ctx) + if not entry then + return + end + + syslog.push_entry(conf, ctx, entry) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/router/ip_port.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/router/ip_port.lua new file mode 100644 index 0000000..4d502ca --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/router/ip_port.lua @@ -0,0 +1,249 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local core_ip = require("apisix.core.ip") +local config_util = require("apisix.core.config_util") +local stream_plugin_checker = require("apisix.plugin").stream_plugin_checker +local router_new = require("apisix.utils.router").new +local apisix_ssl = require("apisix.ssl") +local xrpc = require("apisix.stream.xrpc") +local error = error +local tonumber = tonumber +local ipairs = ipairs + +local user_routes +local router_ver +local tls_router +local other_routes = {} +local _M = {version = 0.1} + + + +local function match_addrs(route, vars) + -- todo: use resty-ipmatcher to support multiple ip address + if route.value.remote_addr then + local ok, _ = route.value.remote_addr_matcher:match(vars.remote_addr) + if not ok then + return false + end + end + + if route.value.server_addr then + local ok, _ = route.value.server_addr_matcher:match(vars.server_addr) + if not ok then + return false + end + end + + -- todo: use resty-ipmatcher to support multiple ip address + if route.value.server_port and + route.value.server_port ~= tonumber(vars.server_port) then + return false + end + + return true +end + + +local create_router +do + local sni_to_items = {} + local tls_routes = {} + + function create_router(items) + local tls_routes_idx = 1 + local other_routes_idx = 1 + core.table.clear(tls_routes) + core.table.clear(other_routes) + core.table.clear(sni_to_items) + + for _, item in config_util.iterate_values(items) do + if item.value == nil then + goto CONTINUE + end + + local route = item.value + if route.protocol and route.protocol.superior_id then + -- subordinate route won't be matched in the entry + -- TODO: check the subordinate relationship in the Admin API + goto CONTINUE + end + + if item.value.remote_addr then + item.value.remote_addr_matcher = core_ip.create_ip_matcher({item.value.remote_addr}) + end + if item.value.server_addr then + item.value.server_addr_matcher = core_ip.create_ip_matcher({item.value.server_addr}) + end + if not route.sni then + other_routes[other_routes_idx] = item + other_routes_idx = other_routes_idx + 1 + goto CONTINUE + end + + local sni_rev = route.sni:reverse() + local stored = sni_to_items[sni_rev] + if stored then + core.table.insert(stored, item) + goto CONTINUE + end + + sni_to_items[sni_rev] = {item} + tls_routes[tls_routes_idx] = { + paths = sni_rev, + filter_fun = function (vars, opts, ctx) + local items = sni_to_items[sni_rev] + for _, route in ipairs(items) do + local hit = match_addrs(route, vars) + if hit then + ctx.matched_route = route + return true + end + end + return false + end, + handler = function (ctx, sni_rev) + -- done in the filter_fun + end + } + tls_routes_idx = tls_routes_idx + 1 + + ::CONTINUE:: + end + + if #tls_routes > 0 then + local router, err = router_new(tls_routes) + if not router then + return err + end + + tls_router = router + end + + return nil + end +end + + +do + local match_opts = {} + + function _M.match(api_ctx) + if router_ver ~= user_routes.conf_version then + local err = create_router(user_routes.values) + if err then + return false, "failed to create router: " .. err + end + + router_ver = user_routes.conf_version + end + + local sni = apisix_ssl.server_name() + if sni and tls_router then + local sni_rev = sni:reverse() + + core.table.clear(match_opts) + match_opts.vars = api_ctx.var + + local _, err = tls_router:dispatch(sni_rev, match_opts, api_ctx) + if err then + return false, "failed to match TLS router: " .. 
err + end + end + + if api_ctx.matched_route then + -- unlike the matcher for the SSL, it is fine to let + -- '*.x.com' to match 'a.b.x.com' as we don't care about + -- the certificate + return true + end + + for _, route in ipairs(other_routes) do + local hit = match_addrs(route, api_ctx.var) + if hit then + api_ctx.matched_route = route + return true + end + end + + core.log.info("not hit any route") + return true + end +end + + +function _M.routes() + if not user_routes then + return nil, nil + end + + return user_routes.values, user_routes.conf_version +end + +local function stream_route_checker(item, in_cp) + if item.plugins then + local ok, message = stream_plugin_checker(item, in_cp) + if not ok then + return false, message + end + end + -- validate the address format when remote_address or server_address is not nil + if item.remote_addr then + if not core_ip.validate_cidr_or_ip(item.remote_addr) then + return false, "invalid remote_addr: " .. item.remote_addr + end + end + if item.server_addr then + if not core_ip.validate_cidr_or_ip(item.server_addr) then + return false, "invalid server_addr: " .. item.server_addr + end + end + + if item.protocol then + local prot_conf = item.protocol + if prot_conf then + local ok, message = xrpc.check_schema(prot_conf, false) + if not ok then + return false, message + end + end + end + + return true +end +_M.stream_route_checker = stream_route_checker + + +function _M.stream_init_worker(filter) + local err + user_routes, err = core.config.new("/stream_routes", { + automatic = true, + item_schema = core.schema.stream_route, + checker = function(item) + return stream_route_checker(item) + end, + filter = filter, + }) + + if not user_routes then + error("failed to create etcd instance for fetching /stream_routes : " + .. err) + end +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc.lua new file mode 100644 index 0000000..f9cfa8c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc.lua @@ -0,0 +1,121 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local metrics = require("apisix.stream.xrpc.metrics") +local ipairs = ipairs +local pairs = pairs +local ngx_exit = ngx.exit + + +local is_http = true +local runner +if ngx.config.subsystem ~= "http" then + is_http = false + runner = require("apisix.stream.xrpc.runner") +end + +local _M = {} +local registered_protocols = {} +local registered_protocol_schemas = {} + + +-- only need to load schema module when it is used in Admin API +local function register_protocol(name, is_http) + if not is_http then + registered_protocols[name] = require("apisix.stream.xrpc.protocols." 
.. name) + end + + registered_protocol_schemas[name] = + require("apisix.stream.xrpc.protocols." .. name .. ".schema") +end + + +function _M.init() + local local_conf = core.config.local_conf() + if not local_conf.xrpc then + return + end + + local prot_conf = local_conf.xrpc.protocols + if not prot_conf then + return + end + + if is_http and not local_conf.apisix.enable_admin then + -- we need to register xRPC protocols in HTTP only when Admin API is enabled + return + end + + for _, prot in ipairs(prot_conf) do + core.log.info("register xprc protocol ", prot.name) + register_protocol(prot.name, is_http) + end +end + + +function _M.init_metrics(collector) + local local_conf = core.config.local_conf() + if not local_conf.xrpc then + return + end + + local prot_conf = local_conf.xrpc.protocols + if not prot_conf then + return + end + + for _, prot in ipairs(prot_conf) do + metrics.store(collector, prot.name) + end +end + + +function _M.init_worker() + for name, prot in pairs(registered_protocols) do + if not is_http and prot.init_worker then + prot.init_worker() + end + end +end + + +function _M.check_schema(item, skip_disabled_plugin) + local name = item.name + local protocol = registered_protocol_schemas[name] + if not protocol and not skip_disabled_plugin then + -- like plugins, ignore unknown plugin if the schema is checked in the DP + return false, "unknown protocol [" .. name .. "]" + end + + -- check protocol-specific configuration + if not item.conf then + return true + end + return protocol.check_schema(item.conf) +end + + +function _M.run_protocol(conf, ctx) + local name = conf.name + local protocol = registered_protocols[name] + local code = runner.run(protocol, ctx) + return ngx_exit(code) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/metrics.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/metrics.lua new file mode 100644 index 0000000..41b77d4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/metrics.lua @@ -0,0 +1,50 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local pairs = pairs +local pcall = pcall + + +local _M = {} +local hubs = {} + + +function _M.store(prometheus, name) + local ok, m = pcall(require, "apisix.stream.xrpc.protocols." .. name .. ".metrics") + if not ok then + core.log.notice("no metric for protocol ", name) + return + end + + local hub = {} + for metric, conf in pairs(m) do + core.log.notice("register metric ", metric, " for protocol ", name) + hub[metric] = prometheus[conf.type](prometheus, name .. '_' .. 
metric, + conf.help, conf.labels, conf.buckets) + end + + hubs[name] = hub +end + + +function _M.load(name) + return hubs[name] +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/init.lua new file mode 100644 index 0000000..19160d6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/init.lua @@ -0,0 +1,231 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local sdk = require("apisix.stream.xrpc.sdk") +local xrpc_socket = require("resty.apisix.stream.xrpc.socket") +local math_random = math.random +local ngx = ngx +local OK = ngx.OK +local str_format = string.format +local DECLINED = ngx.DECLINED +local DONE = ngx.DONE +local bit = require("bit") +local ffi = require("ffi") +local ffi_str = ffi.string + + +-- dubbo protocol spec: https://cn.dubbo.apache.org/zh-cn/overview/reference/protocols/tcp/ +local header_len = 16 +local _M = {} + + +function _M.init_downstream(session) + session.req_id_seq = 0 + session.resp_id_seq = 0 + session.cmd_labels = { session.route.id, "" } + return xrpc_socket.downstream.socket() +end + + +local function parse_dubbo_header(header) + for i = 1, header_len do + local currentByte = header:byte(i) + if not currentByte then + return nil + end + end + + local magic_number = str_format("%04x", header:byte(1) * 256 + header:byte(2)) + local message_flag = header:byte(3) + local status = header:byte(4) + local request_id = 0 + for i = 5, 12 do + request_id = request_id * 256 + header:byte(i) + end + + local byte13Val = header:byte(13) * 256 * 256 * 256 + local byte14Val = header:byte(14) * 256 * 256 + local data_length = byte13Val + byte14Val + header:byte(15) * 256 + header:byte(16) + + local is_request = bit.band(bit.rshift(message_flag, 7), 0x01) == 1 and 1 or 0 + local is_two_way = bit.band(bit.rshift(message_flag, 6), 0x01) == 1 and 1 or 0 + local is_event = bit.band(bit.rshift(message_flag, 5), 0x01) == 1 and 1 or 0 + + return { + magic_number = magic_number, + message_flag = message_flag, + is_request = is_request, + is_two_way = is_two_way, + is_event = is_event, + status = status, + request_id = request_id, + data_length = data_length + } +end + + +local function read_data(sk, is_req) + local header_data, err = sk:read(header_len) + if not header_data then + return nil, err, false + end + + local header_str = ffi_str(header_data, header_len) + local header_info = parse_dubbo_header(header_str) + if not header_info then + return nil, "header insufficient", false + end + + local is_valid_magic_number = header_info.magic_number == "dabb" + if not is_valid_magic_number then + return nil, 
str_format("unknown magic number: \"%s\"", header_info.magic_number), false + end + + local body_data, err = sk:read(header_info.data_length) + if not body_data then + core.log.error("failed to read dubbo request body") + return nil, err, false + end + + local ctx = ngx.ctx + ctx.dubbo_serialization_id = bit.band(header_info.message_flag, 0x1F) + + if is_req then + ctx.dubbo_req_body_data = body_data + else + ctx.dubbo_rsp_body_data = body_data + end + + return true, nil, false +end + + +local function read_req(sk) + return read_data(sk, true) +end + + +local function read_reply(sk) + return read_data(sk, false) +end + + +local function handle_reply(session, sk) + local ok, err = read_reply(sk) + if not ok then + return nil, err + end + + local ctx = sdk.get_req_ctx(session, 10) + + return ctx +end + + +function _M.from_downstream(session, downstream) + local read_pipeline = false + session.req_id_seq = session.req_id_seq + 1 + local ctx = sdk.get_req_ctx(session, session.req_id_seq) + session._downstream_ctx = ctx + while true do + local ok, err, pipelined = read_req(downstream) + if not ok then + if err ~= "timeout" and err ~= "closed" then + core.log.error("failed to read request: ", err) + end + + if read_pipeline and err == "timeout" then + break + end + + return DECLINED + end + + if not pipelined then + break + end + + if not read_pipeline then + read_pipeline = true + -- set minimal read timeout to read pipelined data + downstream:settimeouts(0, 0, 1) + end + end + + if read_pipeline then + -- set timeout back + downstream:settimeouts(0, 0, 0) + end + + return OK, ctx +end + + +function _M.connect_upstream(session, ctx) + local conf = session.upstream_conf + local nodes = conf.nodes + if #nodes == 0 then + core.log.error("failed to connect: no nodes") + return DECLINED + end + + local node = nodes[math_random(#nodes)] + local sk = sdk.connect_upstream(node, conf) + if not sk then + return DECLINED + end + + core.log.debug("dubbo_connect_upstream end") + + return OK, sk +end + +function _M.disconnect_upstream(session, upstream) + sdk.disconnect_upstream(upstream, session.upstream_conf) +end + + +function _M.to_upstream(session, ctx, downstream, upstream) + local ok, _ = upstream:move(downstream) + if not ok then + return DECLINED + end + + return OK +end + + +function _M.from_upstream(session, downstream, upstream) + local ctx,err = handle_reply(session, upstream) + if err then + return DECLINED + end + + local ok, _ = downstream:move(upstream) + if not ok then + return DECLINED + end + + return DONE, ctx +end + + +function _M.log(_, _) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/schema.lua new file mode 100644 index 0000000..3a9d733 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/dubbo/schema.lua @@ -0,0 +1,32 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") + + +local schema = { + type = "object", +} + +local _M = {} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/commands.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/commands.lua new file mode 100644 index 0000000..ff3338f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/commands.lua @@ -0,0 +1,222 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local ipairs = ipairs +local pairs = pairs + + +local cmd_to_key_finder = {} +--[[ +-- the data is generated from the script below +local redis = require "resty.redis" +local red = redis:new() + +local ok, err = red:connect("127.0.0.1", 6379) +if not ok then + ngx.say("failed to connect: ", err) + return +end + +local res = red:command("info") +local map = {} +for _, r in ipairs(res) do + local first_key = r[4] + local last_key = r[5] + local step = r[6] + local idx = first_key .. ':' .. last_key .. ':' .. 
step + + if idx ~= "1:1:1" then + -- "1:1:1" is the default + if map[idx] then + table.insert(map[idx], r[1]) + else + map[idx] = {r[1]} + end + end +end +for _, r in pairs(map) do + table.sort(r) +end +local dump = require('pl.pretty').dump; dump(map) +--]] +local key_to_cmd = { + ["0:0:0"] = { + "acl", + "asking", + "auth", + "bgrewriteaof", + "bgsave", + "blmpop", + "bzmpop", + "client", + "cluster", + "command", + "config", + "dbsize", + "debug", + "discard", + "echo", + "eval", + "eval_ro", + "evalsha", + "evalsha_ro", + "exec", + "failover", + "fcall", + "fcall_ro", + "flushall", + "flushdb", + "function", + "hello", + "info", + "keys", + "lastsave", + "latency", + "lmpop", + "lolwut", + "memory", + "module", + "monitor", + "multi", + "object", + "pfselftest", + "ping", + "psubscribe", + "psync", + "publish", + "pubsub", + "punsubscribe", + "quit", + "randomkey", + "readonly", + "readwrite", + "replconf", + "replicaof", + "reset", + "role", + "save", + "scan", + "script", + "select", + "shutdown", + "sintercard", + "slaveof", + "slowlog", + "subscribe", + "swapdb", + "sync", + "time", + "unsubscribe", + "unwatch", + "wait", + "xgroup", + "xinfo", + "xread", + "xreadgroup", + "zdiff", + "zinter", + "zintercard", + "zmpop", + "zunion" + }, + ["1:-1:1"] = { + "del", + "exists", + "mget", + "pfcount", + "pfmerge", + "sdiff", + "sdiffstore", + "sinter", + "sinterstore", + "ssubscribe", + "sunion", + "sunionstore", + "sunsubscribe", + "touch", + "unlink", + "watch" + }, + ["1:-1:2"] = { + "mset", + "msetnx" + }, + ["1:-2:1"] = { + "blpop", + "brpop", + "bzpopmax", + "bzpopmin" + }, + ["1:2:1"] = { + "blmove", + "brpoplpush", + "copy", + "geosearchstore", + "lcs", + "lmove", + "rename", + "renamenx", + "rpoplpush", + "smove", + "zrangestore" + }, + ["2:-1:1"] = { + "bitop" + }, + ["2:2:1"] = { + "pfdebug" + }, + ["3:3:1"] = { + "migrate" + } +} +local key_finders = { + ["0:0:0"] = false, + ["1:-1:1"] = function (idx, narg) + return 1 < idx + end, + ["1:-1:2"] = function (idx, narg) + return 1 < idx and idx % 2 == 0 + end, + ["1:-2:1"] = function (idx, narg) + return 1 < idx and idx < narg - 1 + end, + ["1:2:1"] = function (idx, narg) + return idx == 2 or idx == 3 + end, + ["2:-1:1"] = function (idx, narg) + return 2 < idx + end, + ["2:2:1"] = function (idx, narg) + return idx == 3 + end, + ["3:3:1"] = function (idx, narg) + return idx == 4 + end +} +for k, cmds in pairs(key_to_cmd) do + for _, cmd in ipairs(cmds) do + cmd_to_key_finder[cmd] = key_finders[k] + end +end + + +return { + cmd_to_key_finder = cmd_to_key_finder, + default_key_finder = function (idx, narg) + return idx == 2 + end, +} diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/init.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/init.lua new file mode 100644 index 0000000..9aff6d0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/init.lua @@ -0,0 +1,499 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local sdk = require("apisix.stream.xrpc.sdk") +local commands = require("apisix.stream.xrpc.protocols.redis.commands") +local xrpc_socket = require("resty.apisix.stream.xrpc.socket") +local ffi = require("ffi") +local ffi_str = ffi.string +local math_random = math.random +local OK = ngx.OK +local DECLINED = ngx.DECLINED +local DONE = ngx.DONE +local sleep = ngx.sleep +local str_byte = string.byte +local str_fmt = string.format +local ipairs = ipairs +local tonumber = tonumber + + +-- this variable is only used to log the redis command line in log_format +-- and is not used for filter in the logger phase. +core.ctx.register_var("redis_cmd_line", function(ctx) + return core.table.concat(ctx.cmd_line, " ") +end) + +-- redis protocol spec: https://redis.io/docs/reference/protocol-spec/ +-- There is no plan to support inline command format +local protocol_name = "redis" +local _M = {} +local MAX_LINE_LEN = 128 +local MAX_VALUE_LEN = 128 +local PREFIX_ARR = str_byte("*") +local PREFIX_STR = str_byte("$") +local PREFIX_STA = str_byte("+") +local PREFIX_INT = str_byte(":") +local PREFIX_ERR = str_byte("-") + + +local lrucache = core.lrucache.new({ + type = "plugin", +}) + + +local function create_matcher(conf) + local matcher = {} + --[[ + {"delay": 5, "key":"x", "commands":["GET", "MGET"]} + {"delay": 5, "commands":["GET"]} + => { + get = {keys = {x = {delay = 5}, * = {delay = 5}}} + mget = {keys = {x = {delay = 5}}} + } + ]]-- + for _, rule in ipairs(conf.faults) do + for _, cmd in ipairs(rule.commands) do + cmd = cmd:lower() + local key = rule.key + local kf = commands.cmd_to_key_finder[cmd] + local key_matcher = matcher[cmd] + if not key_matcher then + key_matcher = { + keys = {} + } + matcher[cmd] = key_matcher + end + + if not key or kf == false then + key = "*" + end + + if key_matcher.keys[key] then + core.log.warn("override existent fault rule of cmd: ", cmd, ", key: ", key) + end + + key_matcher.keys[key] = rule + end + end + + return matcher +end + + +local function get_matcher(conf, ctx) + return core.lrucache.plugin_ctx(lrucache, ctx, nil, create_matcher, conf) +end + + +function _M.init_downstream(session) + local conf = session.route.protocol.conf + if conf and conf.faults then + local matcher = get_matcher(conf, session.conn_ctx) + session.matcher = matcher + end + + session.req_id_seq = 0 + session.resp_id_seq = 0 + session.cmd_labels = {session.route.id, ""} + return xrpc_socket.downstream.socket() +end + + +local function read_line(sk) + local p, err, len = sk:read_line(MAX_LINE_LEN) + if not p then + return nil, err + end + + if len < 2 then + return nil, "line too short" + end + + return p, nil, len +end + + +local function read_len(sk) + local p, err, len = read_line(sk) + if not p then + return nil, err + end + + local s = ffi_str(p + 1, len - 1) + local n = tonumber(s) + if not n then + return nil, str_fmt("invalid len string: \"%s\"", s) + end + return n +end + + +local function read_req(session, sk) + local narg, err = read_len(sk) + if not narg then + return nil, err + end + + local cmd_line = 
core.tablepool.fetch("xrpc_redis_cmd_line", narg, 0) + + local n, err = read_len(sk) + if not n then + return nil, err + end + + local p, err = sk:read(n + 2) + if not p then + return nil, err + end + + local s = ffi_str(p, n) + local cmd = s:lower() + cmd_line[1] = cmd + + if cmd == "subscribe" or cmd == "psubscribe" then + session.in_pub_sub = true + end + + local key_finder + local matcher = session.matcher + if matcher then + matcher = matcher[s:lower()] + if matcher then + key_finder = commands.cmd_to_key_finder[s] or commands.default_key_finder + end + end + + for i = 2, narg do + local is_key = false + if key_finder then + is_key = key_finder(i, narg) + end + + local n, err = read_len(sk) + if not n then + return nil, err + end + + local s + if not is_key and n > MAX_VALUE_LEN then + -- avoid recording big value + local p, err = sk:read(MAX_VALUE_LEN) + if not p then + return nil, err + end + + local ok, err = sk:drain(n - MAX_VALUE_LEN + 2) + if not ok then + return nil, err + end + + s = ffi_str(p, MAX_VALUE_LEN) .. "...(" .. n .. " bytes)" + else + local p, err = sk:read(n + 2) + if not p then + return nil, err + end + + s = ffi_str(p, n) + + if is_key and matcher.keys[s] then + matcher = matcher.keys[s] + key_finder = nil + end + end + + cmd_line[i] = s + end + + session.req_id_seq = session.req_id_seq + 1 + local ctx = sdk.get_req_ctx(session, session.req_id_seq) + ctx.cmd_line = cmd_line + ctx.cmd = cmd + + local pipelined = sk:has_pending_data() + + if matcher then + if matcher.keys then + -- try to match any key of this command + matcher = matcher.keys["*"] + end + + if matcher then + sleep(matcher.delay) + end + end + + return true, nil, pipelined +end + + +local function read_subscribe_reply(sk) + local line, err, n = read_line(sk) + if not line then + return nil, err + end + + local prefix = line[0] + + if prefix == PREFIX_STR then -- char '$' + local size = tonumber(ffi_str(line + 1, n - 1)) + if size < 0 then + return true + end + + local p, err = sk:read(size + 2) + if not p then + return nil, err + end + + return ffi_str(p, size) + + elseif prefix == PREFIX_INT then -- char ':' + return tonumber(ffi_str(line + 1, n - 1)) + + else + return nil, str_fmt("unknown prefix: \"%s\"", prefix) + end +end + + +local function read_reply(sk, session) + local line, err, n = read_line(sk) + if not line then + return nil, err + end + + local prefix = line[0] + + if prefix == PREFIX_STR then -- char '$' + -- print("bulk reply") + + local size = tonumber(ffi_str(line + 1, n - 1)) + if size < 0 then + return true + end + + local ok, err = sk:drain(size + 2) + if not ok then + return nil, err + end + + return true + + elseif prefix == PREFIX_STA then -- char '+' + -- print("status reply") + return true + + elseif prefix == PREFIX_ARR then -- char '*' + local narr = tonumber(ffi_str(line + 1, n - 1)) + + -- print("multi-bulk reply: ", narr) + if narr < 0 then + return true + end + + if session and session.in_pub_sub and (narr == 3 or narr == 4) then + local msg_type, err = read_subscribe_reply(sk) + if msg_type == nil then + return nil, err + end + + session.pub_sub_msg_type = msg_type + + local res, err = read_reply(sk) + if res == nil then + return nil, err + end + + if msg_type == "unsubscribe" or msg_type == "punsubscribe" then + local n_ch, err = read_subscribe_reply(sk) + if n_ch == nil then + return nil, err + end + + if n_ch == 0 then + session.in_pub_sub = -1 + -- clear this flag later at the end of `handle_reply` + end + + else + local n = msg_type == "pmessage" and 2 or 1 + 
for i = 1, n do + local res, err = read_reply(sk) + if res == nil then + return nil, err + end + end + end + + else + for i = 1, narr do + local res, err = read_reply(sk) + if res == nil then + return nil, err + end + end + end + + return true + + elseif prefix == PREFIX_INT then -- char ':' + -- print("integer reply") + return true + + elseif prefix == PREFIX_ERR then -- char '-' + -- print("error reply: ", n) + return true + + else + return nil, str_fmt("unknown prefix: \"%s\"", prefix) + end +end + + +local function handle_reply(session, sk) + local ok, err = read_reply(sk, session) + if not ok then + return nil, err + end + + local ctx + if session.in_pub_sub and session.pub_sub_msg_type then + local msg_type = session.pub_sub_msg_type + session.pub_sub_msg_type = nil + if session.resp_id_seq < session.req_id_seq then + local cur_ctx = sdk.get_req_ctx(session, session.resp_id_seq + 1) + local cmd = cur_ctx.cmd + if cmd == msg_type then + ctx = cur_ctx + session.resp_id_seq = session.resp_id_seq + 1 + end + end + + if session.in_pub_sub == -1 then + session.in_pub_sub = nil + end + else + session.resp_id_seq = session.resp_id_seq + 1 + ctx = sdk.get_req_ctx(session, session.resp_id_seq) + end + + return ctx +end + + +function _M.from_downstream(session, downstream) + local read_pipeline = false + while true do + local ok, err, pipelined = read_req(session, downstream) + if not ok then + if err ~= "timeout" and err ~= "closed" then + core.log.error("failed to read request: ", err) + end + + if read_pipeline and err == "timeout" then + break + end + + return DECLINED + end + + if not pipelined then + break + end + + if not read_pipeline then + read_pipeline = true + -- set minimal read timeout to read pipelined data + downstream:settimeouts(0, 0, 1) + end + end + + if read_pipeline then + -- set timeout back + downstream:settimeouts(0, 0, 0) + end + + return OK +end + + +function _M.connect_upstream(session, ctx) + local conf = session.upstream_conf + local nodes = conf.nodes + if #nodes == 0 then + core.log.error("failed to connect: no nodes") + return DECLINED + end + + local node = nodes[math_random(#nodes)] + local sk = sdk.connect_upstream(node, conf) + if not sk then + return DECLINED + end + + return OK, sk +end + + +function _M.disconnect_upstream(session, upstream) + sdk.disconnect_upstream(upstream, session.upstream_conf) +end + + +function _M.to_upstream(session, ctx, downstream, upstream) + local ok, err = upstream:move(downstream) + if not ok then + core.log.error("failed to send to upstream: ", err) + return DECLINED + end + + return OK +end + + +function _M.from_upstream(session, downstream, upstream) + local ctx, err = handle_reply(session, upstream) + if err then + core.log.error("failed to handle upstream: ", err) + return DECLINED + end + + local ok, err = downstream:move(upstream) + if not ok then + core.log.error("failed to handle upstream: ", err) + return DECLINED + end + + return DONE, ctx +end + + +function _M.log(session, ctx) + local metrics = sdk.get_metrics(session, protocol_name) + if metrics then + session.cmd_labels[2] = ctx.cmd + metrics.commands_total:inc(1, session.cmd_labels) + metrics.commands_latency_seconds:observe(ctx.var.rpc_time, session.cmd_labels) + end + + core.tablepool.release("xrpc_redis_cmd_line", ctx.cmd_line) + ctx.cmd_line = nil +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/metrics.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/metrics.lua new 
file mode 100644 index 0000000..6009a50 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/metrics.lua @@ -0,0 +1,33 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local _M = { + commands_total = { + type = "counter", + help = "Total number of requests for a specific Redis command", + labels = {"route", "command"}, + }, + commands_latency_seconds = { + type = "histogram", + help = "Latency of requests for a specific Redis command", + labels = {"route", "command"}, + -- latency buckets, 1ms to 1s: + buckets = {0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1} + }, +} + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/schema.lua new file mode 100644 index 0000000..0b6c90c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/protocols/redis/schema.lua @@ -0,0 +1,59 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") + + +local schema = { + type = "object", + properties = { + faults = { + type = "array", + minItems = 1, + items = { + type = "object", + properties = { + commands = { + type = "array", + minItems = 1, + items = { + type = "string" + }, + }, + key = { + type = "string", + minLength = 1, + }, + delay = { + type = "number", + description = "additional delay in seconds", + } + }, + required = {"commands", "delay"} + }, + }, + }, +} + +local _M = {} + + +function _M.check_schema(conf) + return core.schema.check(schema, conf) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/runner.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/runner.lua new file mode 100644 index 0000000..5f1b97d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/runner.lua @@ -0,0 +1,279 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local require = require +local core = require("apisix.core") +local expr = require("resty.expr.v1") +local pairs = pairs +local ngx = ngx +local ngx_now = ngx.now +local OK = ngx.OK +local DECLINED = ngx.DECLINED +local DONE = ngx.DONE +local pcall = pcall +local ipairs = ipairs +local tostring = tostring + + +core.ctx.register_var("rpc_time", function(ctx) + return ctx._rpc_end_time - ctx._rpc_start_time +end) + +local logger_expr_cache = core.lrucache.new({ + ttl = 300, count = 1024 +}) + +local _M = {} + + +local function open_session(conn_ctx) + conn_ctx.xrpc_session = { + conn_ctx = conn_ctx, + route = conn_ctx.matched_route.value, + -- fields start with '_' should not be accessed by the protocol implementation + _upstream_conf = conn_ctx.matched_upstream, + _ctxs = {}, + } + return conn_ctx.xrpc_session +end + + +local function put_req_ctx(session, ctx) + local id = ctx._id + session._ctxs[id] = nil + + core.ctx.release_vars(ctx) + + core.tablepool.release("xrpc_ctxs", ctx) +end + + +local function filter_logger(ctx, logger) + if not logger then + return false + end + + if not logger.filter or #logger.filter == 0 then + -- no valid filter, default execution plugin + return true + end + + local version = tostring(logger.filter) + local filter_expr, err = logger_expr_cache(ctx.conf_id, version, expr.new, logger.filter) + if not filter_expr or err then + core.log.error("failed to validate the 'filter' expression: ", err) + return false + end + return filter_expr:eval(ctx.var) +end + + +local function run_log_plugin(ctx, logger) + local pkg_name = "apisix.stream.plugins." .. 
logger.name + local ok, plugin = pcall(require, pkg_name) + if not ok then + core.log.error("failed to load plugin [", logger.name, "] err: ", plugin) + return + end + + local log_func = plugin.log + if log_func then + log_func(logger.conf, ctx) + end +end + + +local function finialize_req(protocol, session, ctx) + ctx._rpc_end_time = ngx_now() + local loggers = session.route.protocol.logger + if loggers and #loggers > 0 then + for _, logger in ipairs(loggers) do + ctx.conf_id = tostring(logger.conf) + local matched = filter_logger(ctx, logger) + core.log.info("log filter: ", logger.name, " filter result: ", matched) + if matched then + run_log_plugin(ctx, logger) + end + end + end + + protocol.log(session, ctx) + put_req_ctx(session, ctx) +end + + +local function close_session(session, protocol) + local upstream_ctx = session._upstream_ctx + if upstream_ctx then + upstream_ctx.closed = true + + local up = upstream_ctx.upstream + protocol.disconnect_upstream(session, up) + end + + local upstream_ctxs = session._upstream_ctxs + if upstream_ctxs then + for _, upstream_ctx in pairs(upstream_ctxs) do + upstream_ctx.closed = true + + local up = upstream_ctx.upstream + protocol.disconnect_upstream(session, up) + end + end + + for id, ctx in pairs(session._ctxs) do + core.log.notice("RPC is not finished, id: ", id) + ctx.unfinished = true + finialize_req(protocol, session, ctx) + end +end + + +local function open_upstream(protocol, session, ctx) + local key = session._upstream_key + session._upstream_key = nil + + if key then + if not session._upstream_ctxs then + session._upstream_ctxs = {} + end + + local up_ctx = session._upstream_ctxs[key] + if up_ctx then + return OK, up_ctx + end + else + if session._upstream_ctx then + return OK, session._upstream_ctx + end + + session.upstream_conf = session._upstream_conf + end + + local state, upstream = protocol.connect_upstream(session, session) + if state ~= OK then + return state, nil + end + + local up_ctx = { + upstream = upstream, + closed = false, + } + if key then + session._upstream_ctxs[key] = up_ctx + else + session._upstream_ctx = up_ctx + end + + return OK, up_ctx +end + + +local function start_upstream_coroutine(session, protocol, downstream, up_ctx) + local upstream = up_ctx.upstream + while not up_ctx.closed do + local status, ctx = protocol.from_upstream(session, downstream, upstream) + if status ~= OK then + if ctx ~= nil then + finialize_req(protocol, session, ctx) + end + + if status == DECLINED then + -- fail to read + break + end + + if status == DONE then + -- a rpc is finished + goto continue + end + end + + ::continue:: + end +end + + +function _M.run(protocol, conn_ctx) + local session = open_session(conn_ctx) + local downstream = protocol.init_downstream(session) + + while true do + local status, ctx = protocol.from_downstream(session, downstream) + if status ~= OK then + if ctx ~= nil then + finialize_req(protocol, session, ctx) + end + + if status == DECLINED then + -- fail to read or can't be authorized + break + end + + if status == DONE then + -- heartbeat or fault injection, already reply to downstream + goto continue + end + end + + -- need to do some auth/routing jobs before reaching upstream + local status, up_ctx = open_upstream(protocol, session, ctx) + if status ~= OK then + if ctx ~= nil then + finialize_req(protocol, session, ctx) + end + + break + end + + status = protocol.to_upstream(session, ctx, downstream, up_ctx.upstream) + if status ~= OK then + if ctx ~= nil then + finialize_req(protocol, session, ctx) + 
end + + if status == DECLINED then + break + end + + if status == DONE then + -- for Unary request we can directly reply here + goto continue + end + end + + if not up_ctx.coroutine then + local co, err = ngx.thread.spawn( + start_upstream_coroutine, session, protocol, downstream, up_ctx) + if not co then + core.log.error("failed to start upstream coroutine: ", err) + break + end + + up_ctx.coroutine = co + end + + ::continue:: + end + + close_session(session, protocol) + + -- return non-zero code to terminal the session + return 200 +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/sdk.lua b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/sdk.lua new file mode 100644 index 0000000..60f100c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/stream/xrpc/sdk.lua @@ -0,0 +1,202 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +--- Upstream helper functions which can be used in xRPC +-- +-- @module xrpc.sdk +local core = require("apisix.core") +local config_util = require("apisix.core.config_util") +local router = require("apisix.stream.router.ip_port") +local metrics = require("apisix.stream.xrpc.metrics") +local apisix_upstream = require("apisix.upstream") +local xrpc_socket = require("resty.apisix.stream.xrpc.socket") +local ngx_now = ngx.now +local str_fmt = string.format +local tab_insert = table.insert +local error = error +local tostring = tostring + + +local _M = {} + + +--- +-- Returns the connected xRPC upstream socket according to the configuration +-- +-- @function xrpc.sdk.connect_upstream +-- @tparam table node selected upstream node +-- @tparam table up_conf upstream configuration +-- @treturn table|nil the xRPC upstream socket, or nil if failed +function _M.connect_upstream(node, up_conf) + local sk = xrpc_socket.upstream.socket() + + local timeout = up_conf.timeout + if not timeout then + -- use the default timeout of Nginx proxy + sk:settimeouts(60 * 1000, 600 * 1000, 600 * 1000) + else + -- the timeout unit for balancer is second while the unit for cosocket is millisecond + sk:settimeouts(timeout.connect * 1000, timeout.send * 1000, timeout.read * 1000) + end + + local ok, err = sk:connect(node.host, node.port) + if not ok then + core.log.error("failed to connect: ", err) + return nil + end + + if up_conf.scheme == "tls" then + -- TODO: support mTLS + local ok, err = sk:sslhandshake(nil, node.host) + if not ok then + core.log.error("failed to handshake: ", err) + return nil + end + end + + return sk +end + + +--- +-- Disconnect xRPC upstream socket according to the configuration +-- +-- @function xrpc.sdk.disconnect_upstream +-- @tparam table upstream xRPC upstream socket +-- @tparam table up_conf upstream configuration +function 
_M.disconnect_upstream(upstream, up_conf) + return upstream:close() +end + + +--- +-- Returns the request level ctx with an id +-- +-- @function xrpc.sdk.get_req_ctx +-- @tparam table session xrpc session +-- @tparam string id ctx id +-- @treturn table the request level ctx +function _M.get_req_ctx(session, id) + if not id then + error("id is required") + end + + local ctx = session._ctxs[id] + if ctx then + return ctx + end + + local ctx = core.tablepool.fetch("xrpc_ctxs", 4, 4) + -- fields start with '_' should not be accessed by the protocol implementation + ctx._id = id + core.ctx.set_vars_meta(ctx) + ctx.conf_type = "xrpc-" .. session.route.protocol.name .. "-logger" + + session._ctxs[id] = ctx + + ctx._rpc_start_time = ngx_now() + return ctx +end + + +--- +-- Returns the new router if the stream routes are changed +-- +-- @function xrpc.sdk.get_router +-- @tparam table session xrpc session +-- @tparam string version the current router version, should come from the last call +-- @treturn boolean whether there is a change +-- @treturn table the new router under the specific protocol +-- @treturn string the new router version +function _M.get_router(session, version) + local protocol_name = session.route.protocol.name + local id = session.route.id + + local items, conf_version = router.routes() + if version == conf_version then + return false + end + + local proto_router = {} + for _, item in config_util.iterate_values(items) do + if item.value == nil then + goto CONTINUE + end + + local route = item.value + if route.protocol.name ~= protocol_name then + goto CONTINUE + end + + if tostring(route.protocol.superior_id) ~= id then + goto CONTINUE + end + + tab_insert(proto_router, route) + + ::CONTINUE:: + end + + return true, proto_router, conf_version +end + + +--- +-- Set the session's current upstream according to the route's configuration +-- +-- @function xrpc.sdk.set_upstream +-- @tparam table session xrpc session +-- @tparam table conf the route configuration +-- @treturn nil|string error message if present +function _M.set_upstream(session, conf) + local up + if conf.upstream then + up = conf.upstream + else + local id = conf.upstream_id + up = apisix_upstream.get_by_id(id) + if not up then + return str_fmt("upstream %s can't be got", id) + end + end + + local key = tostring(up) + core.log.info("set upstream to: ", key, " conf: ", core.json.delay_encode(up, true)) + + session._upstream_key = key + session.upstream_conf = up + return nil +end + + +--- +-- Returns the protocol specific metrics object +-- +-- @function xrpc.sdk.get_metrics +-- @tparam table session xrpc session +-- @tparam string protocol_name protocol name +-- @treturn nil|table the metrics under the specific protocol if available +function _M.get_metrics(session, protocol_name) + local metric_conf = session.route.protocol.metric + if not (metric_conf and metric_conf.enable) then + return nil + end + return metrics.load(protocol_name) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/timers.lua b/CloudronPackages/APISIX/apisix-source/apisix/timers.lua new file mode 100644 index 0000000..aebe346 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/timers.lua @@ -0,0 +1,103 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. 
+-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local process = require("ngx.process") +local pairs = pairs +local unpack = unpack +local thread_spawn = ngx.thread.spawn +local thread_wait = ngx.thread.wait + +local check_interval = 1 + +local timers = {} + + +local _M = {} + + +local function background_timer() + if core.table.nkeys(timers) == 0 then + return + end + + local threads = {} + for name, timer in pairs(timers) do + core.log.info("run timer[", name, "]") + + local th, err = thread_spawn(timer) + if not th then + core.log.error("failed to spawn thread for timer [", name, "]: ", err) + goto continue + end + + core.table.insert(threads, th) + +::continue:: + end + + local ok = thread_wait(unpack(threads)) + if not ok then + core.log.error("failed to wait threads") + end +end + + +local function is_privileged() + return process.type() == "privileged agent" +end + + +function _M.init_worker() + local opts = { + each_ttl = 0, + sleep_succ = 0, + check_interval = check_interval, + } + local timer, err = core.timer.new("background", background_timer, opts) + if not timer then + core.log.error("failed to create background timer: ", err) + return + end + + core.log.notice("succeed to create background timer") +end + + +function _M.register_timer(name, f, privileged) + if privileged and not is_privileged() then + return + end + + timers[name] = f +end + + +function _M.unregister_timer(name, privileged) + if privileged and not is_privileged() then + return + end + + timers[name] = nil +end + + +function _M.check_interval() + return check_interval +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/upstream.lua b/CloudronPackages/APISIX/apisix-source/apisix/upstream.lua new file mode 100644 index 0000000..ffd5e39 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/upstream.lua @@ -0,0 +1,659 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local require = require +local core = require("apisix.core") +local discovery = require("apisix.discovery.init").discovery +local upstream_util = require("apisix.utils.upstream") +local apisix_ssl = require("apisix.ssl") +local events = require("apisix.events") +local error = error +local tostring = tostring +local ipairs = ipairs +local pairs = pairs +local pcall = pcall +local ngx_var = ngx.var +local is_http = ngx.config.subsystem == "http" +local upstreams +local healthcheck + +local healthcheck_shdict_name = "upstream-healthcheck" +if not is_http then + healthcheck_shdict_name = healthcheck_shdict_name .. "-" .. ngx.config.subsystem +end + +local set_upstream_tls_client_param +local ok, apisix_ngx_upstream = pcall(require, "resty.apisix.upstream") +if ok then + set_upstream_tls_client_param = apisix_ngx_upstream.set_cert_and_key +else + set_upstream_tls_client_param = function () + return nil, "need to build APISIX-Runtime to support upstream mTLS" + end +end + +local set_stream_upstream_tls +if not is_http then + local ok, apisix_ngx_stream_upstream = pcall(require, "resty.apisix.stream.upstream") + if ok then + set_stream_upstream_tls = apisix_ngx_stream_upstream.set_tls + else + set_stream_upstream_tls = function () + return nil, "need to build APISIX-Runtime to support TLS over TCP upstream" + end + end +end + + + +local HTTP_CODE_UPSTREAM_UNAVAILABLE = 503 +local _M = {} + + +local function set_directly(ctx, key, ver, conf) + if not ctx then + error("missing argument ctx", 2) + end + if not key then + error("missing argument key", 2) + end + if not ver then + error("missing argument ver", 2) + end + if not conf then + error("missing argument conf", 2) + end + + ctx.upstream_conf = conf + ctx.upstream_version = ver + ctx.upstream_key = key + return +end +_M.set = set_directly + + +local function release_checker(healthcheck_parent) + if not healthcheck_parent or not healthcheck_parent.checker then + return + end + local checker = healthcheck_parent.checker + core.log.info("try to release checker: ", tostring(checker)) + checker:delayed_clear(3) + checker:stop() +end + + +local function get_healthchecker_name(value) + return "upstream#" .. 
value.key +end +_M.get_healthchecker_name = get_healthchecker_name + + +local function create_checker(upstream) + if healthcheck == nil then + healthcheck = require("resty.healthcheck") + end + + local healthcheck_parent = upstream.parent + if healthcheck_parent.checker and healthcheck_parent.checker_upstream == upstream + and healthcheck_parent.checker_nodes_ver == upstream._nodes_ver then + return healthcheck_parent.checker + end + + if upstream.is_creating_checker then + core.log.info("another request is creating new checker") + return nil + end + upstream.is_creating_checker = true + + core.log.debug("events module used by the healthcheck: ", events.events_module, + ", module name: ",events:get_healthcheck_events_modele()) + + local checker, err = healthcheck.new({ + name = get_healthchecker_name(healthcheck_parent), + shm_name = healthcheck_shdict_name, + checks = upstream.checks, + -- the events.init_worker will be executed in the init_worker phase, + -- events.healthcheck_events_module is set + -- while the healthcheck object is executed in the http access phase, + -- so it can be used here + events_module = events:get_healthcheck_events_modele(), + }) + + if not checker then + core.log.error("fail to create healthcheck instance: ", err) + upstream.is_creating_checker = nil + return nil + end + + if healthcheck_parent.checker then + local ok, err = pcall(core.config_util.cancel_clean_handler, healthcheck_parent, + healthcheck_parent.checker_idx, true) + if not ok then + core.log.error("cancel clean handler error: ", err) + end + end + + core.log.info("create new checker: ", tostring(checker)) + + local host = upstream.checks and upstream.checks.active and upstream.checks.active.host + local port = upstream.checks and upstream.checks.active and upstream.checks.active.port + local up_hdr = upstream.pass_host == "rewrite" and upstream.upstream_host + local use_node_hdr = upstream.pass_host == "node" or nil + for _, node in ipairs(upstream.nodes) do + local host_hdr = up_hdr or (use_node_hdr and node.domain) + local ok, err = checker:add_target(node.host, port or node.port, host, + true, host_hdr) + if not ok then + core.log.error("failed to add new health check target: ", node.host, ":", + port or node.port, " err: ", err) + end + end + + local check_idx, err = core.config_util.add_clean_handler(healthcheck_parent, release_checker) + if not check_idx then + upstream.is_creating_checker = nil + checker:clear() + checker:stop() + core.log.error("failed to add clean handler, err:", + err, " healthcheck parent:", core.json.delay_encode(healthcheck_parent, true)) + + return nil + end + + healthcheck_parent.checker = checker + healthcheck_parent.checker_upstream = upstream + healthcheck_parent.checker_nodes_ver = upstream._nodes_ver + healthcheck_parent.checker_idx = check_idx + + upstream.is_creating_checker = nil + + return checker +end + + +local function fetch_healthchecker(upstream) + if not upstream.checks then + return nil + end + + return create_checker(upstream) +end + + +local function set_upstream_scheme(ctx, upstream) + -- plugins like proxy-rewrite may already set ctx.upstream_scheme + if not ctx.upstream_scheme then + -- the old configuration doesn't have scheme field, so fallback to "http" + ctx.upstream_scheme = upstream.scheme or "http" + end + + ctx.var["upstream_scheme"] = ctx.upstream_scheme +end +_M.set_scheme = set_upstream_scheme + +local scheme_to_port = { + http = 80, + https = 443, + grpc = 80, + grpcs = 443, +} + + +_M.scheme_to_port = scheme_to_port + + +local 
function fill_node_info(up_conf, scheme, is_stream) + local nodes = up_conf.nodes + if up_conf.nodes_ref == nodes then + -- filled + return true + end + + local need_filled = false + for _, n in ipairs(nodes) do + if not is_stream and not n.port then + if up_conf.scheme ~= scheme then + return nil, "Can't detect upstream's scheme. " .. + "You should either specify a port in the node " .. + "or specify the upstream.scheme explicitly" + end + + need_filled = true + end + + if not n.priority then + need_filled = true + end + end + + if not need_filled then + up_conf.nodes_ref = nodes + return true + end + + core.log.debug("fill node info for upstream: ", + core.json.delay_encode(up_conf, true)) + + -- keep the original nodes for slow path in `compare_upstream_node()`, + -- can't use `core.table.deepcopy()` for whole `nodes` array here, + -- because `compare_upstream_node()` compare `metadata` of node by address. + up_conf.original_nodes = core.table.new(#nodes, 0) + for i, n in ipairs(nodes) do + up_conf.original_nodes[i] = core.table.clone(n) + if not n.port or not n.priority then + nodes[i] = core.table.clone(n) + + if not is_stream and not n.port then + nodes[i].port = scheme_to_port[scheme] + end + + -- fix priority for non-array nodes and nodes from service discovery + if not n.priority then + nodes[i].priority = 0 + end + end + end + + up_conf.nodes_ref = nodes + return true +end + + +function _M.set_by_route(route, api_ctx) + if api_ctx.upstream_conf then + -- upstream_conf has been set by traffic-split plugin + return + end + + local up_conf = api_ctx.matched_upstream + if not up_conf then + return 503, "missing upstream configuration in Route or Service" + end + -- core.log.info("up_conf: ", core.json.delay_encode(up_conf, true)) + + if up_conf.service_name then + if not discovery then + return 503, "discovery is uninitialized" + end + if not up_conf.discovery_type then + return 503, "discovery server need appoint" + end + + local dis = discovery[up_conf.discovery_type] + if not dis then + local err = "discovery " .. up_conf.discovery_type .. " is uninitialized" + return 503, err + end + + local new_nodes, err = dis.nodes(up_conf.service_name, up_conf.discovery_args) + if not new_nodes then + return HTTP_CODE_UPSTREAM_UNAVAILABLE, "no valid upstream node: " .. (err or "nil") + end + + local same = upstream_util.compare_upstream_node(up_conf, new_nodes) + if not same then + if not up_conf._nodes_ver then + up_conf._nodes_ver = 0 + end + up_conf._nodes_ver = up_conf._nodes_ver + 1 + + local pass, err = core.schema.check(core.schema.discovery_nodes, new_nodes) + if not pass then + return HTTP_CODE_UPSTREAM_UNAVAILABLE, "invalid nodes format: " .. err + end + + core.log.info("discover new upstream from ", up_conf.service_name, ", type ", + up_conf.discovery_type, ": ", + core.json.delay_encode(up_conf, true)) + end + + -- in case the value of new_nodes is the same as the old one, + -- but discovery lib return a new table for it. + -- for example, when watch loop of kubernetes discovery is broken or done, + -- it will fetch full data again and return a new table for every services. + up_conf.nodes = new_nodes + end + + local id = up_conf.parent.value.id + local conf_version = up_conf.parent.modifiedIndex + -- include the upstream object as part of the version, because the upstream will be changed + -- by service discovery or dns resolver. + set_directly(api_ctx, id, conf_version .. "#" .. tostring(up_conf) .. "#" + .. 
tostring(up_conf._nodes_ver or ''), up_conf) + + local nodes_count = up_conf.nodes and #up_conf.nodes or 0 + if nodes_count == 0 then + release_checker(up_conf.parent) + return HTTP_CODE_UPSTREAM_UNAVAILABLE, "no valid upstream node" + end + + if not is_http then + local ok, err = fill_node_info(up_conf, nil, true) + if not ok then + return 503, err + end + + local scheme = up_conf.scheme + if scheme == "tls" then + local ok, err = set_stream_upstream_tls() + if not ok then + return 503, err + end + + local sni = apisix_ssl.server_name() + if sni then + ngx_var.upstream_sni = sni + end + end + + local checker = fetch_healthchecker(up_conf) + api_ctx.up_checker = checker + return + end + + set_upstream_scheme(api_ctx, up_conf) + + local ok, err = fill_node_info(up_conf, api_ctx.upstream_scheme, false) + if not ok then + return 503, err + end + + local checker = fetch_healthchecker(up_conf) + api_ctx.up_checker = checker + + local scheme = up_conf.scheme + if (scheme == "https" or scheme == "grpcs") and up_conf.tls then + + local client_cert, client_key + if up_conf.tls.client_cert_id then + client_cert = api_ctx.upstream_ssl.cert + client_key = api_ctx.upstream_ssl.key + else + client_cert = up_conf.tls.client_cert + client_key = up_conf.tls.client_key + end + + -- the sni here is just for logging + local sni = api_ctx.var.upstream_host + local cert, err = apisix_ssl.fetch_cert(sni, client_cert) + if not ok then + return 503, err + end + + local key, err = apisix_ssl.fetch_pkey(sni, client_key) + if not ok then + return 503, err + end + + if scheme == "grpcs" then + api_ctx.upstream_grpcs_cert = cert + api_ctx.upstream_grpcs_key = key + else + local ok, err = set_upstream_tls_client_param(cert, key) + if not ok then + return 503, err + end + end + end + + return +end + + +function _M.set_grpcs_upstream_param(ctx) + if ctx.upstream_grpcs_cert then + local cert = ctx.upstream_grpcs_cert + local key = ctx.upstream_grpcs_key + local ok, err = set_upstream_tls_client_param(cert, key) + if not ok then + return 503, err + end + end +end + + +function _M.upstreams() + if not upstreams then + return nil, nil + end + + return upstreams.values, upstreams.conf_version +end + + +function _M.check_schema(conf) + return core.schema.check(core.schema.upstream, conf) +end + + +local function get_chash_key_schema(hash_on) + if not hash_on then + return nil, "hash_on is nil" + end + + if hash_on == "vars" then + return core.schema.upstream_hash_vars_schema + end + + if hash_on == "header" or hash_on == "cookie" then + return core.schema.upstream_hash_header_schema + end + + if hash_on == "consumer" then + return nil, nil + end + + if hash_on == "vars_combinations" then + return core.schema.upstream_hash_vars_combinations_schema + end + + return nil, "invalid hash_on type " .. hash_on +end + + +local function check_upstream_conf(in_dp, conf) + if not in_dp then + local ok, err = core.schema.check(core.schema.upstream, conf) + if not ok then + return false, "invalid configuration: " .. err + end + + if conf.nodes and not core.table.isarray(conf.nodes) then + local port + for addr,_ in pairs(conf.nodes) do + _, port = core.utils.parse_addr(addr) + if port then + if port < 1 or port > 65535 then + return false, "invalid port " .. tostring(port) + end + end + end + end + + local ssl_id = conf.tls and conf.tls.client_cert_id + if ssl_id then + local key = "/ssls/" .. ssl_id + local res, err = core.etcd.get(key) + if not res then + return nil, "failed to fetch ssl info by " + .. "ssl id [" .. ssl_id .. "]: " .. 
err + end + + if res.status ~= 200 then + return nil, "failed to fetch ssl info by " + .. "ssl id [" .. ssl_id .. "], " + .. "response code: " .. res.status + end + if res.body and res.body.node and + res.body.node.value and res.body.node.value.type ~= "client" then + + return nil, "failed to fetch ssl info by " + .. "ssl id [" .. ssl_id .. "], " + .. "wrong ssl type" + end + end + + -- encrypt the key in the admin + if conf.tls and conf.tls.client_key then + conf.tls.client_key = apisix_ssl.aes_encrypt_pkey(conf.tls.client_key) + end + end + + if is_http then + if conf.pass_host == "rewrite" and + (conf.upstream_host == nil or conf.upstream_host == "") + then + return false, "`upstream_host` can't be empty when `pass_host` is `rewrite`" + end + end + + if conf.tls and conf.tls.client_cert then + local cert = conf.tls.client_cert + local key = conf.tls.client_key + local ok, err = apisix_ssl.validate(cert, key) + if not ok then + return false, err + end + end + + if conf.type ~= "chash" then + return true + end + + if conf.hash_on ~= "consumer" and not conf.key then + return false, "missing key" + end + + local key_schema, err = get_chash_key_schema(conf.hash_on) + if err then + return false, "type is chash, err: " .. err + end + + if key_schema then + local ok, err = core.schema.check(key_schema, conf.key) + if not ok then + return false, "invalid configuration: " .. err + end + end + + return true +end + + +function _M.check_upstream_conf(conf) + return check_upstream_conf(false, conf) +end + + +local function filter_upstream(value, parent) + if not value then + return + end + + value.parent = parent + + if not is_http and value.scheme == "http" then + -- For L4 proxy, the default scheme is "tcp" + value.scheme = "tcp" + end + + if not value.nodes then + return + end + + local nodes = value.nodes + if core.table.isarray(nodes) then + for _, node in ipairs(nodes) do + local host = node.host + if not core.utils.parse_ipv4(host) and + not core.utils.parse_ipv6(host) then + parent.has_domain = true + break + end + end + else + local new_nodes = core.table.new(core.table.nkeys(nodes), 0) + for addr, weight in pairs(nodes) do + local host, port = core.utils.parse_addr(addr) + if not core.utils.parse_ipv4(host) and + not core.utils.parse_ipv6(host) then + parent.has_domain = true + end + local node = { + host = host, + port = port, + weight = weight, + } + core.table.insert(new_nodes, node) + end + value.nodes = new_nodes + end +end +_M.filter_upstream = filter_upstream + + +function _M.init_worker() + local err + upstreams, err = core.config.new("/upstreams", { + automatic = true, + item_schema = core.schema.upstream, + -- also check extra fields in the DP side + checker = function (item, schema_type) + return check_upstream_conf(true, item) + end, + filter = function(upstream) + upstream.has_domain = false + + filter_upstream(upstream.value, upstream) + + core.log.info("filter upstream: ", core.json.delay_encode(upstream, true)) + end, + }) + if not upstreams then + error("failed to create etcd instance for fetching upstream: " .. 
err) + return + end +end + + +function _M.get_by_id(up_id) + local upstream + local upstreams = core.config.fetch_created_obj("/upstreams") + if upstreams then + upstream = upstreams:get(tostring(up_id)) + end + + if not upstream then + core.log.error("failed to find upstream by id: ", up_id) + return nil + end + + if upstream.has_domain then + local err + upstream, err = upstream_util.parse_domain_in_up(upstream) + if err then + core.log.error("failed to get resolved upstream: ", err) + return nil + end + end + + core.log.info("parsed upstream: ", core.json.delay_encode(upstream, true)) + return upstream.dns_value or upstream.value +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/auth.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/auth.lua new file mode 100644 index 0000000..b7c9186 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/auth.lua @@ -0,0 +1,24 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local _M = {} + +function _M.is_running_under_multi_auth(ctx) + return ctx._plugin_name == "multi-auth" +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor-manager.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor-manager.lua new file mode 100644 index 0000000..4e97bd6 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor-manager.lua @@ -0,0 +1,158 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local batch_processor = require("apisix.utils.batch-processor") +local timer_at = ngx.timer.at +local pairs = pairs +local setmetatable = setmetatable + + +local _M = {} +local mt = { __index = _M } + + +function _M.new(name) + return setmetatable({ + stale_timer_running = false, + buffers = {}, + total_pushed_entries = 0, + name = name, + }, mt) +end + + +function _M:wrap_schema(schema) + local bp_schema = core.table.deepcopy(batch_processor.schema) + local properties = schema.properties + for k, v in pairs(bp_schema.properties) do + if not properties[k] then + properties[k] = v + end + -- don't touch if the plugin overrides the property + end + + properties.name.default = self.name + return schema +end + + +-- remove stale objects from the memory after timer expires +local function remove_stale_objects(premature, self) + if premature then + return + end + + for key, batch in pairs(self.buffers) do + if #batch.entry_buffer.entries == 0 and #batch.batch_to_process == 0 then + core.log.info("removing batch processor stale object, conf: ", + core.json.delay_encode(key)) + self.buffers[key] = nil + end + end + + self.stale_timer_running = false +end + + +local check_stale +do + local interval = 1800 + + function check_stale(self) + if not self.stale_timer_running then + -- run the timer every 30 mins if any log is present + timer_at(interval, remove_stale_objects, self) + self.stale_timer_running = true + end + end + + function _M.set_check_stale_interval(time) + interval = time + end +end + + +local function total_processed_entries(self) + local processed_entries = 0 + for _, log_buffer in pairs(self.buffers) do + processed_entries = processed_entries + log_buffer.processed_entries + end + return processed_entries +end + +function _M:add_entry(conf, entry, max_pending_entries) + if max_pending_entries then + local total_processed_entries_count = total_processed_entries(self) + if self.total_pushed_entries - total_processed_entries_count > max_pending_entries then + core.log.error("max pending entries limit exceeded. discarding entry.", + " total_pushed_entries: ", self.total_pushed_entries, + " total_processed_entries: ", total_processed_entries_count, + " max_pending_entries: ", max_pending_entries) + return + end + end + check_stale(self) + + local log_buffer = self.buffers[conf] + if not log_buffer then + return false + end + + log_buffer:push(entry) + self.total_pushed_entries = self.total_pushed_entries + 1 + return true +end + + +function _M:add_entry_to_new_processor(conf, entry, ctx, func, max_pending_entries) + if max_pending_entries then + local total_processed_entries_count = total_processed_entries(self) + if self.total_pushed_entries - total_processed_entries_count > max_pending_entries then + core.log.error("max pending entries limit exceeded. 
discarding entry.", + " total_pushed_entries: ", self.total_pushed_entries, + " total_processed_entries: ", total_processed_entries_count, + " max_pending_entries: ", max_pending_entries) + return + end + end + check_stale(self) + + local config = { + name = conf.name, + batch_max_size = conf.batch_max_size, + max_retry_count = conf.max_retry_count, + retry_delay = conf.retry_delay, + buffer_duration = conf.buffer_duration, + inactive_timeout = conf.inactive_timeout, + route_id = ctx.var.route_id, + server_addr = ctx.var.server_addr, + } + + local log_buffer, err = batch_processor:new(func, config) + if not log_buffer then + core.log.error("error when creating the batch processor: ", err) + return false + end + + log_buffer:push(entry) + self.buffers[conf] = log_buffer + self.total_pushed_entries = self.total_pushed_entries + 1 + return true +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor.lua new file mode 100644 index 0000000..eabee4f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/batch-processor.lua @@ -0,0 +1,235 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local setmetatable = setmetatable +local timer_at = ngx.timer.at +local ipairs = ipairs +local table = table +local now = ngx.now +local type = type +local batch_processor = {} +local batch_processor_mt = { + __index = batch_processor +} +local execute_func +local create_buffer_timer +local batch_metrics +local prometheus +if ngx.config.subsystem == "http" then + prometheus = require("apisix.plugins.prometheus.exporter") +end + + +local schema = { + type = "object", + properties = { + name = {type = "string", default = "log buffer"}, + max_retry_count = {type = "integer", minimum = 0, default= 0}, + retry_delay = {type = "integer", minimum = 0, default= 1}, + buffer_duration = {type = "integer", minimum = 1, default= 60}, + inactive_timeout = {type = "integer", minimum = 1, default= 5}, + batch_max_size = {type = "integer", minimum = 1, default= 1000}, + } +} +batch_processor.schema = schema + + +local function schedule_func_exec(self, delay, batch) + local hdl, err = timer_at(delay, execute_func, self, batch) + if not hdl then + core.log.error("failed to create process timer: ", err) + return + end +end + + +local function set_metrics(self, count) + -- add batch metric for every route + if batch_metrics and self.name and self.route_id and self.server_addr then + self.label = {self.name, self.route_id, self.server_addr} + batch_metrics:set(count, self.label) + end +end + + +local function slice_batch(batch, n) + local slice = {} + local idx = 1 + for i = n or 1, #batch do + slice[idx] = batch[i] + idx = idx + 1 + end + return slice +end + + +function execute_func(premature, self, batch) + if premature then + return + end + + -- In case of "err" and a valid "first_fail" batch processor considers, all first_fail-1 + -- entries have been successfully consumed and hence reschedule the job for entries with + -- index first_fail to #entries based on the current retry policy. 
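+    -- Descriptive note (a sketch of the expected contract, inferred from how the
+    -- result is used below): self.func is the consumer callback handed to
+    -- batch_processor:new(func, config); it is called with (entries, batch_max_size)
+    -- and may return ok, err and, optionally, first_fail, the 1-based index of the
+    -- first entry that was not consumed.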
+ local ok, err, first_fail = self.func(batch.entries, self.batch_max_size) + if not ok then + if first_fail then + core.log.error("Batch Processor[", self.name, "] failed to process entries [", + #batch.entries + 1 - first_fail, "/", #batch.entries ,"]: ", err) + batch.entries = slice_batch(batch.entries, first_fail) + self.processed_entries = self.processed_entries + first_fail - 1 + else + core.log.error("Batch Processor[", self.name, + "] failed to process entries: ", err) + end + + batch.retry_count = batch.retry_count + 1 + if batch.retry_count <= self.max_retry_count and #batch.entries > 0 then + schedule_func_exec(self, self.retry_delay, + batch) + else + self.processed_entries = self.processed_entries + #batch.entries + core.log.error("Batch Processor[", self.name,"] exceeded ", + "the max_retry_count[", batch.retry_count, + "] dropping the entries") + end + return + end + self.processed_entries = self.processed_entries + #batch.entries + core.log.debug("Batch Processor[", self.name, + "] successfully processed the entries") +end + + +local function flush_buffer(premature, self) + if premature then + return + end + + if now() - self.last_entry_t >= self.inactive_timeout or + now() - self.first_entry_t >= self.buffer_duration + then + core.log.debug("Batch Processor[", self.name ,"] buffer ", + "duration exceeded, activating buffer flush") + self:process_buffer() + self.is_timer_running = false + return + end + + -- buffer duration did not exceed or the buffer is active, + -- extending the timer + core.log.debug("Batch Processor[", self.name ,"] extending buffer timer") + create_buffer_timer(self) +end + + +function create_buffer_timer(self) + local hdl, err = timer_at(self.inactive_timeout, flush_buffer, self) + if not hdl then + core.log.error("failed to create buffer timer: ", err) + return + end + self.is_timer_running = true +end + + +function batch_processor:new(func, config) + local ok, err = core.schema.check(schema, config) + if not ok then + return nil, err + end + + if type(func) ~= "function" then + return nil, "Invalid argument, arg #1 must be a function" + end + + local processor = { + func = func, + buffer_duration = config.buffer_duration, + inactive_timeout = config.inactive_timeout, + max_retry_count = config.max_retry_count, + batch_max_size = config.batch_max_size, + retry_delay = config.retry_delay, + name = config.name, + batch_to_process = {}, + entry_buffer = {entries = {}, retry_count = 0}, + is_timer_running = false, + first_entry_t = 0, + last_entry_t = 0, + route_id = config.route_id, + server_addr = config.server_addr, + processed_entries = 0 + } + + return setmetatable(processor, batch_processor_mt) +end + +function batch_processor:push(entry) + -- if the batch size is one then immediately send for processing + if self.batch_max_size == 1 then + local batch = {entries = {entry}, retry_count = 0} + schedule_func_exec(self, 0, batch) + return + end + + if prometheus and prometheus.get_prometheus() and not batch_metrics and self.name + and self.route_id and self.server_addr then + batch_metrics = prometheus.get_prometheus():gauge("batch_process_entries", + "batch process remaining entries", + {"name", "route_id", "server_addr"}) + end + + local entries = self.entry_buffer.entries + table.insert(entries, entry) + set_metrics(self, #entries) + + if #entries == 1 then + self.first_entry_t = now() + end + self.last_entry_t = now() + + if self.batch_max_size <= #entries then + core.log.debug("Batch Processor[", self.name , + "] batch max size has exceeded") + 
self:process_buffer() + end + + if not self.is_timer_running then + create_buffer_timer(self) + end +end + + +function batch_processor:process_buffer() + -- If entries are present in the buffer move the entries to processing + if #self.entry_buffer.entries > 0 then + core.log.debug("transferring buffer entries to processing pipe line, ", + "buffercount[", #self.entry_buffer.entries ,"]") + self.batch_to_process[#self.batch_to_process + 1] = self.entry_buffer + self.entry_buffer = {entries = {}, retry_count = 0} + set_metrics(self, 0) + end + + for _, batch in ipairs(self.batch_to_process) do + schedule_func_exec(self, 0, batch) + end + + self.batch_to_process = {} +end + + +return batch_processor diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/content-decode.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/content-decode.lua new file mode 100644 index 0000000..c22c965 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/content-decode.lua @@ -0,0 +1,112 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local pcall = pcall +local zlib = require("ffi-zlib") +local str_buffer = require("string.buffer") +local is_br_libs_loaded, brotli = pcall(require, "brotli") +local content_decode_funcs = {} +local _M = {} + + +local function inflate_gzip(data) + local inputs = str_buffer.new():set(data) + local outputs = str_buffer.new() + + local read_inputs = function(size) + local data = inputs:get(size) + if data == "" then + return nil + end + return data + end + + local write_outputs = function(data) + return outputs:put(data) + end + + local ok, err = zlib.inflateGzip(read_inputs, write_outputs) + if not ok then + return nil, "inflate gzip err: " .. err + end + + return outputs:get() +end +content_decode_funcs.gzip = inflate_gzip + + +local function brotli_stream_decode(read_inputs, write_outputs) + -- read 64k data per times + local read_size = 64 * 1024 + local decompressor = brotli.decompressor:new() + + local chunk, ok, res + repeat + chunk = read_inputs(read_size) + if chunk then + ok, res = pcall(function() + return decompressor:decompress(chunk) + end) + else + ok, res = pcall(function() + return decompressor:finish() + end) + end + if not ok then + return false, res + end + write_outputs(res) + until not chunk + + return true, nil +end + + +local function brotli_decode(data) + local inputs = str_buffer.new():set(data) + local outputs = str_buffer.new() + + local read_inputs = function(size) + local data = inputs:get(size) + if data == "" then + return nil + end + return data + end + + local write_outputs = function(data) + return outputs:put(data) + end + + local ok, err = brotli_stream_decode(read_inputs, write_outputs) + if not ok then + return nil, "brotli decode err: " .. 
err + end + + return outputs:get() +end + +if is_br_libs_loaded then + content_decode_funcs.br = brotli_decode +end + + +function _M.dispatch_decoder(response_encoding) + return content_decode_funcs[response_encoding] +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/google-cloud-oauth.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/google-cloud-oauth.lua new file mode 100644 index 0000000..6cb3528 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/google-cloud-oauth.lua @@ -0,0 +1,130 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local core = require("apisix.core") +local type = type +local setmetatable = setmetatable + +local ngx_update_time = ngx.update_time +local ngx_time = ngx.time +local ngx_encode_args = ngx.encode_args + +local http = require("resty.http") +local jwt = require("resty.jwt") + + +local function get_timestamp() + ngx_update_time() + return ngx_time() +end + + +local _M = {} + + +function _M.generate_access_token(self) + if not self.access_token or get_timestamp() > self.access_token_expire_time - 60 then + self:refresh_access_token() + end + return self.access_token +end + + +function _M.refresh_access_token(self) + local http_new = http.new() + local res, err = http_new:request_uri(self.token_uri, { + ssl_verify = self.ssl_verify, + method = "POST", + body = ngx_encode_args({ + grant_type = "urn:ietf:params:oauth:grant-type:jwt-bearer", + assertion = self:generate_jwt_token() + }), + headers = { + ["Content-Type"] = "application/x-www-form-urlencoded", + }, + }) + + if not res then + core.log.error("failed to refresh google oauth access token, ", err) + return + end + + if res.status ~= 200 then + core.log.error("failed to refresh google oauth access token: ", res.body) + return + end + + res, err = core.json.decode(res.body) + if not res then + core.log.error("failed to parse google oauth response data: ", err) + return + end + + self.access_token = res.access_token + self.access_token_type = res.token_type + self.access_token_expire_time = get_timestamp() + res.expires_in +end + + +function _M.generate_jwt_token(self) + local payload = core.json.encode({ + iss = self.client_email, + aud = self.token_uri, + scope = self.scope, + iat = get_timestamp(), + exp = get_timestamp() + (60 * 60) + }) + + local jwt_token = jwt:sign(self.private_key, { + header = { alg = "RS256", typ = "JWT" }, + payload = payload, + }) + + return jwt_token +end + + +function _M.new(config, ssl_verify) + local oauth = { + client_email = config.client_email, + private_key = config.private_key, + project_id = config.project_id, + token_uri = config.token_uri or "https://oauth2.googleapis.com/token", + auth_uri = config.auth_uri or "https://accounts.google.com/o/oauth2/auth", + 
entries_uri = config.entries_uri, + access_token = nil, + access_token_type = nil, + access_token_expire_time = 0, + } + + oauth.ssl_verify = ssl_verify + + if config.scope then + if type(config.scope) == "string" then + oauth.scope = config.scope + end + + if type(config.scope) == "table" then + oauth.scope = core.table.concat(config.scope, " ") + end + end + + return setmetatable(oauth, { __index = _M }) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/log-util.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/log-util.lua new file mode 100644 index 0000000..c9cda1d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/log-util.lua @@ -0,0 +1,403 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local plugin = require("apisix.plugin") +local expr = require("resty.expr.v1") +local content_decode = require("apisix.utils.content-decode") +local ngx = ngx +local pairs = pairs +local ngx_now = ngx.now +local ngx_header = ngx.header +local os_date = os.date +local str_byte = string.byte +local str_sub = string.sub +local math_floor = math.floor +local ngx_update_time = ngx.update_time +local req_get_body_data = ngx.req.get_body_data +local is_http = ngx.config.subsystem == "http" +local req_get_body_file = ngx.req.get_body_file +local MAX_REQ_BODY = 524288 -- 512 KiB +local MAX_RESP_BODY = 524288 -- 512 KiB +local io = io + +local lru_log_format = core.lrucache.new({ + ttl = 300, count = 512 +}) + +local _M = {} + + +local function get_request_body(max_bytes) + local req_body = req_get_body_data() + if req_body then + if max_bytes and #req_body >= max_bytes then + req_body = str_sub(req_body, 1, max_bytes) + end + return req_body + end + + local file_name = req_get_body_file() + if not file_name then + return nil + end + + core.log.info("attempt to read body from file: ", file_name) + + local f, err = io.open(file_name, 'r') + if not f then + return nil, "fail to open file " .. 
err + end + + req_body = f:read(max_bytes) + f:close() + + return req_body +end + + +local function gen_log_format(format) + local log_format = {} + for k, var_name in pairs(format) do + if var_name:byte(1, 1) == str_byte("$") then + log_format[k] = {true, var_name:sub(2)} + else + log_format[k] = {false, var_name} + end + end + core.log.info("log_format: ", core.json.delay_encode(log_format)) + return log_format +end + + +local function get_custom_format_log(ctx, format, max_req_body_bytes) + local log_format = lru_log_format(format or "", nil, gen_log_format, format) + local entry = core.table.new(0, core.table.nkeys(log_format)) + for k, var_attr in pairs(log_format) do + if var_attr[1] then + local key = var_attr[2] + if key == "request_body" then + local max_req_body_bytes = max_req_body_bytes or MAX_REQ_BODY + local req_body, err = get_request_body(max_req_body_bytes) + if err then + core.log.error("fail to get request body: ", err) + else + entry[k] = req_body + end + else + entry[k] = ctx.var[var_attr[2]] + end + else + entry[k] = var_attr[2] + end + end + + local matched_route = ctx.matched_route and ctx.matched_route.value + if matched_route then + entry.service_id = matched_route.service_id + entry.route_id = matched_route.id + end + return entry +end +-- export the log getter so we can mock in tests +_M.get_custom_format_log = get_custom_format_log + + +-- for test +function _M.inject_get_custom_format_log(f) + get_custom_format_log = f + _M.get_custom_format_log = f +end + + +local function latency_details_in_ms(ctx) + local latency = (ngx_now() - ngx.req.start_time()) * 1000 + local upstream_latency, apisix_latency = nil, latency + + if ctx.var.upstream_response_time then + upstream_latency = ctx.var.upstream_response_time * 1000 + apisix_latency = apisix_latency - upstream_latency + + -- The latency might be negative, as Nginx uses different time measurements in + -- different metrics. + -- See https://github.com/apache/apisix/issues/5146#issuecomment-928919399 + if apisix_latency < 0 then + apisix_latency = 0 + end + end + + return latency, upstream_latency, apisix_latency +end +_M.latency_details_in_ms = latency_details_in_ms + + +local function get_full_log(ngx, conf) + local ctx = ngx.ctx.api_ctx + local var = ctx.var + local service_id + local route_id + local url = var.scheme .. "://" .. var.host .. ":" .. var.server_port + .. 
var.request_uri + local matched_route = ctx.matched_route and ctx.matched_route.value + + if matched_route then + service_id = matched_route.service_id or "" + route_id = matched_route.id + else + service_id = var.host + end + + local consumer + if ctx.consumer then + consumer = { + username = ctx.consumer.username + } + end + + local latency, upstream_latency, apisix_latency = latency_details_in_ms(ctx) + + local log = { + request = { + url = url, + uri = var.request_uri, + method = ngx.req.get_method(), + headers = ngx.req.get_headers(), + querystring = ngx.req.get_uri_args(), + size = var.request_length + }, + response = { + status = ngx.status, + headers = ngx.resp.get_headers(), + size = var.bytes_sent + }, + server = { + hostname = core.utils.gethostname(), + version = core.version.VERSION + }, + upstream = var.upstream_addr, + service_id = service_id, + route_id = route_id, + consumer = consumer, + client_ip = core.request.get_remote_client_ip(ngx.ctx.api_ctx), + start_time = ngx.req.start_time() * 1000, + latency = latency, + upstream_latency = upstream_latency, + apisix_latency = apisix_latency + } + + if ctx.resp_body then + log.response.body = ctx.resp_body + end + + if conf.include_req_body then + + local log_request_body = true + + if conf.include_req_body_expr then + + if not conf.request_expr then + local request_expr, err = expr.new(conf.include_req_body_expr) + if not request_expr then + core.log.error('generate request expr err ' .. err) + return log + end + conf.request_expr = request_expr + end + + local result = conf.request_expr:eval(ctx.var) + + if not result then + log_request_body = false + end + end + + if log_request_body then + local max_req_body_bytes = conf.max_req_body_bytes or MAX_REQ_BODY + local body, err = get_request_body(max_req_body_bytes) + if err then + core.log.error("fail to get request body: ", err) + return + end + log.request.body = body + end + end + + return log +end +_M.get_full_log = get_full_log + + +-- for test +function _M.inject_get_full_log(f) + get_full_log = f + _M.get_full_log = f +end + + +local function is_match(match, ctx) + local match_result + for _, m in pairs(match) do + local expr, _ = expr.new(m) + match_result = expr:eval(ctx.var) + if match_result then + break + end + end + + return match_result +end + + +function _M.get_log_entry(plugin_name, conf, ctx) + -- If the "match" configuration is set and the matching conditions are not met, + -- then do not log the message. 
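+    -- Illustrative example (not part of the original source): a conf.match such as
+    -- {{ {"arg_name", "==", "jack"} }} logs only requests whose "name" query argument
+    -- equals "jack"; each inner table is a resty.expr expression list that is_match()
+    -- evaluates against ctx.var.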
+ if conf.match and not is_match(conf.match, ctx) then + return + end + + local metadata = plugin.plugin_metadata(plugin_name) + core.log.info("metadata: ", core.json.delay_encode(metadata)) + + local entry + local customized = false + + local has_meta_log_format = metadata and metadata.value.log_format + and core.table.nkeys(metadata.value.log_format) > 0 + + if conf.log_format or has_meta_log_format then + customized = true + entry = get_custom_format_log(ctx, conf.log_format or metadata.value.log_format, + conf.max_req_body_bytes) + else + if is_http then + entry = get_full_log(ngx, conf) + else + -- get_full_log doesn't work in stream + core.log.error(plugin_name, "'s log_format is not set") + end + end + + return entry, customized +end + + +function _M.get_req_original(ctx, conf) + local data = { + ctx.var.request, "\r\n" + } + for k, v in pairs(ngx.req.get_headers()) do + core.table.insert_tail(data, k, ": ", v, "\r\n") + end + core.table.insert(data, "\r\n") + + if conf.include_req_body then + local max_req_body_bytes = conf.max_req_body_bytes or MAX_REQ_BODY + local req_body = get_request_body(max_req_body_bytes) + core.table.insert(data, req_body) + end + + return core.table.concat(data, "") +end + + +function _M.check_log_schema(conf) + if conf.include_req_body_expr then + local ok, err = expr.new(conf.include_req_body_expr) + if not ok then + return nil, "failed to validate the 'include_req_body_expr' expression: " .. err + end + end + if conf.include_resp_body_expr then + local ok, err = expr.new(conf.include_resp_body_expr) + if not ok then + return nil, "failed to validate the 'include_resp_body_expr' expression: " .. err + end + end + return true, nil +end + + +function _M.collect_body(conf, ctx) + if conf.include_resp_body then + local log_response_body = true + + if conf.include_resp_body_expr then + if not conf.response_expr then + local response_expr, err = expr.new(conf.include_resp_body_expr) + if not response_expr then + core.log.error('generate response expr err ' .. err) + return + end + conf.response_expr = response_expr + end + + if ctx.res_expr_eval_result == nil then + ctx.res_expr_eval_result = conf.response_expr:eval(ctx.var) + end + + if not ctx.res_expr_eval_result then + log_response_body = false + end + end + + if log_response_body then + local max_resp_body_bytes = conf.max_resp_body_bytes or MAX_RESP_BODY + + if ctx._resp_body_bytes and ctx._resp_body_bytes >= max_resp_body_bytes then + return + end + local final_body = core.response.hold_body_chunk(ctx, true, max_resp_body_bytes) + if not final_body then + return + end + + local response_encoding = ngx_header["Content-Encoding"] + if not response_encoding then + ctx.resp_body = final_body + return + end + + local decoder = content_decode.dispatch_decoder(response_encoding) + if not decoder then + core.log.warn("unsupported compression encoding type: ", + response_encoding) + ctx.resp_body = final_body + return + end + + local decoded_body, err = decoder(final_body) + if err ~= nil then + core.log.warn("try decode compressed data err: ", err) + ctx.resp_body = final_body + return + end + + ctx.resp_body = decoded_body + end + end +end + + +function _M.get_rfc3339_zulu_timestamp(timestamp) + ngx_update_time() + local now = timestamp or ngx_now() + local second = math_floor(now) + local millisecond = math_floor((now - second) * 1000) + return os_date("!%Y-%m-%dT%T.", second) .. 
core.string.format("%03dZ", millisecond) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/redis-schema.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/redis-schema.lua new file mode 100644 index 0000000..c9fdec4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/redis-schema.lua @@ -0,0 +1,81 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +local policy_to_additional_properties = { + redis = { + properties = { + redis_host = { + type = "string", minLength = 2 + }, + redis_port = { + type = "integer", minimum = 1, default = 6379, + }, + redis_username = { + type = "string", minLength = 1, + }, + redis_password = { + type = "string", minLength = 0, + }, + redis_database = { + type = "integer", minimum = 0, default = 0, + }, + redis_timeout = { + type = "integer", minimum = 1, default = 1000, + }, + redis_ssl = { + type = "boolean", default = false, + }, + redis_ssl_verify = { + type = "boolean", default = false, + }, + }, + required = {"redis_host"}, + }, + ["redis-cluster"] = { + properties = { + redis_cluster_nodes = { + type = "array", + minItems = 1, + items = { + type = "string", minLength = 2, maxLength = 100 + }, + }, + redis_password = { + type = "string", minLength = 0, + }, + redis_timeout = { + type = "integer", minimum = 1, default = 1000, + }, + redis_cluster_name = { + type = "string", + }, + redis_cluster_ssl = { + type = "boolean", default = false, + }, + redis_cluster_ssl_verify = { + type = "boolean", default = false, + }, + }, + required = {"redis_cluster_nodes", "redis_cluster_name"}, + }, +} + +local _M = { + schema = policy_to_additional_properties +} + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/redis.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/redis.lua new file mode 100644 index 0000000..423ad6d --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/redis.lua @@ -0,0 +1,74 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local redis_new = require("resty.redis").new +local core = require("apisix.core") + + +local _M = {version = 0.1} + +local function redis_cli(conf) + local red = redis_new() + local timeout = conf.redis_timeout or 1000 -- default 1sec + + red:set_timeouts(timeout, timeout, timeout) + + local sock_opts = { + ssl = conf.redis_ssl, + ssl_verify = conf.redis_ssl_verify + } + + local ok, err = red:connect(conf.redis_host, conf.redis_port or 6379, sock_opts) + if not ok then + core.log.error(" redis connect error, error: ", err) + return false, err + end + + local count + count, err = red:get_reused_times() + if 0 == count then + if conf.redis_password and conf.redis_password ~= '' then + local ok, err + if conf.redis_username then + ok, err = red:auth(conf.redis_username, conf.redis_password) + else + ok, err = red:auth(conf.redis_password) + end + if not ok then + return nil, err + end + end + + -- select db + if conf.redis_database ~= 0 then + local ok, err = red:select(conf.redis_database) + if not ok then + return false, "failed to change redis db, err: " .. err + end + end + elseif err then + -- core.log.info(" err: ", err) + return nil, err + end + return red, nil +end + + +function _M.new(conf) + return redis_cli(conf) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/rediscluster.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/rediscluster.lua new file mode 100644 index 0000000..e3bda4a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/rediscluster.lua @@ -0,0 +1,60 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local rediscluster = require("resty.rediscluster") +local core = require("apisix.core") +local ipairs = ipairs + +local _M = {version = 0.1} + +local function new_redis_cluster(conf, dict_name) + local config = { + name = conf.redis_cluster_name, + serv_list = {}, + read_timeout = conf.redis_timeout, + auth = conf.redis_password, + dict_name = dict_name, + connect_opts = { + ssl = conf.redis_cluster_ssl, + ssl_verify = conf.redis_cluster_ssl_verify, + } + } + + for i, conf_item in ipairs(conf.redis_cluster_nodes) do + local host, port, err = core.utils.parse_addr(conf_item) + if err then + return nil, "failed to parse address: " .. conf_item + .. " err: " .. err + end + + config.serv_list[i] = {ip = host, port = port} + end + + local red_cli, err = rediscluster:new(config) + if not red_cli then + return nil, "failed to new redis cluster: " .. 
err + end + + return red_cli +end + + +function _M.new(conf, dict_name) + return new_redis_cluster(conf, dict_name) +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/rfc5424.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/rfc5424.lua new file mode 100644 index 0000000..e046194 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/rfc5424.lua @@ -0,0 +1,114 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local LOG_EMERG = 0 -- system is unusable +local LOG_ALERT = 1 -- action must be taken immediately +local LOG_CRIT = 2 -- critical conditions +local LOG_ERR = 3 -- error conditions +local LOG_WARNING = 4 -- warning conditions +local LOG_NOTICE = 5 -- normal but significant condition +local LOG_INFO = 6 -- informational +local LOG_DEBUG = 7 -- debug-level messages + +local LOG_KERN = 0 -- kernel messages +local LOG_USER = 1 -- random user-level messages +local LOG_MAIL = 2 -- mail system +local LOG_DAEMON = 3 -- system daemons +local LOG_AUTH = 4 -- security/authorization messages +local LOG_SYSLOG = 5 -- messages generated internally by syslogd +local LOG_LPR = 6 -- line printer subsystem +local LOG_NEWS = 7 -- network news subsystem +local LOG_UUCP = 8 -- UUCP subsystem +local LOG_CRON = 9 -- clock daemon +local LOG_AUTHPRIV = 10 -- security/authorization messages (private) +local LOG_FTP = 11 -- FTP daemon +local LOG_LOCAL0 = 16 -- reserved for local use +local LOG_LOCAL1 = 17 -- reserved for local use +local LOG_LOCAL2 = 18 -- reserved for local use +local LOG_LOCAL3 = 19 -- reserved for local use +local LOG_LOCAL4 = 20 -- reserved for local use +local LOG_LOCAL5 = 21 -- reserved for local use +local LOG_LOCAL6 = 22 -- reserved for local use +local LOG_LOCAL7 = 23 -- reserved for local use + +local Facility = { + KERN = LOG_KERN, + USER = LOG_USER, + MAIL = LOG_MAIL, + DAEMON = LOG_DAEMON, + AUTH = LOG_AUTH, + SYSLOG = LOG_SYSLOG, + LPR = LOG_LPR, + NEWS = LOG_NEWS, + UUCP = LOG_UUCP, + CRON = LOG_CRON, + AUTHPRIV = LOG_AUTHPRIV, + FTP = LOG_FTP, + LOCAL0 = LOG_LOCAL0, + LOCAL1 = LOG_LOCAL1, + LOCAL2 = LOG_LOCAL2, + LOCAL3 = LOG_LOCAL3, + LOCAL4 = LOG_LOCAL4, + LOCAL5 = LOG_LOCAL5, + LOCAL6 = LOG_LOCAL6, + LOCAL7 = LOG_LOCAL7, +} + +local Severity = { + EMEGR = LOG_EMERG, + ALERT = LOG_ALERT, + CRIT = LOG_CRIT, + ERR = LOG_ERR, + WARNING = LOG_WARNING, + NOTICE = LOG_NOTICE, + INFO = LOG_INFO, + DEBUG = LOG_DEBUG, +} + +local log_util = require("apisix.utils.log-util") +local ipairs = ipairs +local str_format = string.format + +local _M = { version = 0.1 } + + +function _M.encode(facility, severity, hostname, appname, pid, msg, structured_data) + local pri = (Facility[facility] * 8 + Severity[severity]) + local t = log_util.get_rfc3339_zulu_timestamp() + if not hostname then + 
hostname = "-" + end + + if not appname then + appname = "-" + end + + local structured_data_str = "-" + + if structured_data then + structured_data_str = "[logservice" + for _, sd_param in ipairs(structured_data) do + structured_data_str = structured_data_str .. " " .. sd_param.name + .. "=\"" .. sd_param.value .. "\"" + end + structured_data_str = structured_data_str .. "]" + end + + return str_format("<%d>1 %s %s %s %d - %s %s\n", pri, t, hostname, + appname, pid, structured_data_str, msg) +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/router.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/router.lua new file mode 100644 index 0000000..8b6b604 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/router.lua @@ -0,0 +1,34 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local resty_router = require("resty.radixtree") + + +local _M = {} + +do + local router_opts = { + no_param_match = true + } + +function _M.new(routes) + return resty_router.new(routes, router_opts) +end + +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/utils/upstream.lua b/CloudronPackages/APISIX/apisix-source/apisix/utils/upstream.lua new file mode 100644 index 0000000..3c0b9a3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/utils/upstream.lua @@ -0,0 +1,133 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- +local core = require("apisix.core") +local ipmatcher = require("resty.ipmatcher") +local ngx_now = ngx.now +local ipairs = ipairs +local type = type +local tostring = tostring + + +local _M = {} + + +local function sort_by_key_host(a, b) + return a.host < b.host +end + + +local function compare_upstream_node(up_conf, new_t) + if up_conf == nil then + return false + end + + -- fast path + local old_t = up_conf.nodes + if old_t == new_t then + return true + end + + if type(old_t) ~= "table" then + return false + end + + -- slow path + core.log.debug("compare upstream nodes by value, ", + "old: ", tostring(old_t) , " ", core.json.delay_encode(old_t, true), + "new: ", tostring(new_t) , " ", core.json.delay_encode(new_t, true)) + + if up_conf.original_nodes then + -- if original_nodes is set, it means that the upstream nodes + -- are changed by `fill_node_info`, so we need to compare the new nodes with the + -- original nodes. + old_t = up_conf.original_nodes + end + + if #new_t ~= #old_t then + return false + end + + core.table.sort(old_t, sort_by_key_host) + core.table.sort(new_t, sort_by_key_host) + + for i = 1, #new_t do + local new_node = new_t[i] + local old_node = old_t[i] + for _, name in ipairs({"host", "port", "weight", "priority", "metadata"}) do + if new_node[name] ~= old_node[name] then + return false + end + end + end + + return true +end +_M.compare_upstream_node = compare_upstream_node + + +local function parse_domain_for_nodes(nodes) + local new_nodes = core.table.new(#nodes, 0) + for _, node in ipairs(nodes) do + local host = node.host + if not ipmatcher.parse_ipv4(host) and + not ipmatcher.parse_ipv6(host) then + local ip, err = core.resolver.parse_domain(host) + if ip then + local new_node = core.table.clone(node) + new_node.host = ip + new_node.domain = host + core.table.insert(new_nodes, new_node) + end + + if err then + core.log.error("dns resolver domain: ", host, " error: ", err) + end + else + core.table.insert(new_nodes, node) + end + end + return new_nodes +end +_M.parse_domain_for_nodes = parse_domain_for_nodes + + +function _M.parse_domain_in_up(up) + local nodes = up.value.nodes + local new_nodes, err = parse_domain_for_nodes(nodes) + if not new_nodes then + return nil, err + end + + local ok = compare_upstream_node(up.dns_value, new_nodes) + if ok then + return up + end + + if not up.orig_modifiedIndex then + up.orig_modifiedIndex = up.modifiedIndex + end + up.modifiedIndex = up.orig_modifiedIndex .. "#" .. ngx_now() + + up.dns_value = core.table.clone(up.value) + up.dns_value.nodes = new_nodes + core.log.info("resolve upstream which contain domain: ", + core.json.delay_encode(up, true)) + return up +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/apisix/wasm.lua b/CloudronPackages/APISIX/apisix-source/apisix/wasm.lua new file mode 100644 index 0000000..6a93728 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/apisix/wasm.lua @@ -0,0 +1,203 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local core = require("apisix.core") +local type = type +local support_wasm, wasm = pcall(require, "resty.proxy-wasm") +local ngx_var = ngx.var + + +local schema = { + type = "object", + properties = { + conf = { + oneOf = { + { type = "object", minProperties = 1}, + { type = "string", minLength = 1}, + } + }, + }, + required = {"conf"} +} +local _M = {} + + +local function check_schema(conf) + return core.schema.check(schema, conf) +end + + +local function get_plugin_ctx_key(ctx) + return ctx.conf_type .. "#" .. ctx.conf_id +end + +local function fetch_plugin_ctx(conf, ctx, plugin) + if not conf.plugin_ctxs then + conf.plugin_ctxs = {} + end + + local ctxs = conf.plugin_ctxs + local key = get_plugin_ctx_key(ctx) + local plugin_ctx = ctxs[key] + local err + if not plugin_ctx then + if type(conf.conf) == "table" then + plugin_ctx, err = wasm.on_configure(plugin, core.json.encode(conf.conf)) + elseif type(conf.conf) == "string" then + plugin_ctx, err = wasm.on_configure(plugin, conf.conf) + else + return nil, "invalid conf type" + end + if not plugin_ctx then + return nil, err + end + + ctxs[key] = plugin_ctx + end + + return plugin_ctx +end + + +local function http_request_wrapper(self, conf, ctx) + local name = self.name + local plugin_ctx, err = fetch_plugin_ctx(conf, ctx, self.plugin) + if not plugin_ctx then + core.log.error(name, ": failed to fetch wasm plugin ctx: ", err) + return 503 + end + + local ok, err = wasm.on_http_request_headers(plugin_ctx) + if not ok then + core.log.error(name, ": failed to run wasm plugin: ", err) + return 503 + end + + -- $wasm_process_req_body is predefined in ngx_tpl.lua + local handle_body = ngx_var.wasm_process_req_body + if handle_body ~= '' then + -- reset the flag so we can use it for the next Wasm plugin + -- use ngx.var to bypass the cache + ngx_var.wasm_process_req_body = '' + + local body, err = core.request.get_body() + if err ~= nil then + core.log.error(name, ": failed to get request body: ", err) + return 503 + end + + local ok, err = wasm.on_http_request_body(plugin_ctx, body, true) + if not ok then + core.log.error(name, ": failed to run wasm plugin: ", err) + return 503 + end + end +end + + +local function header_filter_wrapper(self, conf, ctx) + local name = self.name + local plugin_ctx, err = fetch_plugin_ctx(conf, ctx, self.plugin) + if not plugin_ctx then + core.log.error(name, ": failed to fetch wasm plugin ctx: ", err) + return 503 + end + + local ok, err = wasm.on_http_response_headers(plugin_ctx) + if not ok then + core.log.error(name, ": failed to run wasm plugin: ", err) + return 503 + end + + -- $wasm_process_resp_body is predefined in ngx_tpl.lua + local handle_body = ngx_var.wasm_process_resp_body + if handle_body ~= '' then + -- reset the flag so we can use it for the next Wasm plugin + -- use ngx.var to bypass the cache + ngx_var.wasm_process_resp_body = "" + ctx["wasm_" .. name .. "_process_resp_body"] = true + end +end + + +local function body_filter_wrapper(self, conf, ctx) + local name = self.name + + local enabled = ctx["wasm_" .. name .. 
"_process_resp_body"] + if not enabled then + return + end + + local plugin_ctx, err = fetch_plugin_ctx(conf, ctx, self.plugin) + if not plugin_ctx then + core.log.error(name, ": failed to fetch wasm plugin ctx: ", err) + return + end + + local ok, err = wasm.on_http_response_body(plugin_ctx) + if not ok then + core.log.error(name, ": failed to run wasm plugin: ", err) + return + end +end + + +function _M.require(attrs) + if not support_wasm then + return nil, "need to build APISIX-Runtime to support wasm" + end + + local name = attrs.name + local priority = attrs.priority + local plugin, err = wasm.load(name, attrs.file) + if not plugin then + return nil, err + end + + local mod = { + version = 0.1, + name = name, + priority = priority, + schema = schema, + check_schema = check_schema, + plugin = plugin, + type = "wasm", + } + + if attrs.http_request_phase == "rewrite" then + mod.rewrite = function (conf, ctx) + return http_request_wrapper(mod, conf, ctx) + end + else + mod.access = function (conf, ctx) + return http_request_wrapper(mod, conf, ctx) + end + end + + mod.header_filter = function (conf, ctx) + return header_filter_wrapper(mod, conf, ctx) + end + + mod.body_filter = function (conf, ctx) + return body_filter_wrapper(mod, conf, ctx) + end + + -- the returned values need to be the same as the Lua's 'require' + return true, mod +end + + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/autodocs/config.ld b/CloudronPackages/APISIX/apisix-source/autodocs/config.ld new file mode 100644 index 0000000..d5d3cca --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/autodocs/config.ld @@ -0,0 +1,11 @@ +project='Apache APISIX' +title='Plugin Develop Docs' +description='Functions in APISIX core' +format='markdown' +backtick_references = false +no_lua_ref = true +all = false +no_space_before_args = true +ext = "md" +template = true -- use the ldoc.ltp as markdown template +template_escape = ">" diff --git a/CloudronPackages/APISIX/apisix-source/autodocs/generate.sh b/CloudronPackages/APISIX/apisix-source/autodocs/generate.sh new file mode 100755 index 0000000..4b918cd --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/autodocs/generate.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -ex + +# workdir is the root of the apisix, use command: autodocs/generate.sh build to generate the docs, +# and the output will be in the workdir/autodocs/output/ directory. 
+build() { + # install dependencies + apt-get -y update --fix-missing + apt-get -y install lua5.1 liblua5.1-0-dev + curl https://raw.githubusercontent.com/apache/apisix/master/utils/linux-install-luarocks.sh -sL | bash - + luarocks install ldoc + + # generate docs + rm -rf autodocs/output || true + mkdir autodocs/output || true + cd autodocs/output + find ../../apisix/core -name "*.lua" -type f -exec ldoc -c ../config.ld {} \; + + # generate the markdown files' name + rm ../md_files_name.txt || true + output="./" + mds=$(ls $output) + for md in $mds + do + echo $md >> ../md_files_name.txt + done +} + +case_opt=$1 +case $case_opt in + (build) + build + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/autodocs/ldoc.ltp b/CloudronPackages/APISIX/apisix-source/autodocs/ldoc.ltp new file mode 100644 index 0000000..72d77c2 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/autodocs/ldoc.ltp @@ -0,0 +1,98 @@ +> local iter = ldoc.modules.iter +> local display_name = ldoc.display_name +> local function trim_newline(s) +> return (s:gsub("\n", "\r")) +> end +--- +title: APISIX Plugin Development Docs +--- + + + +## $(ldoc.title) + +### $(module.name) + +$(module.summary) $(module.description) +> +> for kind, items in module.kinds() do +> for item in items() do + +#### $(trim_newline(display_name(item))) +> if item.type == "function" then +> if item.summary and item.summary ~= '' then + +**Summary**: $(item.summary) +> end -- if item.summary +> if item.description and item.description ~= '' then + +**Description**: + +```text$(trim_newline(item.description)) +``` +> end -- if item.description +> end -- if item.type +> if item.params and #item.params > 0 then +> local subnames = module.kinds:type_of(item).subnames +> if subnames then + +**$(subnames)** + +> end -- if subnames +> -- print the parameters +> for par in iter(item.params) do +> local param = item:subparam(par) +> for p in iter(param) do +> local name = item:display_name_of(p) +> local tp = item:type_of_param(p) +* **$(name)**($(tp)):$(item.params.map[p]) +> if tp ~= '' then +> end -- if tp +> +> end -- for p +> end -- for par +> end -- if item.params and #item.params > 0 +> +> -- print the returns +> if item.retgroups then +> local groups = item.retgroups + +**Returns:** + +> for i, group in ldoc.ipairs(groups) do +> for r in group:iter() do +> local type, ctypes = item:return_type(r); +* `$(type)`: $(r.text) +> end -- for r in group:iter() +> end -- for i,group +> end -- if item.retgroups + +> if item.usage then +**Usage** + +> for usage in item.usage:iter() do +```lua +$(trim_newline(usage)) +``` +> end -- for usage in item.usage:iter() +> local usage = item.usage +> end -- if item.usage +> end -- end for item in items() +> end -- for kinds, items diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.crt b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.crt new file mode 100644 index 0000000..503f277 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.crt @@ -0,0 +1,27 @@ +-----BEGIN CERTIFICATE----- +MIIEojCCAwqgAwIBAgIJAK253pMhgCkxMA0GCSqGSIb3DQEBCwUAMFYxCzAJBgNV +BAYTAkNOMRIwEAYDVQQIDAlHdWFuZ0RvbmcxDzANBgNVBAcMBlpodUhhaTEPMA0G +A1UECgwGaXJlc3R5MREwDwYDVQQDDAh0ZXN0LmNvbTAgFw0xOTA2MjQyMjE4MDVa +GA8yMTE5MDUzMTIyMTgwNVowVjELMAkGA1UEBhMCQ04xEjAQBgNVBAgMCUd1YW5n +RG9uZzEPMA0GA1UEBwwGWmh1SGFpMQ8wDQYDVQQKDAZpcmVzdHkxETAPBgNVBAMM +CHRlc3QuY29tMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAyCM0rqJe 
+cvgnCfOw4fATotPwk5Ba0gC2YvIrO+gSbQkyxXF5jhZB3W6BkWUWR4oNFLLSqcVb +VDPitz/Mt46Mo8amuS6zTbQetGnBARzPLtmVhJfoeLj0efMiOepOSZflj9Ob4yKR +2bGdEFOdHPjm+4ggXU9jMKeLqdVvxll/JiVFBW5smPtW1Oc/BV5terhscJdOgmRr +abf9xiIis9/qVYfyGn52u9452V0owUuwP7nZ01jt6iMWEGeQU6mwPENgvj1olji2 +WjdG2UwpUVp3jp3l7j1ekQ6mI0F7yI+LeHzfUwiyVt1TmtMWn1ztk6FfLRqwJWR/ +Evm95vnfS3Le4S2ky3XAgn2UnCMyej3wDN6qHR1onpRVeXhrBajbCRDRBMwaNw/1 +/3Uvza8QKK10PzQR6OcQ0xo9psMkd9j9ts/dTuo2fzaqpIfyUbPST4GdqNG9NyIh +/B9g26/0EWcjyO7mYVkaycrtLMaXm1u9jyRmcQQI1cGrGwyXbrieNp63AgMBAAGj +cTBvMB0GA1UdDgQWBBSZtSvV8mBwl0bpkvFtgyiOUUcbszAfBgNVHSMEGDAWgBSZ +tSvV8mBwl0bpkvFtgyiOUUcbszAMBgNVHRMEBTADAQH/MB8GA1UdEQQYMBaCCHRl +c3QuY29tggoqLnRlc3QuY29tMA0GCSqGSIb3DQEBCwUAA4IBgQAHGEul/x7ViVgC +tC8CbXEslYEkj1XVr2Y4hXZXAXKd3W7V3TC8rqWWBbr6L/tsSVFt126V5WyRmOaY +1A5pju8VhnkhYxYfZALQxJN2tZPFVeME9iGJ9BE1wPtpMgITX8Rt9kbNlENfAgOl +PYzrUZN1YUQjX+X8t8/1VkSmyZysr6ngJ46/M8F16gfYXc9zFj846Z9VST0zCKob +rJs3GtHOkS9zGGldqKKCj+Awl0jvTstI4qtS1ED92tcnJh5j/SSXCAB5FgnpKZWy +hme45nBQj86rJ8FhN+/aQ9H9/2Ib6Q4wbpaIvf4lQdLUEcWAeZGW6Rk0JURwEog1 +7/mMgkapDglgeFx9f/XztSTrkHTaX4Obr+nYrZ2V4KOB4llZnK5GeNjDrOOJDk2y +IJFgBOZJWyS93dQfuKEj42hA79MuX64lMSCVQSjX+ipR289GQZqFrIhiJxLyA+Ve +U/OOcSRr39Kuis/JJ+DkgHYa/PWHZhnJQBxcqXXk1bJGw9BNbhM= +-----END CERTIFICATE----- diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.key b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.key new file mode 100644 index 0000000..7105067 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/apisix.key @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5AIBAAKCAYEAyCM0rqJecvgnCfOw4fATotPwk5Ba0gC2YvIrO+gSbQkyxXF5 +jhZB3W6BkWUWR4oNFLLSqcVbVDPitz/Mt46Mo8amuS6zTbQetGnBARzPLtmVhJfo +eLj0efMiOepOSZflj9Ob4yKR2bGdEFOdHPjm+4ggXU9jMKeLqdVvxll/JiVFBW5s +mPtW1Oc/BV5terhscJdOgmRrabf9xiIis9/qVYfyGn52u9452V0owUuwP7nZ01jt +6iMWEGeQU6mwPENgvj1olji2WjdG2UwpUVp3jp3l7j1ekQ6mI0F7yI+LeHzfUwiy +Vt1TmtMWn1ztk6FfLRqwJWR/Evm95vnfS3Le4S2ky3XAgn2UnCMyej3wDN6qHR1o +npRVeXhrBajbCRDRBMwaNw/1/3Uvza8QKK10PzQR6OcQ0xo9psMkd9j9ts/dTuo2 +fzaqpIfyUbPST4GdqNG9NyIh/B9g26/0EWcjyO7mYVkaycrtLMaXm1u9jyRmcQQI +1cGrGwyXbrieNp63AgMBAAECggGBAJM8g0duoHmIYoAJzbmKe4ew0C5fZtFUQNmu +O2xJITUiLT3ga4LCkRYsdBnY+nkK8PCnViAb10KtIT+bKipoLsNWI9Xcq4Cg4G3t +11XQMgPPgxYXA6m8t+73ldhxrcKqgvI6xVZmWlKDPn+CY/Wqj5PA476B5wEmYbNC +GIcd1FLl3E9Qm4g4b/sVXOHARF6iSvTR+6ol4nfWKlaXSlx2gNkHuG8RVpyDsp9c +z9zUqAdZ3QyFQhKcWWEcL6u9DLBpB/gUjyB3qWhDMe7jcCBZR1ALyRyEjmDwZzv2 +jlv8qlLFfn9R29UI0pbuL1eRAz97scFOFme1s9oSU9a12YHfEd2wJOM9bqiKju8y +DZzePhEYuTZ8qxwiPJGy7XvRYTGHAs8+iDlG4vVpA0qD++1FTpv06cg/fOdnwshE +OJlEC0ozMvnM2rZ2oYejdG3aAnUHmSNa5tkJwXnmj/EMw1TEXf+H6+xknAkw05nh +zsxXrbuFUe7VRfgB5ElMA/V4NsScgQKBwQDmMRtnS32UZjw4A8DsHOKFzugfWzJ8 +Gc+3sTgs+4dNIAvo0sjibQ3xl01h0BB2Pr1KtkgBYB8LJW/FuYdCRS/KlXH7PHgX +84gYWImhNhcNOL3coO8NXvd6+m+a/Z7xghbQtaraui6cDWPiCNd/sdLMZQ/7LopM +RbM32nrgBKMOJpMok1Z6zsPzT83SjkcSxjVzgULNYEp03uf1PWmHuvjO1yELwX9/ +goACViF+jst12RUEiEQIYwr4y637GQBy+9cCgcEA3pN9W5OjSPDVsTcVERig8++O +BFURiUa7nXRHzKp2wT6jlMVcu8Pb2fjclxRyaMGYKZBRuXDlc/RNO3uTytGYNdC2 +IptU5N4M7iZHXj190xtDxRnYQWWo/PR6EcJj3f/tc3Itm1rX0JfuI3JzJQgDb9Z2 +s/9/ub8RRvmQV9LM/utgyOwNdf5dyVoPcTY2739X4ZzXNH+CybfNa+LWpiJIVEs2 +txXbgZrhmlaWzwA525nZ0UlKdfktdcXeqke9eBghAoHARVTHFy6CjV7ZhlmDEtqE +U58FBOS36O7xRDdpXwsHLnCXhbFu9du41mom0W4UdzjgVI9gUqG71+SXrKr7lTc3 +dMHcSbplxXkBJawND/Q1rzLG5JvIRHO1AGJLmRgIdl8jNgtxgV2QSkoyKlNVbM2H +Wy6ZSKM03lIj74+rcKuU3N87dX4jDuwV0sPXjzJxL7NpR/fHwgndgyPcI14y2cGz +zMC44EyQdTw+B/YfMnoZx83xaaMNMqV6GYNnTHi0TO2TAoHBAKmdrh9WkE2qsr59 
+IoHHygh7Wzez+Ewr6hfgoEK4+QzlBlX+XV/9rxIaE0jS3Sk1txadk5oFDebimuSk +lQkv1pXUOqh+xSAwk5v88dBAfh2dnnSa8HFN3oz+ZfQYtnBcc4DR1y2X+fVNgr3i +nxruU2gsAIPFRnmvwKPc1YIH9A6kIzqaoNt1f9VM243D6fNzkO4uztWEApBkkJgR +4s/yOjp6ovS9JG1NMXWjXQPcwTq3sQVLnAHxZRJmOvx69UmK4QKBwFYXXjeXiU3d +bcrPfe6qNGjfzK+BkhWznuFUMbuxyZWDYQD5yb6ukUosrj7pmZv3BxKcKCvmONU+ +CHgIXB+hG+R9S2mCcH1qBQoP/RSm+TUzS/Bl2UeuhnFZh2jSZQy3OwryUi6nhF0u +LDzMI/6aO1ggsI23Ri0Y9ZtqVKczTkxzdQKR9xvoNBUufjimRlS80sJCEB3Qm20S +wzarryret/7GFW1/3cz+hTj9/d45i25zArr3Pocfpur5mfz3fJO8jg== +-----END RSA PRIVATE KEY----- diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/openssl.conf b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/openssl.conf new file mode 100644 index 0000000..c99afc4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/cert/openssl.conf @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[req] +distinguished_name = req_distinguished_name +x509_extensions = v3_req +prompt = no + +[req_distinguished_name] +C = CN +ST = GuangDong +L = ZhuHai +O = iresty +CN = test.com + +[v3_req] +subjectKeyIdentifier = hash +authorityKeyIdentifier = keyid,issuer +basicConstraints = CA:TRUE +subjectAltName = @alt_names + +[alt_names] +DNS.1 = test.com +DNS.2 = *.test.com + +## openssl genrsa -out apisix.key 3072 -nodes +## openssl req -new -x509 -key apisix.key -sha256 -config openssl.conf -out apisix.crt -days 36500 diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/nginx.conf b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/nginx.conf new file mode 100644 index 0000000..f35131c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/conf/nginx.conf @@ -0,0 +1,131 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +master_process on; + +worker_processes 1; + +error_log logs/error.log warn; +pid logs/nginx.pid; + +worker_rlimit_nofile 20480; + +events { + worker_connections 10620; +} + +worker_shutdown_timeout 3; + +http { + lua_package_path "$prefix/lua/?.lua;;"; + + log_format main '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time "$http_referer" "$http_user_agent" $upstream_addr $upstream_status $upstream_response_time'; + access_log logs/access.log main buffer=16384 flush=5; + + init_by_lua_block { + require "resty.core" + apisix = require("apisix") + apisix.http_init() + } + + init_worker_by_lua_block { + apisix.http_init_worker() + } + + upstream apisix_backend { + server 0.0.0.1; + balancer_by_lua_block { + apisix.http_balancer_phase() + } + + keepalive 320; + } + + server { + listen 9443 ssl; + ssl_certificate cert/apisix.crt; + ssl_certificate_key cert/apisix.key; + ssl_session_cache shared:SSL:1m; + + listen 9080; + + server_tokens off; + more_set_headers 'Server: APISIX web server'; + + location = /apisix/nginx_status { + allow 127.0.0.0/24; + access_log off; + stub_status; + } + + location /apisix/admin { + allow 127.0.0.0/24; + content_by_lua_block { + apisix.http_admin() + } + } + + ssl_certificate_by_lua_block { + apisix.http_ssl_phase() + } + + location / { + set $upstream_scheme 'http'; + set $upstream_host $http_host; + set $upstream_upgrade ''; + set $upstream_connection ''; + set $upstream_uri ''; + + access_by_lua_block { + apisix.http_access_phase() + } + + proxy_http_version 1.1; + proxy_set_header Host $upstream_host; + proxy_set_header Upgrade $upstream_upgrade; + proxy_set_header Connection $upstream_connection; + proxy_set_header X-Real-IP $remote_addr; + proxy_pass_header Server; + proxy_pass_header Date; + + ### the following x-forwarded-* headers is to send to upstream server + + set $var_x_forwarded_proto $scheme; + set $var_x_forwarded_host $host; + set $var_x_forwarded_port $server_port; + + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $var_x_forwarded_proto; + proxy_set_header X-Forwarded-Host $var_x_forwarded_host; + proxy_set_header X-Forwarded-Port $var_x_forwarded_port; + + # proxy pass + proxy_pass $upstream_scheme://apisix_backend$upstream_uri; + + header_filter_by_lua_block { + apisix.http_header_filter_phase() + } + + body_filter_by_lua_block { + apisix.http_body_filter_phase() + } + + log_by_lua_block { + apisix.http_log_phase() + } + } + } +} diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/lua/apisix.lua b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/lua/apisix.lua new file mode 100644 index 0000000..ea5bf15 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/fake-apisix/lua/apisix.lua @@ -0,0 +1,74 @@ +-- +-- Licensed to the Apache Software Foundation (ASF) under one or more +-- contributor license agreements. See the NOTICE file distributed with +-- this work for additional information regarding copyright ownership. +-- The ASF licenses this file to You under the Apache License, Version 2.0 +-- (the "License"); you may not use this file except in compliance with +-- the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- +local balancer = require "ngx.balancer" +local _M = {version = 0.1} + +function _M.http_init() +end + +function _M.http_init_worker() +end + +local function fake_fetch() + ngx.ctx.ip = "127.0.0.1" + ngx.ctx.port = 1980 +end + +function _M.http_access_phase() + local uri = ngx.var.uri + local host = ngx.var.host + local method = ngx.req.get_method() + local remote_addr = ngx.var.remote_addr + fake_fetch(uri, host, method, remote_addr) +end + +function _M.http_header_filter_phase() + if ngx.ctx then + -- do something + end +end + +function _M.http_body_filter_phase() + if ngx.ctx then + -- do something + end +end + +function _M.http_log_phase() + if ngx.ctx then + -- do something + end +end + +function _M.http_admin() +end + +function _M.http_ssl_phase() + if ngx.ctx then + -- do something + end +end + +function _M.http_balancer_phase() + local ok, err = balancer.set_current_peer(ngx.ctx.ip, ngx.ctx.port) + if not ok then + ngx.log(ngx.ERR, "failed to set the current peer: ", err) + return ngx.exit(500) + end +end + +return _M diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/run.sh b/CloudronPackages/APISIX/apisix-source/benchmark/run.sh new file mode 100755 index 0000000..44d0efa --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/run.sh @@ -0,0 +1,155 @@ +#! /bin/bash -x + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +if [ -n "$1" ]; then + worker_cnt=$1 +else + worker_cnt=1 +fi + +if [ -n "$2" ]; then + upstream_cnt=$2 +else + upstream_cnt=1 +fi + +mkdir -p benchmark/server/logs +mkdir -p benchmark/fake-apisix/logs + + +make init + +fake_apisix_cmd="openresty -p $PWD/benchmark/fake-apisix -c $PWD/benchmark/fake-apisix/conf/nginx.conf" +server_cmd="openresty -p $PWD/benchmark/server -c $PWD/benchmark/server/conf/nginx.conf" + +trap 'onCtrlC' INT +function onCtrlC () { + sudo killall wrk + sudo killall openresty + sudo ${fake_apisix_cmd} -s stop || exit 1 + sudo ${server_cmd} -s stop || exit 1 +} + +for up_cnt in $(seq 1 $upstream_cnt); +do + port=$((1979+$up_cnt)) + nginx_listen=$nginx_listen"listen $port;" + upstream_nodes=$upstream_nodes"\"127.0.0.1:$port\":1" + + if [ $up_cnt -lt $upstream_cnt ]; then + upstream_nodes=$upstream_nodes"," + fi +done + +sed -i "s/\- proxy-mirror/#\- proxy-mirror/g" conf/config-default.yaml +sed -i "s/\- proxy-cache/#\- proxy-cache/g" conf/config-default.yaml +sed -i "s/listen .*;/$nginx_listen/g" benchmark/server/conf/nginx.conf + +echo " +nginx_config: + worker_processes: ${worker_cnt} +" > conf/config.yaml + +sudo ${server_cmd} || exit 1 + +make run + +sleep 3 + +############################################# +echo -e "\n\napisix: $worker_cnt worker + $upstream_cnt upstream + no plugin" +admin_key=$(yq '.deployment.admin.admin_key[0].key' conf/config.yaml | sed 's/"//g') +curl http://127.0.0.1:9180/apisix/admin/routes/1 -H "X-API-KEY: $admin_key" -X PUT -d ' +{ + "uri": "/hello", + "plugins": { + }, + "upstream": { + "type": "roundrobin", + "nodes": { + '$upstream_nodes' + } + } +}' + +sleep 1 + +wrk -d 5 -c 16 http://127.0.0.1:9080/hello + +sleep 1 + +wrk -d 5 -c 16 http://127.0.0.1:9080/hello + +sleep 1 + +############################################# +echo -e "\n\napisix: $worker_cnt worker + $upstream_cnt upstream + 2 plugins (limit-count + prometheus)" +admin_key=$(yq '.deployment.admin.admin_key[0].key' conf/config.yaml | sed 's/"//g') +curl http://127.0.0.1:9180/apisix/admin/routes/1 -H "X-API-KEY: $admin_key" -X PUT -d ' +{ + "uri": "/hello", + "plugins": { + "limit-count": { + "count": 2000000000000, + "time_window": 60, + "rejected_code": 503, + "key": "remote_addr" + }, + "prometheus": {} + }, + "upstream": { + "type": "roundrobin", + "nodes": { + '$upstream_nodes' + } + } +}' + +sleep 3 + +wrk -d 5 -c 16 http://127.0.0.1:9080/hello + +sleep 1 + +wrk -d 5 -c 16 http://127.0.0.1:9080/hello + +sleep 1 + +make stop + +############################################# +echo -e "\n\nfake empty apisix server: $worker_cnt worker" + +sleep 1 + +sed -i "s/worker_processes [0-9]*/worker_processes $worker_cnt/g" benchmark/fake-apisix/conf/nginx.conf + +sudo ${fake_apisix_cmd} || exit 1 + +sleep 1 + +wrk -d 5 -c 16 http://127.0.0.1:9080/hello + +sleep 1 + +wrk -d 5 -c 16 http://127.0.0.1:9080/hello + +sudo ${fake_apisix_cmd} -s stop || exit 1 + +sudo ${server_cmd} -s stop || exit 1 diff --git a/CloudronPackages/APISIX/apisix-source/benchmark/server/conf/nginx.conf b/CloudronPackages/APISIX/apisix-source/benchmark/server/conf/nginx.conf new file mode 100644 index 0000000..6328d89 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/benchmark/server/conf/nginx.conf @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +master_process on; + +worker_processes 2; + +error_log logs/error.log warn; +pid logs/nginx.pid; + +worker_rlimit_nofile 20480; + +events { + accept_mutex off; + worker_connections 10620; +} + +worker_shutdown_timeout 3; + +http { + server { + listen 1980; + + access_log off; + location / { + echo_duplicate 1 "1234567890"; + } + } +} diff --git a/CloudronPackages/APISIX/apisix-source/bin/apisix b/CloudronPackages/APISIX/apisix-source/bin/apisix new file mode 100755 index 0000000..f4c75fb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/bin/apisix @@ -0,0 +1,48 @@ +#!/bin/bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +if [ -s './apisix/cli/apisix.lua' ]; then + # install via source + APISIX_LUA=./apisix/cli/apisix.lua +elif [ -s '/usr/local/share/lua/5.1/apisix/cli/apisix.lua' ]; then + # install via luarock + APISIX_LUA=/usr/local/share/lua/5.1/apisix/cli/apisix.lua +else + # install via official rpm or docker + APISIX_LUA=/usr/local/apisix/apisix/cli/apisix.lua +fi + +# find the openresty +OR_BIN=$(command -v openresty || exit 1) +OR_EXEC=${OR_BIN:-'/usr/local/openresty-debug/bin/openresty'} +OR_VER=$(openresty -v 2>&1 | awk -F '/' '{print $2}' | awk -F '.' '{print $1 * 100 + $2}') +LUA_VERSION=$(lua -v 2>&1| grep -E -o "Lua [0-9]+.[0-9]+") + +if [[ -e $OR_EXEC && "$OR_VER" -ge 119 ]]; then + # OpenResty version is >= 1.19, use luajit by default + ROOT=$(${OR_EXEC} -V 2>&1 | grep prefix | grep -Eo 'prefix=(.*)/nginx\s+--' | grep -Eo '/.*/') + # find the luajit binary of openresty + LUAJIT_BIN="$ROOT"/luajit/bin/luajit + + # use the luajit of openresty + echo "$LUAJIT_BIN $APISIX_LUA $*" + exec $LUAJIT_BIN $APISIX_LUA $* +else + echo "ERROR: Please check the version of OpenResty and Lua, OpenResty 1.19+ + LuaJIT is required for Apache APISIX." +fi diff --git a/CloudronPackages/APISIX/apisix-source/ci/backup-docker-images.sh b/CloudronPackages/APISIX/apisix-source/ci/backup-docker-images.sh new file mode 100644 index 0000000..bc87987 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/backup-docker-images.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +test_type=$1 + +echo "started backing up, time: $(date)" +mkdir docker-images-backup +sum=$(cat ci/pod/docker-compose.$test_type.yml | grep image | wc -l) +special_tag=$(cat ci/pod/docker-compose.$test_type.yml | grep image: | awk '{print $2}' | awk 'ORS=NR%"'$sum'"?" ":"\n"{print}') +echo special: $special_tag +openwhisk_tag="openwhisk/action-nodejs-v14:nightly openwhisk/standalone:nightly" +echo +echo special_tag: $special_tag +echo openwhisk_tag: $openwhisk_tag +echo +all_tags="${special_tag} ${openwhisk_tag}" +to_pull="" + +for tag in $all_tags +do + if ! ( docker inspect $tag &> /dev/null ) + then + to_pull="${to_pull} ${tag}" + fi +done + +echo to pull : $to_pull + +if [[ -n $to_pull ]] +then + echo "$to_pull" | xargs -P10 -n1 docker pull +fi + +docker save $special_tag $openwhisk_tag -o docker-images-backup/apisix-images.tar +echo "docker save done, time: $(date)" diff --git a/CloudronPackages/APISIX/apisix-source/ci/check_changelog_prs.ts b/CloudronPackages/APISIX/apisix-source/ci/check_changelog_prs.ts new file mode 100755 index 0000000..e2cad27 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/check_changelog_prs.ts @@ -0,0 +1,238 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { execSync } from 'child_process'; +import { readFileSync } from 'fs'; +import { join } from 'path'; + +// Types +interface Version { + tag: string; + ref: string; +} + +interface PR { + number: number; + title: string; + commit: string; +} + +// Configuration +const IGNORE_TYPES = [ + 'docs', + 'chore', + 'test', + 'ci' +]; + +const IGNORE_PRS = [ + // 3.9.0 + 10655, 10857, 10858, 10887, 10959, 11029, 11041, 11053, 11055, 11061, 10976, 10984, 11025, + // 3.10.0 + 11105, 11128, 11169, 11171, 11280, 11333, 11081, 11202, 11469, + // 3.11.0 + 11463, 11570, + // 3.12.0 + 11769, 11816, 11881, 11905, 11924, 11926, 11973, 11991, 11992, 11829, + // 3.13.0 + 9945, 11420, 11765, 12036, 12048, 12057, 12076, 12122, 12123, 12168, 12199, 12218, 12225, 12272, 12277, 12300, 12306, 12329, 12353, 12364, 12375, 12358 +]; + + +function getGitRef(version: string): string { + try { + execSync(`git rev-parse ${version}`, { stdio: 'ignore' }); + return version; + } catch { + return 'HEAD'; + } +} + +function extractVersionsFromChangelog(): Version[] { + const changelogPath = join(process.cwd(), '..', 'CHANGELOG.md'); + const content = readFileSync(changelogPath, 'utf-8'); + const versionRegex = /^## ([0-9]+\.[0-9]+\.[0-9]+)/gm; + const versions: Version[] = []; + let match; + + while ((match = versionRegex.exec(content)) !== null) { + const tag = match[1]; + versions.push({ + tag, + ref: getGitRef(tag) + }); + } + + return versions; +} + +function extractPRsFromChangelog(startTag: string, endTag: string): number[] { + const changelogPath = join(process.cwd(), '..', 'CHANGELOG.md'); + const content = readFileSync(changelogPath, 'utf-8'); + const lines = content.split('\n'); + let inRange = false; + const prs: number[] = []; + + for (const line of lines) { + if (line.startsWith(`## ${startTag}`)) { + inRange = true; + continue; + } + if (inRange && line.startsWith(`## ${endTag}`)) { + break; + } + if (inRange) { + const match = line.match(/#(\d+)/); + if (match) { + prs.push(parseInt(match[1], 10)); + } + } + } + + return prs.sort((a, b) => a - b); +} + +function shouldIgnoreCommitMessage(message: string): boolean { + // Extract the commit message part (remove the commit hash) + const messagePart = message.split(' ').slice(1).join(' '); + + // Check if the message starts with any of the ignored types + for (const type of IGNORE_TYPES) { + // Check simple format: "type: message" + if (messagePart.startsWith(`${type}:`)) { + return true; + } + // Check format with scope: "type(scope): message" + if (messagePart.startsWith(`${type}(`)) { + const closingBracketIndex = messagePart.indexOf('):'); + if (closingBracketIndex !== -1) { + return true; + } + } + } + return false; +} + +function extractPRsFromGitLog(oldRef: string, newRef: string): PR[] { + const log = execSync(`git log ${oldRef}..${newRef} --oneline`, { encoding: 'utf-8' }); + const prs: PR[] = []; + + for (const line of log.split('\n')) { + if (!line.trim()) continue; + + // Check if this commit should be ignored + if (shouldIgnoreCommitMessage(line)) continue; + + // Find PR number + const prMatch = line.match(/#(\d+)/); + if (prMatch) { + const prNumber = parseInt(prMatch[1], 10); + if (!IGNORE_PRS.includes(prNumber)) { + prs.push({ + number: prNumber, + title: line, + commit: line.split(' ')[0] + }); + } + } + } + + return prs.sort((a, b) => a.number - b.number); +} + +function findMissingPRs(changelogPRs: number[], gitPRs: PR[]): PR[] { + const changelogPRSet = new Set(changelogPRs); + return gitPRs.filter(pr => 
!changelogPRSet.has(pr.number)); +} + +function versionGreaterThan(v1: string, v2: string): boolean { + // Remove 'v' prefix if present + const cleanV1 = v1.replace(/^v/, ''); + const cleanV2 = v2.replace(/^v/, ''); + + // Split version strings into arrays of numbers + const v1Parts = cleanV1.split('.').map(Number); + const v2Parts = cleanV2.split('.').map(Number); + + // Compare each part + for (let i = 0; i < Math.max(v1Parts.length, v2Parts.length); i++) { + const v1Part = v1Parts[i] || 0; + const v2Part = v2Parts[i] || 0; + + if (v1Part > v2Part) return true; + if (v1Part < v2Part) return false; + } + + // If all parts are equal, return false + return false; +} + +// Main function +async function main() { + try { + const versions = extractVersionsFromChangelog(); + let hasErrors = false; + + for (let i = 0; i < versions.length - 1; i++) { + const newVersion = versions[i]; + const oldVersion = versions[i + 1]; + + // Skip if new version is less than or equal to 3.8.0 + if (!versionGreaterThan(newVersion.tag, '3.8.0')) { + continue; + } + + console.log(`\n=== Checking changes between ${newVersion.tag} (${newVersion.ref}) and ${oldVersion.tag} (${oldVersion.ref}) ===`); + + const changelogPRs = extractPRsFromChangelog(newVersion.tag, oldVersion.tag); + const gitPRs = extractPRsFromGitLog(oldVersion.ref, newVersion.ref); + const missingPRs = findMissingPRs(changelogPRs, gitPRs); + + console.log(`\n=== PR Comparison Results for ${newVersion.tag} ===`); + + if (missingPRs.length === 0) { + console.log(`\n✅ All PRs are included in CHANGELOG.md for version ${newVersion.tag}`); + } else { + console.log(`\n❌ Missing PRs in CHANGELOG.md for version ${newVersion.tag} (sorted):`); + missingPRs.forEach(pr => { + console.log(` #${pr.number}`); + }); + + console.log(`\nDetailed information about missing PRs for version ${newVersion.tag}:`); + missingPRs.forEach(pr => { + console.log(`\nPR #${pr.number}:`); + console.log(` - ${pr.title}`); + console.log(` - PR URL: https://github.com/apache/apisix/pull/${pr.number}`); + }); + + console.log('Note: If you confirm that a PR should not appear in the changelog, please add its number to the IGNORE_PRS array in this script.'); + hasErrors = true; + } + } + + if (hasErrors) { + process.exit(1); + } + } catch (error) { + console.error('Error:', error); + process.exit(1); + } +} + +(async () => { + await main(); +})(); diff --git a/CloudronPackages/APISIX/apisix-source/ci/common.sh b/CloudronPackages/APISIX/apisix-source/ci/common.sh new file mode 100644 index 0000000..3850460 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/common.sh @@ -0,0 +1,217 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +set -ex + +export_version_info() { + source ./.requirements +} + +export_or_prefix() { + export OPENRESTY_PREFIX="/usr/local/openresty" + + export PATH=$OPENRESTY_PREFIX/nginx/sbin:$OPENRESTY_PREFIX/luajit/bin:$OPENRESTY_PREFIX/bin:$PATH + export OPENSSL_PREFIX=$OPENRESTY_PREFIX/openssl3 + export OPENSSL_BIN=$OPENSSL_PREFIX/bin/openssl +} + +create_lua_deps() { + echo "Create lua deps" + + make deps + + # just for jwt-auth test + luarocks install lua-resty-openssl --tree deps + + # maybe reopen this feature later + # luarocks install luacov-coveralls --tree=deps --local > build.log 2>&1 || (cat build.log && exit 1) + # for github action cache + chmod -R a+r deps +} + +rerun_flaky_tests() { + if tail -1 "$1" | grep "Result: PASS"; then + exit 0 + fi + + if ! tail -1 "$1" | grep "Result: FAIL"; then + # CI failure not caused by failed test + exit 1 + fi + + local tests + local n_test + tests="$(awk '/^t\/.*.t\s+\(.+ Failed: .+\)/{ print $1 }' "$1")" + n_test="$(echo "$tests" | wc -l)" + if [ "$n_test" -gt 10 ]; then + # too many tests failed + exit 1 + fi + + echo "Rerun $(echo "$tests" | xargs)" + FLUSH_ETCD=1 prove --timer -I./test-nginx/lib -I./ $(echo "$tests" | xargs) +} + +install_curl () { + CURL_VERSION="8.13.0" + wget -q https://github.com/stunnel/static-curl/releases/download/${CURL_VERSION}/curl-linux-x86_64-glibc-${CURL_VERSION}.tar.xz + tar -xf curl-linux-x86_64-glibc-${CURL_VERSION}.tar.xz + sudo cp curl /usr/bin + curl -V +} + +install_apisix_runtime() { + export runtime_version=${APISIX_RUNTIME} + wget "https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/build-apisix-runtime.sh" + chmod +x build-apisix-runtime.sh + ./build-apisix-runtime.sh latest +} + +install_grpcurl () { + # For more versions, visit https://github.com/fullstorydev/grpcurl/releases + GRPCURL_VERSION="1.8.5" + wget -q https://github.com/fullstorydev/grpcurl/releases/download/v${GRPCURL_VERSION}/grpcurl_${GRPCURL_VERSION}_linux_x86_64.tar.gz + tar -xvf grpcurl_${GRPCURL_VERSION}_linux_x86_64.tar.gz -C /usr/local/bin +} + +install_vault_cli () { + VAULT_VERSION="1.9.0" + wget -q https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip + unzip vault_${VAULT_VERSION}_linux_amd64.zip && mv ./vault /usr/local/bin +} + +install_nodejs () { + curl -fsSL https://raw.githubusercontent.com/tj/n/master/bin/n | bash -s install --cleanup lts + corepack enable pnpm +} + +install_brotli () { + local BORTLI_VERSION="1.1.0" + wget -q https://github.com/google/brotli/archive/refs/tags/v${BORTLI_VERSION}.zip + unzip v${BORTLI_VERSION}.zip && cd ./brotli-${BORTLI_VERSION} && mkdir build && cd build + local CMAKE=$(command -v cmake3 > /dev/null 2>&1 && echo cmake3 || echo cmake) + ${CMAKE} -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local/brotli .. + sudo ${CMAKE} --build . --config Release --target install + if [ -d "/usr/local/brotli/lib64" ]; then + echo /usr/local/brotli/lib64 | sudo tee /etc/ld.so.conf.d/brotli.conf + else + echo /usr/local/brotli/lib | sudo tee /etc/ld.so.conf.d/brotli.conf + fi + sudo ldconfig + cd ../.. 
+ rm -rf brotli-${BORTLI_VERSION} +} + +set_coredns() { + # test a domain name is configured as upstream + echo "127.0.0.1 test.com" | sudo tee -a /etc/hosts + echo "::1 ipv6.local" | sudo tee -a /etc/hosts + # test certificate verification + echo "127.0.0.1 admin.apisix.dev" | sudo tee -a /etc/hosts + cat /etc/hosts # check GitHub Action's configuration + + # override DNS configures + if [ -f "/etc/netplan/50-cloud-init.yaml" ]; then + sudo pip3 install yq + + tmp=$(mktemp) + yq -y '.network.ethernets.eth0."dhcp4-overrides"."use-dns"=false' /etc/netplan/50-cloud-init.yaml | \ + yq -y '.network.ethernets.eth0."dhcp4-overrides"."use-domains"=false' | \ + yq -y '.network.ethernets.eth0.nameservers.addresses[0]="8.8.8.8"' | \ + yq -y '.network.ethernets.eth0.nameservers.search[0]="apache.org"' > $tmp + mv $tmp /etc/netplan/50-cloud-init.yaml + cat /etc/netplan/50-cloud-init.yaml + sudo netplan apply + sleep 3 + + sudo mv /etc/resolv.conf /etc/resolv.conf.bak + sudo ln -s /run/systemd/resolve/resolv.conf /etc/ + fi + cat /etc/resolv.conf + + mkdir -p build-cache + + if [ ! -f "build-cache/coredns_1_8_1" ]; then + wget -q https://github.com/coredns/coredns/releases/download/v1.8.1/coredns_1.8.1_linux_amd64.tgz + tar -xvf coredns_1.8.1_linux_amd64.tgz + mv coredns build-cache/ + + touch build-cache/coredns_1_8_1 + fi + + pushd t/coredns || exit 1 + ../../build-cache/coredns -dns.port=1053 & + popd || exit 1 + + touch build-cache/test_resolve.conf + echo "nameserver 127.0.0.1:1053" > build-cache/test_resolve.conf +} + +GRPC_SERVER_EXAMPLE_VER=20210819 + +linux_get_dependencies () { + apt update + apt install -y cpanminus build-essential libncurses5-dev libreadline-dev libssl-dev perl libpcre3 libpcre3-dev xz-utils + apt remove -y curl + apt-get install -y libyaml-dev + wget https://github.com/mikefarah/yq/releases/download/3.4.1/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq + + # install curl with http3 support + install_curl +} + +function start_grpc_server_example() { + ./t/grpc_server_example/grpc_server_example \ + -grpc-address :10051 -grpcs-address :10052 -grpcs-mtls-address :10053 -grpc-http-address :10054 \ + -crt ./t/certs/apisix.crt -key ./t/certs/apisix.key -ca ./t/certs/mtls_ca.crt \ + > grpc_server_example.log 2>&1 & + + for (( i = 0; i <= 10; i++ )); do + sleep 0.5 + GRPC_PROC=`ps -ef | grep grpc_server_example | grep -v grep || echo "none"` + if [[ $GRPC_PROC == "none" || "$i" -eq 10 ]]; then + echo "failed to start grpc_server_example" + ss -antp | grep 1005 || echo "no proc listen port 1005x" + cat grpc_server_example.log + + exit 1 + fi + + ss -lntp | grep 10051 | grep grpc_server && break + done +} + + +function start_sse_server_example() { + # build sse_server_example + pushd t/sse_server_example + go build + ./sse_server_example 7737 2>&1 & + + for (( i = 0; i <= 10; i++ )); do + sleep 0.5 + SSE_PROC=`ps -ef | grep sse_server_example | grep -v grep || echo "none"` + if [[ $SSE_PROC == "none" || "$i" -eq 10 ]]; then + echo "failed to start sse_server_example" + ss -antp | grep 7737 || echo "no proc listen port 7737" + exit 1 + else + break + fi + done + popd +} diff --git a/CloudronPackages/APISIX/apisix-source/ci/free_disk_space.sh b/CloudronPackages/APISIX/apisix-source/ci/free_disk_space.sh new file mode 100644 index 0000000..462258e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/free_disk_space.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# GitHub Action CI runner comes with a limited disk space, due to several reasons +# it may become full. For example, caching docker images creates an archive of +# several GBs of size, this sometimes leads to disk usage becoming full. +# To keep CI functional, we delete large directories that we do not need. + +echo "==============================================================================" +echo "Freeing up disk space on CI system" +echo "==============================================================================" + +echo "Initial disk usage:" +df -h + +echo "Removing large directories and runtimes..." +sudo rm -rf /usr/local/lib/android /usr/share/dotnet /opt/ghc /usr/local/.ghcup /usr/share/swift + +echo "Removing large packages and performing clean-up..." +sudo apt-get remove -y '^aspnetcore-.*' '^dotnet-.*' '^llvm-.*' 'php.*' '^mongodb-.*' '^mysql-.*' \ +azure-cli google-chrome-stable firefox powershell mono-devel libgl1-mesa-dri google-cloud-sdk google-cloud-cli --fix-missing +sudo apt-get autoremove -y +sudo apt-get clean + +echo "Removing Docker images..." +sudo docker image prune --all --force + +echo "Removing and Swap storage..." +sudo swapoff -a +sudo rm -f /mnt/swapfile + +echo "Final disk usage:" +df -h diff --git a/CloudronPackages/APISIX/apisix-source/ci/init-common-test-service.sh b/CloudronPackages/APISIX/apisix-source/ci/init-common-test-service.sh new file mode 100755 index 0000000..602f01a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/init-common-test-service.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# prepare vault kv engine +sleep 3s +docker exec -i vault sh -c "VAULT_TOKEN='root' VAULT_ADDR='http://0.0.0.0:8200' vault secrets enable -path=kv -version=1 kv" + +# prepare localstack +sleep 3s +docker exec -i localstack sh -c "awslocal secretsmanager create-secret --name apisix-key --description 'APISIX Secret' --secret-string '{\"jack\":\"value\"}'" +sleep 3s +docker exec -i localstack sh -c "awslocal secretsmanager create-secret --name apisix-mysql --description 'APISIX Secret' --secret-string 'secret'" diff --git a/CloudronPackages/APISIX/apisix-source/ci/init-last-test-service.sh b/CloudronPackages/APISIX/apisix-source/ci/init-last-test-service.sh new file mode 100755 index 0000000..6943490 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/init-last-test-service.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +before() { + # generating SSL certificates for Kafka + sudo keytool -genkeypair -keyalg RSA -dname "CN=127.0.0.1" -alias 127.0.0.1 -keystore ./ci/pod/kafka/kafka-server/selfsigned.jks -validity 365 -keysize 2048 -storepass changeit +} + +after() { + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2 + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3 + docker exec -i apache-apisix-kafka-server2-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4 + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test-consumer + # create messages for test-consumer + for i in `seq 30` + do + docker exec -i apache-apisix-kafka-server1-1 bash -c "echo "testmsg$i" | /opt/bitnami/kafka/bin/kafka-console-producer.sh --bootstrap-server 127.0.0.1:9092 --topic test-consumer" + echo "Produces messages to the test-consumer topic, msg: testmsg$i" + done + echo "Kafka service initialization completed" +} + +case $1 in + 'after') + after + ;; + 'before') + before + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/ci/init-plugin-test-service.sh b/CloudronPackages/APISIX/apisix-source/ci/init-plugin-test-service.sh new file mode 100755 index 0000000..2da891e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/init-plugin-test-service.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +after() { + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 1 --topic test2 + docker exec -i apache-apisix-kafka-server1-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server1:2181 --replication-factor 1 --partitions 3 --topic test3 + docker exec -i apache-apisix-kafka-server2-1 /opt/bitnami/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper-server2:2181 --replication-factor 1 --partitions 1 --topic test4 + + # prepare openwhisk env + docker pull openwhisk/action-nodejs-v14:1.20.0 + docker run --rm -d --name openwhisk -p 3233:3233 -p 3232:3232 -v /var/run/docker.sock:/var/run/docker.sock openwhisk/standalone:1.0.0 + docker exec -i openwhisk waitready + docker exec -i openwhisk bash -c "wsk package create pkg" + docker exec -i openwhisk bash -c "wsk action update /guest/pkg/testpkg <(echo 'function main(args){return {\"hello\": \"world\"}}') --kind nodejs:14" + docker exec -i openwhisk bash -c "wsk action update test <(echo 'function main(args){return {\"hello\": \"test\"}}') --kind nodejs:14" + docker exec -i openwhisk bash -c "wsk action update test-params <(echo 'function main(args){return {\"hello\": args.name || \"test\"}}') --kind nodejs:14" + docker exec -i openwhisk bash -c "wsk action update test-statuscode <(echo 'function main(args){return {\"statusCode\": 407}}') --kind nodejs:14" + docker exec -i openwhisk bash -c "wsk action update test-headers <(echo 'function main(args){return {\"headers\": {\"test\":\"header\"}}}') --kind nodejs:14" + docker exec -i openwhisk bash -c "wsk action update test-body <(echo 'function main(args){return {\"body\": {\"test\":\"body\"}}}') --kind nodejs:14" + + + docker exec -i rmqnamesrv rm /home/rocketmq/rocketmq-4.6.0/conf/tools.yml + docker exec -i rmqnamesrv /home/rocketmq/rocketmq-4.6.0/bin/mqadmin updateTopic -n rocketmq_namesrv:9876 -t test -c DefaultCluster + docker exec -i rmqnamesrv /home/rocketmq/rocketmq-4.6.0/bin/mqadmin updateTopic -n rocketmq_namesrv:9876 -t test2 -c DefaultCluster + docker exec -i rmqnamesrv /home/rocketmq/rocketmq-4.6.0/bin/mqadmin updateTopic -n rocketmq_namesrv:9876 -t test3 -c DefaultCluster + docker exec -i rmqnamesrv /home/rocketmq/rocketmq-4.6.0/bin/mqadmin updateTopic -n rocketmq_namesrv:9876 -t test4 -c DefaultCluster + + # wait for keycloak ready + bash -c 'while true; do curl -s localhost:8080 &>/dev/null; ret=$?; [[ $ret -eq 0 ]] && break; sleep 3; done' + + # install jq + wget https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -O jq + chmod +x jq + docker cp jq apisix_keycloak:/usr/bin/ + + # configure keycloak + docker exec apisix_keycloak bash /tmp/kcadm_configure_cas.sh + docker exec apisix_keycloak bash /tmp/kcadm_configure_university.sh + docker exec 
apisix_keycloak bash /tmp/kcadm_configure_basic.sh + + # configure clickhouse + echo 'CREATE TABLE default.test (`host` String, `client_ip` String, `route_id` String, `service_id` String, `@timestamp` String, PRIMARY KEY(`@timestamp`)) ENGINE = MergeTree()' | curl 'http://localhost:8123/' --data-binary @- + echo 'CREATE TABLE default.test (`host` String, `client_ip` String, `route_id` String, `service_id` String, `@timestamp` String, PRIMARY KEY(`@timestamp`)) ENGINE = MergeTree()' | curl 'http://localhost:8124/' --data-binary @- +} + +before() { + # download keycloak cas provider + sudo wget -q https://github.com/jacekkow/keycloak-protocol-cas/releases/download/18.0.2/keycloak-protocol-cas-18.0.2.jar -O /opt/keycloak-protocol-cas-18.0.2.jar +} + +case $1 in + 'after') + after + ;; + 'before') + before + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/ci/kubernetes-ci.sh b/CloudronPackages/APISIX/apisix-source/ci/kubernetes-ci.sh new file mode 100755 index 0000000..c40b8c7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/kubernetes-ci.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +. ./ci/common.sh + +run_case() { + export_or_prefix + export PERL5LIB=.:$PERL5LIB + prove -Itest-nginx/lib -I./ -r t/kubernetes | tee test-result + rerun_flaky_tests test-result +} + +case_opt=$1 +case $case_opt in + (run_case) + run_case + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux-install-etcd-client.sh b/CloudronPackages/APISIX/apisix-source/ci/linux-install-etcd-client.sh new file mode 100755 index 0000000..33e5b8a --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux-install-etcd-client.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +ETCD_ARCH="amd64" +ETCD_VERSION=${ETCD_VERSION:-'3.5.4'} +ARCH=${ARCH:-`(uname -m | tr '[:upper:]' '[:lower:]')`} + +if [[ $ARCH == "arm64" ]] || [[ $ARCH == "aarch64" ]]; then + ETCD_ARCH="arm64" +fi + +wget -q https://github.com/etcd-io/etcd/releases/download/v${ETCD_VERSION}/etcd-v${ETCD_VERSION}-linux-${ETCD_ARCH}.tar.gz +tar xf etcd-v${ETCD_VERSION}-linux-${ETCD_ARCH}.tar.gz +sudo cp etcd-v${ETCD_VERSION}-linux-${ETCD_ARCH}/etcdctl /usr/local/bin/ +rm -rf etcd-v${ETCD_VERSION}-linux-${ETCD_ARCH} diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux-install-openresty.sh b/CloudronPackages/APISIX/apisix-source/ci/linux-install-openresty.sh new file mode 100755 index 0000000..465df32 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux-install-openresty.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -euo pipefail + +source ./ci/common.sh + +export_version_info + +ARCH=${ARCH:-`(uname -m | tr '[:upper:]' '[:lower:]')`} +arch_path="" +if [[ $ARCH == "arm64" ]] || [[ $ARCH == "aarch64" ]]; then + arch_path="arm64/" +fi + +wget -qO - https://openresty.org/package/pubkey.gpg | sudo apt-key add - +wget -qO - http://repos.apiseven.com/pubkey.gpg | sudo apt-key add - +sudo apt-get -y update --fix-missing +sudo apt-get -y install software-properties-common +sudo add-apt-repository -y "deb https://openresty.org/package/${arch_path}ubuntu $(lsb_release -sc) main" +sudo add-apt-repository -y "deb http://repos.apiseven.com/packages/${arch_path}debian bullseye main" + +sudo apt-get update +sudo apt-get install -y openresty-pcre-dev openresty-zlib-dev build-essential gcc g++ cpanminus + +SSL_LIB_VERSION=${SSL_LIB_VERSION-openssl} +ENABLE_FIPS=${ENABLE_FIPS:-"false"} + +if [ "$OPENRESTY_VERSION" == "source" ]; then + if [ "$SSL_LIB_VERSION" == "tongsuo" ]; then + export openssl_prefix=/usr/local/tongsuo + export zlib_prefix=$OPENRESTY_PREFIX/zlib + export pcre_prefix=$OPENRESTY_PREFIX/pcre + + export cc_opt="-DNGX_LUA_ABORT_AT_PANIC -I${zlib_prefix}/include -I${pcre_prefix}/include -I${openssl_prefix}/include" + export ld_opt="-L${zlib_prefix}/lib -L${pcre_prefix}/lib -L${openssl_prefix}/lib64 -Wl,-rpath,${zlib_prefix}/lib:${pcre_prefix}/lib:${openssl_prefix}/lib64" + fi +fi + +install_apisix_runtime + +if [ ! 
"$ENABLE_FIPS" == "true" ]; then +curl -o /usr/local/openresty/openssl3/ssl/openssl.cnf \ + https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/conf/openssl3/openssl.cnf +fi + +# patch lua-resty-events +sed -i 's/log(ERR, "event worker failed: ", perr)/log(ngx.WARN, "event worker failed: ", perr)/' /usr/local/openresty/lualib/resty/events/worker.lua diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux_apisix_current_luarocks_in_customed_nginx_runner.sh b/CloudronPackages/APISIX/apisix-source/ci/linux_apisix_current_luarocks_in_customed_nginx_runner.sh new file mode 100755 index 0000000..3eaba07 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux_apisix_current_luarocks_in_customed_nginx_runner.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +export OPENRESTY_VERSION=source +. ./ci/linux_apisix_current_luarocks_runner.sh diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux_apisix_current_luarocks_runner.sh b/CloudronPackages/APISIX/apisix-source/ci/linux_apisix_current_luarocks_runner.sh new file mode 100755 index 0000000..39b9df8 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux_apisix_current_luarocks_runner.sh @@ -0,0 +1,89 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +. 
./ci/common.sh + +do_install() { + linux_get_dependencies + install_brotli + + export_or_prefix + + ./ci/linux-install-openresty.sh + ./utils/linux-install-luarocks.sh + ./ci/linux-install-etcd-client.sh +} + +script() { + export_or_prefix + openresty -V + + sudo rm -rf /usr/local/share/lua/5.1/apisix + + # install APISIX with local version + luarocks install apisix-master-0.rockspec --only-deps > build.log 2>&1 || (cat build.log && exit 1) + luarocks make apisix-master-0.rockspec > build.log 2>&1 || (cat build.log && exit 1) + # ensure all files under apisix is installed + diff -rq apisix /usr/local/share/lua/5.1/apisix + + mkdir cli_tmp && cd cli_tmp + + # show install file + luarocks show apisix + + sudo PATH=$PATH apisix help + sudo PATH=$PATH apisix init + sudo PATH=$PATH apisix start + sudo PATH=$PATH apisix stop + + grep '\[error\]' /usr/local/apisix/logs/error.log > /tmp/error.log | true + if [ -s /tmp/error.log ]; then + echo "=====found error log=====" + cat /usr/local/apisix/logs/error.log + exit 1 + fi + + cd .. + + # apisix cli test + set_coredns + + # install test dependencies + sudo pip install requests + + # dismiss "maximum number of open file descriptors too small" warning + ulimit -n 10240 + ulimit -n -S + ulimit -n -H + + for f in ./t/cli/test_*.sh; do + PATH="$PATH" "$f" + done +} + +case_opt=$1 +shift + +case ${case_opt} in +do_install) + do_install "$@" + ;; +script) + script "$@" + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_common_runner.sh b/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_common_runner.sh new file mode 100755 index 0000000..afaf948 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_common_runner.sh @@ -0,0 +1,127 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +. ./ci/common.sh + +before_install() { + linux_get_dependencies + + sudo cpanm --notest Test::Nginx >build.log 2>&1 || (cat build.log && exit 1) +} + +do_install() { + export_or_prefix + + ./ci/linux-install-openresty.sh + + ./utils/linux-install-luarocks.sh + + ./ci/linux-install-etcd-client.sh + + create_lua_deps + + # sudo apt-get install tree -y + # tree deps + + # The latest version of test-nginx is not compatible with the current set of tests with ---http2 + # due to this commit: https://github.com/openresty/test-nginx/commit/0ccd106cbe6878318e5a591634af8f1707c411a6 + # This change pins test-nginx to a commit before this one. + git clone --depth 1 https://github.com/openresty/test-nginx.git test-nginx + cd test-nginx + git fetch --depth=1 origin ced30a31bafab6c68873efb17b6d80f39bcd95f5 + git checkout ced30a31bafab6c68873efb17b6d80f39bcd95f5 + cd .. 
+ + make utils + + mkdir -p build-cache + # install and start grpc_server_example + cd t/grpc_server_example + + CGO_ENABLED=0 go build + cd ../../ + + # install grpcurl + install_grpcurl + + # install nodejs + install_nodejs + + # grpc-web server && client + cd t/plugin/grpc-web + ./setup.sh + # back to home directory + cd ../../../ + + # install mcp test suite + pushd t/plugin/mcp + pnpm install + popd + + # install common jest test suite + pushd t + pnpm install + popd + + # install vault cli capabilities + install_vault_cli + + # install brotli + install_brotli +} + +script() { + export_or_prefix + openresty -V + + make init + + set_coredns + + start_grpc_server_example + + start_sse_server_example + + # APISIX_ENABLE_LUACOV=1 PERL5LIB=.:$PERL5LIB prove -Itest-nginx/lib -r t + FLUSH_ETCD=1 TEST_EVENTS_MODULE=$TEST_EVENTS_MODULE prove --timer -Itest-nginx/lib -I./ -r $TEST_FILE_SUB_DIR | tee /tmp/test.result + rerun_flaky_tests /tmp/test.result +} + +after_success() { + # cat luacov.stats.out + # luacov-coveralls + echo "done" +} + +case_opt=$1 +shift + +case ${case_opt} in +before_install) + before_install "$@" + ;; +do_install) + do_install "$@" + ;; +script) + script "$@" + ;; +after_success) + after_success "$@" + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_runner.sh b/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_runner.sh new file mode 100755 index 0000000..2e39224 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_runner.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +export OPENRESTY_VERSION=source +. ./ci/linux_openresty_common_runner.sh diff --git a/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_tongsuo_runner.sh b/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_tongsuo_runner.sh new file mode 100755 index 0000000..2afdcea --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/linux_openresty_tongsuo_runner.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +export OPENRESTY_VERSION=source +export SSL_LIB_VERSION=tongsuo + + +before_install() { + if [ -n "$COMPILE_TONGSUO" ]; then + git clone https://github.com/api7/tongsuo --depth 1 + pushd tongsuo + # build binary + ./config enable-ntls -static + make -j2 + mv apps/openssl apps/static-openssl + ./config shared enable-ntls -g --prefix=/usr/local/tongsuo + make -j2 + popd + fi + + pushd tongsuo + sudo make install_sw + sudo cp apps/static-openssl /usr/local/tongsuo/bin/openssl + export PATH=/usr/local/tongsuo/bin:$PATH + openssl version + popd +} + + +case_opt=$1 + +case ${case_opt} in +before_install) + # shellcheck disable=SC2218 + before_install + ;; +esac + +. ./ci/linux_openresty_common_runner.sh diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.common.yml b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.common.yml new file mode 100644 index 0000000..67504cb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.common.yml @@ -0,0 +1,113 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +version: "3.8" + +services: + ## Etcd + etcd_old: + image: bitnami/etcd:3.3.8 + restart: unless-stopped + env_file: + - ci/pod/etcd/env/common.env + environment: + ETCD_ADVERTISE_CLIENT_URLS: http://0.0.0.0:2379 + ports: + - "3379:2379" + - "3380:2380" + + etcd: + image: bitnami/etcd:3.5.4 + restart: unless-stopped + env_file: + - ci/pod/etcd/env/common.env + environment: + ETCD_ADVERTISE_CLIENT_URLS: http://0.0.0.0:2379 + ports: + - "2379:2379" + - "2380:2380" + + etcd_tls: + image: bitnami/etcd:3.5.4 + restart: unless-stopped + env_file: + - ci/pod/etcd/env/common.env + environment: + ETCD_ADVERTISE_CLIENT_URLS: https://0.0.0.0:12379 + ETCD_LISTEN_CLIENT_URLS: https://0.0.0.0:12379 + ETCD_CERT_FILE: /certs/etcd.pem + ETCD_KEY_FILE: /certs/etcd.key + ports: + - "12379:12379" + - "12380:12380" + volumes: + - ./t/certs:/certs + + etcd_mtls: + image: bitnami/etcd:3.5.4 + restart: unless-stopped + env_file: + - ci/pod/etcd/env/common.env + environment: + ETCD_ADVERTISE_CLIENT_URLS: https://0.0.0.0:22379 + ETCD_LISTEN_CLIENT_URLS: https://0.0.0.0:22379 + ETCD_CERT_FILE: /certs/mtls_server.crt + ETCD_KEY_FILE: /certs/mtls_server.key + ETCD_CLIENT_CERT_AUTH: "true" + ETCD_TRUSTED_CA_FILE: /certs/mtls_ca.crt + ports: + - "22379:22379" + - "22380:22380" + volumes: + - ./t/certs:/certs + + + ## Redis cluster + redis-cluster: + image: vishnunair/docker-redis-cluster:latest + restart: unless-stopped + ports: + - "5000:6379" + - "5002:6380" + - "5003:6381" + - "5004:6382" + - "5005:6383" + - "5006:6384" + + + ## HashiCorp Vault + vault: + image: vault:1.9.0 + container_name: vault + restart: unless-stopped + ports: + - "8200:8200" + cap_add: + - IPC_LOCK + environment: + VAULT_DEV_ROOT_TOKEN_ID: root + VAULT_DEV_LISTEN_ADDRESS: 0.0.0.0:8200 + command: [ "vault", "server", "-dev" ] + + + ## LocalStack + localstack: + image: localstack/localstack + container_name: localstack + restart: unless-stopped + ports: + - "127.0.0.1:4566:4566" # LocalStack Gateway diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.first.yml b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.first.yml new file mode 100644 index 0000000..d203a96 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.first.yml @@ -0,0 +1,304 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +version: "3.8" + +services: + ## Eureka + eureka: + image: bitinit/eureka + env_file: + - ci/pod/eureka/env/common.env + restart: unless-stopped + ports: + - "8761:8761" + + ## Consul + consul_1: + image: consul:1.7 + restart: unless-stopped + ports: + - "8500:8500" + command: [ "consul", "agent", "-server", "-bootstrap-expect=1", "-client", "0.0.0.0", "-log-level", "info", "-data-dir=/consul/data", "-enable-script-checks" ] + networks: + consul_net: + + consul_2: + image: consul:1.7 + restart: unless-stopped + ports: + - "8600:8500" + command: [ "consul", "agent", "-server", "-bootstrap-expect=1", "-client", "0.0.0.0", "-log-level", "info", "-data-dir=/consul/data", "-enable-script-checks" ] + networks: + consul_net: + + consul_3: + image: hashicorp/consul:1.16.2 + restart: unless-stopped + ports: + - "8502:8500" + command: [ "consul", "agent", "-server", "-bootstrap-expect=1", "-client", "0.0.0.0", "-log-level", "info", "-data-dir=/consul/data", "-enable-script-checks", "-ui", "-hcl", "acl = {\nenabled = true\ndefault_policy = \"deny\"\nenable_token_persistence = true\ntokens = {\nagent = \"2b778dd9-f5f1-6f29-b4b4-9a5fa948757a\"\n}}" ] + networks: + consul_net: + + ## Consul cluster + consul_node_1: + image: consul:1.7 + restart: unless-stopped + ports: + - "9500:8500" + - "8300:8300" + - "8301:8301" + - "8302:8302" + - "9600:8600" + command: [ "consul", "agent", "-server", "-bootstrap-expect=1", "-bind", "0.0.0.0", "-client", "0.0.0.0", "-node", "node-1", "-log-level", "info", "-data-dir=/consul/data", "-enable-script-checks" ] + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8500/"] + interval: 10s + timeout: 10s + retries: 5 + networks: + consul_cluster_net: + aliases: + - consul.cluster + + consul_node_2: + image: consul:1.7 + restart: unless-stopped + environment: + - CONSUL_BIND_INTERFACE=eth0 + ports: + - "9501:8500" + command: [ "consul", "agent", "-server", "-bind", "0.0.0.0", "-client", "0.0.0.0", "-retry-join", "consul.cluster", "-node", "node-2", "-log-level", "info", "-data-dir=/consul/data", "-enable-script-checks" ] + depends_on: + consul_node_1: + condition: service_healthy + networks: + consul_cluster_net: + aliases: + - consul.cluster + + consul_node_3: + image: consul:1.7 + restart: unless-stopped + environment: + - CONSUL_BIND_INTERFACE=eth0 + ports: + - "9502:8500" + command: [ "consul", "agent", "-server", "-bind", "0.0.0.0", "-client", "0.0.0.0", "-retry-join", "consul.cluster", "-node", "node-3", "-log-level", "info", "-data-dir=/consul/data", "-enable-script-checks" ] + depends_on: + consul_node_1: + condition: service_healthy + networks: + consul_cluster_net: + aliases: + - consul.cluster + + ## Nacos cluster + nacos_auth: + hostname: nacos1 + image: nacos/nacos-server:1.4.1 + env_file: + - ci/pod/nacos/env/common.env + environment: + NACOS_AUTH_ENABLE: "true" + restart: unless-stopped + ports: + - "8848:8848" + networks: + nacos_net: + + nacos_no_auth: + hostname: nacos2 + image: nacos/nacos-server:1.4.1 + env_file: + - ci/pod/nacos/env/common.env + restart: unless-stopped + ports: + - "8858:8848" + networks: + nacos_net: + + nacos_server_health_check: + build: + context: ci/pod/nacos/healthcheck + dockerfile: Dockerfile + environment: + CHECK_URI: "http://nacos2:8848/nacos/v1/ns/service/list?pageNo=1&pageSize=2" + tty: true + # debug healthcheck script +# volumes: +# - ./ci/pod/nacos/healthcheck/nacos-server-healthcheck.sh:/nacos-server-healthcheck.sh + healthcheck: + test: [ "CMD", "bash", "/nacos-server-healthcheck.sh" ] + interval: 5s + 
timeout: 5s + retries: 60 + start_period: 10s + networks: + nacos_net: + + nacos_service_health_check: + build: + context: ci/pod/nacos/healthcheck + dockerfile: Dockerfile + # debug healthcheck script +# volumes: +# - ./ci/pod/nacos/healthcheck/nacos-service-healthcheck.sh:/nacos-service-healthcheck.sh + tty: true + healthcheck: + test: [ "CMD", "bash", "/nacos-service-healthcheck.sh" ] + interval: 5s + timeout: 30s + retries: 60 + start_period: 10s + networks: + nacos_net: + + ### Nacos services + nacos-service1: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 1 + restart: unless-stopped + ports: + - "18001:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + nacos-service2: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 2 + restart: unless-stopped + ports: + - "18002:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + nacos-service3: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 1 + NAMESPACE: test_ns + restart: unless-stopped + ports: + - "18003:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + nacos-service4: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 1 + GROUP: test_group + restart: unless-stopped + ports: + - "18004:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + nacos-service5: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 1 + GROUP: test_group + NAMESPACE: test_ns + restart: unless-stopped + ports: + - "18005:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + nacos-service6: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 3 + GROUP: test_group2 + NAMESPACE: test_ns + restart: unless-stopped + ports: + - "18006:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + nacos-service7: + build: + context: ci/pod/nacos/service + dockerfile: Dockerfile + env_file: + - ci/pod/nacos/env/service.env + environment: + SUFFIX_NUM: 4 + GROUP: test_group + NAMESPACE: test_ns2 + restart: unless-stopped + ports: + - "18007:18001" + depends_on: + nacos_server_health_check: + condition: service_healthy + networks: + nacos_net: + + +networks: + consul_cluster_net: + consul_net: + nacos_net: diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.last.yml b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.last.yml new file mode 100644 index 0000000..dbc835f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.last.yml @@ -0,0 +1,97 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +version: "3.8" + +services: + ## Redis + apisix_redis: + # The latest image is the latest stable version + image: redis:latest + restart: unless-stopped + ports: + - "6379:6379" + networks: + apisix_net: + + ## kafka-cluster + zookeeper-server1: + image: bitnami/zookeeper:3.6.0 + env_file: + - ci/pod/kafka/zookeeper-server/env/common.env + restart: unless-stopped + ports: + - "2181:2181" + networks: + kafka_net: + + zookeeper-server2: + image: bitnami/zookeeper:3.6.0 + env_file: + - ci/pod/kafka/zookeeper-server/env/common.env + restart: unless-stopped + ports: + - "12181:12181" + networks: + kafka_net: + + kafka-server1: + image: bitnami/kafka:2.8.1 + env_file: + - ci/pod/kafka/kafka-server/env/last.env + environment: + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper-server1:2181 + restart: unless-stopped + ports: + - "9092:9092" + - "9093:9093" + - "9094:9094" + depends_on: + - zookeeper-server1 + - zookeeper-server2 + networks: + kafka_net: + volumes: + - ./ci/pod/kafka/kafka-server/kafka_jaas.conf:/opt/bitnami/kafka/config/kafka_jaas.conf:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro + + kafka-server2: + image: bitnami/kafka:2.8.1 + env_file: + - ci/pod/kafka/kafka-server/env/last.env + environment: + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper-server2:2181 + restart: unless-stopped + ports: + - "19092:9092" + - "19093:9093" + - "19094:9094" + depends_on: + - zookeeper-server1 + - zookeeper-server2 + networks: + kafka_net: + volumes: + - ./ci/pod/kafka/kafka-server/kafka_jaas.conf:/opt/bitnami/kafka/config/kafka_jaas.conf:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.keystore.jks:ro + - ./ci/pod/kafka/kafka-server/selfsigned.jks:/opt/bitnami/kafka/config/certs/kafka.truststore.jks:ro + + +networks: + apisix_net: + kafka_net: diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.plugin.yml b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.plugin.yml new file mode 100644 index 0000000..c0a598c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/docker-compose.plugin.yml @@ -0,0 +1,400 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +version: "3.8" + +services: + ## Redis + apisix_redis: + # The latest image is the latest stable version + image: redis:latest + restart: unless-stopped + volumes: + - ./t/certs:/certs + command: "--tls-port 6380 \ + --tls-cert-file /certs/mtls_server.crt \ + --tls-key-file /certs/mtls_server.key \ + --tls-ca-cert-file /certs/mtls_ca.crt \ + --tls-auth-clients no \ + --user alice on +@all ~* \\&* \\>somepassword" + ports: + - "6379:6379" + - "6380:6380" + networks: + apisix_net: + + ## keycloak + apisix_keycloak: + container_name: apisix_keycloak + image: quay.io/keycloak/keycloak:18.0.2 + # use host network because in CAS auth, + # keycloak needs to send back-channel POST to apisix. + network_mode: host + environment: + KEYCLOAK_ADMIN: admin + KEYCLOAK_ADMIN_PASSWORD: admin + KC_HTTPS_CERTIFICATE_FILE: /opt/keycloak/conf/server.crt.pem + KC_HTTPS_CERTIFICATE_KEY_FILE: /opt/keycloak/conf/server.key.pem + restart: unless-stopped + command: ["start-dev"] + volumes: + - /opt/keycloak-protocol-cas-18.0.2.jar:/opt/keycloak/providers/keycloak-protocol-cas-18.0.2.jar + - ./ci/pod/keycloak/server.crt.pem:/opt/keycloak/conf/server.crt.pem + - ./ci/pod/keycloak/server.key.pem:/opt/keycloak/conf/server.key.pem + - ./ci/pod/keycloak/kcadm_configure_cas.sh:/tmp/kcadm_configure_cas.sh + - ./ci/pod/keycloak/kcadm_configure_university.sh:/tmp/kcadm_configure_university.sh + - ./ci/pod/keycloak/kcadm_configure_basic.sh:/tmp/kcadm_configure_basic.sh + + ## kafka-cluster + zookeeper-server1: + image: bitnami/zookeeper:3.6.0 + env_file: + - ci/pod/kafka/zookeeper-server/env/common.env + restart: unless-stopped + ports: + - "2181:2181" + networks: + kafka_net: + + zookeeper-server2: + image: bitnami/zookeeper:3.6.0 + env_file: + - ci/pod/kafka/zookeeper-server/env/common.env + restart: unless-stopped + ports: + - "12181:12181" + networks: + kafka_net: + + kafka-server1: + image: bitnami/kafka:2.8.1 + env_file: + - ci/pod/kafka/kafka-server/env/common.env + environment: + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper-server1:2181 + restart: unless-stopped + ports: + - "9092:9092" + depends_on: + - zookeeper-server1 + - zookeeper-server2 + networks: + kafka_net: + + kafka-server2: + image: bitnami/kafka:2.8.1 + env_file: + - ci/pod/kafka/kafka-server/env/common2.env + environment: + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper-server2:2181 + restart: unless-stopped + ports: + - "19092:19092" + - "19094:19094" + depends_on: + - zookeeper-server1 + - zookeeper-server2 + networks: + kafka_net: + volumes: + - ./ci/pod/kafka/kafka-server/kafka_jaas.conf:/opt/bitnami/kafka/config/kafka_jaas.conf:ro + + ## SkyWalking + skywalking: + image: apache/skywalking-oap-server:8.7.0-es6 + restart: unless-stopped + ports: + - "1234:1234" + - "11800:11800" + - "12800:12800" + networks: + skywalk_net: + + + ## OpenLDAP + openldap: + image: bitnami/openldap:2.5.8 + environment: + - LDAP_ADMIN_USERNAME=amdin + - LDAP_ADMIN_PASSWORD=adminpassword + - LDAP_USERS=user01,user02 + - LDAP_PASSWORDS=password1,password2 + - LDAP_ENABLE_TLS=yes + - LDAP_TLS_CERT_FILE=/certs/localhost_slapd_cert.pem + - LDAP_TLS_KEY_FILE=/certs/localhost_slapd_key.pem + - LDAP_TLS_CA_FILE=/certs/apisix.crt + ports: + - "1389:1389" + - "1636:1636" + volumes: + - ./t/certs:/certs + + + ## Grafana Loki + loki: + image: grafana/loki:2.8.0 + command: -config.file=/etc/loki/local-config.yaml -auth.enabled -querier.multi-tenant-queries-enabled + ports: + - "3100:3100" + 
networks: + - loki_net + + rocketmq_namesrv: + image: apacherocketmq/rocketmq:4.6.0 + container_name: rmqnamesrv + restart: unless-stopped + ports: + - "9876:9876" + command: sh mqnamesrv + networks: + rocketmq_net: + + rocketmq_broker: + image: apacherocketmq/rocketmq:4.6.0 + container_name: rmqbroker + restart: unless-stopped + ports: + - "10909:10909" + - "10911:10911" + - "10912:10912" + depends_on: + - rocketmq_namesrv + command: sh mqbroker -n rocketmq_namesrv:9876 -c ../conf/broker.conf + networks: + rocketmq_net: + + # Open Policy Agent + opa: + image: openpolicyagent/opa:0.35.0 + restart: unless-stopped + ports: + - 8181:8181 + command: run -s /example.rego /echo.rego /data.json /with_route.rego + volumes: + - type: bind + source: ./ci/pod/opa/with_route.rego + target: /with_route.rego + - type: bind + source: ./ci/pod/opa/example.rego + target: /example.rego + - type: bind + source: ./ci/pod/opa/echo.rego + target: /echo.rego + - type: bind + source: ./ci/pod/opa/data.json + target: /data.json + networks: + opa_net: + + # Elasticsearch Logger Service + elasticsearch-noauth: + image: docker.elastic.co/elasticsearch/elasticsearch:8.12.0 + restart: unless-stopped + ports: + - "9200:9200" + - "9300:9300" + environment: + ES_JAVA_OPTS: -Xms512m -Xmx512m + discovery.type: single-node + xpack.security.enabled: 'false' + + elasticsearch-auth: + image: docker.elastic.co/elasticsearch/elasticsearch:8.12.0 + restart: unless-stopped + ports: + - "9201:9201" + environment: + ES_JAVA_OPTS: -Xms512m -Xmx512m + discovery.type: single-node + ELASTIC_USERNAME: elastic + ELASTIC_PASSWORD: 123456 + http.port: 9201 + xpack.security.enabled: 'true' + + elasticsearch-auth-2: + image: docker.elastic.co/elasticsearch/elasticsearch:9.0.2 + restart: unless-stopped + ports: + - "9301:9201" + environment: + ES_JAVA_OPTS: -Xms512m -Xmx512m + discovery.type: single-node + ELASTIC_USERNAME: elastic + ELASTIC_PASSWORD: 123456 + http.port: 9201 + xpack.security.enabled: 'true' + + elasticsearch-auth-3: + image: docker.elastic.co/elasticsearch/elasticsearch:7.0.0 + restart: unless-stopped + ports: + - "9401:9201" + environment: + ES_JAVA_OPTS: -Xms512m -Xmx512m + discovery.type: single-node + ELASTIC_USERNAME: elastic + ELASTIC_PASSWORD: 123456 + http.port: 9201 + xpack.security.enabled: 'true' + + elasticsearch-auth-4: + image: docker.elastic.co/elasticsearch/elasticsearch:6.7.0 + restart: unless-stopped + ports: + - "9501:9201" + environment: + ES_JAVA_OPTS: -Xms512m -Xmx512m + discovery.type: single-node + ELASTIC_USERNAME: elastic + ELASTIC_PASSWORD: 123456 + http.port: 9201 + xpack.security.enabled: 'true' + + # The function services of OpenFunction + test-header: + image: test-header-image:latest + restart: unless-stopped + ports: + - "30583:8080" + environment: + CONTEXT_MODE: "self-host" + FUNC_CONTEXT: "{\"name\":\"HelloWorld\",\"version\":\"v1.0.0\",\"port\":\"8080\",\"runtime\":\"Knative\"}" + + test-uri: + image: test-uri-image:latest + restart: unless-stopped + ports: + - "30584:8080" + environment: + CONTEXT_MODE: "self-host" + FUNC_CONTEXT: "{\"name\":\"HelloWorld\",\"version\":\"v1.0.0\",\"port\":\"8080\",\"runtime\":\"Knative\"}" + + test-body: + image: test-body-image:latest + restart: unless-stopped + ports: + - "30585:8080" + environment: + CONTEXT_MODE: "self-host" + FUNC_CONTEXT: "{\"name\":\"HelloWorld\",\"version\":\"v1.0.0\",\"port\":\"8080\",\"runtime\":\"Knative\"}" + + ## RedisCluster Enable TLS + redis-node-0: + image: docker.io/bitnami/redis-cluster:7.0 + volumes: + - ./t/certs:/certs 
+ environment: + - 'ALLOW_EMPTY_PASSWORD=yes' + - 'REDIS_NODES=redis-node-0 redis-node-1 redis-node-2' + - 'REDIS_TLS_ENABLED=yes' + - 'REDIS_TLS_CERT_FILE=/certs/mtls_server.crt' + - 'REDIS_TLS_KEY_FILE=/certs/mtls_server.key' + - 'REDIS_TLS_CA_FILE=/certs/mtls_ca.crt' + - 'REDIS_TLS_AUTH_CLIENTS=no' + ports: + - '7000:6379' + + redis-node-1: + image: docker.io/bitnami/redis-cluster:7.0 + volumes: + - ./t/certs:/certs + environment: + - 'ALLOW_EMPTY_PASSWORD=yes' + - 'REDIS_NODES=redis-node-0 redis-node-1 redis-node-2' + - 'REDIS_TLS_ENABLED=yes' + - 'REDIS_TLS_CERT_FILE=/certs/mtls_server.crt' + - 'REDIS_TLS_KEY_FILE=/certs/mtls_server.key' + - 'REDIS_TLS_CA_FILE=/certs/mtls_ca.crt' + - 'REDIS_TLS_AUTH_CLIENTS=no' + ports: + - '7001:6379' + + redis-node-2: + image: docker.io/bitnami/redis-cluster:7.0 + volumes: + - ./t/certs:/certs + depends_on: + - redis-node-0 + - redis-node-1 + environment: + - 'ALLOW_EMPTY_PASSWORD=yes' + - 'REDIS_CLUSTER_REPLICAS=0' + - 'REDIS_NODES=redis-node-0 redis-node-1 redis-node-2' + - 'REDIS_CLUSTER_CREATOR=yes' + - 'REDIS_TLS_ENABLED=yes' + - 'REDIS_TLS_CERT_FILE=/certs/mtls_server.crt' + - 'REDIS_TLS_KEY_FILE=/certs/mtls_server.key' + - 'REDIS_TLS_CA_FILE=/certs/mtls_ca.crt' + - 'REDIS_TLS_AUTH_CLIENTS=no' + ports: + - '7002:6379' + + graphql-demo: + # the owner doesn't provide a semver tag + image: npalm/graphql-java-demo:latest + ports: + - '8888:8080' + + vector: + image: timberio/vector:0.29.1-debian + container_name: vector + volumes: + - ./ci/pod/vector:/etc/vector/ + - ./t/certs:/certs + ports: + - '3000:3000' #tcp logger + - '8127:8127/udp' + - '43000:43000' + - '5140:5140' + - "18088:18088" # For splunk logging tests + - '5150:5150/udp' + - "3001:3001" #http logger + networks: + vector_net: + + clickhouse: + image: clickhouse/clickhouse-server:23.4.2-alpine + container_name: clickhouse + ports: + - '8123:8123' + networks: + clickhouse_net: + + clickhouse2: + image: clickhouse/clickhouse-server:23.4.2-alpine + container_name: clickhouse2 + ports: + - '8124:8123' + networks: + clickhouse_net: + otel-collector: + image: otel/opentelemetry-collector-contrib + volumes: + - ./ci/pod/otelcol-contrib:/etc/otelcol-contrib:rw + ports: + - '4318:4318' + + +networks: + apisix_net: + kafka_net: + skywalk_net: + rocketmq_net: + opa_net: + vector_net: + clickhouse_net: + loki_net: diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/etcd/env/common.env b/CloudronPackages/APISIX/apisix-source/ci/pod/etcd/env/common.env new file mode 100644 index 0000000..24ba47f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/etcd/env/common.env @@ -0,0 +1 @@ +ALLOW_NONE_AUTHENTICATION=yes diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/eureka/env/common.env b/CloudronPackages/APISIX/apisix-source/ci/pod/eureka/env/common.env new file mode 100644 index 0000000..11169cb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/eureka/env/common.env @@ -0,0 +1,7 @@ +ENVIRONMENT=apisix +spring.application.name=apisix-eureka +server.port=8761 +eureka.instance.ip-address=127.0.0.1 +eureka.client.registerWithEureka=true +eureka.client.fetchRegistry=false +eureka.client.serviceUrl.defaultZone=http://127.0.0.1:8761/eureka/ diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/common.env b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/common.env new file mode 100644 index 0000000..06200b9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/common.env @@ -0,0 +1,3 @@ 
+ALLOW_PLAINTEXT_LISTENER=yes +KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true +KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092 diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/common2.env b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/common2.env new file mode 100644 index 0000000..c0fb5f5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/common2.env @@ -0,0 +1,8 @@ +ALLOW_PLAINTEXT_LISTENER=yes +KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=false +KAFKA_CFG_LISTENERS=PLAINTEXT://0.0.0.0:19092,SASL_PLAINTEXT://0.0.0.0:19094 +KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:19092,SASL_PLAINTEXT://127.0.0.1:19094 +KAFKA_CFG_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM= +KAFKA_CFG_SSL_KEYSTORE_LOCATION=/opt/bitnami/kafka/config/certs/kafka.keystore.jks +KAFKA_CFG_SSL_KEYSTORE_PASSWORD=changeit +KAFKA_CFG_SSL_KEY_PASSWORD=changeit diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/last.env b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/last.env new file mode 100644 index 0000000..adc9d7c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/env/last.env @@ -0,0 +1,8 @@ +ALLOW_PLAINTEXT_LISTENER=yes +KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=false +KAFKA_CFG_LISTENERS=PLAINTEXT://0.0.0.0:9092,SSL://0.0.0.0:9093,SASL_PLAINTEXT://0.0.0.0:9094 +KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093,SASL_PLAINTEXT://127.0.0.1:9094 +KAFKA_CFG_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM= +KAFKA_CFG_SSL_KEYSTORE_LOCATION=/opt/bitnami/kafka/config/certs/kafka.keystore.jks +KAFKA_CFG_SSL_KEYSTORE_PASSWORD=changeit +KAFKA_CFG_SSL_KEY_PASSWORD=changeit diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/kafka_jaas.conf b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/kafka_jaas.conf new file mode 100644 index 0000000..4bc1938 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/kafka-server/kafka_jaas.conf @@ -0,0 +1,23 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one or more +// contributor license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright ownership. +// The ASF licenses this file to You under the Apache License, Version 2.0 +// (the "License"); you may not use this file except in compliance with +// the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +KafkaServer { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="admin" + password="admin-secret" + user_admin="admin-secret"; +}; diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/zookeeper-server/env/common.env b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/zookeeper-server/env/common.env new file mode 100644 index 0000000..aa3cf9f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/kafka/zookeeper-server/env/common.env @@ -0,0 +1 @@ +ALLOW_ANONYMOUS_LOGIN=yes diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_basic.sh b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_basic.sh new file mode 100755 index 0000000..9c2a7b1 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_basic.sh @@ -0,0 +1,85 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +export PATH=/opt/keycloak/bin:$PATH + +kcadm.sh config credentials --server http://127.0.0.1:8080 --realm master --user admin --password admin + +# create realm +kcadm.sh create realms -s realm=basic -s enabled=true + +# set realm keys with specific private key, reuse tls cert and key +PRIVATE_KEY=$(awk 'NF {sub(/\r/, ""); printf "%s\\n", $0}' /opt/keycloak/conf/server.key.pem) +CERTIFICATE=$(awk 'NF {sub(/\r/, ""); printf "%s\\n", $0}' /opt/keycloak/conf/server.crt.pem) +kcadm.sh create components -r basic -s name=rsa-apisix -s providerId=rsa \ + -s providerType=org.keycloak.keys.KeyProvider \ + -s 'config.priority=["1000"]' \ + -s 'config.enabled=["true"]' \ + -s 'config.active=["true"]' \ + -s "config.privateKey=[\"$PRIVATE_KEY\"]" \ + -s "config.certificate=[\"$CERTIFICATE\"]" \ + -s 'config.algorithm=["RS256"]' + +# create client apisix +kcadm.sh create clients \ + -r basic \ + -s clientId=apisix \ + -s enabled=true \ + -s clientAuthenticatorType=client-secret \ + -s secret=secret \ + -s 'redirectUris=["*"]' \ + -s 'directAccessGrantsEnabled=true' + +# add audience to client apisix, so that the access token will contain the client id ("apisix") as audience +APISIX_CLIENT_UUID=$(kcadm.sh get clients -r basic -q clientId=apisix | jq -r '.[0].id') +kcadm.sh create clients/$APISIX_CLIENT_UUID/protocol-mappers/models \ + -r basic \ + -s protocol=openid-connect \ + -s name=aud \ + -s protocolMapper=oidc-audience-mapper \ + -s 'config."id.token.claim"=false' \ + -s 'config."access.token.claim"=true' \ + -s 'config."included.client.audience"=apisix' + +# create client apisix +kcadm.sh create clients \ + -r basic \ + -s clientId=apisix \ + -s enabled=true \ + -s clientAuthenticatorType=client-secret \ + -s secret=secret \ + -s 'redirectUris=["*"]' \ + -s 'directAccessGrantsEnabled=true' + +# create client apisix-no-aud, without client id 
audience +# according to Keycloak's default implementation, when unconfigured, +# only the account is listed as an audience, not the client id + +kcadm.sh create clients \ + -r basic \ + -s clientId=apisix-no-aud \ + -s enabled=true \ + -s clientAuthenticatorType=client-secret \ + -s secret=secret \ + -s 'redirectUris=["*"]' \ + -s 'directAccessGrantsEnabled=true' + +# create user jack +kcadm.sh create users -r basic -s username=jack -s enabled=true +kcadm.sh set-password -r basic --username jack --new-password jack diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_cas.sh b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_cas.sh new file mode 100644 index 0000000..3486667 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_cas.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -ex + +export PATH=/opt/keycloak/bin:$PATH + +kcadm.sh config credentials --server http://localhost:8080 --realm master --user admin --password admin + +kcadm.sh create realms -s realm=test -s enabled=true + +kcadm.sh create users -r test -s username=test -s enabled=true +kcadm.sh set-password -r test --username test --new-password test + +clients=("cas1" "cas2") +rootUrls=("http://127.0.0.1:1984" "http://127.0.0.2:1984") + +for i in ${!clients[@]}; do + kcadm.sh create clients -r test -s clientId=${clients[$i]} -s enabled=true \ + -s protocol=cas -s frontchannelLogout=false -s rootUrl=${rootUrls[$i]} -s 'redirectUris=["/*"]' +done diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_university.sh b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_university.sh new file mode 100644 index 0000000..162b624 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/kcadm_configure_university.sh @@ -0,0 +1,90 @@ +#!/usr/bin/env bash + +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +export PATH=/opt/keycloak/bin:$PATH + +kcadm.sh config credentials --server http://localhost:8080 --realm master --user admin --password admin + +# create realm University +kcadm.sh create realms -s realm=University -s enabled=true + +# create roles `Teacher, Student` +kcadm.sh create roles -r University -s name=Teacher +kcadm.sh create roles -r University -s name=Student + +# create users `teacher@gmail.com, student@gmail.com` +kcadm.sh create users -r University -s username=teacher@gmail.com -s enabled=true +kcadm.sh create users -r University -s username=student@gmail.com -s enabled=true + +# set password +kcadm.sh set-password -r University --username teacher@gmail.com --new-password 123456 +kcadm.sh set-password -r University --username student@gmail.com --new-password 123456 + +# bind roles to users +kcadm.sh add-roles -r University --uusername teacher@gmail.com --rolename Teacher +kcadm.sh add-roles -r University --uusername student@gmail.com --rolename Student + +# create client course_management +kcadm.sh create clients -r University -s clientId=course_management -s enabled=true -s clientAuthenticatorType=client-secret -s secret=d1ec69e9-55d2-4109-a3ea-befa071579d5 + +client_id=$(kcadm.sh get clients -r University --fields id,clientId 2>/dev/null | jq -r '.[] | select(.clientId=='\"course_management\"') | .id') +teacher_id=$(kcadm.sh get roles -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"Teacher\"') | .id') +student_id=$(kcadm.sh get roles -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"Student\"') | .id') + +# update client course_management +kcadm.sh update clients/${client_id} -r University -s protocol=openid-connect -s standardFlowEnabled=true \ + -s implicitFlowEnabled=true -s directAccessGrantsEnabled=true -s serviceAccountsEnabled=true \ + -s authorizationServicesEnabled=true -s 'redirectUris=["*"]' -s 'webOrigins=["*"]' + +kcadm.sh update clients/${client_id}/authz/resource-server -r University -s allowRemoteResourceManagement=false -s policyEnforcementMode="ENFORCING" + +# create authz-resource with name `course_resource`, uri `/course/*`, scope `DELETE, delete, view, GET` +kcadm.sh create clients/${client_id}/authz/resource-server/resource -r University -s name=course_resource \ + -s ownerManagedAccess=false -s uris='["/course/*"]' -s scopes='[{"name": "DELETE"},{"name": "view"},{"name": "GET"},{"name": "delete"}]' + +course_resource_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/resource -r University --fields _id,name 2>/dev/null | jq -r '.[] | select(.name=='\"course_resource\"') | ._id') +DELETE_scope_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/scope -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"DELETE\"') | .id') +delete_scope_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/scope -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"delete\"') | .id') +GET_scope_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/scope -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"GET\"') | .id') +view_scope_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/scope -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"view\"') | .id') + +# create authz-policy `AllowTeacherPolicy, AllowStudentPolicy` +kcadm.sh create clients/${client_id}/authz/resource-server/policy/role -r University \ + -s name="AllowTeacherPolicy" -s logic="POSITIVE" -s 
decisionStrategy="UNANIMOUS" \ + -s roles='[{"id": '\"${teacher_id}\"'}]' + +kcadm.sh create clients/${client_id}/authz/resource-server/policy/role -r University \ + -s name="AllowStudentPolicy" -s logic="POSITIVE" -s decisionStrategy="UNANIMOUS" \ + -s roles='[{"id": '\"${student_id}\"'}]' + +allow_teacher_policy_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/policy -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"AllowTeacherPolicy\"') | .id') +allow_student_policy_id=$(kcadm.sh get clients/${client_id}/authz/resource-server/policy -r University --fields id,name 2>/dev/null | jq -r '.[] | select(.name=='\"AllowStudentPolicy\"') | .id') + +# create authz-permission `Delete Course Permission` and `View Course Permission` +kcadm.sh create clients/${client_id}/authz/resource-server/permission/scope -r University \ + -s name="Delete Course Permission" -s logic="POSITIVE" -s decisionStrategy="UNANIMOUS" \ + -s policies='['\"${allow_teacher_policy_id}\"']' \ + -s scopes='['\"${DELETE_scope_id}\"', '\"${delete_scope_id}\"']' \ + -s resources='['\"${course_resource_id}\"']' + +kcadm.sh create clients/${client_id}/authz/resource-server/permission/scope -r University \ + -s name="View Course Permission" -s logic="POSITIVE" -s decisionStrategy="AFFIRMATIVE" \ + -s policies='['\"${allow_teacher_policy_id}\"', '\"${allow_student_policy_id}\"']' \ + -s scopes='['\"${GET_scope_id}\"', '\"${view_scope_id}\"']' \ + -s resources='['\"${course_resource_id}\"']' diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/server.crt.pem b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/server.crt.pem new file mode 100644 index 0000000..9c7bde3 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/server.crt.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDazCCAlOgAwIBAgIUbZfnhty/ZiHPz5Aq8kK5Kr8kcSQwDQYJKoZIhvcNAQEL +BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMzA0MTgxMTQzNDJaFw0zMzA0 +MTUxMTQzNDJaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw +HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQC/F4wK7eMTVAKGDMLCXE+Y6REdA5GU6/AakJf3NEKQ +wCrtrqO+VBPIz445+edf3EEXhjFFGPdU6p0EkF0SMLaMsVBQQJ2qcP6FloIYiyT3 +WCs/gbtdoWq53ucAfWueIyHWsovLc0VhOXm0rhTYg88nMjJ7y6vYkfLMT6qlwASn +9Tozgjat09fWATbN7yBi4ivVVsKDo2S3jkOyVnYYMjzZO3CSkyUSMl+ZsSesseSK +A9c2zogfKIU833njraA8blMFfdinEMI/9yceEx57IUjnpY1iWHLSItiZF+LKEpeL +vp9gpr88ghR85ISusqAqwcmnsdAqjjw7gbPm1DIvUgVBAgMBAAGjUzBRMB0GA1Ud +DgQWBBRvlz5ZiE2fD9ikPRqpYwsVrxZfxTAfBgNVHSMEGDAWgBRvlz5ZiE2fD9ik +PRqpYwsVrxZfxTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCX +5fOeFnX67eHI5dJB8p3U2GS21qykDVLV5ZV+JZfZwXJEygIvr/T9vs772EPxv+0/ +TO0+pGdcVswXq/6BoUFCV0rWWTDP5wTS3sV1ZsSSHil5zEutXuAI1LQGlit6w5xn +iDURFZw3ZmOFytXKXNbca1ma4yaCZtOwVe3O36GZeOiZFzBYE2DELqy77Nz1E5+3 +jZaDnx0vonV8/hhX6FAPRPQnIXkaEH3BnVQZGD1jxipbFQQtmeeNPELy18MQo30N +W1wOsbMMouniKUjdT16tdtzJzC+l9pVqRC+8df5PJfN56Uv9Ed6pjytkSF1SvHyJ +iTWmyxJL9AonUkc5Oiri +-----END CERTIFICATE----- diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/server.key.pem b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/server.key.pem new file mode 100755 index 0000000..f2bc2d9 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/keycloak/server.key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC/F4wK7eMTVAKG +DMLCXE+Y6REdA5GU6/AakJf3NEKQwCrtrqO+VBPIz445+edf3EEXhjFFGPdU6p0E 
+kF0SMLaMsVBQQJ2qcP6FloIYiyT3WCs/gbtdoWq53ucAfWueIyHWsovLc0VhOXm0 +rhTYg88nMjJ7y6vYkfLMT6qlwASn9Tozgjat09fWATbN7yBi4ivVVsKDo2S3jkOy +VnYYMjzZO3CSkyUSMl+ZsSesseSKA9c2zogfKIU833njraA8blMFfdinEMI/9yce +Ex57IUjnpY1iWHLSItiZF+LKEpeLvp9gpr88ghR85ISusqAqwcmnsdAqjjw7gbPm +1DIvUgVBAgMBAAECggEBAKUrrkGYI2mGePPzPbiP38E02zTv67sEQLJFfwUOp+bE +I5b0F9agh8VQGghkyKgkEiNKO3YVQVuluvjB66CYeIGdleT4JQ+4wVcoo+ShCN++ +1wr6kMA6kKx+Tb8vqYCzr0ELbSf6x+Jksp0Ixz3qmHixu88jWbNFW89boQ3JrnyZ +TUgRSRdPoXcxspwcbhy6mMhwUfUSy8Zcck81dBEAjokvzbYh4jtFYMipWqro66KJ +B9uqQme2J/rN/2PSrA6chI85Wa+JaGOSPDaGNp+DrADjoVZf1tXgzGCsA/lmVtQ0 +8YN4Dh21EjLxz4Dj5GE7RWET4Ejvv1XEih1p+zKne00CgYEA327raCD5Fnr1nGTb +Q4ZWkcDR6EGSD6JGD0ur+UqqJhirM/5b4iGcsVK5uufb5dwk9+9z0EucXOVq/il0 +vgG2FbgRYM8kx3CDLvMYAqKJ8e5NsGJWwJVq6DsmsO1SaEId+SVFH83RHfG5/ksq +/DgRg0Wl9FoL7sHchuSIP2QiLrMCgYEA2vHcKsMZk/KGMBHVffY3PUckirIM6vLa +idMmm0T0HSAdviZRxQGyOnjd93ZhMqFJPTrmHOq0uAxfdFt+oRoHk/pGarBCv76L +NnPrSnVe1pJOh7Mm7LHLgrAgeM2WW7xz6jZwc8On+9qHK97I/wAnJB8J7DvQJ2hR +sWCDSbfKtjsCgYEAnVE77tVIjMuGo9dfiuvLiFR7d0yzys43Bg4ByEUKCEjWQoWV +rGJ+MVxN6YvXCME4RloS8VZLgh0GeG44BJCv5Br2IXO4MbTGqQgAn9pRxkZD7S1Q +Z8jMvTboxypSG5ZyBDp5sSr5Ulwg2SuT2IKh0gv4DVRZkoJtA41lYTzf1IECgYBd +3NJGgt20T4S3lu2v0p5b5uQDkdF36CVIcP1cE3OUCPC3VDY5/0ApUSfXryh8TCjZ +1yZPv086mBNUDuV6q24UQndtxaLYERgdgBSfFzJRSuffxS4qyw40OM2y/HA5Y9FN +14jeGEMr9cN9S0VgDPC6y5O1cu8J9e8P3BBsyh5dgQKBgHMlIhOJDO/neVnax79X +d3+5GaiggUnkd27OkYC4LhXEc/QWeHE0ByA0bDhhnsE7IVK2CVC18axOLmEJVy2g +F6ZtxcpNrlVtF4YaOiRVUcDNnz9gX48efrpdoX2iBSFEd1NRDo/bjkVXI1L08LNf +BbMB104PadChoGpl5R3NQQsP +-----END PRIVATE KEY----- diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/env/common.env b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/env/common.env new file mode 100644 index 0000000..1a549cb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/env/common.env @@ -0,0 +1,6 @@ +EMBEDDED_STORAGE=embedded +PREFER_HOST_MODE=hostname +MODE=cluster +NACOS_SERVERS="nacos1:8848 nacos2:8848" +JVM_XMS=512m +JVM_XMX=512m diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/env/service.env b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/env/service.env new file mode 100644 index 0000000..d09eaba --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/env/service.env @@ -0,0 +1,2 @@ +SERVICE_NAME=APISIX-NACOS +NACOS_ADDR=nacos2:8848 diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/Dockerfile b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/Dockerfile new file mode 100644 index 0000000..e4109df --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/Dockerfile @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+FROM alpine:latest
+
+# change workdir to /
+WORKDIR /
+
+# install curl
+RUN apk --no-cache add bash curl
+
+# add healthcheck script
+COPY *.sh /
+
+# add hosted process
+CMD ["cat"]
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/nacos-server-healthcheck.sh b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/nacos-server-healthcheck.sh
new file mode 100644
index 0000000..232f457
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/nacos-server-healthcheck.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+set -ex
+
+# nacos server healthcheck
+REQ_STATUS=$(curl -s -o /dev/null -w '%{http_code}' "${CHECK_URI}")
+
+if [ "${REQ_STATUS}" -ne "200" ]; then
+    exit 1;
+fi
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/nacos-service-healthcheck.sh b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/nacos-service-healthcheck.sh
new file mode 100644
index 0000000..bd540d7
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/healthcheck/nacos-service-healthcheck.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -ex
+
+# nacos service healthcheck
+URI_LIST=(
+    "http://nacos2:8848/nacos/v1/ns/service/list?pageNo=1&pageSize=2"
+    "http://nacos2:8848/nacos/v1/ns/service/list?groupName=test_group&pageNo=1&pageSize=2"
+    "http://nacos2:8848/nacos/v1/ns/service/list?groupName=DEFAULT_GROUP&namespaceId=test_ns&pageNo=1&pageSize=2"
+    "http://nacos2:8848/nacos/v1/ns/service/list?groupName=test_group&namespaceId=test_ns&pageNo=1&pageSize=2"
+)
+
+for URI in "${URI_LIST[@]}"; do
+    if [[ $(curl -s "${URI}" | grep "APISIX-NACOS") ]]; then
+        continue
+    else
+        exit 1;
+    fi
+done
+
+
+for IDX in {1..7..1}; do
+    REQ_STATUS=$(curl -s -o /dev/null -w '%{http_code}' "http://nacos-service${IDX}:18001/hello")
+    if [ "${REQ_STATUS}" -ne "200" ]; then
+        exit 1;
+    fi
+done
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/service/Dockerfile b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/service/Dockerfile
new file mode 100644
index 0000000..d279c74
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/nacos/service/Dockerfile
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+FROM eclipse-temurin:8
+
+ENV SUFFIX_NUM=${SUFFIX_NUM:-1}
+ENV NACOS_ADDR=${NACOS_ADDR:-127.0.0.1:8848}
+ENV SERVICE_NAME=${SERVICE_NAME:-gateway-service}
+ENV NAMESPACE=${NAMESPACE}
+ENV GROUP=${GROUP:-DEFAULT_GROUP}
+
+ADD https://raw.githubusercontent.com/api7/nacos-test-service/main/spring-nacos-1.0-SNAPSHOT.jar /app.jar
+
+ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom","-jar","/app.jar",\
+    "--suffix.num=${SUFFIX_NUM}","--spring.cloud.nacos.discovery.server-addr=${NACOS_ADDR}",\
+    "--spring.application.name=${SERVICE_NAME}","--spring.cloud.nacos.discovery.group=${GROUP}",\
+    "--spring.cloud.nacos.discovery.namespace=${NAMESPACE}"]
+EXPOSE 18001
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/opa/data.json b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/data.json
new file mode 100644
index 0000000..b1652ed
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/data.json
@@ -0,0 +1,30 @@
+{
+    "users": {
+        "alice": {
+            "headers": {
+                "Location": "http://example.com/auth"
+            },
+            "status_code": 302
+        },
+        "bob": {
+            "headers": {
+                "test": "abcd",
+                "abcd": "test"
+            }
+        },
+        "carla": {
+            "reason": "Give you a string reason"
+        },
+        "dylon": {
+            "reason": {
+                "code": 40001,
+                "desc": "Give you a object reason"
+            }
+        },
+        "elisa": {
+            "reason": {
+                "info": []
+            }
+        }
+    }
+}
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/opa/echo.rego b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/echo.rego
new file mode 100644
index 0000000..611f64f
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/echo.rego
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+package echo
+
+allow = false
+reason = input
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/opa/example.rego b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/example.rego
new file mode 100644
index 0000000..a916104
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/example.rego
@@ -0,0 +1,55 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+package example
+
+import input.request
+import data.users
+
+default allow = false
+
+allow {
+    request.headers["test-header"] == "only-for-test"
+    request.method == "GET"
+    startswith(request.path, "/hello")
+    request.query["test"] != "abcd"
+    request.query["user"]
+}
+
+allow {
+    request.method == "GET"
+    startswith(request.path, "/echo")
+}
+
+reason = users[request.query["user"]].reason {
+    not allow
+    request.query["user"]
+}
+
+headers = users[request.query["user"]].headers {
+    not allow
+    request.query["user"]
+}
+
+headers = {"user": request.query["user"]} {
+    allow
+    request.query["user"]
+}
+
+status_code = users[request.query["user"]].status_code {
+    not allow
+    request.query["user"]
+}
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/opa/with_route.rego b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/with_route.rego
new file mode 100644
index 0000000..c6a848e
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/opa/with_route.rego
@@ -0,0 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+package with_route
+default allow = false
+
+allow {
+    input.route.name == "valid"
+}
+
+status_code = 403 {not allow}
diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/build-function-image.sh b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/build-function-image.sh
new file mode 100755
index 0000000..dc9f34a
--- /dev/null
+++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/build-function-image.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+set -xeuo pipefail
+
+if [ !
-f "./pack" ]; then + wget -q https://github.com/buildpacks/pack/releases/download/v0.27.0/pack-v0.27.0-linux.tgz + tar -zxvf pack-v0.27.0-linux.tgz +fi + +# please update function-example/*/hello.go if you want to update function +./pack build test-uri-image --path ./ci/pod/openfunction/function-example/test-uri --builder openfunction/builder-go:v2.4.0-1.17 --env FUNC_NAME="HelloWorld" --env FUNC_CLEAR_SOURCE=true --env FUNC_GOPROXY="https://proxy.golang.org" +./pack build test-body-image --path ./ci/pod/openfunction/function-example/test-body --builder openfunction/builder-go:v2.4.0-1.17 --env FUNC_NAME="HelloWorld" --env FUNC_CLEAR_SOURCE=true --env FUNC_GOPROXY="https://proxy.golang.org" +./pack build test-header-image --path ./ci/pod/openfunction/function-example/test-header --builder openfunction/builder-go:v2.4.0-1.17 --env FUNC_NAME="HelloWorld" --env FUNC_CLEAR_SOURCE=true --env FUNC_GOPROXY="https://proxy.golang.org" diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/go.mod b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/go.mod new file mode 100644 index 0000000..bf571e0 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/go.mod @@ -0,0 +1,31 @@ +module example.com/hello + +go 1.17 + +require github.com/OpenFunction/functions-framework-go v0.3.0 + +require ( + github.com/SkyAPM/go2sky v1.4.1 // indirect + github.com/cloudevents/sdk-go/v2 v2.4.1 // indirect + github.com/dapr/dapr v1.6.0 // indirect + github.com/dapr/go-sdk v1.3.1 // indirect + github.com/go-logr/logr v1.2.0 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/json-iterator/go v1.1.11 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.1 // indirect + github.com/pkg/errors v0.9.1 // indirect + go.uber.org/atomic v1.9.0 // indirect + go.uber.org/multierr v1.7.0 // indirect + go.uber.org/zap v1.19.1 // indirect + golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f // indirect + golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 // indirect + golang.org/x/text v0.3.7 // indirect + google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2 // indirect + google.golang.org/grpc v1.40.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect + gopkg.in/yaml.v3 v3.0.0 // indirect + k8s.io/klog/v2 v2.30.0 // indirect + skywalking.apache.org/repo/goapi v0.0.0-20220401015832-2c9eee9481eb // indirect +) diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/go.sum b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/go.sum new file mode 100644 index 0000000..f51a530 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/go.sum @@ -0,0 +1,1760 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod 
h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.51.0/go.mod h1:hWtGJ6gnXH+KgDv+V0zFGDvpi07n3z8ZNj3T1RW0Gcw= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.86.0/go.mod h1:YG2MRW8zzPSZaztnTZtxbMPK2VYaHg4NTDYZMG+5ZqQ= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.12.2/go.mod h1:BmI/dqa6eXfm8WTp+JIN6d6vtVGq+vcsnglFKn/aVkY= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +contrib.go.opencensus.io/exporter/prometheus v0.4.0/go.mod h1:o7cosnyfuPVK0tB8q0QmaQNhGnptITnPQB+z1+qeFB0= +contrib.go.opencensus.io/exporter/zipkin v0.1.1/go.mod h1:GMvdSl3eJ2gapOaLKzTKE3qDgUkJ86k9k3yY2eqwkzc= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= 
+github.com/99designs/keyring v1.1.5/go.mod h1:7hsVvt2qXgtadGevGJ4ujg+u8m6SpJ5TpHqTozIPqf0= +github.com/AdhityaRamadhanus/fasthttpcors v0.0.0-20170121111917-d4c07198763a/go.mod h1:C0A1KeiVHs+trY6gUTPhhGammbrZ30ZfXRW/nuT7HLw= +github.com/AthenZ/athenz v1.10.15/go.mod h1:7KMpEuJ9E4+vMCMI3UQJxwWs0RZtQq7YXZ1IteUjdsc= +github.com/Azure/azure-amqp-common-go/v3 v3.0.1/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= +github.com/Azure/azure-amqp-common-go/v3 v3.1.0/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= +github.com/Azure/azure-event-hubs-go/v3 v3.3.10/go.mod h1:sszMsQpFy8Au2s2NColbnJY8lRVm1koW0XxBJ3rN5TY= +github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= +github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= +github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc= +github.com/Azure/azure-sdk-for-go v37.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v59.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go/sdk/azcore v0.20.0/go.mod h1:ZPW/Z0kLCTdDZaDbYTetxc9Cxl/2lNqxYHYNOF2bti0= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.12.0/go.mod h1:GJzjM4SR9T0KyX5gKCVyz1ytD8FeWeUPCwtFCt1AyfE= +github.com/Azure/azure-sdk-for-go/sdk/internal v0.8.1/go.mod h1:KLF4gFr6DcKFZwSuH8w8yEK6DpFl3LP5rhdvAb7Yz5I= +github.com/Azure/azure-sdk-for-go/sdk/keyvault/azsecrets v0.3.0/go.mod h1:aJ4Pej3ivJnoNJ4UPgh/snHVLSSV2Mcc62srBQZ4TWE= +github.com/Azure/azure-sdk-for-go/sdk/keyvault/internal v0.1.0/go.mod h1:qKJHexVLI0iqKFeV/2WnqbRBQtJTPOMeBdmHOxs+E88= +github.com/Azure/azure-service-bus-go v0.10.10/go.mod h1:o5z/3lDG1iT/T/G7vgIwIqVDTx9Qa2wndf5OdzSzpF8= +github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= +github.com/Azure/azure-storage-blob-go v0.10.0/go.mod h1:ep1edmW+kNQx4UfWM9heESNmQdijykocJ0YOxmMX8SE= +github.com/Azure/azure-storage-queue-go v0.0.0-20191125232315-636801874cdd/go.mod h1:K6am8mT+5iFXgingS9LUc7TmbsW6XBw3nxaRyaMyWc8= +github.com/Azure/go-amqp v0.13.0/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= +github.com/Azure/go-amqp v0.13.1/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= +github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= +github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0= +github.com/Azure/go-autorest/autorest v0.9.6/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630= +github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= +github.com/Azure/go-autorest/autorest v0.11.3/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= +github.com/Azure/go-autorest/autorest v0.11.7/go.mod h1:V6p3pKZx1KKkJubbxnDWrzNhEIfOy/pTGasLqzHIPHs= +github.com/Azure/go-autorest/autorest v0.11.17/go.mod h1:eipySxLmqSyC5s5k1CLupqet0PSENBEDP93LQ9a8QYw= +github.com/Azure/go-autorest/autorest v0.11.23/go.mod h1:BAWYUWGPEtKPzjVkp0Q6an0MJcJDsoh5Z1BFAEFs4Xs= +github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= +github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod 
h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= +github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= +github.com/Azure/go-autorest/autorest/adal v0.8.2/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= +github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= +github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= +github.com/Azure/go-autorest/autorest/adal v0.9.4/go.mod h1:/3SMAM86bP6wC9Ev35peQDUeqFZBMH07vvUOmg4z/fE= +github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= +github.com/Azure/go-autorest/autorest/adal v0.9.11/go.mod h1:nBKAnTomx8gDtl+3ZCJv2v0KACFHWTB2drffI1B68Pk= +github.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= +github.com/Azure/go-autorest/autorest/adal v0.9.16/go.mod h1:tGMin8I49Yij6AQ+rvV+Xa/zwxYQB5hmsd6DkfAx2+A= +github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.8/go.mod h1:kxyKZTSfKh8OVFWPAgOgQ/frrJgeYQJPyR5fLFmXko4= +github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.2/go.mod h1:7qkJkT+j6b+hIpzMOwPChJhTqS8VbsqqgULzMNRugoM= +github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= +github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= +github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= +github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= +github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA= +github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= +github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI= +github.com/Azure/go-autorest/autorest/validation v0.3.0/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= +github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= +github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= +github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= +github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/xgb 
v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/DataDog/zstd v1.3.6-0.20190409195224-796139022798/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= +github.com/DataDog/zstd v1.4.6-0.20210211175136-c6db21d202f4/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= +github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= +github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= +github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/OneOfOne/xxhash v1.2.7/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= +github.com/OpenFunction/functions-framework-go v0.3.0 h1:yiVwk7IysrMPnG3eCOgRLZbpsCUaYU3gRYA7dqIPREo= +github.com/OpenFunction/functions-framework-go v0.3.0/go.mod h1:DbssgwZJRVd8VOls6aLpQwqBWu6gbDM4G+7RwwCJMEQ= +github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/sarama v1.23.1/go.mod h1:XLH1GYJnLVE0XCr6KdJGVJRTwY30moWNJ4sERjXX6fs= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/SkyAPM/go2sky v1.4.1 h1:FV0jUB8UeC5CW0Z12j8xgrK0LoVV85Z92ShQU0G3Xfo= +github.com/SkyAPM/go2sky v1.4.1/go.mod h1:cebzbFtq5oc9VrgJy0Sv7oePj/TjIlXPdj2ntHdCXd0= +github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= +github.com/StackExchange/wmi v0.0.0-20210224194228-fe8f1750fd46/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/a8m/documentdb v1.3.1-0.20211026005403-13c3593b3c3a/go.mod h1:4Z0mpi7fkyqjxUdGiNMO3vagyiUoiwLncaIX6AsW5z0= +github.com/aerospike/aerospike-client-go v4.5.0+incompatible/go.mod h1:zj8LBEnWBDOVEIJt8LvaRvDG5ARAoa5dBeHaB472NRc= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= +github.com/agrea/ptr v0.0.0-20180711073057-77a518d99b7b/go.mod h1:Tie46d3UWzXpj+Fh9+DQTyaUxEpFBPOLXrnx7nxlKRo= +github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= 
+github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alibaba/sentinel-golang v1.0.3/go.mod h1:Lag5rIYyJiPOylK8Kku2P+a23gdKMMqzQS7wTnjWEpk= +github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.2/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc= +github.com/alibabacloud-go/darabonba-openapi v0.1.4/go.mod h1:j03z4XUkIC9aBj/w5Bt7H0cygmPNt5sug8NXle68+Og= +github.com/alibabacloud-go/darabonba-openapi v0.1.14/go.mod h1:w4CosR7O/kapCtEEMBm3JsQqWBU/CnZ2o0pHorsTWDI= +github.com/alibabacloud-go/darabonba-string v1.0.0/go.mod h1:93cTfV3vuPhhEwGGpKKqhVW4jLe7tDpo3LUM0i0g6mA= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68/go.mod h1:6pb/Qy8c+lqua8cFpEy7g39NRRqOWc3rOwAy8m5Y2BY= +github.com/alibabacloud-go/endpoint-util v1.1.0/go.mod h1:O5FuCALmCKs2Ff7JFJMudHs0I5EBgecXXxZRyswlEjE= +github.com/alibabacloud-go/oos-20190601 v1.0.1/go.mod h1:t7g1ubvGwLe0cP+uLSrTza2S6xthOFZw43h9Zajt+Kw= +github.com/alibabacloud-go/openapi-util v0.0.7/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws= +github.com/alibabacloud-go/openapi-util v0.0.10/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws= +github.com/alibabacloud-go/tea v1.1.0/go.mod h1:IkGyUSX4Ba1V+k4pCtJUc6jDpZLFph9QMy2VUPTwukg= +github.com/alibabacloud-go/tea v1.1.7/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4= +github.com/alibabacloud-go/tea v1.1.8/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4= +github.com/alibabacloud-go/tea v1.1.11/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4= +github.com/alibabacloud-go/tea v1.1.15/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea v1.1.17/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea-utils v1.3.1/go.mod h1:EI/o33aBfj3hETm4RLiAxF/ThQdSngxrpF8rKUDJjPE= +github.com/alibabacloud-go/tea-utils v1.3.9/go.mod h1:EI/o33aBfj3hETm4RLiAxF/ThQdSngxrpF8rKUDJjPE= +github.com/alibabacloud-go/tea-utils v1.4.3/go.mod h1:KNcT0oXlZZxOXINnZBs6YvgOd5aYp9U67G+E3R8fcQw= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis/v2 v2.13.3/go.mod h1:uS970Sw5Gs9/iK3yBg0l9Uj9s25wXxSpQUE9EaJ/Blg= +github.com/aliyun/alibaba-cloud-sdk-go v1.61.18/go.mod h1:v8ESoHo4SyHmuB4b1tJqDHxfTGEciD+yhvOU/5s1Rfk= +github.com/aliyun/aliyun-oss-go-sdk v2.0.7+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= +github.com/aliyun/aliyun-tablestore-go-sdk v1.6.0/go.mod h1:jixoiNNRR/4ziq0yub1fTlxmDcQwlpkaujpaWIATQWM= +github.com/aliyun/credentials-go v1.1.2/go.mod h1:ozcZaMR5kLM7pwtCMEpVmQ242suV6qTJya2bDq4X1Tw= +github.com/aliyunmq/mq-http-go-sdk v1.0.3/go.mod h1:JYfRMQoPexERvnNNBcal0ZQ2TVQ5ialDiW9ScjaadEM= +github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= +github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/andybalholm/brotli v1.0.1/go.mod 
h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= +github.com/apache/pulsar-client-go v0.6.1-0.20211027182823-171ef578e91a/go.mod h1:EauTUv9sTmP9QRznRgK9hxnzCsIVfS8fyhTfGcuJBrE= +github.com/apache/pulsar-client-go/oauth2 v0.0.0-20201120111947-b8bd55bc02bd/go.mod h1:0UtvvETGDdvXNDCHa8ZQpxl+w3HbdFtfYZvDHLgWGTY= +github.com/apache/rocketmq-client-go v1.2.5/go.mod h1:Kap8oXIVLlHF50BGUbN9z97QUp1GaK1nOoCfsZnR2bw= +github.com/apache/rocketmq-client-go/v2 v2.1.0/go.mod h1:oEZKFDvS7sz/RWU0839+dQBupazyBV7WX5cP6nrio0Q= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.14.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= +github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/asaskevich/EventBus v0.0.0-20200907212545-49d423059eef/go.mod h1:JS7hed4L1fj0hXcyEejnW57/7LCetXggd+vwrRnYeII= +github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= +github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= +github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.19.48/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.32.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/aws/aws-sdk-go v1.41.7/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/awslabs/kinesis-aggregation/go v0.0.0-20210630091500-54e17340d32f/go.mod h1:SghidfnxvX7ribW6nHI7T+IBbc9puZ9kk5Tx/88h8P4= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= +github.com/beefsack/go-rate v0.0.0-20180408011153-efa7637bb9b6/go.mod 
h1:6YNgTHLutezwnBvyneBbwvB8C82y3dcoOj5EQJIdGXA= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= +github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= +github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q= +github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= +github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= +github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/camunda-cloud/zeebe/clients/go v1.0.1/go.mod h1:slW2ZP0pMmiZdxBLJHjGxax+E2AjjLFB608DRhounJI= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/cenkalti/backoff v2.0.0+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff v2.1.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cinience/go_rocketmq v0.0.2/go.mod h1:2YNY7emT546dcFpMEWLesmAEi4ndW7+tX5VfNf1Zsgs= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= 
+github.com/cloudevents/sdk-go/v2 v2.4.1 h1:rZJoz9QVLbWQmnvLPDFEmv17Czu+CfSPwMO6lhJ72xQ= +github.com/cloudevents/sdk-go/v2 v2.4.1/go.mod h1:MZiMwmAh5tGj+fPFvtHv9hKurKqXtdB9haJYMJ/7GJY= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20190827140505-75bee3e2ccb6/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4= +github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= +github.com/dancannon/gorethink v4.0.0+incompatible/go.mod h1:BLvkat9KmZc1efyYwhz3WnybhRZtgF1K929FD8z1avU= +github.com/danieljoos/wincred v1.0.2/go.mod h1:SnuYRW9lp1oJrZX/dXJqr0cPK5gYXqx3EJbmjhLdK9U= +github.com/dapr/components-contrib v1.6.0-rc.2/go.mod 
h1:30BaLseZXoK+UPD5E93dCTGZwlG3nWLNJazoJ9bKGlU= +github.com/dapr/dapr v1.6.0 h1:zc6/jHVkD4LkNosVM+PNVDPBnmwYqnXXPD7knvE9etU= +github.com/dapr/dapr v1.6.0/go.mod h1:ilH7anASii1b6hBRy2GTmf63Kj1/ejjaN9GcQJ2z5R8= +github.com/dapr/go-sdk v1.3.1 h1:VI7vp3ZwZu+O8k9vPZ0gTTCRywj+ZsLm7MIQqB9S7FU= +github.com/dapr/go-sdk v1.3.1/go.mod h1:tFH/t0z3qypmk5CXHvYSjf/1dGVi04voXfNnhbGgy/A= +github.com/dapr/kit v0.0.2-0.20210614175626-b9074b64d233/go.mod h1:y8r0VqUNKyd6xBXp7gQjwA59wlCLGfKzL5J8iJsN09w= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deepmap/oapi-codegen v1.3.6/go.mod h1:aBozjEveG+33xPiP55Iw/XbVkhtZHEGLq3nxlX0+hfU= +github.com/deepmap/oapi-codegen v1.8.1/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw= +github.com/denisenkom/go-mssqldb v0.0.0-20210411162248-d9abbec934ba/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= +github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= +github.com/dghubble/go-twitter v0.0.0-20190719072343-39e5462e111f/go.mod h1:xfg4uS5LEzOj8PgZV7SQYRHbG7jPUnelEiaAVJxmhJE= +github.com/dghubble/oauth1 v0.6.0/go.mod h1:8pFdfPkv/jr8mkChVbNVuJ0suiHe278BtWI4Tk1ujxk= +github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgrijalva/jwt-go v3.2.1-0.20210802184156-9742bd7fca1c+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/didip/tollbooth v4.0.2+incompatible/go.mod h1:A9b0665CE6l1KmzpDws2++elm/CsuWBMa5Jv4WY0PEY= +github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= +github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= +github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v17.12.0-ce-rc1.0.20200916142827-bd33bbf0497b+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= +github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= 
+github.com/dvsekhvalnov/jose2go v0.0.0-20180829124132-7f401d37b68a/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/eclipse/paho.mqtt.golang v1.3.5/go.mod h1:eTzb4gxwwyWpqBUHGQZ4ABAV7+Jgm1PklsYT/eo8Hcc= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= +github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg= +github.com/fasthttp-contrib/sessions v0.0.0-20160905201309-74f6ac73d5d5/go.mod h1:MQXNGeXkpojWTxbN7vXoE3f7EmlA11MlJbsrJpVBINA= +github.com/fasthttp/router v1.3.8/go.mod h1:DQBvuHvYbn3SUN6pGjwjPbpCNpWfCFc5Ipn/Fj6XxFc= +github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239/go.mod h1:Gdwt2ce0yfBxPvZrHkprdPPTTS3N5rwmLE8T22KBXlw= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod 
h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= +github.com/getkin/kin-openapi v0.2.0/go.mod h1:V1z9xl9oF5Wt7v32ne4FmiF1alpS4dM6mNzoywPOXlk= +github.com/getkin/kin-openapi v0.61.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4= +github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/ghodss/yaml v0.0.0-20180820084758-c7ce16629ff4/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= +github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= +github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= +github.com/go-chi/chi v4.0.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs= +github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= +github.com/go-errors/errors v1.4.0/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= +github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-logr/logr v0.3.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-logr/logr v1.2.0 h1:QK40JKJyMdUDz+h+xvCsru/bJhvG0UxvePV0ufL/AcE= +github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/zapr v0.2.0/go.mod h1:qhKdvif7YF5GI9NWEpyxTSSBdGmzkNguibrdCNVPunU= +github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= +github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI= +github.com/go-openapi/analysis v0.17.0/go.mod 
h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik= +github.com/go-openapi/analysis v0.18.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik= +github.com/go-openapi/analysis v0.19.2/go.mod h1:3P1osvZa9jKjb8ed2TPng3f0i/UY9snX6gxi44djMjk= +github.com/go-openapi/analysis v0.19.5/go.mod h1:hkEAkxagaIvIP7VTn8ygJNkd4kAYON2rCu0v0ObL0AU= +github.com/go-openapi/errors v0.17.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0= +github.com/go-openapi/errors v0.18.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0= +github.com/go-openapi/errors v0.19.2/go.mod h1:qX0BLWsyaKfvhluLejVpVNwNRdXZhEbTA4kxxpKBC94= +github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= +github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= +github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= +github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= +github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= +github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= +github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= +github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= +github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= +github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= +github.com/go-openapi/loads v0.19.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= +github.com/go-openapi/loads v0.19.2/go.mod h1:QAskZPMX5V0C2gvfkGZzJlINuP7Hx/4+ix5jWFxsNPs= +github.com/go-openapi/loads v0.19.4/go.mod h1:zZVHonKd8DXyxyw4yfnVjPzBjIQcLt0CCsn0N0ZrQsk= +github.com/go-openapi/runtime v0.0.0-20180920151709-4f900dc2ade9/go.mod h1:6v9a6LTXWQCdL8k1AO3cvqx5OtZY/Y9wKTgaoP6YRfA= +github.com/go-openapi/runtime v0.19.0/go.mod h1:OwNfisksmmaZse4+gpV3Ne9AyMOlP1lt4sK4FXt0O64= +github.com/go-openapi/runtime v0.19.4/go.mod h1:X277bwSUBxVlCYR3r7xgZZGKVvBd/29gLDlFGtJ8NL4= +github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= +github.com/go-openapi/spec v0.17.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= +github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= +github.com/go-openapi/spec v0.19.2/go.mod h1:sCxk3jxKgioEJikev4fgkNmwS+3kuYdJtcsZsD5zxMY= +github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= +github.com/go-openapi/strfmt v0.17.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU= +github.com/go-openapi/strfmt v0.18.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU= +github.com/go-openapi/strfmt v0.19.0/go.mod h1:+uW+93UVvGGq2qGaZxdDeJqSAqBqBdl+ZPMF/cC8nDY= +github.com/go-openapi/strfmt v0.19.3/go.mod h1:0yX7dbo8mKIvc3XSKp7MNfxw4JytCfCD6+bY1AVL9LU= +github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= +github.com/go-openapi/swag v0.17.0/go.mod 
h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= +github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= +github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4= +github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA= +github.com/go-openapi/validate v0.19.5/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85nY1c2z52x1Gk4= +github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= +github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= +github.com/go-redis/redis/v8 v8.8.0/go.mod h1:F7resOH5Kdug49Otu24RjHWwgK7u9AmtqWMnCV1iP5Y= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= +github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= +github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd 
v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/gocql/gocql v0.0.0-20210515062232-b7ef815b4556/go.mod h1:DL0ekTmBSTdlNF25Orwt/JMzqIq3EJ4MVa/J/uK64OY= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogap/errors v0.0.0-20200228125012-531a6449b28c/go.mod h1:tbRYYYC7g/H7QlCeX0Z2zaThWKowF4QQCFIsGgAsqRo= +github.com/gogap/stack v0.0.0-20150131034635-fef68dddd4f8/go.mod h1:6q1WEv2BiAO4FSdwLQTJbWQYAn1/qDNJHUGJNXCj9kM= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/goji/httpauth v0.0.0-20160601135302-2da839ab0f4d/go.mod h1:nnjvkQ9ptGaCkuDUx6wNykzzlUixGxvkme+H/lnzb+A= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= 
+github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v0.0.0-20181025225059-d3de96c4c28e/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/cel-go v0.9.0/go.mod h1:U7ayypeSkw23szu4GaQTPJGx66c20mx8JklMSxrmI1w= +github.com/google/cel-spec v0.6.0/go.mod h1:Nwjgxy5CbjlPrtCWjeDjUyKMl8w41YBYGjsyDdqk0xA= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 
h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= +github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/mux v0.0.0-20181024020800-521ea7b17d02/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grandcat/zeroconf v0.0.0-20190424104450-85eadb44205c/go.mod h1:YjKB0WsLXlMkO9p+wGTCoPIDGRJH0mz7E526PxkQVxI= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp 
v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-hclog v0.9.1/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-msgpack v1.1.5/go.mod h1:gWVc3sv/wbDmR3rQsj1CAktEZzoz1YNK9NfGLXJ69/4= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/raft v1.2.0/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8= +github.com/hashicorp/raft-boltdb v0.0.0-20171010151810-6e5ba93211ea/go.mod h1:pNv7Wc3ycL6F5oOWn+tPGo2gWD4a5X+yp/ntwdKLjRk= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hazelcast/hazelcast-go-client v0.0.0-20190530123621-6cf767c2f31a/go.mod h1:VhwtcZ7sg3xq7REqGzEy7ylSWGKz4jZd05eCJropNzI= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= +github.com/inconshreveable/mousetrap v1.0.0/go.mod 
h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/influxdata/influxdb-client-go v1.4.0/go.mod h1:S+oZsPivqbcP1S9ur+T+QqXvrYS3NCZeMQtBoH4D1dw= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= +github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= +github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.5.0/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.3.0/go.mod h1:b0JqxHvPmljG+HQ5IsvQ0yqeSi4nGcDTVjFoiLDb0Ik= +github.com/jackc/pgx v3.6.2+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.6.0/go.mod h1:vPh43ZzxijXUVJ+t/EmXBtFmbFVO72cuneCT9oAlxAg= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle 
v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jawher/mow.cli v1.0.4/go.mod h1:5hQj2V8g+qYmLUVWqu4Wuja1pI57M83EChYLVZ0sMKk= +github.com/jawher/mow.cli v1.2.0/go.mod h1:y+pcA3jBAdo/GIZx/0rFjw/K2bVEODP9rfZOfaiq8Ko= +github.com/jcmturner/gofork v0.0.0-20190328161633-dc7c13fece03/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869/go.mod h1:cJ6Cj7dQo+O6GJNiMx+Pa94qKj+TG8ONdKHgMNIyyag= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jonboulle/clockwork v0.2.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= +github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/kataras/go-errors v0.0.3/go.mod h1:K3ncz8UzwI3bpuksXt5tQLmrRlgxfv+52ARvAu1+I+o= +github.com/kataras/go-serializer v0.0.4/go.mod h1:/EyLBhXKQOJ12dZwpUZZje3lGy+3wnvG7QKaVJtm/no= +github.com/keighl/postmark v0.0.0-20190821160221-28358b1a94e3/go.mod h1:Pz+php+2qQ4fWYwCa5O/rcnovTT2ylkKg3OnMLuFUbg= +github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d/go.mod h1:JJNrCn9otv/2QP4D7SMJBgaleKpOf66PnW6F5WGNRIc= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= 
+github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.10.8/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.12/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= +github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= +github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/lestrrat/go-envload v0.0.0-20180220120943-6ed08b54a570/go.mod h1:BLt8L9ld7wVsvEWQbuLrUZnCMnUmLZ+CGDzKtclrTlE= +github.com/lestrrat/go-file-rotatelogs v0.0.0-20180223000712-d3151e2a480f/go.mod h1:UGmTpUd3rjbtfIpwAPrcfmGf/Z1HS95TATB+m57TPB8= +github.com/lestrrat/go-strftime v0.0.0-20180220042222-ba3bf9c1d042/go.mod h1:TPpsiPUEh0zFL1Snz4crhMlBe60PYxRHr5oFF3rRYg0= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/linkedin/goavro/v2 v2.9.8/go.mod 
h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/machinebox/graphql v0.2.2/go.mod h1:F+kbVMHuwrQ5tYgU9JXlnskM8nOaFxCAEolaQybkjWA= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/matoous/go-nanoid v1.5.0/go.mod h1:zyD2a71IubI24efhpvkJz+ZwfwagzgSO6UNiFsZKN7U= +github.com/matoous/go-nanoid/v2 v2.0.0/go.mod h1:FtS4aGPVfEkxKxhdWPAspZpZSh1cOjtM7Ej/So3hR0g= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-runewidth v0.0.0-20181025052659-b20a3daf6a39/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= 
+github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/microcosm-cc/bluemonday v1.0.7/go.mod h1:HOT/6NaBlR0f9XlxD3zolN6Z3N8Lp4pvhp+jLS5ihnI= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= +github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ= +github.com/minio/highwayhash v1.0.0/go.mod h1:xQboMTeM9nY9v/LlAOxFctujiv5+Aq2hR5dxBpaMbdc= +github.com/minio/highwayhash v1.0.1/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/morikuni/aec v0.0.0-20170113033406-39771216ff4c/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= +github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/grpc-proxy 
v0.0.0-20181017164139-0f1106ef9c76/go.mod h1:x5OoJHDHqxHS801UIuhqGl6QdSAEJvtausosHSdazIo= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/nacos-group/nacos-sdk-go v1.0.8/go.mod h1:hlAPn3UdzlxIlSILAyOXKxjFSvDJ9oLzTJ9hLAK1KzA= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/jwt v0.3.3-0.20200519195258-f2bf5ce574c7/go.mod h1:n3cvmLfBfnpV4JJRN7lRYCyZnw48ksGsbThGXEk4w9M= +github.com/nats-io/jwt v1.1.0/go.mod h1:n3cvmLfBfnpV4JJRN7lRYCyZnw48ksGsbThGXEk4w9M= +github.com/nats-io/jwt v1.2.2/go.mod h1:/xX356yQA6LuXI9xWW7mZNpxgF2mBmGecH+Fj34sP5Q= +github.com/nats-io/jwt/v2 v2.0.0-20200916203241-1f8ce17dff02/go.mod h1:vs+ZEjP+XKy8szkBmQwCB7RjYdIlMaPsFPs4VdS4bTQ= +github.com/nats-io/jwt/v2 v2.0.0-20201015190852-e11ce317263c/go.mod h1:vs+ZEjP+XKy8szkBmQwCB7RjYdIlMaPsFPs4VdS4bTQ= +github.com/nats-io/jwt/v2 v2.0.0-20210125223648-1c24d462becc/go.mod h1:PuO5FToRL31ecdFqVjc794vK0Bj0CwzveQEDvkb7MoQ= +github.com/nats-io/jwt/v2 v2.0.0-20210208203759-ff814ca5f813/go.mod h1:PuO5FToRL31ecdFqVjc794vK0Bj0CwzveQEDvkb7MoQ= +github.com/nats-io/jwt/v2 v2.0.1/go.mod h1:VRP+deawSXyhNjXmxPCHskrR6Mq50BqpEI5SEcNiGlY= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats-server/v2 v2.1.8-0.20200524125952-51ebd92a9093/go.mod h1:rQnBf2Rv4P9adtAs/Ti6LfFmVtFG6HLhl/H7cVshcJU= +github.com/nats-io/nats-server/v2 v2.1.8-0.20200601203034-f8d6dd992b71/go.mod h1:Nan/1L5Sa1JRW+Thm4HNYcIDcVRFc5zK9OpSZeI2kk4= +github.com/nats-io/nats-server/v2 v2.1.8-0.20200929001935-7f44d075f7ad/go.mod h1:TkHpUIDETmTI7mrHN40D1pzxfzHZuGmtMbtb83TGVQw= +github.com/nats-io/nats-server/v2 v2.1.8-0.20201129161730-ebe63db3e3ed/go.mod h1:XD0zHR/jTXdZvWaQfS5mQgsXj6x12kMjKLyAk/cOGgY= +github.com/nats-io/nats-server/v2 v2.1.8-0.20210205154825-f7ab27f7dad4/go.mod h1:kauGd7hB5517KeSqspW2U1Mz/jhPbTrE8eOXzUPk1m0= +github.com/nats-io/nats-server/v2 v2.1.8-0.20210227190344-51550e242af8/go.mod h1:/QQ/dpqFavkNhVnjvMILSQ3cj5hlmhB66adlgNbjuoA= +github.com/nats-io/nats-server/v2 v2.1.9/go.mod h1:9qVyoewoYXzG1ME9ox0HwkkzyYvnlBDugfR4Gg/8uHU= +github.com/nats-io/nats-server/v2 v2.2.1-0.20210330155036-61cbd74e213d/go.mod h1:eKlAaGmSQHZMFQA6x56AaP5/Bl9N3mWF4awyT2TTpzc= +github.com/nats-io/nats-server/v2 v2.2.1/go.mod h1:A+5EOqdnhH7FvLxtAK6SEDx6hyHriVOwf+FT/eEV99c= +github.com/nats-io/nats-streaming-server v0.21.2/go.mod h1:2W8QfNVOtcFpmf0bRiwuLtRb0/hkX4NuOxPOFNOThVQ= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nats.go v1.10.0/go.mod h1:AjGArbfyR50+afOUotNX2Xs5SYHf+CoOa5HH1eEl2HE= +github.com/nats-io/nats.go v1.10.1-0.20200531124210-96f2130e4d55/go.mod h1:ARiFsjW9DVxk48WJbO3OSZ2DG8fjkMi7ecLmXoY/n9I= +github.com/nats-io/nats.go v1.10.1-0.20200606002146-fc6fed82929a/go.mod h1:8eAIv96Mo9QW6Or40jUHejS7e4VwZ3VRYD6Sf0BTDp4= +github.com/nats-io/nats.go v1.10.1-0.20201021145452-94be476ad6e0/go.mod h1:VU2zERjp8xmF+Lw2NH4u2t5qWZxwc7jB3+7HVMWQXPI= +github.com/nats-io/nats.go v1.10.1-0.20210127212649-5b4924938a9a/go.mod h1:Sa3kLIonafChP5IF0b55i9uvGR10I3hPETFbi4+9kOI= +github.com/nats-io/nats.go v1.10.1-0.20210211000709-75ded9c77585/go.mod h1:uBWnCKg9luW1g7hgzPxUjHFRI40EuTSX7RCzgnc74Jk= +github.com/nats-io/nats.go v1.10.1-0.20210228004050-ed743748acac/go.mod h1:hxFvLNbNmT6UppX5B5Tr/r3g+XSwGjJzFn6mxPNJEHc= +github.com/nats-io/nats.go 
v1.10.1-0.20210330225420-a0b1f60162f8/go.mod h1:Zq9IEHy7zurF0kFbU5aLIknnFI7guh8ijHk+2v+Vf5g= +github.com/nats-io/nats.go v1.12.0/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.4/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s= +github.com/nats-io/nkeys v0.2.0/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s= +github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/nats-io/stan.go v0.8.3/go.mod h1:Ejm8bbHnMTSptU6uNMAVuxeapMJYBB/Ml3ej6z4GoSY= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.14.1/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.15.0/go.mod h1:hF8qUzuuC8DJGygJH3726JnCZX4MYbRB8yFfISqnKUg= +github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.10.2/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.10.5/go.mod h1:gza4q3jKQJijlu05nKWRCW/GavJumGt8aNRxWg7mt48= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/open-policy-agent/opa v0.23.2/go.mod h1:rrwxoT/b011T0cyj+gg2VvxqTtn6N3gp/jzmr3fjW44= +github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= 
+github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/oracle/oci-go-sdk/v54 v54.0.0/go.mod h1:+t+yvcFGVp+3ZnztnyxqXfQDsMlq8U25faBLa+mqCMc= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= +github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= +github.com/phayes/freeport v0.0.0-20171002181615-b8543db493a5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= +github.com/pierrec/lz4 v0.0.0-20190327172049-315a67e90e41/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/pkg/errors v0.0.0-20181023235946-059132a15dd0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete 
v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= +github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= +github.com/prometheus/client_golang v0.0.0-20181025174421-f30f42803563/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.4.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.9.0/go.mod h1:FqZLKOZnGdFAhOK4nqGHa7D66IdsO+O441Eve7ptJDU= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20181020173914-7e9e6cabbd39/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.28.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.30.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= 
+github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/statsd_exporter v0.21.0/go.mod h1:rbT83sZq2V+p73lHhPZfMc3MLCHmSHelCh9hSGYNLTQ= +github.com/prometheus/statsd_exporter v0.22.3/go.mod h1:N4Z1+iSqc9rnxlT1N8Qn3l65Vzb5t4Uq0jpg8nxyhio= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/rs/zerolog v1.18.0/go.mod h1:9nvC1axdVrAHcu/s9taAVfBuIdTZLVQmKQyvrUjF5+I= +github.com/rs/zerolog v1.25.0/go.mod h1:7KHcEGe0QZPOm2IE4Kpb5rTh6n1h2hIgS5OOnu1rUaI= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday v2.0.0+incompatible/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/savsgio/gotils v0.0.0-20210217112953-d4a072536008/go.mod h1:TWNAOTaVzGOXq8RbEvHnhzA/A2sLZzgn0m6URjnukY8= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sendgrid/rest v2.6.3+incompatible/go.mod h1:kXX7q3jZtJXK5c5qK83bSGMdV6tsOE70KbHoqJls4lE= +github.com/sendgrid/sendgrid-go 
v3.5.0+incompatible/go.mod h1:QRQt+LX/NmgVEvmdRw0VT/QgUn499+iza2FnDca9fg8= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shirou/gopsutil/v3 v3.21.6/go.mod h1:JfVbDpIBLVzT8oKbvMg9P3wEIMDDpVn+LwHTKj0ST88= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v0.0.0-20190710185942-9d28bd7c0945/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/sony/gobreaker v0.4.2-0.20210216022020-dd874f9dd33b/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.0-20181021141114-fe5e611709b0/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= +github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI= +github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v0.0.0-20181024212040-082b515c9490/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod 
h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= +github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v1.0.0/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/supplyon/gremcos v0.1.0/go.mod h1:ZnXsXGVbGCYDFU5GLPX9HZLWfD+ZWkiPo30KUjNoOtw= +github.com/tebeka/strftime v0.1.3/go.mod h1:7wJm3dZlpr4l/oVK0t1HYIc4rMzQ2XJlOMIUJUJH6XQ= +github.com/testcontainers/testcontainers-go v0.9.0/go.mod h1:b22BFXhRbg4PJmeMVWh6ftqjyZHgiIl3w274e9r3C2E= +github.com/tidwall/gjson v1.2.1/go.mod h1:c/nTNbUr0E0OrXEhq1pwa8iEgc2DOt4ZZqAt1HtCkPA= +github.com/tidwall/gjson v1.8.0/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk= +github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= +github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v0.0.0-20190325153808-1166b9ac2b65/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.1.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tjfoc/gmsm v1.3.2/go.mod h1:HaUcFuY0auTiaHB9MHFGCPx5IaLhTUd2atbCFBQXn9w= +github.com/tklauser/go-sysconf v0.3.6/go.mod h1:MkWzOF4RMCshBAMXuhXJs64Rte09mITnppBXY/rYEFI= +github.com/tklauser/numcpus v0.2.2/go.mod h1:x3qojaO3uyYt0i56EW/VUYs7uBvdl2fkfZFu0T9wgjM= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/toolkits/concurrent v0.0.0-20150624120057-a4371d70e3e3/go.mod h1:QDlpd3qS71vYtakd2hmdpqhJ9nwv6mD6A30bQ1BPBFE= +github.com/trusch/grpc-proxy 
v0.0.0-20190529073533-02b64529f274/go.mod h1:dzrPb02OTNDVimdCCBR1WAPu9a69n3VnfDyCX/GT/gE= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.21.0/go.mod h1:jjraHZVbKOXftJfsOYoAjaeygpj5hr8ermTRJNroD7A= +github.com/valyala/fasthttp v1.31.1-0.20211216042702-258a4c17b4f4/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= +github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= +github.com/vmware/vmware-go-kcl v1.5.0/go.mod h1:P92YfaWfQyudNf62BNx+E2rJn9pd165MhHsRt8ajkpM= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= +github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b/go.mod h1:HptNXiXVDcJjXe9SqMd0v2FsL9f8dz4GnXgltU6q/co= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= +github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark 
v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/gopher-lua v0.0.0-20191220021717-ab39c6098bdb/go.mod h1:gqRgreBUhTSL0GeU64rtZ3Uq3wtjOa/TB2YfrtkCbVQ= +github.com/yuin/gopher-lua v0.0.0-20200603152657-dc2b0ca8b37e/go.mod h1:gqRgreBUhTSL0GeU64rtZ3Uq3wtjOa/TB2YfrtkCbVQ= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.etcd.io/etcd v0.5.0-alpha.5.0.20200819165624-17cef6e3e9d5/go.mod h1:skWido08r9w6Lq/w70DO5XYIKMu4QFu1+4VsqLQuJy8= +go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= +go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= +go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= +go.mongodb.org/mongo-driver v1.1.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= +go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/otel v0.19.0/go.mod h1:j9bF567N9EfomkSidSfmMwIwIBuP37AMAIzVW85OxSg= +go.opentelemetry.io/otel/metric v0.19.0/go.mod h1:8f9fglJPRnXuskQmKpnad31lcLJ2VmNNqIsx/uIwBSc= +go.opentelemetry.io/otel/oteltest v0.19.0/go.mod h1:tI4yxwh8U21v7JD6R3BcA/2+RBoTKFexE/PJ/nSO7IA= +go.opentelemetry.io/otel/trace v0.19.0/go.mod h1:4IXiNextNOpPnRlI4ryK69mn5iC84bjBWZQA5DXz/qg= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.5.1/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/automaxprocs v1.4.0/go.mod h1:/mTEdr7LvHhs0v7mjdxDreTz1OG5zdZGqgOnhWiR/+Q= +go.uber.org/goleak 
v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= +go.uber.org/goleak v1.1.11-0.20210813005559-691160354723 h1:sHOAIxRGBp443oHZIPB+HsUGaksVCXVQENPxwTfQdH4= +go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.7.0 h1:zaiO/rmgFjbmCXdSYJWQcdvOCsthmdaHfr3Gm2Kx4Ec= +go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/ratelimit v0.2.0/go.mod h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.8.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= +go.uber.org/zap v1.19.1 h1:ue41HOKd1vGURxrmeKIgELGb3jPW9DMUDGtsinblHwI= +go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= +goji.io v2.0.2+incompatible/go.mod h1:sbqFwrtqZACxLBTQcdgVjFh54yGVCvwq8+w49MVMMIk= +golang.org/x/crypto v0.0.0-20180820150726-614d502a4dac/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190404164418-38d8ce5564a5/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191112222119-e1110fd1c708/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto 
v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181023182221-1baf3a9d7d67/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200425230154-ff2c4b7c35a0/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201010224723-4f7140c49acb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201016165138-7b1cca2348c0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net 
v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f h1:OfiFi4JbukWwe3lzw+xunroH1mnC1e2Gy5cxNJApiSY= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180828065106-d99a578cf41b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190321052220-f7bb7a8bee54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190523142557-0e01d883c5c5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210316164454-77fc1eacc6aa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180810170437-e96c4e24768d/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools 
v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190424220101-1e8e1cfdf96b/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools 
v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200616133436-c1934b75d054/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools 
v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gomodules.xyz/jsonpatch/v2 v2.1.0/go.mod h1:IhYNNY4jnS53ZnfE4PAmpKtDpTCj1JFXc+3mwe7XcUU= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod 
h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto 
v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201102152239-715cce707fb0/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210701133433-6b8dcf568a95/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U= +google.golang.org/genproto 
v0.0.0-20210707164411-8c882eb9abba/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2 h1:NHN4wOCScVzKhPenJ2dt+BTs3X/XkBVI/Rh4iDt55T8= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0 h1:AGJ0Ih4mHjSeibYkFGh1dD9KJ/eOtZ93I6hoHhukQ5Q= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod 
h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/couchbase/gocb.v1 v1.6.4/go.mod h1:Ri5Qok4ZKiwmPr75YxZ0uELQy45XJgUSzeUnK806gTY= +gopkg.in/couchbase/gocbcore.v7 v7.1.18/go.mod h1:48d2Be0MxRtsyuvn+mWzqmoGUG9uA00ghopzOs148/E= +gopkg.in/couchbaselabs/gocbconnstr.v1 v1.0.4/go.mod h1:ZjII0iKx4Veo6N6da+pEZu/ptNyKLg9QTVt7fFmR6sw= +gopkg.in/couchbaselabs/gojcbmock.v1 v1.0.4/go.mod h1:jl/gd/aQ2S8whKVSTnsPs6n7BPeaAuw9UglBD/OF7eo= +gopkg.in/couchbaselabs/jsonx.v1 v1.0.0/go.mod h1:oR201IRovxvLW/eISevH12/+MiKHtNQAKfcX8iWZvJY= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fatih/pool.v2 v2.0.0/go.mod h1:8xVGeu1/2jr2wm5V9SPuMht2H5AEmf5aFMGSQixtjTY= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw= +gopkg.in/gorethink/gorethink.v4 v4.1.0/go.mod h1:M7JgwrUAmshJ3iUbEK0Pt049MPyPK+CYDGGaEjdZb/c= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod 
h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= +gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= +gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= +gopkg.in/jcmturner/gokrb5.v7 v7.2.3/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM= +gopkg.in/jcmturner/gokrb5.v7 v7.3.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM= +gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8= +gopkg.in/kataras/go-serializer.v0 v0.0.4/go.mod h1:v2jHg/3Wp7uncDNzenTsX75PRDxhzlxoo/qDvM4ZGxk= +gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/square/go-jose.v2 v2.5.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools v0.0.0-20181223230014-1083505acf35/go.mod h1:R//lfYlUuTOTfblYI3lGoAAAebUdzjvbmQsuB7Ykd90= +gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= 
+honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +k8s.io/api v0.19.2/go.mod h1:IQpK0zFQ1xc5iNIQPqzgoOwuFugaYHK4iCknlAQP9nI= +k8s.io/api v0.20.0/go.mod h1:HyLC5l5eoS/ygQYl1BXBgFzWNlkHiAuyNAbevIn+FKg= +k8s.io/apiextensions-apiserver v0.19.2/go.mod h1:EYNjpqIAvNZe+svXVx9j4uBaVhTB4C94HkY3w058qcg= +k8s.io/apiextensions-apiserver v0.20.0/go.mod h1:ZH+C33L2Bh1LY1+HphoRmN1IQVLTShVcTojivK3N9xg= +k8s.io/apimachinery v0.19.2/go.mod h1:DnPGDnARWFvYa3pMHgSxtbZb7gpzzAZ1pTfaUNDVlmA= +k8s.io/apimachinery v0.20.0/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apiserver v0.19.2/go.mod h1:FreAq0bJ2vtZFj9Ago/X0oNGC51GfubKK/ViOKfVAOA= +k8s.io/apiserver v0.20.0/go.mod h1:6gRIWiOkvGvQt12WTYmsiYoUyYW0FXSiMdNl4m+sxY8= +k8s.io/cli-runtime v0.20.0/go.mod h1:C5tewU1SC1t09D7pmkk83FT4lMAw+bvMDuRxA7f0t2s= +k8s.io/client-go v0.19.2/go.mod h1:S5wPhCqyDNAlzM9CnEdgTGV4OqhsW3jGO1UM1epwfJA= +k8s.io/client-go v0.20.0/go.mod h1:4KWh/g+Ocd8KkCwKF8vUNnmqgv+EVnQDK4MBF4oB5tY= +k8s.io/code-generator v0.19.2/go.mod h1:moqLn7w0t9cMs4+5CQyxnfA/HV8MF6aAVENF+WZZhgk= +k8s.io/code-generator v0.20.0/go.mod h1:UsqdF+VX4PU2g46NC2JRs4gc+IfrctnwHb76RNbWHJg= +k8s.io/component-base v0.19.2/go.mod h1:g5LrsiTiabMLZ40AR6Hl45f088DevyGY+cCE2agEIVo= +k8s.io/component-base v0.20.0/go.mod h1:wKPj+RHnAr8LW2EIBIK7AxOHPde4gme2lzXwVSoRXeA= +k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= +k8s.io/gengo v0.0.0-20200428234225-8167cfdcfc14/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= +k8s.io/gengo v0.0.0-20201113003025-83324d819ded/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= +k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= +k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= +k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= +k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.30.0 h1:bUO6drIvCIsvZ/XFgfxoGFQU/a4Qkh0iAlvUR7vlHJw= +k8s.io/klog/v2 v2.30.0/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o= +k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= +k8s.io/metrics v0.20.0/go.mod h1:9yiRhfr8K8sjdj2EthQQE9WvpYDvsXIV3CjN4Ruq4Jw= +k8s.io/utils v0.0.0-20200729134348-d5654de09c73/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +k8s.io/utils v0.0.0-20200912215256-4140de9c8800/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.9/go.mod h1:dzAXnQbTRyDlZPJX2SUPEqvnB+j7AJjtlox7PEwigU0= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= +sigs.k8s.io/controller-runtime v0.7.0/go.mod h1:pJ3YBrJiAqMAZKi6UVGuE98ZrroV1p+pIhoHsMm9wdU= 
+sigs.k8s.io/kustomize v2.0.3+incompatible/go.mod h1:MkjgH3RdOWrievjo6c9T245dYlB5QeXV4WCbnt/PEpU= +sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= +skywalking.apache.org/repo/goapi v0.0.0-20220401015832-2c9eee9481eb h1:+PP2DpKFN/rEporLdPI4A7bPWQjwfARlUDKNhSab8iM= +skywalking.apache.org/repo/goapi v0.0.0-20220401015832-2c9eee9481eb/go.mod h1:uWwwvhcwe2MD/nJCg0c1EE/eL6KzaBosLHDfMFoEJ30= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +stathat.com/c/consistent v1.0.0/go.mod h1:QkzMWzcbB+yQBL2AttO6sgsQS/JSTapcDISJalmCDS0= diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/hello.go b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/hello.go new file mode 100644 index 0000000..df45e24 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-body/hello.go @@ -0,0 +1,37 @@ +/* + * Copyright 2022 The OpenFunction Authors. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package hello + +import ( + "fmt" + "io" + "net/http" + + "github.com/OpenFunction/functions-framework-go/functions" +) + +func init() { + functions.HTTP("HelloWorld", HelloWorld) +} + +func HelloWorld(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + fmt.Fprintf(w, "Hello, %s!\n", string(body)) +} diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-header/go.mod b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-header/go.mod new file mode 100644 index 0000000..76b2646 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-header/go.mod @@ -0,0 +1,3 @@ +module example.com/hello + +go 1.17 diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-header/hello.go b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-header/hello.go new file mode 100644 index 0000000..418f9fb --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-header/hello.go @@ -0,0 +1,30 @@ +/* + * Copyright 2022 The OpenFunction Authors. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package hello + +import ( + "fmt" + "net/http" +) + +func HelloWorld(w http.ResponseWriter, r *http.Request) { + header := r.Header + fmt.Fprintf(w, "%s", header["Authorization"]) +} diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/go.mod b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/go.mod new file mode 100644 index 0000000..046483f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/go.mod @@ -0,0 +1,32 @@ +module example.com/hello + +go 1.17 + +require github.com/OpenFunction/functions-framework-go v0.4.0 + +require ( + github.com/SkyAPM/go2sky v1.4.1 // indirect + github.com/cloudevents/sdk-go/v2 v2.4.1 // indirect + github.com/dapr/dapr v1.8.3 // indirect + github.com/dapr/go-sdk v1.5.0 // indirect + github.com/go-logr/logr v1.2.3 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/gorilla/mux v1.8.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pkg/errors v0.9.1 // indirect + go.uber.org/atomic v1.9.0 // indirect + go.uber.org/multierr v1.7.0 // indirect + go.uber.org/zap v1.21.0 // indirect + golang.org/x/net v0.0.0-20220621193019-9d032be2e588 // indirect + golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect + golang.org/x/text v0.3.7 // indirect + google.golang.org/genproto v0.0.0-20220622171453-ea41d75dfa0f // indirect + google.golang.org/grpc v1.47.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + k8s.io/klog/v2 v2.30.0 // indirect + skywalking.apache.org/repo/goapi v0.0.0-20220401015832-2c9eee9481eb // indirect +) diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/go.sum b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/go.sum new file mode 100644 index 0000000..7aaa3ce --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/go.sum @@ -0,0 +1,2615 @@ +bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8= +bazil.org/fuse v0.0.0-20200407214033-5883e5a4b512/go.mod h1:FbcW6z/2VytnFDhZfumh8Ss8zxHE6qpMP5sHTRe0EaM= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod 
h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.86.0/go.mod h1:YG2MRW8zzPSZaztnTZtxbMPK2VYaHg4NTDYZMG+5ZqQ= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= +cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= +cloud.google.com/go/iam 
v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.12.2/go.mod h1:BmI/dqa6eXfm8WTp+JIN6d6vtVGq+vcsnglFKn/aVkY= +cloud.google.com/go/secretmanager v1.4.0/go.mod h1:h2VZz7Svt1W9/YVl7mfcX9LddvS6SOLOvMoOXBhYT1k= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +code.cloudfoundry.org/clock v0.0.0-20180518195852-02e53af36e6c/go.mod h1:QD9Lzhd/ux6eNQVUDVRJX/RKTigpewimNYBi7ivZKY8= +contrib.go.opencensus.io/exporter/prometheus v0.4.1/go.mod h1:t9wvfitlUjGXG2IXAZsuFq26mDGid/JwCEXp+gTG/9U= +contrib.go.opencensus.io/exporter/zipkin v0.1.1/go.mod h1:GMvdSl3eJ2gapOaLKzTKE3qDgUkJ86k9k3yY2eqwkzc= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +dubbo.apache.org/dubbo-go/v3 v3.0.3-0.20220610080020-48691a404537/go.mod h1:O7eTHAilCWlqBjEkG2MW9khZFImiARb/tSOE8PJas+g= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= +github.com/99designs/keyring v1.1.6/go.mod h1:16e0ds7LGQQcT59QqkTg72Hh5ShM51Byv5PEmW6uoRU= +github.com/99designs/keyring v1.2.0/go.mod h1:ETJn2A9cfvJKq1Q4FeOc+eetK52Ik0kUGog7Uy+xvX8= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20210715213245-6c3934b029d8/go.mod h1:CzsSbkDixRphAF5hS6wbMKq0eI6ccJRb7/A0M6JBnwg= +github.com/AdhityaRamadhanus/fasthttpcors v0.0.0-20170121111917-d4c07198763a/go.mod h1:C0A1KeiVHs+trY6gUTPhhGammbrZ30ZfXRW/nuT7HLw= +github.com/AthenZ/athenz v1.10.39/go.mod h1:3Tg8HLsiQZp81BJY58JBeU2BR6B/H4/0MQGfCwhHNEA= +github.com/Azure/azure-amqp-common-go/v3 v3.0.1/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3/go.mod h1:7rPmbSfszeovxGfc5fSAXE4ehlXQZHpMja2OtxC2Tas= +github.com/Azure/azure-event-hubs-go/v3 v3.3.18/go.mod h1:R5H325+EzgxcBDkUerEwtor7ZQg77G7HiOTwpcuIVXY= +github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= +github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= +github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc= +github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= +github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v37.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v56.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= 
+github.com/Azure/azure-sdk-for-go v65.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.1.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0/go.mod h1:bhXu1AjYL+wutSL/kpSq6s7733q2Rb0yuot9Zgfqa/0= +github.com/Azure/azure-sdk-for-go/sdk/data/aztables v1.0.1/go.mod h1:l3wvZkG9oW07GLBW5Cd0WwG5asOfJ8aqE8raUvNzLpk= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.0.0/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= +github.com/Azure/azure-sdk-for-go/sdk/keyvault/azsecrets v0.7.1/go.mod h1:WcC2Tk6JyRlqjn2byvinNnZzgdXmZ1tOiIOWNh1u0uA= +github.com/Azure/azure-sdk-for-go/sdk/keyvault/internal v0.5.0/go.mod h1:9V2j0jn9jDEkCkv8w/bKTNppX/d0FVA1ud77xCIP4KA= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.0.1/go.mod h1:LH9XQnMr2ZYxQdVdCrzLO9mxeDyrDFa6wbSI3x5zCZk= +github.com/Azure/azure-service-bus-go v0.10.10/go.mod h1:o5z/3lDG1iT/T/G7vgIwIqVDTx9Qa2wndf5OdzSzpF8= +github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= +github.com/Azure/azure-storage-blob-go v0.10.0/go.mod h1:ep1edmW+kNQx4UfWM9heESNmQdijykocJ0YOxmMX8SE= +github.com/Azure/azure-storage-queue-go v0.0.0-20191125232315-636801874cdd/go.mod h1:K6am8mT+5iFXgingS9LUc7TmbsW6XBw3nxaRyaMyWc8= +github.com/Azure/go-amqp v0.13.0/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= +github.com/Azure/go-amqp v0.13.1/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= +github.com/Azure/go-amqp v0.17.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= +github.com/Azure/go-amqp v0.17.4/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= +github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/Azure/go-ansiterm v0.0.0-20210608223527-2377c96fe795/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= +github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0= +github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= +github.com/Azure/go-autorest/autorest v0.11.3/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= +github.com/Azure/go-autorest/autorest v0.11.7/go.mod h1:V6p3pKZx1KKkJubbxnDWrzNhEIfOy/pTGasLqzHIPHs= +github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= +github.com/Azure/go-autorest/autorest v0.11.20/go.mod h1:o3tqFY+QR40VOlk+pV4d77mORO64jOXSgEnPQgLK6JY= +github.com/Azure/go-autorest/autorest v0.11.24/go.mod h1:G6kyRlFnTuSbEYkQGawPfsCswgme4iYf6rfSKUDzbCc= +github.com/Azure/go-autorest/autorest v0.11.27/go.mod h1:7l8ybrIdUmGqZMTD0sRtAr8NvbHjfofbf8RSP2q7w7U= +github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= 
+github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= +github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= +github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= +github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= +github.com/Azure/go-autorest/autorest/adal v0.9.4/go.mod h1:/3SMAM86bP6wC9Ev35peQDUeqFZBMH07vvUOmg4z/fE= +github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= +github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= +github.com/Azure/go-autorest/autorest/adal v0.9.15/go.mod h1:tGMin8I49Yij6AQ+rvV+Xa/zwxYQB5hmsd6DkfAx2+A= +github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.11/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= +github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.5/go.mod h1:ADQAXrkgm7acgWVUNamOgh8YNrv4p27l3Wc55oVfpzg= +github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= +github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= +github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= +github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= +github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= +github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= +github.com/Azure/go-autorest/autorest/validation v0.3.0/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= +github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= +github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= +github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= +github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= +github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml 
v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/DataDog/zstd v1.3.6-0.20190409195224-796139022798/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= +github.com/DataDog/zstd v1.5.0/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= +github.com/Flaque/filet v0.0.0-20201012163910-45f684403088/go.mod h1:TK+jB3mBs+8ZMWhU5BqZKnZWJ1MrLo8etNVg51ueTBo= +github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= +github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= +github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= +github.com/Microsoft/go-winio v0.4.16-0.20201130162521-d1ffc52c7331/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= +github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= +github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.4.17/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.5.1/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= +github.com/Microsoft/hcsshim v0.8.7-0.20190325164909-8abdbb8205e4/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= +github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ= +github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8= +github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg= +github.com/Microsoft/hcsshim v0.8.15/go.mod h1:x38A4YbHbdxJtc0sF6oIz+RG0npwSCAvn69iY6URG00= +github.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+VxGOoXdC600= +github.com/Microsoft/hcsshim v0.8.20/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= +github.com/Microsoft/hcsshim v0.8.21/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= +github.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg= +github.com/Microsoft/hcsshim v0.9.1/go.mod h1:Y/0uV2jUab5kBI7SQgl62at0AVX7uaruzADAVmxm3eM= +github.com/Microsoft/hcsshim v0.9.2/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= +github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU= +github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= +github.com/NYTimes/gziphandler 
v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= +github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= +github.com/OpenFunction/functions-framework-go v0.4.0 h1:WHuKHRgwFNiTe+6/lJqDiQC0zOU7cS+HVf/XN/dA1j4= +github.com/OpenFunction/functions-framework-go v0.4.0/go.mod h1:+uYjTEYmn2uqIyViZtg9OF+bUNdjbkWNd7jrQWc7iEc= +github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/RoaringBitmap/roaring v1.1.0/go.mod h1:icnadbWcNyfEHlYdr+tDlOTih1Bf/h+rzPpv4sbomAA= +github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/sarama v1.23.1/go.mod h1:XLH1GYJnLVE0XCr6KdJGVJRTwY30moWNJ4sERjXX6fs= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/SkyAPM/go2sky v1.4.1 h1:FV0jUB8UeC5CW0Z12j8xgrK0LoVV85Z92ShQU0G3Xfo= +github.com/SkyAPM/go2sky v1.4.1/go.mod h1:cebzbFtq5oc9VrgJy0Sv7oePj/TjIlXPdj2ntHdCXd0= +github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= +github.com/StackExchange/wmi v0.0.0-20210224194228-fe8f1750fd46/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/Workiva/go-datastructures v1.0.52/go.mod h1:Z+F2Rca0qCsVYDS8z7bAGm8f3UkzuWYS/oBZz5a7VVA= +github.com/a8m/documentdb v1.3.1-0.20220405205223-5b41ba0aaeb1/go.mod h1:4Z0mpi7fkyqjxUdGiNMO3vagyiUoiwLncaIX6AsW5z0= +github.com/aerospike/aerospike-client-go v4.5.0+incompatible/go.mod h1:zj8LBEnWBDOVEIJt8LvaRvDG5ARAoa5dBeHaB472NRc= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/agiledragon/gomonkey v2.0.2+incompatible/go.mod h1:2NGfXu1a80LLr2cmWXGBDaHEjb1idR6+FVlX5T3D9hw= +github.com/agrea/ptr v0.0.0-20180711073057-77a518d99b7b/go.mod h1:Tie46d3UWzXpj+Fh9+DQTyaUxEpFBPOLXrnx7nxlKRo= +github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= +github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alexflint/go-filemutex 
v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= +github.com/alexflint/go-filemutex v1.1.0/go.mod h1:7P4iRhttt/nUvUOrYIhcpMzv2G6CY9UnI16Z+UJqRyk= +github.com/alibaba/sentinel-golang v1.0.4/go.mod h1:Lag5rIYyJiPOylK8Kku2P+a23gdKMMqzQS7wTnjWEpk= +github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.4/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc= +github.com/alibabacloud-go/darabonba-openapi v0.1.4/go.mod h1:j03z4XUkIC9aBj/w5Bt7H0cygmPNt5sug8NXle68+Og= +github.com/alibabacloud-go/darabonba-openapi v0.1.16/go.mod h1:ZjyqRbbZOaUBSh7keeH8VQN/BzCPvxCQwMuJGDdbmXQ= +github.com/alibabacloud-go/darabonba-string v1.0.0/go.mod h1:93cTfV3vuPhhEwGGpKKqhVW4jLe7tDpo3LUM0i0g6mA= +github.com/alibabacloud-go/debug v0.0.0-20190504072949-9472017b5c68/go.mod h1:6pb/Qy8c+lqua8cFpEy7g39NRRqOWc3rOwAy8m5Y2BY= +github.com/alibabacloud-go/endpoint-util v1.1.0/go.mod h1:O5FuCALmCKs2Ff7JFJMudHs0I5EBgecXXxZRyswlEjE= +github.com/alibabacloud-go/oos-20190601 v1.0.1/go.mod h1:t7g1ubvGwLe0cP+uLSrTza2S6xthOFZw43h9Zajt+Kw= +github.com/alibabacloud-go/openapi-util v0.0.7/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws= +github.com/alibabacloud-go/openapi-util v0.0.10/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws= +github.com/alibabacloud-go/tea v1.1.0/go.mod h1:IkGyUSX4Ba1V+k4pCtJUc6jDpZLFph9QMy2VUPTwukg= +github.com/alibabacloud-go/tea v1.1.7/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4= +github.com/alibabacloud-go/tea v1.1.8/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4= +github.com/alibabacloud-go/tea v1.1.11/go.mod h1:/tmnEaQMyb4Ky1/5D+SE1BAsa5zj/KeGOFfwYm3N/p4= +github.com/alibabacloud-go/tea v1.1.15/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea v1.1.17/go.mod h1:nXxjm6CIFkBhwW4FQkNrolwbfon8Svy6cujmKFUq98A= +github.com/alibabacloud-go/tea-utils v1.3.1/go.mod h1:EI/o33aBfj3hETm4RLiAxF/ThQdSngxrpF8rKUDJjPE= +github.com/alibabacloud-go/tea-utils v1.3.9/go.mod h1:EI/o33aBfj3hETm4RLiAxF/ThQdSngxrpF8rKUDJjPE= +github.com/alibabacloud-go/tea-utils v1.4.3/go.mod h1:KNcT0oXlZZxOXINnZBs6YvgOd5aYp9U67G+E3R8fcQw= +github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= +github.com/alicebob/miniredis/v2 v2.13.3/go.mod h1:uS970Sw5Gs9/iK3yBg0l9Uj9s25wXxSpQUE9EaJ/Blg= +github.com/aliyun/alibaba-cloud-sdk-go v1.61.18/go.mod h1:v8ESoHo4SyHmuB4b1tJqDHxfTGEciD+yhvOU/5s1Rfk= +github.com/aliyun/aliyun-oss-go-sdk v2.0.7+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= +github.com/aliyun/aliyun-tablestore-go-sdk v1.6.0/go.mod h1:jixoiNNRR/4ziq0yub1fTlxmDcQwlpkaujpaWIATQWM= +github.com/aliyun/credentials-go v1.1.2/go.mod h1:ozcZaMR5kLM7pwtCMEpVmQ242suV6qTJya2bDq4X1Tw= +github.com/aliyunmq/mq-http-go-sdk v1.0.3/go.mod h1:JYfRMQoPexERvnNNBcal0ZQ2TVQ5ialDiW9ScjaadEM= +github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= +github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= +github.com/apache/dubbo-getty v1.4.9-0.20220610060150-8af010f3f3dc/go.mod h1:cPJlbcHUTNTpiboMQjMHhE9XBni11LiBiG8FdrDuVzk= 
+github.com/apache/dubbo-go-hessian2 v1.9.1/go.mod h1:xQUjE7F8PX49nm80kChFvepA/AvqAZ0oh/UaB6+6pBE= +github.com/apache/dubbo-go-hessian2 v1.9.3/go.mod h1:xQUjE7F8PX49nm80kChFvepA/AvqAZ0oh/UaB6+6pBE= +github.com/apache/dubbo-go-hessian2 v1.11.0/go.mod h1:7rEw9guWABQa6Aqb8HeZcsYPHsOS7XT1qtJvkmI6c5w= +github.com/apache/pulsar-client-go v0.8.1/go.mod h1:yJNcvn/IurarFDxwmoZvb2Ieylg630ifxeO/iXpk27I= +github.com/apache/pulsar-client-go/oauth2 v0.0.0-20220120090717-25e59572242e/go.mod h1:Xee4tgYLFpYcPMcTfBYWE1uKRzeciodGTSEDMzsR6i8= +github.com/apache/rocketmq-client-go v1.2.5/go.mod h1:Kap8oXIVLlHF50BGUbN9z97QUp1GaK1nOoCfsZnR2bw= +github.com/apache/rocketmq-client-go/v2 v2.1.0/go.mod h1:oEZKFDvS7sz/RWU0839+dQBupazyBV7WX5cP6nrio0Q= +github.com/apache/rocketmq-client-go/v2 v2.1.1-rc2/go.mod h1:DDYjQ9wxYmJLjgNK4+RqyFE8/13gLK/Bugz4U6zD5MI= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.14.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/appscode/go-querystring v0.0.0-20170504095604-0126cfb3f1dc/go.mod h1:w648aMHEgFYS6xb0KVMMtZ2uMeemhiKCuD2vj6gY52A= +github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= +github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg= +github.com/armon/go-metrics v0.3.9/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/asaskevich/EventBus v0.0.0-20200907212545-49d423059eef/go.mod h1:JS7hed4L1fj0hXcyEejnW57/7LCetXggd+vwrRnYeII= +github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= +github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= +github.com/aws/aws-sdk-go v1.19.48/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.32.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.34.9/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/aws/aws-sdk-go v1.41.7/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod 
h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4= +github.com/aws/aws-sdk-go-v2/config v1.8.3/go.mod h1:4AEiLtAb8kLs7vgw2ZV3p2VZ1+hBavOc84hqxVNpCyw= +github.com/aws/aws-sdk-go-v2/credentials v1.4.3/go.mod h1:FNNC6nQZQUuyhq5aE5c7ata8o9e4ECGmS4lAXC7o1mQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.6.0/go.mod h1:gqlclDEZp4aqJOancXK6TN24aKhT0W0Ae9MHk3wzTMM= +github.com/aws/aws-sdk-go-v2/internal/ini v1.2.4/go.mod h1:ZcBrrI3zBKlhGFNYWvju0I3TR93I7YIgAfy82Fh4lcQ= +github.com/aws/aws-sdk-go-v2/service/appconfig v1.4.2/go.mod h1:FZ3HkCe+b10uFZZkFdvf98LHW21k49W8o8J366lqVKY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.2/go.mod h1:72HRZDLMtmVQiLG2tLfQcaWLCssELvGl+Zf2WVxMmR8= +github.com/aws/aws-sdk-go-v2/service/sso v1.4.2/go.mod h1:NBvT9R1MEF+Ud6ApJKM0G+IkPchKS7p7c2YPKwHmBOk= +github.com/aws/aws-sdk-go-v2/service/sts v1.7.2/go.mod h1:8EzeIqfWt2wWT4rJVu3f21TfrhJ8AEMzVybRNSb/b4g= +github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E= +github.com/awslabs/kinesis-aggregation/go v0.0.0-20210630091500-54e17340d32f/go.mod h1:SghidfnxvX7ribW6nHI7T+IBbc9puZ9kk5Tx/88h8P4= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= +github.com/beefsack/go-rate v0.0.0-20220214233405-116f4ca011a0/go.mod h1:6YNgTHLutezwnBvyneBbwvB8C82y3dcoOj5EQJIdGXA= +github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= +github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= +github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= +github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q= +github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= +github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= +github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod 
h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= +github.com/bshuster-repo/logrus-logstash-hook v1.0.0/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= +github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= +github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= +github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= +github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= +github.com/bytecodealliance/wasmtime-go v0.35.0/go.mod h1:q320gUxqyI8yB+ZqRuaJOEnGkAnHh6WtJjMaT2CW4wI= +github.com/camunda/zeebe/clients/go/v8 v8.0.3/go.mod h1:iOEgFlCYAPdqae6iPp0ajeo2RSxJirU39i+UAN74NOY= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/cenkalti/backoff v2.0.0+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff v2.1.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.1.2/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/certifi/gocertifi v0.0.0-20191021191039-0944d244cd40/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= +github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= +github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= +github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg= +github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc= +github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= +github.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= +github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= +github.com/cilium/ebpf v0.7.0/go.mod 
h1:/oI2+1shJiTGAMgl6/RgJr36Eo1jzrRcAWbcXO2usCA= +github.com/cinience/go_rocketmq v0.0.2/go.mod h1:2YNY7emT546dcFpMEWLesmAEi4ndW7+tX5VfNf1Zsgs= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudevents/sdk-go/v2 v2.4.1 h1:rZJoz9QVLbWQmnvLPDFEmv17Czu+CfSPwMO6lhJ72xQ= +github.com/cloudevents/sdk-go/v2 v2.4.1/go.mod h1:MZiMwmAh5tGj+fPFvtHv9hKurKqXtdB9haJYMJ/7GJY= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/cockroachdb/datadriven v0.0.0-20200714090401-bf6692d28da5/go.mod h1:h6jFvWxBdQXxjopDMZyH2UVceIRfR84bdzbkoKrsWNo= +github.com/cockroachdb/errors v1.2.4/go.mod h1:rQD95gz6FARkaKkQXUksEje/d9a6wBJoCr5oaCLELYA= +github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f/go.mod h1:i/u985jwjWRlyHXQbwatDASoW0RMlZ/3i9yJHE2xLkI= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE= +github.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU= +github.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= +github.com/containerd/aufs v1.0.0/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= +github.com/containerd/btrfs v0.0.0-20201111183144-404b9149801e/go.mod h1:jg2QkJcsabfHugurUvvPhS3E08Oxiuh5W/g1ybB4e0E= +github.com/containerd/btrfs v0.0.0-20210316141732-918d888fb676/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= +github.com/containerd/btrfs v1.0.0/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss= +github.com/containerd/cgroups v0.0.0-20190717030353-c4b9ac5c7601/go.mod h1:X9rLEHIqSf/wfK8NsPqxJmeZgW4pcfzdXITDrUSJ6uI= +github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod 
h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko= +github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM= +github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= +github.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= +github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= +github.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU= +github.com/containerd/cgroups v1.0.2/go.mod h1:qpbpJ1jmlqsR9f2IyaLPsdkCdnt0rbDVqIDlhuu5tRY= +github.com/containerd/cgroups v1.0.3/go.mod h1:/ofk34relqNjSGyqPrmEULrO4Sc8LJhvJmWbUCUKqj8= +github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= +github.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= +github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE= +github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw= +github.com/containerd/console v1.0.2/go.mod h1:ytZPjGgY2oeTkAONYafi2kSj0aYggsf8acV1PGKCbzQ= +github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= +github.com/containerd/containerd v1.2.10/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.1-0.20191213020239-082f7e3aed57/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.0-beta.2.0.20200729163537-40b22ef07410/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.9/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.5.0-beta.1/go.mod h1:5HfvG1V2FsKesEGQ17k5/T7V960Tmcumvqn8Mc+pCYQ= +github.com/containerd/containerd v1.5.0-beta.3/go.mod h1:/wr9AVtEM7x9c+n0+stptlo/uBBoBORwEx6ardVcmKU= +github.com/containerd/containerd v1.5.0-beta.4/go.mod h1:GmdgZd2zA2GYIBZ0w09ZvgqEq8EfBp/m3lcVZIvPHhI= +github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoTJseu1FGOKuoA4nNb2s= +github.com/containerd/containerd v1.5.1/go.mod h1:0DOxVqwDy2iZvrZp2JUx/E+hS0UNTVn7dJnIOwtYR4g= +github.com/containerd/containerd v1.5.7/go.mod h1:gyvv6+ugqY25TiXxcZC3L5yOeYgEw0QMhscqVp1AR9c= +github.com/containerd/containerd v1.5.8/go.mod h1:YdFSv5bTFLpG2HIYmfqDpSYYTDX+mc5qtSuYx1YUb/s= +github.com/containerd/containerd v1.5.9/go.mod h1:fvQqCfadDGga5HZyn3j4+dx56qj2I9YwBrlSdalvJYQ= +github.com/containerd/containerd v1.6.2/go.mod h1:sidY30/InSE1j2vdD1ihtKoJz+lWdaXMdiAeIupaf+s= +github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod 
h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo= +github.com/containerd/continuity v0.0.0-20201208142359-180525291bb7/go.mod h1:kR3BEg7bDFaEddKm54WSmrol1fKWDU1nKYkgrcgZT7Y= +github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ= +github.com/containerd/continuity v0.1.0/go.mod h1:ICJu0PwR54nI0yPEnJ6jcS+J7CZAUXrLh8lPo2knzsM= +github.com/containerd/continuity v0.2.2/go.mod h1:pWygW9u7LtS1o4N/Tn0FoCFDIXZ7rxcMX7HX1Dmibvk= +github.com/containerd/fifo v0.0.0-20180307165137-3d5202aec260/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= +github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= +github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= +github.com/containerd/fifo v0.0.0-20201026212402-0724c46b320c/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0= +github.com/containerd/fifo v0.0.0-20210316144830-115abcc95a1d/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= +github.com/containerd/fifo v1.0.0/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4= +github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU= +github.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk= +github.com/containerd/go-cni v1.1.0/go.mod h1:Rflh2EJ/++BA2/vY5ao3K6WJRR/bZKsX123aPk+kUtA= +github.com/containerd/go-cni v1.1.3/go.mod h1:Rflh2EJ/++BA2/vY5ao3K6WJRR/bZKsX123aPk+kUtA= +github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= +github.com/containerd/go-runc v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= +github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g= +github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= +github.com/containerd/go-runc v1.0.0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok= +github.com/containerd/imgcrypt v1.0.1/go.mod h1:mdd8cEPW7TPgNG4FpuP3sGBiQ7Yi/zak9TYCG3juvb0= +github.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6TNsg0ctmizkrOgXRNQjAPFWpMYRWuiB6dSF4Pfa5SA= +github.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow= +github.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms= +github.com/containerd/imgcrypt v1.1.3/go.mod h1:/TPA1GIDXMzbj01yd8pIbQiLdQxed5ue1wb8bP7PQu4= +github.com/containerd/nri v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c= +github.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= +github.com/containerd/nri v0.1.0/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= +github.com/containerd/stargz-snapshotter/estargz v0.4.1/go.mod h1:x7Q9dg9QYb4+ELgxmo4gBUeJB0tl5dqH1Sdz0nJU1QM= +github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= +github.com/containerd/ttrpc v0.0.0-20190828172938-92c8520ef9f8/go.mod 
h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= +github.com/containerd/ttrpc v0.0.0-20191028202541-4f1b8fe65a5c/go.mod h1:LPm1u0xBw8r8NOKoOdNMeVHSawSsltak+Ihv+etqsE8= +github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= +github.com/containerd/ttrpc v1.0.2/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y= +github.com/containerd/ttrpc v1.1.0/go.mod h1:XX4ZTnoOId4HklF4edwc4DcqskFZuvXB1Evzy5KFQpQ= +github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc= +github.com/containerd/typeurl v0.0.0-20190911142611-5eb25027c9fd/go.mod h1:GeKYzf2pQcqv7tJ0AoCuuhtnqhva5LNU3U+OyKxxJpk= +github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg= +github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= +github.com/containerd/zfs v0.0.0-20200918131355-0a33824f23a2/go.mod h1:8IgZOBdv8fAgXddBT4dBXJPtxyRsejFIpXoklgxgEjw= +github.com/containerd/zfs v0.0.0-20210301145711-11e8f1707f62/go.mod h1:A9zfAbMlQwE+/is6hi0Xw8ktpL+6glmqZYtevJgaB8Y= +github.com/containerd/zfs v0.0.0-20210315114300-dde8f0fda960/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containerd/zfs v0.0.0-20210324211415-d5c4544f0433/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containerd/zfs v1.0.0/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY= +github.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= +github.com/containernetworking/cni v1.0.1/go.mod h1:AKuhXbN5EzmD4yTNtfSsX3tPcmtrBI6QcRV0NiNt15Y= +github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM= +github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8= +github.com/containernetworking/plugins v1.0.1/go.mod h1:QHCfGpaTwYTbbH+nZXKVTxNBDZcxSOplJT5ico8/FLE= +github.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc= +github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4= +github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= +github.com/containers/ocicrypt v1.1.2/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= +github.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= +github.com/coreos/go-iptables v0.6.0/go.mod h1:Qe8Bv2Xik5FyTXwgIbLAnv2sWSBmvWdFETJConOQ//Q= +github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20161114122254-48702e0da86b/go.mod 
h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= +github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creasty/defaults v1.5.2/go.mod h1:FPZ+Y0WNrbqOVw+c6av63eyHUAl6pMHZwqLPvXUZGfY= +github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4= +github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= +github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ= +github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= +github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= +github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= +github.com/dancannon/gorethink v4.0.0+incompatible/go.mod h1:BLvkat9KmZc1efyYwhz3WnybhRZtgF1K929FD8z1avU= +github.com/danieljoos/wincred v1.0.2/go.mod h1:SnuYRW9lp1oJrZX/dXJqr0cPK5gYXqx3EJbmjhLdK9U= +github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= +github.com/danieljoos/wincred v1.1.2/go.mod h1:GijpziifJoIBfYh+S7BbkdUTU4LfM+QnGqR5Vl2tAx0= +github.com/dapr/components-contrib v1.8.0-rc.6/go.mod h1:gxrCpaosbI0n3SFW7fKSvJU/ymjryHqrdRgqmsknuno= +github.com/dapr/components-contrib v1.8.1-rc.1/go.mod h1:gxrCpaosbI0n3SFW7fKSvJU/ymjryHqrdRgqmsknuno= +github.com/dapr/dapr v1.8.0/go.mod h1:yAsDiK5oecG0htw2S8JG9RFaeHJVdlTfZyOrL57AvRM= +github.com/dapr/dapr v1.8.3 h1:wAmP8lXeI1OeCnLGi3XT1PokbSaM0/N71ChZhjPdTCw= +github.com/dapr/dapr v1.8.3/go.mod h1:/0JyKebxzz0vPwYXc/2qHBXIicUi01HUWnpQ8AiJ0zM= +github.com/dapr/go-sdk v1.5.0 h1:OVkrupquJEOL1qRtwKcMVrFKYhw4UJQvgOJNduo2VxE= +github.com/dapr/go-sdk v1.5.0/go.mod h1:Cvz3taCVu22WCNEUbc9/szvG/yJxWPAV4dcaG+zDWA4= +github.com/dapr/kit v0.0.2-0.20210614175626-b9074b64d233/go.mod 
h1:y8r0VqUNKyd6xBXp7gQjwA59wlCLGfKzL5J8iJsN09w= +github.com/dave/jennifer v1.4.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deepmap/oapi-codegen v1.3.6/go.mod h1:aBozjEveG+33xPiP55Iw/XbVkhtZHEGLq3nxlX0+hfU= +github.com/deepmap/oapi-codegen v1.8.1/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw= +github.com/denisenkom/go-mssqldb v0.0.0-20210411162248-d9abbec934ba/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= +github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= +github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= +github.com/dghubble/go-twitter v0.0.0-20190719072343-39e5462e111f/go.mod h1:xfg4uS5LEzOj8PgZV7SQYRHbG7jPUnelEiaAVJxmhJE= +github.com/dghubble/oauth1 v0.6.0/go.mod h1:8pFdfPkv/jr8mkChVbNVuJ0suiHe278BtWI4Tk1ujxk= +github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY= +github.com/dgraph-io/badger/v3 v3.2103.2/go.mod h1:RHo4/GmYcKKh5Lxu63wLEMHJ70Pac2JqZRYGhlyAo2M= +github.com/dgraph-io/ristretto v0.1.0/go.mod h1:fux0lOrBhrVCJd3lcTHsIJhq1T2rokOu6v9Vcb3Q9ug= +github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/didip/tollbooth v4.0.2+incompatible/go.mod h1:A9b0665CE6l1KmzpDws2++elm/CsuWBMa5Jv4WY0PEY= +github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= +github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= +github.com/distribution/distribution/v3 v3.0.0-20211118083504-a29a3c99a684/go.mod h1:UfCu3YXJJCI+IdnqGgYP82dk2+Joxmv+mUTVBES6wac= +github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= +github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= +github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/cli v20.10.11+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= +github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker 
v20.10.11+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v20.10.14+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= +github.com/docker/docker-credential-helpers v0.6.4/go.mod h1:ofX3UI0Gz1TteYBjtgs07O36Pyasyp66D2uKT7H8W1c= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-events v0.0.0-20170721190031-9461782956ad/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= +github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA= +github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= +github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= +github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= +github.com/dubbogo/go-zookeeper v1.0.3/go.mod h1:fn6n2CAEer3novYgk9ULLwAjuV8/g4DdC2ENwRb6E+c= +github.com/dubbogo/go-zookeeper v1.0.4-0.20211212162352-f9d2183d89d5/go.mod h1:fn6n2CAEer3novYgk9ULLwAjuV8/g4DdC2ENwRb6E+c= +github.com/dubbogo/gost v1.9.0/go.mod h1:pPTjVyoJan3aPxBPNUX0ADkXjPibLo+/Ib0/fADXSG8= +github.com/dubbogo/gost v1.11.18/go.mod h1:vIcP9rqz2KsXHPjsAwIUtfJIJjppQLQDcYaZTy/61jI= +github.com/dubbogo/gost v1.11.23/go.mod h1:PhJ8+qZJx+Txjx1KthNPuVkCvUca0jRLgKWj/noGgeI= +github.com/dubbogo/gost v1.11.25/go.mod h1:iovrPhv0hyakhQGVr4jwiECBL9HXNuBY4VV3HWK5pM0= +github.com/dubbogo/grpc-go v1.42.9/go.mod h1:F1T9hnUvYGW4JLK1QNriavpOkhusU677ovPzLkk6zHM= +github.com/dubbogo/jsonparser v1.0.1/go.mod h1:tYAtpctvSP/tWw4MeelsowSPgXQRVHHWbqL6ynps8jU= +github.com/dubbogo/net v0.0.4/go.mod h1:1CGOnM7X3he+qgGNqjeADuE5vKZQx/eMSeUkpU3ujIc= +github.com/dubbogo/triple v1.0.9/go.mod h1:1t9me4j4CTvNDcsMZy6/OGarbRyAUSY0tFXGXHCp7Iw= +github.com/dubbogo/triple v1.1.8/go.mod h1:9pgEahtmsY/avYJp3dzUQE8CMMVe1NtGBmUhfICKLJk= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dvsekhvalnov/jose2go v0.0.0-20200901110807-248326c1351b/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= +github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/eclipse/paho.mqtt.golang v1.3.5/go.mod h1:eTzb4gxwwyWpqBUHGQZ4ABAV7+Jgm1PklsYT/eo8Hcc= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod 
h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= +github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emicklei/go-restful/v3 v3.8.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.0/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= +github.com/evanphx/json-patch v0.5.2/go.mod h1:ZWS5hhDbVDyob71nXKNL0+PWn6ToqBHMikGIFbs31qQ= +github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v4.11.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch/v5 v5.5.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg= +github.com/fasthttp-contrib/sessions v0.0.0-20160905201309-74f6ac73d5d5/go.mod h1:MQXNGeXkpojWTxbN7vXoE3f7EmlA11MlJbsrJpVBINA= +github.com/fasthttp/router v1.3.8/go.mod h1:DQBvuHvYbn3SUN6pGjwjPbpCNpWfCFc5Ipn/Fj6XxFc= +github.com/fastly/go-utils v0.0.0-20180712184237-d95a45783239/go.mod h1:Gdwt2ce0yfBxPvZrHkprdPPTTS3N5rwmLE8T22KBXlw= +github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/felixge/httpsnoop v1.0.1/go.mod 
h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/foxcpp/go-mockdns v0.0.0-20210729171921-fb145fc6f897/go.mod h1:lgRN6+KxQBawyIghpnl5CezHFGS9VLzvtVlwxvzXTQ4= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= +github.com/frankban/quicktest v1.10.0/go.mod h1:ui7WezCLWMWxVWr1GETZY3smRy0G4KWq9vcPtJmFl7Y= +github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= +github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= +github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= +github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= +github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= +github.com/getkin/kin-openapi v0.2.0/go.mod h1:V1z9xl9oF5Wt7v32ne4FmiF1alpS4dM6mNzoywPOXlk= +github.com/getkin/kin-openapi v0.61.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4= +github.com/getkin/kin-openapi v0.76.0/go.mod h1:660oXbgy5JFMKreazJaQTw7o+X00qeSyhcnluiMv+Xg= +github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= +github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= +github.com/gin-gonic/gin v1.7.7/go.mod h1:axIBovoeJpVj8S3BwE0uPMTeReE4+AfFtqpqaZ1qq1U= +github.com/go-asn1-ber/asn1-ber v1.3.1/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= +github.com/go-chi/chi v4.0.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs= +github.com/go-co-op/gocron v1.9.0/go.mod h1:DbJm9kdgr1sEvWpHCA7dFFs/PGHPMil9/97EXCRPr4k= +github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= +github.com/go-errors/errors v1.4.0/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= 
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-ini/ini v1.66.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc= +github.com/go-ldap/ldap/v3 v3.1.10/go.mod h1:5Zun81jBTabRaI8lzN7E1JjyEl1g6zI6u9pd8luAK4Q= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= +github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-logr/zapr v1.2.0/go.mod h1:Qa4Bsj2Vb+FAVeAKsLD8RLQ+YRJB8YDmOAKxaBQf7Ro= +github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= +github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= +github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= +github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= +github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= +github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg= +github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= +github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= +github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod 
h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= +github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-playground/validator/v10 v10.11.0/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= +github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= +github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= +github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= 
+github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= +github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/gocql/gocql v0.0.0-20210515062232-b7ef815b4556/go.mod h1:DL0ekTmBSTdlNF25Orwt/JMzqIq3EJ4MVa/J/uK64OY= +github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= +github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= +github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= +github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/godbus/dbus/v5 v5.0.6/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogap/errors v0.0.0-20200228125012-531a6449b28c/go.mod h1:tbRYYYC7g/H7QlCeX0Z2zaThWKowF4QQCFIsGgAsqRo= +github.com/gogap/stack v0.0.0-20150131034635-fef68dddd4f8/go.mod h1:6q1WEv2BiAO4FSdwLQTJbWQYAn1/qDNJHUGJNXCj9kM= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU= +github.com/gogo/googleapis v1.4.0/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.0/go.mod 
h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/goji/httpauth v0.0.0-20160601135302-2da839ab0f4d/go.mod h1:nnjvkQ9ptGaCkuDUx6wNykzzlUixGxvkme+H/lnzb+A= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf 
v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y= +github.com/gomodule/redigo v1.8.2/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= +github.com/gonum/blas v0.0.0-20181208220705-f22b278b28ac/go.mod h1:P32wAyui1PQ58Oce/KYkOqQv8cVw1zAapXOl+dRFGbc= +github.com/gonum/floats v0.0.0-20181209220543-c233463c7e82/go.mod h1:PxC8OnwL11+aosOB5+iEPoV3picfs8tUpkVd0pDo+Kg= +github.com/gonum/integrate v0.0.0-20181209220457-a422b5c0fdf2/go.mod h1:pDgmNM6seYpwvPos3q+zxlXMsbve6mOIPucUnUOrI7Y= +github.com/gonum/internal v0.0.0-20181124074243-f884aa714029/go.mod h1:Pu4dmpkhSyOzRwuXkOgAvijx4o+4YMUJJo9OvPYMkks= +github.com/gonum/lapack v0.0.0-20181123203213-e4cdc5a0bff9/go.mod h1:XA3DeT6rxh2EAE789SSiSJNqxPaC0aE9J8NTOI0Jo/A= +github.com/gonum/matrix v0.0.0-20181209220409-c518dec07be9/go.mod h1:0EXg4mc1CNP0HCqCz+K4ts155PXIlUywf0wqN+GfPZw= +github.com/gonum/stat v0.0.0-20181125101827-41a0da705a5b/go.mod h1:Z4GIJBJO3Wa4gD4vbwQxXXZ+WHmW6E9ixmNrwvs0iZs= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= +github.com/google/cel-go v0.9.0/go.mod h1:U7ayypeSkw23szu4GaQTPJGx66c20mx8JklMSxrmI1w= +github.com/google/cel-spec v0.6.0/go.mod h1:Nwjgxy5CbjlPrtCWjeDjUyKMl8w41YBYGjsyDdqk0xA= +github.com/google/flatbuffers v1.12.1/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-containerregistry v0.5.1/go.mod h1:Ct15B4yir3PLOP5jsy0GNeYVaIZs/MK/Jz5any1wFW0= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1/go.mod 
h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= +github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2cUuW7uA/OeU= +github.com/googleapis/gnostic v0.5.5/go.mod h1:7+EbHbldMins07ALC74bsA81Ovc97DwqyJO1AENw9kA= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= +github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grandcat/zeroconf v0.0.0-20190424104450-85eadb44205c/go.mod h1:YjKB0WsLXlMkO9p+wGTCoPIDGRJH0mz7E526PxkQVxI= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod 
h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.14.6/go.mod h1:zdiPV4Yse/1gnckTHtghG4GkDEdKCRJduHpTxT3/jcw= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= +github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw= +github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= +github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= +github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.9.1/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v0.16.2/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-kms-wrapping/entropy v0.1.0/go.mod h1:d1g9WGtAunDNpek8jUIEJnBlbgKS1N2Q61QkHiZyR1g= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-msgpack v1.1.5/go.mod h1:gWVc3sv/wbDmR3rQsj1CAktEZzoz1YNK9NfGLXJ69/4= +github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= +github.com/hashicorp/go-multierror 
v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY= +github.com/hashicorp/go-plugin v1.4.3/go.mod h1:5fGEH17QVwTTcR0zV7yhDPLLmFX9YSZ38b18Udy6vYQ= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-secure-stdlib/base62 v0.1.1/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= +github.com/hashicorp/go-secure-stdlib/mlock v0.1.1/go.mod h1:zq93CJChV6L9QTfGKtfBxKqD7BqqXx5O04A/ns2p5+I= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.1/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/password v0.1.1/go.mod h1:9hH302QllNwu1o2TGYtSk8I8kTAN0ca1EHpwhm5Mmzo= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.1/go.mod h1:gKOamz3EwoIoJq7mlMIRBpVTAUn8qPCrEclOKKWhD3U= +github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.1/go.mod h1:l8slYwnJA26yBz+ErHpp2IRCLr0vuOMGBORIz4rRiAs= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/raft v1.2.0/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8= 
+github.com/hashicorp/raft-boltdb v0.0.0-20171010151810-6e5ba93211ea/go.mod h1:pNv7Wc3ycL6F5oOWn+tPGo2gWD4a5X+yp/ntwdKLjRk= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q= +github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M= +github.com/hashicorp/vault/sdk v0.3.0/go.mod h1:aZ3fNuL5VNydQk8GcLJ2TV8YCRVvyaakYkhZRoVuhj0= +github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/hazelcast/hazelcast-go-client v0.0.0-20190530123621-6cf767c2f31a/go.mod h1:VhwtcZ7sg3xq7REqGzEy7ylSWGKz4jZd05eCJropNzI= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huaweicloud/huaweicloud-sdk-go-obs v3.21.12+incompatible/go.mod h1:l7VUhRbTKCzdOacdT4oWCwATKyvZqUOlOqr0Ous3k4s= +github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.0.87/go.mod h1:IvF+Pe06JMUivVgN6B4wcsPEoFvVa40IYaOPZyUt5HE= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/influxdata/influxdb-client-go v1.4.0/go.mod h1:S+oZsPivqbcP1S9ur+T+QqXvrYS3NCZeMQtBoH4D1dw= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= +github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= +github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= +github.com/intel/goresctrl v0.2.0/go.mod h1:+CZdzouYFn5EsxgqAQTEzMfwKwuc0fVdMrT9FCCAVRQ= +github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= +github.com/j-keck/arping v1.0.2/go.mod h1:aJbELhR92bSk7tp79AWM/ftfc90EfEi2bQJrbBFOsPw= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 
v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.11.0/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.10.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.15.0/go.mod h1:D/zyOyXiaM1TmVWnOM18p0xdDtdakRBa0RsVGI3U3bw= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle 
v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jawher/mow.cli v1.0.4/go.mod h1:5hQj2V8g+qYmLUVWqu4Wuja1pI57M83EChYLVZ0sMKk= +github.com/jawher/mow.cli v1.2.0/go.mod h1:y+pcA3jBAdo/GIZx/0rFjw/K2bVEODP9rfZOfaiq8Ko= +github.com/jcmturner/gofork v0.0.0-20190328161633-dc7c13fece03/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869/go.mod h1:cJ6Cj7dQo+O6GJNiMx+Pa94qKj+TG8ONdKHgMNIyyag= +github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= +github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= +github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= 
+github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= +github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= +github.com/k0kubun/pp v3.0.1+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/kataras/go-errors v0.0.3/go.mod h1:K3ncz8UzwI3bpuksXt5tQLmrRlgxfv+52ARvAu1+I+o= +github.com/kataras/go-serializer v0.0.4/go.mod h1:/EyLBhXKQOJ12dZwpUZZje3lGy+3wnvG7QKaVJtm/no= +github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d/go.mod h1:JJNrCn9otv/2QP4D7SMJBgaleKpOf66PnW6F5WGNRIc= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.10.8/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.14.4/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/knadh/koanf v1.4.1/go.mod h1:1cfH5223ZeZUOs8FU2UdTmaNfHpqgtjV0+NHjRO43gs= +github.com/koding/multiconfig v0.0.0-20171124222453-69c27309b2d7/go.mod h1:Y2SaZf2Rzd0pXkLVhLlCiAXFCLSXAIbTKDivVgff/AM= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod 
h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/labd/commercetools-go-sdk v0.3.2/go.mod h1:I+KKNALlg6PcSertsVA7E442koO99GT7gldWqwZlUGo= +github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= +github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= +github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lestrrat/go-envload v0.0.0-20180220120943-6ed08b54a570/go.mod h1:BLt8L9ld7wVsvEWQbuLrUZnCMnUmLZ+CGDzKtclrTlE= +github.com/lestrrat/go-file-rotatelogs v0.0.0-20180223000712-d3151e2a480f/go.mod h1:UGmTpUd3rjbtfIpwAPrcfmGf/Z1HS95TATB+m57TPB8= +github.com/lestrrat/go-strftime v0.0.0-20180220042222-ba3bf9c1d042/go.mod h1:TPpsiPUEh0zFL1Snz4crhMlBe60PYxRHr5oFF3rRYg0= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/linkedin/goavro/v2 v2.9.8/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA= +github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/machinebox/graphql v0.2.2/go.mod h1:F+kbVMHuwrQ5tYgU9JXlnskM8nOaFxCAEolaQybkjWA= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod 
h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= +github.com/matoous/go-nanoid v1.5.0/go.mod h1:zyD2a71IubI24efhpvkJz+ZwfwagzgSO6UNiFsZKN7U= +github.com/matoous/go-nanoid/v2 v2.0.0/go.mod h1:FtS4aGPVfEkxKxhdWPAspZpZSh1cOjtM7Ej/So3hR0g= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= +github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/mattn/go-shellwords v1.0.6/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod 
h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY= +github.com/microcosm-cc/bluemonday v1.0.7/go.mod h1:HOT/6NaBlR0f9XlxD3zolN6Z3N8Lp4pvhp+jLS5ihnI= +github.com/microsoft/ApplicationInsights-Go v0.4.4/go.mod h1:fKRUseBqkw6bDiXTs3ESTiU/4YTIHsQS4W3fP2ieF4U= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.25/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= +github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= +github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-testing-interface v1.14.0/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= +github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.3.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/locker v1.0.1/go.mod 
h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= +github.com/moby/sys/mount v0.2.0/go.mod h1:aAivFE2LB3W4bACsUXChRHQ0qKWsetY4Y9V7sxOougM= +github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= +github.com/moby/sys/signal v0.6.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= +github.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ= +github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs= +github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= +github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= +github.com/moby/term v0.0.0-20210610120745-9d4ed1856297/go.mod h1:vgPCkQMyxTZ7IDy8SXRufE172gr8+K/JE/7hHFxHW3A= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= +github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/montanaflynn/stats v0.6.6/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/morikuni/aec v0.0.0-20170113033406-39771216ff4c/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= +github.com/mrz1836/postmark v1.2.9/go.mod h1:xNRms8jgTfqBneqg0+PzvBrhuojefqXIWc6Np0nHiEM= +github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= +github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= +github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod 
h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/nacos-group/nacos-sdk-go v1.0.8/go.mod h1:hlAPn3UdzlxIlSILAyOXKxjFSvDJ9oLzTJ9hLAK1KzA= +github.com/nacos-group/nacos-sdk-go v1.1.1/go.mod h1:UHOtQNQY/qpk2dhg6gDq8u5+/CEIc3+lWmrmxEzX0/g= +github.com/nacos-group/nacos-sdk-go/v2 v2.0.1/go.mod h1:SlhyCAv961LcZ198XpKfPEQqlJWt2HkL1fDLas0uy/w= +github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/jwt v1.1.0/go.mod h1:n3cvmLfBfnpV4JJRN7lRYCyZnw48ksGsbThGXEk4w9M= +github.com/nats-io/jwt/v2 v2.2.1-0.20220113022732-58e87895b296/go.mod h1:0tqz9Hlu6bCBFLWAASKhE5vUA4c24L9KPUUgvwumE/k= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats-server/v2 v2.1.9/go.mod h1:9qVyoewoYXzG1ME9ox0HwkkzyYvnlBDugfR4Gg/8uHU= +github.com/nats-io/nats-server/v2 v2.7.4/go.mod h1:1vZ2Nijh8tcyNe8BDVyTviCd9NYzRbubQYiEHsvOQWc= +github.com/nats-io/nats-streaming-server v0.21.2/go.mod h1:2W8QfNVOtcFpmf0bRiwuLtRb0/hkX4NuOxPOFNOThVQ= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nats.go v1.10.0/go.mod h1:AjGArbfyR50+afOUotNX2Xs5SYHf+CoOa5HH1eEl2HE= +github.com/nats-io/nats.go v1.13.1-0.20220308171302-2f2f6968e98d/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.4/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s= +github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/nats-io/stan.go v0.8.3/go.mod h1:Ejm8bbHnMTSptU6uNMAVuxeapMJYBB/Ml3ej6z4GoSY= +github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.1/go.mod 
h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.13.0/go.mod h1:+REjRxOmWfHCjfv9TTWB1jD1Frx4XydAD3zm1lskyM0= +github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= +github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= +github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/open-policy-agent/opa v0.40.0/go.mod h1:UQqv8nJ1njs2+Od1lrPFzUAApdj22ABxTO35+Vpsjz4= +github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0-rc1.0.20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.2-0.20211117181255-693428a734f5/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v1.0.0-rc9/go.mod 
h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0= +github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= +github.com/opencontainers/runc v1.0.3/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= +github.com/opencontainers/runc v1.1.0/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= +github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.3-0.20200929063507-e6143ca7d51d/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs= +github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE= +github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= +github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= +github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/oracle/oci-go-sdk/v54 v54.0.0/go.mod h1:+t+yvcFGVp+3ZnztnyxqXfQDsMlq8U25faBLa+mqCMc= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= +github.com/pelletier/go-toml v1.9.3/go.mod 
h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= +github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= +github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= +github.com/pierrec/lz4 v0.0.0-20190327172049-315a67e90e41/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pierrec/lz4 v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/polarismesh/polaris-go v1.1.0/go.mod h1:tquawfjEKp1W3ffNJQSzhfditjjoZ7tvhOCElN7Efzs= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= +github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= +github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang 
v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.4.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.9.0/go.mod h1:FqZLKOZnGdFAhOK4nqGHa7D66IdsO+O441Eve7ptJDU= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_golang v1.12.2/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.28.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.30.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.34.0/go.mod h1:gB3sOl7P0TvJabZpLY5uQMpUqRCPPCyRLCZYc7JZTNE= +github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs 
v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= +github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/statsd_exporter v0.21.0/go.mod h1:rbT83sZq2V+p73lHhPZfMc3MLCHmSHelCh9hSGYNLTQ= +github.com/prometheus/statsd_exporter v0.22.3/go.mod h1:N4Z1+iSqc9rnxlT1N8Qn3l65Vzb5t4Uq0jpg8nxyhio= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/rabbitmq/amqp091-go v1.3.4/go.mod h1:ogQDLSOACsLPsIq0NpbtiifNZi2YOz0VTJ0kHRghqbM= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/rs/zerolog v1.18.0/go.mod h1:9nvC1axdVrAHcu/s9taAVfBuIdTZLVQmKQyvrUjF5+I= +github.com/rs/zerolog v1.25.0/go.mod h1:7KHcEGe0QZPOm2IE4Kpb5rTh6n1h2hIgS5OOnu1rUaI= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday v2.0.0+incompatible/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday/v2 v2.0.1/go.mod 
h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= +github.com/safchain/ethtool v0.0.0-20210803160452-9aa261dae9b1/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= +github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/savsgio/gotils v0.0.0-20210217112953-d4a072536008/go.mod h1:TWNAOTaVzGOXq8RbEvHnhzA/A2sLZzgn0m6URjnukY8= +github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= +github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= +github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= +github.com/sendgrid/rest v2.6.3+incompatible/go.mod h1:kXX7q3jZtJXK5c5qK83bSGMdV6tsOE70KbHoqJls4lE= +github.com/sendgrid/sendgrid-go v3.5.0+incompatible/go.mod h1:QRQt+LX/NmgVEvmdRw0VT/QgUn499+iza2FnDca9fg8= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shirou/gopsutil v3.20.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= +github.com/shirou/gopsutil/v3 v3.21.6/go.mod h1:JfVbDpIBLVzT8oKbvMg9P3wEIMDDpVn+LwHTKj0ST88= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sijms/go-ora/v2 v2.2.22/go.mod h1:jzfAFD+4CXHE+LjGWFl6cPrtiIpQVxakI2gvrMF2w6Y= +github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= +github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus 
v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v0.0.0-20190710185942-9d28bd7c0945/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/soheilhy/cmux v0.1.5-0.20210205191134-5ec6847320e5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0= +github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/sony/gobreaker v0.4.2-0.20210216022020-dd874f9dd33b/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= +github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= +github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI= +github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= +github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= +github.com/spf13/viper 
v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= +github.com/stathat/consistent v1.0.0/go.mod h1:uajTPbgSygZBJ+V+0mY7meZ8i0XAcZs7AQ6V121XSxw= +github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= +github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.4 h1:wZRexSlwd7ZXfKINDLsO4r7WBt3gTKONc6K/VesHvHM= +github.com/stretchr/testify v1.7.4/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/supplyon/gremcos v0.1.0/go.mod h1:ZnXsXGVbGCYDFU5GLPX9HZLWfD+ZWkiPo30KUjNoOtw= +github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= +github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= +github.com/tebeka/strftime v0.1.3/go.mod h1:7wJm3dZlpr4l/oVK0t1HYIc4rMzQ2XJlOMIUJUJH6XQ= +github.com/tedsuo/ifrit v0.0.0-20180802180643-bea94bb476cc/go.mod h1:eyZnKCc955uh98WQvzOm0dgAeLnf2O0Rz0LPoC5ze+0= +github.com/testcontainers/testcontainers-go v0.12.0/go.mod h1:SIndOQXZng0IW8iWU1Js0ynrfZ8xcxrTtDfF6rD2pxs= +github.com/tetratelabs/wazero v0.0.0-20220425003459-ad61d9a6ff43/go.mod 
h1:Y4X/zO4sC2dJjZG9GDYNRbJGogfqFYJY/BbyKlOxXGI= +github.com/tevid/gohamcrest v1.1.1/go.mod h1:3UvtWlqm8j5JbwYZh80D/PVBt0mJ1eJiYgZMibh0H/k= +github.com/tidwall/gjson v1.2.1/go.mod h1:c/nTNbUr0E0OrXEhq1pwa8iEgc2DOt4ZZqAt1HtCkPA= +github.com/tidwall/gjson v1.8.1/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk= +github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= +github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v0.0.0-20190325153808-1166b9ac2b65/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.1.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tjfoc/gmsm v1.3.2/go.mod h1:HaUcFuY0auTiaHB9MHFGCPx5IaLhTUd2atbCFBQXn9w= +github.com/tklauser/go-sysconf v0.3.6/go.mod h1:MkWzOF4RMCshBAMXuhXJs64Rte09mITnppBXY/rYEFI= +github.com/tklauser/numcpus v0.2.2/go.mod h1:x3qojaO3uyYt0i56EW/VUYs7uBvdl2fkfZFu0T9wgjM= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/toolkits/concurrent v0.0.0-20150624120057-a4371d70e3e3/go.mod h1:QDlpd3qS71vYtakd2hmdpqhJ9nwv6mD6A30bQ1BPBFE= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/tv42/httpunix v0.0.0-20191220191345-2ba4b9c3382c/go.mod h1:hzIxponao9Kjc7aWznkXaL4U4TWaDSs8zcsY4Ka08nM= +github.com/uber/jaeger-client-go v2.29.1+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= +github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go v1.2.6/go.mod h1:anCg0y61KIhDlPZmnH+so+RQbysYVyDko0IMgJv0Nn0= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go/codec v1.2.6/go.mod h1:V6TCNZ4PHqoHGFZuSG1W8nrCzzdgA2DozYxWFFpvxTw= +github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.21.0/go.mod h1:jjraHZVbKOXftJfsOYoAjaeygpj5hr8ermTRJNroD7A= +github.com/valyala/fasthttp v1.31.1-0.20211216042702-258a4c17b4f4/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= 
+github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= +github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= +github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= +github.com/vishvananda/netlink v1.1.1-0.20210330154013-f5de75959ad5/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= +github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI= +github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= +github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= +github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= +github.com/vmware/vmware-go-kcl v1.5.0/go.mod h1:P92YfaWfQyudNf62BNx+E2rJn9pd165MhHsRt8ajkpM= +github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= +github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= +github.com/yashtewari/glob-intersection v0.1.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yudai/gojsondiff v1.0.0/go.mod 
h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= +github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/gopher-lua v0.0.0-20191220021717-ab39c6098bdb/go.mod h1:gqRgreBUhTSL0GeU64rtZ3Uq3wtjOa/TB2YfrtkCbVQ= +github.com/yuin/gopher-lua v0.0.0-20200603152657-dc2b0ca8b37e/go.mod h1:gqRgreBUhTSL0GeU64rtZ3Uq3wtjOa/TB2YfrtkCbVQ= +github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs= +github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA= +github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +github.com/zouyx/agollo/v3 v3.4.5/go.mod h1:LJr3kDmm23QSW+F1Ol4TMHDa7HvJvscMdVxJ2IpUTVc= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= +go.etcd.io/etcd/api/v3 v3.5.0-alpha.0/go.mod h1:mPcW6aZJukV6Aa81LSKpBjQXTWlXB5r74ymPoSWa3Sw= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0-alpha.0/go.mod h1:kdV+xzCJ3luEBSIeQyB/OEKkWKd8Zkux4sbDeANrosU= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= +go.etcd.io/etcd/client/v3 v3.5.0-alpha.0/go.mod h1:wKt7jgDgf/OfKiYmCq5WFGxOFAkVMLxiiXgLDFhECr8= +go.etcd.io/etcd/client/v3 v3.5.0/go.mod h1:AIKXXVX/DQXtfTEqBryiLTUXwON+GuvO6Z7lLS/oTh0= +go.etcd.io/etcd/client/v3 v3.5.4/go.mod h1:ZaRkVgBZC+L+dLCjTcF1hRXpgZXQPOvnA/Ak/gq3kiY= +go.etcd.io/etcd/pkg/v3 v3.5.0-alpha.0/go.mod h1:tV31atvwzcybuqejDoY3oaNRTtlD2l/Ot78Pc9w7DMY= +go.etcd.io/etcd/pkg/v3 v3.5.0/go.mod 
h1:UzJGatBQ1lXChBkQF0AuAtkRQMYnHubxAEYIrC3MSsE= +go.etcd.io/etcd/raft/v3 v3.5.0-alpha.0/go.mod h1:FAwse6Zlm5v4tEWZaTjmNhe17Int4Oxbu7+2r0DiD3w= +go.etcd.io/etcd/raft/v3 v3.5.0/go.mod h1:UFOHSIvO/nKwd4lhkwabrTD3cqW5yVyYYf/KlD00Szc= +go.etcd.io/etcd/server/v3 v3.5.0-alpha.0/go.mod h1:tsKetYpt980ZTpzl/gb+UOJj9RkIyCb1u4wjzMg90BQ= +go.etcd.io/etcd/server/v3 v3.5.0/go.mod h1:3Ah5ruV+M+7RZr0+Y/5mNLwC+eQlni+mQmOVdCRJoS4= +go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= +go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/contrib v0.20.0/go.mod h1:G/EtFaa6qaN7+LxqfIAT3GiZa7Wv5DTBUzl5H4LY0Kc= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.20.0/go.mod h1:oVGt1LRbBOBq1A5BQLlUg9UaU/54aiHw8cgjV3aWZ/E= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.28.0/go.mod h1:vEhqr0m4eTc+DWxfsXoXue2GBgV2uUwVznkGIHW/e5w= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.20.0/go.mod h1:2AboqHi0CiIZU0qwhtUfCYD1GeUzvvIXWNkhDt7ZMG4= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.31.0/go.mod h1:PFmBsWbldL1kiWZk9+0LBZz2brhByaGsvp6pRICMlPE= +go.opentelemetry.io/otel v0.20.0/go.mod h1:Y3ugLH2oa81t5QO+Lty+zXf8zC9L26ax4Nzoxm/dooo= +go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= +go.opentelemetry.io/otel v1.6.0/go.mod h1:bfJD2DZVw0LBxghOTlgnlI0CV3hLDu9XF/QKOUXMTQQ= +go.opentelemetry.io/otel v1.6.1/go.mod h1:blzUabWHkX6LJewxvadmzafgh/wnvBSDBdOuwkAtrWQ= +go.opentelemetry.io/otel v1.6.3/go.mod h1:7BgNga5fNlF/iZjG06hM3yofffp0ofKCDwSXx1GC4dI= +go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= +go.opentelemetry.io/otel/exporters/otlp v0.20.0/go.mod h1:YIieizyaN77rtLJra0buKiNBOm9XQfkPEKBeuhoMwAM= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.3.0/go.mod h1:VpP4/RMn8bv8gNo9uK7/IMY4mtWLELsS+JIP0inH0h4= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.6.3/go.mod h1:NEu79Xo32iVb+0gVNV8PMd7GoWqnyDXRlj04yFjqz40= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.3.0/go.mod h1:hO1KLR7jcKaDDKDkvI9dP/FIhpmna5lkqPUQdEjFAM8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.6.3/go.mod h1:UJmXdiVVBaZ63umRUTwJuCMAV//GCMvDiQwn703/GoY= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.3.0/go.mod h1:keUU7UfnwWTWpJ+FWnyqmogPa82nuU5VUANFq49hlMY= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.6.3/go.mod h1:ycItY/esVj8c0dKgYTOztTERXtPzcfDU/0o8EdwCjoA= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.3.0/go.mod h1:QNX1aly8ehqqX1LEa6YniTU7VY9I6R3X/oPxhGdTceE= +go.opentelemetry.io/otel/metric v0.20.0/go.mod h1:598I5tYlH1vzBjn+BTuhzTCSb/9debfNp6R3s7Pr1eU= 
+go.opentelemetry.io/otel/metric v0.28.0/go.mod h1:TrzsfQAmQaB1PDcdhBauLMk7nyyg9hm+GoQq/ekE9Iw= +go.opentelemetry.io/otel/oteltest v0.20.0/go.mod h1:L7bgKf9ZB7qCwT9Up7i9/pn0PWIa9FqQ2IQ8LoxiGnw= +go.opentelemetry.io/otel/sdk v0.20.0/go.mod h1:g/IcepuwNsoiX5Byy2nNV0ySUF1em498m7hBWC279Yc= +go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= +go.opentelemetry.io/otel/sdk v1.6.3/go.mod h1:A4iWF7HTXa+GWL/AaqESz28VuSBIcZ+0CV+IzJ5NMiQ= +go.opentelemetry.io/otel/sdk/export/metric v0.20.0/go.mod h1:h7RBNMsDJ5pmI1zExLi+bJK+Dr8NQCh0qGhm1KDnNlE= +go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4/0TjTXukfxjzSTpHE= +go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw= +go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= +go.opentelemetry.io/otel/trace v1.6.0/go.mod h1:qs7BrU5cZ8dXQHBGxHMOxwME/27YH2qEp4/+tZLLwJE= +go.opentelemetry.io/otel/trace v1.6.1/go.mod h1:RkFRM1m0puWIq10oxImnGEduNBzxiN7TXluRBtE+5j0= +go.opentelemetry.io/otel/trace v1.6.3/go.mod h1:GNJQusJlUgZl9/TQBPKU/Y/ty+0iVB5fjhKeJGZPGFs= +go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opentelemetry.io/proto/otlp v0.11.0/go.mod h1:QpEjXPrNQzrFDZgoTo49dgHR9RYRSrg3NAKnUGl9YpQ= +go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5/go.mod h1:nmDLcffg48OtT/PSW0Hg7FvpRQsQh5OSqIylirxKC7o= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.5.1/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/automaxprocs v1.5.1/go.mod h1:BF4eumQw0P9GtnuxxovUd06vwm1o18oMzFtK66vU6XU= +go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= +go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= +go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.7.0 h1:zaiO/rmgFjbmCXdSYJWQcdvOCsthmdaHfr3Gm2Kx4Ec= +go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/ratelimit v0.2.0/go.mod h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod 
h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= +go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= +go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= +go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8= +go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= +goji.io v2.0.2+incompatible/go.mod h1:sbqFwrtqZACxLBTQcdgVjFh54yGVCvwq8+w49MVMMIk= +golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180820150726-614d502a4dac/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190404164418-38d8ce5564a5/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191112222119-e1110fd1c708/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto 
v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210920023735-84f357641f63/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod 
h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net 
v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200425230154-ff2c4b7c35a0/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= 
+golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201010224723-4f7140c49acb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201016165138-7b1cca2348c0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210917221730-978cfadd31cf/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211105192438-b53810dc28af/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211108170745-6635138e15ea/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220107192237-5cfca573fb4d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220621193019-9d032be2e588 h1:9ubFuySsnAJYGyJrZ3koiEv8FyqofCBdz3G9Mbf2YFc= +golang.org/x/net v0.0.0-20220621193019-9d032be2e588/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= 
+golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180828065106-d99a578cf41b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190522044717-8097e1b27ff5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190523142557-0e01d883c5c5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190812073006-9eafafc0a87e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191002063906-3421d5a6bb1c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200217220822-9197077df867/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201009025420-dfb3f7c4e634/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201223074533-0d417f636930/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210316164454-77fc1eacc6aa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210903071746-97244b99971b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211029165221-6e7872819dc8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211106132015-ebca88c72f68/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211109184856-51b60fd695b3/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211117180635-dee7805ff2e1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220111092808-5a964db01320/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210611083556-38a9dc6acbc6/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190424220101-1e8e1cfdf96b/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190706070813-72ffa07ba3db/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200616133436-c1934b75d054/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20200916195026-c9a70fc28ce3/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201014170642-d1624618ad65/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.6-0.20210820212750-d4cc65f0b2ff/go.mod h1:YD9qOF0M9xpSpdWTBbzEl5e/RnCefISl8E5Noe10jFM= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gomodules.xyz/jsonpatch/v2 v2.2.0/go.mod h1:WXp+iVDkoLQqPudfQ9GBlwB2eZ5DKOnjQZCYdOS8GPY= +gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= +gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= +gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= +gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= +google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= 
+google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod 
h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk= +google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190522204451-c2c4e71fbf69/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200117163144-32f20d992d24/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200527145253-8367513e4ece/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201102152239-715cce707fb0/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210106152847-07624b53cd92/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210701133433-6b8dcf568a95/go.mod 
h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U= +google.golang.org/genproto v0.0.0-20210707164411-8c882eb9abba/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211104193956-4c6863e31247/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod 
h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220622171453-ea41d75dfa0f h1:kYlCnpX4eB0QEnXm12j4DAX4yrjjhJmsyuWtSSZ+Buo= +google.golang.org/genproto v0.0.0-20220622171453-ea41d75dfa0f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc 
v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= +google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0 h1:9n77onPX5F3qfFCqjy9dhn8PbNQsIKeVU04J9G7umt8= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk= +gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/couchbase/gocb.v1 v1.6.4/go.mod h1:Ri5Qok4ZKiwmPr75YxZ0uELQy45XJgUSzeUnK806gTY= +gopkg.in/couchbase/gocbcore.v7 v7.1.18/go.mod h1:48d2Be0MxRtsyuvn+mWzqmoGUG9uA00ghopzOs148/E= +gopkg.in/couchbaselabs/gocbconnstr.v1 v1.0.4/go.mod h1:ZjII0iKx4Veo6N6da+pEZu/ptNyKLg9QTVt7fFmR6sw= +gopkg.in/couchbaselabs/gojcbmock.v1 v1.0.4/go.mod h1:jl/gd/aQ2S8whKVSTnsPs6n7BPeaAuw9UglBD/OF7eo= +gopkg.in/couchbaselabs/jsonx.v1 v1.0.1/go.mod h1:oR201IRovxvLW/eISevH12/+MiKHtNQAKfcX8iWZvJY= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fatih/pool.v2 v2.0.0/go.mod h1:8xVGeu1/2jr2wm5V9SPuMht2H5AEmf5aFMGSQixtjTY= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= +gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw= +gopkg.in/gorethink/gorethink.v4 v4.1.0/go.mod h1:M7JgwrUAmshJ3iUbEK0Pt049MPyPK+CYDGGaEjdZb/c= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= +gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= +gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= +gopkg.in/jcmturner/gokrb5.v7 v7.2.3/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM= +gopkg.in/jcmturner/gokrb5.v7 v7.3.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM= +gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8= +gopkg.in/kataras/go-serializer.v0 v0.0.4/go.mod h1:v2jHg/3Wp7uncDNzenTsX75PRDxhzlxoo/qDvM4ZGxk= +gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod 
h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +k8s.io/api v0.20.0/go.mod h1:HyLC5l5eoS/ygQYl1BXBgFzWNlkHiAuyNAbevIn+FKg= +k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= +k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= +k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= +k8s.io/api v0.22.5/go.mod h1:mEhXyLaSD1qTOf40rRiKXkc+2iCem09rWLlFwhCEiAs= +k8s.io/api v0.23.0/go.mod h1:8wmDdLBHBNxtOIytwLstXt5E9PddnZb0GaMcqsvDBpg= +k8s.io/apiextensions-apiserver v0.23.0/go.mod h1:xIFAEEDlAZgpVBl/1VSjGDmLoXAWRG40+GsWhKhAxY4= +k8s.io/apimachinery v0.20.0/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc= +k8s.io/apimachinery v0.22.1/go.mod h1:O3oNtNadZdeOMxHFVxOreoznohCpy0z6mocxbZr7oJ0= +k8s.io/apimachinery v0.22.5/go.mod h1:xziclGKwuuJ2RM5/rSFQSYAj0zdbci3DH8kj+WvyN0U= +k8s.io/apimachinery v0.23.0/go.mod h1:fFCTTBKvKcwTPFzjlcxp91uPFZr+JA0FubU4fLzzFYc= +k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU= +k8s.io/apiserver v0.20.4/go.mod 
h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM= +k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q= +k8s.io/apiserver v0.22.5/go.mod h1:s2WbtgZAkTKt679sYtSudEQrTGWUSQAPe6MupLnlmaQ= +k8s.io/apiserver v0.23.0/go.mod h1:Cec35u/9zAepDPPFyT+UMrgqOCjgJ5qtfVJDxjZYmt4= +k8s.io/cli-runtime v0.23.0/go.mod h1:B5N3YH0KP1iKr6gEuJ/RRmGjO0mJQ/f/JrsmEiPQAlU= +k8s.io/client-go v0.20.0/go.mod h1:4KWh/g+Ocd8KkCwKF8vUNnmqgv+EVnQDK4MBF4oB5tY= +k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y= +k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= +k8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0= +k8s.io/client-go v0.22.5/go.mod h1:cs6yf/61q2T1SdQL5Rdcjg9J1ElXSwbjSrW2vFImM4Y= +k8s.io/client-go v0.23.0/go.mod h1:hrDnpnK1mSr65lHHcUuIZIXDgEbzc7/683c6hyG4jTA= +k8s.io/code-generator v0.19.7/go.mod h1:lwEq3YnLYb/7uVXLorOJfxg+cUu2oihFhHZ0n9NIla0= +k8s.io/code-generator v0.20.0/go.mod h1:UsqdF+VX4PU2g46NC2JRs4gc+IfrctnwHb76RNbWHJg= +k8s.io/code-generator v0.23.0/go.mod h1:vQvOhDXhuzqiVfM/YHp+dmg10WDZCchJVObc9MvowsE= +k8s.io/component-base v0.20.1/go.mod h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk= +k8s.io/component-base v0.20.4/go.mod h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI= +k8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM= +k8s.io/component-base v0.22.5/go.mod h1:VK3I+TjuF9eaa+Ln67dKxhGar5ynVbwnGrUiNF4MqCI= +k8s.io/component-base v0.23.0/go.mod h1:DHH5uiFvLC1edCpvcTDV++NKULdYYU6pR9Tt3HIKMKI= +k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM= +k8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= +k8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI= +k8s.io/cri-api v0.20.6/go.mod h1:ew44AjNXwyn1s0U4xCKGodU7J1HzBeZ1MpGrpa5r8Yc= +k8s.io/cri-api v0.23.1/go.mod h1:REJE3PSU0h/LOV1APBrupxrEJqnoxZC8KWzkBUHwrK4= +k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= +k8s.io/gengo v0.0.0-20200428234225-8167cfdcfc14/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= +k8s.io/gengo v0.0.0-20201113003025-83324d819ded/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= +k8s.io/gengo v0.0.0-20210813121822-485abfe95c7c/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E= +k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= +k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= +k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= +k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.9.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= +k8s.io/klog/v2 v2.30.0 h1:bUO6drIvCIsvZ/XFgfxoGFQU/a4Qkh0iAlvUR7vlHJw= +k8s.io/klog/v2 v2.30.0/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o= +k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= +k8s.io/kube-openapi v0.0.0-20210421082810-95288971da7e/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= +k8s.io/kube-openapi v0.0.0-20211109043538-20434351676c/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw= +k8s.io/kube-openapi v0.0.0-20211115234752-e816edb12b65/go.mod h1:sX9MT8g7NVZM5lVL/j8QyCCJe8YSMW30QvGZWaCIDIk= +k8s.io/kubernetes v1.13.0/go.mod 
h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= +k8s.io/metrics v0.20.0/go.mod h1:9yiRhfr8K8sjdj2EthQQE9WvpYDvsXIV3CjN4Ruq4Jw= +k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +k8s.io/utils v0.0.0-20210819203725-bdf08cb9a70a/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= +nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +oras.land/oras-go v1.1.0/go.mod h1:1A7vR/0KknT2UkJVWh+xMi95I/AhK8ZrxrnUSmXN0bQ= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.22/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.25/go.mod h1:Mlj9PNLmG9bZ6BHFwFKDo5afkpWyUISkb9Me0GnK66I= +sigs.k8s.io/controller-runtime v0.11.0/go.mod h1:KKwLiTooNGu+JmLZGn9Sl3Gjmfj66eMbCQznLP5zcqA= +sigs.k8s.io/json v0.0.0-20211020170558-c049b76a60c6/go.mod h1:p4QtZmO4uMYipTQNzagwnNoseA6OxSUutVw05NhYDRs= +sigs.k8s.io/kustomize/api v0.10.1/go.mod h1:2FigT1QN6xKdcnGS2Ppp1uIWrtWN28Ms8A3OZUZhwr8= +sigs.k8s.io/kustomize/kyaml v0.13.0/go.mod h1:FTJxEZ86ScK184NpGSAQcfEqee0nul8oLCK30D47m4E= +sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.1.2/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= +sigs.k8s.io/structured-merge-diff/v4 v4.2.0/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +skywalking.apache.org/repo/goapi v0.0.0-20220401015832-2c9eee9481eb h1:+PP2DpKFN/rEporLdPI4A7bPWQjwfARlUDKNhSab8iM= +skywalking.apache.org/repo/goapi v0.0.0-20220401015832-2c9eee9481eb/go.mod h1:uWwwvhcwe2MD/nJCg0c1EE/eL6KzaBosLHDfMFoEJ30= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +stathat.com/c/consistent v1.0.0/go.mod h1:QkzMWzcbB+yQBL2AttO6sgsQS/JSTapcDISJalmCDS0= diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/hello.go b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/hello.go new file mode 100644 index 0000000..d726b8e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/openfunction/function-example/test-uri/hello.go @@ -0,0 +1,38 @@ +/* + * 
Copyright 2022 The OpenFunction Authors. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package hello + +import ( + "fmt" + ofctx "github.com/OpenFunction/functions-framework-go/context" + "net/http" + + "github.com/OpenFunction/functions-framework-go/functions" +) + +func init() { + functions.HTTP("HelloWorld", HelloWorld, + functions.WithFunctionPath("/{greeting}")) +} + +func HelloWorld(w http.ResponseWriter, r *http.Request) { + vars := ofctx.VarsFromCtx(r.Context()) + fmt.Fprintf(w, "Hello, %s!\n", vars["greeting"]) +} diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/otelcol-contrib/config.yaml b/CloudronPackages/APISIX/apisix-source/ci/pod/otelcol-contrib/config.yaml new file mode 100644 index 0000000..6068f4c --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/otelcol-contrib/config.yaml @@ -0,0 +1,32 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 +exporters: + file: + path: /etc/otelcol-contrib/data-otlp.json +service: + pipelines: + traces: + receivers: [otlp] + exporters: [file] diff --git a/CloudronPackages/APISIX/apisix-source/ci/pod/vector/vector.toml b/CloudronPackages/APISIX/apisix-source/ci/pod/vector/vector.toml new file mode 100644 index 0000000..26716bc --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/pod/vector/vector.toml @@ -0,0 +1,111 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[sources.log-from-tcp] +type = "socket" +address = "0.0.0.0:3000" +host_key = "host" +mode = "tcp" +port_key = "port" +shutdown_timeout_secs = 30 +socket_file_mode = 511 + +[sources.log-from-http] +type = "http_server" +address = "0.0.0.0:3001" + +[sources.log-from-udp] +type = "socket" +address = "0.0.0.0:8127" +host_key = "host" +mode = "udp" +port_key = "port" + +[sources.log-from-tls] +type = "socket" +address = "0.0.0.0:43000" +host_key = "host" +mode = "tcp" +port_key = "port" +tls.enabled = true +tls.verify = true +tls.ca_file = "/certs/vector_logs_ca.crt" +tls.crt_file = "/certs/vector_logs_server.crt" +tls.key_file = "/certs/vector_logs_server.key" + +[sources.log-from-syslog-tcp] +type = "syslog" +address = "0.0.0.0:5140" +mode = "tcp" + +[sources.log-from-syslog-udp] +type = "syslog" +address = "0.0.0.0:5150" +mode = "udp" + +[sources.log-from-splunk] +type = "splunk_hec" +address = "0.0.0.0:18088" +valid_tokens = [ + "BD274822-96AA-4DA6-90EC-18940FB2414C" +] + +[sinks.log-2-console] +inputs = [ "log-from-tcp", "log-from-tls", "log-from-syslog-tcp", "log-from-syslog-udp", "log-from-udp", "log-from-splunk", "log-from-http"] +type = "console" +encoding.codec = "json" + +[sinks.log-2-tcp-file] +inputs = [ "log-from-tcp" ] +type = "file" +encoding.codec = "text" +path = "/etc/vector/tcp.log" + +[sinks.log-2-http-file] +inputs = [ "log-from-http" ] +type = "file" +encoding.codec = "text" +path = "/etc/vector/http.log" + +[sinks.log-2-udp-file] +inputs = [ "log-from-udp" ] +type = "file" +encoding.codec = "json" +path = "/etc/vector/udp.log" + +[sinks.tls-log-2-file] +inputs = [ "log-from-tls" ] +type = "file" +encoding.codec = "json" +path = "/etc/vector/tls-datas.log" + +[sinks.log-2-syslog-tcp-file] +inputs = [ "log-from-syslog-tcp" ] +type = "file" +encoding.codec = "text" +path = "/etc/vector/syslog-tcp.log" + +[sinks.log-2-splunk-file] +inputs = [ "log-from-splunk" ] +type = "file" +encoding.codec = "json" +path = "/etc/vector/splunk.log" + +[sinks.log-2-syslog-udp-file] +inputs = [ "log-from-syslog-udp" ] +type = "file" +encoding.codec = "text" +path = "/etc/vector/syslog-udp.log" diff --git a/CloudronPackages/APISIX/apisix-source/ci/redhat-ci.sh b/CloudronPackages/APISIX/apisix-source/ci/redhat-ci.sh new file mode 100755 index 0000000..8cd63c8 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/redhat-ci.sh @@ -0,0 +1,116 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +. ./ci/common.sh +install_dependencies() { + export_version_info + export_or_prefix + + # install build & runtime deps + yum install -y --disablerepo=* --enablerepo=ubi-8-appstream-rpms --enablerepo=ubi-8-baseos-rpms \ + wget tar gcc gcc-c++ automake autoconf libtool make unzip git sudo openldap-devel hostname patch \ + which ca-certificates pcre pcre-devel xz \ + openssl-devel + yum install -y libyaml-devel + yum install -y --disablerepo=* --enablerepo=ubi-8-appstream-rpms --enablerepo=ubi-8-baseos-rpms cpanminus perl + + # install newer curl + yum makecache + yum install -y xz + install_curl + + # install apisix-runtime to make apisix's rpm test work + yum install -y yum-utils && yum-config-manager --add-repo https://openresty.org/package/centos/openresty.repo + yum install -y openresty-pcre-devel openresty-zlib-devel + + install_apisix_runtime + curl -o /usr/local/openresty/openssl3/ssl/openssl.cnf \ + https://raw.githubusercontent.com/api7/apisix-build-tools/apisix-runtime/${APISIX_RUNTIME}/conf/openssl3/openssl.cnf + + # patch lua-resty-events + sed -i 's/log(ERR, "event worker failed: ", perr)/log(ngx.WARN, "event worker failed: ", perr)/' /usr/local/openresty/lualib/resty/events/worker.lua + + # install luarocks + ./utils/linux-install-luarocks.sh + + # install etcdctl + ./ci/linux-install-etcd-client.sh + + # install vault cli capabilities + install_vault_cli + + # install brotli + yum install -y cmake3 + install_brotli + + # install test::nginx + cpanm --notest Test::Nginx IPC::Run > build.log 2>&1 || (cat build.log && exit 1) + + # add go1.17 binary to the path + mkdir build-cache + pushd build-cache/ + # Go is required inside the container. + wget -q https://golang.org/dl/go1.17.linux-amd64.tar.gz && tar -xf go1.17.linux-amd64.tar.gz + export PATH=$PATH:$(pwd)/go/bin + popd + # install and start grpc_server_example + pushd t/grpc_server_example + + CGO_ENABLED=0 go build + popd + + yum install -y iproute procps + start_grpc_server_example + + start_sse_server_example + + # installing grpcurl + install_grpcurl + + # install nodejs + install_nodejs + + # grpc-web server && client + pushd t/plugin/grpc-web + ./setup.sh + # back to home directory + popd + + # install dependencies + git clone https://github.com/openresty/test-nginx.git test-nginx + create_lua_deps +} + +run_case() { + export_or_prefix + make init + set_coredns + # run test cases + FLUSH_ETCD=1 TEST_EVENTS_MODULE=$TEST_EVENTS_MODULE prove --timer -Itest-nginx/lib -I./ -r ${TEST_FILE_SUB_DIR} | tee /tmp/test.result + rerun_flaky_tests /tmp/test.result +} + +case_opt=$1 +case $case_opt in + (install_dependencies) + install_dependencies + ;; + (run_case) + run_case + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/ci/tars-ci.sh b/CloudronPackages/APISIX/apisix-source/ci/tars-ci.sh new file mode 100755 index 0000000..c160db7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/ci/tars-ci.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +. ./ci/common.sh + +run_case() { + export_or_prefix + export PERL5LIB=.:$PERL5LIB + prove -Itest-nginx/lib -I./ -r t/tars | tee test-result + rerun_flaky_tests test-result +} + +case_opt=$1 +case $case_opt in + (run_case) + run_case + ;; +esac diff --git a/CloudronPackages/APISIX/apisix-source/conf/cert/ssl_PLACE_HOLDER.crt b/CloudronPackages/APISIX/apisix-source/conf/cert/ssl_PLACE_HOLDER.crt new file mode 100644 index 0000000..503f277 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/conf/cert/ssl_PLACE_HOLDER.crt @@ -0,0 +1,27 @@ +-----BEGIN CERTIFICATE----- +MIIEojCCAwqgAwIBAgIJAK253pMhgCkxMA0GCSqGSIb3DQEBCwUAMFYxCzAJBgNV +BAYTAkNOMRIwEAYDVQQIDAlHdWFuZ0RvbmcxDzANBgNVBAcMBlpodUhhaTEPMA0G +A1UECgwGaXJlc3R5MREwDwYDVQQDDAh0ZXN0LmNvbTAgFw0xOTA2MjQyMjE4MDVa +GA8yMTE5MDUzMTIyMTgwNVowVjELMAkGA1UEBhMCQ04xEjAQBgNVBAgMCUd1YW5n +RG9uZzEPMA0GA1UEBwwGWmh1SGFpMQ8wDQYDVQQKDAZpcmVzdHkxETAPBgNVBAMM +CHRlc3QuY29tMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAyCM0rqJe +cvgnCfOw4fATotPwk5Ba0gC2YvIrO+gSbQkyxXF5jhZB3W6BkWUWR4oNFLLSqcVb +VDPitz/Mt46Mo8amuS6zTbQetGnBARzPLtmVhJfoeLj0efMiOepOSZflj9Ob4yKR +2bGdEFOdHPjm+4ggXU9jMKeLqdVvxll/JiVFBW5smPtW1Oc/BV5terhscJdOgmRr +abf9xiIis9/qVYfyGn52u9452V0owUuwP7nZ01jt6iMWEGeQU6mwPENgvj1olji2 +WjdG2UwpUVp3jp3l7j1ekQ6mI0F7yI+LeHzfUwiyVt1TmtMWn1ztk6FfLRqwJWR/ +Evm95vnfS3Le4S2ky3XAgn2UnCMyej3wDN6qHR1onpRVeXhrBajbCRDRBMwaNw/1 +/3Uvza8QKK10PzQR6OcQ0xo9psMkd9j9ts/dTuo2fzaqpIfyUbPST4GdqNG9NyIh +/B9g26/0EWcjyO7mYVkaycrtLMaXm1u9jyRmcQQI1cGrGwyXbrieNp63AgMBAAGj +cTBvMB0GA1UdDgQWBBSZtSvV8mBwl0bpkvFtgyiOUUcbszAfBgNVHSMEGDAWgBSZ +tSvV8mBwl0bpkvFtgyiOUUcbszAMBgNVHRMEBTADAQH/MB8GA1UdEQQYMBaCCHRl +c3QuY29tggoqLnRlc3QuY29tMA0GCSqGSIb3DQEBCwUAA4IBgQAHGEul/x7ViVgC +tC8CbXEslYEkj1XVr2Y4hXZXAXKd3W7V3TC8rqWWBbr6L/tsSVFt126V5WyRmOaY +1A5pju8VhnkhYxYfZALQxJN2tZPFVeME9iGJ9BE1wPtpMgITX8Rt9kbNlENfAgOl +PYzrUZN1YUQjX+X8t8/1VkSmyZysr6ngJ46/M8F16gfYXc9zFj846Z9VST0zCKob +rJs3GtHOkS9zGGldqKKCj+Awl0jvTstI4qtS1ED92tcnJh5j/SSXCAB5FgnpKZWy +hme45nBQj86rJ8FhN+/aQ9H9/2Ib6Q4wbpaIvf4lQdLUEcWAeZGW6Rk0JURwEog1 +7/mMgkapDglgeFx9f/XztSTrkHTaX4Obr+nYrZ2V4KOB4llZnK5GeNjDrOOJDk2y +IJFgBOZJWyS93dQfuKEj42hA79MuX64lMSCVQSjX+ipR289GQZqFrIhiJxLyA+Ve +U/OOcSRr39Kuis/JJ+DkgHYa/PWHZhnJQBxcqXXk1bJGw9BNbhM= +-----END CERTIFICATE----- diff --git a/CloudronPackages/APISIX/apisix-source/conf/cert/ssl_PLACE_HOLDER.key b/CloudronPackages/APISIX/apisix-source/conf/cert/ssl_PLACE_HOLDER.key new file mode 100644 index 0000000..7105067 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/conf/cert/ssl_PLACE_HOLDER.key @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG5AIBAAKCAYEAyCM0rqJecvgnCfOw4fATotPwk5Ba0gC2YvIrO+gSbQkyxXF5 +jhZB3W6BkWUWR4oNFLLSqcVbVDPitz/Mt46Mo8amuS6zTbQetGnBARzPLtmVhJfo +eLj0efMiOepOSZflj9Ob4yKR2bGdEFOdHPjm+4ggXU9jMKeLqdVvxll/JiVFBW5s +mPtW1Oc/BV5terhscJdOgmRrabf9xiIis9/qVYfyGn52u9452V0owUuwP7nZ01jt +6iMWEGeQU6mwPENgvj1olji2WjdG2UwpUVp3jp3l7j1ekQ6mI0F7yI+LeHzfUwiy +Vt1TmtMWn1ztk6FfLRqwJWR/Evm95vnfS3Le4S2ky3XAgn2UnCMyej3wDN6qHR1o +npRVeXhrBajbCRDRBMwaNw/1/3Uvza8QKK10PzQR6OcQ0xo9psMkd9j9ts/dTuo2 +fzaqpIfyUbPST4GdqNG9NyIh/B9g26/0EWcjyO7mYVkaycrtLMaXm1u9jyRmcQQI 
+1cGrGwyXbrieNp63AgMBAAECggGBAJM8g0duoHmIYoAJzbmKe4ew0C5fZtFUQNmu +O2xJITUiLT3ga4LCkRYsdBnY+nkK8PCnViAb10KtIT+bKipoLsNWI9Xcq4Cg4G3t +11XQMgPPgxYXA6m8t+73ldhxrcKqgvI6xVZmWlKDPn+CY/Wqj5PA476B5wEmYbNC +GIcd1FLl3E9Qm4g4b/sVXOHARF6iSvTR+6ol4nfWKlaXSlx2gNkHuG8RVpyDsp9c +z9zUqAdZ3QyFQhKcWWEcL6u9DLBpB/gUjyB3qWhDMe7jcCBZR1ALyRyEjmDwZzv2 +jlv8qlLFfn9R29UI0pbuL1eRAz97scFOFme1s9oSU9a12YHfEd2wJOM9bqiKju8y +DZzePhEYuTZ8qxwiPJGy7XvRYTGHAs8+iDlG4vVpA0qD++1FTpv06cg/fOdnwshE +OJlEC0ozMvnM2rZ2oYejdG3aAnUHmSNa5tkJwXnmj/EMw1TEXf+H6+xknAkw05nh +zsxXrbuFUe7VRfgB5ElMA/V4NsScgQKBwQDmMRtnS32UZjw4A8DsHOKFzugfWzJ8 +Gc+3sTgs+4dNIAvo0sjibQ3xl01h0BB2Pr1KtkgBYB8LJW/FuYdCRS/KlXH7PHgX +84gYWImhNhcNOL3coO8NXvd6+m+a/Z7xghbQtaraui6cDWPiCNd/sdLMZQ/7LopM +RbM32nrgBKMOJpMok1Z6zsPzT83SjkcSxjVzgULNYEp03uf1PWmHuvjO1yELwX9/ +goACViF+jst12RUEiEQIYwr4y637GQBy+9cCgcEA3pN9W5OjSPDVsTcVERig8++O +BFURiUa7nXRHzKp2wT6jlMVcu8Pb2fjclxRyaMGYKZBRuXDlc/RNO3uTytGYNdC2 +IptU5N4M7iZHXj190xtDxRnYQWWo/PR6EcJj3f/tc3Itm1rX0JfuI3JzJQgDb9Z2 +s/9/ub8RRvmQV9LM/utgyOwNdf5dyVoPcTY2739X4ZzXNH+CybfNa+LWpiJIVEs2 +txXbgZrhmlaWzwA525nZ0UlKdfktdcXeqke9eBghAoHARVTHFy6CjV7ZhlmDEtqE +U58FBOS36O7xRDdpXwsHLnCXhbFu9du41mom0W4UdzjgVI9gUqG71+SXrKr7lTc3 +dMHcSbplxXkBJawND/Q1rzLG5JvIRHO1AGJLmRgIdl8jNgtxgV2QSkoyKlNVbM2H +Wy6ZSKM03lIj74+rcKuU3N87dX4jDuwV0sPXjzJxL7NpR/fHwgndgyPcI14y2cGz +zMC44EyQdTw+B/YfMnoZx83xaaMNMqV6GYNnTHi0TO2TAoHBAKmdrh9WkE2qsr59 +IoHHygh7Wzez+Ewr6hfgoEK4+QzlBlX+XV/9rxIaE0jS3Sk1txadk5oFDebimuSk +lQkv1pXUOqh+xSAwk5v88dBAfh2dnnSa8HFN3oz+ZfQYtnBcc4DR1y2X+fVNgr3i +nxruU2gsAIPFRnmvwKPc1YIH9A6kIzqaoNt1f9VM243D6fNzkO4uztWEApBkkJgR +4s/yOjp6ovS9JG1NMXWjXQPcwTq3sQVLnAHxZRJmOvx69UmK4QKBwFYXXjeXiU3d +bcrPfe6qNGjfzK+BkhWznuFUMbuxyZWDYQD5yb6ukUosrj7pmZv3BxKcKCvmONU+ +CHgIXB+hG+R9S2mCcH1qBQoP/RSm+TUzS/Bl2UeuhnFZh2jSZQy3OwryUi6nhF0u +LDzMI/6aO1ggsI23Ri0Y9ZtqVKczTkxzdQKR9xvoNBUufjimRlS80sJCEB3Qm20S +wzarryret/7GFW1/3cz+hTj9/d45i25zArr3Pocfpur5mfz3fJO8jg== +-----END RSA PRIVATE KEY----- diff --git a/CloudronPackages/APISIX/apisix-source/conf/config.yaml b/CloudronPackages/APISIX/apisix-source/conf/config.yaml new file mode 100644 index 0000000..6a3c430 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/conf/config.yaml @@ -0,0 +1,63 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# If you want to set the specified configuration value, you can set the new +# in this file. For example if you want to specify the etcd address: +# +# deployment: +# role: traditional +# role_traditional: +# config_provider: etcd +# etcd: +# host: +# - http://127.0.0.1:2379 +# +# To configure via environment variables, you can use `${{VAR}}` syntax. 
For instance: +# +# deployment: +# role: traditional +# role_traditional: +# config_provider: etcd +# etcd: +# host: +# - http://${{ETCD_HOST}}:2379 +# +# And then run `export ETCD_HOST=$your_host` before `make init`. +# +# If the configured environment variable can't be found, an error will be thrown. +# +# Also, If you want to use default value when the environment variable not set, +# Use `${{VAR:=default_value}}` instead. For instance: +# +# deployment: +# role: traditional +# role_traditional: +# config_provider: etcd +# etcd: +# host: +# - http://${{ETCD_HOST:=localhost}}:2379 +# +# This will find environment variable `ETCD_HOST` first, and if it's not exist it will use `localhost` as default value. +# +deployment: + role: traditional + role_traditional: + config_provider: etcd + admin: + admin_key: + - name: admin + key: '' # using fixed API token has security risk, please update it when you deploy to production environment. If passed empty then will be autogenerated by APISIX and will be written back here. Recommended is to use external mechanism to generate and store the token. + role: admin diff --git a/CloudronPackages/APISIX/apisix-source/conf/config.yaml.example b/CloudronPackages/APISIX/apisix-source/conf/config.yaml.example new file mode 100644 index 0000000..6f2f831 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/conf/config.yaml.example @@ -0,0 +1,712 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# CAUTION: DO NOT MODIFY DEFAULT CONFIGURATIONS IN THIS FILE. +# Keep the custom configurations in conf/config.yaml. +# + +apisix: + # node_listen: 9080 # APISIX listening port. + node_listen: # APISIX listening ports. + - 9080 + # - port: 9081 + # - ip: 127.0.0.2 # If not set, default to `0.0.0.0` + # port: 9082 + enable_admin: true # Admin API + enable_dev_mode: false # If true, set nginx `worker_processes` to 1. + enable_reuseport: true # If true, enable nginx SO_REUSEPORT option. + show_upstream_status_in_response_header: false # If true, include the upstream HTTP status code in + # the response header `X-APISIX-Upstream-Status`. + # If false, show `X-APISIX-Upstream-Status` only if + # the upstream response code is 5xx. + enable_ipv6: true + enable_http2: true + + # proxy_protocol: # PROXY Protocol configuration + # listen_http_port: 9181 # APISIX listening port for HTTP traffic with PROXY protocol. + # listen_https_port: 9182 # APISIX listening port for HTTPS traffic with PROXY protocol. + # enable_tcp_pp: true # Enable the PROXY protocol when stream_proxy.tcp is set. + # enable_tcp_pp_to_upstream: true # Enable the PROXY protocol. + + enable_server_tokens: true # If true, show APISIX version in the `Server` response header. + extra_lua_path: "" # Extend lua_package_path to load third-party code. 
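+  # Illustrative example (the path below is an assumption, not an upstream
+  # default): third-party Lua modules placed under /opt/apisix-extra could be
+  # picked up with:
+  #   extra_lua_path: "/opt/apisix-extra/?.lua"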
+ extra_lua_cpath: "" # Extend lua_package_cpath to load third-party code. + # lua_module_hook: "my_project.my_hook" # Hook module used to inject third-party code into APISIX. + + proxy_cache: # Proxy Caching configuration + cache_ttl: 10s # The default caching time on disk if the upstream does not specify a caching time. + zones: + - name: disk_cache_one # Name of the cache. + memory_size: 50m # Size of the memory to store the cache index. + disk_size: 1G # Size of the disk to store the cache data. + disk_path: /tmp/disk_cache_one # Path to the cache file for disk cache. + cache_levels: "1:2" # Cache hierarchy levels of disk cache. + # - name: disk_cache_two + # memory_size: 50m + # disk_size: 1G + # disk_path: "/tmp/disk_cache_two" + # cache_levels: "1:2" + - name: memory_cache + memory_size: 50m + + delete_uri_tail_slash: false # Delete the '/' at the end of the URI + normalize_uri_like_servlet: false # If true, use the same path normalization rules as the Java + # servlet specification. See https://github.com/jakartaee/servlet/blob/master/spec/src/main/asciidoc/servlet-spec-body.adoc#352-uri-path-canonicalization, which is used in Tomcat. + + router: + http: radixtree_host_uri # radixtree_host_uri: match route by host and URI + # radixtree_uri: match route by URI + # radixtree_uri_with_parameter: similar to radixtree_uri but match URI with parameters. See https://github.com/api7/lua-resty-radixtree/#parameters-in-path for more details. + ssl: radixtree_sni # radixtree_sni: match route by SNI + + # http is the default proxy mode. proxy_mode can be one of `http`, `stream`, or `http&stream` + proxy_mode: "http" + # stream_proxy: # TCP/UDP L4 proxy + # tcp: + # - addr: 9100 # Set the TCP proxy listening ports. + # tls: true + # - addr: "127.0.0.1:9101" + # udp: # Set the UDP proxy listening ports. + # - 9200 + # - "127.0.0.1:9201" + + # dns_resolver: # If not set, read from `/etc/resolv.conf` + # - 1.1.1.1 + # - 8.8.8.8 + # dns_resolver_valid: 30 # Override the default TTL of the DNS records. + resolver_timeout: 5 # Set the time in seconds that the server will wait for a response from the + # DNS resolver before timing out. + enable_resolv_search_opt: true # If true, use search option in the resolv.conf file in DNS lookups. + + ssl: + enable: true + listen: # APISIX listening port for HTTPS traffic. + - port: 9443 + enable_http3: false # Enable HTTP/3 (with QUIC). If not set default to `false`. + # - ip: 127.0.0.3 # If not set, default to `0.0.0.0`. + # port: 9445 + # enable_http3: true + #ssl_trusted_certificate: system # Specifies a file path with trusted CA certificates in the PEM format. The default value is "system". + ssl_protocols: TLSv1.2 TLSv1.3 # TLS versions supported. + ssl_ciphers: ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384 + ssl_session_tickets: false # If true, session tickets are used for SSL/TLS connections. + # Disabled by default because it renders Perfect Forward Secrecy (FPS) + # useless. See https://github.com/mozilla/server-side-tls/issues/135. + + # fallback_sni: "my.default.domain" # Fallback SNI to be used if the client does not send SNI during + # # the handshake. + + enable_control: true # Control API + # control: + # ip: 127.0.0.1 + # port: 9090 + + disable_sync_configuration_during_start: false # Safe exit. TO BE REMOVED. + + data_encryption: # Data encryption settings. 
+ enable_encrypt_fields: true # Whether enable encrypt fields specified in `encrypt_fields` in plugin schema. + keyring: # This field is used to encrypt the private key of SSL and the `encrypt_fields` + # in plugin schema. + - qeddd145sfvddff3 # Set the encryption key for AES-128-CBC. It should be a hexadecimal string + # of length 16. + - edd1c9f0985e76a2 # If not set, APISIX saves the original data into etcd. + # CAUTION: If you would like to update the key, add the new key as the + # first item in the array and keep the older keys below the newly added + # key, so that data can be decrypted with the older keys and encrypted + # with the new key. Removing the old keys directly can render the data + # unrecoverable. + + events: # Event distribution module configuration + module: lua-resty-events # Sets the name of the events module used. + # Supported module: lua-resty-worker-events and lua-resty-events +# status: # When enabled, APISIX will provide `/status` and `/status/ready` endpoints + # ip: 127.0.0.1 # /status endpoint will return 200 status code if APISIX has successfully started and running correctly + # port: 7085 # /status/ready endpoint will return 503 status code if any of the workers do not receive config from etcd + # or (standalone mode) the config isn't loaded yet either via file or Admin API. +nginx_config: # Config for render the template to generate nginx.conf + # user: root # Set the execution user of the worker process. This is only + # effective if the master process runs with super-user privileges. + error_log: logs/error.log # Location of the error log. + error_log_level: warn # Logging level: info, debug, notice, warn, error, crit, alert, or emerg. + worker_processes: auto # Automatically determine the optimal number of worker processes based + # on the available system resources. + # If you want use multiple cores in container, you can inject the number of + # CPU cores as environment variable "APISIX_WORKER_PROCESSES". + enable_cpu_affinity: false # Disable CPU affinity by default as worker_cpu_affinity affects the + # behavior of APISIX in containers. For example, multiple instances could + # be bound to one CPU core, which is not desirable. + # If APISIX is deployed on a physical machine, CPU affinity can be enabled. + worker_rlimit_nofile: 20480 # The number of files a worker process can open. + # The value should be larger than worker_connections. + worker_shutdown_timeout: 240s # Timeout for a graceful shutdown of worker processes. + + max_pending_timers: 16384 # The maximum number of pending timers that can be active at any given time. + # Error "too many pending timers" indicates the threshold is reached. + max_running_timers: 4096 # The maximum number of running timers that can be active at any given time. + # Error "lua_max_running_timers are not enough" error indicates the + # threshold is reached. + + event: + worker_connections: 10620 + + # envs: # Get environment variables. + # - TEST_ENV + + meta: + lua_shared_dict: # Nginx Lua shared memory zone. Size units are m or k. + prometheus-metrics: 15m + standalone-config: 10m + + stream: + enable_access_log: false # Enable stream proxy access logging. + access_log: logs/access_stream.log # Location of the stream access log. + access_log_format: | + "$remote_addr [$time_local] $protocol $status $bytes_sent $bytes_received $session_time" # Customize log format: http://nginx.org/en/docs/varindex.html + access_log_format_escape: default # Escape default or json characters in variables. 
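+    # A minimal sketch of turning stream access logging on from the custom
+    # conf/config.yaml (the values here are illustrative, not defaults):
+    #   nginx_config:
+    #     stream:
+    #       enable_access_log: true
+    #       access_log: logs/access_stream.log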
+ lua_shared_dict: # Nginx Lua shared memory zone. Size units are m or k. + etcd-cluster-health-check-stream: 10m + lrucache-lock-stream: 10m + plugin-limit-conn-stream: 10m + worker-events-stream: 10m + tars-stream: 1m + upstream-healthcheck-stream: 10m + + # Add other custom Nginx configurations. + # Users are responsible for validating the custom configurations + # to ensure they are not in conflict with APISIX configurations. + main_configuration_snippet: | + # Add custom Nginx main configuration to nginx.conf. + # The configuration should be well indented! + http_configuration_snippet: | + # Add custom Nginx http configuration to nginx.conf. + # The configuration should be well indented! + http_server_configuration_snippet: | + # Add custom Nginx http server configuration to nginx.conf. + # The configuration should be well indented! + http_server_location_configuration_snippet: | + # Add custom Nginx http server location configuration to nginx.conf. + # The configuration should be well indented! + http_admin_configuration_snippet: | + # Add custom Nginx admin server configuration to nginx.conf. + # The configuration should be well indented! + http_end_configuration_snippet: | + # Add custom Nginx http end configuration to nginx.conf. + # The configuration should be well indented! + stream_configuration_snippet: | + # Add custom Nginx stream configuration to nginx.conf. + # The configuration should be well indented! + + http: + enable_access_log: true # Enable HTTP proxy access logging. + access_log: logs/access.log # Location of the access log. + access_log_buffer: 16384 # buffer size of access log. + access_log_format: | + "$remote_addr - $remote_user [$time_local] $http_host \"$request\" $status $body_bytes_sent $request_time \"$http_referer\" \"$http_user_agent\" $upstream_addr $upstream_status $upstream_response_time \"$upstream_scheme://$upstream_host$upstream_uri\"" + # Customize log format: http://nginx.org/en/docs/varindex.html + access_log_format_escape: default # Escape default or json characters in variables. + keepalive_timeout: 60s # Set the maximum time for which TCP connection keeps alive. + client_header_timeout: 60s # Set the maximum time waiting for client to send the entire HTTP + # request header before closing the connection. + client_body_timeout: 60s # Set the maximum time waiting for client to send the request body. + client_max_body_size: 0 # Set the maximum allowed size of the client request body. + # Default to 0, unlimited. + # Unlike Nginx, APISIX does not limit the body size by default. + # If exceeded, the 413 (Request Entity Too Large) error is returned. + send_timeout: 10s # Set the maximum time for transmitting a response to the client before closing. + underscores_in_headers: "on" # Allow HTTP request headers to contain underscores in their names. + real_ip_header: X-Real-IP # https://nginx.org/en/docs/http/ngx_http_realip_module.html#real_ip_header + real_ip_recursive: "off" # http://nginx.org/en/docs/http/ngx_http_realip_module.html#real_ip_recursive + real_ip_from: # http://nginx.org/en/docs/http/ngx_http_realip_module.html#set_real_ip_from + - 127.0.0.1 + - "unix:" + + # custom_lua_shared_dict: # Custom Nginx Lua shared memory zone for nginx.conf. Size units are m or k. 
+ # ipc_shared_dict: 100m # Custom shared cache, format: `cache-key: cache-size` + + proxy_ssl_server_name: true # Send the server name in the SNI extension when establishing an SSL/TLS + # connection with the upstream server, allowing the upstream server to + # select the appropriate SSL/TLS certificate and configuration based on + # the requested server name. + + upstream: + keepalive: 320 # Set the maximum time of keep-alive connections to the upstream servers. + # When the value is exceeded, the least recently used connection is closed. + keepalive_requests: 1000 # Set the maximum number of requests that can be served through one + # keep-alive connection. + # After the maximum number of requests is made, the connection is closed. + keepalive_timeout: 60s # Set the maximum time for which TCP connection keeps alive. + charset: utf-8 # Add the charset to the "Content-Type" response header field. + # See http://nginx.org/en/docs/http/ngx_http_charset_module.html#charset + variables_hash_max_size: 2048 # Set the maximum size of the variables hash table. + + lua_shared_dict: # Nginx Lua shared memory zone. Size units are m or k. + internal-status: 10m + plugin-limit-req: 10m + plugin-limit-count: 10m + prometheus-metrics: 10m # In production, less than 50m is recommended + plugin-limit-conn: 10m + upstream-healthcheck: 10m + worker-events: 10m + lrucache-lock: 10m + balancer-ewma: 10m + balancer-ewma-locks: 10m + balancer-ewma-last-touched-at: 10m + plugin-limit-req-redis-cluster-slot-lock: 1m + plugin-limit-count-redis-cluster-slot-lock: 1m + plugin-limit-conn-redis-cluster-slot-lock: 1m + tracing_buffer: 10m + plugin-api-breaker: 10m + etcd-cluster-health-check: 10m + discovery: 1m + jwks: 1m + introspection: 10m + access-tokens: 1m + ext-plugin: 1m + tars: 1m + cas-auth: 10m + ocsp-stapling: 10m + mcp-session: 10m + +# discovery: # Service Discovery +# dns: +# servers: +# - "127.0.0.1:8600" # Replace with the address of your DNS server. +# resolv_conf: /etc/resolv.conf # Replace with the path to the local DNS resolv config. Configure either "servers" or "resolv_conf". +# order: # Resolve DNS records this order. +# - last # Try the latest successful type for a hostname. +# - SRV +# - A +# - AAAA +# - CNAME +# eureka: # Eureka +# host: # Eureka address(es) +# - "http://127.0.0.1:8761" +# prefix: /eureka/ +# fetch_interval: 30 # Default 30s +# weight: 100 # Default weight for node +# timeout: +# connect: 2000 # Default 2000ms +# send: 2000 # Default 2000ms +# read: 5000 # Default 5000ms +# nacos: # Nacos +# host: # Nacos address(es) +# - "http://${username}:${password}@${host1}:${port1}" +# prefix: "/nacos/v1/" +# fetch_interval: 30 # Default 30s +# `weight` is the `default_weight` that will be attached to each discovered node that +# doesn't have a weight explicitly provided in nacos results +# weight: 100 # Default 100. 
+# timeout: +# connect: 2000 # Default 2000ms +# send: 2000 # Default 2000ms +# read: 5000 # Default 5000ms +# access_key: "" # Nacos AccessKey ID in Alibaba Cloud, notice that it's for Nacos instances on Microservices Engine (MSE) +# secret_key: "" # Nacos AccessKey Secret in Alibaba Cloud, notice that it's for Nacos instances on Microservices Engine (MSE) +# consul_kv: # Consul KV +# servers: # Consul KV address(es) +# - "http://127.0.0.1:8500" +# - "http://127.0.0.1:8600" +# prefix: "upstreams" +# skip_keys: # Skip special keys +# - "upstreams/unused_api/" +# timeout: +# connect: 2000 # Default 2000ms +# read: 2000 # Default 2000ms +# wait: 60 # Default 60s +# weight: 1 # Default 1 +# fetch_interval: 3 # Default 3s. Effective only when keepalive is false. +# keepalive: true # Default to true. Use long pull to query Consul. +# default_server: # Define default server to route traffic to. +# host: "127.0.0.1" +# port: 20999 +# metadata: +# fail_timeout: 1 # Default 1ms +# weight: 1 # Default 1 +# max_fails: 1 # Default 1 +# dump: # Dump the Consul key-value (KV) store to a file. +# path: "logs/consul_kv.dump" # Location of the dump file. +# expire: 2592000 # Specify the expiration time of the dump file in units of seconds. +# consul: # Consul +# servers: # Consul address(es) +# - "http://127.0.0.1:8500" +# - "http://127.0.0.1:8600" +# skip_services: # Skip services during service discovery. +# - "service_a" +# timeout: +# connect: 2000 # Default 2000ms +# read: 2000 # Default 2000ms +# wait: 60 # Default 60s +# weight: 1 # Default 1 +# fetch_interval: 3 # Default 3s. Effective only when keepalive is false. +# keepalive: true # Default to true. Use long pull to query Consul. +# default_service: # Define the default service to route traffic to. +# host: "127.0.0.1" +# port: 20999 +# metadata: +# fail_timeout: 1 # Default 1ms +# weight: 1 # Default 1 +# max_fails: 1 # Default 1 +# dump: # Dump the Consul key-value (KV) store to a file. +# path: "logs/consul_kv.dump" # Location of the dump file. +# expire: 2592000 # Specify the expiration time of the dump file in units of seconds. +# load_on_init: true # Default true, load the consul dump file on init +# kubernetes: # Kubernetes service discovery +# ### kubernetes service discovery both support single-cluster and multi-cluster mode +# ### applicable to the case where the service is distributed in a single or multiple kubernetes clusters. 
+# ### single-cluster mode ### +# service: +# schema: https # apiserver schema, options [http, https], default https +# host: ${KUBERNETES_SERVICE_HOST} # apiserver host, options [ipv4, ipv6, domain, environment variable], default ${KUBERNETES_SERVICE_HOST} +# port: ${KUBERNETES_SERVICE_PORT} # apiserver port, options [port number, environment variable], default ${KUBERNETES_SERVICE_PORT} +# client: +# # serviceaccount token or path of serviceaccount token_file +# token_file: ${KUBERNETES_CLIENT_TOKEN_FILE} +# # token: |- +# # eyJhbGciOiJSUzI1NiIsImtpZCI6Ikx5ME1DNWdnbmhQNkZCNlZYMXBsT3pYU3BBS2swYzBPSkN3ZnBESGpkUEEif +# # 6Ikx5ME1DNWdnbmhQNkZCNlZYMXBsT3pYU3BBS2swYzBPSkN3ZnBESGpkUEEifeyJhbGciOiJSUzI1NiIsImtpZCI +# # kubernetes discovery plugin support use namespace_selector +# # you can use one of [equal, not_equal, match, not_match] filter namespace +# namespace_selector: +# # only save endpoints with namespace equal default +# equal: default +# # only save endpoints with namespace not equal default +# #not_equal: default +# # only save endpoints with namespace match one of [default, ^my-[a-z]+$] +# #match: +# #- default +# #- ^my-[a-z]+$ +# # only save endpoints with namespace not match one of [default, ^my-[a-z]+$ ] +# #not_match: +# #- default +# #- ^my-[a-z]+$ +# # kubernetes discovery plugin support use label_selector +# # for the expression of label_selector, please refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/labels +# label_selector: |- +# first="a",second="b" +# # reserved lua shared memory size,1m memory can store about 1000 pieces of endpoint +# shared_size: 1m #default 1m +# ### single-cluster mode ### +# ### multi-cluster mode ### +# - id: release # a custom name refer to the cluster, pattern ^[a-z0-9]{1,8} +# service: +# schema: https # apiserver schema, options [http, https], default https +# host: ${KUBERNETES_SERVICE_HOST} # apiserver host, options [ipv4, ipv6, domain, environment variable] +# port: ${KUBERNETES_SERVICE_PORT} # apiserver port, options [port number, environment variable] +# client: +# # serviceaccount token or path of serviceaccount token_file +# token_file: ${KUBERNETES_CLIENT_TOKEN_FILE} +# # token: |- +# # eyJhbGciOiJSUzI1NiIsImtpZCI6Ikx5ME1DNWdnbmhQNkZCNlZYMXBsT3pYU3BBS2swYzBPSkN3ZnBESGpkUEEif +# # 6Ikx5ME1DNWdnbmhQNkZCNlZYMXBsT3pYU3BBS2swYzBPSkN3ZnBESGpkUEEifeyJhbGciOiJSUzI1NiIsImtpZCI +# # kubernetes discovery plugin support use namespace_selector +# # you can use one of [equal, not_equal, match, not_match] filter namespace +# namespace_selector: +# # only save endpoints with namespace equal default +# equal: default +# # only save endpoints with namespace not equal default +# #not_equal: default +# # only save endpoints with namespace match one of [default, ^my-[a-z]+$] +# #match: +# #- default +# #- ^my-[a-z]+$ +# # only save endpoints with namespace not match one of [default, ^my-[a-z]+$ ] +# #not_match: +# #- default +# #- ^my-[a-z]+$ +# # kubernetes discovery plugin support use label_selector +# # for the expression of label_selector, please refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/labels +# label_selector: |- +# first="a",second="b" +# # reserved lua shared memory size,1m memory can store about 1000 pieces of endpoint +# shared_size: 1m #default 1m +# ### multi-cluster mode ### + +graphql: + max_size: 1048576 # Set the maximum size limitation of graphql in bytes. Default to 1MiB. 
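+  # For example, a deployment that sends larger GraphQL documents could raise
+  # the limit in the custom conf/config.yaml (2 MiB here is only illustrative):
+  #   graphql:
+  #     max_size: 2097152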
+ +# ext-plugin: +# cmd: ["ls", "-l"] + +plugins: # plugin list (sorted by priority) + - real-ip # priority: 23000 + - ai # priority: 22900 + - client-control # priority: 22000 + - proxy-control # priority: 21990 + - request-id # priority: 12015 + - zipkin # priority: 12011 + #- skywalking # priority: 12010 + #- opentelemetry # priority: 12009 + - ext-plugin-pre-req # priority: 12000 + - fault-injection # priority: 11000 + - mocking # priority: 10900 + - serverless-pre-function # priority: 10000 + #- batch-requests # priority: 4010 + - cors # priority: 4000 + - ip-restriction # priority: 3000 + - ua-restriction # priority: 2999 + - referer-restriction # priority: 2990 + - csrf # priority: 2980 + - uri-blocker # priority: 2900 + - request-validation # priority: 2800 + - chaitin-waf # priority: 2700 + - multi-auth # priority: 2600 + - openid-connect # priority: 2599 + - cas-auth # priority: 2597 + - authz-casbin # priority: 2560 + - authz-casdoor # priority: 2559 + - wolf-rbac # priority: 2555 + - ldap-auth # priority: 2540 + - hmac-auth # priority: 2530 + - basic-auth # priority: 2520 + - jwt-auth # priority: 2510 + - jwe-decrypt # priority: 2509 + - key-auth # priority: 2500 + - consumer-restriction # priority: 2400 + - attach-consumer-label # priority: 2399 + - forward-auth # priority: 2002 + - opa # priority: 2001 + - authz-keycloak # priority: 2000 + #- error-log-logger # priority: 1091 + - proxy-cache # priority: 1085 + - body-transformer # priority: 1080 + - ai-prompt-template # priority: 1071 + - ai-prompt-decorator # priority: 1070 + - ai-prompt-guard # priority: 1072 + - ai-rag # priority: 1060 + - ai-rate-limiting # priority: 1030 + - ai-aws-content-moderation # priority: 1040 TODO: compare priority with other ai plugins + - proxy-mirror # priority: 1010 + - proxy-rewrite # priority: 1008 + - workflow # priority: 1006 + - api-breaker # priority: 1005 + - limit-conn # priority: 1003 + - limit-count # priority: 1002 + - limit-req # priority: 1001 + #- node-status # priority: 1000 + - ai-proxy # priority: 999 + - ai-proxy-multi # priority: 998 + #- brotli # priority: 996 + - gzip # priority: 995 + #- server-info # priority: 990 + - traffic-split # priority: 966 + - redirect # priority: 900 + - response-rewrite # priority: 899 + - mcp-bridge # priority: 510 + - degraphql # priority: 509 + - kafka-proxy # priority: 508 + #- dubbo-proxy # priority: 507 + - grpc-transcode # priority: 506 + - grpc-web # priority: 505 + - http-dubbo # priority: 504 + - public-api # priority: 501 + - prometheus # priority: 500 + - datadog # priority: 495 + - lago # priority: 415 + - loki-logger # priority: 414 + - elasticsearch-logger # priority: 413 + - echo # priority: 412 + - loggly # priority: 411 + - http-logger # priority: 410 + - splunk-hec-logging # priority: 409 + - skywalking-logger # priority: 408 + - google-cloud-logging # priority: 407 + - sls-logger # priority: 406 + - tcp-logger # priority: 405 + - kafka-logger # priority: 403 + - rocketmq-logger # priority: 402 + - syslog # priority: 401 + - udp-logger # priority: 400 + - file-logger # priority: 399 + - clickhouse-logger # priority: 398 + - tencent-cloud-cls # priority: 397 + - inspect # priority: 200 + #- log-rotate # priority: 100 + # <- recommend to use priority (0, 100) for your custom plugins + - example-plugin # priority: 0 + #- gm # priority: -43 + #- ocsp-stapling # priority: -44 + - aws-lambda # priority: -1899 + - azure-functions # priority: -1900 + - openwhisk # priority: -1901 + - openfunction # priority: -1902 + - 
serverless-post-function # priority: -2000 + - ext-plugin-post-req # priority: -3000 + - ext-plugin-post-resp # priority: -4000 + +stream_plugins: # stream plugin list (sorted by priority) + - ip-restriction # priority: 3000 + - limit-conn # priority: 1003 + - mqtt-proxy # priority: 1000 + #- prometheus # priority: 500 + - syslog # priority: 401 + # <- recommend to use priority (0, 100) for your custom plugins + + +# wasm: +# plugins: +# - name: wasm_log +# priority: 7999 +# file: t/wasm/log/main.go.wasm + +# xrpc: +# protocols: +# - name: pingpong +plugin_attr: # Plugin attributes + log-rotate: # Plugin: log-rotate + timeout: 10000 # maximum wait time for a log rotation(unit: millisecond) + interval: 3600 # Set the log rotate interval in seconds. + max_kept: 168 # Set the maximum number of log files to keep. If exceeded, historic logs are deleted. + max_size: -1 # Set the maximum size of log files in bytes before a rotation. + # Skip size check if max_size is less than 0. + enable_compression: false # Enable log file compression (gzip). + skywalking: # Plugin: skywalking + service_name: APISIX # Set the service name for SkyWalking reporter. + service_instance_name: APISIX Instance Name # Set the service instance name for SkyWalking reporter. + endpoint_addr: http://127.0.0.1:12800 # Set the SkyWalking HTTP endpoint. + report_interval: 3 # Set the reporting interval in second. + opentelemetry: # Plugin: opentelemetry + trace_id_source: x-request-id # Specify the source of the trace ID for OpenTelemetry traces. + resource: + service.name: APISIX # Set the service name for OpenTelemetry traces. + collector: + address: 127.0.0.1:4318 # Set the address of the OpenTelemetry collector to send traces to. + request_timeout: 3 # Set the timeout for requests to the OpenTelemetry collector in seconds. + request_headers: # Set the headers to include in requests to the OpenTelemetry collector. + Authorization: token # Set the authorization header to include an access token. + batch_span_processor: + drop_on_queue_full: false # Drop spans when the export queue is full. + max_queue_size: 1024 # Set the maximum size of the span export queue. + batch_timeout: 2 # Set the timeout for span batches to wait in the export queue before + # being sent. + inactive_timeout: 1 # Set the timeout for spans to wait in the export queue before being sent, + # if the queue is not full. + max_export_batch_size: 16 # Set the maximum number of spans to include in each batch sent to the + # OpenTelemetry collector. + set_ngx_var: false # Export opentelemetry variables to NGINX variables. + prometheus: # Plugin: prometheus + export_uri: /apisix/prometheus/metrics # Set the URI for the Prometheus metrics endpoint. + metric_prefix: apisix_ # Set the prefix for Prometheus metrics generated by APISIX. + enable_export_server: true # Enable the Prometheus export server. + export_addr: # Set the address for the Prometheus export server. + ip: 127.0.0.1 # Set the IP. + port: 9091 # Set the port. + # metrics: # Create extra labels from nginx variables: https://nginx.org/en/docs/varindex.html + # http_status: + # expire: 0 # The expiration time after which metrics are removed. unit: second. + # # 0 means the metrics will not expire + # extra_labels: + # - upstream_addr: $upstream_addr + # - status: $upstream_status # The label name does not need to be the same as the variable name. + # http_latency: + # expire: 0 # The expiration time after which metrics are removed. unit: second. 
+ # # 0 means the metrics will not expire + # extra_labels: + # - upstream_addr: $upstream_addr + # bandwidth: + # expire: 0 # The expiration time after which metrics are removed. unit: second. + # # 0 means the metrics will not expire + # extra_labels: + # - upstream_addr: $upstream_addr + # upstream_status: + # expire: 0 # The expiration time after which metrics are removed. unit: second. + # default_buckets: + # - 10 + # - 50 + # - 100 + # - 200 + # - 500 + server-info: # Plugin: server-info + report_ttl: 60 # Set the TTL in seconds for server info in etcd. + # Maximum: 86400. Minimum: 3. + dubbo-proxy: # Plugin: dubbo-proxy + upstream_multiplex_count: 32 # Set the maximum number of connections that can be multiplexed over + # a single network connection between the Dubbo Proxy and the upstream + # Dubbo services. + proxy-mirror: # Plugin: proxy-mirror + timeout: # Set the timeout for mirrored requests. + connect: 60s + read: 60s + send: 60s + # redirect: # Plugin: redirect + # https_port: 8443 # Set the default port used to redirect HTTP to HTTPS. + inspect: # Plugin: inspect + delay: 3 # Set the delay in seconds for the frequency of checking the hooks file. + hooks_file: "/usr/local/apisix/plugin_inspect_hooks.lua" # Set the path to the Lua file that defines + # hooks. Only administrators should have + # write access to this file for security. + zipkin: # Plugin: zipkin + set_ngx_var: false # export zipkin variables to nginx variables + +deployment: # Deployment configurations + role: traditional # Set deployment mode: traditional, control_plane, or data_plane. + role_traditional: + config_provider: etcd # Set the configuration center. + + #role_data_plane: # Set data plane details if role is data_plane. + # config_provider: etcd # Set the configuration center: etcd, xds, or yaml. + + #role_control_plane: # Set control plane details if role is control_plane. + # config_provider: etcd # Set the configuration center. + + admin: # Admin API + admin_key_required: true # Enable Admin API authentication by default for security. + admin_key: + - + name: admin # admin: write access to configurations. + key: '' # Set API key for the admin of Admin API. + role: admin + # - + # name: viewer # viewer: read-only to configurations. + # key: 4054f7cf07e344346cd3f287985e76a2 # Set API key for the viewer of Admin API. + # role: viewer + + enable_admin_cors: true # Enable Admin API CORS response header `Access-Control-Allow-Origin`. + enable_admin_ui: true # Enable embedded APISIX Dashboard UI. + allow_admin: # Limit Admin API access by IP addresses. + - 127.0.0.0/24 # If not set, any IP address is allowed. + # - "::/64" + admin_listen: # Set the Admin API listening addresses. + ip: 0.0.0.0 # Set listening IP. + port: 9180 # Set listening port. Beware of port conflict with node_listen. + + # https_admin: true # Enable SSL for Admin API on IP and port specified in admin_listen. + # Use admin_api_mtls.admin_ssl_cert and admin_api_mtls.admin_ssl_cert_key. + # admin_api_mtls: # Set this if `https_admin` is true. + # admin_ssl_cert: "" # Set path to SSL/TLS certificate. + # admin_ssl_cert_key: "" # Set path to SSL/TLS key. + # admin_ssl_ca_cert: "" # Set path to CA certificate used to sign client certificates. + + admin_api_version: v3 # Set the version of Admin API (latest: v3). + + etcd: + host: # Set etcd address(es) in the same etcd cluster. + - "http://127.0.0.1:2379" # If TLS is enabled for etcd, use https://127.0.0.1:2379. + prefix: /apisix # Set etcd prefix. 
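+    # Illustrative multi-node cluster (host names are placeholders):
+    #   host:
+    #     - "http://etcd-0:2379"
+    #     - "http://etcd-1:2379"
+    #     - "http://etcd-2:2379"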
+ timeout: 30 # The timeout when connect/read/write to etcd, Set timeout in seconds. + watch_timeout: 50 # The timeout when watch etcd + # resync_delay: 5 # Set resync time in seconds after a sync failure. + # The actual resync time would be resync_delay plus 50% random jitter. + # health_check_timeout: 10 # Set timeout in seconds for etcd health check. + # Default to 10 if not set or a negative value is provided. + startup_retry: 2 # Set the number of retries to etcd on startup. Default to 2. + # user: root # Set the root username for etcd. + # password: 5tHkHhYkjr6cQ # Set the root password for etcd. + tls: + # cert: /path/to/cert # Set the path to certificate used by the etcd client + # key: /path/to/key # Set the path to path of key used by the etcd client + verify: true # Verify the etcd certificate when establishing a TLS connection with etcd. + # sni: # The SNI for etcd TLS requests. + # If not set, the host from the URL is used. diff --git a/CloudronPackages/APISIX/apisix-source/conf/debug.yaml b/CloudronPackages/APISIX/apisix-source/conf/debug.yaml new file mode 100644 index 0000000..bf82562 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/conf/debug.yaml @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +basic: + enable: false # Enable the basic debug mode. +http_filter: + enable: false # Enable HTTP filter to dynamically apply advanced debug settings. + enable_header_name: X-APISIX-Dynamic-Debug # If the header is present in a request, apply the advanced debug settings. +hook_conf: + enable: false # Enable hook debug trace to log the target module function's input arguments or returned values. + name: hook_phase # Name of module and function list. + log_level: warn # Severity level for input arguments and returned values in the error log. + is_print_input_args: true # Print the input arguments. + is_print_return_value: true # Print the return value. + +hook_phase: # Name of module and function list. + apisix: # Required module name. + - http_access_phase # Required function names. 
+ - http_header_filter_phase + - http_body_filter_phase + - http_log_phase + +#END diff --git a/CloudronPackages/APISIX/apisix-source/conf/mime.types b/CloudronPackages/APISIX/apisix-source/conf/mime.types new file mode 100644 index 0000000..b53f7f7 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/conf/mime.types @@ -0,0 +1,98 @@ + +types { + text/html html htm shtml; + text/css css; + text/xml xml; + image/gif gif; + image/jpeg jpeg jpg; + application/javascript js; + application/atom+xml atom; + application/rss+xml rss; + + text/mathml mml; + text/plain txt; + text/vnd.sun.j2me.app-descriptor jad; + text/vnd.wap.wml wml; + text/x-component htc; + + image/png png; + image/svg+xml svg svgz; + image/tiff tif tiff; + image/vnd.wap.wbmp wbmp; + image/webp webp; + image/x-icon ico; + image/x-jng jng; + image/x-ms-bmp bmp; + + font/woff woff; + font/woff2 woff2; + + application/java-archive jar war ear; + application/json json; + application/mac-binhex40 hqx; + application/msword doc; + application/pdf pdf; + application/postscript ps eps ai; + application/rtf rtf; + application/vnd.apple.mpegurl m3u8; + application/vnd.google-earth.kml+xml kml; + application/vnd.google-earth.kmz kmz; + application/vnd.ms-excel xls; + application/vnd.ms-fontobject eot; + application/vnd.ms-powerpoint ppt; + application/vnd.oasis.opendocument.graphics odg; + application/vnd.oasis.opendocument.presentation odp; + application/vnd.oasis.opendocument.spreadsheet ods; + application/vnd.oasis.opendocument.text odt; + application/vnd.openxmlformats-officedocument.presentationml.presentation + pptx; + application/vnd.openxmlformats-officedocument.spreadsheetml.sheet + xlsx; + application/vnd.openxmlformats-officedocument.wordprocessingml.document + docx; + application/vnd.wap.wmlc wmlc; + application/wasm wasm; + application/x-7z-compressed 7z; + application/x-cocoa cco; + application/x-java-archive-diff jardiff; + application/x-java-jnlp-file jnlp; + application/x-makeself run; + application/x-perl pl pm; + application/x-pilot prc pdb; + application/x-rar-compressed rar; + application/x-redhat-package-manager rpm; + application/x-sea sea; + application/x-shockwave-flash swf; + application/x-stuffit sit; + application/x-tcl tcl tk; + application/x-x509-ca-cert der pem crt; + application/x-xpinstall xpi; + application/xhtml+xml xhtml; + application/xspf+xml xspf; + application/zip zip; + + application/octet-stream bin exe dll; + application/octet-stream deb; + application/octet-stream dmg; + application/octet-stream iso img; + application/octet-stream msi msp msm; + + audio/midi mid midi kar; + audio/mpeg mp3; + audio/ogg ogg; + audio/x-m4a m4a; + audio/x-realaudio ra; + + video/3gpp 3gpp 3gp; + video/mp2t ts; + video/mp4 mp4; + video/mpeg mpeg mpg; + video/quicktime mov; + video/webm webm; + video/x-flv flv; + video/x-m4v m4v; + video/x-mng mng; + video/x-ms-asf asx asf; + video/x-ms-wmv wmv; + video/x-msvideo avi; +} diff --git a/CloudronPackages/APISIX/apisix-source/docker/compose/apisix_conf/master/config.yaml b/CloudronPackages/APISIX/apisix-source/docker/compose/apisix_conf/master/config.yaml new file mode 100644 index 0000000..1220d8f --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docker/compose/apisix_conf/master/config.yaml @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +apisix: + node_listen: 9080 # APISIX listening port + enable_ipv6: false + +deployment: + admin: + allow_admin: # https://nginx.org/en/docs/http/ngx_http_access_module.html#allow + - 0.0.0.0/0 # We need to restrict ip access rules for security. 0.0.0.0/0 is for test. + + admin_key: + - name: "admin" + key: edd1c9f034335f136f87ad84b625c8f1 + role: admin # admin: manage all configuration data + + etcd: + host: # it's possible to define multiple etcd hosts addresses of the same etcd cluster. + - "http://etcd:2379" # multiple etcd address + prefix: "/apisix" # apisix configurations prefix + timeout: 30 # 30 seconds diff --git a/CloudronPackages/APISIX/apisix-source/docker/compose/docker-compose-master.yaml b/CloudronPackages/APISIX/apisix-source/docker/compose/docker-compose-master.yaml new file mode 100644 index 0000000..08e2b9b --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docker/compose/docker-compose-master.yaml @@ -0,0 +1,52 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +version: "3" + +services: + apisix: + image: "apache/apisix:${APISIX_DOCKER_TAG}" + restart: always + volumes: + - ./apisix_conf/master/config.yaml:/usr/local/apisix/conf/config.yaml:ro + depends_on: + - etcd + ports: + - "9180:9180/tcp" + - "9080:9080/tcp" + - "9091:9091/tcp" + - "9443:9443/tcp" + networks: + - apisix + + etcd: + image: bitnami/etcd:3.6 + restart: always + environment: + ETCD_DATA_DIR: /etcd_data + ETCD_ENABLE_V2: "true" + ALLOW_NONE_AUTHENTICATION: "yes" + ETCD_ADVERTISE_CLIENT_URLS: "http://etcd:2379" + ETCD_LISTEN_CLIENT_URLS: "http://0.0.0.0:2379" + ports: + - "2379:2379/tcp" + networks: + - apisix + +networks: + apisix: + driver: bridge diff --git a/CloudronPackages/APISIX/apisix-source/docker/debian-dev/Dockerfile b/CloudronPackages/APISIX/apisix-source/docker/debian-dev/Dockerfile new file mode 100644 index 0000000..20447c4 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docker/debian-dev/Dockerfile @@ -0,0 +1,80 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +FROM debian:bullseye-slim AS build + +ARG ENABLE_PROXY=false +ARG CODE_PATH + +ENV DEBIAN_FRONTEND=noninteractive +ENV ENV_INST_LUADIR=/usr/local/apisix + +COPY ${CODE_PATH} /apisix + +WORKDIR /apisix + +RUN set -x \ + && apt-get -y update --fix-missing \ + && apt-get install -y \ + make \ + git \ + sudo \ + libyaml-dev \ + && ls -al \ + && make deps \ + && mkdir -p ${ENV_INST_LUADIR} \ + && cp -r deps ${ENV_INST_LUADIR} \ + && make install + +FROM debian:bullseye-slim + +ARG ENTRYPOINT_PATH=./docker-entrypoint.sh +ARG INSTALL_BROTLI=./install-brotli.sh +ARG CHECK_STANDALONE_CONFIG=./check_standalone_config.sh + +# Install the runtime libyaml package +RUN apt-get -y update --fix-missing \ + && apt-get install -y libldap2-dev libyaml-0-2 \ + && apt-get remove --purge --auto-remove -y \ + && mkdir -p /usr/local/apisix/ui + +COPY --from=build /usr/local/apisix /usr/local/apisix +COPY --from=build /usr/local/openresty /usr/local/openresty +COPY --from=build /usr/bin/apisix /usr/bin/apisix +COPY --chown=nobody:root ui/ /usr/local/apisix/ui/ + +COPY ${INSTALL_BROTLI} /install-brotli.sh +RUN chmod +x /install-brotli.sh \ + && cd / && ./install-brotli.sh && rm -rf /install-brotli.sh + +ENV PATH=$PATH:/usr/local/openresty/luajit/bin:/usr/local/openresty/nginx/sbin:/usr/local/openresty/bin + +WORKDIR /usr/local/apisix + +RUN ln -sf /dev/stdout /usr/local/apisix/logs/access.log \ + && ln -sf /dev/stderr /usr/local/apisix/logs/error.log + +EXPOSE 9080 9443 + +COPY ${ENTRYPOINT_PATH} /docker-entrypoint.sh +COPY ${CHECK_STANDALONE_CONFIG} /check_standalone_config.sh +RUN chmod +x /docker-entrypoint.sh /check_standalone_config.sh + +ENTRYPOINT ["/docker-entrypoint.sh"] + +CMD ["docker-start"] + +STOPSIGNAL SIGQUIT diff --git a/CloudronPackages/APISIX/apisix-source/docker/debian-dev/docker-entrypoint.sh b/CloudronPackages/APISIX/apisix-source/docker/debian-dev/docker-entrypoint.sh new file mode 100644 index 0000000..b130a97 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docker/debian-dev/docker-entrypoint.sh @@ -0,0 +1,63 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
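+#
+# Illustrative usage of this entrypoint (the image name is an assumption, not
+# defined by this script): running the container with
+#   docker run -e APISIX_STAND_ALONE=true <image>
+# takes the standalone branch below, which writes a minimal data_plane/yaml
+# config.yaml and a stub apisix.yaml before starting OpenResty.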
+# + +set -eo pipefail + +PREFIX=${APISIX_PREFIX:=/usr/local/apisix} + +if [[ "$1" == "docker-start" ]]; then + if [ "$APISIX_STAND_ALONE" = "true" ]; then + # If the file is not present then initialise the content otherwise update relevant keys for standalone mode + if [ ! -f "${PREFIX}/conf/config.yaml" ]; then + cat > ${PREFIX}/conf/config.yaml << _EOC_ +deployment: + role: data_plane + role_data_plane: + config_provider: yaml +_EOC_ + else + # Check if the deployment role is set to data_plane and config provider is set to yaml for standalone mode + source /check_standalone_config.sh + fi + + if [ ! -f "${PREFIX}/conf/apisix.yaml" ]; then + cat > ${PREFIX}/conf/apisix.yaml << _EOC_ +routes: + - +#END +_EOC_ + fi + /usr/bin/apisix init + else + /usr/bin/apisix init + /usr/bin/apisix init_etcd + fi + + # For versions below 3.5.0 whose conf_server has not been removed. + if [ -e "/usr/local/apisix/conf/config_listen.sock" ]; then + rm -f "/usr/local/apisix/conf/config_listen.sock" + fi + + if [ -e "/usr/local/apisix/logs/worker_events.sock" ]; then + rm -f "/usr/local/apisix/logs/worker_events.sock" + fi + + exec /usr/local/openresty/bin/openresty -p /usr/local/apisix -g 'daemon off;' +fi + +exec "$@" diff --git a/CloudronPackages/APISIX/apisix-source/docker/debian-dev/install-brotli.sh b/CloudronPackages/APISIX/apisix-source/docker/debian-dev/install-brotli.sh new file mode 100644 index 0000000..679254e --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docker/debian-dev/install-brotli.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +install_brotli() { + apt-get -qy update + apt-get install -y sudo cmake wget unzip + local BORTLI_VERSION="1.1.0" + wget -q https://github.com/google/brotli/archive/refs/tags/v${BORTLI_VERSION}.zip || exit 1 + unzip v${BORTLI_VERSION}.zip && cd ./brotli-${BORTLI_VERSION} && mkdir build && cd build || exit 1 + local CMAKE=$(command -v cmake3 >/dev/null 2>&1 && echo cmake3 || echo cmake) || exit 1 + ${CMAKE} -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local/brotli .. || exit 1 + sudo ${CMAKE} --build . --config Release --target install || exit 1 + if [ -d "/usr/local/brotli/lib64" ]; then + echo /usr/local/brotli/lib64 | sudo tee /etc/ld.so.conf.d/brotli.conf + else + echo /usr/local/brotli/lib | sudo tee /etc/ld.so.conf.d/brotli.conf + fi + sudo ldconfig || exit 1 + ln -sf /usr/local/brotli/bin/brotli /usr/bin/brotli + cd ../.. 
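+    # back in the original working directory: remove the extracted source tree
+    # and the downloaded archive, then drop the build-only packages below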
+ rm -rf brotli-${BORTLI_VERSION} + rm -rf /v${BORTLI_VERSION}.zip + export SUDO_FORCE_REMOVE=yes + apt purge -qy cmake sudo wget unzip + apt-get remove --purge --auto-remove -y +} +install_brotli diff --git a/CloudronPackages/APISIX/apisix-source/docker/utils/check_standalone_config.sh b/CloudronPackages/APISIX/apisix-source/docker/utils/check_standalone_config.sh new file mode 100644 index 0000000..22792c5 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docker/utils/check_standalone_config.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +if ! grep -q 'role: data_plane' "${PREFIX}/conf/config.yaml"; then + echo "Error: ${PREFIX}/conf/config.yaml does not contain 'role: data_plane'. Deployment role must be set to 'data_plane' for standalone mode." + echo "Please refer to the APISIX documentation for deployment modes: https://apisix.apache.org/docs/apisix/deployment-modes/" + exit 1 +fi + +if ! grep -q 'role_data_plane:' "${PREFIX}/conf/config.yaml"; then + echo "Error: ${PREFIX}/conf/config.yaml does not contain 'role_data_plane:'." + echo "Please refer to the APISIX documentation for deployment modes: https://apisix.apache.org/docs/apisix/deployment-modes/" + exit 1 +fi + +if ! grep -q 'config_provider: yaml' "${PREFIX}/conf/config.yaml"; then + echo "Error: ${PREFIX}/conf/config.yaml does not contain 'config_provider: yaml'. Config provider must be set to 'yaml' for standalone mode." + echo "Please refer to the APISIX documentation for deployment modes: https://apisix.apache.org/docs/apisix/deployment-modes/" + exit 1 +fi diff --git a/CloudronPackages/APISIX/apisix-source/docs/assets/images/MA.jpeg b/CloudronPackages/APISIX/apisix-source/docs/assets/images/MA.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..e6ed73ba7e9ee560e2f2844d85ea7f07eb16cc3d GIT binary patch literal 45199 zcmcG#2|QG9|35skuVdetvV?@P7BZ=9N!k!%D%lbeLYO0x?9$?!mPtru&sM@@uOxf2 z&Q>W*W}2;11=eo}IxvtOhzTThDS^u<7MoI3s zcd|$E@SsqR;2&z8g37Xs4Gut|TwG9FP$-lTil0Xs#S89uz&{j^GK&B2dlYIvkIH}D zAK}sc=QA5nsI*`d-#?%60zdy8pyxlW|M`>mAuH{I4|)IV{_US;>vYtP zLqXwD;gLb%=Qo-dZbR+Zma$);-~c{ETPqQ5mE3!gCuGL4=?WqK3;zQ zKd<3Q1b;{IN%2c>+HNZ#bNG~?Mue>K*^aCn_j$Z?&$35?&*E=_TBKv=-9{cPZN_AD(&-^ zuXEqN&oh56ul!=I!fU_($OY!}A7X*)KP3A<eC@Kl71hVf+~**({bYTJc1O^z_+PG1-jk=HV%Xfywa_P1pJJ;9RyKS}mqg8gr~ zrcisqsQ(;)t{eUwe7t|IKm=Ys0e%62zfVD7Apt>QK>-0FQLu`C4sa(XDkAphKkJ~A ze>B~|&&$g%A}ApE_apzWF6%QuEfv-$QDVG2K$&=@P*@a~^C&|X^*?m-t`GhX_doPM z-2d=@?*H-IE&D8Xq5_}pC=C!X4p}ZhM0c`-=iM*$r!AH-}?KY3?-{GbNN}^MVUCCmr+^n zbj0~pa5UEbR*G1L7lQ=rl+oi#@e`yv`&{PXVU=)6??4x3PJyWO)&|G(b-Hf^`|#S? 
diff --git a/CloudronPackages/APISIX/apisix-source/docs/assets/images/MA.jpeg b/CloudronPackages/APISIX/apisix-source/docs/assets/images/MA.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..e6ed73ba7e9ee560e2f2844d85ea7f07eb16cc3d
GIT binary patch
literal 45199
[45199 bytes of base85-encoded JPEG data omitted]

literal 0
HcmV?d00001

diff --git a/CloudronPackages/APISIX/apisix-source/docs/assets/images/apache.png b/CloudronPackages/APISIX/apisix-source/docs/assets/images/apache.png
new file mode 100644
index 0000000000000000000000000000000000000000..d0075db9e3691081e94babfcdc8a5f6bf8bacf8f
GIT binary patch
literal 8491
[8491 bytes of base85-encoded PNG data omitted]
z)GzkR51)9KYCdenp8})3uw~-F0WHlKOL6c;)m*gZ1*tdMqQfz%&TYZZt`(FT&ZcQ7qRkE2;i3X#;IYT# z5@Zss1`a2iBUdiAY`;hsGS5B`X^$BxS#v0mNJ{*Qb&?D4Ld+^E_vV6VhlhLqnfR@U zo_iB-*ZK_l3!mR|<(jmjV})37#EmY4ul1V7?8c-bi{uFJ2Q$B!PWCL68>nZ^wrQeh z|HT;5En3c58+cL3WNVD~_K(38YB`+qL;faYznRph;WWPd|`S{+wHlibz`}+ZNGAzmA z&{s2H&A{^Tql$HRP-<9^rRi)s>E|R;iJb^RCebG&j5#P|u{^Dwd-as=O1HpwLCf9f zZ*+8NtL({zp8Bcy`h??BQOA5FV<8WOm?h|9YPzd&o z|Cb04G7Dfh`aH5JY%*j14tuOHD$YXJWYYawCdDcup=Da@S&o=CK}<_aPHdb%(QDQ_ z{xt-zT1u>a74AhkN+>%`TrzcYeJ4ojzPor(3Q)PkOI=gH|l zm<$W>(3!g}3Ka#EjMViY`~g_X)!xQ8V&lKW_zm6*CW;x3?8H3GHYv((*Dlpq5MmPR zF4U#FK@~$xoVGLD=_PKqK3e_#cYUBhqYW;x!cPuaJNN498leDCfW|_%5u2_UHN7OB zy0bCowHZsz7xtASv2c6L=#z(Xb+_{E0IR~5>c=S{4|{O0R4Vpr*JxSAHz9?@&TEaI z)7!U!C8(f}AJ()0Sr6S_Mt&vq=+E zdeIMQkHzJYsA89r>Tdv+FwJ>P^5e5%T2<4vh{z=dg!e#Q%&Fa#$LmU_RwjfNHcU6B z+l<@Wzq_p!%k-- z(>moE``UHHp|St`_GoLR#k2bA?V8fQ>bx@9k6LnLGrI|!O@qmgHlxEvD|rjFOa<+x z9fIMlJF{^qrFaKy$~6;*j=#2^VbPsda&vXRvzz9R$Z;2Onj_xhsrTbV2mz5K*3E5kgQNIr6Q07lLWls{2@2))+D58wwO5UKB@bskeDe-jR z&+F-neX7NJK@heQ5OU|;wFlTAw`KL`){G2`w%*Jtkx;oPEkDKfyLkn_^?l*R==_7x zhBgl4R__+S!pZq-loAUKZS2Y`0OGafrxcpE0A~X=FeMD0P1(&KUHL^V%ciU`!(&RD z1o{lfam{7lEiMY$?s}#~C#yh5$gOF{>yf0mmmh4OB~vriqtd6tzX<|%Hb4CbB=W7? zWBbC#=L8h;7T){hlM^2<*H~8{l)aG&*gAW`@z3UC);1UH!FomsXH{?G%WO`+h7T-j zW6erSTJpLm9J)QSQOr&g(pi={eLp}M9VzMCdl_Qi-TlUV;359HeWoGy@xCkCD_=>9 zsx@6N`kfYtIs;ADd*=XP$*1{U^BAgzO?>8#_B_-@+PIbgUXnIw@7MNJ`X2q8}<3uGi|@>^U$Y82x0zLT5X_O?G`?(|t_`0^rnXb5^qJ-fp4;&)sU zhl)LJend#i_zdX%-nc!nd7-~-tcp>>w%8fT@9|{8)AJ|CC||75#mA%GMyJ^Wgb7{8 zm6(-H1O&^=YNTWH9E$xL-aUJK$^1`}=i2kHb==C2oLPz;y*GIUUQP~i<-|8g16*ef zMsws!PqAdUR9+rv#Sr-_{FydK)1>=Ya~oxm!Dt1Zk#ECG?JPp7Y~qQ(u!LT~iAab( zzc!h$286VO8q&r_h?VNeobS^iK}R=^q^^ivvy>YAvm_2W1j3e9wMDyobJN5FR^XkH z19xgo$0DDvbZ4gbJzun-=ERN6vnX-bA6P@3)9jO53Aw=GVo{Bkr+aS6X*+~rjLPv0 z-80XjTiAZpdsQWmC{l~QjSOrYn@J6jZF0Ir90>Do^mV0D-xgJU>O1o2Gp`o;gEcX!G!n z5z{V{Ih)L^P^$IVe{agPeNh)>Y*(S zn;_UVY`lYME>^0zzJb^_MVYPJ2c-k}#VKRE)1FUL-U$&jh%eSMGJC|E&HlJ#Cud>p z>+Px7RV-Wi7zSt2h#!^-xR<{s-LmVKd=OGwjcQyAUNv?_G;BN-Xpevg@$o(Om3~j{gpL+K zsYXbkzZM_wqYmh*0fRq#|5NyD@U2n!&{a$QukxMN%>iQ@+xib4nA~MsDAE5ikRJR? znA)({9WR8TOju}7aICVHMq~CgqUVdk^4VBY*~GK>K1P>W<2z)TVrL{6S3|fh20ebH z?tR;HOP)fxYcC6(Rppny3745o?&(z!Kj^u%->Zw%A7wJ4t8#3AC7nNNP5rs1Y}Q`C z_2Ggk<=Xj=t|`nLcsHzyR4C+jw`)8!n0D^MIoo(bBUyooBmJP^FC8MN!F+gNzmL&0 z$_T-MWo6ObN@>C5!npssRWKeQXHAK5nV z+&rJH0-OwO#gcZ@BV$_&SkrX7VwRz~6=b8P)?2Qt$-hs-$HsKwc-)V80mei2BVj+X zD8768_0mM6+OKU97a4_o^sRUNx?%_0gRdxw+qE zG|Xg(>fGaS>#`qS;ffk`3Be~BQxfT7|MH-;bZel~P7E)M7dt1ZWhHDi`V2kyVYAw2 zsm;A~)5MDmYBahD*E3DAW~@0HJ*#B*tXCxv-k&79xiFQy;}pQqkGfBFTz!X3g9i1* zbm6{bCwcQTQ`cgw?J{8BMkapP|62u427*b_f_E*@ZVxXU!1IU4lHeKK4zhGelKvii z0N3=pUJ>K8#5w|8L582;k68jACW_ot9nRL6w$zC~JO*wU+%=dIMVru&0>A=!&%(bllFN;uK?j90cD#NlkJwo>-;h%3v=79-jGvj`Jc}!B?@1OY?z~<;6D!z4nH{s>2ft ztNv~TDiU1MzAS-u@_U_zT(bY#2%VLiO5_+CBmfbc>-nmj6zF$$LI>?%8U+B0o+{JRko;0+VB5@?{)y4&E}uVqw2)C;4%l~@8A&yrvCP*#}cDXhHFF#)bZC^Y1{kRWh0A{(e9+rJ65^$#KP_sN94 z6rdSkK!_;L*=QkA@5a<~XC|AIs@w(yE#77G(ehjgdTEfhlNnO+G<05AnQZ|c5~=VB z-gLN+T@mw!qh`M*+$139m&~>Ub8X&0ndy*UvE`uMhoSX@gS^YMmbsa+{v=U8&Un*W zu_i<80Hadv-8N>510a+i8o$}0d343V%S#O#Z>hTy#i8DNmWhRqn&$|iH%Ge+G}L>z z?;-X4%=cb=Y=oET^T9*K1PQL#$h=S?R=C`4#RS36l6FtWIx8BiCFSOD6A!+Zf-5u`=z zxK9P-?X~(Fw#viRuXO~uGl$9}Kt2CJp;deY2|ZhnGoDJw!}rZsLJP5QrUHb_Cx4B6 zZ04relV;|^F0&p3InNK9()0F$bc3EUypTbLMwsbyoGd@A7m%>QqqlYqM^~aR>OV&! z88jCBN#9TgoHZuqnhC4NHdc%em&3zLzIhYQkXQLS*y6gjN?85HIIVO-`q5gw^y!%( zj+dnjpyq|@S{j}WBj({QfSZ3`XzxTbQrQ@RY4p>745k=_$YNSyGG}5t;t^M#D$0$nBf%;oUgYG9dQ;(? 
zlP)P1|Au4e?!6Hgtq3}-fpWKsS?Tr(HM-j_@<^D^Uoe0`kQT(kC^UK^-f$NQsqF2g zn^z`m*QXT-2mx-`56bsBId6qh;P>47rAx)c9xJGXb$54vE>j!z^TNTkPnzER1eX;| z0g4X2@?$DYpw}h|oUPMWUSC8LG7{SnmsrGZJHQzS9g6{)nnuD1(;b)M;MixT5j2?O&7WA|LyY;ErFjms*t}ne{ zPq1e?;@>S$ zDjf~e6D;&|`K-#nA2;z=Jo|}%&dX~7&wemvGlf_tKG%@-bo8&L&5{{{au| zH26tsrPhEaEgtkgl~|dydv&jBSd?XHL>e(PQV4-$FMNojJd#>XRDeOtXUcVa;sw0? zB^RSYod`U(e`<{wa|)mZRzjLagm2cTl80`gezsxY z)}YHl$#(#>6q(RDy1#dP2H59h1hm%2)I$W;Qt>@r%eWD^Riuy6Fyuef7f8B=gF8ac z+}=VXIe9_7Tsu>i(VbH-)$8-`iWC@rJ#FBERDSe`2SQ=G;kwlXAc?=stQ)$;+j?IF zMho^|)#5%7Q>mCPC!rv-$8Yu-Xqjd@H(XIPTw%wtk$!lvcMtSlHT>jHk~9;(qyOjd zU~N7tuO%bVH2FR4o+JHW?^7c)BvfVZ3BU^FWBVMf zN0>Z*Ki>Hf#3x^_f5nZ0^jEs4p4|13caAraWRx^?7_mb58RR69v7SFpI2(sHcGf)E z5IA7IzE7KJd)pG{=OPDwja^e?py(p8vtI)WXNSQ3=Sb#%ezzReg`1T zzIS-{S3^9{cs4tPjI1rUl4@Y%iFg0S5)ZurHhMs+qkR#p+`mkz) zY~V4$gd0YKgnpov<~y5NfQw#0V)h zndTiwU^vX5j;4hw#rAVvpN`Jd2hC3@20LF}5NNZjtU-Vx=ATS|LTPlJ>a%_F4VuGI zjzK`gdx92WYi$DG-dVdT#R}*2l+|00R&nj@>|C=NzP>K`h)O*pq4T3qhP$F-@Xn3F zPOOJJ9f?wWhK7deI^QJsviCt3zsTKP7d!e#PU@{6)Q0y#3!JE#1lNv;X8`Te7Bopk ziMj!drs;mZ+WkVVklbzkx67y`nWUu+M6N*R$LH2X)4su#C2urAu`+?4~#) z4Y!7VX-~(OPHX?0x8g(AC(T+Ni4Ueq_29I#3$%lCJYg^vRqc?O zu_%8AEnbqJf?h>dv{jlZuLa$n?SYh&B_Y`zjC3^yKKAP$JvbTAHXk{Vz8Ak2yRh`a zSR~HpJEo86^ma4>JPvIoKM$=gY|sXSdqYQpAt6Zw?xc@L4Cv`q{_PD zw!rZBvQ(7w6}e zROfPa_KwWQS(@X5xCh6dwhb7|uiP!d^;4Du|=fwER#}$G0@Vu8U4QGlF7VUEF)hrfR&cL{7R^nJ??;eYZuP#Fe1%Yd;{6jk#s?q^6HWR8dXjV3#;7ZK8_rHs}{ z8vyyC=J?3?)*pu1?njl=KKRz^xjLqr_UeZ~<83iZL18prb0C0p)E*pTvmvH|$ZNkb zxW_|>?FVCK28xVed|IV)Vc&w~o#fhujoY-swgT9m@G38sY765M z_lo3g4)t30gmoC_F=TY&6Iad_S?#TSIve!s362s?j66>$#_^oy-={&v_!yfPd zUQO?C*_`jO=$wdjxN1ErQ526)2F|X~hnt_$SH8T?`Ki%mV#!6FRDs2!beW>U!fU&4$ilX2O59g}+%8gDqQ3!joT39p2`Bz;)2zIJ z5M>R30pYBdDCr(k4fg~MJcRP*L@IAEDi!r@4$W2Ofv&V42iC22FLh!2z2Q*0+n4)D zp?Vg3f@eVe+KHgyLX$|dvU%k-NR{-+N*n=&NCrQQjE5aLq0(A zcI)-2o= z(t@CGNW_I}2D0zlO=7z{sRp+;5402yqO)NvF`jyrHWiF&%VFtOw%V?xE%*N6VFMJ( zd=6}4rm|ro`h2C{1X`_&!2y1uBfUT0LvtSPkB>k3qk^+Z_uTik{1S>7H5iir!Po}= z03}Q2n`Y40dGD`0*2W!iS3zGrmb|I&!Mi3_4|guMU-A4kEV|RGclcz>)FE=dVtZ+* z@6}zW%^vl3bHKyfNkuFNi;c$mNzc*)z{pEr-Y+zZdj~TPyWQu8e_PcUK0zWk3!K-D zoY&h$VoPwce(5?*;1$^vouk#AVqb&#K&%Iyi|bdY7B$5<7>k-&_&VAv3XI1% z@VoIgmy@1-_7G+=3ppJ#Did6$3NPd(K+sw~`#}e$PQ@k|+-*1S9e#ZG z8*sP0j>Rk36>#@MHii3r3VoU43tl1p0o@k>(d`>x{WYnYq& zKoN!qoxWKA5W*76D=?FtH!#H3KTKPwqnU(R)iH?@5CZ=VeOr4J5SOCki(J^)FNp3x zeAp6<L)@tDV55b<=vO%;-&V5q>UTUed?d2{9q5p%xjz%}5SrZF3O=!IJ2 zKww*f_L{8PZT%hA-)#I@r zy^UW|8Uq`DzOc@yI*mm@I9%`G~#o2{?8F38B}L%Z4JBTd+uXn z=br1YHVmR~ZME*1aJeX==V&!JIk=uYPPm_M^g3?o&CmzSvNMz$&$00a|AA940SJ5b z{pxYS6>z4hBavfnBmDZv`*D&>r|^J}&;q;Cv7s_aCib;rzL@sH`d-T}i_GNORBORf zL=L%)S6wqDN2=R+-TONnfB_&Iz&wwYrg2q35m686zZ{2dfHzk>*=(KIU@4H?7HD3V zv9bCH0cx(y*GZbD_~G6Z%53GN%OYd)?)C?}4t;Snc(=!YddP-X@IHyY#R{Xe*Nx}d3@PC2fzqe2IRxBuRIB)@; z$#hjRn>a7t!YcE1BTXp-laH*FT8`yrE66UUxfo*Z?!|JmG}uK|H8ob?R8F%PN4~qm zu3;GuLm2(VO(pv`iC7?rsYtF90kQL@JA46QJY_~dlwJ<037HC0byp_{9?hmWP+lcwV z;;s91Q|n`TzO3TRWVE?Ws_^hAz)hLUy%~%t0y6LiC(fvw)%eJT7wU(tNJN!QdWD3V zcWSaF!Q&v!ev(u3hEaAly%fMWNvvb=DK17l_{q@(xI~)${iMZ3Ddo$1lV8uVX;=>K z&b2R_ZIImLB_Td)!NXnk31#8Q8=zd%*EP}y;NfsY3Klz~lSDF()J`j5?c#EF@M zU;}YPHmtBmXojpG;038cZ(b%Ih{dHavA!~a;-etk>CKQh{MThWOflX(BESXM2%Cn`ug$+#mkE&QYx9O%+M`-Uoo!2% zg70rk2V+@!fl#JwD#>ig{k%*U%N}!!Yu@z0k1JU%dTfS);u8>q!+YMxly(#%YkGo} zfkzx=XKOg)g(x&@(-^q!iLlWA_;AZ|;No7X)zryDktB;_l|4FYBD@;M4mY*GQSb?9^8XSvwZF6e9W<-rINb;x zwQE|iyn?rH-xfZbQymaA2%HtVo&c3UrNk0(V0JncfE=?J7B8MhM@hSh0pVTm(-9?isr3t8VVXwA`D5jXr!1{C>3#{6G_{zX!%%eL#OPG99Sb8;Mzq_?3^ zb)gI_ubvhEOlVid&Q(89jxx|GdHvprpO<+6@StXD!{Z*Asd9B4unASgTzNijijc-P 
z=|xj$RE99zD&4&7*sC(UTQ59PK~4{wLc-Ayk9Hf463TEKU9YQ$LOdnTc&a-0*@T3I zNPCk^Ak)E-XjazmN9WU2_B`efVbDl=B(G~@MxqIgB;4RQH!oEa#GfYgJ@iELpIgU# zi~MvH*tci10gaPHl~GZ!Bvww{zat2QAQP>`Mvwil)lje{p$c2`hDX(JH%V8egp?z? zE2aQ#2qS(FQQn4!@0{K(aW(dpz9N}`QG>_=K|MbUF`MwQ+x-RfoVOs}NDauI+ zk%U*^;&8EC$7Zvsh&(DE)bvgN-!gM?u}_azr+nP2C<7hBu_V%|w3jR0W&%1EHR(5W z#sUjY0b>14y)j2vb%-WF zE@75V|Ih)R0Uh8o$-r^#M=4-p{S6&AA@%}?JW!P`ob)G0dK@&7!s}CK;W!EuhvtB1 z4QQ>$iGaV80Z?F;LXtc*^E=uWUa&3YEUsuOse@3XwEIP&LiuqM@PD}aSwBo#i4IgB zuzPtDy(@(ez_YZ?6V@Tda1qkUODFz;{`eVMbrFN(g7|(=>8gKP{HU^(9%?l*JD+58 zvTPu#2RtBpesRz?dcjHo?=&!Pw;P4N7=U><30M84!cdURBK-r?<5n9uPb?nKK&=G5 zQov*AAKjoLJ)s+$YQVM^vW|N>G(7~*vdD4nfN+HZ0XdQl_^V@f1D65>wq!970{TJV zR-Q$AC`ModH91Pj3TVJkb1qaD<9_bA)x`iv!U)X~`sNHB!!R(2i$g0Xt#ZK>et->l zfvNem0O81dzjzY;8VhLPHQ5WPxV^lWqn|xxWQ< zu(J4co08eE86_Ac25=0E@56({_l_92UV_nttuh zEFN9a0R%Muqa|hbDv`-~B%&TTo%-h8LBxU7VSIV0EU#xlN$Q~3*}XsiagJ`##w{mg zzDZ~D!`SNe4soNz<(fUKtUf`neC%Kl*?5?@jtepy0R5Hl?_v0UA;k7SE+iHmU9l+u zL>P1>B=3)XfpgS7P-uKYwWqLH zl#%3Uo-z&&8D3d0{S?F%RG}tfV%l36v^S8j0;4--HAk>5Iby_6MCm_O2I%+#_pJ#o zdb`UX0@^Kk0IvsAKturRH}zE{14h$=^2qSfr`RSBERpL%a5(hGOlXPTa_{|JB4xy% zv-rnjwt#vs{&~)UD*VxV znJZWG%0ks8)%hz?Yjt-9IAdkDzvCE2hqPSy8yoip3{9F!;&_oBK{A@|&bbyq)5CbT zO`#>=rYLva&W$)nT|&oh(hpYn_G@o7%rwJ+cO&$cyd%0;A0`GR7L<~lX9M16xfxSi zZfF4z=W#(JQ(kR+l8efL^{*8E(nc~A-5-6)gT7)D;l+Sxm>U%>e7@%h7gaugbn_ne z-t(n!HFsM=s$XwbFt`yg<`;kliK9nzBS7zR&GnJz zwpTp2H6m=M+k3^qD?2o1PiSK>3t7R7sbewBht7=L*mWGYSF5LIA5m)4rD)-r^amVgWPtn|cFPjyy@;YIKBC z(jYO345j~kz0$#UW#LdIfC!gZvs3W8+%c8Hm@k1P<$7$Gfj$YTx^Wib}6UPidQ@C#^6WOxoZQl>ddcq+=-}S=2re;j$a03rHa9bm-X?Y#Kt2zr?}l5 z29(yxy4**qpf|g}zzhWa1h}h7sEy4xMlMEyhl-ocueoGV3&N~ag&z-WFsX;Ii(qIk zW>Uk*f{0wAS4Zt0ZDOxAV8 ztBO6(&F0|_|5V{TJ!p=HTX3i&l^x=?>2BSesLhNMNfJ)J0BgkC&0 zxjH!z*n#)v47-Vrh{J=Yr&~w8YYNM(rGA}9;e+8Ja0lyZygk^9Z*+;nH)!cR+Fx&9 zMM#HBzWUn~Hq;P2RP*6P+>w0-nt~j zqZ29#nC6D29UFRd;ZWXY##1pd22uQ_SHhvumdsR~Z3{zYEh!fojjv9rs7e(KTsYyx zp`zGaXQ=>4ep+s>B9Kp$)9tagoAVFQ1pm6bI|Xs;cFtRlRA3+5w7!YuTz+v_Hr z&qn4H2%ZrU5DS^#hI3lxy%XHMH+Q3&OZj~94-X{2;h+1_&dH`N zDVNlvu1Y8E!1-!1+MOm}C;%8--o6LI-xppZSt)-DKkF^Ka*9m$1I*Gg=fYfSO8mxE z(B0mI0u&i(sD13fCBAYcO7>}_L+b6_ue{&ZEv-PVIJ5R3td{5xprHDFk7Pu{`|r{h zYAuu9o+G)G-+6i@fhwkYyJziMnln6Ms(KX>1i~$M2IwoJ7&(7Fz3`WcgK#NGsc30w zCqeU<n8#6o$EADuzY8E`2nU%aB|BnD7ef`W zSAo3pj+GAxJ;#~-;c}#5UsP-T6y>aJTX^tii=8c@nPyp>LD;0QvRG;v5tXgc^xfLq z2Zk#jl2kP$K^^1oJL!xu>_v+0R}tR$9aomKg|Uh~tRi+q_@!B54^iVfXvPd=m@w`S z04`DO3!-wJ4ix3k?#)r5I+ZJPv6H05dPE~B@t%-S*}o*d0>EZC-;_8kd;uAc51Zzn z+C^^#!Vt@-VWehy;gOJGz@=`we8wv@`?-DU4?W zDWIzX{t4!v3ZwjU87YruDu7wK;DVAiH%X?h;M5F{>p<_nMV2p(cSrX2sULIMWKgA_ z^*b|Ueuvrk+32Vig z4ZUkH|2zhkr@|p`thePOP2uUS!!Ml`pmllP_YQVbN@Qc*2gY*c@bfquW>o^QuM!X= zV0GNVAA4{q@sRsH)Cas`?SwN3O9ZlyJVOzV(hlLxw^ZLy3me;JiGz(tb*)Hg*c6bt~q zSP>e;!jcivt$xuoIvQ;2iXCC*em-;ZZK#f0HPH#xWAYilGWYHstW8Qil@v~)w69Fr zufV&eKL-;SQE^@1$j;R0^D8{b1$boKs{|g4V+t(HJ#!y4@);zCVorai;?${jd2sp4 zmDpv(7{t|L@$6ezAEk<(KQpL9K^g*c!TUjSRI=Ko`OY+im+xwq4)^el2OkJFIqoyx zM}k@e6Pyd*@qcacgkt$63Npp#ub5w`vJHH7na4tMMCFTwz({9d{wAfM7cV%rvdb9~ zaE#3!$3+Gz1{eFUuDVorNKoDdYM~~N&F+S2CTQ4ThNCFChf6_3C4E|8p%~uNo&jp| z&4Rf{CFDL3P+VTgv8w4&0PGXg!``Q%YRV^1Y-;=|751(6AVgEVo;IG}qTQymlZf)U z8|xUuzOGRM#6xIeOjp|M;XUTDIf+0iTHMq25x{8HTEw>)Kx!I%mqgzRoFFesJYegw z)9~rqKj{P>C@#uC4rNs4WqV)g-(tjK@n1fCc%td(Ucp^id}}%`nxiYYBUCiDHjM5Q z{tC&|nH~SaA@SQ>QVe^=i2k9vo|>KSo|BDh%g>l%`EO>3wZzAB=2zUyjJ|} zxJ+d?lBj)EiCS03Lh|gxQB5A64Ds*Z&ZbM;9cUC2#|%=3h0Fno>|%IS<2G=P<2DO9 zjCl{&ej_3{`G-GDn23$_grN-4r}1y11fvU==tKog0|mclD&|Xr^+Mt6^x!+5nPvUM zwg242!22Bg#z&E!PN;0K4PbCTn@L5@`tHoowEVboul-4z7{jQTl3dp=5qY}!6P0O; 
z-7FtFj6NTOn+({T7SS@`q(JZ~;U9CmLeNtm$wi*_OJ(1L_pMbV8Dr~d0AaOzbhrrHuf5=qV zi^OI~Ea)jLJiWti@{ZqTNB=^NRVtO`c^=AACBeQOpp(1^Rz#_oRXTbLiQSZ9+JKJT zV9c*T7|XfDUyGRBN}EzxH^SSoyQi;De9r>PiZUpvixx1}fU{(2!?xnxs=$de4H=|O z%O7t@PR|SpmOnC-R)^SQoJ)=Q&R^d9x)C+#F}K8=B12&fq^OrLSp`Y=xsA8+ID1Xy zVTrNiP;)^SB0ab5w2kO6R%g4lw6WyFYbIa7m>qqID8E|9Gxy$y7b$(Sv|IAsqJCPM zE_oS5Dczas;xIAO&@b331jLZ&f-#4FxexinuRj_G&TiY84u%YxH)|}Qn&1|WUt|iE z{X8o}TK@j@U4bb-MfarBf=c4o9@UHph*Dji710jJKF|Hecq?jZY~xM4A6}ukQw{z3 z7(N33$>ziQib7B2F847ceZenRf@#9%XH*C9w@BK^f1NjQp+kAf3=wRG6|z(t9avy;Vet-@MKLFBZ%BrNGXJ%i&9{g5@SDcp%1Ev9?Xg*@!neXYFcl9V{sm4NXyDr~#Buv3pkQ%4`}nAZzC7cZX1iWZu25Md zs+SWlelWS*C?|Sen#M#IsMa9XTW2q9n(@Zt&4Y}_^VO!=uedKiSVlH_$|yY!UBq{6GUSBhhmZ+@vB6>dVi~4iba~x$GSzndIG@ z8KwNE1s%w6z=I%e5k=Y`YI<$d0dzfDf$o;m@4($tK6M7f$rYCl7(A<|?L^rqAVu zY>K+`;27_-Co*t|)9NFWYH!~qTG${52tv-Els;sm$^Ak1JMa6fXT`nlb+7gF;gGjbG=F(s zF>q={$U!~5sMibV$ofDtokTkTlD}VMCiuokYE}9t?CAUQ&z`pws;4RfG;gO~XEKmE zu;PK&-3$YBIlBWxVN!D3rUQn1Y=NRCTW*BsM9v|{*0K|&MZ%%m(Y=!^_I%rF* z`ZtyFVZiXMvQ|83mN?{!XA3r#{mONRbLe>uKfgT*;jcf-;y-qip`5GmYNW^8-`qdj z8(TiGnsXnnAKziPduhf_Nq_5a&sgFex=}g<*g3YUR;;4nKgUIlq#s5W`>idl<9i-VF<{jB>R~a}4W9aP+qvIxY z#D__VRyx$;Z5Ji!{dekz;MBGABn7%guVQ}q)*pUQ?mpi z&xN|b3muf~+Xj#R>63t3f`VzmCUPN{cqv8nrN_-Be=v08WbI}UQv}iidEn6AGH(!b zDRe8hb@w-Dq-jjGp6gDz-rGgLp6gattm&$vX508^rn`x6I@W6b{LzsWBSDecDsVZTEP$7wWt`l>n1@4UQ>=Cdoa3t(=3vu`fD`{1O0JOeZauQCy!J zGn86iOxEG)chK$?dp7N0XeJjuSE2gS50hAs`dYtn`M*AKHMlftH_vvv9PGzmV^Pv= zI8J=VB0WMXQYbMHadL%|5pN{-Pq)-X%>Z{!#Kb>pP9F5s`30U~r#Oz=-T1!T1` zIQ;xyy!ScH5`|?wQf^;*><0!vFG2h4e9L;`?Wwk9;HdyUj&^v|?7;7w6t6juNo*z`p! zF$HJX4m9;cj$vW0p|rodDxFygg68!Uedh27(-MTLiRPkj(N93}pybx1_j9t`BvIhl zpW6IIbMK!VWWbKf)b*y^YkWJu=qtjR#vd_mfafZ(GrC=-pHfJOtsaKhX5^=eT3{o$ zKwSgi?%>md)_Z;-w8Sj$&mU1G{yS+M$3%duc(CAg7)&wApBXMdDJApSdH%Xf!R_kxxRd^g|S0swYUEA!YNNmM8-@!QC zaG8Vu7=D)S+#T8Dz#`cxLJdi=hQ9?3M`;XD98wk{=FPz>MvHb|W&eytU(UVUd7sgz z0>_O9OYiK*Lkb5PKBby&H2?cy2u!H3nzHmY-xvL)q}4?A)ejT^!3KO3jX5o=qVZJ! z7@hBmSop*u5d0~(Bq;rC$}RPO8#g5~@;yIqL-JNnz;d8*6nhRLdkR&kd3%PfF3*r{vXi-d*@Fz{o38l@t{+4K$Rt>e?&$ z*t$FZB#@Gejw8fQ{_)X*@$-G`QXs~FPX@*y{%YJ?{dX+)8-|$UlEF_!j|R!QCTqCU zSksNh;jN0wq1e^?VN>|P@e3mc>?PV@0*h)xnxnZcf0LBR%ezgpKMx^fbDqPAeoZb> z@_L02bbDo@M%PVW#0*8L(BbJ^Nx3W+E3mT7}13n*{<4K)cbR*vU!>VHhLU zS@DuC3S`?bsqB^kSPO)#fDnhH4TH_FM3(!rZ-A&0jqatA)_>6FN;zsn$CfG2cF`i- z(T3u&=JpftK8LUDNNAZhi~%L(7P3(NZ#F;>?BkW*WVa)KcyqLS#X!U_?{dO;D0cLG zxC@2&NjoJw z7U;xm6WN-~A#t%libuRBVqVJt?)Ejqmo%(@*w_lP*cl?Q5|QBJg#57vt&#yvPD=LA zfjH{T2%KlzdZOh8;kcB7ww*{S4Fc~;Ht=T}+u1dHZV9sV1)wJKk8YFs{Qf@nm-Y_L5$?1J2kcj{X9qA zSkPZKs8c$t12fGWeTO?_@)`(3N6JkVHqC*Q#YJC=8UiwI(=G&!)|j3j=Kp&PAb?*| z-VT4RcjIVu9=G_ZpI=y~U)NT=`P=0c!tl)C9`&Pt5jkya7x5*7S{U%PKfxNio2rc4 zLdmaZ{wD3q(R8V+Z(pjH9JKPS=~B+QcPA$7|HS{*WEuWtd^m_+O$GGO6a{}@IhrF7 z4A>GblWeN8Zkde|w=k*c>(BJGu6G|o8RP)z?n#YQwy85{L4egNA;?i&tbGp%K6!yp z>W3~vr3exTb*o5i!2>R@x&7-lG#q5^KP5=uyhrusPFv;YLMfZ=Fj zrl^IaX|r5=H2-pLW-o$uOo$p2%K3pn@vEl=F?rLaTsgK^A7M-nPP&@(PecS?fh?{b zFWVR&p#QM7hss1HqA=5Z)jwPHOY1jgX(i3RNYa%FmFf0W;-7V}Yv4Er^~1l^(mD)Z zp`vd#9;xSZckZetSCabV`!*?7lu?FfBG|LEWGdaZbm8;v4t(ol2WR8GHCxZNYx1S? 
zz;IkHnexzW5)I1?K52m0YWO|To24^nkEL3aqq)Lh-h-ba7wh?bG;s(cz%S}N+58M# zZz2pQl0FGd8JFbEq}i`VA!g4!qE|eb06rkH0#Xv2*DLFq|6Mr+I+xubelk3AKGFeq z8-<&Bed2z5m7vZ&x)!kIs{Lx|Xle6S{*w%bc-49`|GIfn6=lu~R^;y*QCezDdc?x- z581ydRhPK2Ncv8-5!Jg*xw2H_GctyrS^B!CA&ZHHybtreajA-|_ing~!C9BPcaGh0 z7do2%6Tuad{U9j)ujZXOJJ9@aKxdjF?7A|OLH0V*kYXiv-n6iPHAu^pp-oZq!fw_4 z^~Kwe`lq!nrOG%fu}?!pB0F4JPqsd?X{u2S0TSDj{(S>*TnX6 zhyhSj*FyH2(psb&JWkasJZ|>QDdy_b15R_JiY4=+`{xS;i#-n%XB+4iix|j_Dt*zn z*B2XtIn6jGbzXs~&cZ88aeF|{LnGeBTDd8wM_S|{Qgx!x+us&RC;_cG;pd1vMaoXn zp#6JEYa4jE-bm+lo8waJtn}B^2mN%sCW7B1i7j}ziMZbn|4EasTT!T_d7mEMh_q5h z9xG$ib3_=?*vkHhI9z)87)r^id8RL(zuF)<5+9CrIpNm0nGH5)fytRCO?u4kq5q?x zDTBr;f`NN51UjcemG<8eE8khb|NgJ<5Y~dGU(COS*7NAP?KAeAKZes3q zTpgGcvu$Yz1LS^pB#_|L=KK*Q3y9|2~f$^{!}l>1v0;%w-?%u<>si zpB$i^-r!&BQp)1-;Pki|xGRY1(JHoJVOTtGZ-6aEkp~A;=Bn`m>;66RYOxU{G5C)` z*Jz?Sq8xzsPo&DI#{bW^9KW7~0xSgC9qf7tU+`BvT=n9_+iw^x|0o?Qr#c~J<2b^6 zaMYr_G_T9KlpApw))FkR_!y+0BT9awkvRrQ}oID zi7A;J?5z<4t44>X9#R>VvL4wgijL!f?NoDzDP^6r-@(g9#eW@-sBZcKT(UpUi6}kT zbs_J`W`9Qrat(vPL@CSv>tfPZ_FGr}i)zB#(XTBuzU~|B%eN(Xll7~Lw-1ZM!|y%a z?!jkM%})7dGj_I?QQUW~Z|!*L8V^>oDG92aaI2eVIsL9CWO*XPgLj48upuwQXMaQg zKe`t5`#YH4(Dd#Sx8*)SYI(Kav!l5lcY|mAY zyEg)bUdxe$B2!!_{SZso7S!n1s9M>=f^=Msb##xe#7nY5ohRMh>Ro?_R@WK}9BTf{ z3xxCW*tM%PKXN8Ksw4{2|F75{8pT8kSs)?Awm|%E5=8|Ck$HKn*yw0` zHI;@>umL)w(~f6l)<+`GKx@`T zj20WyPcV`Ge+n>PG@7?h{s6EwVVrt~2il+WsuC%oO}V@^OV5PRb?Fe}*Qrjfa9>Yl zk;gbY0Pc51oM(;Z|M`6A>?1fx|Kke5Gup3U2a==Q?`2|nCj>$@wnwrYsvcPw)*Zvu zslhV$4})bv81;tA?8ByFT#6tB}7E}?T@Q*GpnesgLgBjXpGxrm&ygm$8VDW+=Ml`g^>+eKgdVjVoZA>oga{PjlxF3Ijg z0S_)7$`XtH4FeG%ykoTc#dUBNQC}!6V}jSQ_sYANz)XqJbi%3WJ9`z%wvJN&$JidS zge(r%Hr<3e$Q>lYY4jrX1n7_l#dJY24)FK)A;UtwVR$-%)fN*LLu!-A6(nJCXd*+F zn?aepPK%mapFYGD)X$1(ei3>LC}lmTty;f05$X7#&HkbBa67=}vcoF?$vfrMg8E_5 zC}lF_=jG?qIOp9q)U}@-UcI^d^Kxl(aJB~CDwEWHH50MKrNuBbLBcdM1!L9mI#iv! zX;#ruaUR(v{MqL#=Y9HxzTA|H-*mr%b~ZRwJI|~h3~0Hm4qZziPl8|QnFehLv?Xd( zHkFt_{v(3hd0s3|(3rCAfH((#4oS?dmNM)CE(;lFE{o3fQ90vxsIMA-`e?I48DIY9kr!Kw-_`SLFZ&=zIsEP-=GyM8lv_v6RhOR#bBlbR8GyJ2S?~p7+s0g}4ZROC=o1bK_L_)>~d3>{# zgFud**|?6=rz@1|PJZZc=|9qRm4)7~_L)rVG`TR$ldF-A>h4PJQ_0c2H`!9{ZArbA zb9bIv9>dvRn(CiZUV1PkGuI}V{SXUp4a$(1@72u9o$9b>0O~cp0cS`{n;6*9b(RO@ zUNs5~WhB2ty2d-bM_V-E4qLf|(?t|~@&zXN)RcA{|Cvr)xkXuz*fSk6J?(eW{sI-- z?;e2iYv%1yq`)MY!K$>Gm#A?+vo3S)y+t-9bub(t8Oxoa-GGp7$q`69Bk5hqUw>2U z;x*dlnoi9BYUA1mq(#gv(^b!XrIyv4uXA?p+V)ohQ*L8+HjepfW|7|Q+{Z=VQ-N~5 z@5e&0#S)i3{XdD!rM8Hi-bN|-KgH%*i6M<|#L@*BJALtb`y2xS)!<(=n2?2CxJKS- zcRI;X;h)z*F8es$n`iS|!;Stb%KW=Lo$o!sh_&o2)P8P;rfJ2-L0hZ@R9qt%5lL1+ z_l5f#H?u{`4sxu^_J>bM^U$yOlW$l~v%YFonWY9rE_VyfgCLI=H1f5P4wv`z{IaoE zL{oNFWAJ+|>g!$Ee`a1@8o{RDjbqR2{Y-HNqy*RnsF8#71`8*DMn|@35Ft zIVX@?jTZ#rAWl5JY(z2zKS(1{O@ZLI)jze7W79oNQiAA03UaC7+f17$|7CiKoKxzH zHx|=k?(hQV1m{M@BMzYvWui=*vcuV#U~R;oBkCUx6}6~k(0g9f?FaBmy^K`)=k|`j z-b~J&NKIb?*3(1qE~f9qUVZK z?pE|t1+g{;>FoF1sXV&^a8{bXET@tIlcpsrYdx_sDc zr!1{kf&;3%=3q2FvYxO;`>WqTrT79gT;n|5ir5<}bpc;1AWPyg%+|Q~&DvWWpuBon zINQ zJ^$fkRzne%s&O9AHrXhuJxab$iYYfris_N*YQ7&+Es=c+Sh0x5Pv1@Xpc-)h#|6Lu zt$v!Hnr2ff$@d=n0Le5knn#?L zni$<&|D-nl3hWmDsqSk%;&Pk#R_vH zMDoc#Mz#XNU@uzlH%^XlHML)5jg4=CD8aOz9I#)j3T0PfQY0~D0}IeUfD;$QMvg}I zB|eK7Q$6LCNERU=CoT`&jWyfPpjl=y};!jv3qkrJz`ahH;Z^L)?j&W)xk&f=- zC3dpNV7U?<9dqNkrr^!SUCeHh7bp3nP0OpC&(Q(q1!JW%AQ%mn+36sI5miEp#RGu# z$LFOX6o^2x()Qq>^~)GwbH`M?})$s@Vg&(uP)yFBkca>e6RU!9N>u|*-F64 zDY_2R!+_>gZ4OA#0JkLp9f%5o2CuNEMd!K4kgyxThi@`mYDJ>+XEq8!y&Bs?z8I zfC%~Hy7rVMW2R>x5!$RPgLWOcb_8Ni*WYsY8fWshS!G4LDv*YpDW{N9D1?TQsI3-? 
zOhLzy-t7OIf`aB`k7NxDj9lahKS1_30R!)zKmMgRU5fJcE93z2K?TFgw0Oh^>tAA8 zUiO&#y1O%N5W{2hIF+w|#$9H;Py zj2GRXuK)5Fb2z8tcLO?$+Rn~>r2X_+r}nV`7BSxz{7r@mC{#VisFAbTUC&YRNanWB z;2yk-sz5xR4Yar;knsg5=zrL($Z`ZEMBdW1XW1?oTie^K=ZDLOjY((7LcNGg-X!Ja z!SU0>+g1o1mCBBID1A{&*Q)lA$v_NdpY+JAPw3@qFJzHWSCFn%3zqAn-56@ zR?9qPAp`7-RG%Vx`@^MQ<%u=kN3zHQQ!FcD?x`xRWAs#0Ba>80|3eopy&p1nC&WLC ze6?Z_Aail@e(u{DG9&a?NYS%csQn1WypGPm(T2<6+&>?57Z`ljJ=?+nCbWDK|MDV# zz3Nn2M7I=TVPz`SG8b^?<^5T&kl=Tj9Gw^pi=(IB(YHs8ryCveOTYr?gNVJ0MM!~- zY^UYqeQ#Hgv{h9vz}*CY|235SqlEjZrMB{HlWj6G*-J;mp!s0C8Ge6tujQtr+4-tc z_vV98E^T)s`N?-J2O-Ne({e5Z8M4-xHiFu~G#gBeR=7Lt0lN5L0WNCq-RiDlULXTLsQBKLAyTKg6W1LiQExnObz(U5+x}Yb$6zLx}G#wD?zZ+U6pCB37^XMp0x_pcwG8TS8MVbBngfS-D34SSo zP^Tyw$&1n*?^VXb@zJj%V?k%D^WWCP17E=wJPY!7t`6^_>=^JLM`w9x_clZ6_AS)MS8_C} zEI%L04b~D&>l!2zBVY!ywOcnN2$*Bxit=k82o@~N6*f&`_^_+N{Vc6K$mo^1&Rs?W zT#e2UU^@5~q0V?<8;lh-j=cx27na2r8E-dS{1`4ZKSQb+jny|Lf+1~f)hyIrODe*% zr_V?vSRY+>ok}%*qW@72Q;_oHID#S8N>((?-e7}{->a4;1s`7&|Al_(WqZ*Mo7aSTR>#m!xU1-G;|*>3RL- zR=(+UB?r(?TM}&1HZr0Yw4V6_MCsD?>YQBmYSs_->h|+Y9*MiHgzW(Bk39XcLVp@c zo_C)`*6S&gKa_diT%E}!2wJ_px;)tqL=K~M*CFs-BV%F$Z5l5yf&QXYzIq|jf(o>L zcC4l>kLGLSi=2E-jN#D9>MISt4!lecm3VW~czd~hz7nBA9WN}RdK#dR+H`wq;jlkP z(=T}^c6q$%hk--ZY+m6T;Cg!UCyK2vv_^bv3aCZmjRxRLoI_8R`@L`ZAP)IH6CH=k zoovCm0$0E7x_f%qHHy3vyY*N^4S6jp_kpgYhDnZ4r~%O46Ogc}KMW9At2sVC{`6wt zfZOBTHtL(NeV`SfLOu)n_N{xC`M#qJDk{X$m^zajt_t;$UMP=__egJUx|wcTyaO6i z*@m$;iUrzFU%&oSLQ71X{=LUT-22v}CV9BJ5Y@LMm@1#QVwsFX+XvIuI$I^h zX(n55TpydGd>80}{?RPbGZsxxPp9c03M;hIHV|>z$@zZi5rC3rHk=i5#r!H3F7V{UiAn`K9}IHkCJ!HPj^@X_ zY|y>|Apo}eX>|#VPg0+W-y9D^=*-S|xcnzAoyw)=M(}|Bj|AIA>Z2=zV}lQCoBs_) zYe9ot?@EQWB-u(;)}}%ie54<+)6Da}mHudgkLJdj$jongAZ*kx0n>MDAvUNr&(z`z&`9#0s2FCQe>G=Ba59d~p!F_GF7 z)5Mc^8C=!8tL5e8(IZ_w*>rnz^SpZy+Y=F?=Q_wuE0^q2d`XG{d(j zx3go=h1}2vVvFE-W8F&g9pgAsXig=CS{v0 zc+9(K4`xY8_=^dY67tY6qT5R21T3OiErv1_LJo(-o>W>+rWFyA&;a5kedlbQb1LH$ zt&nx%a(#U+6>x_+RLd+4SU~NE1TDK_WQ7XD7lQ)v2s+QH4A|-A`HjE z#Es=4yQ^JXAlf2Ys+6(^uCg3|D?VNdv`^KJMPUYG+Fji6D?~#dx61gj9eV*iq)1nt zn9g{ys>}5J2_*jQ5Eoit7dNT&Zz8ZNwuC5~U#`dn7(&xnnLZYb00GNpDo+t%fVQZ# zDwYPQ0dHv($YhE)jN8i(eI(+LqbvHbC~&<-G_b~qg)Vu0%iAiwsu@VU=1Ak1t(R*Q znXljI{@SZIqa_qfD4b#R#gb^-=I;`=(N*f-{){$r>`6~BXv6Pok$+K9B2lkT2pec4 zDl7nMhdCmfPgAY#KcI%Kn>P3N^^GZu>QkTv?XK)`zz{KQZGo%HIb{pLbL9S&4W-@9 zk-L+<#}nTP&r(u@`>IE0Q$wiPdM%9S>RqRHLo}r6E0apT=rfg#(je_TsWdVfT^4EQ zgIeZ?Nk}_y+=fTKe*y|H62+sVu=&Dto*%rOzgVD#*!UGx7t85JW|YNKS*xnTv}C?~ z!IX%FxNMlP9WJ}lvfC8Tu_xVwHK`QO2 z%r_@0DWjJlV3Di=x z9CoKjnN^YTMbe8?f;NH5G))L z9kUa?6Ed0OSj!)>xvUkkoIqJ~yiB>fcqDeQ5@Beao41zzF0MZ2#Ov@Ed9~+N|IUC&exEx>Jq;2M$$0v+SjK6VPvT)bf@jhYk2e(Mx+DCCul{dmSII>>J*w&)(XW zb^uj3wHiwB)(;;4#$!~ zQg$)&D+{$KWGqG>RsubGYe=-VyhvY7L;67HXGe)`ww+`Fp(R5g{r;L`qFWKh1Ia@< zvB%uDCg&#aR-$`fAFra5^u@uK_Jz`p7d;VO0Y4(Za6|St8q;d`%B*!wA#u)22N{L9 zY=;j=3evP{Sz%H|IN)v?hUJ=Wd<6N{@qzsEHNpo4L-LDu_qT$sfu5*C{^YtNR>P{A z$0_4VOc62|l**=tKwkFeC#~^!mlm=vjYP5=LMqV3pZyZ5s}^_Vn<;i@`r9~d_%+jO zeM$LXJ(%;9Iw8#VOhqtd`1^&nB!AnpBLbJHhsr^oXJjnYVlD%j@&Rw(X;}tl2P@>d z;@{;m4h)nZlEMt7Eu;IBI?cbQK*kVvOaWQ`rMIfSn+RQh8Iy1k2cOH6|^JmzdG2|f+ zn&j1F|Hy^G&6Y;qT+{itre-d$L!1N3>C8KhwqFEd<0VEig_e zCoXx_lo3Yblxx_8WE2u;nw((=1( z8H!~)-R-b8DDHCB(YGx3MT!1+Sc10|xMY<8b6chiQlXYv>@h2G^0-%@#i&2_hmOvL zfXYAei!68Ni%GbYE`Nbl@{7SOQhCosE$Z`OWBG5m6@HlFC@e?sVK*MEw9#uyhS+$e z>+ei&_=BpfDnqTUA6UxVZy{RC!u50=buH!(ks4b#gX#WyvIWA&v7E-R`9FSTlR6K- z9|x9$pU&<%!bS&`Dh{a6MkA$&Z@_@)auhQ@UXy$WBpzuW{s=rgtD$_!2ek-ru|gL^ z^#>xKF!450N3}`TEu54ya@d08uN@s}Y2VI|pB!oTfxF1a$f%C|9c9A)4bxqPiryE~ zKF9gPlrZy$u|Fpyf8}wOpUa>aDZHpd!BFb0J!FNZT7OSJeryXO9aiGln}~e0_`~=B 
z6Ur5V;x}t7i2E%4ccTV3YBg%NjVKUZ7X(F9O_Ua;kRufRzB`A45#338sqN*D zL(a*F7_Dx;vyYUx=wHBCqhv9P3Iw^enLS{9xO{nqNZ zCbr;+=C3jRxL8X}ncoFXj#8kxZ39a)y>)lMcfe;qp`&f8GAd6;oYFPD0r*K7YUR8pk1pbyQ1kYp(*<>9Ws%Gt4AX7yN; z8Iaz3J3=n8&jnUXJj1&_&atgJeDstl?-bnO97a5{5=&<)V$Gk8AT7@8KCWEgk-@H> z<}XZY6LbZ6;d;Uz^Jf16&^VM0IP;N71I%n~ZdrNj0 zqXg!K(vq#skT8i^Qt^tKk2E71#cG)(TzZ^bY2(@#ThBo>w60PhRj%$2-i6*&wM2he zt;XUc?XJ?nv4@aKf7Q>b-kRRa)1!v1wr^2YDONduyw)7l?h3y1T5K7Jg1n}Qr+8nS z3;~-r@v*pL<85`nM*_qRGjV%(cz8mhj4!8v5_ncBcd! zcQ~8SJN@p2e|oB6K&EC4y7ZsIy#;yy6G*cQ^BWYPrU4bi!1p6+F4--cwaJ#eW7` z*`H~&+o}}!4SY!$yRYKu;59ej2}g&-u|?BYy%=u(EEW{Po_?g=KNIPU=A?Z1nE^nsL|H(AQVH8(7x^8wyXFCdTHN67c-_ zvYLs=mUN1P&owh0*ZyYOzKB{~v8BcW=(o|1&@CFIL7&HHCyd&IXU6d^CxK$t%<}C0 z1Q%a>sMq|7RWz7U)qP`43m2o-rO zyYBTtjezNG)!X|%m$=)^PRX;lz8_ChDAvru|>*iPh+t6?FakezX!)56?I)kS^tVQ^otuhwJX9YaD4A8NE|kK&9Ho z)qBB3Qiz{?xnU~rGlvdgDG`mkI%CLIv#&6p&(1%b{ml06Vk}l(GeIbz#ztr+L4YRI zY2Qac953^X56+5G^hpNx2C}ja=T2L(6j6NeB+c}KXrd!TT56w@#bp!2{XF9fjHF$* zZE7W3jPUYvFGjHOA&Urg%++Znxq;z}K@*1SrBp+&%4fW%jAY*)R7cd)3=k*Kgv^zcI}BPE&=? z(z@9mP8uuBeqV1o@{HCnX?8lk#Nww~epc(aj>b3cjehIq=cElp#|SAg64uBm0&)zN zhwmO{5TXJMPa5S{5u=*g`_E^XEG2c{4Z6Mc^*s(>*fP!pk+N}HPH;PhJVWPasN#bS z;12VL*uQ9yFlhxlBi_C;$Q4!-2%a9O4R=clet2H>yk zsrJgPv(4e`bf6!xm00Y9c9kxLd&}1shW08z{CyXJoM|oqr(zQPdp@){@#qqD`s?G4 zB&=fvpKsxk@b?qzZ%>q_D@y11|4a-2P9NnGoWCB+?AOmH$lG6B4W9Jf()I>%uLr)(@PH;kGFmKbcr!h9reG_n3Nw2oMZ z;L51K%J@@d%O(gJPiwOiVj^-~g~}zi-D{b8f{F1}RE;GoZCdEyz&~1|KNZ@KWu9bL zblsX2v=)xyA2h-AYN@;h*SrS&aou$?O`|#0c64Z`T`8Foe*k zb|O(Q{P4c;ckTnMjH5A+|KWRelavePGe5 zhc)QYPB9D+N$$@#&D(|0N4gk%`c$@@c#C&;vFf<)#T|;Sfl_v}(+}3RP}5@bcO1oy zjJ)-HO*?O4Wg8Qz<^IJ8u~E}SL-lu5K6`DBmIvH02f6xlb-XN56cX>F593oVXip_2rsd^?#4FZ&06Wc zYP|}^B-s#XcCuRxstf-1%ta4J@-2nW0S0VgUu5j}yt&$Bx!@>}5We$b+B@5|K7)O^ zGs9?-TtE{?pdgTCrI>vfJ>yGDi z#k$E~cw?c({u}ZnxHLIpPZa6($nKo&s$0@c4!TTTv_Hou;?pnvoN-!nUl3di9&UuSYGG}NVf)F6 z^0fMw;wdK8Wy772-DlLt$><*zLjult7=$_-G(Iy#UxeD@U@Gu;W(bP-#w%ISwZ)Tckb?HMulRQAkAL9`Giq zdm)9#J(5Tp4=s9QK1wp+gC>qWYo(g?%8qBJ?jGcrKl8Ot6N?PnKCy9R~jxzv^C)4xy0?2~}o2(VsSWiWo6F&+C%g3$@6( z>Hwmb5_^SO{>DalwtTETXV6r$#G&|j4M))TQOor8NNMsxYru`Y2M=&wF9dC}^VcEb zp>Qfhb4KTtbM04+x9SA&_GafE*5~1K+{FzhODmDYMNiUm2sE2Uwum^tE{4Ra8jZOK z>ErbUbBS_tV)kW{rS|8msSp)b^k1m?_}&l4bI|tjg=5OZ6A=Ko0sZ;M+>GR>&EDJ< z*XaBsqBdW@elrVKK)>rU_M zKwkCjHleIoF^iyAS0wv+N%lF*-t&MZsrRve;D$lL($*gQkg4*|g)i?X1bjl>d6cSEc$;@|hn?LvTi}`nNk2P;ahG|Pm zCe7o(Y}MNloghc$AK)v8b<7e~ch`e!ocx#9wlBSTd9FC#cGY+NdOk5Q{r)=Qn<_0j z<<^BqlhCVb@BYzYpBm*x>P5<|Fa6x!t(7Ng-6;sX4`Xd_zEc}w+6xNHpxK0Au$Tr> zo`GYu>y*`AdjyW>Tu$Su8p(h(bHnxDZr1zgZNfaS1tKUduyd2{!pTRU!>4jUayTVy zKD1^Ct|EQx*2`H+nBhcX%FT`m>-^|MsqtWqy~*{KM$;S2PXQgNRq2&A_aHsF$6!0d zo7p?Cup(DX`5q=NtYgRE?sEKrOdTePCYeT`g`;VJOXH`LJFo?7luz~^#PZC2zae`{ z_MU_|AC_t?Uy#-K_cL%2EhESt2N|DFSrq3A+NJ&h4K@4GA`N}l7gUXINiuM+su<;@ zX1|zebaW=JG7GAptXI;5B~mPuGx#rVYTL#t?pe~zK^IdD z{C4|on#V3^FmNIFPox%0T39a4A1gNM1THPh!Q758n{7+|{aJL&P{zoUIrcpX$?v_4 zcsL=8`(m?jn?>B2Yz_bZ(%Uv>+mbBe1S$uZRt?)7~4(M|UIPRg>Nb6fiT z%yUG<)it)KzR;p>ypU?**U^`yJ5*=80EQUc7;UrwdHZ~fFtEh3Xz}gc zEs-}to4jsxy;82ny>DV_1MM}MtY<N&^L0}`Ra4cgzG8F@B{ zLt;2&?S>S=$J)gv#N@=@ykNAp$J2YxAhDe&IqJEQR<7!Q z%KcLLf_OuTr@o5fV?@4&pMBj)=g#<72EkR*#Tk55h+C%lojWR|!W%d9{@hcLNsBhD zSif%8!8i})yPLcQt>|V_pYCXmwi^5OdJfq=atz#jfKInP2RC28vAL?Qsrdy|jCm_P*9KF=9rbU(3K7-0_`SV29^(Msp1SVRg}qzD zL}TR~&^ph*hRJG^fL$a4ncj+uiZutSEdx2Mj+nfGXw&xdmL0!d;6xgUFF@g`;Y(?D zZ0zV}#pQCR~#so#P^X=Mk8{7bwf zs4r;dU?r_YR38Gd4|!NN&Y6N5Z;2{X{YEONqX=pVANUKr{hjcrcuL1H<&&sHY3naz4H?5aXspW|$$TAI0)a8~lob@f@M5b&E>#cH`PZu(GH$t!&1d zyq~P5x8-0DmyYekK%TuIbbD#-D`_@&)bx#^m^5IZMU@jlubAH9^U#(TP<* 
zeh~4SkI3t-KgONhszifhLR>L04j%)^QDv|$@hmrzK7|rx$W2?9i9)ivmdpHIH0iK)H1BkI)?!=#`}&JdG(MO zN-_9I=bg_mUP|*#tb9_H6g7MAaw>|<^M|kO28F2zgBXnP4vgQk^q@Z=UGcJYQFC#} zZ=-3rB^nB)mX&~iAx}WT;A)4g72cvl^rqtu6m0lD^%O0B+kkUldb*#}%S|w>@tT^n z#=So?`f)50SN4*-+Ao5>3HO1n*s_GfV0r%$v@7+DcEG1KTCIqp6PviQxa!?Z;Is3; z!@30Tgl%e`WuAY0$0<_P$Ei`&81iQh8Ri(p-{u-CP)c9nIZVXKYT5fwLfnWdu+ z9C{A}E&tni(c;>d)GjWa*9KP0H8CAdX#NG&rWv0DS#oTJ%TXT$iow%^liR;ys?cqI zm88Eol%zX3lze>iwKM#7WqC_r_fRtLsQ$JMRFkCvDoa=?RwF3)sRUrO_wUP4Exgy1 z(G7)Eo#opV_~Ps=H%zc3KCt;*{zbOydxgyXlz?YL4qrlFJF4oK>%Q62<9`S)^NJ1d z7VwTsiQ+ih-8XhMFvEC$`^GM|7&$n~5`dSgu%jwW$~D|@zSF#s;Nd&s!re2)H1a$8 z*vnPnPP9=^ADP-zsNk>v4vT3$SRcC}eVK1^<5*WvkkkH#h~Z(X+~+#Ii z$!f4OJG8~c_%X-dVo;M`=NCtE0;5fX>VZCywB945&6Hwf#%}Qi|b?@o6y@eaP{HYBIv0$XKkz-BB9m z6nDx%N(s13DxQP84E1C71=^_+OB)mndTFZvhpVrSi~8H5rG^+9hVCA^OQjW15C$X_ z=|)OK8kwO4NtIHBp#%j1K}tfpTe?G9Lb`ot?(g3F?t6df$2{M2&fa_Nwb$NfNi&D< zk6I4CCe-z%B*=IeS&A)HWZsCSFq|4H3@uhOGsIndP28-rsH0+5=Aiv()R$B_GBJHW z1XUFc*$94t17YlSa#X4tk65xECv!7*(p!v>B8SY~+*oF&G1q+Ilhv1zbNfZyZb~ma zlOk=7fI4EeI-G7?god9S=ZlD4Z@z}wP3j15*`~KEP>!A1iU1WcZv7Jwb)_Z7d45wi zwI>nZS^xX7UWk;+_dr0o$=B0gEfBJ-P}esIEA`C?d1uW=lcxm0B9 zXUIZqDUtqZb}=ZNC%0= zD=Us_sxd5C8!nIC$yMLOF|E?W9q87BKc(#x2OR4PCw;y`I{j z00GxeU#oyKaHk<2zPiSTYQL^~YTj^9ZfBT;bmdnJ3Ck6^@M8=9=DKg zPn#?9sphYhCyL#iuVM7=L9yT=uNzwjeeda^F0Q;Va2QQk6&P;IebF(j`y&Zz=@sD0z! z^T7Be8ZEukzGtxer_7aBY>hIu>`z+o3zxHNB=yStw;PS_qyVS7mKFuwRsqq;66VYi{&H< zyvT3ueLS7&@#IILEP$OxPfWMpIi3yI>_?>$k&y9B%?O-rr}(W8PX%3v#S#^_#+*?% zSR7H8g4T$uXd-?-v!1G7Db}-8uZT+vKBu6tAXRGi1#bY!Ce1vYiUmhJK<`~$eVOof zG{$-;Up0kgm0rS!?usXpd*dx~$Nen~5getWb64n@$frFaiSR**qi6RgS=E z!h-%jYbg@q&b+6pUQ*oY&P>f-K;aSz{W?@y8ARI_NuDjG#3sU_SI5A}xI;{g?Ijc4 zkZvhzK|phfL;j6IV@j}|ny~{e{6L3iF<82psP}=KQtcWw!NK!a|JN%s^NGbaXIfVWbJ{yL{dRIaqox0oDW0_77~l?G{(C45!%)Y zn+?ZGDiBht+>B@8c)hV=UYEKQsFUXbLt#f+W{YZ`lnGCVBQk`hE$?l+& zan}SW4%=@({AB@ghQjFEIjwr0!uB;{pZsfYUg8|x!!A~T|7V-z@ZAJcs6APGhn8Z~ zX&<%t*7_4}Gn2U36B(;otZ;v6zTEO84AJy7_7@r0tb$r79x)t710dRppj>SrQSX4v z0XgDj6pBX#n%qA0d>2W3AE;OewBE@5($HA2FE=q`kY^6h*Ugp^CC}4d@BW*^a5DoP zayM2>Yx`!rUqb<5o($8yFGg=|9kS$Bq@rmD)KmugOUo{mTf)jbG)590>OXLP^x=Mp zs<#1gvm~&^cgU+i|HMZVGOo{xp*LGEiMZx_RLtG{_8QMl?pjEy9}jqTwKV?t5K)?FKdXtWv;?aAD;qN8S=^YL!#5>K-B-DMWj`{p09m^hWT z*TyFg_k2UX6s%4ZapDi83;_yV{35UtizDr;~l&kAD!&Mopwu?rRe8Om#7aA8!5w zzs7+Hmz5s`i$FE{{r(?ec0jk(DKGdbch*Ad^&MY&g7gvYvX3S_!@Pn*nAn z7vIb_kRv7xInyBHytugE30jk6mHgQprYcpPpUJt&_~O*)EtVQl@|tA26;G{E?}kh4 z;yG&ye{%Ac#fFX=iZl+KvB1B{enKeM%Fu-fo_(k?B;Gf)wqJc{X~OybO?hKz4Ao&! 
zUl_LgyTldV7fr)y$F02|EX1t39cud93YWQ}Wa_4GT=1nT$G?fC|Rcc*!6`lH;xF6|3E`Tm39=7Eom z`=8)=qO`Ok)7KK?m5C|GW4D11mDbbEpn-t=z8GkSQD4wl4x;9_rz(Xz-A)4g>S61N zgCA+0ylKzjK6~5kh4}Wg28TG=;r%qm;!#AecRSRVA_2w4JVp_zn$eoY_cu%}{)$ZF z?;d?Hp&u0=z)1!M^g3-}%LdQnbiyMSZkrIsDV)iXUGG%M9d>kh6f`X#JG3w^J@e(7 ziH*A5KpWBd)5Q!{RDUC9@6{hmk;r$d;g_c-wpA`)q;iu-zI}g>6}~Azx3GqT4Rh?x zRFY;RQTSEIGW7th3i*@qc;0lQ;(?pibyI|hXzJj~9qRt?cYEtzL@ErZVeiaOgi7mYY+|Vl_{;BcMD^S5F=QoaRcz zOacLLxbi~7a3z5SVsvRoC7l*k$|4X-1V8x%6UM-*5V$2bL_$JBY_msiUA&m@{JtMK zTkjP`j!{sAsL%bTy4*7pZ*OTCr5V)YWCehDr96g{(cu1btgEq;0_#Dt3s7jI8{Hi2 z?UxK5GKvIA6Up~26_V3fXIoyN+3ty67O2uBrQeUt3vw}OR<(?Z36U20Ziceu)#5eK zBqj$`V+2%)LZm^t-pSB?@bOf<3OZ@q!O{mu(+@A1>p4a5qHt{s{3xIT8yhRrW$r zj9@Hq+~ao23nA5c+287?Y_nl^rcx~-R@f&jOpmE0Efn2#d$q~KAGncRkq948qM2*6 zEa;xURL&X~dR4LkcHJVx|s<#FOaQfHgj)m{!CfEGzXP1 zzCh_hDm3E$;;*j{3zhF9o3DpMEm^~3ePlQmFIGCcx;|Ar)NHwfaXA!_&mXa3vFC*@ z@n_Y&H3p*U|IB^ErvGk9uU^6w>U8nZ#%hzZsOBr#qq0h?6mN2R*7yv9c#0c6xFdKE>G!a;$xdvfKDM?R#o4TP$w*6!Pij@(7p>3 z@qpq54?cTJ2;l*K7?YB@=q+NTNi)^-u-1+#HP)|z&7tca0oM_QAIy3F;>ZGZmWfi2PthiEsTw^8m?Z$uD{o z4Q!1PMks?ogZE_=Z|nQt7_M}^bKK*q%=N=2?}ee!Z;(JQ@~P>)`w%{yts0cPCeNT;Y9OGY>IhaM9aVBW&5qAa~!SwBZt`=)+uPfT%%U$&|t$aj^>bP{~JC3kLo- zP9Gcn1QGr|4awjE#>|Q(ot_SVRZR(N*Y>!zqbA84-w7c-JC9{0W%HIS9K0>H6VI5i z{5leufo(H4pm!p_W?^Z{M?S5*3#mt1gx_>_z-bEcddYMBWxnpYB}eC5D(^XBvq(v;zU z8yik>$mEmuy1av-gP$4eGL<*|?+YC0r2eH;d?d+ev7Hg^uG`nqEm&xHjVH`ejujq4 zo@C%?337RQUyocT;3pS+ATe05i7qBtw}soz0wO}`1V0Z3LTa$VhPp=I(1?8)Zh;o14$> zmAi}=XSK@Tb(OY{!wPqPNgJa9RL$#Fd$UkM!i{uUUK{iBL6mB~%XkSaYfbBU36^RI zwtL#%Is})m6iT1H)?WdX7xHp=rxDv+>a~JeaWvtyQyzJnKmm+gb=< zk7Of^Z=~oKyq5R*8F1TR36m|E-d$!PMu^R&YQDO}xJDe~WGcAW@1f(F52qqtlC-H>DlR$g^u{pCy;mKnUjU*mhTJrEI1=-xAIS{Ct0{VUtVNmc*C_Mze_jjaxt$)$> z9<(^_in->H^<^QWfu&+t(FB}ZHqeYmOOa}d!r@^>$fuSq`1&|$<$y-Ef=nX**4`_XcB*z`2G_4f#5eYn1dU7mw(JANmx&LrirBu#Zy+8$9a zX4VZZegJ$#KB_NiMHlcrKX!fjCsQ#L@oKeZApq^{Vwm$Rt%_$=P<{H4@pT+q$2Lc3 z!nl+U0nO4ae<7GuTw7nN2Qv{dwpBAFuddrEo{qFHvl0N!RgBWDp@S5^ZijP<@-m+Z zziS-c@iQ@ZCD@uSX*}OyDm~sLnGR_vC1nU{US#`BagQXY^`w#m*NSOSW7cN-@cR`* zgslHp8ln4333RNenNo^h>S3IG5+ol}32!+whev+NY-)*fXZPyFvK7vVpLwIys7~Q| z1dUD+39H!l>LewGF}S`Hs??7>sNDlT>AkVb?_BHQaH;5D2L-2xqlKqg`VQT z#6KocD|;1GFa#e`ynF@i0w4$k9j6)F4T-;kV>Q zE879lVtDR|6gS(Amw@q+Q-PeY5+5Rfl&(Fgz$2ZEyeVprf|=OmCE$!bo+iExZfdGp0P-d?Eo1p z_?Sq@^&x*rSR^0_saWC&JoVUNp9Zy%Vp(^YcaFDy< z`kSpWs>b+>W?|zUB-4j2Don7}0_zDbY^Zq;^Bw^>PqU|=#5$p;Q23x_t6@PcF;$b{ zCz7C6`YB?4TC1@{cQL{*aB=4l@F=;^yZBR_2poZ1VN21j4CoT}`)R%tSUyiyTxH??#2n6tfI>SCW z_9w*!OvR!4mwpz8)X~Jw!3Hm*tAb}eiTL#O6Usy>(-dkA7z7+G7xm0wJMHQV51-MC zsE03V+ZN!EcHOt}C|-VFEwM>G^G=e*$B27j?j4P9b+GVGiCJ_&PYS2aBhgp9uPsBCodYFODR4Ur(zR5!($U>j>sa8 zKqepyAKTH9+LCoLC&*i-%kM4LWo;g$WsZgs9%|B8Y)Y;q?X?|ur3PC@+-okfZ0_?u zy&f`3(shV;EkX5Ox)pS1q<)uuIUlB90jDqlm2 zi&mhvh9R=>`J-x+c-Q(f3>nY-d4Z`_WCsg~0*W~*je7N$ahK$R*+a_jmBQ```94hn z?x`MsGhD3g9sC?8GB59vrPivrcRDa=9|k_ld~Zt@;>^@SYq~Eud2lbuIIP?OGM)JT zL9{#XuWYZa-%q`_rn6p%0`mPZ=6|Xd4fs$270E1=9eHJ%4_p@#p=3=Tk(LA4!K_|uYX_;M%Wq|yTfbN=^gMn5(@Z^R zF{7DsYg;iC=%uu3j;njju5D@l;d9uHVare~$lu^`t&Mhlps@eiuJ(`%WGqO-M({up zR4jiSbT1k#Exrv3@GNmB|0NR}nKvAWo1sRq@_A5GywipGRbT{t^edx?7h2kH_gPDC zhjRgu7j1>CzH%dgLQ@%RQxr3P+9nBXw9$2Y2oUHv_ym~kS9L5qb5jXttmSf2Jv`4Wb>?z1=h8=gWg}JSxK(TyxB!sB&=g*&3GViY>?gGii9iw?VM(}n- zMJy3@#7Z@(?ys407N##Ez~?pp?BQ6kUD=UZ{Y^!(gP3bkVfX_A;hrnC+q_3#W}|IS z$MJN6*iCZ5HN(1+cPWwg1%)gX`kAP8cjul!E(!?NMtR3W zUwk`Tq_unU5HK3m(MeVb^e*o`tN?yI_5b^E>h zbznj`g&}%K97C}*?d}jAHUIr4Tmd~Vw^tuaTLU8SX}%l}#Jy^tozOIUMbshfPjXvh zL0y~MEqR;aYvMu%16^!yHGI|uw$pZpgG&a-$C}H7ONMH^rsMX4{xi9Ljd1zKrmj-; zZd{}v^XqOboCEH?n%{D@kq{M}dbohI;6>_q-KR~j{ie`vNu(I^-Kh&TdO_4K$jpr$ 
ziE{w0o9~JtNGm=gORtr7zn3vhbFJN5XIBBNVQp>TBka4c+@Yg`KF8bkz=Tj7Su$vE ziHq2 z!$4M$uTjuLVn|>GjV&7SzY!er40k)?m#0p}#gzV%2zh=r4}}ObQ?;eskO~gTB7L+Xajug&`!E6TA5TP@9Ri3okvY9VrECOS&l-i*6!<1qzW zC}2%sQePu2KEpUEBq)+w+c?*196W-4JC3fs>FUzFuhkM7Nq%*MAKZFF^LZLpO>h+n z2PbrMYV-T-A?oBZwK&LQUNP~5BBG=RA&C>+ZkcT%0u4xIk1HK6*-mdbH12~r^y~cHAD@t27f|o1!_#U#gMi?aT zaIuw{WQ+FEjK8T|3XgJ=_+dm4Xk9K<<03tHg9C&EQdC3Lxdg-jDvjPV@)sVYztaeK zi5-$Oj{jUlY@0spZOM0;lha;4p9j^$S{ty47y*s6kT2@TKbC;WOn#MW=3p`tW#|Y# zTA81ara$v$ zg%9p}Q%mgnKKHcLU&(shx4!yzBn^%J`-s=<$I?AXLb=K>!;f}kdebeJ)q}qj#Vsq_tmAmumiVepzcxhV*PBS+i(g$2;Wlw z8@@sIwL@~>zmt7;JA*kpbp2mAIB_kccHpT+I`E(KyZ-#MCYbXc^VM|0oOda7gcsx{ zP$<;n>fGDTLn-YUBLKRIm9E4R5hM|3llI5AJM09nNf(0_`d_JxD^{ktIg0Fcszq>u@Sa;fSoI zCF?<>x9{FXzv0sraEGiK$)%K%oPgG2ot}J4Jyv2nb7Hcb;B*!_xi}l#fFeJHRFt8s z?W0Ni$`2Qx-tZY_?#$HWoYSvQg1W(Av~hU@P`WVws;!hE#AQFIM<1=5r8rcS*~j_xWuik=1E^($tEz9EtgG;+!oSZmfsH%?x0Y=d-j@$&8H-|1{m2Tf&Q2jkg&6HIc2#6K@5BQE%EhkfdARO z*eASzvv&V6ohN03A;2bRn8`?DV*qJl~+`iTX`X_4HBcfgi41ENC8yovAD5V9xJ5yWf# z1hy6at_0Ajs)$kNHZT^%pp$)-ec|&9ntN~_NEgBJVl&F!bL|Sd)4xLa=_9E-!9V4G z38K-V#89dA_*rFq0rD0I;`?b#-#}35Y+Ki)3sQZ_<^8YWAlb>K=U$K%A zhVb9U(@i7${J$z4vsI~9Iy%@b{AgP_Gv|o?Me!qq1r8h+6gKC0JvU9gdh#2~Qt6O( znHH6>u8U)VmyM4vee?2z{NLup`MQM( znta2&G8=nC6Jnf7A0LhH7VT?6i0*# zBc(Dxm@heWV`?M_Fc|{jF*ArzV93BHfpzEz4cf!w#En_kgp840P*T6!TimvXx!|!E zyZg9X;7vaXk?Yf(0VT;TFoip;Z1PEGcej34xQJODVFZJI2+}I?_3Or4_)?ddns?ix zMhpT>IL&W*;&gSyFCGqYX(~Q7+1|YRW3&0rUxeq50%rLGU$u9NLcPU@B&;aF;_3aU z(U!l4hAP0RU_oY|B~-Bj$;^TZz5HVbjE9J5wPd2o3r-8gtR2Xy%atT^ZQNi5#LA1O zS!!}+woxH?f9`?X6}c8-cf$JHq|PL~+w(xrX#0UTcycp5OGRngwi(P@^gP?QOTX!( z!*KP09?CC2Ny&NFPm)4PjJ)NO#5y`|dkrQ9Kk&6AoSck^B=ir$ zgRhT@?E#4}aACI;NPhws7Qp~@(hLDBWskTezWP5dOf{t8Fpp6Uj+jO^$FKD-cka_H zr6D3oyO!=FYU_wD<;~A(pWAfo({v-Y3&{u1@nDDVXz1t)2i17r+~xV|@YEKCFteOek(a+`;`P^}DbUCvzvM3|{IS|6Pe@A)zI ztF@wn0lokK^(9oo%Iya4F9TOMr(aNWH~!L707iMe{ki{7FV z<)#lIy>}j%m=G3@M%P^iLI??o(tXUA73xw3Mv_4BOqk&=W-FY#3`m_NA#gmy;7_** zp?FO2-=YeHx)%+vaTESa#r<6c5MCJ4`e}gmkYe>L@d=0C^Pztc_i4Cu1K7Hmg?4Kd16B^cIYIUpOcEeeHj`t0l5m8M z*rQ7WuKZIgcPVV1DoF6Y$`A3p1wqNv(v270an^sWt0)U#;qCStdM`oELL%l9AN1?i z&G)}mMi#8>XFczK=J`3dWe@!SLkT~0vLFQ^8qJUkdNwHzLoDN6MW54gCr|sBo+$r! zH4T}#Q)|LX0Tqvfc(>R?S;h7wXDlpLz%Kq3=jc1Rhn#}KdhI2Tg|sMA*~L^=l@-!) 
zS6Oa;b(!@qQQW#U3v{J#B#w6Lu)@z}SBGu%(EXV1tbVdD&w4{KIe>ox`b+RiAwH#) z&6r3JtK$WrSq66j+;6rUn2g#kdxigx(H8`gRbP;depp`H?!TOMHqLCZqZr4gf0jYY zXtqaS0!(gE>!Hs0U5K+FqS||lR_BWufBP?gM{TmzZr@MSa;#L3QbY{JFI~=?gp{*B zLdQO=LH?OM1T%+cU+f4rLI(Y25%Cb<5ZKjG(Dkj$gc@Y8u|Ip5wM*WV5D+CJO$8Alc0kO5wl^M4x z=8k0tr#J_;yu7^h^(7b!U_-Q}jg14_>_yzu0EiY``(EVv$B+^v3ZGgk9#28jvXhGO z6Co8LB_a7+aX}X*APC7mzJDScdeI*6p7{cZNB?iZXTBGE`{1ayw{M|gqv^zX+>%r& zg~kj`pt++{dxD{1G&HKwE7gB^z*ZsVgM&tPug8qgprI&bteBb{@&^H2knUPzqG4g+ z>r_CDUR{%ooS288%t`%3LH1dJ(N$OYpXUA}EqnUxq{#hv&P&E>8gn#0W_|S` zcEZCa2UW#r%Wmn>2w^cnAdpdCUmbs{lfxsv{9ST47;Nh3DNc%p^B0^72Dq4bVG)%4 z_h*lxM~5=q%yM;4^T1WV=(FjK0Hw}<7pl;V0AChFIPPnHF#?6OCI;Su(ZcYH5oe=_ z-id&d0JFJEVWBte7o#S;2-U`zfABzEChnN0ji=|?_{8gXA=0vAR)A)I-k9*`!#;fu z0fQQk6G!4G;D?n|==1>hPR{e7Fz9`Fe;PvR6zZOTI12{r3|AUG|oMApxke*&;d zAW72=BiNbB#BVDpi+xPOGubZ3BMGXzKTW;=%3}TWy8l{a0ZxYjvLGR1-aaHzFm;ew9k5+wu z`+y1K(bdh|W2&E)^wQ;EfLz5Q)5qr(dLgRYjG|6oFv1lunkXFFnyBTik9-F@jxC#q zdYBR=?Xu}9NZpj*j?T=LtkU+~F8{7b@RTp6t-szY(ZdsBWM+OuZ2~_qL06%01xx~Q zX@KIsE0cbhpB#hoalh)3v<{P{y_&=PpR9@_E6*xG_(QmE{89lqjo3R(f+NE@%=$tQ zBl<}CL=uqY$D6KLqgqFW$4#jH8bD|??Ev=bBe}}p8_DU9qr-@MzV3HB(~w2g(5BiT z&uG@apl{L4C_~&2_`&C%RMo$A=f(g7%5f&H;u*n%HaG@>CpO&OR0o5*{xv)gXsxiY zu%Qy9o9}T(A<%a|C`oNi`DFIH$u3_hzZRF&C0|GR0Agl$wJ|E`!B;WwL>%HC(J!r^ zU0e+4bAhx|UX@*54YK-RVTTm34*#|rDEM4Yc9Pb#_cF`tGd0x}!D_M8PD)c{capsC zzkWJy(xV(x|6>#U`5!-OFNZ+fy+#;z!@w;-gXtR_@G`AD6$A7Q@R~W16g?%}F@q?7 zcpSa%wHsq-Jwsjp?~1~2`BI-*sotCQSg(dt;2{4qwp_ATh3IBL=Tw0~s9bWe8xoj- z%?5gXyS>qrTK^fj+qZ99s{X}ZiCM5YKWgb2P>2tbbV$Bg@?z}lDp~Z0z8&e-*q81B zxs)uHwe5)w>dverz7pg`02P{IujJJYi!7}08%nLiDthQ^%$$(xn2N)ZVr77A_I2Ph z`BYvz0~0t~VV+cA+9rs5n8F951AyZ7A(ILl{00SSHsYVOHk0wk6q;esho?EfISO13 z?VxUR)9ySdon#UP>)tQSRRlds93J?C=TDwICxES2D0ep24?J==W*`H_Gw%LWWIm&8 z`ymILQO(ZztqAokAg?_?yib$b07VeS^y|L@cNk9@d^k3R>1VOGICD6VzMCp+n~o0G zZni>a4`W_>_r8P|w#U{xo3(4S^Cft(+w98)dim0>W~8r$Ko1Q}d;&sI(1+QpKwXDG zWI1L+0}Kp-JvSkgNOQ%5jlea+3Rk9&p2dcY-Tdt1{w?^N;rF$_e^Tsu0}hl}iRF`J z1hD2aS@2f|EXa;ZeSz2a7|iPd4HIA{pXy^4(~T59h%*lC8$yHcff125h>icM%Oh_n z5^K&Pwgn#~}4?M`mqlf+DcfkD~(Xnx6 zXAcqG+QWl=eh`TTNh=3$ox(PkWc58RxRp7o&$YHON*OiqXHOB@{GW~B2p06h?+<&9 z$Hvy0MavPb%Ee*H-)&dwd$Dc$kwt%O_Kq@~paNo-Lraoc7~3A3F^ME)nEw?(?=jnCA>C7--v%eJ-*cg))O=Yd@7=Su%LkTfXodH7dPECv z5s}NhuqbBMRki{Rp`T|C^Ln~x2aZ`U?PU~q##pu=RW!SZ)`fJ1;x7(&8)CkFDg(l$ zXy{$09r4J&{yvj;?{&V2xqdN=u~A8T^~s!Ceu{FD8XHOze7n2N8FS?sv%9!3`po~5 z+Sd=1a6b+FM+?x%ghrG53oBzYipm*Mo!tv*?n?UAXHdbdo8 z(V^8Z7Wc6)+FXY8^n4rc5SNnP3jV(FK<<`9+_L%cn^NSs`pL$YegnRhl)|ry)dP^` zcEoGcAnt>UX7E7HPa9LK%m=Y=IX3EB=MShaG0YLnCn3gxtm;&s&zgyIk5Q{-7ct7N ze)eRbV@Of%M;<)rSm^m+VRepVz!cYnv>)fbmMQ^}jcI2}jcrH^IZu9W22BcAp!zT4 z2SoG~v?L~_Xy)4<5zui7Nrh-0<*ZeFQ-_@6Y1e51wbT6}n{I|~KQ`k(KTUMR8g2Tt z*cEy4HP-sWU3EF5Pj=Myv)i1dm3CL4L0`z+Lz=tJhz%Y~EgjS}x7L4V=JnjU>@Fm; zsb_`z{$xEkX0bGtj1120ekW%ca${A#?@fqU*ikv#Peppv<%e1WrrznwP;cy z7(4)uY80l!is6EmG0mC( z?y>@2OL%5TWQ+B8=QQNt+g%(`Sim?`5C$`Pv3u5sQV*|g2UY|)*9>a7)x(O8&7hZa z=*UNR5TI}2#gDDLXEyv4Bxr=bK1w!U`x6DP)b%gwJw;v6iZf4dchv5R!eeY|&0lnQlftFkhw)B6 zs46;brx?%oJp$8x64$9%fe(L>QtMsCHi7m6fb~N%=K{ zo)WhP=i%mSniEhPQ6O05iD{#o=Che<*uYfZSMm}-q3Uf4N+bo9oL(bf6%U-;S&w|0DPip}^JFJkJ4F^u6SbfWELxC99<6-EVbz!osrOLLv%2tQ&j+%GA4ye9V6E4-l` z+ajR-EXf=iMmDt>EIguXh{UO<{N|v*u)=r9hFCr>oy`D)ZqVU3tQGOr6!%X@P~o3R zMHNocm%rn~?8z@CJo{(MpZ3do8HcEbvn4+L`X7+_5<_BQPP(0$&ME0HY!4qU7kTrB zzkLN32zmcbU>v+Guc~syvwF2K)z8!vDK@Qskv8x`)YqM3{3rf<3YG6F189ylLr{Mk z*79tZ`9Y%C_^apdOpTX3dhfImIpMP!W4r{gPpsb9Wc8KGT7Ng9n8)FxXZ_L7fdVg* z*_gJ(&`2K!dRKd=@ z+*O#~z-^2gdYhilDQ_26K!zA>X8E94sZA1f)SCbo?1-&@`mg9q;bl1^Dho&{aUqsHnP|!YoeKk&npP{Dt3w 
zc6A0D1%~pIf%H6*6;Y+XbrR7oXC#C)(cMoO_DqXs930sg=iW=Z#hlFf5s;9#2I5(e zu@?0oysJ`PTzbRaz+h+MU|2ZV&J+3Kt%Z*?wn?-9qEEvgeKH(D|IM zV4-WgmUd}!;0B+l&!4HE@;cCnL7X@$XWiJgGR+7x?FO2VPy%^dZm1UO=^=f?M{3 zCBz8Aa|%sdoT3m}Wu#~6$;7b$E+y$5rvO^|_C}hQhBf+;Mc2vlh?6ztjT{ZbbC`&{ zzmR3-i#$5f&oVTk7L=to1mA5De2i%=7|pZ+f~UA&y(9tO4g*opfNvPBcW^aix);pW zGjNjWx_@*ZsSF+ut*d(~wzzxaS!2%3jPqR)<)#E?y zf7)c{EcUPe@R}Om^!L66bCh!XS(fJ?+%e+hhE$C_-W~&cH1cm9{++xaD=p~%wk_JVWB;R%cXHNt zI_1mXofP5uWLQpiBTUO*y!~x#`?FZ_D4wXVoD)f$me`cHzw5*BHrh}SraS(-cN*uA z7eBzu*>bofy(E%v)7H$!ef!g^=6AQ`dxL%q92}w=ODSNqAFc>XSc1yN$ybJ+d-%1- zkL&*Z`?t<-26~nvQE?^9M0H|{pr`uT_<>#-kAxDoRKg4WJ24q01V2>&?)mQQ&A_dW zRoRZ$=)sKCFd5^M+RQH*j^w0j8bS4&M6;_K((HIjrrLu;wDh;ww~s1{$>oIG$`*90 zG?BadpPmbWNj^Ak4oc8>OX2@#;BMV~4XTiofm2bM( z@h)7)jw5XArK+Q{cM)Q_N+UY!o6_0EZEP>Hhw$^-Zq&Y%dKz#$nQ_zR*3t%%i=bd) z#$v_eq;?c)V=VW^^{_NC6kkcP22G0>5sqFkIb@krRIL1ecPlV()9>wL{*mMPCz>-3 zDps1>xIWfcl!F=w&4v#GLa-eubUG5RW|Z!%ZDUpAd_R3Z{hc#u0bY20qJq-=t;$as zgWDH20sYH$l&;ftgO&D=2R0s>5jiov(LtfEpx<2Qrp~fl6s8CHl!%~OI4MmWfjnc| z^H}3jvc9%YaNG0<1&CA}z2!LHXHx_PD=igsgMWK&30~U6HCnbrOss6l4hjEhXUB*_ z^}Xi7=juhFoVp%$K|{ij^;=_wnqE=8ANHGxQHU2#m%efX@8gvT2!u}exIB0J=qwB< zcOnR(<5X0C`g=-Ve(%hqsCk^tK2$y*WIPjK{t4!A7lBsXm}V2m5xK1$v@IQYe0-BV zMQfOTO*d0yDr=#aSn1pZ5B(86`ub+RVYx%$zCHsjOuLph&o8lG2u^=_`U~}tf}R$I zN^UrCRS%)G#FL`1R8hetySBzATo;57>OA0lX)V&qDTstXf;8p&d${FFkXb+Q&tb6S zo9JbueBzGd&`7D57st7_&&=vS|5;skbW@+!slYle9M4`p_kJcNo#lG;Fn0f&;_b_b zt?{ANiKTdgaXBI)nqUq~vl-0POmbqyCg!yCLX*OImX_QuNaqDEB z?o40Sb(noEYQo~Oi%(!bV-v`mp<0SpFk2es`BCus7hH+M4L!fb^hD^6^gfLE@P85J zp_$W4X}0gBH@vZ;XYzuO;M-0<5~I}R#MFY#EY`_wWWSLMrM(VIj7^ zIrX3UN^3(t(_OX#|1jb$?8u}KCm&=#TZ=q1tBt6v*a~Oj=J&Edp~C8AQZ%-^`1K7P z<0k@)<@WZCOz(YTQ+R$7O!r6iv&FpECJPXUOTIxbr2j(oX;eOc2#4WY8{dJCKp_6^ zrD*g!fd)%Fu>@3emk2FXq7q^G^u}m4x8$4S`oKtzGXA=3FJ)Ah_ z7i{#PWO+tDaV#UD;CsH}T-@v7{%2*hgDoqG1{(3S(8BS#?E%mvt%Y}_t6`oYFgxKE zABJA2)g*)#a*j9h4?*7ltWtiAcv&z{3~Uv2K*WULFSFP!Dz$S&&jT0f zq1tipspb1^hF&O?qtOG@Zw@w5;MROcxB6jLZ^GV)YA#LX-N~h-6w^dhk2ryE$9dCA zwb#N%#5Xe=OHt!1tNpsxL zANVGS(oW;#y99kG)ZN9EZU(R_cFdl#53N>4;DGxeM)`yUmAJLwZ7brUT!>}|9TPsw zJctEb#;VWSW`Xf2RSnOtzB5&91A{QwtxBg=i*hm+S69vwgQ@~B|EfO#+2nIjXnNzD zgI!jilJZ5QwXEiqY_lm3JWNQ#m>5C}i)eoT;X-VLl3r7VBd6M=MODSh!9ncSlPs~* zFp)Q#w+KROz%s>7|8zkrz=ok{El*{*8VO01@JtZ&!CvhOJ8b`~Q#(6xYw1X8W<2B` z^90kUq_Ev;f5Th8Jg>synpOYQ=QK0~F!0eOX%4cMv$-A%?z9`c)E(@c&v;wpk3Qjt zCnR{T+;gbS%VRtmWd5dc=T&1bONwaDb{tRm*VT`O=!MjUqNjeXD3mkw3`IE@4GvNp zO3usQ>w0i5`HU3o@DWhb;B$f{A{#x$LExeHCoDya9j=B2*gnJ~p@DVE%GBPn-k&hR z;~8&paVjXn{(u)Af&a(SP06?5O(*$Wo=eAP&Q(*xLMQdc7lK1_@Q?071LS+Tz+_*@+78ny7w zmEgUH%up9)Iu)cD$0A6St1=GFtp~&rU@JB5omugU=ym z1__@jchF;I><$YQpx#Q>DSEVd`cqAHFwo&ms0#Uyo8iUg_0g|izrHqTvg^YGUaZ%a z(~mv|iwmn1-B{Q-46p$5u9;0@j2U?ekCzE9i=p9?^q?@je5&~Rcs7euS3`qfYR7~n zL6ED=1VKj$f8!w!tJ8WjVo`y=Z92*Lf7p72%}PE%*vvTMPaNfv_?Hk7nN?F(aPaE>=G8T5oe4h78!(17x&iuo`Y+boMLH z6q%J2co|l|TQ;|2;9g?P8@nYBosq8cz<;9dYkR~Z2tAB%ww$n^Ea87^U&M736&l;D zqj1Dx@gJR2_r353QWwW&zFDv9MFN4zi4Q)&m6v{O>xK*|#a1lp+H&nS031figBNR9w6tN)$PezN= z;W;T)zAbM-^?8-C2l(pF+$V?SPBMh&ggb$d&`yUjzP_H%2N_rjBUze5HLIqkt6`!V zIujC67V19EB{xUpczh<=N{tEHwb8i*a^y6TCt*LCj#A)rb(R*UqU9ky-|`*X@_dHG zxEbCGgo#q%I6QnB-3VpiM97gPaB)dN9Q?4tZDQah;*~ZHZ@s;D^qC2*CB10G`0CRc zG7sfh8MhNYHEvlzI_Ir3`uov!aDacRGiGrHMR~U9NEdV3RM!n2;9D5X0PlB?hDz(Q}Ko3@rS8@V{PQ0uEEvggZ_FIG;G%bqFV7{0ksX zAyyzM0+|uY8|^9x6u(BG7$I4;q5inq zmzMD1FE?&qJhG0`K)?jy;RRShLT6q8{x##(^N9>=oQiuq2aW&n3JDxKBYo&L5R(Rg z3{~27A3%S7y(}09QYy|A_~!I04FrzJjAf4papZa^y zbr_$Ie!EBixKbC&`D&$4>do4qeaK8mWBIe0UnincHmN>)B%0Q^h%PBQ9Y#6o^^JAv zzJ4_ARs0MMYqhU4A)m`aZ;KgwBEsQyb(*&v&b2kYm>>!YoAy95zH-Cvf;ghFfJ1US 
zdwW$it;iM_VitcODG5N@^RRkT?S2M@8pN-r;Av=Rz{|_AhKGk6JWgnxjM~|Exn_G_ zN=UOZzC_%fr++>SRN$GE!dPYmJ%H#KDzD=HTEy3&<)`nf3@hN!IX#wk=4)0$LmJUR z;a-}WpQtHn!X#bZF@2V|d`I_*&DXoFAmKqR01;5+r53Qu8E3=KXQ(Z}0P$5AN;^uU zU{UdhEFW{0p8bvF;A>hjzhcX%{)In!gSy(7BtqOSgf|$?uXB*lwQZPOxk!PcD@6`8 zj7+-@<%G#dW71(>YXnrmEsg(>s2Ux+$hE!so#qmvTayPweesC&^yo99dOxE?m~y?J zmj`_J7|QdtfyA6;;Qu$1-#piTk!Z>JpHScu)F$&X!dknO^ERo%usO0+y{Jlz5nUDu zQI(Q1G%|3!ftZM)lxP4JDi)7W5lYj5iibDeBu40&tjah`5zGDbq9-CU7jSX)&P%+v zH4;C@gi#`}Y<*S(Zn}K4ehq}Eh*J|bHJRU9sVL3jhQ#<^ng`57{uWw_y9$&2WH{sG zts(wtJ@jp$BqX7=i}!}fLFSlIS|i&yE! z{C0QTRD^9M(A*Ii#qlQ1<0!6W z9?tL?aXk<2go1o(1{TGJKeo=!>5qz;`wrPK#E=9m@YaT@toVzz+3iRsBhico2GYUF zyrg8GB;A*p^KOhFdpP6wu-saN!D>`Sg*_p$*B4)}n98COQ~n&SYkp!W4{L#wR@W3% zil$=ENiuM`8q>b<^)XL~V=>FkbKPkZ}pvHf?F8_98ytDXL8x8$wRD=-z zeGZ_Z9*DF5##%;1^$9Ajg^n7yD=i>^fw3wHlR{cA_>h#&+xfGW@~7{oq_6B9f7d;P z+uxzPe>F_b!dWtJiMY(x_`bf4x*1 zOWX4mE3oGWqX6!-gM-xccXhe=Xz`CZVrCCWNjSs9}!}thJC_R&I1)615<`a zL#JO@O22bZ*fagA4$4gRC3Zh$Ev*?cQP)G01{UaUY*BNRhqBfnSLb*8{BwMlRqp^% z2Qo6JB^ihldjJwrmCYGpz)wpSE5}(_!J$4*i_l9=%Yxkr%RlzOJAyd0N67QxHwQ`Py!-=W!T@^C8sCkS!(@1vn;tDvKI#GHGif-ohup$%kc-wM8CM5(K< zP~b$rZxdSC*owM-xKlMWO1S*Vdje7r^T9C|5Ht^PA$CZ#?B;D(z_}S5BxD>UGtJcg zFs}RhD;M5r37pj)R^8G#kt?pUhm3~@F8BaV%jw?kF5;YJKwmTDTXDP^&S>;y5uMwO zVIt#`Y#e`;cMs#b)hV=5G@3^DRej~4k{!Xy2sRcH>>jtID{N|9`&Y@xsGbN3qQ|eM zvkpH#HjqOWb@h@~Qc_AJeAO3p1>`yoQjMa|*3A8E&D`6KD-fWz{vY9|i@LS4QMU%t zVStQsT9@s6N}uw81SKQ+wSK}$lQ`Fy)1u|Ci&U?!rdsLGw+aMjO-;*`t-~<_a7aly zd#4!n3-$edK!^uJSLtnEK6&VS1Jd=!Z$7>ksHI-+VM8)REtQKTr|0TW@RYb#kh0FJ zco7f0QHQx0*S)NhOFxEyhE5VHN0&Z*dp?scKkN8QjE)TRe^9Ngu;>_gUL|Mw50dHr zYEG*S!GK0DkyYS1R8?a(&$e72yHEhqA;cj4Q#u_p7s5MoEz{a&w7$(WN!>;ISz6Vu zHzzocrzZ@=D`YRErQd<2cTY&|Ocl+)g^ZHbcbGR5&CV$iWn~f)VS?V=aP)n83!pD1 z&vm2;e8ds!&40W2eS!AY*4Asl?nA*qV1s!qFl}4=^C!&Uw~f$9j#!0y;G~m`3@S4- zvkR~k)p5$URV9#-g+)?Fhd3)M%f#9`%J*#3bmvcji?_F|oE&-rl~_V3`sw$&R7p-# zz*LbM8%epqMmlwaHqqotNFud-jpzcB4yIj{V0tc4@-FaawE$O`t@ynee~qyV$d_bCPlK;V&g*i+pc| z3$}cf^l7s5X9J-YTEg1V5^KoRB7b~IGxaXR5Za3J_k67CKZ5bY%st4~+NPgqhBKJ} z&@ls1vpn?~HF*xMs3=}e9ytHarJ2m9Ujo{CCxB+j?!yfrisJ$Z7f}AX1*TwH_wqzb z4I1$Z3Yu<|-;A3*++GOQ)%cv4(YVwuxZ;}{AV(QHZc+n!aUSS9Lf!zPL@!u@02J{v zc&RvBdn{7c=AyPyxp0@YTb{o{i=ku>|Cfj>J+{B z_X4rOf%V9A0bwT^$3`^K(0LR*K6+Er{c%<$<3`f0Q63WaTb^CB{~)wofSc?l0S+

XzX?&gM{_$g+`V{c za9u+o)VTk2`k`Pj?bTs#!|+^q2DJh1Cti~PP0TkMb!GvcEPXRs{pyF=#49Z%r&-m? z-qN3jefq?}XIkYlj)oLY3zJ{>8BABU$LWaMEd;>61kjJ%9{2?78}Erzy~vN)4B|KY zfP!U6%F*5ZVvxY$47)>2*c)9D8SkYF5qwlC<@pqA0MBujj zQ^kg+Cc6iIetr``&2Az3eryc-LD>_4CQji+e%hrvp}_8CuM+3JcoIeqj+tWh+mwt9 zbOmi9P%)O9s}zLX-ObJA_T1LB)n!HELx9E`etw-FgJz>Z9n;`@_QotM3=*3DUfRU1 zRmtj%lvvLm&do|LtF8uIdrCW4*C$M(<8KcNe7JOnS`%; zQ3MQ!(vI`Z3XWD&(?!X{BB%yFMkbim_iTL6NlE0ChpY965FlC?(#&TJSQz-eIws|u zd9K`WjoR!qi!nh`_g9|tO`e#zxVQ_^w=CM_#j#!l;J5GIY1)byg27;PZ`oa1go~2B z@yW^XQPDf4JV!j{=SqvGJvN+x^AMo5IUs6efpjE95n=S8wcYzLFL{H30;T#a#h&&;T>`;FDP(>T4n13UP_ ze*Uiesqc!?54}AtG3+ArWtF>ek1%6N)bOAuzw4$_V%1BJ-k4R2-?cSS?`3$`ExpUY z0}o>Z+w`UL%h9}b9RuflA#G@%PB5^#qL*E`c%@y-aFpeUlfMr39~t{oTm>B6T{o7I zuMJG8yB68gORKW=Q4Doy+fV0qhj@AJ02uOr12+Flxn$Cl&iNt`1?9V^8&y90izG&B zidzs!;}DqZBXx?Gm+|XsPX2I@!7}e z{2HMHaeetipVRCE-3YnRn;+jRO1g8U<2Y^q(ErX9BAS|-YSQ9ZoM_bj=VNsthf~v7jYP@Z;nYTJ9u2U*eNkg zS5U>oCNZ&iAT%B`P6J(+l3E%_(zYTNjRecLqk~7(Zq;&V1`^OvL_Cxt=>e$xmFc_v-o<1tyv5uyzie_^Lbg zW^_fD9Hz8B8t~uniI3AVZwX)fO+lCe&7i8lLMS^XlO#Z%zbLex?4 z)7|y3b5K}!6!xA)r|jSPHrH3q?0U4wuVS64IcQY#$elU#XwBY{^Sg+nV_2}!1P0ws zNg|_=rbxb7`|)nE@!t z;Wufwvw)ZL0rY6d=vO|dVp4}-a=?S>x^iXfSE0tXxK)`OtExp|SiT}|q(goZ@)QIY zooQ6zhZjSqL$;;`bKAaA3jF)$vu-V>j{9)6=+CkJ4p37sJBje?I>Hz7|GSyyjVi@n z*8R8CtfkK6+W;vZUao5ULbat&;GezY%h_plO_4>q-$jC@FxCpSo|vS0(inGD3JckH zCzB}~P(qF8()F}jCjNtbpEiru%5K4s$h2*ICID6;NtcH`M^@zL$Qon2to)Mtzem;- zUMVOw)v%YB7c(r!03DFBzd~!Lfl2uL3r#NIB~Nt&BWk@SOldGb&P~H+FA^eT%o0ax z6qJe{saFuwrsn2O8+Gx`)iF+>BZmn9bA(N(e&fSZ@P^L|}x&%X5 zk-?h7KC=ujMjZUSDvNmUCg{NOg-ei|^oYY-TwcCv#F02983jcWy7o3cBov*v zyXSXDAzLmnfC_UB3(N5!M*_*{!U!c%+P`nEzurD_qCR88$E4f+q&7x&Q7l7gYiVCa z;$K$+jgw0}o~{B>7~%sKzw0V8IXSt886xgeTU#5c$T(2Xx~`$ZU@+j)E+_th1XGpt z+si2{r=?swym$uGSurujalmcom%Q-)uhMc*ct>dqUBI$e_`B)&3#cAaR9M)s8Whf8 z@LrR^s?-3bzz?`SQ9_Bp(b3d15{6=N07nrZ^FgZ6CSwAdj2hi6`RYN2 zC=tMXEXa6h-C6gc#{0;~$KM}w=B44@dleP!SKKTtig^owj-4i{@pj@f_Dtn4n@_{o z(ATrQv?BBxfY{vp8I$Mc(?g9DH(42Rc8qjYVXnS; z5!JaEqf&k5U)`<6*9!~Z@CylV`+Z|MjaWfg~h7vk4PqQ2^D5 zt3rEwe9!RCg<6Dyh}~#X05yDsVuuFkbRa9pqB$)!Mc8XsfR;yj>fyC5hzz3^v~Ou} zvzsNWCIjCs>eqtJ7Xzu<$@p-egdTO{{j5oizP_V63alqKF4K+koU8BV6Gr4>`aU3> z88*1fTx--2t(eXcx4m+E`(xjXL>Y8#io3pX z(oaQ%U0hDGL(>2#YzkjYs{d^Z^q;YKEPw8TY`?&>=3+rc)rU6JItVBWmP)l$aGbii zjm^&5Xv!2{G36E`1|t-0r-{5@>|=H z4D13PlMxuA$WF8h9jyqj0EW7Ca3}*`1BSOO7_K7;Wwh_uK7W@{w}??j#2zw_Q&^ba zF*QYSZd6`UGNG#k_oA3~?odCmh0fF5Ma*5QcG%Yv*PXQ__nv2wH&3-BSi7(s8vpZmOt!2~@uvagTb^XnJrReYgrTHJ zLKZ(ldvvQ#r7z2(;Sur;sq9}LAM^DD6JcDEL{+AK6Mtc}^-Q|}2tl7J=SxsivL@nO zJbAo80I8NM+Od)iSmy4!+bR_;+i)vfg}2EpERkasn;eYT9if=+vX>yHxb~iIh8vKH z5cb8x7CwG408BS$uB@z(F3{u@Pft%X<1>IxB@m@%!hYw-;}-a3`>bMA7D#A^-#(7O zB$kL>vCbq+I8jjCY{B*81UHS1)YWm=z!x(?CPtW`6Bm=g?(bRt>xLF)KvM-bnHShi zgp1P$J79}`d_@3eRPM?a@o#FhrAB}lLZS6Wob04&s&nEPAO-~m?w;_RmMcnjgH}7v z0iXA&X1xy*j}mz3fTRau+rj*;bf^MMQckS|Ml>YRkQnYO`(_> z9&-%nB{gY$P~Mv8Pw46kdhIYs3h`7f7D&X{*a8s2j_{_qB#K{MwT;GkbyccGx0CNY zYwPnkIk25Mes(wH{mWJU{FT)Jqu(QIxrPg>URkEP6n4i4uEi#W?r{&gdAPo4=wA#}?!hS7uh# zMfzA9!u$nZY7C zV@!hD^(kmy*Quf&lb#S9FO;|oLu?cTP_^9?V8%CEG775`NI1jh8FJfeYc=xJ0f&0A zH)9S+6^R4|1O!w%FNs=MSlC>Ac#quB1jivDC>YN!Hy*GIJ8$&ZNh$CB&AtSDWOwiV zF&wawL2Hw-PBonK7@0kFGyVpMa%r@T@H`6R3PUuYR}jGhcUl)Z*ypf^z3lqhHlo&?#-%lp{>bNgFJ9-3RKIx(0V=LyTr$BKpN4IoKW`OmOxiWIh05<@~K zzYb}a!;2e;Garyn`o4G)?`%Yk+xDBP>#)JRl8C9L!pc+4JG@`NXZ$?1*WkPeZYK+FaE(n%h|-_6Ruy$Q}a<9 zK5b|F(}VM~<_OSS(z0ZzTKUD0qk(}yPYM-J7kk)IkqDQQlk0f8AAPdPSM2Hn#N1qW zCz%}nRZBNWrW{V(+$}wc7QW9cw2NZj#=i30n_jtUe`>vk8efk+_dDMUUuWLXaT3E2 zQ7|rynh67YKX3(~`-TI}BY=Ctj`*2DnR>w##c2;$8(zz;4RA#F=do;CXP_HTW^mzIV! 
zgq*5bnM4^^XcLjnxIYyXi;KISfTZS5bKh=L*-m`^%A!qEVM*M{)kcrXKl(;{!|W&8 z1Y^t|Hu)?dE;TD+Cn`qYa||_l7$(7B^z(wDXau|vmXeCQA&K;!14=h74;U>UVy{?z zPM7F0kQDy#9Xr1A6?URfBTccbLyjQFZu(OE51YnCexvR9PZbt((NqFDL-b1*#{lR{ zp3jedN@(!Y@}egQBl^Z=hvG|JBe+vAjhRQcTPR$I&}?zlX3S#y{m}c?K%{@E=A0TRJ_=x~{7B zIbx*R!rQQ#`UusbJ#@Ooc*J&f^v$l&6a@CI-^ z3kQd|F_z3A1%XvtX0i%t6Cto*4`aZ+-v;&X3SxwTR8+ zrY$y4$goRzb5})DI35PMX-WMti5S0gC(glkDHGdr+6RX~y%o{R7^`;EVi4)YBeguP z%e5}IT%1e&ra0AO3_it^pAUubRvE8ujpy&W?z(vu)~;EOahPRy4o<6u8_^Saz*|DE z$1{?_(A9)~f6%`k3FOsZlCE%d>YMZOIUoS=nf>jzbhuOs<$xEfKMn-$i&qi$xmYWe z+*Qhd;AdKboRCYXDPxrjOjI1pOh99nx1M}(5q?o#F z3nU-dtl-_x2y-@jlw2<*S03YF{(;3^oe~qq`WAtT_FBIc(niyg%T^)tY$X#oZs@uRgxU8?6t0g@9VyxoU!-o%H z9fCFL8$Tl`0u#@g4h`0@B{J!|WJkI{;PfIntKGajIt4R>ZI z_xM|hT*y2w_EGa1$at6mr?Ve|_$O*SM%Suwk-W4YXqn~usTu8AtV|fBGtur zl~+Eqpy2)N?5t)r*_Ek^Dvg zt0nzVmht68R`rP!K56y@Q6q#D_SmPEk)$ev)Bbuaf{~29_D;S(jU68yxrRpxWz^d1 z6h<--32CwfD*nAhedY7LYo`V=7S0_+FWpM{C;r?-SxiTva%a5_H$OpsK|%qfRTplB zi8+?hxZ%3+qgv6Kw=C3l9GUln6m=zJAc-~~b_tRKA>lR8+>a`Yt648|y-fa=rFJ=v z@EZXcuS$$GVlt)a`l)A1&uNa{$T{7b#`wA5R#6a7K{5l8jS~&5>9nnjc?&IZ{{ z@o|ML70Hdkr4B)JyU)-sE7`snNZS!8?U=e(0au=_^>GX3_;oa>IFj4b3||FI-KBqR zQ0?jM%e&Vw4hxNJPFV@$9!g^5x5$9mrezSe3yxg5c?H-pV;O?njhF>r-~X5!zdDe2 z3mlVX$eIQ1SJ_Sd$Oi!?z?z9{J(7q?ys!Efn|(FkhN0XH#qGijMuu1(2Xu$JS9M46<{Jq2#lv!oA9?f_qm{;OO}P zHJ+J1$-p+=jXVv}1#QcwC&HgB4QDy@iKQ|9mKK^) zftoayWiR%7<(i)D-SV}-rCHYSIma9MPT^+!O^(L?lo_>0Myj(sHbFU_uL5%#oum|9 zo>elZNSwk0>rU73Ff-X4^D~>@V(H>W2aG6B5=d5yYfv|B_FE<+(?eg(>OJH)*w2?@ zh>@Ss(p0u>$(3Cx$ApXrjdj%C_Q%0p9nsRg4w{tuU|Bx%@yA;o$pRMcCMFkq6-)EN zbth$qtv8e{dR%hcW%(gjEoz=AaA9CSRmb;}0`vqNDFOIgXL9>-#9`W&Snu_X71KkH z%2=TkC3DOUD@-BoUz1;@dqziTMgvIJPQL)3(J&bV=2#>Hlc1Z78F}{Si+2asJ+ta9 ziFxTVHH^VDaZ&!H<}bdTs1W_s=Q+{{EJ%pecT#E;n}IvvFy=SC@0Ht=Mob=!+tGiV zP)XzIicN(7ytXZi%=+lX*>P5HO%gV-|FHPc+~5Z)4-2{4Kymc<@|#q4eZgd@v%*F? zI}ox1;5Vt!07s%cRI!U*vJumgXFy?|@M=fRt^9VzrtRrS>qe|RFATrELybBzlz?r5 zd$UJtxouV~*Ouu&SX&K0%7^%1r;DecaxRL{Ylpo(O(VxgSY{}8+xseVdWMFmE+r@E z+&JZRT>7q13BYUAyMas1P%d^a{kPczl?~Xm#R#?QAl$2^Mzalap}=xD8{F4QLA#J^ zc4HH_dyX@cr3QcB0!gMnG1E>tnw}LyNc&e?FpK@CTyK7H$81A7?o5xwzN=@0Ezt&9iU8T_ugZ+RvtemB$xEiK-&8~Z_Kkx#l{ z8RQ5G_aD{VE4N1rpC3Wn?+4;RWgeKADl=75BR z?921j`i+=Jtb-c5>|_)y)36^FIv;cnJF}S-og6^7ive)6pIL;OiP+;AE6}vid=*~nBazg~* z?k8s$QNfvd1y2O@=xrR5KBlL+T7}d?*-@Nu{v)suWD)t+3)23>x3|PJHZJK6n+_`C zb4d(1X9r{f8Wkv&-}uL)Xy_aub(n+b_}}@<=^uQEz}Y=Ua4E_rM*R>_0C)ejwq-2- zJ6>H9NU__=rFh_XhE;$FDUhQ^WMmJ$m;o@sQ}D1X&y}6#48TNz*TIv$EX#bP*L=Rh zhC3h*DfTqv?fGy9Bw%^*kFmaC=aUc> zffaYbs?OqFXRw(krdOFynWQ)e6;`RezVs(e;<7W ztY|Qu8H)SgZ3}3EMb$^ibzk80FA2Cn(fPXizsp4XcO#1Ld^~szAdtY<&!=zv_eF3U z01Kj;kEV11G$K#1l=W*4IOm@tWugaoI9OnuPxKG@fo|YQnGFWo1;jv19v7Lj>fOq4 zF~>5%iox1J3hW==k${^d05oFulO*kehADl~;0^TaFIoP(UHUHYwS?LQB?Y>GSz`)} z*w5F2{M!vAlF^?jA*0`?%OTPQ@68`LfTA4#`S>z&6Pa@P82>gQ0lFssczRdixpsC- zf9GnI@A&{I8l41JQtB^3luzv%xiF2=MwnRPUYK$vO(_67s*n~O$q0lkf0w1 z?{(*+cU({hc<~bw+W5t-d>1?0Vv#^}aa`dzXEM~XJ;CPozBVacU|tDa_OZ2JQT-pZ z6b6BdCA!aj>@UXieF&CfRfJ0SKF8mbJa`zw9ItQ==BXzJ?a^%S;-VKnea*o}<;P!) 
zfd$q?luNUAnqr3Bl=bU=dA;fEq`i2vVGj?xti3PjtA{-##IY^Jk|c0D3Hjwp>86Fi~QB zRs@_V;-(lZxZH60rR_6^@6RfSl9R8b-H0gfd5}=_nL|N-$*EM z8;B$C+4SMvIyWmpyB7&Jx5{UV9rI*RP@vv-os|Z7ffob*zl5D7JUcsE&rk{o3Q)SY zb#t!s_ppD^e>Dg?76y<4xc7oQF1Ai{M@3U}9$bu7`31V>gjCW&{s*qO_kf0G(EgJ^ zaXvl$G6qP2*`n$_Bp`)ZW4~`F+p)LB`0wH*=#lrNj8e$U|0AH2G43cQZQ+vKPE&jr z`I9tst|LiwX|r17LQA25G07x>m|Up;=d6HdTsIIwT%B>E?TjLnQ6dlC$=z`i;dfru zS(slA49}V6{tr}ul3WK2?b{j6cwyq)Q$w)h9g%?;WH23D;V|Wa?`INBG_a3x}G$3KZoANu*}U9=Abx;LX81 zJUnWv;$_!+@9NO}4{dN#0>M~6zk$5ycP8{k@Q^IX2<>7Dc3K|nCb>NiMOKHG|ItSc zu5=$%6q~TMzKiZBAJHYjzMv@N*ULaE8U$z~2rlTAfp}eyU-FgD=7$+X&hc)} zx)amFJcr8s@^a+z^0MBVvVuZ{$IRi!yy_neS4saF*QemYEdT=JrH7!xOaFBni9PH=AK4X3vFldc41O3a zc5?LRLdkf000FS9MiGgH6dUyZSTLbzg{b$_pjyzQM~`kiEqM`s`PyB1B_%=-zJq%X zOE@W&fcm*VmlKo~ls9vDyoUamJaVfpnGlvozZPfIjrzb7`iFBbm(q7VC-(RN)__3hP`*Yrs*i0y)apKooJ9kPB?IL-0 z{c@2mab>*=As*zHZJNAK3fs**nmW7@?8}Tvyeq$&%b5MEE9JCx6a0q zV2+uo7wEBFcE09*#4{~;&a!Y^!1(uY<9bmhOjAf5C|~kywW|Ec^O`6lqqKUS-mQJ? z{m*7=bb_v1nSy-nQHw07Sx&%7AH&aqfp$>rUHWd{eF03r!SYJGTe$Z$BnDUo?WTNl zK@&&r+h%)z6Z-sm9MR4G1vDcHl+M)JV%&JOLLIdmKCdJvKjru{`jv;cUBm< z{q!Ho^TLXYi~Ama--E5A#_Pn>i?C7b9}A;lFaR?TmkSzzG8h6M!u!qgW~)|e(v4fc ze~$#N^#gF})Jji8!w+Pb1OAt|WshgaWV<`At5s`Q-N`A=!J#}Xytf?O?q4r+rfVdA z0;2yQ+xZ$L^b>B_p45AMYinEVi%sd}S{^P|(AIvMV9yju0xu&2S$Z&Qq;Eb+0|)&E z4ij+h%i;X_C*pxKlK}8MohwL!6lB1Tf{D$6K-eyX1a=vl+#hD(_h`BdTyqK>$)0O4 zmPh30E17;z=w9O4m-q^&ta!kV{ZCyNA|-6~Qa}$%`T$*+nko(0c|7O06vfjQFDycg zz6tPQ;9@ZYX_QHM$Y`JpZ*G>=Dl<1M=$r-J72L>U9-Rv=uG`=(W3sjx6#jjc=*7ir zH3DP|Ra0P2PX4PmY4CufgRF=^Y^vlVaSw}*Uo4u6M?{WUF_v2fRt@geqdr9b_=UWr zRlp&KB5m8ux?dcG$la;RH<}1+y@B4v*g(3<6Wg7AElJ+T@g1$L>wCGNeMU$%{>+rni1!)2c=+g_|8TrlQ_U+xRDf{vSFGC$L;psjN)R1uob%L)bJscQPH&*F< zi%s-Ls>2lM_0`Qp0NgQn7}8>S`wo-AfCYKxC9fSfH@7q7U}pV97IN;wHN6A$z}k0v zzz3<6K+IhCj%0>nVTnu-#ithyh$n(*7~Q~H5A>u}!N6*`RmkUM**RC@>AA$~hbp;n z%EAqd01R{*A*jg3qy${}va^WL%_6m_KT++3Raf5t({CKewDB8%gC&-6MLhp%_Vo6O ziZy~8ZpL#M<;Z*!lo3)V3*Ne$P4EV_8X{e|Zl$veOg2}$n7IOb3R%vI8)%>~ReO3Y zE2cu%AJ0QO*TL3DJ|~c#o3XYPfG~S4!~USS$M>EDg>8bMWxAYM(>Jv{()Lz_VC?^F ziOF`ll^6{3HJO8OZb9JgKM)g@f~D9!-PS>c$h<2hq@`cU$@#Lc4~~v5*C0=lyk8-` z5FHm%T6A$NI#JK*4Z-|g@{*#Wy$vNN$upx0Bv}Cem4S$D2W5%`YN4;$+|Ac1=Qb^f z!k4C}14B%G>Mx&tNupWx@{?Sqimn>14Hl|M0LpA7asnGJByl*kgYV#44OM(iC7J5_ zC9MR#YXU+D;$VcozDE35G^~X#?WUofe}I`KwD%l(ZmJk417dOO$4@&Ymclx4yH!+( zAvyhx9yH5O@o`qM7|LAI1Ol=5SC> z%oQdz9|r+cK<|XK$}`vj9uKYxxbGFVnC*j$+K0nYbN|d#bSxiIz?WA;N>M!D6as`T z9W4>-G^Ma(dldb@PyoFXHfT0Ax8YJ__q%zpS~LIOMK_{AB%MZ13bG;b17xvwa<~}8i$CKY8 z?uaH{8OGys=>Hx@m;8zW$>~7S>R*aPgWKXi9(aij83LDb@6PR=@g0#DAc_r*IUO$4 zGTuw{M%)gSFa+&a^xW3(0GF*-ZoJv;?L9#1c5~wc%jL^~>lSJU#fXAd??->w*Z(r5@ z&O6(Jw|6pl;3@U#8WLO*%SeOw#hASbt|oXKjU`SThzwN?<~=>f^D+=52FVqOo2U^~ zLB*E9t0EaP^giGN;-=ZhxcBY@ZhEkuKz@eRsR!Wy`PuP7DE3+h;&@WQZ;mLC4LNYO zay|~aKKQie@yl~Ez|8}cWG*U*@|7Th$rj-U#w6+QRN1f6jG%;R}DE2Mr{EjVBWfc2Q`8nQn{Itb<8xUflqbR zg(+ywz?$sTf9bHFNt=5zU>D5G38?ULof7z~Q}*RHABmR$;tjvsREY~g*%+V1=dV7W z={a!N3}E^EhrKRp3QvIBy@y{za0S!>&OSYnt0!JZV$X)z={(BlgEUOyLikbEY||UC zl-!>;2jCLO*pA1@It1erl7(3KzB!)F;hhK|vaBNyx08o$DAo_ZyMgOlDE)F0RPD;`7Aa5Q3qXbpu$gA*r zBt^=G?w;H0$3O$_%{A$vV8q1=w$~Wdp2lz%Sf$?qvl`WgluMndpFreNx0bxGqV{tS z8O&-Ut6Lzxu#Hs#39t!%aT6VlxHctdu8jA3HAf5A8h% zVVw;oP^t$?a$7NM<9Ey9sB9=58)&|=G~@}o451xLS0?GRjf0IUBj;oMsvg_vv@=Qc%P+D=H?fE^Gc$3&&F#O9<@wk< zk9Bd#LN5UzidH{Jiv(|7EXJ?}BJmPDM0R~#{LY=0ShPrLxByWXkwl}nf$Yt)TC=|F zCw7Rhw{A=-!kg1&?<*)=+nU?YAFE8jlgRp~ir`Q{rT5A)1W>_DiD|5&%{pOv+^sbb z9+1ngO8}-O26^H;|r1KX6Rg%D5_f`vRyDWh6L$#~mL zZznn|YWlLZ= zMy5U?dKaJ%3masanLMBUi4gA$r4t6-y6w-hb%1s*L5l4hv2to8(ZbQOibxp3i0VBi 
zKwc1`Eldwgu279t@h=xZ7}p#Mt~1&KZ%$YT!vyuko1maq2O^!Vtzpsq(dZ!^?WpCU zes};D+q%`biTV#0Q&&&Vjp5iyI?}z(LmzB>qn=kmQuO55yrkgrT#TTDDXYEhL)@m1 z61uv&b;&aaF3FWl{I^BR&X3>d9|0*7{%%$THC_pER|l4t6xOZ>A)SbM{2w(UP`66c zp!qEodH^JVkd}I{ZW)|=ln}SJwL$has}{L~@U5s`?u zYFIB(n~|1wUrnvoKUWV}D4FdY=6r znVLIz5Ai8Wm3OODtl-i?x#e#Dp7hj37>@`Lr7vYWMoaC-AVp zd33|uJD?8m_z@v#c6JQTSIkgx5Yduwaw?ffo&yX|WB=qLVpRx&RX!d|{_jwO`Kp4! za_bPafG%Tu#=Gg7-`Sc27Je@QTEpwZ8wNiagK}wpIg~UY6x#r`T^$vy73+M@s(ZEh z5%NV8bxY8D0xFuy$S;3{dcuLeqBGB-*fsD*;Z61U*JR*tlA>S6<7K5ZA5Q)7;YNne z)fu39IN&=wPO))`QGh-VnF27Ui~t4T96S`E6H1Ah^p69BXiU=-@BwKVMrrG2kFYmlBRXTuNIzj+gJ~WF;}+{gb|Z z|C7w781oO!H=9XG6h7QOzDkmcFu(W7V7dOLY%p81kKZ`Xf+Hm*4$enhj2>+CG#2^r z4$jpH*WKK;!a3WK@6`@-k!JluOQYqxqiRWCCcdm4k5|`v@E)R-N#Ra=*L{!)f>1+t zM;$^zW2MK0xVEo}TE#$Bn|`0|{Y?&$wT|NCb}_VkA`BvLG%R{Fbljh>(fp+nB^)Sm zCD1U4%`_W?6nR)mc6pU%5ZVw;$|sMZIs(>w6oE` z3X5-hr>IzdSiSpsHeYwwqa&@=u1c%?<-K) zu(BojgBU*BIqzZ?Se6nq=zw_^<$ogE1n>*b492i zwIfe4nPaAk`*unHW>->pNr_oTUgguB&8=$C6K=oaZ&pT<3uWW_HxSZ;-XadOZ`H1dKINriQgFf1_t=hd z15E=*gE*>j3+SLc%%RNjC7Iu~4WuOT@Z~iHJHud=Feay?8Q6=15>!$Djq!2M5^rE$e>!-$p_MJl%_%FLl}TQa9URHK4!G?x1mKIWePLUG95FJ%zDC z4iwM;;@(N4NizuU`tZP!v=Zr!eO-#N-iMG}MFbSN?L~HUbN;)xVUE1uc?eouYB3vd zf)=F%0vW{WWQEKc>5o?%WK15Js0FoUVK;v_;B;W%C4U3Ck$N4mc_5O3bD4S?ST0>I zSC&S3^ZA6E0I_A*4|qJKSZ{QQAC>yJe{DA1iqj@|OzAn+1RBQgFrHItRj%!H1wOCa{GH;}&I zakwMM_F%-geRZso7T^O^2!?^)!L$%b^p)bX-js$dqHqxXL$^>fG)f9;hZVVFA@7s?xu)HdztJh#JzD1pelq& zmb)3_jZH7b^Q$XGT510{3g3NezMGX(-knNCYzcI~tGiTZ@-p@t!rRx!oV&Tf+t|HG z%IB7Lgc{37hqPHLk(Y&);H*kFS~K<|<4V+<_^ogs$x-8$zq#(XkdgeVk-zE}*ae#MFd!Fi@6gO7x z5R~8$q-78`$Zb2Tm;*{)=371&68|8Z-T+!}j)JBd9zqcOd%me;tR_BfUg~NeNPG5w zW~nhU8h($V7&A+L=xA+?PvYj~a=iZc36+48RnpT+rv$gzSULCoD25WyWG`7gJE4Qj z>U(_0d$y;uOAd|@&1%;QVVHT1rA>y|oXflAxo_Vh@W(n59|Bz9=zH?rxS#IxXyyK# zDJ%3S7Hldj%p}pQkg4I;88#kVty6)%4!FVqHo8R!=j?*=NHQ5wQ#bG9jP9%iV^MGs zJ<`Cb**6V;T5cc5(MWPf3HG$a$}ZVsN9!3Ylqf~-`-6A`CK6U%pBe2nTE3l8)!#xI zdMCnbAZgj-dxhiWtjZQF%m`Y7+?FFj=ls6QRDW!2m_?4Zdc_8zi7b{+St^{fOuIBK6AV$IYa}w7MkZdI4g- z-Skcg`+nG<1-2u2n+S)h&p}PH50r$PuzgwqgGkDL_x%}-zFn8dm&2%0_Q&7r&$m0C z_BuZ(5U>5BaF}?>etzLn^oUGrkzz=!-Re;gBNDco%E_v3DJIbVQ~<+~R8D?j7b+%H z_kIS%r1TM+Wm$+VahINb;C#7zfLItSOvxi@nA>*orxd?ogQkA;dk%fN1^2m*#BOu! zzByCom=3D6urz&cpxsdFFcv89q&2YtLcpi*Bq+@vhsj7ji7!;OAjbWi#?#vDu~cUB zw6tRf9T%G#^nf%jHEi_Hr1+xf$2pk!I_?X#Z{U&+BM+~TWchq3jfK=47PXo#j%kN` z@B0}Fy_fY%I-4g@zBjP}elT`SQOi_Psl*%yZ;V{(M8spckE|crl@Hf2t~m<0?O2sM zt!PsWl*=+}9x&!~e#zXtclWjc)2T3#xB?W*ykXrk9a7(F;6c$+5O0E&x_pai7l>se zjAQAIvtPZ1B(;*Nxu~z2Y(IaTwB7Ysh0YXT%G^i}o8eimNuGwW&fEhE2A*Fj0S6h! 
zbQz-Z&HEbH=XGmq>fg_`(7AnA?xI=LN%!fwet#}|g7vEw9V~x*#P#?`f*@U@T5?7W z0TerNt8(vYwBVuO+=ja42MjV64+3e#IU_cnC5=o)+V>9sD z1iG#y3)nS;J}T4;)*`N47?ovUI*K6hlYE8q!2u&Uivv^GS!r`-|5w2E9U=ZrQ0g&Q zsb2O6BZAbdr~J`}CN~Z<$+@1FI&*P%WVT%#Hk*D<{4N@hN$}dPmD^cOTx#F+#UrWe z0R0~uTOX@vb-wEOvYtBy7L6k1F$V9HF52hrmyi&@LX3+^^dJ=2JRYy!BP2gl#-(tY zey8LIu?TVTKx~G)Hsi}BHWRCJYPfPtgE|oI;g8`(CtpDmx$H+W2GE;1xqp`DkB>N| z0r2VUfBg87=o8$tA3`fE1f(Hp!+5(^PkHd&$PSZq11PLkC*|9-Pdw6+jD%^Mz?2d0 zn|t7OGtqK7F<7XmTRz<(xM>DycOlJB_!bZt@@v()(*8^`$BcLEi)%qp=FdCHEx9;76cUmsbXu`9Z)V`MJgN;FPW-d%xQe zUpol=E?sx;x<3cp{M~xl1^1(aZLvc3#1=WGn2s+-P)(0~`s_uAAB^ynu-wmWUs4Zu z8wvbC7+Ua0Dt)3C=!>xmME##Eg2rz7I0c5JW=?#eP6IVx1)yM%uzh?FO*-X;GY1|k(I2>FrLGu=oT5} zLJC=+k!H}EiJX&lL{1GFUY3D|f-Z?1Km}$9^gdfo>9|5CGRRb|rH;Ztydm_lMoE{c z{a$Pef;gaOKS9u86kI$x`YYd4-c~8g_RELOgCar!2x^@Y^e$MOr0<-S z1sHK9=v%O+;Ii2CygFcN!OP}8RDEMXT`DT$Gr(4@1VM^}NN9hKYWaK6muCgp$2UVS zIGXSl%O*6Y!}=I&L8rQeDn&(iNGkr@FA8c1N?`ppzqwS+iurKi$43VRn`oIt7W3x| z_kb`yEGjmxZ(wY-*lu}64(m-71ovT1rde)KKunB z>nQ*2!_41jaTAyamB=HG#L|)yp@q?&c$4y15}&n+6E2?u5#b4dxY!cMh*WS;1%wKk z)MYMN=*O8GIp z&LjP+=5W>FK>)Yi)V%_SG=8A-Vh8r4*`8LV&?jCOg0;?RbY^$(dHLK<>ziJpgnVPl z5aG$Lq%eiPr4j5|a_{+gzvh)Utj2xcUR=;2(}9QZ%g4>n;>$y4K2n$}us*4(Kk1NT z(T6Ev*hx#G2kKTj8A8hzsjIOrziaFyz{aMReM|YvjZb@+G3`E3^u<>ynASdn3-a52 z8e+8-u75^+^A;ZO?tdY4d;yoGV!BvGN(#}U`*@Yh3vfc2IIHmK@1hnjVO07;J$l@;xEOoTZcAnPKop24Gr$H$KM?4eP2BLIpt# zSm3fb26f+9XqO5lNQhDn_`r(??t;D8nEI-;35Cw5SG9AKS~c|NQbEYgACJGJ3i!`l zTWb>&_vb)WNHN+svkKuIKLkcU8Ox;yOE2hHBq)MaePhlL;m@wTZVDy1E;i~+gzNZ% z7OMeouuqcSve~d9o-Bdx48twJ(%queb|OGB^b*!Hs3xM@m6y4Nmnk&catmDCBCG`~ zY9D#5ljnvn5d2ekryLT@n1>o-EKF7~;Y-%pjcWSaOdXK$0ru>nCmLqqj9+&aA z(mbS*t1=O)5q~ld{`kp8)N}HZLL7;@n!a;)f3~7Y-!H|T1DYFY5@z}XP0qKq#|~Bn zrzU}tnm4J)%QnWM$y%>Ub4jOwZ2W5w7L9z^wYyffz}$C;6Q8bwV~Wj$6WmpBi6oO| zAm9b1wfrOHKHhcEa_R8(_j%jcrJ`A6u6pM;XAoof*4xhqqp*CJ8=R!i)k2@i+_9() zDFcWV;b=Atjz?bx{wPu;hfxDc$|`@700qBa!XWon=`dL;Pk>4Pink#zSId~C&>bFm z$^vL{G`CLR{g0G#*RD{#B{Xqq>Jfb>&i@O zx429CAWF4Y<K=%c+QBW=Q66IUKu-?^(VH0B-m6bsh^Muuwe69e*DJZol-2R4dM z)@j;3?i`EYc5-|x_zl?5v|s)v4`E}tJRsV%4G^aipX!1o$w)K&R7v?v)BkGY&vWW` zbW_QGCkpYy&N=}-vMI)kqP^N>$?L@ym0_!mE6a|b911(FJ*Mt`d0Or28!xco!f(>% zHM-c>IQFa2;elSV2Y>JqAQB;%x#Vw^+uG4;l>Z;x7D^b$5%`6QQP0+F=h=|y*6daX zjXmVo{Vf~f-h8PC!WUP2@Vw-(!zI9e@oM?pszbmPe%TgQ*o*rw3nEhrZGTAxq;PU>>~iXbv_-8Y zA8!Tdg@TN-d6@NBpZ&|f3hev&11*`VoqHYupvefw=-p%2gDnb1>mP}3$66^XZK4q` zDp&ZsofQ^%{w}||GQKgV7NW67TTPgJX~R-}@nnVo``YV|5{&**u5K$DY9~44kM=ze z7Cdh`9xUcu0r8BDpzCrY-#7!8ZvH*FBwp5(bqt~$ExHW763LdpsXHcK`4SoJf7xx= z#%svrJ?W_DM2*EN=wJ(*INmAc5a8(R=BWLP5cYJ}t6z&2deI|}f7#E7F&q&7sEUE} zndRXNtUUSF;7MMjQV;Z|g7l}VMik%n@NH;|O1tb=81A(@R?9Kypi*CFTS>*E$3DD1 zUX)dpLbST{ga7*Wp?1aAm{JRFDbvnDs8>~om-BS1WMWO_%Z(A4PG$eV9gtz_BAud~ zoKFQYM4LW7RJaEDReo|Xpi$l_7XNr_^c_(`ol2R?oW=DN6^s37i%hhSQH`t5hiVFn z2^1_OWPOe&YQ1Dhf4cqhJ4`k9oA@+vba+zlrBN@l{k&win0;6AU^CffeoGP8QFFOi zl%ILVqTfo;-cAUp&yIraabn z-)+PGsEy^y{z`=rG{&i@X0u~L*MK=@L);Z+;@{AfLhJ97M7FURq55(rN<1*>l7`NP z$DQB_yO!WpIq?cs*AxG1p3#D8=+f|#Q*T6qz7}Z9KW=N~_Q2$TWA(g%T8#?y(+c0^ z(Ee#@)BT~o#?Jb(`!*zmztyH<;k|0=(~<3^-9!A6PiwxCtz<0zvMrx~uZ{`-VAEVr zuUxIjN%inN-oGg>G*t(WWv|hc_)|Q9qqja3&PbaD0g{$);P12#+Q&qFCTYpkYasNa zY2RHwTvK#c`qyfuP5GpDD4n(a(m<$_kn``uL1Pag5Ajirt9r&z9djtOD`qxQj-1|W z@7IV>lxkGxFWLst7B@ZbFDM)^%tiY6C<@P)+bv&9PD??tjEYmLU{b`TIzBX=1^k|~kxM=|R9U(iyE`RW4N_j91=DRZ9yddEx zh0&fItus_C?UmG=>Ot^5*0vse_ZmkD*;|kGv2d46_r<73n4#a)K@nO(U1Oa z?(XkKDeu?8wN^g_3E2~|2vEE*D`2@S_1q2rcIKs85AmA2n24*=zYdnXW|LfI>AY$d z{i+<7_V9Ns4|7UW5PFkSEIhZrk(3)jr+ZcQ_I`)gxM#n8E=ICYh}thyV?XV^)Azjf zHLu-TiBzGIZ9Qa&6IKuP&lihWj`c~g^<+mQzy|yC>-SqcNxDl48@={ouiR_(@9*ng 
zD_b;~ekq78AJ-%crkw;XoTnwc0%;084oqTi>Pznbrj};-9ekDj%w1CP0X71jfIG_d z7{~f-1^s)xTl?qon(d?CU(6`(l?PbcH`RF9?2L@LZ}KH#p!E;t^WtVj-tHgNfZROv zbhpNlNb~pEu}o!)l+%rn_CG8->Kjpcx$*VV-|aUJFU9V=1*e25OG{GoiVjbIFYdBu zkd`zVmtB_6*(j1TtVk^z8cZ~MXF(v1E?w`*lgVxaV&>M?*6bWip>Kj!!o(nbC!*HS z`F*2^o50=bZ%~QhnXB#QdB}KUMKCOb&)>JzHTcd6t&Hqww5oKC60xz_MC-hLm1tHTDK{nTaiGvWT~tt` zEkG8rQcwy=X`h=^XiK2YLh8|iYDDC)4hGmWm$W2FS{yv3T)n~mdb%*TL!;PQ2hX>f zktfnA!kw7H>OBK*Rq9k!sMwyI05VlsECkviB>Gq5$!gNe4+WLWpB4V{NTl9ai8Zds z@=ICo5Dq;&TG~~hzN5rX-j~ZodUX^r`SZ+87gYq_WB~dXH>44$%Mj)y{j2|JfwS}2 zcBOgwMsr65+sU8z;tP=>8D1wLh}xb|%~W*~27UJpnVJ;3PPbg^2KwpZ0lgJBvFzPc zwg)T*@TPW-lNu0R(=4(PmMsI+&EoGxVlJ zgQ&;jI3z_6w`jNbu3W3y9Y4Hua@?n(?;=_8juTgF@#TRc32A}8ctvkb(Nn(6Fg1MG1@WlE98nN#oFKAuWX0fLkc{l3mPdYfmm!`zm1c zLoi>(rbwajEf2s#;?gNGJKLUB#~S-9*%^chJq{sWJ597PB;70Q`F9!o##; zYwI&JG-T3g{;du!)0 zM-90VKYuCX-#OifzdQ-AzkuBZW$3OfPlz)P=ac(ONjH8uu6h>j-@>+Y@H1uGfw-mUSNx^2?o<5^?gj=Wg=%4^&o94z zWic1CjAmC#-_arqtBlSD)QEXhd_4k1rMKpY=g3~X_XVP7h>w()*j}SeldVXnO{J?`bGXnz>i*v>W^EkV_gv+VE2&N=-%`=*Mw=(MiQql=kZ{NmO(^thy>KIpX=}zVwzb zB5fvk{;-&n^x30fo9Jq{&d{172ZDj@)Pl09L(jwdSJ#6&7)Ngh1HR<$*_-4(6F+10 zAWC@I+#MT0K|-0hH7KRfurVf+A-}HHb+_d`9P?zVRJBoG^f336t_6{OOUUbPE~_o? z6-J$S^e$Ry$wO5RbA_?3gQd`W7UYW-(0LI$muV-j>cv}hdip)U&@zB(#sZ1aP6cW7 zcl^k#>dLUKSrV?`F#ESOLMhwj_RzKglr<#A2)r1>{wB}l4-)mN?6EQlUO2tTDa ztoPwhU2D0v+(!)=p2#T>TMjjexW81keU~)JtA;W8%WbO=edBk>EkOPk{?Wu|d(Q(} zM;Q6{Hs$twoE7Rm_}tFEz$;kQkr8M1PWOsGhQH5|j;ikX`1mTQ>p2PnO`{;-lsNlf zsL=40vf<#U2v>Kf1xxQtzoFKv1#c(XnJibp7APw`;K_IPo$h&Xhp;w|qH2a#K)RvZZ1vf5Ne679r{E+#_Gyd9yMppBiNcetK79 z>WkqoRnG0YJgAcqE}hLmHd*STv?bbiviQwwe}>k==NU#DO_7JcKWK>932b9(_(z$D zoPwwPZA`^s!E{yovz9`RdYsad+ooAUTBhD1TIP>TtG(7IS$7++AJ^%>x^0ytyj!f~ z4_72lgE5;W3_7M+|G@RLD!pGObaq6+1fY{PzfOgWzX*~1pSrF+p6T}eOD4^PLX4PB z$ob4dHiaBAk>f(nER>N5IqQJO^W@CfTFxvbY09BQp2-}VLlLPoHc2!ps^7hMp4a#L zd;Na?`q=h4-Pd(r*ZaEO@B8!VNhT5bn{_F=uDELcdz_Z`*_&Z&h{XH#08fRej(weP zT$M0Bp$dcSu@1_eZ@I^XzGe9_tpGaC#wzQIsk|ZaSV>+~>T1?E>EwD#xU-;YakoWQ z?7^Xok5eN>RYr{Bm{a2cZq92qcjF?5F#`S74EAEQ*E6f{aDcB!e`(cB@knIn^HUxz zZCen&{D3YEtJpp~I6cezQC~;w>_cz%)cObJ2_=!vWzZiQi<*xs_hqcs#peczELhyu z9eR=>7-zRnoLguu)YMY0lpcm3D9^d_`BTM@%dk|W{!Rm#O)Q22SZrs6wt+ZzD1vmv zoa*7Sx<$h;ZezE)MrRMtIwe1xxDVay(iy-0HQ>j}^#1Eb?L>wNrTu5X&$Zy^%2Ve_ zA>xPu%^Y0o05c|xQ5CSRr$4c{d~-bT;WN2U(uA;rC#0~cV)E+87_0i`ki%uj#}|bp z&*A>|NuS$ymiLwS=!@b{K8n8mU{=*HP37J_C8(-AeC6IP#=)+vdXMh=1oK5jNvTjA zoV@^$#sLR2X87L+ras=gyf!h?UCYo$tgFQSQ&FVLvnbJ^Y`Q^{W{;9-_G2zCs-xW^h-CD^HdW+ z#WC>6+Ra!U3E&Y%VL*nT8>*9$i8YGUAl6W1b|cJQt2qJ3%FVj?q&arNFPPV zrQXtz&3#{TiOs$>+9}YERXUKs{XNHv55y<*NUye~HQSze*otEQ5TKU!-kt z9gGA}a0nsp3UVEzi3`tu@L!xvR*{5CLZe4bWhrvNQ{{zcj6RKCAz^Agf6a60CT9XRf{z!aw8SQ7V!m`i2(xBzY>@u0<&-ri+K zDplpnf^6()DFGOF-`B!)FnT?PTR>z=t=p!cA*LaA*Q1+iVy>uxPHZs}?gzgr(RX_g z`#UJ~=z@Q-Q}JqmwzPzUosvXch0*`s1^2EPGtn1!0EVny{PDL*BD=CxRV=6 z?gBdrpnZ)65`T}m7M;T#YD_+J+7MWCCBRsEb1yAxfIu3X^8}N5_=YTb%n}htN0LQ; z51jfttQ;^EZ)CJ==~hW-(Al7K^vwkKN~8l*C1kO}3+|G5@c%y-<|?V-{5$jJxU~z- zQJ*}b7T=(vzRcQ3ExkV;wKDG-wJ<>~TEF10uyI%*3M&JK|51oP${mMepxGdj(i0=X zYc`EGn_ism4DBn@rP!63gbolB)!2Rs?B1VvfIx%Vj8;KkUPwyoX{MBRw{5A>OIY2YeWL&TL(rRcFP{POs8UZ#$EYLBoV5HqUEh(uMENHpV*SBks+S|6{ z?$5eTj3s;5wdGA;bhHD*8k05;v}XyXeF6ofbKrsRK=X4RG~Ps;@yru)2?aVqGFt?A zMF6#04)_kZGiWO73@DzivhXR|ni=C#KnK_SF*1u}>ZOTGd}bvC;b7@4BroKVh}rB; zj49i72$_cNi~rdOC^K%>G-U~Z_QFEm`6pmIZ@UX%EfR@lc}Y8_o48((GLty^GIk_a zTa5eist5mki73&4gP&Do~7Ghl5bQ7stm;# zctiC>^B2*9GNCW;b{+XuVHalk!?oQ=aY-lAPA}LRYf}99N1BQ(TX483qp+r83H#W^cu~L-YyeTT#R_pf%T7c9xL?nRKq^=(+LD(iF%^glDm==QWa<;#YwGS^Ta0WXIq-M`QJV>m zC_h}4hlF^@E8=a7PTM4)Z2Nyds_MSqqoToooUATuP$L#7N4$t+inv7 
zgJY!c)!`ZE`m_J?Prxk5%-NoYz>ABE$)@iV%3}YTGxZ4s4SCQzhKzb7$maN z#4ZF#h3dM17O6XbW_9vXetZc72$;^Hgc{QtSSol)FB0N0Tzmd?i_}7Km}T721g+CstO~bZm#|&&8ynhF zZDdzsFbFgnFZ#-kOwAnNvA1Dz87{7XRrslz%c4i{R@*@B)354gejv|#w_Tdat&Ee3 zpCzT2#His)#a>XKfA;Im`({cW`c)O`K(t>L^i>^@WKF;zQt8l!YgdOPK?hkTBC9&P zMeE$sd-Ukom}a<>Qs2EQ8@nQ%zxzs!$D@*5+hVEi&7p+xmdM)CZ@WEXr*%BDBPcpv zKqelUftd~0PE_BF(>u93M9t=;U0faFfTn9L+{G&Oru?*)m{{PT5myDi^wUwVbR9Hs1>b-^m}mZ_>B>@OGoEV-}?hcDQi2jw(dHvU1o z`okaJU(Za&F83)4U86O9u*;@{$TuDNbFQ+?A35Lxagk{$020!jIKw-xIk`(}Xu?Wf#agCu5flB0(eX5}#N?Hi?*)2ZM0l|3xB}0uK6PIAAR^ z%n$XrcqO8WXbIuf?Q07UO)6Vrv`+v5YvnIY%f8~OQ(LZtjNkV|a_!Hmu9qEe^zRG* zu@=SW--q>jedcb5{5Kidi*L590&V9d0&XahL(e zmr5eyLHERxS)`sJ3Vn%tcJA2kRG2Nt#Te94tP;R6yu4?K_KD)fJ@&>A8Hn*aKI!i5 z@vXSvU|@0Gw_*iN{G~CMce^nzWkC2_?fG?fqPC>Vo24Cnye-$+R>vgdmCg|)s4yWW zLbM`X9M<46+G(o@i7p-pi=>p6XgP{S#Ifs}OAa=*f_eA)b=1^Hg^-b_eW1-T3h5<8 zP0qP^4M_W$&e&JC$i^@v$K-enwc6wk$qLuKa4U( zj=pDDiISOT4sfEwj5-Yuw(qHfRhj7TLq)a}ZfD6L2<+x+bNNDg?ubfBwkuMYY4!wT ziRw&oNZCMuV)6i{@WH~kMdYjyFVr+}PykYHeDV+5VhdEzPDZW0&^_?jkMGm(;Z78f zGM0En&Q5qZ1(5~6Ogmja>BAD`cJMqKU_j7vr-PX1hwz|JMl<(R9-G0S1ahq*ePTuq z!F-S*ov0*pg}^VF&$X}UOEBMmUJbPg}MPPjFkzx>+a{P>IK0TM;3OTB{V=flF zfA&wh_ymd>_#h8X@%8MApzx3*)Eo{os3(h#Gxd;Z`U{5m9GJg9ylEy`&&aUECQ4v*VB^5ji7lemaXvm)ujsC;@Ez3 zh`gJ-duugOM=6+5eSUOc;LTPfbY5{bu=S~U0hkfSy$LpanUX^zzs5;FmYM*!Hd`^&QH%sDp#bdTVswK71+;~A)Zh# zI{Y|7ZI@~-vKdMEhqed<(QfI&GpYxOcAr7C!@Oa{=mM@J?~-e4tBL^0i);hYeak3t zQ1|D2cj|DO^99e)HAQccm5^jK!m37ZO5`3X6o6(abtt~Go^cEkW!$hwnR5nCEn-m- zh5l#4x;2c^Bd}BqQ)>qhpy>r%OUeWxqU4lXOhi^GyPE0!>`*x#(k+evb7AC}O(z=Z zY#cz&zDvPNXZn(NsB-?afw1={&p(DhAc4CcJ*6@ zt%pzWVzo2=L6v+DZ*u9Otxh9iUq)_*S1nZ`3N!emyr3mYs!2)0{sIo_>OjS_(w zs~&m;TtwPQIi)Tsq(!B1`HSFHX%?1~5kYz5jb%b}teNmag!bc;#Gd`?44k1I(wVgn zsLrTaOIY~w`D^S6{V z8|7YtX$&Y@Zmmw9{{XO-`Sz4Fw1Jhp>{-XfQ;($}K{|KHyrRJa)n^p&s8+OiHYVAw z^{9A{Um~iNmOv|!$VI7wMGg7TyuU58MJsrUc!R)m6~3U%3;p~Lrcm=5^A;@spn%m2 zijeXcEe8}bznUmpR)Lqg0wx@Oz_5jqPkHxD=@z~)b%@F$qk%`=LL>XVZ_ijogjOWY zVevK~1`~R*rp!PiN^8tdPb(%IwVQ?yKsrhd6i_8lTmeUfPKMD?GQ9rY5BC>JlxCoN(^fdoWC zb(XjkWS+zu$RgB+%joaQ;XO7p3C_WByRTpchW~-$;!b^W2Kg`r&-v;!AW9&!W7x>} zM^=%Pj_3&+Qzo;o6SxoJCjQHwQTMR*FnIHKY~&yD0W~J00%nmg%(mUb%pvNKOkp9B zz9o8qI?S%yjFQnHfn?_&?u!I;MfF}UAVRbs#yM0ZL8ONbZd4P`yob_(z!~gm#2i$jRKwXEa=UNvE!2cw6B-83M_#kpBKaLxH&F=E( zUD41Yu;K5J`VR68s=0EHi(&=d2h8dyySecd={((5iX)R&PSLTLT}LkuaHj!h9>!kI zZhb9064<6dGj$oE%}pTrxfa3ss`@7lfBom$|C5lC+KW*ctLiR<2SI67gB2cL!5vB( zwWVjELx)^yk0rXRLQRA1HS!_+#;5t~9=&u!ol}M8D9sYXgUyp@w`q5HXFtC&%d=1z zBi9trbx)#D&M&AZ5w0Cztfxx>&A+A}H{Mc0_e4LH)o5pSl2>oPO6e!u^kBMZ}1pgS?&$LM6^QYV6h7E!HqR3xUXW z9@}zSLh?(5RD)KfR@Je`;IayW?4z4_$h^!vpO-^2GmmW=(vNF`Dk(!+aL52@(QaHn zWwkmbPUzJ*%AEQ-XgPi57Vvfa5K<6RM*e1{>&pIU^UiH|P5&vmR1P%~Ht@-@w!cs3 zxi<@4`^>p?zTXNGaUsPNsoc zJ}byx70ELnl425S(hGS1xUYDzmp6Ix(AGoghi=?gGz~qN#PM=W(46lo$Vd2m^-?Ow z&y7n2$q>lQ$szomcQ=u5O`LNKx5qB*!Lv$#s(cN^3BJ9%AmHCgGLflUug1Q*c{jK zZK#fAe(V1d!p1Nw&esn{$I;aLuhI<2%M{}#n<=;SZgij;ahXPwh1bg|L`>=u8vn^o zy&yZ?jg6v$&H|#5?}^t@QbGzuzLThW`|!}&4$`WD3)PWF61;@_kQxUNb+`}Il{n{{F z=#TGZP*-6Xn$)3&5$s&7`BoBhmxzu&O$f2vFW?_Y2~jkpeyTyw+c z0~^hq4S>5@*8RJeX&Y$yyk~=VS^ckb{QC>;eu<%?U_pa~UK>}6v2hLm_5H@DJ-ZWs ZZR=wd-qN#NsOABG$B&&jNXD1HC{ literal 0 HcmV?d00001 diff --git a/CloudronPackages/APISIX/apisix-source/docs/assets/images/apisix-seed.svg b/CloudronPackages/APISIX/apisix-source/docs/assets/images/apisix-seed.svg new file mode 100644 index 0000000..efbbd85 --- /dev/null +++ b/CloudronPackages/APISIX/apisix-source/docs/assets/images/apisix-seed.svg @@ -0,0 +1,3 @@ + + +
+ [drawio SVG markup omitted (3 added lines); diagram labels: APISIX-SEED, APISIX, ETCD, Zookeeper, CLI; flow: 1. Register route; 2. Fetch discovery type and service name; 3. Register service listen event; 4. Register or update service; 5. Fetch service changes; 6. Update etcd; 7. Fetch changes & refresh memory]
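
The diagram above describes the apisix-seed flow: a route whose upstream uses service discovery is registered in APISIX, APISIX-SEED watches Zookeeper for the named service, writes the resolved nodes back into etcd, and APISIX then refreshes its in-memory routes from etcd. A minimal sketch of such a route created through the Admin API follows; the Admin API port 9180, the example API key, and the service name APISIX-ZK-SERVICE are illustrative assumptions, not values defined by this patch.

    # Register a route whose upstream nodes are resolved via Zookeeper (through apisix-seed)
    curl http://127.0.0.1:9180/apisix/admin/routes/1 \
      -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
    {
      "uri": "/hello",
      "upstream": {
        "service_name": "APISIX-ZK-SERVICE",
        "discovery_type": "zookeeper",
        "type": "roundrobin"
      }
    }'
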
diff --git a/CloudronPackages/APISIX/apisix-source/docs/assets/images/apisix.png b/CloudronPackages/APISIX/apisix-source/docs/assets/images/apisix.png
new file mode 100644
index 0000000000000000000000000000000000000000..d7672dd9bbab15c2e9851a6826ddfee8b4f215fa
GIT binary patch
literal 273572
[base85-encoded binary image data omitted]
zGlQc7;5Lcy1cH896A||hd)$d2`~RKZL0~m^Gip@wZ9OHb6>97|a{3H#2)BSz0{FQxPJ>Q%2P(lj$*^l9)O3gMS!P zKDbx)?nss0e5Xg-P3z#uCfw_T^ zTOaaCudDvhpxe5*<$`653@*owJ*l{xwocHg*e@gExeVfwB$To<8w~rGPV3RBiyGH2O_aa!lAq> zmS6|1o>yKEWawqxKWO|Ij3lL7TWaIgUz*J3^b&pxG_XdcPJ>@R3F8Wk(?Gc_T5!pLn!sUOD< zXidnI%1hKPwdRKA$$o_)OZKQnrgaTnP<)OjrBd>rn&Wt!uqi&3k74U?VtA=MRt zUB+&t>73o)j`Okkq-}lmzaz#wx+)%Y`%-JSRUIjL*Sa{g9D}_H- zcv_PM&JmA>1%Pa`*qq4LX~V3;tM=ksNjvqvr;)8@eo6mEFCO=_e`U4IT)FI_H3XYV z#YUlh{kkZRzuQc2o!md4_JY}V*^jU|#@M3Urzcw@MSl!4vrU@zjx64Q#KCD{q%NC@ z$_X+R5po7IXrM~G5ANHjUkA|4UzVD>1(w-Cm8N|C-#AO zf=D~DN@{x{=;t@e6JW%nE0zV;83l7So-%cyM13;NB^5hp=M>n?AWOgmTcMi$lYWSS zWPf@s|I%r8j;Z-!pS3iY!qv8*j5aXfg4+`dX1@hg*dj`p%(nn~CGt87Czt?YK&_y7 zpM%BfK~hZA&hh!V9?Ct+sKMF$r#v^o}#{#oC?ORIY zET~jJ*n*nGy^xy%ATgz+AvTEtI&|VMRVk`SRW|I;(|t2N`jt*OTD0H&7G7lcSh9U6 zz17ztPI-Xt9^t##wBVN@6Mf+|?yj!@*FI%dX6v%QQo^TIKqDV9QJvk+Md+%9(-V?2 zJ$QZBl0?L%Cl%NXRnovOh22suzl z-PAw8lq`$Ggs>&?aox1$JJ#QJ%7Wtj*`f^d(`}quYpNXyLb6b1864OpcUC{RPCv|^i=^3^uAZAuR`lz zqBkd!jP;hL*B;ZTS4AhpdSL7OrEA+E>Fi61T{vNCyeyz&zOJ%xc)5{AO=s%zy{l3O z$4^wB(wYCs&M--~VY1!X2IH*;K3|E){DO!fvDhD&{W0FqGkoJzMTWM8D5?h}zi%aW ze)XRxbxY<5aJk9*)Rz}#ik7(dCO64-fSsu7fryRi(qt=-{Y&%dUkA@OUN(g6G*IJS#okwCO|bFFHEWZ>r*wRwhfD5W_KZ( z3Xy6_vB}ms6nW!uAxaJT$dlCWMw%;SHr=fqchdZ0c_ztBtKcaZlCL@LZhN$?EHml2 zbrl&8zuyD2q;=SD@440D_Nm+Gp10Je_ZI5qF@$Pr%=aar?w7c9XI?%38rt2F=rqrY z;Lo9tsr{d!BS^`&dIrWwrG^Xdl02Jq>X~M^WD{@Bu+t7CvWoJCLj|Vh$7jKH;KB@f z6>NQ*vZr6wua9Dd7NTO-)DzdRXC}iv(aWB6mdXq?1dDIq0GNMZUhN6jmSBxfTaD@P zepsPPVe&x*iITM=fA6+HL;!SO5gT=Fo;Cx7x_yB|nuzly*|7-~kKe`wkKpo=5*mp+ z*RYBYl!K3ThEah0p^{-sqB*4MiK3!zbGN=boH6|7SrIwMN=7tN*)(Tq4Q*044A;#l z_f~he=G%av%jF@h^TNwVa}8#%aw!>Y7kf3>fE>O|8er&2&=dLADFYer+?7M4-oa{^ zH40;z^b6O(*$Urf7$~z%Vjg&el|RV_6+GME4mq>V0L-jv=1QdnW8)aAksnij+6 zC?=sV;+_oNZe;mMkQig1DnB%&nCpGtow?d`(BasjUDWKgn0@{p5>)C z>8(Xfh@0neoN`e)dyTdwc3m)znyNI zn;`31lfh!$=8a^G15HTG9%vm{oUZ(lu5|MkpiW#1P2bseK<@hB3LX=^S``ah70a)3 zTB6(r;2up9oci^U+3@mp>?@HFA@@eD;lW765+~S;9-a^N%7h()?p&b7uT;xF5b!N( z*ls-cJ6)DE$J;ued28qr z?r{9+?~xH0Q7o>CYm|koNH)Si@jG3*e)B`sUuCr2cUF36s_1oy2yYEDl2>1 z=$kZ6+&kDix&_0Ce2{wyK>+{9$)=Xj?7X~MKd3Apcwd)dt@7Q(yTs0uw+4b=9F4%Q zI*K1vYP6ObLuWbat#<1~hRA*z7ocp8H?0_3cn$kFQ1*8oNJPPG5!^#`JK7tjNziUx zBHwvrZ5i1!b^!Ui?ggxyG2c6#w8P-z7weL6@}YTCL`n3OKq~Dn+N)8FxAVzGxxsD| z2^n|yU8%c>iF(Km`eGCgUS3Fd1={};`WAvaOVORxf$Df;vDFe& zLM<{d=~3t&v8Pv;8&$gYQ1LZ%-U-Gcs;Lj}8ltd_^#W%s#-IxgVD2|s<#&f<<-N!Xz?h6Ze95*`m1K2bU1Rp@iNPZdk)NKB@{$(0 zkox7fetX1Bb2>|9$}jj_gmd_#n>jsd(A~dD;ws*njBIo`e(=aDb}gdHv6`~{emni| z8(+@n@5-uxzSyFvaArTGXgNT<@VyGR$&jd8)!xW5ewov?h&52h8GkA%2=vBZFg=-< zk1Y&jzAzx5n$2nOt*CZ477YIhi^ovbC65qYC=XQkGwOtY^*~fQa}%;d%lhDhcYBVp zQR=qn8yWE|2ou`#Fw<1Bx;x-I-T*2=AQoN%JM%JvJUqX4byq&+n+K_E4x}@!kaQe6 zf;6-c-i{Q2?lu(A*-tO{5(S=;ArJf%C^$kC2OduN?vaQWDy!Jj1aif7)TS)X+;Sn4 z25~_dt7fUq!UU9-(HZH>3n>Vc;mdJn*i}=zYZ#(iqrJ>XdGL0P(gyr^Q87kq=`liu zYjt-P8Gh%2Eysw9lL3e$*5y$6ZC!QW;0-mtJy+Bt_dLVf-FzLCdS`mLmTk-9MU9fk z$#55VG&F$ui~g$l^3Jsm6qUtY{iX*lv~wt(6RgZH0HGwGBx4cF-Hv$!;95lS9dHS+ z9c6VoEe^7XyVq&TR%txB%G#E`;;cdQe0lR<%kBSH z6z@Kq%gyMV*gtQEm4cZtoJk;|r;5}yhI=BgC&x_b4;%&g?B!==@rKLolnUs}(F}GB zu^_d;X6u|J4Iq?C;D>^unnxEKv%I*)Y0wAXv^_eb{;dM4_1P-iJUr#&p`ovZ+0u^E zuDJapFt(z2lVXeUu^JEiB~g6ayXj9v7J~nb6C$$MED8OzkJCI%CcsmzpopMVF;(%m+xEw9UYqFbvMAEeq`9N z|EM?uwoF#S;J8KphNnF^2p%d0W6&!0e^1`lr?ywjFWUSu&Bm!}tAL)fb zmAO^I=Kp1#OIN+iMKgLP-dw>s0dYR0?IHfY9-CRhS3l5196gZiEVloQAYmmyo`t5= zeIu!}lN_W14cu2cP!&rSxA)@k>IYrshYX>F&za$~`cQm#ee7Hv6m8P^>%XaA-(bZL zrb8m*559L_6~IMm1110Qu6!*g=#pn=XODqgj{MqnEczL!MZGnm7e%tae|*Or{W?4^ z!?ip|p1E18t@Q|qj1jESf}B9dAl8L8=)?LgeE*(_k^S#jRrAN#N6&j)J#q*jlF 
z{kQ3f#1Ecy{p#FQS@9}kcI?8{-^QGq!o(54DGI@{mqd6*NkNpjz3gk2c6BXtnH%VH z)tD~tb=_oI(MF*hsoz(o?dMV8Z}Eu(b)554M!KNTbc7rthE1xGroFFH=RWpc z9E#+WD`$Yza~r_B5^J^-M`{RxlElJWwGhZQO1t1+DY%+nsOs`x24H~S0kl+`C2VGd z#ta6&wy*xf2Gp=jauBC1_8q`0YO4#yIUIKr6X;FMiY~0#S=Hz97b*Jd-tyN;Qw8z3 zzWnL>60+qfr@D0}!uJZTnUuMzX&j{UAqY$pJN}(r@l6ULAQ7yjKAmN@XkcQg5!yZh zPwB<^-av#mpmujt3HUth)t)P{;K)g36rtnU*vM=4Qh*ib2>xjjXI4|Yd;hiNFbo`~ zt6b}S!U!{z*XYe&G)I%uo1GB(f+S6+&G4p`V+)0KHBJARd;wxO*0|zK*CmUDsanVb zU-G^jND9eQUrMix#{Ho;3e@Hd~4k1hn#+VJhf_>xFx`JRCb7fP84^sgM$K?%S~D(Pxi6JnZsKA zP}hn7wH8^G(vE4iNe<7Qvk&d+o#Un8ZZ#4StisWv$7(A|qh?25%Ix1mDtQVMYd~8b zywpzqZK=%s17K=Nt{hemRExIwiFfGbL!xev$^_#Mea3|O9(S@2t&j2B2QIF!ZfQ~F z6;r?_z+Ol*D&aW?sfLGdiYcH!PcwBbo*#qe4wvuR8L!>u$)q;e2xc?@()zqUBDN*G zx7n+fk$aq^Ys28gSg+B?He|!j5f@)%1!tpoeA4yS)Pp}L@pVCC-UhRmif zivMRbAMK?ta&I$mD;oe~sn541zA4$9%P077v1t|hCwQQRMAOg^QDRF(&>)aQzF_mS zW@E6FS>2RD(f>#$;rRgZM7-~J|7jw}BJkxNIh-|jfSXO~_n9V}n81LMfP3h(S0hZAnzAVyu7A;C zqQk=-<2X#H?hWi4{=DV}hsftSTBJFdC?&O#@NzH2?FqbLyd*g@y*+O$<#UUPvP(GN$#JPJ_s7?C zHfs|vWFSDcR`|zmZ`TX&W{G2kvVS`d`9sn zA*WnD0wazP_~`oB*w_STY$w$LA)k$}Hs^6`>pZsoAno2kVje^6c#2E+bA2K67@G3fH=B9xNA?Xr6~Uw_^Sx8a z0%~gCjmGKZJGvieG)!D}MNzf<=b|d$IpcpPOruH&+xI5!pEZ(0ccAosv`=yJjcA`D z_4A`1GSttc>nnVBu#1=J?YuD<3|9sKYjBS_gP`(qP;zE$d)8da$JTGRyCSpcU<89G zHM#l0YdUuhB$7$wHRYUd694k9G9LCA;216i2xGzkj=>wbB(m@)n2cb!&_uU51<)y* zb)>V%U;gw81z`!k7UGArh@T33U;{7je5n#S-$=e-O z=4@dZ*(9HQhM8XLprm9tL;^voH{MEGp>AeNcg zmCT|>)(uQ-!=Iwy%|AL~Xsr(8u^jU6Pi8X&( z(poNk?xWqD;z)^6hAqSf1II+iB&c7`KkmC$IVVt2{0Z$4H&nun{wOV?@rB>6%jSqm27(ej*2ddPO$3>o)|5T!o<7WP&EO5K!fVR3Im=Y zu`gV|&Rqr|RB5j%MACMs;dRa3>o)D1St@wn_$_7`|F$A1G4b!qEjpV_5YHf6#UcSs z;cr1DZVi#|(04DLsMYT&h|_)dR@GX~sB87U&Fbko?%_=)Q?$A6g_?H@U(UT80S_$U zaagR-q6;s-%cV%^EtoRQD;lXJ1F!V{+J%bvH*m@+I97P_==V*Z-&mVNyS$g=H%+F2 z;*?v>0Ej7tjSee~o$`%G4c2)tUpWCGrh;u_o9aG|PW#)D*wXU}Uol0Z??c#rr2npD zf2`Hj@bXFr!IsNAS%EQ1IFbxgz47+8oy&3MAcX)+Ge;C2zeBRf2#))NV{23H@!ovf z>zk;&GtL*A_&%Nj9_kfi?+$?~j;PlFIm{caq~EC!Zlx2MQk?1}3|?(u1UjFb8VWXy zqyBw#<-Z8}-j0>^I$`I41E4}XUzhDZ9RjK*$S$R{G2qQ^-|cJ$O5Efb9Lg1fIog*7 zX=2&0#0&*0D6w(M_fgf|L%NN>mu!e1N)N>W1I%jJ8C9!yn4+n9x)I)G$cjz{TYb&a zSY>LsTQGqzhqv{Ppmp`Qp&{#0nb8#t?!V_cBnC?`(Ok1p$Qn@`Lh>(R&u;SVw{amh z5VN+<$+J)@Zl{SguDt2}dYt>n)@=IMFE_{iZBq&0)}Yn^Akj2mPwX-SP!|i;!2x`a z7h>?!_j;&{WczipGI4ulA^S?LSsosUNtaqbCDF(0KmMejZ6uh$NZo-0cCbD=aNqY< zF0>rZllgDYpMVw2<&F;|dGOHk%-#JOUHbkRmsn%!)#smYbQvck>`uFX7@Vl(Y#|9$ zU=sbkAm{`9!691n>+ROzqcpWbFCGv4Oj-YNxvZRiZA69_fYkb|TOQo#7_xX;BgyVb&m?kCQINOHr*Mm9F8^~}MUPfY9{_*2I61zK` z@B)XgTR%I0rRkvp?Ep>FERX@w(ESi0!h%QrDG+wrntU!!ja6ocK-aST8;1X({qf9t za9x6a|TZE9u{zVQMB%O3KGn zT3DEKc)BuZ{(+jRQYd5hRaytIPQu~$kfZ z!GxoV41Js#=WIEFm9OKwd&w68PJV?aLy~Zqc%51dvV2lJGef|Fy)qLTC_?r%#4p)w zdbl9Ya%yt|P@W2{Yx< zpIt(O32?=J;7LTdV-2P+uD_!>GEAe8`4~j_JK|S{ffrQC>m&ASO_|3X$@{_9vmpN}G$<0{cVRj`Se#M`VB7!% z*vUNOQ0d!6cLGqryHZ`t2#zlIOM@VpC7mVHizNc^7Jzo?g~y-2k!)@PnsBK4^{uS9 zPx%(T5_v0ew$x1Pu8Pmxv2qSC1J0$I9NfVG_ z4rQx!9`@@7AU=TRY6&nO$`_FvOjqZ)5bBG-rVk+y!JDOQg)DYRD|-c=C6|Gn5~}== zZu4*aibS`40|v9TLg8T6al#qp-g&n{Zp_gq?BBAmbMSBpJ}H(MkC7Fj8KqCwxo(-` zZlzSCa!V=HR?Pnzi(ely+?n2B0EbWSV0#4N)$7;3PY&dD7YEg#Us_93mzAC+zz+PW zdyY279wX`$-zt!l0X^8`Ksv0Q!ryFjM8y?zCh(YFJ(VW>z4QSOx)S>0A>G!W$8;`b1ocS~YQ4)q#g6*Gw^G{BQJNQ96HfYVF(Czzy9Y&rO*vce zu1&$-jD9wN2o{VzQ@A+TmB#vj;za!OX@02!M7pi%Fo6SS#~OeX%5R$if~|@=9kN~z zz+5OO;4fkU`p*$an^RTv^NZRb@d$OBFC`{B?AtSqIhjA#*cB5rJ68ZB*;Iea6*kv@ z#6{eFndpJ-Q+jzaM+S-R?K>Yln@d$jyTr0X{8~Ptsi8jbY~frbETto?-#8(Ocez|N zEEH^?U^cL?JJld*X}cvr&8CC0%L}BF{*9QAK;h{UPVG=Q%ASYi3$z|)37P@Zv{T42 zC&97Rze4i^|B<~(UK$rC>bE?HG;P>5OtEc6^$PuYURhJ{z7Vs9v?X>^4cFLVl>E)U 
z^JIs=mjj>`)Ok(x><@DjSTWTcHYR;kbU2G$z%k0s*>yF|gXybGywXfJ#+-p+7Xdd= zD}@*UpPm?iO%lMRhg%_lA|Bdy&25305oy~Stn_fEMRCiSLFpjTD5VF{PudnVD&;#Q zU?e6#a7|1AGiyErxYhy;seA@H{(Hnd$#~ehc@jy_EMd6?}q%sSo;_4*Db58DgAKfRaRn zALf(~Ct$S24h2DlZMtrL4#=uU$tb&3!a5ks7nB{>P76i5e0b3K5?`@Iv?TGV6gBGQ zzQ7Z!LBshd^h<$|T#h0jG?ajwpFa^yY1_U@`{(GQQpI*5G#RWDynQk|&A_rfov1v` z3Di$ZSNJ)Q*mv+`Q%=4rDJ{Km!&v}c#|7}&o|PV(SUjd!ygyt=2)h06kRFyA*>yo! z))>F(4oX+Q%?BcGWC!|WcBs&lFSNa8kWG|NfhQ7z#|%INsz6$4FvUutt_ArEjI)db zk|vrp7x!A8x|*nyE2q*)zoYuTI|({YIi9~+x$S7>yzaIV? zMt?inekit5m)c~Xc2>T;aaV2-;aGQ&D?jSAA!ZYSb^`}NMtQl*pg07$;81+hd)%dT zmh|)rvqm2b?C0oGU|7*<^#*$>u_fi<$ng5wU7Gvvp1cOrv6KPIW)VmmID8JsE82`x zFawDL<#UIqZJ|-MH<;-aNvq}7GlP1ezBBsFizZFx2k{; zwRrfGx-z83<~wkuj$eaWI)8T$)EV-3-vpDB5}pw3CmMaP9XV|IUhDMw0+g4W%4*2j zMW4bRpA1fA@N-#xm!HTb(WvkFX!~+S0Ck=`670-Gz%1V^-5OWur<7qpfnsnIlQn8g z%TI5VFzgK$78WYOV7{=bBG6t!J6T!^ueV06A*6!DoYnYh1UQ-u+JG{c1jh<>^}G)X zL~o;lSJ}ZZMhh)IJC1r<2dtlEvpNUxs6Ava`Gd!TJp>H9<%#`dG3wixHfc@>_i6aw z)Cs)d;_dF{Ma5mtdw_vYdDGWPQDw04VMx ztW+v=c)29;oD}mo$`8}az3UC}WKzOFo|=_a!OVs=knRCM8Q|#>z`S&3gMbC6sIV}6 zyRdrFXK&ISyqR9=IcnB(nwV|-3+iv$T4d)k;vnT>!2w{?Yyu!9(TAXce7Vc z3HVscM_68#x>;Cn;^krKLdxdF?a2{florB>sPl4z&A9uYnGHV#mV@&rX@(Vy8ncq^ z6|iINNmJ&fN6eWtecFqS!@7y}Q}^x>Ghbb>v_#$A5{T_A1}RrfFx8P8S;ZUf2RIa| zn~>PCCK+2Zs4GoV|L%p3TmG&V1gjN|N(Lwds%Wl-4fDr5P3ktYG&-0?4{%%N4*GjI zxcDHpXHVng?Z0&y^x0%h-4TQ2(d*j9r!sdbnY+v4m-Pnt1KS!~tM$!N!Eb z&OrI@PZBU(W-QjONNE=Bk(_4Jnrvf;>p+2nQ$7z08C!X}?-(X%{9ZtTV=HvGbhK9Y z@S+|JDssR6hp$FarD;uQ`&7#UAagw-+D{XtqAv~z`>xm_PeO11`ZFK-QCYYF&6-JF zsL0AHfL(U8kUV(t|Dd^cP|imR$zH+He(wk5=^H71%?P3k@qkHpaN!2O#x65X`cc-= z_GmriGflf|wEQi|{8gM%B1XP<4{s#B`CbCCH?WcPhNj9gLu;$-lR(>$q4)6uHCr1L z9;q18yt-=5_Jv0gVrI(@yld-PYLbQ7aNabdC4!BH4IL+m{)Q%OFdzDIT(O$x0n{>K z&8ekYUEpfa!l%Hqp1*?DZGZG-A;M6q$yn8 zwA~zNT1Q@tvyYcgjo1$d6UU@nd(}m}^&x)N^{K@I?v{qt60zl2`D@2#Ab$hZT$-ed z`rq6G=nKvHz=v|zko|gGmp|SvnwWK|6w}6$70L>xFuHHVPMJij)fc9aIHJXXk`wdzOt#Q}z{{um~oWOWZ~FAMO%B)|BjHz8_K? 
zTL!|*Z>AHs+4uYn8X!)e$-g0Z$pa2|v^Guhx8eehw&@dMsm{bGWM`>8XE!jxTL3x% z<&fqn@6+4B2IWzp;iu1f+s;8k!kr`C!vDZ9TJ`<}?Uvgk=}jk&Yt6D?x|&sZpb8#Y z>8y=(o@u7nCGCVFq<*%d?bA)nQSLLX7S`kzRW{BJ87r(ethX+o3U20r+}vMU;`gJ= z+i5gG;SpalNWsjVKcE;11n4+H;Q*kLFmp>;QW!#DG}VdQqiDU+=KI~nx>Lb6sH#C~ zt!pBqjBG3>2ZyeqAva0_3A#J<1#$d+3Hkw8S_L9(qu=TGHhEh?-ywi61(+)#p1T`?%^aC)g6||61qC^d-Ji4x>>HYIp zoyAl=77AVD6Sv)$T>S>HM;V30%H0SqS_kWySnWiF@z+iC_hyo7G0 z)Af46Y&f$3S5>(LV8~-tAi%-uE4xt%IPy=#BO7Ouz1n2|E#a3rntLRu9L|Enst|mW zBg8es#0u+;>r2b0qjpV&B_#>q+yz*KMi|v8rAAam+4rQf=>l%;#=qS}O!l^j{0sJI za=^dMbo$A+-*Bb}8xG4Vi*>dnZci5<28^79!p6EMF{c+VJ2U#-1SiM;7CI(U2& zxI>f~5KBmVz50`ln}qw(5Go*%pmdx;^}2j5^IkGor>8T7@$_z zxO}QjTMp9Vr!?he3P?IpBc{6s*nW_IUkX4q)Z;Xw(sOMf@%)1#dkbYb--|Al7X($f z(Hq>CBJrF;B*YM!oTA08&R<6lx??|i#&sS+G2*e|Wgc5-fXmOKW|C4b15i zgU7er4-u7CH2@wSq=0Q}JV`fB$xMEUfq6gnQXU=ggj>*eWCf%?c{2`Uxx!?ns@4BK z7T6mgQ)5&@`ZHm9afM>zugxopWdf`cC#`okw**U)Grbv(JM9Tr1HA6EBaSctUZ`fe^ygq?`X_Nrkq2_7)O}x+tsv!$hc~ zIVt8vAoo$sAqu5D1aYo-dlmz@bs+)uOAChs@O|5TCQg$}uRERw5#ncm!2#>Wnr*0;vS;UHIJhW-zG8hdKN6#VXftDC!6Xyz_}-XV z;)5-KgVk(D({7eHaV;pgSAW*Gx;TwszhHw&`@0xDC(=N*e|8koU zIh?EuE*PG>P98O19TRm|O}HP$*xSf_r(uZ%iqMCFIgXIMSAU8M!_I> zjn@EM2VE(be$)WWb2rrKA<;@;GCug0ImPWaPu8?&7RbURDxK}F7%vGNu&1nYiYPb6 zXmg5oOVcffJ0>9D<22MSv@MffT_G@^1&pL!ooY};W%MG>B(KX2GiU}))ADVwf)*k_ zdhO{)mBu_gJeoJ>7WL@C7W|*D!v&GQ(7Ch*c3=yz!8L2b#fh~~lFaOxe~^Etj8d$h za?C_cCS-wv^JMQ^3^#^r6Deu?<6vgyS z421VK!@}&Iz5P2@SdZPP5!FX44k)bTqC5axJe;`uabbSJTrB zk8`EFBTs@kzQz4cc{wN>&of_FJn;j`|C_Av;(b{%pDng^w!^>ec!M;)=kKzf_^H)N zy6v*7i5ba}U@{3!oA(ZW=@xywZ}Sys1xSSb|L03I+r@|Piv49y%o}Ms4gMr7cbK=2 z)5-97014K-N0I9t{SwJ3YG;8!_Ny7JUGE?Ao0R#q?L5?2mc~1Ft3RRRjH}V<%ATG! z3r>G4+6zYzl;poXnf3Prl-?_x_Jv#xwd((U->MM4sp`SOm(vH<6G#`w<6XVlPPx_fHg3Wr*IE9Z0W0$~3PP1FJz#=onggY50;UT-*^4tQJVbO!Y$(PIgqVfP!` z{G>p~9%6p6L3{luM3fAKumA| zY&0FtAwE~0g|acWGU1a!mO>+nbv$}a&i4EMf_|45K)~G)1s9|0XRp)59sJ$dhsoOH z$skyxjuEYrTzXIMH+t7=X3E0AUver+cWi(S#kiCT{Sp<RwF~$07 zNCJP&zf?)*d#t+X+?H;jm~u=njoufTwDByD76AT^dy9B7wU69!W)28+F(pK%(TV(eW}GhoOPm#+bXACNvbS17JQXF@Y>{|7zD z$`UFEhfm@CgO3d((Kihj+;uLvXNOQ)0us{^hL1juF3oMOPT$h6;P~6_=Q-6I@vbQ& zb+L-0WKUF0u&zk`f}&J%U4L1sltlTw@bqdepk6@9_iQJS`atBB`i-QmCQqZH{L3}llVa7nK5b#5kq8p0*WKeL7#|Qi8jZo zwB_q`QA!g!Wike}me7mYXu_Y{r(B&{_ACcCdHNGrjb}#6j2rt}h=DE)E9KvDwZf;M zTOqJ{0kn~7dD@x$P<lRg3c6 z_z%~OGPXusSLKd<(KTh+VGh2mE8W`== zH3&1{`?#zGg|9G`0qD>!kgm?_^yAy8^uS#5@?BG%Pae~YzVtZ$>xampgs2P9J;vS2 z8;K`(9}Z^{|Gyst=&-=>n`@veC>}`CQ*ZH#ZarTee0lnv|MdIuTl(FM;x!(rl~v$B z1k!}uG*-QFpg0-0dHG@Lu(ro7b%Rxp9yCb9ye5qS$p|Y=MGam zu28fa#AKIcC6tg_OO+o5=XnNPii9vz%&#_1K#02D~H!Igow^NI;RNQY4KNhFsZmCuFz+>q zQ|p+3Cg~S1y0nk?zOU_K`bc$*=Wubs7jUzMKB$Ua*cj{j5y{-S`1_Wgm*2{8%(1tZ z_y(>E2Pn+6_{-rWPFd!ZhsV0{VHq{V)J{{=Q(=;P>xz@F#ZT)uv?S6!;oPzCk^tp7Q>8m<0%F7iav%z>q784l(LY-r^g1 z8p!^LO7H)>DJ}ZZUg7ssO6%H=eY}m*Pi~v!_RiT&)jF@Vc-bU*t(ET!eJ7uC(JzfF z-AsJiyki{Wf<~vDiP%;>0{>g5Ac0QVibBJc||i$DaQJMw$F+CsXYI8UrOR=HdS)5F2N!hlyY$;CsEJ+(R2C;)nT7iHFHo}EV45DJjG-^NQQ8rFxwHa

  • XzJcM1DkO^umuVcZ-drTqbkVVur_vup+A%Fq9Z)Xce9IuQ0!&*HdIjk^+#WDT_ z;Pk$%185Vnz$*$gSV~Q-$Dv_;ia$c|Dm@;k?!F9l&JC}&o5B76gSxQQFG#90iYiqY z*#U}xpI09gxG{79xue6>56dZJv~#;+{jd=yOr|;m*8kG$3(mLYi}S4uzD#gIkGu;y z@c{ha;z5Sj;pm+;t^7(bmaxZm7?;^IT1!7Bi70ytOyaPeEMvqh@CMB6p0NA=lNy(` zd;K0kOP2Ng(RGz6+rKPzqg0n3fT)yNerL&T^0~Juv$?FA0Z0*cR{a5!)FL+0kEfOL zPdQcd5*>6->~DltT3+2jVW|E+*GS5?!H~$p>8El^P5n2C%`gc~aks{6%stDSzdQ># z{_vBpttSwgS{%fl|5nWa`3-ILyuYAlaB22dV=s1PpAfW)e?qR11< zJo{H%2S%ZnGq`+?)5@G@tNL{?+&UIVDfiYcx_1M3b?BUNTlb&APU z^ck4k1lgSdrYIJmjb*o$SgsRjy)623)ONwJsjki#HcK$g8ha0&R9&q2(W=yZcpouM zuqv`LJHBy1s2kx$o}v3S4a-^k3e;Xu2cw1^eF92*a0CJ+NeN2L3%WaEKs#@cY?;rm zosRJ;*U>8eTNB8>O^IPO0{~p4vQP^+%K(?G$ebc@h?Ktrn}&Y!r}xhiBfZO14a8-^ zRFHP2qyZREaRx03SWmDN1D*Zsr19&Xt-7gSZPJT4;Qc(CnH|CgEJ%=jc`j+1@SnCz zD2^jJCS1*6c`EkW=i9TCM$MU0&%d%8Hw$KW8g%c0QYm7_O4+TUzaDOyZ5UHl&DGPZ zWF>pVOEumkg(nLDwUVakOl~vMvOQCE!{-}6^ii^4z?_M;WW6a{Ug_=%ymS8|$TVK< z;Hsyo+VxkpO3P-^`lOrKnXSRZQt*%*UX_aNUWOR?`|X+n2R~V0z}W1;N83@e=yuX> zG4=Fp0F+(6lCO^$UmYDbx)^_rD>b|Ry@fDSE8|tWO zQQQfRknc-B(49t*M)+b}m;!WKr7D}!?Ty<13#UJ6wr?VT09Hf|*`hoI{422AX#>|W z(APlQ5+D;y#D{KP@%LO%u4_q+94v6s$mef6_s8WhVNM;@p<^|CZf%xeKRF@{>!6AKiI6{wQGp&vLezME2^g?X2k}wf zV*-adx-LZ$!`mbdxi>E+o{q}Ug63oK14+9lWDArpzhO(6V^?w2(Q8OU9 z{*Z1^F`?;T=MP6w1crCsH?hMvrW%bwcT_|#m{I~!3}ABI?oQxs9}9vOHmw1<2G963 z5PvHL(dBd${dyRk*Wcl^@eB0U@Zf2G1J`|Qi7X_FB%!qVyM|kA>H4F9mb0f~-M4nG zdV=AFby7g^ENxhs_fs9prV0u` z^r8VZUI3hq$S>(q!%(f(hZ;IMF?>eVcYv)nAD#;w^p=CpKJxXQSxo=MLj58OV7ut= zfvLvCk_l$qL+D37G&!IMHY50%sM|246?eaW#M`2v_u*458MU)>kSC23phb?T&78 z!4@bRR&B%8Zqc*k4Wfn@peZ43hqgCGt9o^$B0HQ>>iaHp-DBk#Kx6R_37+P{H4EfJ@W-yw!Wm$0yjc(XG(4 z%Bu3%(8apojxw=jj>XQ#s{Q=hr|j_FN;V}oj%>kaFEvs?!UIKyCTaSAG*B}yu?tT!6>fkYmPuPew>S@8mW;E4G zeLK<;8hD=_&$PWNlrcMeSZs!SWuF7k)+fRiL_0~o)RlMdy8-_KRgE}K?L^R&@F?iX zLJ(h%G_!iXR(cN5xA>Dpf#N?7bZFAQE_Xj%w?eS3P1IKT-kk&;$LLn2J{RZ8nWz@g zU0{&Z3FcXHgBX!`0W2ehotN*>-mpy$rx8*FgCwm2pE8O(3RloP1{{_pAYd_e@fK@9 zhcTV$J3rDus~1Ni(~p6l1-?bK+ukB=pm;H^Lde7SPhu28Flp^T#*Ra3_bHU}D(#h9 zIMFj8lYUGxd&6%03CS#QZwY@kfoD??Hv>AuOrvK&&%slYSkG7vd+Uo%J;SYjeet>Lc&U=6ILVj_ zx))nP)bMCfou4*96$e18s#?SGB7L(+z!G}fZG8ite#(%v5ni2BjzaV%{tQv+Ry=gP zSenv3@W5@o=xd+idZ7dNKR9nE@**zE8_h0C9&3aua(Ep$H&rnk7=$L z#UvAcrxyX=0aATfdN)4I$bNh3ly&yQ`kjcLrz8gDRs}6Yt8Khz8=G%$e3&aut@PS?nCT0_YbAle z6vBO=R;Er2?E*ANlX*H>Peik)2^NI4%K39s7Tz3V_+!Cj)8gjmw(aJ8W(b8^{MY5) z`8M%8yMJy;zzxyWRDMUO{SJWgul)SzCGpkfYm`{E4Ft@NpPfF8nUUXo&enRcGSC4X z@XbM290wiq5JiU0XP~7^xa%3Tjm1bJ652U~sT+~>Sn`V7tl%4ey#021`(%%2KnN=) z-SPVA$;HUcKm(~V{EXE}wwVCM4p_1MDf~D zb!Y6Id1kq`@JsYBgumHBgUk9;vh`ZTY)Z`2BZ1lnR9MyuSQtd7O+Rpt zHIW3ExWGc5VsqMyfGK^vJSlWNCn@!#cdYcuNOt7Ud&5prkudT84@A|`d5jj z5M7QN?oq?!W$<|%&GpCJVXj~KG85n5KB?cUJs8cE7Um>w%qa@}Y>5w&urhUZ^>DGM z9pzkB-T z_=Yy;tG@jmT>wWgy19BG-o)46kI3|$N#N6|hI(Ms_I3LXM(_1)>u8>p48mzY-Q2e_ zJa50?VhArsr4d258w znu6;f4I6gii5eh1A(u8^Igs@70p|A$6VIna_bx4OhgRGP_G6AFJ$k=v-Q_&LDU0y( zn0?8qyBL&_4Sc1jhs#odL4hRPZ6P6kSaGa;l87y6!hU7-=K$CLc!w`kDf9%P7Btwa6Ia@&vi$?y3|Ey@{Ucxw70hN9)rPs^R&OIFl5wJ_n(`)82SB?` z;1yI84r#m6Fk5;<>t{5)AUid&O3VIi|4AkUULZ?YK_K#53CFH)Q9J1)34kfDqtR{q1Nx!)0VOPsKWW8VDB({a|mO^L7h`8O|Xtq zVj}aLx$}`$`~7^$W8+1RXrlfhP&X093NJqK41TT}SibjWPf&X%#(!k#+aa(GSc$6L zhUE2nAjNpUB%DY@C&#Ybc|vr^m*$)dlwV_gOY$a;Y&Ua(UDnfPQTfsq=?gF>(C|mW zQtJ^85gm>Bbv%^ikVYq=ZjH>!i4F)wXYNo3-m(y_N;C=S2@2Q7-0Ugyjga%91+@ND znbCt|Aa(Od48QoA!xZ!N!_^yh>;qF)tnVINj{_nN3%66(%8zrnl{GexM-LptB>HOe z88*irYVB|FK9=3GU#DhW=YVJF3Op+OARDzaDrRn6EXds`4w8B=OInx3Fi>saDDiP$%^n{ zSI--zQMp=!nS5WSGvune*S6{&aqrFA2oOj=miUlrly?d+3axDxy zql}7*I;D-5hI{9bwUcDq9ebAYhKwF**|Z&VDvz}}HoeK9Ir+i;Zg#p9Xxem|gp)2E 
zlWAHSa0{V5Y3|*$W;uY}1>RgCAk6bYZN<8cS%&UHg_b@E;$Ov}+9O z?M!GPTZ~qQTG6^2fdn3>Vzl7MneE|K8J?_b2qse2F7HOW#*4ko3wFeoda&~ln3=7e z!QGbu@*~NYy}ty)J#-R+ZX;1m0n%st0~rh9(aC~FLZ7rTR%isz67@4mDOLz3`+ygN z2|~|r1_3(im4Cj|jI$L~_J@W(vjwiSFG(j?=c43psJ_LU!0{K|=bvv)PFy+MasL(@ zNdRjAE_q`70VAbx{?qB*TG#FRb6#3Dw4JaUDsrj9?A^eO?Qj{QdWf`ZKxfWU>+`Z4 z-oU)U`qwQ|yG?$W@!~V;l%IsUzs1<>$r!0tm*SH zHi-7uud(yQIxd6vy(xToRq0c6GT*6mFR)l(*Nk@MV)b_lIokIg1u_gu!xIt{y8#u0 z^*Zqiv#f0(y)!EuDC%ibABbZL&>tbRWggJ>JL zN+{ll9z!)Kw$TON^A{63^ZtgbxfADWJbPF2I1VtCi4?D8UYE6MKARi^yEICAD>cUwWo_jb9c)I={a%;2)iNKNA?e z(b-T2QnnJZMPs*A`cs5`fYkjo`_l9SXV*VT58)W!k?K8!OwmeC4J+#bAhCjV6D(=-Z*{UEWshjc6_T5h z(PH@&t=O>mq(%J4(OdA_NumiUXs^t%wCe=sV6j{=bp!>916H7T?2JG80Pr`mH)?;E z)9cbyD>GyGZ4a%?ZUyjkiO1v_oI;7>9MQE|nJX?HEHH(r_vEd2UPt7;&zMhBAN^5 z{tRRX023r9nNXE&%?jJEOpnAic~cX#I84;PV8iPcPvwEom1v^IT65~zH{@H}Z?9AK z&sNDlV=NB{)idC8*0}ese3W8VGccWnGWJuR;7pbb&Qho#rYnz9s4E9RK;}CLKQ~1v z${e~6*#`~{jtM^LmhY&9|9HJ>HO6^wZVJ&pgAalcgqnzB3A(@KG6fGoJsQ0Kv#6RsRTC8f|%BzhM@oy6)%o@kdD$ z*pd(zfbXeU8)2DFqrCDyB+S{_hRP8Zlnrpe3by@5mOJiG>$`%Gg=XrP`H|jo+OSJw z_vC6FJ_ z;5m_AX>$^yFE7qcvFF}0$r8IMK+8Ev81_#DGsHRaPlHJ5cl*iPk3qrta21QRnLEf! z0G!$^l@&*Hg_bU8m?v=FyT0cihz$|>q=B&#{q|SlfjSc77_XOD7D_ZD@iO)dE9C&zS zdYqksa*D<&cHj)<0Gp8H7VS1$R#YA}qM{(=TLbD^0KERjp|ATd`IC+pL>OzGjvL)K z{U+!=YM}dT58w+KUNf|#9s)vbsc>e+cO~KA4HRJrPLvD;xX)`9Ayr(AhJhUR9~} z9ud)a(R*GtQ-Wu@{Hn@F1xd8Zdh@Y2cWPOQQ%P@xv#9-&Cb~k&8Ry3Q1o1@xA{8q#}F&#AhmJ9*_QmW{}Y#Fh8uB9-i!fWbx^vP`Zq| z@Pq73wz?C)oUO!5EXH_=b&~pk&#Z|aM^FwpIV)DPgw*+QWiEiGuZSslM7KjXE~{Rv z7+jdzG-}94({-Fr1TueeasTd@$r~~SGvk}wR%TtpI1bClNtt46RRUUc4ac#3a<6D4 zzw)yo_}oi&ib@aOx*Pi}i8>Hs<`37b?9y2sdJp6r$Q-trW9&wekABiZKJc&$svByd zi-UhqW0a#G<}CPi_f-B)(Z&4Ds;`B0lCrzYZ2c}KoNa@;2>$ebe6cQ_AE z@=lXrsT0RvGkGV)DfC*Ia9{vT`@-qm`P64cHOOl;dWETl=vBS;er92NpuIq zuO$2WA0gTu3Ci9McWtHl%!Z(gVdz(WL?(=09JzACYuni2GaIHfY5L1#b?J~<%1L

    cCfb@lCV$gx^rlta)R)uVG!;By9JXR`McIj^Kuq zsRpl4SJ_JD@Sf%3A*e7mN$!2U?X$Zl*CLhsYS>NI=qzbBJW3kSo3dBU8Tt?KidRTn zdshOS+~EjH7j;RNac+xDJ=(yzyDs^xu;w`Z%E0Y`LVnIIefLqI)`hoVv*x_DP95-C zeX4ON6YAZ1;JG&uA^RcFx1?M2>iB*|kXJZ0pv~Z)yf*H@uam@iiB;T^R&lXWG=tZ?4K# z7v;M@BO1q)4jWydoPVAy-gE$>)u_zfm5i*xGdC6~zoxt++!G?E;=7Ed4E66@y&0qT zf$*a`_o;oAwFL>u#tiPo=>+xbkHeEx=ZoW%lsMyfce_fHiobl+umn>XfFH5{vnxfWaKsd9Q34H(+RWfzdgLI1?#T*uu)+`nsIG;%&L^A2Ei| z_q3_2jTQ$BV>Nekin36#3Dg9au}L9Z80j6l)ioJd1;zugyJ|pP3z5u#7|gLBE|L`V zDHIm}p(Z9-z@^7TN#dIeyO)IPaNQkUC+Qy`sH2Uuo!T>gWj%L(S@I@-2ZTQPqKHZj32)2rO~fn zzC#P=cJbvX(O00xOtm0k+Sg8@cIg%T?2~MnRD;t2q_v)7fylvdo+p&v8an9Ie zPYWqD>_E$Hx9nHru0{z6V~FrZTC<+hLet1L~CFpwlO#6*b@TjyyglJU-uZR)j&t zCAx@N@DGsN^*W!$ayz1ncfY4zP~>1-l{N{6Yt_2^@0DGxy$tPLsidttyO9}M(=&Cg=6NW*lKeq ztzUP^MnQKm*TYBPQfu%%!UG+sG1%?jfl8UKjW>`_!son>1JzOKAjEv&j*a6bW^R%x z>jlxaE%=dj!{m%XZzx53X+qTiw25987$#(&&VVipHzp`)*eT# z^C<`8JCOs526$zV-X@Y=Mao3ApEI1o9R<+V2QrQSyOi*Tf}11f=(cXwN)U7`+|PM8BOYr^frU^u44q})Y#5>`Kp&ha(+8!PIWjifg` zH^*!GvsH)mKKSz?6}34b9ZY}ie;QV8hfu2+_6gaOW!N{t0C#tu&Jl{_fWd(DVyA5b6m`k9WZ z8)i(=$O-<#XsT!%T=%V|jhQ=<>%4~KBgy0e!-&c0op@*KgphY3 zci8wkb?p7Vk6T0huQvtqz3jR$%k(_XM_xq}TL>Zu`l;JLM^bRizw?Myr0*^*{V*Q@ zkzeg!vj)tV1`}t$e%O^Bi-Q%*L{E^$sm~ovejFSENe@4=*dz7dzMu7DD5L@MZ?3@ zq0iq0@wPO;lz_wcF9<3FQtif^3hK0w1R|a$JhY$>3@ar_uiY*l{}ff>Cw!U4jM7A; z+*(1RYzddN(C{@AJb=CCwFT!nc%C%Rd%KArwo%qeQ*523?gWZR|KZ@f+>{_vc;Lr5 z2b~5~iHOZ6#7fnrffD;`Uu%|z&?h~N%WxGH>zuo|kSb=cuA~%%GTG7TaAbuOG0Tp} zea$UxW*f#n4g?gae?(8FkKKgFJ*Uh}wvZiSCTT%>wB9jyw_3Q_qxW9uqlO8e^?_%- zFpZrAq+g%nEDzaQI>kBAtWVnrj0S;hDE~P4NYN`4%xOgbLjPZSqBq~hx_lXyF66Ux z2_+ch7zcR$o|$pVeE3!FrpV+HrIk~}k16ZiWBhB+A4}jKNWI_9Z|#q^iUpJ2AYs4i zB6I36`E~2s5hiOy25v@#^UXH%17;_p-rzev?s$qu^z-ccnE*Tl@LqSR)WV`$0&&Ly zwL0I1dp?X@qruC^K1nRK(X?xBk%;i1M7-gChUW>sh+JI=HVa|FjTaG^gEl~Sv~$-h zrD5k`2XUum>*EUp`L_dVz67ZOd64&=8Xv}ZNAb^a`qK&pMk|`%X{(KGmRPopvt+cA zxIYcHuJR6C5;YnqxNS@s@vP3iUb@j~@}&rlF6rB}ADg+FU5N`oKLj+Ky7%rqqA|al z+zf;p^pm9|RJfC+-e(5}=q!1Kh3bH{y#{bJTC5a*!kZU_H@m;^pk{g1&|%I*^c2G?OXkYEA%JTOry|-15zhZUhGmR{gB`jb0>^%nv8d5_YV3K3~ccV3`4*DBNQ zjp}ndk%%7}i@4)+;+DU4)-G!lm~AT@o2ch@)>!V~i`)Ro;`uL6+}B?szAOf>zc2YX z2&HWCh|gSW_4qN)IK{a^EVt3OP}<;gqf}clRsN?&uxkG5V$7cI4Wa2Ec}x4uB;i!; zryxB9)Qd(W$ec+qf9KzR zp9k?eoCBDW7w!4rRp9Z&AS~-$n=nWm58-v&{3pEQJh7Ol97BhPuub zZu*=e<%nXY@R^17E!-Si8Otzd%1`w`JJgK%YO>KThnIsFK?Oz<2PibT1KZ+jmIve35}u8Y{Qs90F2rRU0_B{ z`4<k>@ z^IH(C4zxuKR9u<;t-JbG{Zx#p)L4i|Tw89;TJ#*@IqS=1s(dhLs2TJnyzU zY7()nqga@8`LerLJ9Y~X{vTm)9TsKRwvQ45ij)E(AZ5^?QcB4nCM^OZF@$t0%+M{N zs2Fq)NXO7Mq;xa1G)Q-Y^j`SzTEfJy4c~ubRs~E4b(B_>@erN*KYw7YKkiP*D8k!cpdTh?QI}*VE;oO6o?QVxOk8(;s%YPE}x&AI^NnHT$)~+zxkEoLyOIz2}l%Dfx>`7l!(USjfIvM@(pn>b{YBg2I7`u^+vU3kL8!2!BME+aJnY- zlMkO6o45bYLnfx8cr?Z;V=`C%cv-FvY1q4sC{S##a`_+bhEg9NwnTIuG9G$?2JPfI zm#RadXlgqzOFJIH4z}&KvX(uPQulox>%;bO;g=1!4`m;m7FDsj>gVr8wNR5Y`^hWs z03n6j6(--36oVOf^sGTO_V>2^y7(NrM8Eo?SId}jwo#~!w)uC=y5Eg*#Lpk~ z0AilgC-yj4;DDx%FBz|cHi883CwB;ySl5w6d1N&gU+4QmGquyMh>v(4iar^Di@LWu za{O?&^6}xFlyQqYRUYL*$Q>%1qfkyS;7ow60JTEp;zv!4j-s+5#%%*g`s>5fBu6 zGbAG@wbdpVi{$8g=9rx4e+h7s6pHb&zF9aRD}SXgRl_w|+EXj^J%VGd>9BO3Gv&A&JfEzKW!+|CLgn(daLcRcIZ;( zi3A35Y5>HF4A0D+Y?m`R7b^`CA67i}CDd+I#CjJO#1!A-IQ~PP zLKnPj820YOWhy)4oR0_x!Ig*9V!mQzLxFcien*eRExP3IDi9qf7JnNT9hfF1F29_* zeYl~Syge@l5Fph%Fj62$=GQXZ2YR>8jbT7 zR?2!G*gQrX(Ac43Px)R@cCYS| zT}o`8G?rjP&L)WNi!5l`D-QNjM5h4HkWdl^G$xzN;g0t*>^8`}PcOMEge8F@r&CO| zuPC3|Vt3L<4VD%GC?^h8C_ghU$hHq|>r5&&zObFTD>WUul$ZfnA_T=!0fWO}bk7|) z1*1sY-pTt3Ta&kz{(AU`EgP&CB_TiQ`i)K=MO{VGNu) zI`)^$zkJ2^;7MlKnp@SuO|SE>fzdLC(2150E|cd=+GoF5-|i_;FxV>B8m3yD07<&y 
z>7M8C-8;@JE`?jYBE`F4{Y9f%DVaP#^JJ2EbhUNqbj7*gIdsgA{vNaW_NrFpu!&9f zv4e76Pz7_!IIGytsc^)}`6bg%mPDourQ1P`v5svjryMsPghtHt@SaW{oVPNoV)wAC zY$hUeyrsOb_q$NOenub&SqP-tGz!f_q_H(oiGale=zmNx=?6iiOO}>8rM8cwACh@1 zOWVM;mk$x0p38I7Gu$ybBGMx8pl#b@@csCUz%h+hc-iNc>Q;d}vYY2_1#N5`#oR{qIc zKFp|jNSqsl@oQ(W6kTHvGSPKxufA*(k8}KTh*v)1R0lraY_lVTNc?prKxW` zE$Y3HLt*LgW{!eJOyTXZTsTbGfK#(Xbj$s7J8UeN4{GPwEIfC!pAV+Sl9vM(_InyFHGXb&SkcwcO94d`D#m^ zb<@)AyAN=mSNK5X4T3g^et6xv*~(6I@YH>{Ak~-xpfk58k0Rh^YPtx{_KxYC88U-w9CCnjZC& zH@-ctoHCAJm5+K;jcZ+$m`kDzh&-Z1UJV$it(z#o59jx>(yq_sw zxJ-+D_Wh=8$BT8@4m#_J%TH;_4~y-|Q?gbu>BI3@Z2Hndp*boQxtLX~xbkvt!L49Q zL!27o1OhXoFnEELt@-dgRj6WhU-(@E+o4#q+MALwo$DVp!uDz;X$Y__xKDQxnYb8% z!G~~C_v9K8$U^{0F%`g8tA|=I2N?EwZgN8mFgcXOyIsX1tN`7g!O|3%Do~#J-jWu< z7Aa!IEjFSD6c@92FYShFNLB6hYX#m3VUDhOzB{wjmtk_e3+7mE21-Uv^@Rv{VU8Yz z0?iK&CV$anpc2)<5YXi6;H>0}5n@gUGNj$RV01~Nl>_QD0Ob7|gE>MML)~1IBzJ;S z_nds?yjaDhyfCD64pcqyRv6~@Vo21~$22ds^dal~@d0ELR$Di6;a4S}6-ROmcG7Ke}`xJq8n@>C}c=0+D8xW7zoIN44F7jkeMUvvw=L-85CY{Wa01%RTr(a$m)$+!h1 zFzkFV$Y%kr9WMNo)?0%tOUAP=&>7m%^l_*fVno`e^KDg5cHvm$;R7JATrl~kjlPZe z;U+b<``Dr(`3m3nQ}l0pvV$`F^Ey~$GmYd5LZ2^XXGbH@$;hr8tH_QE;h)>fO*HPb z72`j)0u-FxfM%!eZOTKqrpV{Gk$#6mgk%GcE9rEPSHuby29Kzxs?(gONhC?2K$uA) zv&)fQI=(rmlAj>Vdc|69{Hgaa&Fy~8`ltryi)%EvCH~?2=_ z^d23W2c1zacA3CMsgxivfztHl)kFN1%M8C-4NgZ)w7(?gEhszh?xWoJ6Dg77`kok}ml_pMmu@X2 z$|(0IeRgx7Qg0q;g)gNfX2o$K3wLJ4oKD71pK*Vo;47W#Xeh8Ur>A*e;9$#{S-3u- z-DtF8G%L85kmJb8=-Co18ii^D)NLKK;0{t~<-YcvfAjocP1zSiaqT{<@q1||^=IM| zt*x!VritwySowqeiq>hHLOZe#P*IjoR%kQN&)V^3MhzC?Awxb2PUS1cpI$fxeGNBX z*I1qyWv_P)+i(K)&1iWbmS^wGFfmal3b z(|8y3yLELKXAgiDo$ms7!*^D2Y9fsphb=VNZlhy~E2Coxk3VWpiZLzD0}AvoFlZnL zIJBsT$Qgv1UR-Cw%=yxN_pi8j@M~PF(A)@hRovQ#nnttTzC#^YSm^M)Rt8MUW_{>m1k zS!&&`xAmR~-aB5IoF>^w!>O68XVK|6>5K?G;u90P$91F2qFqPfTFT-K2eR;FQPyeu z!&Bl4q zJCmN(<QsE^%)Q#;gqlgkyRblJ74ohxuf0Y=(WN8#=EV3pj#};L7XehzwA%O`U!Y+ z>LW6#iHQK$q!Prlm#I^s$*vg93DuH79#>6t#4Ic`2dN{y&E+-OAd+@5&(W zhl0V#0AnU?pn!?Gt%Mj`aj~+>n$NXRm;X$QEMf7D_jcV!@oE}N#sYKJn0v*>>&Bn7 z@nwuL&+QnH)+W$1f;LjQh2i3CA`8?~zk(38Kf$W==@hLs(dJ5Bo3R@@qo$Fi&e1eF ziv-w2I=!&=DqP*CEH4Sksr-<1ekXYVA#xX%8%BiveG*rj4J(YZM~NDAs}tKHnP_QLwDNfK}0Zv}e$H zIcd0ivPUJKYUdzP>$Mj46NRuTi6TRxCFfEL{3hQIJz4X9j{FYX!h+1tB;(oxhpDjc zB2k+G`Lxs5$BAx~QFrf~RRN#joO#29gVJDcdpz!Piav4G*g=TI)|Zr^mVSrV*@<}( zb~C#=KzY`^mH?4eaU{b~lBhhqt|9a2IQ>lsg2Q*4!ql&Ch)=BSYFNGxwt} zAa`c4&1S;uiU9v_EB&d63} zG;T^JYsZexmMQgT5AVU?rT2qS%6LTJ*IS*nNl_V{ft@0G%_~GYM0g7y@%bA2J(EtA z^!xMN!N>8iBbwB#LQFC16p7R>xUk2<^M^c#wRXPio?}Spx^Aw@bNYzZ<|v?f#PZ)Q zw{_U-iuGz9d$7NIq^PWf7*=C#u4XyF3_6pYpCJah6J6^g8?%$2QJW)Gv2sIx9Uc{r z-zDMG731%eL9zzcQn0cHr#9A znATO5IXR+Zc3?Xs5>;RxtS(X&&(pr1WR%0=PrOoQQPobc^W?LO#H(IK@Cs4;JcOvybJ0BQLiotXwW7oJbwFaf8=V1&!f|0DD*Q@L`Fv8XCTmX6 z8{RC&=}7>7rFkZvJ~d7r#>2Keh-D>~$$#jARz>q<`Ja>-+2PbU4`sZ5aiIq^lJ}19 z@R5L1DZuT$Ec(*u{XeJTx}bc%CP#XW5a6WMJ4oBI5NS4Y@$HQ-VX<$y0cuzs&u{T|lLZb!S2Z;RmwBnfU-tk3mjEet#)tsfW+AZzkFf2=j$a1Zcxkg`@ z7ACvhkuhy}T6`~qRf6g!QW!K#cEyi^{y|G}K~RDev-pJusJR(PyUqN{HRxAdl{%w?!5(+6p4jrvn?ts53GfO}Q_0WH{|k#UQUEMUTZ!cKY5wIN0%6f*RO z*#1mO?@7K1pZG?Mnx8MpTr%GhGBeWcng5f5u=*1XL00m}A>E=;&9(DIzw#})RHsz! 
z4{a2*?kO%WeMn((Kp1Bk* zwQsLFWPk7}+r_a4Th!{hs^z;4`-fn6iu3Mi?d5d1xlWLUNTQMMama?l_Kt6RE z6oJ%}A(Gb@p0gmUN_P9%A^X(s=Yu9Q)1P3p*RY&CyXYyul4C_tql5=DCq824w|u`e zJ`D_?=Da4pl(^8+&=p*W)Lr6S2C;($FO_!~^Cw^M%1IBN)SSGI_axH(%#4q$&%dJ^ zYq0OcVxU#`_$@#WZ#*%n@6+BRAoMVYViV8)kz>UH%&uaqD#~y(;KLR9-nTfCZWC?F zh>~gQuJ{uHNenq{My3^KPPhKVVM6r>Z%!#4&DbJx#BM7p$mnQ~1p<8UYecLM2b~RK z=D&WpQF+qjtTONq;;G3!>{5JT zn8(+MsH&ViT`pLv%th{!f&Cv80@EMyHwW|H<}F6M81o2WvJgd*NO^ zD=VBDH}EUnFVRK)4A`DdNA%U<>pir4-4Dm)21QOtTL_(l@UaO7HDMIKXnDqIcTdc` zVW7C_1Tmr>K^GW@#iyL`y6?>TY#vg-&C~lyu)kWiHto;iPSq}!i0cD6K_d{wZh|pX zISq{n4rCH#DsAR@0eeH;E{i6M^0nXvS?LE-9VYxrsN-L^hu3$nlFgKm&e;P7-&oZq=KsYGgKHCN zlQ`m0PT)^@vBpCj5-nhS%cv{iah~O%z(wp(fmzV)tT9P_msF zqkOD{C^>eq>xaM1$`9?>tXbP@nL#n9wZw?N6QC5QuK&+YUsi{$k^3%B$d8KP)a-N5 z<#e%g5;%88g5$ygotFKZbigy%^W_WtIJX>a_WEroDmg#&2Gezg)-5Fl^53s=3Ax#`7ro2Vk{O& zCvhUdh)c*e&Ucc?jBKEfnEsx%HUl7;^Y9oSdWw4R4D||)5fGH}(Df(Bc9SfW{{=tF z)fmq)M(^ckMA#OoO4cVbGF6gP9P`{w2Z3(n7q?Hm$L0}XBsAEAgpDPEtR8V;g+Yg!V66n22jg}BPqabZoFl6s1b5m&pa3|4H~^s)4I#n`(hRT}=LZ&M zd~9h!#_~V{?C|)}kblLNP*11(O4Le;E|2~@3^Mh+&R=N5sRsw?z1?u z-lH_7zA{e=ep>1YVQ}OF7t{kmN{mn2~uB#68o|xYY zOLrc^i$i&dg(0U2{mS82aGV=mxPYYX3~yTd6}hs%bOX(U7mi*dTBhuztp(q4GA3R^ z_k)9(GmxZ!jRdmDWF9)OjIRCVzWp|dBd@>z)!R)xoZ4FkOQGP48R9rK{T=cQkJ|Hy zKH%Jd3$7;rd#hf9OLcKOmI|4|akiy74d%g19`5I`1fCZTbgEC*weLbtG+B)s3uUl3 zNdf&uDgtC>xG4P8_DmxLZA9R7dYSk3iMT`Sl}-|eIRAB%RN4Fogqcg;^+^`(nf7RXop%>YugH_PfdC_mOA+cQJu8C8s)x;Mpgq)>*&hOJPK+ z9XE$3^wHL*|M+FIPo7+NG@k|(8x#NVYfP;d zqXRaX+?Xa9V^F{Lcm2StqIq6Vy{@139k~q_=f-3_FDuRnLEN!k{HI@_VB@YTZ0Y2+ z{e412y&yZcVRU=Z)?P3*5(TI=(<1L+Qgr z*9Z_v7MDrle3(EEK$2jc+pn6){}TZxl`awQg!%USgoiX|NBEm+@A3ZC6dx-EHXZgF;rxx3FU@&CPHULZh(^g=*y9E4LKmUIAJI1VN> zdOiZt^IOc=ky1Nlf6q8=_j(S>i6lS=-?+)YN zgsp~~fK+B{2S3vcY!CDM9AdQO?Yl`Lr&W@1Pm+gI_*Pf~gnkq5s(|HZ77%*JRfxq! z_)DGn1V>gyX3i_fLc`|E8lWHHOxOiDX;K6j`sW1x4={IKMxqXPto~fcl+(c zr-|37_`e98hlxbp;Sif~ce&69o*0o*5O~v>CxcAyv$Ix+u+qdv`QGV{#hWKBzf!ud`33+iz!<6p>YjlR=ftV~V zWC!Nl`=+!4`C){o>Y6yG7vU2KAga8CE!k%ESUe%8qy@1yaCfP=T#@0=k42Jc2B?frJv*w(zp_*{v-q`T1wZh)OdOK3NKEiaLPX86=V`?|Cg1qpg1wt zuT4nQ_Rn>16XNZ{#fOB54gc1oRl$*zE=tNjbNE)p1PFZaQ|bsFnGTce4wKnQO6V!T zV}2h*;)yx3@aaO6e+h-bCq?O`b?A}_A+E*#uZaiM{e~3xK@T_}92YBK(zsU{&h5~j z6*Au1GVQLG)H_ze19Z-k8Vuh^fcu$*hFN8)J+(k&p^ExPHT_L`F40ftom~*`PgTF* zyUc=jmw>uWR(Kx_&XBF{F2Yu@^ZYA{?vP;pCt=r)CxPmeu1mc)Ci$KfuU%9X+egi! 
zuQOR%%|rHU&9i`RaTOCYt@nj;=G7XrZGz41ySNbVRMNyTVCB4f{Kvy14*6AU3G|UO z!02iS5HHyN=Zm-Rqmu11|1beG3ck7=a_hB%aegMR28e)OQ?>$$I!!!}7;VCET6O8v z$Rxf5l`-(9z{!wWd{QhunQQC~FHBEwX+feO9mrJrttTqk3HadTyLPIvd&4b013KT* zJ~Om@O8#s#rqw=O7Rop6FR~WUG2ER7f+VKJjrgPPGXLo10j&;-c^*wVP!xaxBigul zmyr4VM{ok#dQmyitSv7_S_uC2gaR1Dy7@UjoxtF}UBb)m3Mo?fy~( z9>*mjMeo#sL6QxbaLJr(7x?~mUzm|f?ZEWMhV0gX_Q)3REA7)#S(+{Bdz!6<ax$ z9ZbCQi*GuuRr*s>LKYT}s@)S%lYng|3;SZ}*{YOp=_#5A``c?7tN&u z8juOw?k;PHmb%d4XmPZ_F1~G7Z?k_kJhI4cnjTb&3>S}ozqM9*1GxQf z3M*jz9wF9JX^ohk(;1y{9M`c;_h5N3FT3>67X!&qX+E`G+c+J2muwn9lDCJ43n6*) zEPtG!su)jwX#t3%HzkZatH&U;^}-CUX@@ZDy9PvEN~Qc>4&w7GI= zqg$e}rY;N+ASM0vUv{5%#gB#DVQz~AIg=@uc1SdzoDNsCzy>8FtdnnPUEcSpH zyNPFg$9dtZF!pRg{&cMrU7cP|NQTDygBMztF_4Jy@CyW;CpoiWER)>Gw ziCwhf;K|%`>SAs^bgh{h9NyZO0f&gF`>Ec60hBOm3-zZ2Md-keiohr01w6Eryyk<~ zMO)Q^o(KSkpSQkJY`9Ri)&yBT^ycn%Fd5nu^V|LtwOW`;`(pUXnN9nAVemS>x(kl1 zmH>4VV6XRt_yZ?%7AqMupPT_5kOF~1jt0wL4EgTCXGS9u1JET>Cs=llsL?A7z}lt( za8m$6J$j!q_++vI)KZwbO4R&&K-W+tDD^af@a?h;88eU(>JVsSouH41U57PE<`lkk zq2F5bXfA>4hITSmvg-=DBaDTtc(=yQLz`?v#0n<5{xNv02aBjwPMkKg051Jvp<*xg zn~U<~nzZSs1}KWdLpb5x3+EK14#`veZ-7zd(;;~87c9^`6F#n!=lUCEz{v+kg0BT2 zAmp{HZa`zAo)^WB5V5)mpieRl+zc2hV1G)%^!?*Y4#F6pYhql5E+maI62WOC6no3; z2D1MbkRfLv9;-bHY?&GBXk(jY+E?_&OUENI+O=77d9q2ZCEvCYY)@4F#Wk1 z|1Jxwyz~eF*f5pyIXn5T9bw23+|2neyUvaM{D5J3b&g`b1repPOAg=!VT3(J&NUtA zo#zK<7GNT;+Ona;#T3TZ^AU< zV*9^($CYo|(4P+R2<-0=Z$d`1)Xx5a7v8E>{GDaEf^L4j0@@ioTNHn$6NQ>k#z{H05W`sg7%fheL`%4prm5V!ZjZEMhGHL;NJ#E zbBXcrlCHlM=q=gU2473{SeunJkUASA4Zf!R-F;s^krauJ2L+9oT{dD+Hh!Fshyfa# zE%vBC>-vLb2L91;5?rd)kCS((THkio5GKvx2TjW6YcJ$@2QicJq*nKp7e{JnU_uM6 zZvl^asoge7hH4c&OxM7BdM)3m6niJNO1NO6@4{5(q`Gh`EiPnB@|_D8cEam706M_g zT>fv)MlG6{k$k3Rkpijq%B^-$*QF7X>74;>4Up~)q-6u@nXN$ecp>%o_~h9)OHlFr zbBQf7pOkhVo3gxCo*DphM5VSfvMNPTYu96_PG07){FS<=DnN~iIP;}*4#k|%!JH~S zw{MnEZ6AaapbOE%ARJ7uz^GMA>VCMZl~mrvkj!!7WOWz#+Z7O1T2A&iE@J2?#e*4K91$z-(<^cet$;R1i;hP zGXR5kd3Y*ukj+s+KxiWS?=kH&Y2{bc>^Jeb3lCxdC#;cCnHW%-T^=oi13o~u!z!(t zAwQnt>Ed4kXG@hzf+1t#y$GQ419v>_g|FVkC6QwVxou`Aew=T1K+9w-E)Dtkj^x??SLrKAwF=8LJZ;)lom#hRRvWO@{~sU4z4wHUyno%C z1W7w)_p)iJA*&}q9)4m!LHG#Gapqpd{<$$tjvuWKm$;~_#wy*ktjEf^*oXW6aj?(> zZ4oXF?#?`pZ7|>D3q%Czu34e!&LqyRJmHP)Bl`X7e|iC=#86Um0d_ORB=+_zhDL?m zWbnAkgS;K5%_Z_oGB~MryLG>6rCFEu8EiVyd-ZdNr69;EMjS$@kMgydR{9V^t$kct zp&2|ZWW}a=|Be3R)HM`sN6FQ zoQ$cdaS!fO3k09R!%_gr?)B-@CQTCAxZYJ;&D7$l3JYmIPM1wPUb**eNYkyT_Tt}K zkvwT{zb2}nzfaQybQz#Z#$%$~tw2TsBym(<+qdjTnI5GFF<}MnW>N^6R%d_`dn=?? 
z1@y9^cAh~nT9JvU)TS~2Q)qL$ROGd(=@MJjqF@o>GanM~9nXHA$oRz!_RpD<=g2L0 zG~U6DT9(Mqpc!~zU(sV!ym$mN(FAbF$J~%CYtD9aX7}WA3C(EHmzkcOY5j2U)rBzk zA~-hvO;3@>r&V`o-{-yY@`1wcf5BvrqnVTMk7nnc+3&Y^m?z|6T5?xfD>hs8Yc6nJ zgmT$v7zL>*0)ujHPo0b>U^#RA`?N3fWAu&;UcVxQq@Uk=!vVc&0=?F4pkNB73yb{d zg(ms{Va!<*6ZX(wT|I)$ox650qtMN^y_(2A=2Y94D8u(emg@JoNdV0QCogvi#+iNTXc~O>d zhQ=`<*ho_j;L>F#W96TaFr!8-S!hT|Gho`}1#L6M0y02`%-32x(@?SOJQXE~5hbr8ViyM)OMnPudT; zrx_Gty;Cxx{bl~bndlmAY?a(Q650K)r{p}TAhCk7;G5^h>_{~#WcrcRtl(R@5Fknc zJh(S9OaA-lUvq%8xo@d99B_x=IZes_L4Y`RXf^huAQQYRJ$RNf&;==v$4duyki=&h zB~r0nV~_pAwz^#Q;Sg`QVl7!L^jw|s*AEuk#4W!n?(n`w=2$pA>Hvj!hSDF6uTaJ7 z)omw#=2L#H`NOaOs~)M6X%R!a<`0;!an6g98hKK!JMs9;@4t|=6=tKn`Y*8m2aDG4= z^lzD9Uz;t3Bep5by!YF+Skd^Mc>7 zV>#g%x70WTzXG2lH2AeoaIgBAJ0tap734D}`u-DhQ+>dF8lC#>^yBmZ!m4g@ zPQvq{*hzx<7{_;63W2AxwZ&byc%Ko1@B(ETFL?|;Q&1-lS|nRsVGMiz^CG)YHDS^u ze(}=C0SK@x=3(u|Nhlv+B|d25!fvs;*g@H)aA8V6>RxOk9^_aHJ$aZ^)(0L))*F_b zVW31bh1shOy^9k!f4c;aH zyFF>3HS^eE=?Y8P`XJ3;_Zm#YIjj8cY%gTFFkgmq!yVQA>3f&6h|9Y_L5@|ufrltt zz99qB3IsD$>@MtX?BZBtLA5^=`6zea& zhBUieq$@QtY&BTU-c+_J%OZrMQbf4=PaY;?oF~Y+ zz^Ph$Y8DG;?dc`ayl2W3kE#a`4eu}6B`R^tB-E!GbN{bXWFB!?s zT+d&MwTeG`WgaQxSJ3r-Vqm`SNCc&GI6GE6RYdKn2a=7;1!Ty=R@tx$oyR7_a-dh z+r8K2AoLCJ^UDLso>9yY_dEYE3LVN0hQZg(S6GUf6GyYQIr(pgnl!$JA zlT=GOc0VWX#NsM308!T`@vWRe;<^7>c;QuYwtq=H<6#qSBwyWdjNbz#Cnepej!TOOQKxguS>kIhiM*u+Np>66P~?v6PoAJ1pzmg9d$4UL zDq@jLq{Fnl5g+3AWHJLmnWt;D(yJrla8ymeL4*FaE-iksuG{jJu?lDF1G7-xvl~B$ ze~7Zic(}oG%Ku~v<<{ZrGkix_8Q_^#ug#q#J z>M6AQc(qz`yhknyR?=NL*k>F5GPhN0*-=&mPC2z8wcxBb^+*7U@71EVv{J~{J`%eRoL2vQnE|oonnSi6iO2k9%Ji#_|ia&dVJ_-Mr0&I*C)7f z$xoo94Tb+FVLQMFazzPswTS(Yaf33Q%WFW-@j;V&l8l#GC<75^5o%TMCykhVxSL8- zjafJEJ$X}4l7t2P6bns{eBHwX4Sb#~#)3c`_F=q2*1ZCXF6^Tvnk+4${1A#;e=NWj z%r7?3y}a@MoU$QQ1%Y`z9Xhw5?UPtz~CJzB(4rZyNKNq`(T)tm^CATmQ8t1cC~N z$Uz~YA9co*#ZY~;lp%nPZRQ6WMOy8WnC zCt6zGpr(l3`uW}ZM<@lWSQ2P8i2$;*1-W~tw}f{8rOMg$iX5BEPl;EJ)zg!lh(y}J%sgh&C;5T-l@%0)VMW&8 z>}sX*U>AtkX`*vvUJ*BRPu8e#5EX>uKu#=_L?zqq*m@Cv3xo1u%2UC~`cw60ssk)a zXKCa|T`po@FarFiyLp(q`Ks68IAIrAdjRmF&}n>oEBIeeLyzytyF%RIIS6D!EjnWr zjMCRUum4>;jf&UMxH}=%)=5We{?FHyM!^TXeOuQ4*~>B%=DR!q6$I75iL28kJaqWm z<4o|WBjcY;UWlPd#68#N61D=Uhr(sy${AzDo%nyl;D+MdaYYvz`8|o%-a`zdm>Y9sUpNN9srT|G6oE zPvqjDO=rf-F>^tzPI(Rn7YcTeeDlh z9ZpK1swnD(IX_mK``;>art--m>~fQ!ive6_v3c%*J9tJ=5fKz@O9=MK=)za9WVf$^ z3L?#Kr$9FGHHJSeb+uy4=m?OpWUB7weKpV#7Zf0zG1F4FaUr#)ns#;+Mpu4H`9PWk z$$5)1d`V&lG081x#-7n@sh*b8S9nQgW@!4kty`^gsof5#TQ|R3_ef7(SxP*evu`uu z(0vf8+D;MA{q{M%Gr(RNwP(}MbX8B2}!DCvFbH3643Z!8P1RgHK;-XZjAg;F7o&Q zRSIR9LG&@x8a4y^!@xtk0L%YKQ$7&O^f%;vq~c}yGtD)W1oG6=aQSxogasBty@i~2 z#sYZii)A&h25##FFUE)`nlD^Vix!r_1fHs50_nNn{^Yry7YTLNPo(gOm~ipI5@@QB zo<#c0@5z^BlAM=K(_-)Iq?>epk7NmL{VZ8F)6ti)EmO8FV@LUURx%i1Bsp+&_rX&; zUou$g$eNDpo3*eo>R((C&>wQ#pFJe91p;FkTWHV?u?p@(Gl>EX15rYvr;dqpg;`3B zj)ybT;!f&7l1`laJF2wd9~AGuYjIj|aVsy2u@)+>FNx!5KR5R_7;?FSZ%fJc!pLr{ zw%&yx7;J&-E9IjQXrKus=4IJp=dK>@VDNL7*NB^bAK$*86KwDUvq*Pdzet9OD$q`WOS$^F7waP4OFEiGrul$hC12k< zTGPCg*d32S_F4Z@WmE^?v9?y z@X7)IW*A(*;-w1z+4}H*z#dSk6)gc=1X@dsB^oL>t}&laA$jtJnQyIPPp3N(M?1$8 zJ@;{|#tG!;@hE+235WcEU&FV)V|HI&vu%rtcHVqHcg<*XyAvwt%$Aa^f!Bx)4$B@; z;&9^z_oS7vJb~i;esi)T=LFg_4&_P5IWq?u*Fe`x!;J6HJ?M^;BWK&z$W?aKfT!=sHoPQTN#qoWI z7oHfJUsq4}@26of6r;6lq&@_k6In3Og7_(FJB?E)gy*U>(?r%?yBDl&q<>zJOlvw! 
z=P~2d%BNmSy-+DT4YM6Vf~hf+ExS!{Uk4 z2osz-xP(c6(Ke+}G!aF&m@t8SL}Pt@VC%&~52%@fVZOzkQLDU2L!FDaP0|x)O$aKYn z5aD~&aCP!Gpk&eo8cjDK=CN`hGq;)TaV!UEH8?HjHckVLwnDL-f*x})Bx$=gR?)mIPf|05&oOTfXo$Ai zu-k0EN7o(0?0%Ml;vG61Tncs^?L9dHb~;KQdTb=Pv_}OqDX9`13DLmnP@L$2e4T*^ zg3G=5sPiqBH1ER+2K(_N`f1(yK}&IMVL17~!xhHAryKA~LEw5lP$|j~GVgr`rUef@ z=oLUUgwFRGAlSO`)+aJ%>v(TmNxaDY_~3EE5zS>H@HQ4rs6GJ0k0k6bylS6hBJ{3Qqo#_MJ(tP;@GcQ|n!e zoaC-DE(WB2rVO8gxm^2zqS8KK=&wO3QzUhDh@==0U~Q>B#wk~G{HM|KZwM|;&`Kx| zG2=dEon37qK>CN~`CpbB<#}eguuywEje&h~9;C2EJ95zDO_kD@H^0b$7k))^A2`T1 zP00$y`2aCiUXxF@XwNEALroGI()4sd2nyc_{J0bl^+OEG5`a6jxpzW4m$g9Ug}@@; z`g~({o@a9cJW)wN07?MJ$n8(Oqnrj3P)O|}Fe_pUcwUs{_S*zIZ-!Lp*ZM&Wh7sZp z)~T%ulgg|ur2h#98`gsvtA2acI8|T3b~|2aznCIAl)wkPfg|eyGtQm^@}2lWA4zJ? zBUVn_^5w%tbP9fW(t&R<1A{q*I^)hUWj`s^nbvQiU}iCZ2$Sd}1;&o@)EcF#TLTp% zo1-pIB`JSUc8aX~$pM3pI326#0g`~{tly2joq@3%Zv3vd1!I>5qeuI5anLZpbI?81 z)Bncc1OrT3on(-fQHb7>i@$PRciLWsvfJ!?qct$o~2OnXS!WJuY z`^;{RK-VBhb<$1vUA*p2_h?a+@&>Z-piKpOV13=vYx@CG3rO}n`viQNUi`=6!pm2Q z2wtEuV%1sJ=YKmuzP0yNDNQ9XRH*#rHPS8X&M{nMB}ed$MpotO0;mR~n9FM5Lrz8{ z5sQ5p9Dv+E!*297uQKT1Q&n#tsRs`rdi_5)T;=_oC~^5mlA|3@YGhLF^B?(DVm~;X zL5GMZiEJz3kQitn#a`F5-GE(<&w!8-nESbSYUnQsFbmCN{EEsQF$Y4A=&T3C6U-toAU`X|k|4+O zI^ErMF0uxC#_D>`R{Y!edG+*fFVK=J80va!t~2iTQ4rHENC+s?5N#jC=1$AOftjN@ zQ+&aC^`P~h!-toyK<@*2gNldpS;sOVg2-NrXOL+HwDn&oQF$ZlPu{j0f{nR1=2-id z8mclBWgmjZHYMJCAT&x3ST~W&zg{3yWm4eK^rm~Ve8hD^5{h6j5hV-k$>>*(e!|y! zkZg`t&v)<1M1Wz_++8dGTBP_vnIav(_8V=fxZ_B2!0;*fc7)VDBFsF;ens=epNy9& ziko1{vlZm6XCMlv!}jf&r}ToObgl6){{MBBUv@+vxPAuRHPv+=sKM9-&{@U#&v*O3 zZ_L}f8W|5YZzRqCKXkoyR8?#D1xyGCA}ArKq)MlND5!)I0+IrU22oN%Qd&YJR8&#{ z<`+mRiePi5l|G48`@7a4l&suZMHP>9Y)=My>QE=(+ zH?>$x3EaE?fS24_3#PM}DgYhbbQ*n#ydb8EnUK(UZ^pw9imQrzP-(wXs`D@|gT$-| z#wS@Wwzsc;{CP?Zu80GLRD7Jwt8-t&l*KHNh#Lj@A;~%u_O}J?ouuWLfG4|H>9!&N zC&k1LuNeGBS)zTK4K!2Rb)JLgUCLq!c*HniGV5BCs#ZsG21l#+H*)ItsW~&#(%!%< zJgb+3C^9H(ddy&3L*}05s4Dst$#;BJlX9fuIaC9QuvWYzT3xF#i}Q>m)Zh3u5+f!R zB_{PJGH$Gcd=D}xJ2knv`>EOB7?!^=MDe$Mg;N#D3V(og0vu>VT>4O{9EYpkzM|;g zGl3Ln31;fFOtQzB=Y4+zCX{)cF66t|c&G@&*QZ`pI`i5Z*)}UT-VUP?ByQu*v-0;d zofn5#7RQLO(IO6}Ljp=~Lbn%TX?r9%mRzEj<%QR_d&M6n#34n6(HMGMPJD1q#6D^8 z*;IL)iQ_k1@dR(>|M{pF-1S<_knrK{W&{fhBv8D2{(2YnE*>@U$A92jb<6VD6#fv8uLU>&)eY51Sq`q*hmey4h)=*Km$9$P`+xO|C>N%+&haQsiX zS%aN|XzWMah?a7hCjG}+xSX)JI-^E3PgN4#z{m;ZieIlo&X=-v(k z6QEV9V~1$f)#BZaxHVdt+4(TJ2cBNf>V806{sd8U#s-JLNn)Vw%(Nu#0rheQ{7RJL*qHS`A=bcash=3T(JIIDFVj8QZ3{0dSm9*UT?FX@+7F3_huUBEb)YP z>2+SR+g1jFM?#5Eopb8{v;dF-t5Ijxj>GkfIJSjAy^jrLz?C7Zq^IV-e+{!3Rsg?* z7rmgE?T!=_hxscuRT+BW+?15hE%NRt^JX&jIfpkrsFaR~v0X)&vCP841g_;u))MR- zzAH*rvNrkTEd7bP*X-kA|MN-HPOV%Su2(?BIPY$_+Uulbg8NcOr;@k;lG_m#4mk{Y zoSJRA&4gEPP87KPQ5?+`_mLv+EIen{(R1d9hWe{Ag_5}R;@U{4pQ?s5c%8THDtyfy zrHPs5rq9vLVc&D(UB5iff?2<2R~=?0HmNfoonbfZPPc=Kuf0N!DoCK$n-qHZg?o$*&nxz|0op;f*2m3%a}PQSyOn?{~Lb)UO{6 zF3b3U*TAJ31_F2eEy;BYLJfKGS3CQ2^%K^Am-PPXfs3DH_*c z$?ULMnO9rMOU!vwLXzL;f1it5Y@g}#?FSH46UKckx>J&i-uhscZKx^~&aQC4X>3^c z7Ct~m>|r8~J>~t0JXhQW#XAk0fzWbJq0;A_++oyZ5<*-jYMxz?Q+OQeH<~GEI zAcxeM(5P}egBpU4h;o~T=tO#pP3#<4zcp|MPk9yH04;QJ3an{rH2Hm5?|*KsbWWZu z{>W}DNLi9_QBbseY>5*eLtaNGFbp^zT?f`=$pW+vb_W&t-KDtPQ{bLpP=-8349$||&L359Dwc#4`PeDCu z+zt%}<>w^18K(4U3-NaI9-iLcD&3#R`M)7isYtL@V`PY#Ql__`gza2PE!{-@4KB#O zd)4P4%b`v@{&p~x<}pBrs_wL!h_cP~J=oe6He6yGkb>TPWV<+26wg6JV26V1avTq1 zKTOo2Q97wUbaFfHHLUeXBKrCxKHpS)MZs{3Ys53eaM$$uF9uw!{T8Fcyr#rK$Lg1h zuP-$`g+DLHT{?<|RrzCVVYWv+7f+mNU4CQgP8nR71iJ~?YV{BnFC;uwM-*5|WKZ95 zIcfy%4Z*+YuAhTNXWk0W!w6hvBv@5tVhrRQcJje|n<6ykEJT!?N(>!hwB5qOOHlN! 
zWWS9yid7V*fo?(`)8m4xAi(FfD#Rz&-Hm>U8rSUymOT;!Fq&V3=WQ zBI@qGk8Yr^6qNg-P8rD`=1ZV5y!C}L-P#S81|~PIM8@YqhWq<(*Sw@3A6tMj)Un9pLHSlY{$ebbhmGzz?S&V(CES5*g=qKw|(SnEc)m+&vBOD$a^XYM+#_^X+ZSKZZ26kn(-2WV3)wUxdBLL_eqDg z7012V`J#S zxxRfc!JTtc04f)q)ggFUr;h1O7n4KX_@LySNJe#gtcc*^{rp=fQK=%s#&89b@&2m8 zkliaM4#(9SF*Ojpx+kka``J)+M#H00TvK+quJmBOdL2@9a_3_brG3P&hgw?t{k6v#)jw}^ne&;tX&Y;)l% z+$RCI;O4tjU+(?m^P6}hnKo}BEP#A3zEVfTW_tMiioKp*^HDM#4*=oM;r!trA}IOMwEi@V5JJ1nDBtbb+d>Df+$ornC;<*Xcg z?QQYXwK|}Pr=iGE(h)<=W}MpHIFIF&I{PZ}eZ@k-@m3qt6exsy%A&RVDh}WXe_XH{ z!CuMp)R$ebH|W&(7ia=E6hti<^Wl^uv+c zC4GaBWY?7lOVaPhv3q;*u7LjgF596(jydMlm8r(otimU)cLmMb=*~iAWj>_W>b(4w zE$$1rYt?hy4&SfS4p``#uxRnK-)k(`Pw9p_rF+wEY4qkcFh1GVWb~ryj$KwFW^J)@ zy23Gx70+pUhzh_E^DP(vH3R<}2`b)#rgDcFrAMRikEI!FUc+W{xAj?OlCUhp zY6Btncdm2d1sguU08=MJQWY?1q8iLMd9hGku5n_dS}7tTqR$j&xNR1-&tD|Oy`iOJJIE3rpbQ!{zf zYKgzgo)%Agj2xYLISq+nC#CTo*ka2f`e%H9Z#Qg6=A0P^PO^2TddF9Wdp}O-4-U6t zN1lwjHl7piHSn^2{fbds>~zV{Rt~*&I$x`fm7&ao#vatai;yxuzM((mRvxmW9mi2x zS-AHBTVbJ4o*gE-z737Aku%d<$N$qxX}gS{M}I2 z>5HIfc29EiH40F}2YY`-Bm)y}cqUIJqE9*&*^X5u5k*8taBd_YT%#8>Op>hb()aPbHem>P)cZ3m2&zri$GrIZp^2^D5VOVxU9(Vj_MKR~eU zjef&vI1S3&wtd}C=mlkY=H6(TI0%YG=Tr#$?2Kl?{B zG%}?Ex&?+6Jo`g#F9!b5(EFcaF2ZYyUx^$Cj2oghnaY4qJwy8|m8`6X)$3QBwTNNK zQKb`0l?FTs;=+8TxJyf6*l?(=dm{DT1HL^>-us&d%{17qVjZAgx;9}E1s76>S3o&q zvG#M?4Swu2RJJAF9u6(}R2i%A>vvlj-^sOC0 zUb?54PfE@_H;Q#q9shRYMscWz;b>~GcMKeT2_s)F4i(Z^789lp_T!W?L}Fsg2s2-!10xMb&n~|KVAIo3EMqFBUP90NOCOl_c7c2Jb0t| zn`3x_aL*gla{12`<3TQj(WiJo`zW4+%gJ7@Efc`g5nwN>HvucL-U($z^M;Xx4Azq4 z>jS*}+Ld8FTpYCcXp^Z|sa-+O@G#9?(~Ix-FFPZ2ViZCl1wnyXXxGcfHaPeGeMmu* zUtlyIQ7N%-lHN@{9$!#-Y~!jy!#yMl`d;?id36_6|G6Qgb#JH|FVA_P$wO0OG{^JZ z;&-*McTTQ+->!gd2)I&4uyR4JTnzld67+P$=+E7*WrzRr zcnMgO=Y50X{%1kh^A`S&2yHYtgavJ^$MxAgbv?xPk7c2WNcnLQAcR`H(D~P8AshOF zYu74Q{V^@V3s^6;_)PH4jM1Q60&8Z)bC#w};T%D(9>)!Eb3~cgYnm*?!T3be&nD}P z@zZGLRte3`Eg*<>&UXD7HRHV!IA&txKGzSdFt`|fxJ#|VZVLWKu$t@X?B%dxvm*K= z?X!Mvb5-l)w@6eFZ_TDJh@JX^vP`;3hY93iqrGjJslguM`}3;?V=syfa|DvxUfeo+ zueoLG_kViWb1Qh4siKi&B@2c0#O}47j}Ja?LD>7gfNPz7C`H%~ZrHVU3PEZ1h!M7JH#WId&(WymOI{4_r&Ik67vMwSgz)AWCs3 z>ugSoy$(YOsfZlc&EJzWj>#sE-7ETx^Yx3{Kv}eUk6=8 zlgarOIh?`Cd4!TmDthlk^`>;b*6tq7VGvrDU}fkih6&oE2A(&@*DLmWWwi%b^f5(5 ze`s>IecIX^e{2R!Zon;E0f%@{0u}wXx#q00*mlwts|iD0<^7~O=4g|R5z?(U72#`F ztI=Ha0-bXkLyvM;no|gDlD`*#sp|$BT zAS9q{6%f5N2V~OEg6#GNaGmY;9exBrI7na%mUdZR_9B<~-;>5KXkql+^(_6)0GzZZ z^gkA0u#Uija$3#gnRcBT$NB%aUDn@jK9}*HTF}}xvZIa!QdkYOLP+DeZQsgvaZ48i);rKP>W_{`KeFC8Ju7u`ZYJPG(VA2wu3&MAt~bFE%H2g`Nj7SbGq z&@ArDDztycxE>$OQ9!2sCDR4ra+LVhRjP5JjZMSFy7i!1^kw+x03@XL#lPAc2>k4|KVODg4ZI7VyiId!5Ouj zZZHm5t|hXV{z8S#$jZ9=a}A*vhePnxQb4S2fl79$(hR%-M5;-jYvRCe2wZ*iz=5BxIh=l}maD4Erh$BbDEkq&no z!#nY(RS$+m+M*x->>);~9+=dRA}Y*Acw>!RrRP{u+RbeVl%%-Q>Zl=7A?iT0g+juY z@;CY<$D}D>{ z8`~=uo}$ec^m9Emc5Iq5i7Ri zk%xf!-?I+Z2J}pfE32(qc^^{yBM%TqL)f)^NcZt_L}(egaq-%jo3%^E`% z$<036UY$+|b3Sge14XoY`ZC|lAt}G~$KreVtaWn{Y37X2cIH z;v&{vp=1eykvkT-d-;Yyny}Gwrj2=H5;|)VnfLo$yC&X%;$Y!-O(~f5XcB(9wGg2o ztuZT*w>OqWt9$*V%?Q@yBwzZ|(hldE|4JP#zNSuz4m%|m3)5sCdG?_tIJi`F&(BtV zA;#(yn7%^Xatar*K+uVJ{LT%Jh|u1Wt2CVVytfqR#@IU%y-}61h4PG)u2@0c4L)NI z-dKQumJxVW91rxpUAV_x?G-L~i<0gVF;I5!)+lzc`R)1V;6Wwb-<_!!pJ`jYCydLT ziaNn4ZD2;^w^rNcePp6j#!{BwGob?2RfAyP=AH|ZCJm=~xcRRoiE1e|b z$CLkJmWdY{7UHFGlmr;q>^A#5S4%PrbW|d5+G_MHIodLJT{jeBa!s%qdy6G{S)cxT z(z0ZMTVB4u%bND61WNShpC@CUY(OAV`eWdJA9Iu*lVq|hFtlSB=J4Qt^*K2^Z!`Jb zPe4RakldqrEdIB+1)VR?UKnmXd$!N7ypUjcrH7C#8MA~dWCTQR#l_Uz}n zSX`m0K`{)J%flf7192dB^B2|z&Ej0?O}T#@;=YDFZ#>oLlkGh$(U#Z4P@Ik;Hs4C? zEy1T(&Pur?Q+r#@5TWFBJ`=0NE?2VeI3r6U;lFXk9e z-JP7_=T6Lv(|yk6X>bsiXGs!wNKELyh$y@y`K2K)SWmC%hxWOv6D5H|y0oigA! 
zAq{;AJOYxRMiI_ge->?@DzwnBze!!eo-@ zzTXBzsN7-)nBi8C^5^=w28Y14Q@xUijPA`%_f4(b29Qv^*E(2wdQiTg5@2J-($oQz zvuDIL=id7Oh5MF+Z5nmvtKAVW-5|2N`O+0!jUTJsR6_+J9r)!EgWXML8IWJZzDwO` zL`(W*VPz+{=|ehtIDSAy4=)!?z!ymUZL5G7q%xW=WJG8FzH%mU(|a5|1_dCfHWJnU zV8p@}w-benVG1WHuZ$$>j6yvE{p=*Ij@~5`lQJ#;zh>6*TDX-*g8quQ4h{7O_)UuL zL`ddZjwzKy4UNh$Voo2GCN=nN3>_&PhsJsqv5J&z`rd$z*EvzM^6{+r+_Sq#lbp;7 zbPjTBi_OvP$hs%<9~^5V6=T&;#q)URKYjjVGwNi1c})kb(#=zN1lt!YVMr_gK_gj# z&gUi7=U{8Bea{-9V_v?l>^ZZ3cw-%kovukbv~t#mw<;7Ezjxa31X44o_sQZ=#Ic7% zQG#*6YL^QnYKPH*~8tVpD;knr!X_%_fndk@E*BS-r zt}hD0`yiRX-Mfh0?hf74m)&Hq-ydKV`-~vMaUV6v15h>mCl6o()CEmj?s%U7*wa+{ z7?{JL4Jw_E7EK+Wn10|Sqyh@>4Sba#`>{8d8W*XWDD!ox`hYh&dp_>qk6D+1^XxpB zb|yTI)JBaho@_dy@=puU5%FsY`gp59Vr*G?9vB)-94BD6ceOA}9PD#wzZq2+gzWdZ ztX$waXt?Rx8hxdpU4G{qe4={cZ%t`+iSGr>Bl&j`L);)&q}A7+FK{praHkt`hczYJ z8@yT0ucAI4d4{_4t*~Unq{uMa4?1=6&kkf?GkhqlBDZd@w%(N7t@ge7j#~|232)Ve1tHq9!nh zHGz849-I(3xVR|kN4EfN+k=Z&l19QjJKFLo zQ|ylRb&6g}Nk&87D|b3fUM}o^0>`)C#+Lubmi6z5k+c~~@4>B*^Gj1c56L}XH*bjb zBXs7&56dUrj%pll@XHheF%GWH&s!@ZA8DYuK`dTre@&>BDw)3lepmKHC9=MysHjLz z!IBMQ8n07g^MO6<`+CF&-ZV<->(yEo~b6bgNHUt{f3OcRM1SL%}UG_Juy80t16i^fBrV_gi8(8oVGP zM+6SB{{#+I2m9`g!o~ZwC-oirsTVuQ4nZpDPfaMdVf z9Hj~P_!ON<;bGsv94~R~NKbO%F&@VSlJDt=F6!8Af{pXw?qruYB#TTcQ}kLTi9XD5 zon6=%g$B{1<4wj9gFmlE&DUf#yWh_#%auf3NqpYC*A%SBUH30jCmmXFz9E;0Xa;?e z|EueP3mr5_h2tV2`~9)yBg1lM2q#tlQMmJhe@SNcnx>+{Fqa~jG z{e_MfI5jpXDeici7BB$@ijT<$%v#N-@$(+2CL2b}ry&Wa`qG8_*e`i6#$rIw;Y za|%bX418GtclaFQjyE(ROgZmnaA!Fa$kW+{Nu=~}5h%_%$rd%4C_(t2qLP4EUolQh z|8odEmmd*%F)O{i&F|$20sHTF5Z=n;&r0Je(#=6y*-)tn*iA~EXj7G#K~y2Y8o4WB zUv>*wbX~HGaU3@7+7nTP*m9~wu6a&q?((}iz{%7la)INM;?frT;Bc@x(ZQa~1 z29%2|N?{*~1YbcP=Ns!yGo@}M)QQcx_O+--H^>JZbM#A}-|?*R#?>@4x;#vtzS}k) zs;qm->AVNu-SR@E*^J}bGKF=2s&$1^U40pmON2u(8Sg!czv`cAuU+l-UFC#MoQOk8 zg!VeH(Ukc5u#8R_Tn{Dw0HF%w?6T;@lWSP$nGe5WS`FLLQC0FN6QALnBY4GZh*+%67|>g`r#qAi`>=Vub`Lp?{uHm z1iMl_Zw~RkG68$7&mFx`Jz9>iZFp%L7$~K1MVU3#JJi-Y%)V@Msd-Ayt&iMS zoz9e@5L9z8vIjP|93oAH5WXLIYdbwB8UC(I_iQs zdz7Kq+;4AkH0MW&h2HAKbu_vo&e&F8K@6hOdW+%Py z2Xkm`I+rni=%)hS;Ab9Rag*x&b$v$m)g1{GfEvo2y8!0=gA@X{I;i25=eig{jTqAY z(DB+}ZundK$CZB>s{xQ~Ya+4+Ulv3z6R~-lCxndPQ-=3ppQo=trc9+q9BI{{akx_V zjuGf9eh}?l!HuKwsH^ymqfLL^IULTcR^2eW5M?o;v!$zj`dPJ-+O1DF?g(`BLh?!i zd(8E?;my0KX+uxd!ZzEmqWW`|xlksZ&pWg>M@vv4i$I=7T-or-8-4xitcfHFq=yu6 zdVrtFH*G!FIY9tOh6MqSfFreI(Oi|U=l9eLLS&f5E*5j%werWz1{2AH^Giimw_s#& zaBxRS^@@>&%bT??uE4c01v7N~U_&q&Wdo)B`@QTyDE5}01q&p=aMt0$iWuGg5vanX z0SD_4c9)>hnb0!@J%BzPQ_>J}~Tn}3+ zocm+lT{fppSCWuf`Jyu0THYB)=g6#8DNjk9F#wmb4gSvYlL)gW@ zK;K2BFfvy&)|-SS=`aZ~DTK(LzomfYJNP}}$#a*Ky1|yH!TOxQ!@z5bjGpUn-&xFd z#7_!y{P`{fgoa*h%IS+Tfx|)&>MnP~vP^DDHl;$*T`H_@{=*BHmh_lJ^6^etq_!sart*dg%rT$z*>ao@~}HQar`F8V23X+^bedb5kgKIcz< z>XUbet|08Eya)<;`$a3k2fyQ+nWiEwpJ;uFDHhITFf~`!T@!1AFeieTe0}n{|sc5CiX=st#Z0;QC&f;=&)}@BmMH`o9;43NDW7e=m;z z!F_5LDIYcULpZ?MAlP;}ULqghfHx&6b&o9sDp8MEGV6rFS5!5Bt#do^>^6UmaDkRD z6jm~cIL(J6zK*6rZ7?>Y}|CrMN) z89j-jS?SI5U;jUh^ZAe6H>IIbIGjLy^M4GcDwW8aXL7_3zWHLPyil{2Fb|Rso@m;D z(`aYqrMnMVF@M;xH?AJd>y}o!C)hQc_K;mB!y52ClY+ zf<57XL`zWNeFDvuNCDzs?4fY*FL9`P-v9SaE0dPw6q4b~;g+tXj>&^PLZEZUs=2Iz z7w4IlbQQCPdT=73BUfO%IZgXqTqXgxA6$5_L~S8-yvQ^KOkusag+exiY@`MN*w*go z1wqh5u(;`qWANIZrV+85d=_z9Z7B*OjhlEcX{g&%p56hUsf3%);3?>CpgXefrDTFu zZ~nyrQFZ>s% z{FRtTJo$D<@0FMm!;*(hZuHL1nfN$_dh`26pn@@1jQ`!iW9nncxd@!=&rWj^1Z{jk+^j zQDk&T#df?J4?0fNm2D$9B=g?Oaku3I@wVAj{;Qim`HfxZah>L8y?MV}`rGM|jbOX~ z^f8yC6WkAxs;6;!_`>WK15ER0E!oAjBZ1ZX(unW&I!44Ufj}QBsOEE{?#Rvvk8OKc zzxc{(AXoCxlx}M&3buq@BU?J@D1AO&{eg7I3EReAd>bI`i|u-!O{Iwa{rT7%=yq=( zjGlK4w*Ga-TOiG`DWy#RoY1oI1`y)&0%n{1of? 
z^XH>VPhUi7{~EDhij2`?DWkkMHn#TRn_x?bvu5Ud9>MZB*N>eT=hRr&_L&~&w=*-! z$B}Pa%}b$lNampO8Kv+Gc1%tV4WSd648l0W&Q;m-jZC7qTnO>`aq#(7!bq*$wye*K zd6Xh$F6>m8;o-RkRKzZzq??@@3JNc>4JwCpbakPNVqG;WKlv~yyofs|a#rKI8z7fR zn)*g>CVWC8G$HMqSz*JZOxO(^T7BhB_4en_9-8k(j{ki5;XbdQPs_@Sfz(N;neSmODG?LFt5_zO&7eP*OQ5U}rP8m3@$^&==wo}cX79v!|iZN(<9Xf2h_%=lH7*4yVAX{^hF z)9&LArd^n4v;uy2w97Y)RWTpJ?J%qjsz@e?;?_0>@0dd10v&aAR2Gwl6oMkC=j0Q( zq(h_}HOj$^P1rS-<8{UVAVjW6Tq4-QUh$M7wv z^t1Pj(@&ne-0_{ss18pHUCfQP&tA>8XJA32$w$G(M|A7k1Zsn%c8G8!ITt^=bJRcqg}Ng`Yv16)#V%PI2m~c^o4IP z^oCr(8fU5G?8$vRzxn7DuC!VoxQ@H#$AHxnNB5YMP-yAJ;gRIL=DE34*in_4HNd zP*SMD#clzHbc4~2{Bz4>Y}K4mhT9NJIOP;rIf4VEkX->A3Hor#dWW=3b6M*N?Gipc zChL5L98G^8jin0flP6Ce-nT23;WX2htKYu@VJ;MN)4jN)(8$4|AX_W%6Q#a|MJ*&F z*Twju1_&5#a5SSRxD*P1N}()KbB*d4M!)R5D{y~BXtfH>RSh~LhP>^+y?*rR^3hA= z;YZ8qOQ8ZWv)t1BZ!|l5yD$X8j_Q6oWS)bvF87UY!mX-1Aucyc@) zF`{q_6ZFO|VvTY1PT-Hri{FmL#&=m{ZS7icToQRvBorEXr|aJbeD*w&7PRIiFpO9y zY&~EkD^B}-Iq=R^By!^5zkg^nJZ!67H(o33@%u4fQ!mIytzk$~7&M0ts)w5ld3bn= zVgG=wg=A0&3wEgbX*HWnTS*~$NVO<65JSt?<6a#)lSbbXOw7MaM|d1xd2h&-ddI~REdZsedCsi(|O4Wy^~Hz91Vq@(!m61=&0z%+-@Xr?R?grZ%k#$wwn(uOsX{K zVkWZVABv#+5Jw+517K{s{Q*hQ&lbDmPPR?l1i zb7wykB#pxW3G6qr+XRKYdoxMfyUTSy+CD5umxU(kt74!FYG%1~N!VEx)5CyGCg@d? zPvXuKP&gqVlNKd*v7LBf;}gQ4oYJ2VlrbVY3`rddhgmWsN+vU^jkd%v=g-vO4d#2Za%y01&D%lx9*GQr7`jTQf-@!&zCV(hg}upFSY9qs1fDg{N7F4i#} z!8|ZEWNx_xU3U}UZPp@(G#p2vFm&kzX-1TXSn zk+=urq)2WxLNlE0sNhTBl0rE~UZDNr3s0{>EKJ2AOUYz~1mik9&YWO%woY}mxA2XH zS7X(Ueb5B*+C-PKZwKFG?4-v3;YP>L=J~^@jBnGk%iZ2h6~zr85WY zut9)=uq#ZAVZHm#2<9L*kD;K;S3I8YYuSH^Q!_jhIFtdH6R%`(?j_@Ff@@UEj-t@d zD>Pc#p9zhK46i}Otn}iWW%*`Sj_6#v7JH#b+R31GcOf$u zNnokyT2+7YTzkE9MG5&Ve3v<5X9mq$)A>?fF9{Q*V7;1`$Y`I>O(PP<% zW<ARjX<<31?F;h!+-N-)YtOJaljj zgzhBhX;kIhIyer{Cs`5dO8M|6Y@R@fi~1zR*+Xb3`}7yughK4R`nQ8JE8N zbM{ir%p*fX(HARpN*_-=r5h03w>U2ol6!Hf(r*_12{uzqu8UnD6RtDme`8HZw&~e@ zruqQ$ie6v<6icQ`RxlaDj^JjX%-UYtTydILkGcFnCS+W``3?aAYCDqphS#6ceg^rM zS(}|aTjGfza&bq60us64(&q|6bS=&j!0;Oov;bYS(gQjqmBk9kg7JhC@h zkhrv2L}^=X@xjwj_KiQ&iKENSPS|5gx3v45{?~5GqDyK{w%d~aZol`wu=AHUbn(3~ zJZBzkm7IKxrC808i-+wukmmy+IkbX;Ll|Zx0J5(RaS>x8`3!#*J+N%2YEWH_84@Xa z65W0L1cvIQ8Ul%IEF^b$d*{lHp8O;`%ALI50Xuyi{6APo#6Svl+BZh$i_u4BZRrS& z&IkwCd25`0rKmNZxb>AVR9ojS+m)U?C(^M)zF@X@Zq%%ikY9z6P`ygMO?_1Tm-@as znFfRYU{(*86V3*mljro=JNM{S=COpG8u?x2G1}uLxI1G-rN#=|`}Fd{L|c2|CCm^PWbVgWzFb9Smi<&;r50m{uJh!$uYQ-a zEgQjLc<+_71A>&3LiNt&bST&Vf)hAx$|Uc|qH0sN)VeWo{_vABWs)*sZ#|ZlO~=94 zrqPcW8f@CoRM!*oYt0imifuZy5{?b%tC@bImMP_IJ{na^neRv4s-LL1B%2UUYeo8L zhnVuDUurC%>j-vX2`HrNo=7H~t}w_m_UCZ>0yA7UW;PL*go{XlDifVc3rLw@mPeHFzX zVS}b+RF9a&TpCE?he02Cb0YT`3OP&DbzJ=_v9=qeeNNG z;a9OHJf+_&a}a4KZ?bkdE-fRq{Yzrg57mLu?Pn-^AqB`y@bHU^iyxQgp~@S9*yO?@ zTl@^AW24e375iIWfy`2yjb|w-FD25gD^z)CnjNK1u?UHEyt_(Dg{g|nyNeMQ^eoc#h%uKg zMM7*Mff@H6L(SZRg6cLt&TzI~TaZIsa%x)di|9o}tG6td+xwx{!uO2oUwiXtlfp8DnA=9ws!Mzjn zshLzBp1$yc0ZbM5J7{sKeF0U+O}65MfG_FU(=p$_8PTrYBj{BXGj;fE;4- zD^hdPFutx{$ac#tXO>w~F7wGzI*}qzsvZIfOeoYboP^(dmPGc64&6u+{+auSNi(Ar zCn2-xWEbtJ{*dgopbWR2mw$4+_5=J;6DEZE^<2_;h<%+=^-CL;nX(IdBDE1if;1EIi?PSxaoKCB zn@KdQZEqdCxdCSCqb+;~E+#uLkU@&>R93QQ&fMscPfGKIM-bxrjXsc)-`Xt?rRvSHu6-sCB|8l`UJ<8g)^ zRmNBB#l8W>>U@EC%QbfGs+r95ZabtgYz3|yi&4rh(~;-GZ|f-Vaa{NUE%lpbMRfha zXS(|Oegx$dKD?*pLH*K;CEz#o!~U>q*@s`h97#7bgv!#I5b2t}`=tDWN!|l|gx*TC zCe-_|WYEh{S`0ZOwnm=JzKG2dMWwH*@D28v3{vjN|2a>kD@U8aRWd8+2xbq~JPqARTuBCLidJr8VHXQto7VjVKr}dM+M-^_68tcT}R7lA5 zZ~UQCH&EM`pVhGNXY+f@ggcAPdHOKb2v-nq)vzL&VOfq(tuZ4B*D?_87g{l zt_;fcYpF%;AAdM*6jybuSa^MOyBqTirQ|Vv!2lcvYG3?`H;KZ+vg4JANRzJ7(a~Qb z1x4S{bi+1-l591hYs1ntU3{NEoKjG-U$lSs2=$DDhF+n~@n5+7>it3|S-7_7G^X`9 zNmftFF=+pnmPNV9(am3h2Pky6 
[GIT binary patch data: base85-encoded contents of a binary file from the vendored apisix-source tree; not human-readable, omitted here.]
zn0puRnz|M(lfTfpIiO?mMqXvtuFK9BJ}Ae*W@SU0?@HZs%btX(Fy4CWp-mWW3cU>p zwja&-IcAJQ_L8rH3XMKJJ)NSB3&F~M&p?5L4vLe>{I@>(;-fW`%Hi$D!y~Cb-?B7y zqN-RKeiK>{_FgNntFP{_DMw8w-Le@ug6HO=298`Yh{!~17jrg6^$wx12jrJuW$GbB z=>6YbQop!E%4fUg%6rd3)jIaO?vxr|l9*>Y3ZsoNe3M!EFB}}y1PH|A{kt4Q$X3Cp zO)sm+->A_?nIrBlAzxSn~r}7UG#KMvZV_N^!&Fy&2C?1 z(0HwAqgIRRn@#$GrK?qm^nOJ@zZjVKhaO8AV;}|IqRyiL5yYfDNr4 zhIo4=MvP@w$;ReQ=rdRFdU@~dn68jbMW!}4r!%Rl9*-`kBQ~C`-#<7=kyrml5-WH_ zfxz8t*g5}NIW4qdC#AtgJzZXBYHBL)9Jcq>t5-@Am9O_VX?`u59O=vQp!URkH@W?H zdQ61gO+@*xGrvz4HyDs(DSrF*?bVFRdHq6KE{8SVqcvV+J$BTjJrIZ;sk~dvp9W-j@6Fqv$aOmOKzW4_6^*c|SG6Jn?gZq3aGFfIEF5JKr_C4~5!d1k9Lpic?sC z1bjyBb)ouxE>}+8gHB_(%9$D1rCOpr!1drNdWIJ+SyQb}e&ZYny zQ${9+S8xhhWtk0f=o#ci&%uGi*qEyAZ3;BkPPoH^BO$AlQl+ljTYzDDC?^YaO<&E3 z3Hi}+axc|SnP3U864`Xra2XwbeLkOM*{f;O&R(+O)Yy|hHByi4_!#a4WnUugx}#2n zjJp!z0RtT}5yZ*#+S*$Mlf}f0W@`7F<&0$LT=lYhj%Z0Dw;j%Yz<%jMMFk72)c%Up zT8q58s#|uG^2vKNS4gLYl1K#&^U&es(Sgapb0CV5h#+>?DI2zfq!(B=cHr3QS%1o> z^td)@!y8K6MFw6C`DUrK*Xhg@?>?(SKZlf&5dXZkvh}S#3ZK&5VzA$K8}ho5R1rJ6 z?_A1}du(f(>q)EZ;OPIn3&>96A`~a{+E-cLoH9w?%|jZnvmvhZrd7Utk<0q#RF0w_ zHOVoh%%VcQvb$)C@TjPczi04);Nb)c7N`iGo{1x=Q*JL0cf@5#vW0P>Kk_K9dTTu+$5xS`giItUqzeHKsA#; z&b-*e2Q0h)J&S%sWQ|A0fRPR6jk^)qbg^U>Og0u4G_6% ztfxbZ*q&L-T>9~%|IZWozk45w9%8;u7)N%0@4UJZ&u*~Na8zHdg)g>>OG`-<61bZt z55_0o^amfU934mwmv;H-le~g*INLZ*a;lIS#zD7>8V-HR$RZOkTr2oKsUuayHBL2` z)3oJCKJBQ~3vr_hG0Qc@#py#cks@|6#5b0~Tz0hV_3LZrUmm(J@xek}K;f0o>bvtN z5(CxngZia~HVk+H05IjE6}N7q_DMuWwnIRS4Uns{`+aI}PO<*g3Mfb%^bDsw!P>-c zZ}V?GIjj&yR>b-f;}*cL4Ycqw6{I8Tqk6Lnz*Cc|{zRAh#dDJPQ(K1rsA4T@t5asn z@`+zxJ==**E7SIH>JP14-cb$?M-}(~ehr|I9{n-46-l?Xdy3f3M66nwJEInsiT-Ic z1uilGkwpVh?k}FUmylK<=%WCkR6{e6BBZgXcgRpF*7lRn&6le;K4cno9Y`FkfBqoD zNR08{s+oNA_9=|eAWmDGV`1PJyt-SmP+mN{bElMVV5L0yG ztG7dI_1U|GG}Pf|W79jZXcP7~h-@@07rOt&W+<-zNl1t%2 z7umlv2-?`r#p%k?CB8YZTCvtn0MegKB^LDY{gMuKyl+T81Z8f@>o|N#*P>Ur(z$Z) zn_7-2r%}kZ8*B$5qPtb4H{w!Af3)4fdGAH13}h!xWTH2%Og9OK;;a-$-)O_$#UC@k zRU^JcWlz2cjep!F*5d<ROgPK!+AIiTko z`A8v5gw4OzEL6(ADGOFDYFNE`TrNo9O;k3;L+1r9&hoE-durJ>!A#*!hQ1TtQ4OHD zxBXAcZ~S(q{~JAGkKAPh1iOvIzk5b zDTgX<5^q4IPDCgAkP7UWRz#<@RM{mC?B9@>yxpLAH`&B~Efko)+# zrOnXjXju5>w-iSqt{#2dLR7$hR_N92fe&=P_?UJ#c0s0~%$@WFg2@u0dEIsD4MebD zB@h)&! zEVllXNwA7EE^8RCX&CuUd=mr=eoS!r$B){|^6vnOdODI$(05$^V-W^@W$QUmIJ4hm zDh_@G{t`0P)lO4SKtsbi(iO|LdNd+d^tB4JWP9ZQGuCxP%VXr4WYyB?F)ja9Yk?cd zw+5}3s3W3p9J?-e{TNFUB19zg-tik}q8X_EB6%-z!t{93Q+5KrU@~%!h~s6OCW{~U z*4EpWgMVCYzN7Waqw4=K_LX5#w(Zt4#L%HIiZn_(gfu9PiiC(L4h@1x!_W;wDk32u zp`?f+F?0pEkd>s)JN%*~xG?PO;7sofOFpnvB$Q{EYHo1N8ruX-AFqQzh}q$<|R3 z=2-RF#^2<4xLm}%2`?V5kbZv4I7~h_NF2Nf*e5xNXxOLdmUu~z4Z76_()gUo>70qo z-qGiP!_U2WNFhSl@G6YzG8}$|S z0P$c8>fKLYV2u1d?N*_xwPMNx&Sv#YPTz!yS=L?SZX}}xt7n(b z(BaxsslD%OrJh}iPRk7RHR}dadnwBmujT3j&io(oFqVJVNZHG_7)#WhAgP%Y84ww0 z(Y77i+wyF3THUI!62cal+vm4boN^ly)OA-)-uxxEPPa}cu0W+l*a!J^x zWNQ)-!6|pbMRKlIEpJMPyS#T`9?bC!hLsL=&z>(FhThj_3<>RZ41Pfw=VRjg^phv5 zfLCP!=JMeL%cYD=_uULX-jY9a(HMo``+>IB{r6ex!0!C#f@i?CO29MRb*uYl8Q=Qz zkP-4ptKDbE-4=`ZOaiO+Wg6T-mD9{?x`ZSDU!G^%56+&AeJ8MQ@N5@Ry{EC!C+XT? 
z*=_tN_s_4I#AmZ@J+T+tE+9^lMc*c{-JaxS6ubSTK+hJ(lTw&?lu-XU>RC6WkTTt+ zshW~O*Gi2-P_Lw!Pe4~khYP&AcrcIb1UGgLpQ+puxdITNPiG&VWhGo9dh7+*se!Bz z0~>3_R7ZzAVY-onfQ>3CA0-1O`0e-F7)I9%o~tAdL~H_;RJ3IizRLnzIG2gM1fRmD zYDsstzJD@3Y!p_ELva*p2lCh7@#3D`B@6 z&949&v;KKTb2sa~N2@g>iRF%?C#l`RthKd~)`55{r8;>@lS#;hgMU zM~bVRrNoICUma*bfsISd-=Ravj9Yxpk<0}5=-8d z)^x(b$>SfB_dw?e@hmyhxY7`Iw|y*lkGY5yDvgUYdruD(IqbQaZ;m!$?w6Bf!z_7Jtj(m_X5dpCUgpJEp zhSt`Nz;p0X?gk+2I0GW2qN;{~0BO#}DEB4j(B;Ns#4qTYo!xzm9WP2_yZB;Pb^181f@eh~z{D!Zp z6DG-jBitHIT5s`X#xYk-;iyRP{ov|Rxw--72w!r8La)KYaK(7<&h;@Ju;^%;amBFH zG2qC6aR0bH{W$#gp~9D2RXl*QpgOhkY@H-VOH(r{*{n&%4FGe#awLY^z3M{WD6xk% z4M(@qXQq!Qyq5)BGzpwmQMhV_#Wzi z!%-fj>-6p&<|0;%MPm?g*J+S!V4TjKJE6w_*s5EQJD&sSKkrK6gQRZLbU7%8MgJj6 zNmE27i@2|N67oaCdgZIMS>$|gAMOanf1E9gQM~=g|Aw&6#kk&&D&2D0C8lqH)L+N! zJJ4j?xxRAu*(%tGjY(Q6HGxDM)$GMfyUcfdxD)X)KVJi&DZk?{g>?3VgnnC{B2CIk z3DB}_4~3>w1q7^W3^@ivWiGFU&g+aU?LpRzzGooB(vV7Oaj%l-N#?>x{Pm)`UXl=5 z$t!%_=y4u}q>|H2WF`1SH`PhHpM7=NR^NWOTFAhVc?L`sl%R`S_OhsoG9e;+$H>a6 z%lj9Zzr9!G?zI;A_Ih!&uge80h3<%UIT?=b zu5Cp`mEY96*j17pDefgFuzd7#FL*Ebf82O*|1-p`aPvelh1!A;7jaaLQn+H(43297 zG+QOW|NVDgb>2sdXGb`82g$UA?VEgD%3+jje0yuM2vO3{SpXn!2U(UXgWw+5SS!#} zp?n?YQwvN>(pMjjP^~HL2M1p;6+hD5XBNDzuO^sskYl=hm89BnvYvavWv&BW%9rsG z5INJkzt*`2EYDP-y;tN7DmXbMjxBqT;^D1J*vh3{5Ie>rOL zu?m>A+8_~#7eqJ`NLKH>PfFn)_ujrV=D{=+bTlwdv)(Z(Iw|aTRa#s$-#>GXq$eGA zndWVIf4}qU$Put%zti6SgASs<)gL4^`Nm{nAy_4f}nCIpXNcn7!93GHhf@XRZ|bOzN~>}UOmRU8~( zZq@%9iWbREnx?tDw03++F^9YAp6(6O3Jt;)#g9 zSGbM3|L!eN(ZwKlozmX2X}mc;?$Xx6SCXMZ-L`JIB_yxcnuBy~gngVKWa6MkfibAm ztgst4n-a%4>g&_uCPB$C2aE|ctrby3rldF;(ip(w=qZvSs`=;Po;K=yey z(-PTYL*;JkTSS!;IPe9^84oN%`G5xLjgPrZs8U!Ti0N02>sn^&^ampp}J*!TNXiG(-G2hY5B6K=P*joA1O5ndN@^VOW0kgU3S0_5A$?Ot@MC`ze8d!Dk>A5SVwhe z__Sw=TM)TljM(J3@kBcj@px9TobxT*(jwX2EvM|TfR()K- zuEz(tg`tEQ@6*XobGGdGp(o}A_JJ%vbo1h_P4k=FuALFik~cnwHUGJKse!rr!}0L> zQmC1!Ka-2lK9t$t^L6zV0bvkIoUVwt>gKuB1igP)7%^x z`*)u^&uN6OJNv=7@V-S&mP+(Hw$cw}*92A)jkMs}yuBD@WSMddig$ z`ChW{&3j4r_8Aq>==J~obOK>`lOD&O0P*72TuUT~YI4b2eRzQLY4X3z=rzqv2t-3i zj&LIH1`CS^D5@A`y^F*o^5aSNXiCz{gZx|8Z%2`3K(~?G2HPe~IHfOI$Cf|Cj8+5l zR&Q9<6x|k`C|mFwE*_vVbh7KL@7y0h6q|CiqcE^**4G_grlI(7#nw%mGsVIus;<_Z zoAbZcg#Z3iU_}ah1>QgF5X_;pq*s|npC+`;6 z9h*Ul5T`TYXXqu!fICB93FdWqPk8Bm{jZ?}Kaml?lq1_K?=~4n2L=Z_fsk)-%g8syycAtKPv+=KU&U< zG4U2p$9l>uSA@2MBGCJ{XoDgWa#RQQ6{(S=`b6@iF#dH$1^v|6?KqCqyoQVXH z?$(M7uqu})sFG{0a7AgwlMhO`+f6i)g(#Q;758QRyUu}ofqR+Nj75!zK9sEcN)jla zv_T!T%$cJFQ@5?S?is2Wf7AE^%<2^PgabmU0rC;5GGr{=eL!7GmBq#E`D>X;2Mqnr>X|c=qb#I2dcV6Hzr>lz|mLO{I=gXwKhh- z7G9tlWIoXTY8FDxjFhuE}~|`7G`;s+IP2F4rTSAX(BeEfAySX91XN zCEhzWGtR`1&@8vyNv{(_!pasNJnbM5<*q4XAA)Q<{y=7SS6-HY?0p>8L^AT*CnDmF zGQYy(uJvd}p#p?7A1!vfe^JyWYr0GOrymp|l$2=b1ssAh=7z5G&-_)cq88^VYEp=a zHm_T@7ECR+XL{+|4pj?RgfrnXR;&DP zLgL%4M9iZlg(@|LE&)PM9<3Fxg2FbaMSp1W1(U7YMGy}7TVuyOwi>Pth1n_pQ{sZ! 
zu)9+6s-UZFkCUie5J~2ZwGwKSP2Zu_cR<7ib}WkDfZ+R4rP?2IL6z!*)a(k3v7G{U zN;L;&)LS2dc?5EC{3&4sCkLOnSu3B=z3ZZR!FHqdO<9)IcfR8`Yrxip3qAvaHM?m0U2WBR5VB?_^6mN4t6upIg z3UqE!S11fIVh^%m&?RU18V%CGN8=k+IJs-6f*umN!1>8xV(88wJfZ3XY$OSWFBCth zbr`=XjhaIL-eWfqZ@b=~OUTU(!U$x(_4@j7ae}$Q?zaS58c*ut zpP8~1Mh*Ucc-=gCUQ)|mTr!kwM%5?!YwhX$NqXRaEX-6H@dr z-0(nCc$v?T>6N%z=Y?P(m7Lkn?hQJ~&TS`)Zf^hfc>0~(seho4psMr+#})B;0RWkh zU2)$e$QWCPW_bI|^Lrf!p!>l})W>|^>fdyT>rdYK#!&5K1{CPm-QC>0rLIjnNAp zI3O8hIB-qUM8a8IRJ-~=t20*rWQSJ(H?qA;Xyqby{ht0SQwsF%yf$=3E6Wo7}eT%EOC8t zN~LAVJ=FKw{MRFTN~>>EI#{P=?Dm93Ak&4j5n~E=AyMm&z8pd%N_|;AqfM$Yvmb)w zkPLA~hx47gINK-`s;}3BVWzx0S$3j?zaqCrd~EE+8E3G1^xO~w z3_q4Z2E@?A_;Y-LT|4i&9WNmJzZ;`h3(D<;&ui?o=AveL+43uR1R;_f*YR)yPJb54 zMVnQ9Eat}W1@Dqz`+n>Q`3chr^KK9@6c8!m!Xc^x*{2AIAfBO(7ZzR1>H4(!Loip* zXo%`l{@e*|y+KC3YLFP zjd&RSFl~YK;d6O(D%Kda6^EJ*3JA zAjDqh<-I{@goEUyQ?%@P4=Cra@rW0_QBnmVFk^u$XNI7Io zs}k|p6Efrkrd!gF4GkC<*z9HH5x;oQOy& z9MUQ5nirecSKr12r4!7Tk+YT_OBjepd3pU?e zQ+fRsak}-TD~Mt2svIr$6@SZ;M;wc-bcIEF*%4=Y<$c*9^HC5-Qi;EUBd!6CxEQO` zaII(8uKCf!|B0xd*M9;kgcYOk`64LBH0oOz2Joh=HCS$}M;czczLKAl^D=J~4uKPv zeted6(Y{^x)ANwIC$nKVQsYoz;ejTkIIe#ThtqMk`J|D4S|Q`T-4+j%ar*U*~Ado~+iO6fJEvZ-o;s?oan7{Rty)4SZFJ)}WB{-Pan(FR;;m z4zj7P$3(4H32ItN59|6RJ~1%@YY24SdYE0nBI5@^@(aC~r=-)j50GE#r-Rrr^R#F;pY#L`_p+FA9MqUh&Krug})pGh>^5Kp#nC@ z`3|l;?h*5Bn0(u}%W(u4_A@f0Vv~`R*zVC!tagj!8Hg4=l&PXB82;&Rv1z(pYKB~n zLHr`@tPW!{-|=Zbf9&?}{UU__dRbBS7c7IpK|c;;ko#q@97Qq!lD@js=ZjKd@Ynkre#Px#jUi84XeHT(Nb@|UvYYWd6XdCfA>Ii@XN=? z4Yc$bxm`vzo&9-(zG`az#~VB%0jEms-@A7D9y;qM46gw<=J~4+1!+$*9wn~9u;ic0 zn`gy-swBJ>kd!`%^vpSy?4n#Nrih&Ia%}0aV>Dr!{T^Ry11XY`_Ln zR3|(alLLZNVMc&i)MF!z)++f4gBKqzezj+gd{TVhR>CzDcH3M;_#07JDiDUO_(F1D z1Bz9;0Xrc_LfmSAh#yLU7D9cMc`-~LuR{wV60gUR8kPSz{Z`d}WMiw4nFlD#>Hv0k zx*SrD@E^{ISspI6WU*AFbS_WHqqe)^_%)((O}DdH;OOHx&q1yIQX}gqETVjgpFuM} z^4ay5#kXpQe(ak0{D-ShpiGz)(0^Uu*a4X|Q!#$7S|S8?bmHBX2h=qkRG?xPDdoav zUC<&Zi1*Q|$Lx5-I=NcuZ~%(xrr+UfjdwEr%M*6U;iEr$Vj>6G4Yu`#gAXk z(M#aZ_U}iJ?$YeZa|mD@>%q|I_i$cDIH8WdcZJJU|f`<77 zBM%u_WAx{D!GdwI+EgEh3eOY*C$>fTos5HFmv0FolPK=gvw8V54GxyU6PVkNCXw#BYV*cjYM?oc?Hm0!r2wUs2Xir$QmNU_~}vj z6vcHG*PF4ATus7#@e!UBn`~%A{u2rcifo|rL%{pBUf#FsRE?^Pg=z>7U$Xfcn2p8i z0~v(pV;D&FugkEUOuv~Hs%pQZig22jbT-o?6R?UOb;cg~b3+xY+dM6IC)5KNi&`rh z%~k;_))WQ8k}AW4cTAmCezWEd=Q<$&+&;Zbh21#I^@YwC^$!tCH%H4Iw$0K#?1#GC zKpiMbiyr^kZ+nv^;?(<0wYy^{@Sa`9N9pcT=gp6~HmM+zQy5f#cFBl@!x8V@s6mC( zTi_!k?Y-hjSJmk!Q}4ea0GV1kjARmV-hR&kMdp6B$CnzjAA21glHOS=5JTdlu9_6~!dmk*DKJ6d} zk_JZ?uvA5EP)u>2JuR zqqe^?#@Y6u{DJ>yPnc{4)g{|P<2puna7k?1qSSp^y)-OGi&y^*z5JIMT~1)aXXy7v z>^KH@1&4x6!Tk*>d5K%7la8KL=CR!JP#av6CI0i0Do*UkOaBwUo<2Dv_eX1HwD6YN zrF&-DHq8rAcJ78p$l-eP&mdngw~_rOs?r|QhwK3i>I{140#r+Xv(Qy9dF<#o?SY zT~QgT7f>W5Hv;5I_4)2}M_3~R#BrpoUAMMLk2cu2uY$xiv{CRE;h4$=A>A(VIIg}xI1lyvix7rR3ey7_yH6(RrT0_a@+s|zSqNTH>c_b>f1^!$H}!v6wp6qfO5 zDkEqgq$rT>;@4WJ{Pnv#%-`M2yO==yI+Jsme+`xl4KE4k^Iv)BTB9UgQGWvKSV;F{ z-K3`;;!iQ(W3A)j6TfEm=eo4Ga&PI)w#`>>VY-2Y7ti!HDRA~Slc$8e)wVRMq>v#+ z<+T@7_pI9+rmx*iE{S#jtp<65#xgQ8rc#jL#fq#@IP;MsnmOqX^4%pP_O@jq-6Dl4 z=g={^>v|j{B~QP?KywCE+08N9CMH6lg8tM?o}rSIpCR95MXbd+f49fvO~i-J{?e)l zrlFP1AXn@w`>W{iOBIglz39}WccPY2WL;pzNOTPWEQsC!Y93k8#yKJpNXgyhDRfMl+wzreEZ=?PZO;0$?#JTM#Mbv280eVlET5|Be`} zKa;`}&ehSir^^{&)m}hpQP+BVjLTmR#>FUOx_pd4S=Sd%;0B zN&S&=8LOvn3#Sdc#NT;yZ_6T@DW^CS9r+Br0ZE?4hMC*1i#5)JWj4oLHtsb-z~jjuq)T0wD38WMsr ziz7014qYAT6*J}eWVfiMZk#!Q;2i?W9*l=@_h5Dt;d@PJGLxID;!_fqcnmuPmIA`f zWqp0ase|UbaT2b?qe$KQUxVk?bCPq00KsJk>l2dVtKiBw5d`~!B=La0%3L5bZ^-Js zaPaC#te9!EJ*aJ&u;k(cOUkD_6Ihd- zkYa4yX@l8b^`k~3BpXT}7aBOu@7ATnG|4@5pmnBThxPTdB?Q$Ia&ib~x62aWc$=Kc 
z1NP$HG7UNsjR~3NVzJ(;8mf*ZY>U@~g>}4l)(|hKS!03f&kbBBPhSl@zFqD|plUN;{P;d<(LyOn7 z^6LQDHB>y1EKW8dqvnK|I z-uWRYO1iiNB(1`PZMW=n6wWo?Hi&GRh|O1apu3#e&)fFHV5|SkM@DRqve;%mzZ%;4;Da4U zMh=)3Fuo)^>Xlz9{PaA_b!qHAkii{!x3R~pi5h0P*|ASX#{vS<*0giB1@W7~$$GyU z+}sl2JUP#{U9h&Zix;)*Y6Ig|fOve@fyg)YEN4Z_IfF9FWT*A4Esqv^)%BaRa7b59 z;EnUUK#$c29b+KIRPiN_@-vtr_OKjd6(LGUfm?x{M7P!-h>1-R8x`mlH#wafH8sqf zhn|~%r-5Q2l`J6_HUR^b-JvI$h=Ae*ypF4x08jgH)ITR*hXHgqNKQ#J759}sN zbm~tJnTi8bVjjdIVD?bl{njY9L=NQ_Odg0r=OE8Q%XI>2x23XDc{Fm;-)FKGX>80v z$Nk`~uw!JLwdnJXut=u4+3>jDng^2^IC{h0*jQ}~)W!|Mn_zpKx#X*?e86cCC7ZBX z>?4tP@z=LW41OP>f|v|tZr_;vVnN*5(eVttYHh$eLsgG}Zgyp;SUJWv7oZyl+*R!$ z4sB|pS$k-vory2s#2R|FI=l?lL_US^XuK<1`AD|@U}ZfzHR-A=5F#|+NGFygRam^f zt8Xdul&Qgr3cwR4uluY1I?oIw#;(j$8|Hg)QRKmWH z&Dqlv5)q3f*KQG4@;Wh(FBl}S2#E(C35+V)f4W5C5!UI;kasnH3sL-rWw%RQHjkTJ z&>yBG`thDk*1S>%M&5g|8cAY$Q`|dul}dV`Y8UI@B!R5W_dI={|1lM$MA^FCZTWpd z!tDTF0i)-;S8|ti?mi*;ea^;AIf(kAOT0)fhy=v6lIK4d^(q}wA3dRk=To!F3^mV@ zpHLNLtvYAp;9c#j&ixZJl0-_2uki5Hw4A^gad3i3jlMZTR$x%@DM!|MTXJ;&jx8Fm zb8MD>`T}=etq4=sE=u8MT8)EuhrkIvp24)32oCT5^AS}z>O(Wt65uxu7Sfsikk5VB z8lapa=}5`^o`E$I7ZYQ6k*4@fnNzk@xeXmb)db1?J(gVq0`Rv$8$fd+>`5-Q5y z+TO~$#matZF-g}CQn78DaJVzzASN(Vx(5^ZM){EFS{+3hY2zy!Ogs>El910fHWE@k zL?~ip&T*H~{oe|UDud;TrxTTfPVywrNJwe3xe6NsjLzXU=F8p7OH0{~4S})oDf3<$ z<7{2hl7(>^DN^lxh^lAp7xn6WtN9SRMn(cThQu!rj|*>tsR)#zCw=Ow2QU?%xlS^5 zE`#N9I!ZPWQVAL{}W=YU^|HJ>Li$gv_UgwKxxFy2E!`@j0X)N3LJ&wXyX-c z0Z&r9kutrT9;@7d&Io-Tm5U_uoRpVhX^yiKL_?xk#=$7S= zeHyA78gKniC@fl|j5c4$`*MN0g~~Pe_`3~MD%sEHte#nZf<0@J839ST7>rYgeAXJF z(YkO-x&PQeNKd+2vC_Uv_JM}l+cnvvwu!omr|^uii0)Bi1_D1%+EM8+E&{z0%t@IRWm3L7W#0Bz)&14+^4| zMqALlo(~xHYCVe|JwbuylDo3Z`X@IbKo!o1<&-GmM8r1FcHZ8EPj~w&VO7v5DCD+P z1Q0Lfa;cHu6=fsJY1NwXa7MbwS1<<}e80zo<=OpW3+F0-YkcdlD15V@Z8wBCUDi)~ zxpcDB@<~|jNh_QE&aebt`WK@dwx)l*o=yHH{boG@%fe8P_5#D@uENdXg{&7Osoexz zp(sHWYI)oBmar1;(Xr~fcFTsquGZyJ7t8k*8|`$R$|F2o%g5DfWIOrNGpc6hKVfoH{f*p~TAPU>1Vwq+)2$|M$@irLCKW4W@j1I}dHf839kobbGlbyZC)Hb1(j zH*Dbip;+eQ$^Na+yZiOB73GDqtlh9P>N5?`T!X@7xmzNkbM1$UiEziddZ|;>*CB|huci1DWwi$ zwbt{4&?@#qFQAO9XHc%`O@LL^2;XNojbd3lB2)bm`z8(Xv!(m(mZjho6;7t_WHMWQ zyYf0iODn@Jo4dDMJ}MihRXb)ToP71JJKTQTEq62~b_recszc4|8V8@l9_s?cqx6H~ zH|Nb}Rl<{Lte|lfZ)%OZq=Qj~!_;<|cCzU6@4)x&qFxm$lOK@mfD5fff38kYwdU+b zrTr*3AT@|t5Q9dDna&V1#Y(bBF8_zbkhJZBONK)t;-pbN6ERsukx7dsFRo?3$bYtD zat0rmDIfIbLm)M2amtOc4EL}@M*(wh+1dt8R;jQoyERrl_tNcGPxgIIfr5(mt5-Kw zzdD&Dk*Tr;jD~ETA_|Z3(tFL#llv=l3rzX4f2zm13+r|xxJNZoZ{jT*rc>AkR{Qv} z+vOVe>efTcyAw@L>s7sfq#5zku!X?lLY-Fy+*oc2-&yRC%xJMOEeyV|1c*Fm#V^10 zyv}r=BvvqO<*FU%uJPzb$tl+o-OElVHgFi0`8j;)s4zce(PK1<<&p0|s-$Dm0yZ@C zKyG<>fu(C}bF=vQ*_$z*Jts&d%r>IpSUNLrzgahO+!)lRZvz&u$I7wV8Or#2O7c+e ztfhiuYSQ5Uu9T(4Vy)^=r^#va!5E9>SYg1Q;C1yU1MAtjHfnJvi~g1NsdFpc%br-{ zBCeKc?_o2MIZ|D8?42-Faq)Wn-!+@Z ze+a66wh))noAMNY$W2Ky5CWp4e+VI@0L|xsD6fbFLsWpy9Txzu)$cxe@v@OYin38L z$Go*mTPjEQ?rUZN!iZN~LWNm@?tiMOu(fRqXlE4WfTgCb_JHDJ=0=F`!5kZ9%8dJx z%~4W+DdIL64y=dXjgk~=>$4W++&Ab-5vMH8QtbHsXsy?gkQ@R-S1V8koq z8bK2v5YQ5;wwJB0ggl+TXrJXN1h6d03GZ+E;0s50_*=g1>Q*5O^JC1-#4-_)Z*`+s z#~-nd$>V2beo+}cl{wC0s~h7?8(moPkJ{N=NgJFUn_&Sw-VHFa1qZEV+$!>IHQR*b ze|X(Hwm4YflwkJMc_3mZ&0U7n60m=;fSprZfqdhIc+KUBtV-NYD^+30)<_j4*XLwy z=kRvx*pzp}`6r<+E2zQys+~7zsWH`7c-k+9bx$z$XZ13kf3zSR%KN3a7`src#4I&9mV$5{y}~^ApGVb3&rn~pjS7261i^i2>m*_<4nCPQAG_2koDf( zka+=6mMd~*@lZ@zzNwyG^3;n@fXXLqL0oA!{0kK1C&ajZ;1W)ZaYF5@9CU$=8wR9w zRcB@|%k{h=3u5RUz+sqK+p1Du%)Nq#$0}Y^+5O}7nxV%C>wf=op-JC0rx~FQw04Mb zd;HqC=!t{*dM6C#zgvp*?P`}(;Vj+UikoYdKYbe497$ljZy|H>@RW6^f!jT%cCXUd z$?5XK*tjNgs9q&{YVZ=Z>rRBpakCT>>Px;O>|`dtz+5hC{}~Z)TRhtBFNZR%pE@iT 
za19p`%9MKldqaN|fYUv{m%MI7&RveUOo~tIq`=ah^!y!c(y57zs%K888dKjlU-L0J zWFCbY_9q2jd~}%X19aVJEoF_^)C)Hs*$!&%GIhtrp0--b`yI~i8+lAl9L)QU?eU3j z?Qyg|kDTV6UaJ4JyZsK-%Vzm4j)fpMUUyx*&R8tPiieMV_;eq!?WK_>gL}Q_?0m7A zmE@{%SJJ~iPU>%TSfydxTk|t17ccr>X z(aa-{40o_k{%Q#S_uztNM8jO8o^(QDskZ>{R6xJ%9#D9Ej?M%4>MaXmfUa`Xc&tl+V`K=Lto?ZS?EnH4062Ok z!Fu+ipJ%4z1hf}v2#^!S;Ngm9(E0M|H>hYUCzTR!0oA>!J|MMvO zhRfbpfwa+~){?-$zP?GA(OJ!BH_%&hrkh`)cO}KLL?x%DPT9TK^xyT`_gN!0nr3`> zwR2L!Q=WM-x2VV*=SO@q>LfYUj^dlw{7iI(n)l(d=KCpZSDN0Hw=TuqdD$|RbdN$X zwS=~1XD%7R8aIu+ZW-OVcB%Dx7%T{b|NMe_b*Y&eT{-DrDrB`Xgq>rtWNKy#a9a8u zr+==L@U|O!I4d2B4(gQt78|2bZ9LqSJ@t9jP0E``i*0L5gR?NO-nrqdvMGmo~wdee`Rkl zxdCnM{TaaXfOSyh6t*v)E?dFn3ZJaX+elQexb;ts*!l9H0%1~Czn8v8vLb|?7i6Rk zqvwAB-c3-6Ky#ANu*8Dx8nx%{#_x#>d$nCS#ec{@hGmdwK7e>1?y|8*B=EnTp^Dr* zSlD}wpl2@9Hy~5OJ&0zNjV)|AxC_R7oOy#!+KZ;Iaf#lNT*_wqDe63n&(sI|l?6-7 zH0s*U`faGkH(ruso~2@4CA*%{@!WsYW#oOE=v8L?^TG+gXY=2a!Ls^Qpsy1T%yHC! z^d}&QsJXe71H;oE(Epy(O!D$X!ND=XJ5F7C;hc(!4vrWDu&9Qf;-Tyjd4`7{J}%N_&SPMt1nSbLH*}J;5tWYb;bQJX*~HT_`+pi0yX~n5 zg`9RHoeLj*0t><%60|0c4crX3Y zRr0wX=M{)Dg)w`&Gk*}!&@E%F8kRqjsfsf^*q5!!fSu9P8qZ4r*wCN9H zZf~6{?`I5>P%}HHV7gMJE!Cb#j4D!OQ~VUkHaSxwlvbPpVxsY1etuF{Qc}~^Lf(CX zas54_{HgnVbc_u|%u1ZINcEqf0M|v^z`z!bh=6kDl#y}kaI#U5VdyHOM{ zNw6SwZ@wx{%{ct+yb_Xk+KeY=!aFZ|a@ke@4-RX4e%sVmJ_Bn@3`nOCO zJB2Q>nEq9vC^eALN0`HvgB6jycMgGk%2tf|i<+{jp<-hHna`^qLVI&iCS~U9Yerkg z4$D!ZA(-P}p0$cs^R`OJjqqZ|hB`BWOvKqpG!d5kQiK7y>m1B&T+ny}4P5SIcpHw! z^EAi&SIbSroW?dUI~EnJ4v2fK;bUg%8vH8e_E!HcO^hNN3`@rUaRmcRtZ@VAu6{rR zKTZll26j*qa7sJ<3W2I?soC$hJ+!}E`zj-!rQ_!p!Y2!WPhC(vV=Ec%rEj9@@L(Ctc*MJ!9&C4BigDUnS$O&#h&11}@cBF6Du z6K$0;i)1``-j;o=tt|@He(uAtP4aC1`77MAQ+A~lromdUE_~pp zq}>J{_f;^zO9qB1goXloCAWd~%sCi)jvE4_G$(W(kpF7~o&N1k@-!fUA!t~|4#9w- zYDo}KGn5!4la!VMEwnza~%OG+^%rYE#c$2u`1DP!<@f`ecX$;yjr99=C@cz zEpM$q9rX~xSeK-x9>1h#u(IRblJ0uj*$<5lHkY`JRnkk`m^nIR-kS}dOKY{ikJs5- zyV502#X2^;<-OuFpr1Y(&Ic;X;5=C=yEPXU!;4rOR#!OhUuhy3LrVggSpvh)fc+BR zlYZldjju-fe{P#sFJRFeMMC33W_(d^Q;r+Dd>p#Mj_PKNH7b_T-dI9%#*I`xO#h6pRZzc?#fv$jP#UthdufjQ{gmG}!n zf@@~1`%x4X{ywK;tfSy~I*#@4cXExU{xhE>17=6HXehuQU08pp2Jwer2}lXpZ-bhG z9(scSMFeRFg9c&+vJOgMNv{j|E!S- zq1vTSv&6*2I{Nn}6MgpTV;t+N9Xl?{`EIvC^dKMqK=`v!m!|mkl;s09XRkaaUbwDs z7g9528d|$hKqD9P^K*CGE=$cbYwP4%Y4_UgMIF<%D-Zmnxxd5oB=`lpU z3El(EM@K3B+giO@}!yyal6xK>0CV3#kj`3%i39EPkKsxZ7Z)(&LfY^y`gruM}MO9yiQ3Y z`Kjw)yf_t$*O|rZ{GEWvflJKsb)I>4Zi4*H%QP#!L7+XfJy^3&UBK=O_Us>y*fzMT zwF>IqX!1G(3TYzQ4^LP+{nJ4k{ioUCvz0v0U#V#0->K+cMxJSL6_)~%A^H;B&Dm_u zG2m$OzL~vD2I5wfx!vK^DYV;@ZMEtr>z-w_{dwQDW^+6jyQ=w5Ozm^3h6B~2Zvobb z&y$#)$N_o22?9le(FRaS)C9E&*Ez=wl%(qC%bY_+4*}8TnkT&Z3Pr|4Gv^Ue$uG_H zQ0Dl=NmcU0-Nfl@DxI&B*V{gPQ@60+xj9_pk>1ze-?HQPx^owzwOQt2`;$Q zh|#03LG({Y#4nmot-Ubt?a&8+$*RZMF8CK1Jq>`oqbJ8Xx5xI}kDAoa$dh&n+c)e? 
z{t3U2`R4F-;CM_^;{ukP*;F|)yK&%WAXjEyTP8RkQcr}%=oiU1AX8sjl+ilI!c9i{ zMSvx{;5{TEB-B1U`8_!}b!TkO4m#n@(s{xP(%w{fO~HqQf3;Mm=Pgx7!>+loU=)dK zx!)*1;X|l?KZ!!#1NmXW7t4vHEngX*yWWWgE?Z}A8C;IYR~ll z)i|MSAzS4)Eh+vG;^CXmmfYubK)X%`sCx;J8`5AB!IuZ4&9*EgQ0@LWqazgq@EY`8 zbn%3OwYg!KA=3PLh(Ba>ALdW~PSaJGhLA z(BU17iH`-dNlhY(nJtq00>-tEwSS5b1|h*NBbC@Err;M>3}rMRFG8Jt1C}AM_cz_b z9$_zx2=X)9jSPW)g>5zIZ5+?nuxJ!I5BLz=!~s+=j}lc5$V2D*cTUwoU*R3(AJ{Th zxDUU9BNd4l1AM|+z(Lt;Bw_O;K)P0w3vny+gkY3hYn*u!V^Gy6ji0!qT`ZC0a?AaQ zPSpf<>$tR2Z#tAsTZ~}W&3&D^RL-GFkNT}E6@`TczqYFDFjSPdexkXvS<$)hmatpN7uEc{0>1W5S9P+4uT1=1o zIpZbC^414?niUiFw{PkEreI^A+v7+)*9Mn$3pQlWk;3b>=w;R$VTbx8Z$ayonP=7f zOZOXID-%)0e${^p3K{Hl!-EXkvcuS|>$19Z)6!%p{Z@tL^V5j~X4Dg2CtgN;|AT5vxA;%*&L|`ybF{1gB*Vavf`n?YHFH_W-~)(nng8p`ywtg;d+zWUnCQQt0w*j zM`_ER_6z>q?tP46ZF^_XT?W0*=$gs}+p|N;&-VX9fc}p%3urf#_Inw))j9_wc_UyT zgOa8AO!%zJRs%5~Ax|9x1X>%rvuI)XmY`AKm>e@xs1l1t*??&Os5WTi)>fH%`M>I)W3I}%-g3wJN8c4dD^txZZQMcnys!zsXFxP-gK{|kNcsMw2);JzXd9vPLT>5&;z;{Zh za`cH5UMFWTZ}}jY3J`P#soZ`Gvi)|bsrWN;9!tDgHOvpbeIQg*LQa6ccS5)9_04R% z^X~)DtaNl!!rr$=Rii?k;xNLbDnYOGRjMDf$Qf*6|7j9-ocI%JG`>9JCYFIXI#lwa z=DAnsV*TJ>Wz_!PWz-A(53+*)kFmD^ih}Ll#%YjLaRo%AL}C}P=vGky38|$)x>>qY z>8?dO1W76B25FGer8}j&>woX_zVB~--}gTM`JRy(W*EoV`@YY)&ULPHo!2v#5(AKV zMTss%^|i&XQV)Q#cl1a?V}JPBTO98`MwZO{BiM8S+GQ4yno=}W5{nm$d5taV1L`SB z3}n;b%E9U{JYGyEo1IxP)rPtk{PtUtJ8W-ro8Nk87+d+@_Vd2+e!$sRYYxn8Y(d^&3n zSk{fAQX3!J(6=08g;;Yh;6-(N*f$QJ00wV`iwD0ZLBKI*#r?pvWB4ouEt-Hh?;P?t z9k^)Y$AEq_zUFCg+n?Z)p!De)YEag zf#2c%@{32<(#7eI#Daw|*+HMe7oq(p4Ij-i&jrUP{#|T2qKmEkKQH0ly4C=?#MpxU zyY*d+tgD`j=<=RfnBg^0pA;=kuL!u$zZXo)pPYQPqOo)BeRTJF-nsrC)!4JL*qzyx;sgflnY3hd= zT66q|3B-G3rBMSTSfLjU3UjG&E!-$%3NWo0n??9|*qv6-sx&;=XA0O5q3~ z+Kb-{-a0%ir$CgWeK!75Xk);E1TGGU7^(g9(Nmk=*!YiEw#ew5S2;|f1&{gQRKsij+}>?=6~{x`eFSJ_XR3MXZZ)JZS&F!(n%ht>FW!U zi|+~}|GG{d1BX1u#0L$ek8Wr>zl8q`Z7T^$pA&^sn|>82!FfW(@pQa#bI9jp1N1_Z z`t4|704>=Zc~{}&pw??~v#y?7reAKjI|+q@|JZcaz?@c*t#SB&;>tR@Jo6<*6N3{X z+&gf*ap>IwZIPdWCL(8pJ<_`OJSNkgkUbeRqje$^@p8HzcPUB6ElHlR;C?UwwtnHS zB-PsY?6~$_75Z+Uj}+Gavm`?ofj2n|R1<=7q$EuQO;Fw=jJlzx9|^Sp3D(DZiBSjQ z$j-B!ys};|I13~jK@>L^3wh;=88h^zY7@Ux0y96PfD~<;My1=m@-r3;S$B0sXZH&E zQpa4mA0Onp9SnA~m*fEfilM7#N81$!xw7d`u5V&LsUnypX3}Dfx9DhT};-b4m7?VTz_yia(~2< zXjqJwNr2uO2cil~MFkFL1_S9_HICSDwCb-30${JzU$k&|#K+g_m{szB2v;a`wD-QR zSW_P#UW<>@<;(@NTAMkzd(!OuQbIN`X$}mpOdRq{jXGIt6OqZ57HDO4T@3Yg?mto zaTA%6N`irrggTz;nV;wAJE}>QLuShq?$pZO{|LsFyIhZjO#1#EkNp2b{w!CioOd0p zipvnM4>1YpXbf2te>zWXubH`6j+3q+g47JT>n2?+C)Ovs>&|F_kzLRD2DU?||D9G;~ zem7#bL0p`AXJ+wyePj(=!$;|)s;LF>!wooRo^t!}{cUHHJgn$PYW@QB{3z;*ky zj};P|63@s*&7EN5J(|t`!G2>>>9*!9WVrw5wz(H8bzO@vHMMN#XPyoT2Y*3qZK??3 z$MMdBwwv9Z8+2bfLb&ez-p7Ai0Y-*wg?VKiT}>pKo7-L{#@vBHq2#!Qa`JNCP+kqp zDxU^S4KM~Xq_Gd6^kV(-bTnv~+yHFi+$+AmaFMG~j9@iT@Jpma7c&P(d=4c%t^_R? 
zrf=5}4sNm~bY;aq!xsp=BYoSq^b^r9pbD{wa=Jl5Ina7TGWU=5C8lOgA%`9)oq%kF zLSV(m77J(KUK%v1Href$?Z}a-nG_o1{<}p*%5X!o{Qe)wbNgb-d!%;0?k!@ndN!%T ze2<1}rHHo1go?TxF)Dn|QNTXf-N>ZqBQ*uVZUJg z5so2&5rYB4)WI6SX+jf8BnMbGmqttU+HVldqV|@cnd;$MA2459ihdPpkdf($LeJyH zKQjtZ7(ziu^w-K(f$7&c4xLRUnO9~@hOXY)mzz=bIK2layLt*kWIJSAO8~trp~2Rj z7NfJ{B?|4%7}j48pX9DuV?I0FK#h2b&j?-i8PhRBN355zM$1f5_wegltiyrF$&_IC zB~wE6=Q<)0<;&vQmKW_E6UG1#iRott08Q&T=7;&SkL zamn$>u1kK_v~yrixCs@-B-M%s?iT{>YBYx)_Y)?ChlYkOPm3OVk%HkN(=+UDE39{~ zM7)0}(ajRNv#X>ouJkW(Z*ivQ^S*?G#?2WoV>)cQJC^)HSW>EkSAyc9dv{OA*+YW} zmcTnT_e2_szHiHsKP~Yo1g}X{F5xw%cUjp?SNg|(DT8L|?&LeAQ#HEpwH_-PdC3Y- zFDLQzd4i^;bRWun>)T;DIt3mIes13j7RQhLRHz2XNtmA>k>=PE=KGpEbg`hC0{-5C zf-X)2&l*N$I1#;hDo>`jj|m%WYk)9gUODZOLHV!dMC(%|`Cpvmzrpl~2P_Sc4XiR6 zFYdb{uDy)vY>x$MVIPjS;-xDBalq;<+b^~pTP2zxMg>zSpWml7=$RVX+~!KE*?7W* z0rOnnyL4F?BCcEp*c23K|F5pqrwq*Axivu!knZ;B9R2b%KR@*aiwY1Y*2DoP2=23tLx{;KEAz*Uj$F%!d!jKo)1Hl35r60A9 zK$wXs9cB^s>r3_^_L1l59%MBl(dfw6R^C6iTPc5O?xVkfOkvz^SVSH1Rb`=PA;Msec5?bCL_D4V2K2Z!`gxE^Lz_twCBP{qvw z`Wp<*^=j&p+5(xIyr!mw7Z@aS2*84umzSfZ;9^wL8b8n_|JK}*!{wR%dvL$7Asct9 zUR-0=YlGiAaT z(SkJSL-(-zT5B52<9LD3V7ta$1CYK;Aga{5OWajekK{JS98cYzyj5rSyyB&* zFwIBZK>5*{Mki6YZ0hfB;8j?ZWDAV9wQlj!uJPfhSFLM3B)pQ#+)K8BDqFQ7qs2>W6jSnQbG*FvMk`KEp6$p3o0UF)Jii+Ig|F6e z@}q?3il2$H9&OKf;@M!<`3NXpWBNvY-f(4C5qa`Nnve)TUeCj0JH@)81&BL}MBI*c z=3Z7i?;~L1pKOs@(Hw)104~$-{mk=|?4^{Xr2brsZz-6T07A)3INajnTO!ekY!*13p$ED@^m;9%kHduw&|0^0!$!Pejv);eHIAkvnao)o)a&LVW zlkHGM3oi2}&w1-r^A8|@YwECv0#!$0v%}-oa(bDC$;lC;#s-8(?5Li6Otbq*(**tC zbfJ()Wk&`0d#D|`u57v-vj5Jq4;Z?7Bc7)r2sE5$2e7jrU_2z$YVN`hgL6Ci%6~-E zp^cucv0mhg@nSdcngVBVd4=y{cw|(MfB;$AixO9_s`s|2Ty*xkL+=!fq^2>G!Kf@e zB;l|OYc7?%QhDEFNv~MpD)%?Q5QYlT?Uzw^^syh4CH+-cK>89?DLKFCC4>W z9p(ajc8s-BdX5d+^*J|5sW?6Z3~L$0Tk7Xd0S}q7xdvbA+9Y6>;t|7QKz?3q4U9It zMrfTMO`O|YPn^GwM)%m3U=%P7^KO+#55Mw~svnhO9)0{WGHShG01OJi480q+Ml2F} z7IUWn_-aTeatD%VG|v(CSs)Ms_G>YNl~{1}-UI6@EQIg+o5o^qXc8d4Gk+-U>pp(| z`VU*!0Oa-?2dw&1n-@*3fsaIm>%O-!rym?md~~Y_zC$@$wrc?@!-b3vqOuS!u!?p+ zS-6V<7Ylu}&ej_p#%;z*D_@xQh;B~Gi~Q9ZNucn0Nh9jwjRCqOe6#ez_eClS#xB_b zPksAOx2C+L9-XB4X)w`=%)nuUIlkPImFD>+(4_KC)L-l!FZFQN&EbI#Uo&~F(3i`k zf9Za7|3gv(A80b^RISBUrq=R}4IV)~aEtgxkdi4Jz zDyn4=m}01~J`5r-16w?Y0m^beJ~b3Ym5^3#wdB0pjG2xjmVr>P$;TE~0hCD?htv7tkD3VGsoH5s0__el_M@cuwvee1q zd99<+BBHx6m4VhdQ{c#53YHlxAj;j(9TenmW<8l=!o|FURfSd|vLJ9Pd1CHdoGgTg zlHYH7b#Uu|!nY8lKu^MKTAL|QF5UxziZ_3{9gt^N$J$Z-!c$xEU@MlJ-CE52!}nUQ zn{mPtS#P~DW1>sR`O=eQ3(}z)=gZRlA07)e>VT^##j;D$w-5{W7|pyap%q##OiqtY zYv%j1@x{{rVcLk0InnkQx&_?-(FE40QYVU@tHK6d{FjX0g*Tby72L_M0@#V}hU|nu zF$z2`*R^?f0ey~{0?18TM~M*tpi(AYW7l9I?#sBf*GR>M?F!NsnVAIthhg}Oxoo4Y zd2yqD=5kAk`78ZZdYRVt^q9Q-(|3mb4|G!|WZe0$UcB26!UZ#oBWzVB)wDE~D%9BeYpCp^>Ji?@8Wr7e z#ph$f=#j}f#9-0{g|-JPuKW3E6EGY=)xWU<`sxzU4a|CEfHn8|Xn52*B@BM#JfAS@ z6&n#_`oL++0aZHEVOy1VxTQ(=%cvJ{5#2X9)XF-uGwE#Zu&@&NqcIreETLB#@{YN& z@ne2r$E_qXg#G@qwD&J0798Uiq{;_+-#hNynlnp?Q@wQfdsaqdArJaHHo1A`3m1=SskV`ddzY44n zeO_-rOn7weK%39iP=W6FW>805lCanh1aOKG-~NCp@h8tf6yB_B43q*UtCY+s#)A7R z<~Dz3U!>aiCu>sKY&h>#4R@&CL=?d_g7ECJMsD@pp0Utc`K9%5$x+D&x;+vk0@O3^ zR~lKe@y$Bc*f-_df_6O?d_~gOMm^qdp?dfRNHlRL!Y`exfKp=tj1;t8iF~RTDO&OR zy@Ty{bQ_xJn*7fS#Xvj;%%WM0%-LAvy;mmO6T9g03EXOJz?K^(1xyydb9GNniDU<| zq31WrMYDr~B%Loq^~Z`o{J7r2TeJF)7NGuEMfsc7LY2s=8FP~i!n+0lp)t}h)rpEt z<9S60;=E7@N9WcRf&&WwW{IWQEWWnbrC>-S`WhSWE)`vOlblJs{D760qH-a(XbLb^ z;MJV++z~~{oZ;)-=u^|m!n;h?OFN#!!<)@3Mnv=`@hO092qCp7V25ciR%#%xrNt2& z8|!m{bH5M+WIEI1ttl*jC~jezy4OcS4Y82{knuX1f6NiCT%#w>{&bUQHJ?bKNe?jXUg$(h77Njhoa zdt1FG9FbHw$z(a(Iy>#C?qq?UsS_gmT5_Sgi-kG<%nmiFvVkH43Z^1W&6QU2RtPWB ziDu^Vfn@ji?ldj2?`C_*>e78|gPh58!XuX!oS8ih7Qm3VZw4$C5vSKDp9`C4kQk{d 
z4?9Sek6SEl|67zl^KVgp_f*FZ8jAB@^$p=FILyo;D6BS;){baqBu}7v4J}OD{cr{I z*!z-kIU{7f$@|pD1}t|%o=q{g`+N6w%^30eZ6+vbOty`YP3CRbI*7a(Yh7E*mDVWQ z(8Cm7|Hb@fhh}~{rRdUZ>>ouc29ziZpijvw0b^M_76xbGast&<2m1bLmg=?hn3x)v;{R+tKpCcc^6V&!FDIZQRP74bs!E1zKqi z-oYxDbBumwFXEeQjrt{A3>^kG?NWj8I#O2ns@0tAPhMF;9jjd|N-T`l>hU@T2OGx& z!^vzs2w8M!A~zLfmm7;NU4FkvBXh08=2J_p3CE2C%%X;id8v3lTVz^Uddka(QW}o_ zlVMNYPTnq9a#>9a{*Oc|Gb5t~1Foc`gl9NW=lmTT4Y|V1F>vM(zBXY9^Y2oGSXD11 z#4z9pm+#(Cc2)um=?~C@)VJpkDqSB`rZa+2^#cA*`9xN^*kW9EV)TEfG_NCK%_q}k zsZPJ9x4Hf`ZSmK^qJqQ0=G1ywxo(JzGD{r(F>4j3<@iYj7<?7M49 z6A9vILwCUxm#4s=jI^TmD4_lL(?61`K7WJFUr7{!156_0ng;Q+@aA@~cW^KWMeks+ zVknq*Kpk_NYijs0XR(aZ57*7~PXgZK)%m=9|1eEjee@B#kW_``lp!Tkm8ivZSsSlH zb2%HZSgCf{OgJSz*8TmNC-jSA+PVy|qX30_tn=-d-SzQ$_&}7~sFA!)O&ynm54O0F z(~jt)cS)12tWb8wj;a@PbjRBw5r7OGD>thg#Gif{_$r1QM^qWeo`~=BzLmT8gq~jf zX(@jva3Bs1My5P0&OhiNCFva}W5~0a59{BD}6ANqM79{ct|yA4w&Q z3&HY6Uk<16Z4uA?#>!;Ys9OQ?|{bKLn|?jbI~gq(C(a|ciH$Znfv9Q zfCDn5V6ylFH77v)ID6}V|!6+1-+|!ZBgb%aN-3+}7eqxl$V^=8H zNcz2t2U6=tw%e~m2r-8xdtyQg?xRP=$L`Ml1|?~xS!If}*SE4V7$0IsmG4zSh`s%d zQNoC!JqR=lzKw-#&h#joZzU-y0a$jZ)mC)l*I~_;o2$lWel_sh=+0WgdEbW^FEasT z9avmYYCOnLLIc9K5*$wL4>h&~JLTG)rO-bykr3mYT>}lS_CaZHD26{F2-=`+!ByFB zeGX71n6Ve1eWX9@cOz>!Q7&k>{(q~m=pqLo=h|gwPmX`_m%aMKCj3Nsp%Tq?p!m-S z_3mve*rKf?{((ShaKNi?H*X=9L<{uc`m3&3oN(pdG@~u8IsxOg2r>{}-HY62^khAd zXf6S}fl}c0IR+-yVeIU&fVTn%{V_Mb)KEGW6A4C(8P1Ps!OF~DEgMt6ko$b?Rlt$F z#(p2U=(xckk$W|jR{XQ6MQji3(@ea~cs1K1E|m~Nfb%;x3TfcQ7IW0o*A)#q&onm) zCs4o5jpqw|Eii&NC(AoFXs>4{)4m5&Ovgsy5-~N}@&LIL^5MFgIb~m!YL-D)P!J*7 z`YgjxprD+x9}Fa5(KfLR%s>&Gpewe-D>1pl|^TgvDLBw$jy!6hR> z;7(y)eEb2CBzzGEd;}c8_ezBMM_XzyC~vjcjDv*OA`(-B)jW$7KKRFKvSYuB!~*Ss zApR%l9)T+M|5ochyj`O!U(F|BU^#Ji@bZ5j%-I=<9?iCD71~g$S=(RAtb!W4h{Vg8Q7gB<{AfSs&@?R^+#kNxJfLX`Z|gh~AS zdh5fRhMFMi^CmtwsL^-qe8z->=`pslczFN(E!Y%;8+@0VL`>T&5m@=Nfypv%>yZ}W zw7{|h^De+bx*W1b1V%sVs&n4gzq&a6AM?T3*wYwt97H*K?p7Tjp8gaeyeh1d>?TOk z=p;I(AvAfJV+P8+p-swb2;Zx;CP*Q@+GXgu^W9qdr!U|*mM`!uTcmXnmlbg*rD!la z|BPT5lOa97VkDK4(ODMT1dR8C0h_DrCNw71r6u|O=2UfJ!2cg5jaNW}vx2JD!Y#r` z`A_MQgQ4jC@@5l+7q6L*VD@Gc5_HyYG=U+LaH6Js;ADuc1=zTrDy`UFZ8{;>E zaN0G_ps9($*=z#`Oep69cXTPaqg%&>=VjLz4wy02{N|Zt7>3&-Ok6MX@`mGoBDR1l z_-Ff<-rwAgqLHjD{=8jr@gkr#1 zm{qj2w$R_JfGaY#hQ&<7R17)z#Q(@j5oWMs#OMBiB#mQeOx3%Nh*}eBj4|QPp*@Vo zc`{HVRjD9?F)*TE$N+@vW+~nVm}1CZX?~8>hVk=0v_o-6)xI0r7xv4;{NF96qY=4t z=!jXpTFF0qBc`Hrx9yKH?R?}vayuQ48KqKv#%~Hj-YcUi!q8@QLJbVeg8uX|-kV1l zPxD6#Vk5u2rXI=fEg!?T~Tw5L*vhzFFv zPrQ7v8T!7z2NrM&84t3fjTr)6^mK3Aa`EsCCPqh3CcU~NxGroJsr{YBk*s#L-qmq` zPf(mo`gpn=UO!W*jB;65Pjx#pVdS{H+`eWR(QfbzX$XUXX>~;OBCsRlerXyF25|aE?mCIUgb~pG3Oej{p_~Uwrf+sSu-@3e6NJ=X_33Kimin~=f z`&^2!#i~A2hh3w&u0_ey1{%St$vPRLUS<8l&si~afE9_C!Bz#+$lB-FVgKMJjX>Gl_o2Enwgt+07zcbsx?$UnEu-XIy3H`y86@Q z#LzqY-O!@v8NiJk_)wuI2iRUp!JjLo8A_t^4OQzu`m>p=ek^q|HYv}1fGU(Re<>Bm zqf6c>%9MwVZv{TQ$B_b>&suM-ESH% za_Y|3%$BClX<>01E~@d3Iytw}_SBM(6tipA3tbc9vZ98;F2i+06EP(5#=$=MsaBpk zyGC6ZOuZz}e8S`XhpVI|9IM*RiYIa8Zd=tW_rm0-2pd#nZVrY^ngw_gpvB7Xzu#eO zfj|mx;(n;OTd2b?MfkeHMxyy@QJGGmM!rUVJi^=hnVjr9R?S1^_eOHYSy1Lg*C%5e zW~3Z&@Y!~iRDa%%F^et4;}P!FOiAq0O?xoGrg_M-{ph-;a>_syPyfHZT-rpp&JAoZJmp+b>G z8k)dqjT<{piya14{$`m6rH^qtL=CoF?kVN>bC?aMDDI_#^)aR}tYRBK9!*;Tdf za;T|2@HjcS{ZSG5;(mgF=dMUwf%weL1hrUV57y{AKkyr|TeSX}?*@OI6kXd5&;FnWdey1|gT@KdZ zI2b3m4ji>b)fJK7NbmCwbSlG`oce{1iQ$zli;-^#lPl<35mM1{nYEp04M=00Nx49b@FvO1PiZgZfTo>Q<+h@+6?8H zW>K}=@ukpW=@;C8$7k0;v*bfm8JErk?Y7gh*#R-98#s`3k935QNJ%Lw>8jRhU$2cU zQP5sxTpVO~K)vcOkHUNMi>}n2ciZB+!A!}Q6NS)s<%jANG0&FWZ25#2AGPK(^(b>I zC7m$6`c^sPoDI`#$UAMkwr-z%U8>6`5@uBQLeV2k63#PY)3dWHM`hZU$3^b)t?tqJ 
zg47KTwqQSGEXu5(BTm!7Sz&5zSl!A(KObDB`bp=N;40Iaub9r|QBfhOM;pQS@+29`<;U!D!DV-Cd zhFQ=y@QaF)2m7!!W(XCiKi(6@^id3)1yAJqV(rR?BBfl-nrxW#)fW17gv==fe(-d3 z+4%>Dp-;Cbk#|j(RR-0f*9NZxP?_hiAHQ%At9#**zBX{a-8cg~f6c{!^w#6!&5V!J z_NcBqnGX(qx4#%I3)Hk$Z0o~DUCnL33iePfPs{8K#ICwh!M20D$P2xGFb`7lI!bq} z!TfY=_WQ|*;E0b^_-{bWhqgc_>90x_nOY!2l)7|DR~?=gZxnLbTOfi^ z4Qb5>xKFd7zKPaEJTecY!@Is{rDXs6&euj06obwmC^Zv&Vg`HmH_fQ`pFXz#KW={l z|J6^S3irl;kODkF3TR4Z(|Z4eMX4y~&!^cJO6f43&OJ+2%bTot^s7^jGZvrXk8D1I z5R@>e7cMjmF% zXanyr_16VqsJrQ}&v}MW_9NR=9-AW9ry@Omp^4oax7e&g57bm85px5jrTSw*r6xVY zHk;)gakuL>D<7dQCRo1=V4YcT+QI{80Dh7D=b|fl;x$v@sI7M2_~Rih zdM^C2IbjH-YjX}l{w1}n{N?~$+o+ZFIO5-W>L4L6Pc3R6SJV=Bdi~dzXd~sUFk%aL z1X2_MH{B(yW}1g6`Kc(A-N4&j1Ik>3)14n=J@~Y41D$Y@tKIO#B|L@0wOF_oD~!h9 z?Qr9J|J{d4(4v|e)p5(+s3mL;ZFX4jT~4S~!>KYO^u7C9^!1^Ms{QadSc_Ws)aq&; zoRplZU|AhZv!WEHU0st@ezsA1v~g%~=_hTlu>5Vw8~=f6p7t)5=lO(ZtQ1FmT`#|_ zR);D4`wEN&u37*6R@uoZm>G|m8qwe}wJ(TV%sD!C-qfshPR3C4L(uTyk-A}XmFl`x z?dMB-F^m7x#s}eJj>{u*xhhNThtAhGFLH|g5V1=*d`IXXhEft_mm_@bl!)M$K}<6% zucno2!|h|vo-SV z3}C=g{ruj?pYMe#lxo(jrlqg!t!jJz`=Gpt4}!hl!mDYMn8qRsKl|%dbF*ocxKYoe z(5rD@H@v9Rjjx39s070Uo*mkJL~nLy9C=muq9%QOQ0RDCouhFt&$Am2?XJTtaN{zX zZn0T4fX8@VT+)WxwcYj(7d$hL=UVT5tvG+Zz6sI?A+-UC~zi@yGS4>G}%ft_>vsPy)8AvOn1XH?j7l6v}_ z=(OJ8KZdfs$2EBSo4BbkisZoHV zIRX68#^+Yy&0_`)kPRq5RuaHrtdw(v!eg^s8-`=O#ZyHpd+sue7Jt1ADAk`JZ**K* zd-ZhFpd-pVbqJ*+JIIF<&`ip_TkZ#gZ+x?6BMi|P$>wBdZm&;Z zJ4x}7aBMj%PU~Kw%OS`D+)9xgwHMuAU;LhDf+noJF9+S)+#Cv#H<0`|tuqfsjolGi zk@EsDi|#y$O6Pr~Upk`YTJKE4X7HZgd@M+bCXVt|%{{e>3{}4f?KB2RY?@)X994c| zf3I1pd*4~9-pbbNv;;0Q8O9KMm(Vhi*t&>scR5vu6}QGqY~gb$AO4+itpPj$xcLF{ zn~G#a^(W>Orwl!Ns6Ou3I64@){VNfaLguwUWjA782N;*0M(g8`_|{1PvtckB7~ z@5bzNPAXrW#Wixb+)>I)@x1aiRBg7@y6)v8>Q1TWA?2j%te$F%^8UW!aP@BgZY+hC zlZndE*$Gdh#Vc4?cH;5o!S?|8_rA{p73SqS)~Z!bCaN`V24iKXb)~>-$B?Rd!m@Ug zI`-SR)C~sp_hW1EMk%aG+HC6Ymo@+sX3=*IkS1!!%UoQk?0Ckr@#11d#OgDZ8J}3I z2(SJ}saDYn0{90fw)u`@!N^KkiJJL`Hu4huDh}a7M=IZynRp7b=l!a;S6_t^AEbaA zpNA|gc3PGD^NLdRahi^h9^U-|zF=G#f6+cV$Ik(>59dq9eU5aC-xx`*NU|zq7X{Pha**G{>_S53#Pxpu}>f0!=8NEFL>Y9I#O*kdrhf+k`Cps za`X+n3SbUp*qR3ig7)wP!Z(WiE=#fS1ZTy}`-}G<(y(byd0t;GjjtC1_-?@loTaVU z*ngU_y?)P?*SLpLkQEYgVgTEIt%Un|eHg}x( zWYZo)=a$oZ-)!2GP62MUzg=QlLNaJEIIvf$yOdyag-d}nE2{sW+5q^Mo3g2wgwT~j zdI*FM244P^rrFf3uh4jDE(<5u)KBM)J(SJia>ri;24e!AN$6tKP#9mVsL-Nw?$L%}i}h8H(K%LG5mzAICMT)#!dhbPwC?qV8g{XcmCqa6ge~>;Ha9VX@$p zWQ0Z@t<+Sq|80SX=Zm>TvT}u}omXmw5=zlE%LU-EE(5Ipqu41n53b0rYZ;SqC)Y*X z{tI|PgM|9`Gm~I^o!G-~1!RQ|71NvALeGz4_c=$E|0?%5K>-I-8wz-H%a>o`RrIb< zo}|TOS=Ol=!NZQYQ`dae+cyLACXPk#eUZHDsiJ!m)9mNi3UbW0kyZX@N^P-*N1Kvw zw;C{2vOk@exysi#-dyh~c(!bJ*}zC+G`@Y3lBe9$Jut9mb5y;8l;W%}01nuWPB*}s zFEZ}^CoskUb9)d1xNM&#rM`LKoGEy9vJeV;^CSwCtrfZEzHd9^qXc2$I{xOq2!B9- zwCzRbTDFzO1h~Ky;+Jq3)loe+=4YF<*)+0hAR&HZVu9(uE_q`LHPi<3^q&qu{;D99 zuJKy!(j?(BuVvfx%eMLKC#C)D>}$yEYA-J~LB8g)zesd7KlgYd7jD7KrWuMfOAwIK zyy6;PKlQw%_M8~5DE}{vVI|XppXhuKTxA5z8y@cjTvp zU3g=DOiK|((;nxWE%LVIZin;1oZLfww*5jpFl}q?Gk_sE1RS6t+)mtpv70G!nOZ{s-sU*~Ot4x9ZS^o3q47zoa}X_it}b~T z@8m|*45xf=FsTL7wFNMhVD+5gcfqqpR^!m4l+9}aiL!*ooLk@SlCg8<_o*rLO`LsO z-nSNcEJd(4E0BQ#D11t>{6$4ZDWnt<`;9@JzDvAf{(@ee{ujvJnfm(qU|a}bsl8dj zrw>XYWCK4WYHJ{EQnNvMrslT_<)DtEJGNsJ+8+aXps=FKO#2iwGqadSPJM)~5)LU@=ZS)}^{ynd> z2yKm1t25=~d5eNBLY1%5UEVK3^S&}plr81M_$W{Fv>a}p5~lli!wX!RLorTaPDU z>dy7Qcss!4d5e9ZvmtW*6lV?s34~#h@*!0oIelN|8U&Qg9>=vGb_vJuR{hweNaat1 z3|7Dj>Qh2z{J`u&x5M6N{dbVXFVCDeN;(plwvMKYi!|&8+YAoJZM&nGdcrxysv?*O zWa|m9%`v7kKVa`QIl zKZ;{(_19PDKcsHRABL$ zw=AZ$26R~4GywcxwJKR<-C~Sj4mq>8Tno&73%7e4qujca^@+idn}!HGfU?%^KC$SZ zhkq#FwKFp>l#G8tp8-|yWsviX(VEKoL(M0DROPe?dBG{L8h-d;hTBz$f5|ds_<7}f`S@})SHeuKaDIZU8I9VsUS%M{m-u~ 
zP6_AmPuIGbQ_Yc8F9~bFN76&WvjwHlwsBUi{P0q>VXxrz^B24zCgtQlLfwrTZ;=J#Cx{h2aS(yhePF^I-fFb*OLb_?P2fv6<|)LOa0KJwer9<% ztQ&p*Gia(Q-YP-IA=RM{U+X7gzcpN6ZiC{DSGL#JcKY*|K=U^FC=lG;TNNvPFhAuh zRtBL9v$@VCoNULiM8(y$Kpm`2UM+1?jk&Z`h)6bEwkP!;Q_7;VdP4*mKmQBK2<)Lhm~~`42hB zqO~PF0j2sgP(~+YEku~-xcRz?M-o0v>V$)YEdCIgt|wEjdCIm2$sj`&0kC?y{J}hr z!(demCw+m8R78bz(xG@`+Fkl|_>=<4Wi}F<)dMG%QX#s_&bq(M$u@Hb$q!Fjd+h_b z^Dw&tw=mz}aCoV1ZW{y&kF#IriQS~o9CEu#w5-XcQxPxK)$(1!o`8IN;I`BJHI1MY z1z1q6&qSTKecoECzK%G0u*`C#8^;xc!UW4qBcTnmrm~xv9MR zB)T?94H<;tVyHx9;bXohy974+#1&t(>(dF+214R|G@J~!9rkx-l z3k;u$%GD&)g)uN-vDW?ouDLwe^z(?lkT4oq+IsC-i^*eC0O~b zWx>V)x;J7Ws|00>wAT6@Ede4R^-B0Y?{v5FPP2R4AP8NoIu>##$Gq)*Z2%)jDn=Ay z;QZhmLS?7cW#Xjapw9|(p8ttVhcEj()z{sRQjp9*gUIB!=^VH zeIJsXG(n@nj-}J7fJHtRw8<<|Q8VlGNcobAc{ZCIyVYLT4a^2Cp&4L~yHzZ+SYE+w zDS5M4A!0%lrW$3DwCzx=vi*?js|;51(BjwBLr9I@uQRshW~A{le64ykv_*%b$P$!F zH8SzT)H2|=wmRyM-c-c@&BN+!qI>!{%B@YvpaMqTacMv}{Fo;p4;}zl6wC;6S%k7N z(65&*L6=n`UC~(U_0fmv*i#G*;^hwtmurAdH-0c+=z+}Ii6~NG{n#!2R=iWZmz9pj zdfTjjnmWPYxKHG|rPT6Ad+A<5xwJS8xupKwPQzRcRDp~FKedaiitT7)v)UOA%iD4X zuV``!SIw?k(py{^eAu;0>tI@?8N}?G!?3t2!=HSB?P50ovcjdcd`Bd(p?Xdx{WVAl zN%W2>KS0?{i+gMHOAi1tnJi?E+&yhXd)X zM5;$qyxz|mZY0Hz%CtVSPNsZQrF-KpKX-_Uq{`5f!CN2BKxv6!Awy?tDqM*hBL zEh(g9E%T3#*)6X=;TP?|<43OHY|noG+o>|1Ap!PZf5uF`>oE_~W(mtH8|suX|AS1# zT#~#}90Tvqzj9X+$X$ZDQ6Bvzo2vqN34sgzBEQssmYWJyWObv!Xm~>QENBqTEx%S0 z7_nh2m;jL5-X^7drbOt!`&Zy6U!P&$(jB!WOL3NMjjvf#Lf+3bmS)#}e@aN@e=ZeS zB!38EXV34fJ;)w}Osf%qD(ToPwFX_`3S?1LIOLQKQhYOnL5H5kx<5G7bz0Gj-)ZOQ zYg4=ZOhbc)GYVxeaTMIM%&wlebf)}GxK_2s%6K*JwU1hDb9s-8$T`xoAEq&pn{qEBx!SD3yPefAIlo4W27`d0- z7&V&TvujANobYOfyUPP!WC7FsQ8p>9NBfOgs;8J6ts&&0t+u*}VHja(?@stFL-}tx z;It3fVvDM>pDQor0~fiRT(`C#!vrb;!f!ALk!9wK8pm)AQ0)ZB~56=t!w|?Xe$muT|~a&dYa< z>EUS1(6CuuTFMCKV4bMcmglHKxRzT$DJ*Mq%x65_k9iEXEk#LjLgTcp#j9O9`by1? 
zxqhyrt5}shU2&#q=nJKKLAuy{2Do@}h7Y8pYj;=*Z_hbrtIdroHTwLl;NI=7pceXD z-;A?iw3LHEr%0C@V){Je$){T`RAPN(rKq<612rat(A{Y^NT}5!qw@4BTlS)|AC>tQ zZhr`A>S?{>YH6sxYr6bShIi2JELm#4G9@O1aRT`16ytbFzcYGglx&JYIbtW8i{}*) zXy;pP29DL9vYfyR5=!9J_TY&gFtEUV|04*VtsRmOXVk~X>=CFp&sS0pG%i*Ig&Htx zv4KLiw_0=}A#faHGQ!Jail2Lxw=_(o&Nx$atjz^%=X;Y@t2ds57Y%`Etoz>pEr}G3 zb?y;1O=cF*OR3Tp1%Wl%;ky)w<) z4mBEUP>g%=$f)VaiPPv*Jie?zWi6QH*Mm5VUgO|4m zg?4ftVS<&c(8!SIU`1J_-bB2}k+qw^_x|0tdg>`(8AnQKp(8d6`I^o41t8+0+&LbY z2yPr(Nc}xR0-aIf3y ziyJQxSoJm{Nae@adI)R^$4epml}QsarAnU(7(W=^bUylA)Bio1LG#06{XJBu%D=gg)VH*v}wn~G8gFW^_J zdHtKJn$@p~30Z{!);5nRAcBD{zbG{*LmpMBDHYpt(By}ktc(lFi54!+KX=!ycQjwG znl?*pHqt38v9Mt$BM*r+i+l{8QF}C~%O4@bJGw`%8aLVgCMZ6AY)t$rGf;(m5HF5F zsR+RqVFN_TZf)*=2vj=Zxo!~5;I;Jt5mlGRs3IqpM5WI09?fo{JvDHQ?e5I1Qw%P#D%>00Bf$ZSp8=3*G^Tb z_*qfRju+_-97bY51CRs`Fb@2Tm_xCucy`{K=wbKN-f!I9)NbgauNuOBK)7U59eo%P z8d4%1;Q7T6{JbsE9zPl~lC{Qg_Z0e>#%=cDy3#ItntMVl>AdirD@VVOlD^Vm>cG ztzKzO%xg{PhxwEfr7iP%DEBap+TKOBpvKbn*q?O zX{E5GuyaOW2DGce*2T@QZ7ZSF`1xS`tQj6QSGXl{+@lzBdp0H|%8l^Ykx+$@+jA&| z_C(I2pJppYmwa?0`r9Z6P@;)m)mpd(y_j&!f?EtWM2>_{e)XaYpi?+|+LwcCzT*TA zWzzrTW5}o;L(K4&#^8r-3BUo>is}caAV|Ru#I~iJN%kz*W15SD<#=q$XNOa-U~hWj z?;4JZlaHTB4pJV=yJVmZe-gyb7cy)jY+nu+QVG^Urrh z$k51xdC+Qs=LB4UoTaQ|f9RM9!kSo|e8l3|Ce4W~3o?`)aU1?HRL1e8P4ls>bagr> zkLcxNX;wz8usMNa_#AdVREpkdeZE*(Fg@D|`8o~-9zS_Oy+Pf?Cq4}NpwX^p^R2__ zXq*0OTKMO=o++=%2dFmTVACjJXO8MNxm9z1&Ve@NRZBGDcikz@?`2K;*8p1*@&Zm5 z$A&b8Vfq-Do-8mjOE(GlS~&t-uyC;|oec6|Tx4=54!RYP(whWJ4(`=9NEbi8?z z43FDn$mSw2p)c|ex|^d_R!qIe^;Rw!66Oe}jx&Z-7aThsi+42mT~;Sv*VdsG-jCz1 z5q85?o<+Ry>s1ACq53I41i>%fGR&^hwnM)&F*}Z}T{BqD+c58pf6iFT>^A0c8g+7QvYi~}a?wR>5G(`t}L373CQ>{z`j9|GGOhBzneya4qC z(41Sp6y@l6$cjXFYKv<0TK`j1hj`Fcc>1iwC)UtbL&jE7Q5F`SHW{sxtUwpxy~c>6X|JaXfgjarpW!PW>ohu zR|=aZ2i@Icf8EU#gfQjEMjHyk#T518YV;Ps^D>39My+tXqWGB%joK@A#6FdD)V@Oc zmri@r<559_0rSPtWr>@H1m*@K5cJdSZ+fL$zB2bP+ zsA$lY);L{J5O`;vnTeFJIG*f#$oF{-+PXj?ult zYqHCc)Fz5GO+Hys77&+jAv+=@OdI?W=8WXIgT()=Y*LJTYyrL&4+>o`Ykdtuoh;#* zWK-cumQU9M7qdw`r6a7w5L!xS8-0*b5i!sa7MKcUDD3m@#n@|v_yKI14JQX2Ye(&8sMB> z!vk_3GzF`DmJ{ZNb5c%@|D|;R>)JTq3*W>J_%1e&kwWaEqoVqtti%ECF6knU50yEU z*VC*rxt57WNTXUa4a9HhYBAeUfLftaW2V!P}^J932`6^rOn zQzlvYN9E7>eJ`M!Q^4nxo3{DNIdlA-OaPsW(50X5Z741DR>BAbV*dDzUxri=m_k*b z-n;cV;(%v}R=3A2RAByKCkLCu4`zIdr@ikMTTf7XT86x!50Dr+XU}&v))g zsgPkd83}4vRmevt%})UEdH9M!rR^)YtGQ#@edWyf_Nbt8cfZ4L7 znjpv?MHEM+x~YA`^@jsJi{9)H@xCe(5}927?=aU5Cx`nvFrO){BsHClNSoRMbY%Y0 zMCw$J|I>=Dh)f(e9nhHAr?nS&8$0QP!jtNTeMU9G|op z!oG4OUDe6u5wn-#OQ^D7HkB=%J3+5E6!6FpTJFtlaX`a01kQfpmsM-hAaz{0Q2!u>?@IU#0>={0l=q+XGGX z71g~9rLQbU^}bLw3%gnZ>MZ?j{-JtQ0MBe;D6Ug$Dyh3e6GScbp_8%8&yxY-Kl!+1 zx{bMGuZUvfWA|JK$OA6BOiT}pHTG+J*TIWoi zE4#5lP``|di(ceX8e8upgBl-m`+5dvj|!*yl09LkKm2bj_TmRO)lRFaetoG{z5lhh zpW$!q-^%1}z01CJ_oOb0J6?nVq=VACeA0j?X=>K!NU;aMB6C)(NA089_ zlUXd8Kr$}qx}&ZM&reP1Pi9Yclprq^`+P`jf;~`-6fgr&DeTC&T!~C<^4JoaHP2Mg ze)@v-=17!=GS<_o?!7N~0-v*GK@bZ_a6NZDe$gqQDo&;Y%Dbg8_U#U!;CcRh-;`x_ z+}^FfA_Ql=pp#MuEY2l@~?*n+BCriJ(4(%Xwx+Iqe2 z<1v4o*vLVbL~}{>5tI$f#OCF;ev8j;-}}pN%G^EalyXM&wb#n^A_q{S zN~#WDQNjR``&GG1pLC|3oKuSE)gkhRS`Y@gMt1eGb`Ck;n1KbokGayq(9mo4tw!9$=I6lglN(CwQt~P{%rfp2by!8YxLd zvcLAevFJ2`M(NAFP`n5Sy>7kt>c}GdRlq-&m2C)wNOQ2d(&fd;d8fGxfX)E-T!hNc zOR>#e`%vB(ySobD6Fr2#Hz5T%>3Nxl2ZCAH4y{G}aK;zg-@fk$29_6gsM;}qaC+nZ zX?g4QW<&H(=wFD>H6H{zGGl6>XZvG%;)SOmr3&wzlE!gWa>v#OzEaFxlc&>0GEtT_ z5G9Az2}uEL2IHS!MN}0e&?!3~K8_1pj1IqbY{{k0YP(Ep5Hqz9V*G*g4PjIpXW$mVs+8mVI7!A4=pIgB4ZtkncF+toO>N zm0Ww8{Zz=DwT1?XciYhnCDtlrAw^Y^l{?oyo(#N{4j6w===yh}cgBPjZo~lCj!23bVosY#>z9xAhMpJ;7&|YU9hSQRPl6eM zp9emcn>(@NBx@!nK|cP`@2#-CsO;v{m(-pNwT^hp4D4J)kdCJF(`N}^DQCt>-?FIv 
z{HAgap0^7=rh1HWSFVjC(#|8KZ&cK~Z1&%v-o|+T*`PoTqAa>@m7InQ(_a@D0|^Kf z=g^<5XDwm0+vH@ud}r}_q>siJJExlE(a=rd$+7f1Ntq4<=dTJX4=SXd#}9}qpnZ*#A^VZ+7> z|JRDw_YIP3TEwL)o76R=%?u=hU6c7xV#KCPcg4;7H@qa}fr)XikXKT?U5{h0RBF9p zcXweVaR_HM{MJOLh}3)gse5OXDcNF;cP)R6G<@|v_(ty6(lsb;T1mv=?eX_2!*=J3 zo*sM7`bpQ7If$*T(@})Unox8XacAmcgMiE){kcw_W4-EB3?gy4YX-Kjt@5Ovn+uQ3qZQ?sXL}?_Lz+@jfs?Jzh5-fMTp}rWc z{4n>K%{()(Q4f~Za(iK7TdFj`qhi~;1wawRciMf2a&o|+kGv50GjhBkD!UpHsDU7C zXd@CIZ{k~WcKov%2Bu6(sl8~^=;RF&&0;xvqLr3imJiAATJCS2+=hs_Y;2oOfcms+ zI6K4SjoF@2MVKDW=$vaAW5-t(5#>kZZT$ryK6l}OJbq}zrzwX3%J+wZGk zD^(5D(h!~MJtx}Doo3q2RNzVKV-)-Zl7>DTWksa4{Vm>V%581aNic!^NX!0CO`aE! z5@OqQH3CZA=`{t?@X^!Y1gW%kehG%hyw5&vhW0uGAumT@%~NH*9#MbEZNch>FX^;-@mNy zM^$TgM9KNa4op~R7(ZV5&Tj6YZ7P$lbSy$A&El7){Pir(0AzFqd=oFH2j3fCkfYqx)FXjvg@}d_cKvE zC<9j4T6nu8+%)+u8W;ji1m6E`*$3C<4cUUy!k_9$m^$pHLx$B^hbl~2Tpe(~9LIVI zGl*y{To{gvP@OR**Ge}8D{+487!W1>w|mF-gQ#A{$k|Q4VJ_7>(V6x&%mRLC5Tl~ z6W1cq-CEQ9%gsGt+Y0jUY^tyE((z~G{>Lz;l`Hr;b5HZw>Mrk< z7jwDMnVaizos4t%++T!4^|K{YrAk+!*^qf};QW|u9eYk$^1m(?SXWjrhw<%}6$bkNPllpz?$l|EhHYcL39IN|y#4=2hjasZ@;=4l+|3;D zt6#F}o7IBB_x^brF%pXkC<^J`2t7|dX!!kb|1O*5@%x&PLWzIi{eO$e!w-Jm*sb+0 zVV}1#w4HBDba@sX5j%(w`c=d|H&_Wo&2n8oTDXOnf&xbBa)6>)5@s1<8vL=T`0-CO^9W@rBAK(U*$4jP`xVs<(<*hrfP9%KoKvMO{-E{8Ebk n?CaHn``wQlb9v-8Z#AN=P z7Q>Qy^bqCRo~^G;=B~dyEUI^PZp&N8OGl4{NAA|YC@Ly__WTp)3kG}xVh1G5d`>-% z+g5zrNz`*lzV+kh_=s)NC*dvF)2lEG#sLIP>SEwTCHVuJP>W6z(AM7b@vJK3FhSbhv+=0WRhxjv!?@3FQ^u7% zdcTinFMSz1^)}&?W`~6F10Fr*$AGD$8h?E2BEMDfDT>bUPkm7P)j0LxY{TA;NYjU3 zl-u6(h#r@TKO)5Qe22T+^n?DM^LvnQX$IP#1YFkkJ$kTB=3v`B$I~hb9?R$3&B~27 zjX#v)e_k_dn)d12qg|P_Lv~|F=AUaO5WJ}YRor!H%X;wc(^WB}Cx?5*3Iwa(6rVR} z`204=$>P-X^$pV^J;^;PKkakSfA*}N$H+dfOj*5{eg4+oddsF?PKKsuP!xpf&(}X+ zJ-5%c`!dP0FGME!TYuPX|owF;@9}F)J)oA|U3Jxfv3*XHF$e zU9XsVY92*UxT`QbEATmDj|cf0v0JJ*%lYb*MA`BN2I|*6vlH-*PdJ=>Ud8@*clWNF zLz$Ik&nq?No_$D{F5GxF|6y|J#P&~zL{IDOYUR_=+j{;Gk3hJb7i@P?u!sfYs`QyuTUa@t8deZMx z?Nz)=^wz~y_dPa|FTD0z>%KJ7gh$XwZ{Js5#g8I!;oFL~2+v^~4m{!oM0fqrJ7>fr zxJ61pPttaC_YDEd@ZC4It36TtxPO7Kx&h-OAhShr^2m8Xr6+kG`Ny`dhs*4GB6WLj z4S$9x-*4HI1`(x)cw6^;dnR+}*z}G~{ZBWJyyZEnfB&Vx>kChN+`sNR8I}9V;fBtd z%EPUif)3AuKULpAp5v?mQ}o!M*htwJqmzd(B*|qO9W{?Sci78F{sHeRIptTT zdu5(IbGq$$dhy+%YrCGs{%StelHYjKnd$ug4Ec=uY4Dv>hx#dryx6klDyQM0N1pow zc9iqk#f-=5#MeHqeR`m&uI0DS!w(u=+oU3m>ocd4{t$kbPrdh{eRmw=@#aft@%UY^ zd~oBmHa|8(xdGjXXu>w2T15UJcenEKKH-&p{L)BN?72WV_o}{$;hgJ3HJvwg;h0EFDiri=Vi#c-6j!FQ z-_0Joe8Ix7-|h|0CU^Ktknw7Xq<%X}LEqzzWh_D8E3P~-Ad@9H`f2uzu=*qe==o}K^gb>(7CUt z&pR)D_|$Ow%-Man;#T9F^c@-x>8JcwYV>Y2lk>~in{hbfSVmOy5$7!5T;Hd@QOiX! 
z;lJ$PTzs?rO-Po3MOIGz!@T?1^ZY7)CFe_2T@_usJ5)P|XSdA?&5E4*D(NUWEa@+) zuIQU|t$kNJElJ0+DDh2VeQq=5y{ae8Q?Ycydt6IgtKUECp6hsruJm)E_G84XhAdK& zx3s(6-O)R2_oQF>O*W~n{#`6r^4;Y76)lT(weQ1SBC6lYP0B6CFJtv(Y>e?Y;{s!ZMcTeD`<9yN=l%>| zhu^_{j5nzzcJZTP707X~F_kfq@5oeV-6*76_lGSaaw739N+Ol=jVi;dhNhLtpQKvy zx~Rn!#p>N5xOI!9qY7#MFBxquU50=B#>f%4RQg#`0J2HB2RDElMRg*Z@guO=V>3Nl zJ^gy0j~0`C^~^(_H;j6+KI)bqOl>c86XCDsQx?=kT-*_}Ge4>0;RJ!e#n&^_bduP=&ls5HNq)hf^$E&>lVUlxw-z@7v#RJq*O^`&5 zY#l%T1AJ~!u1uR@^WJb|wp2De`J?%g`J6(&WSxn~RZ0?ed4vwsU5Et5-;{`>m$hcmugU6+qBlbVUUAo3DX$+ z-PP0g(%sXK;}l3I0WYZk$>S&3q&rCuk^}bamn=NFfNHia_s}TSb_r_PIVJH^VY}7# z!ZG~w(v!}ExJv<-W-ie!TME1t2nvoi+Di_TfkH>G9)10~PIb{E0F$ukxDmE)+fp`4 zz$nZlI-V#|$UDXU7GJ&CCc;GC1W%Xi`iHG=Cktt*{ZV_aHZd_>b|0v854x~fKU-=?1htS}KYYi!{ z0({x+CHLL}K@pxYR`@`>MS@%l7hls~txZjOCT99dv!424&9Vt`5#B zWfxB2HNoJT(aP%0_w8kbgT(am360upT!;fQq>CuSd{Ngib41c8D z+a`MR_rOh_i3NRjBp=T`9iEr7rpy8Ht=A`bj|p(((-*4coAy1v{7?YRe@JvNLSf5k z1p2<&L3Hfr+ARxnUweS(=LUP7&Pr~hDHS7HnkOh*9;10D^X__jK4cFKS*>+F{s>+V zVcu0nDzWBj??C2{-{8N?32uc?!aV)AcZZCSBMDq+xcO+yLK^=^#asRmS?6l>otOrJ=q=TkEqkj^2sji%B#%XI*6AeErbnV)Fs~ZoMr*bUenR>wQU=u1YpGgkDXz^C5Ml43`im zNRA=2TDLlz5+~d!y}ELwrgEh8Q?8n4;R1q9^?Hw$R=bs|hAz!W&GS0dkXT;u#;a6k zlvMNS&)W9(>~Ez|g!TeC>qV{osm{yY@(<)2vfQU;y-tB6eWo$g5nHj^?;s0ZDwmqy zRW^`wnSke~)Ug+w86;swg* z#yw|bdg!Xf@|ke6&uuC;nIqA{Pm<9 z2gBJ$7VUf5c32H$FNDYwS1Y@*l}A7a2EQOx+_Hwz1I34k0BCvGtq2^?N@ZfdK-PN3 zu=EPx&&FB9h#s_RwjaUO)oXrsR_&=Q-7g#Dv`Xp1PgUpO&Xqa0iDOo_4JUNQTNnCT z5YL4nLbuV>9f@%t7c{5A^i^E~U;_tp@@$*&Rj^1raMu;KgIgq?>9Pz__yc8p6YzC< z){BkU{Q#a2a3a@bYJLeGx}*a!<40}cy_(=P3W`0XXbS7HX;Tu=7}+cI%Z8l|DDMHE zSP&_(xz0v8v({_->+VA{X>B~VcVLt=1LX@>Bn|>t&=YdOaQsjk9}-l(ohU$_I%MJj zsF~nqmIJY2+(qWCZhU_J-XYEXu)H*UECn7K-+_(mP0yG`T|2wNiN$u{DZip{^9P(D zr|~xXyf97y8C4D~Z#t)`5r9rk|H#Q%;A=gCNJVP7?_d+Edo5!=baIfK-rfifpz z6DJj(b95}R^#7LfbO9uw1QI3Q?{a>1cAU3jq zRp98gj=y(?C~apfwN9!hy)Tm&ey^Hb7Fq%omk4LgIPL3IDtk*sXlZ`L;~}}QG(Vio z6#xJ)oG3m|aIgW;pv%4?7abV?X=L4m{Z0{G*tJjt4-!%242cDVj`>elC+P*{%ZtwD0RZXLtg-{oO1` zuJT$0V2UIE$lMqR;)v`^ff>Q)QbxKy!VP$(7I=`wUPq0$+JZRZ*sk7or{uwc$1Su$ zQhez+@SzK~Ywp~zBEh?0yd{HP(p~qkBN*DIJE2ir@@o+mjt7kM@x#_SDW{JV#HFp1 zwv6?ig(lIiEr?Ucs^RJpCO(Nlx{q1}yb5*EtGG{OWoi?g@4^X7FTD#_zh{t$94VFO zDwTN36F1cGvYRRuFWF3D*u&wQ#2%JlgdMoq9o!7u28)H4NiB;cxiR=X2JBA!h8#w& zn-y)l>%-~iv>!@wh9cUR*;s;B*cqqwdF$tV@KcD00zS99$|W7zP%gnCB}^}*sR&PWY@7870)JRp`jon0G zjgIVUf>)cP#rX%^kjW9%hKOJhaiT z#7Gi^O;CUz%$q^q3-B-CmymKqg!mi|iyQ$%7J;K62KS5-KMxcz(N@2YHvGE5)U<$Y z7U9QZXE)J8kR?G4F7aNovsz1KJ-&aC2&wbm**@>^qkLA_>!pgh{AG79*PNku0}d^l zH!g=Rr49xR&R!oSoG7|qLVn?@jXAL|IExwGr0FwVoXaYB?8Q1!BvqCP$z(4KJT{bX zWT@wh8e1a`a;OvrZ4huC!1tO4@&NC72m+}k06y*t+~RevMsb%d4o~7nhQ^}3QmbraB1&tG&iKT zMq9P(@cPO+jiyIy0esdv-XhdcS_BjP`~WCg23?*EbyL z;j`;;19U6J3jhwZ735^TD;tQDI9Jnzmc%KrPg^?Stx#=d1|+C~7DRM1UXW*MZkPZ|7PJZ)~Rz1gO55 zs7h3S&;JSTUku~)nmkymcDW0?@v~VOAe(^;OGJIYG>=0BOexP_u|xf(&K(QgEu?$! 
zts=NA7klujNa=({H*y$t5k}=Pva538-J_%}i!WdbFfqQ@)GC;hRt6U^mZkJM^WP;D zv@GMjI8)88>F`p>8_=S=EM(P`+B^768$LnCzUjcf3p@?qIcS+-jDB{zl|V#grIPel zk;HCL1bc;L`|5Qijy8<+tA#i?b-3aP@06)CFjoP0ScQNEJd6*8T}|ZYZ`(}H!ucih zok&HIzN)8_RQ@$POhBtU#s3KUWgH`$II?>e4hUgY4i)B*bHl(6FE$m1ILRwy1`Y>r z!C7#V7N2tF#EAkD{LwUn^!TBFVI2|D0d;{Rwb^AOL%tqYt2^u(%ViFAmqOpbVDQo) zYnASJk%%e;?9r z`5EhU*zCmxFpS+|{)~v|WVP=mP!Yy%?ga*Q+u)IlG`$~Oy8T>Xt@U8!t}2B@GzAd( zz>7l+8&|f)*1Oa}`XDw7inqqW&R*LmIUw2+cCPA0Kf}_2V6kyHGFb?os_md7@7xO8 zN1%h1>7f$v>McIMXV^BkT+TMw@>6LwT9 zZgR45q*a7796JXX57NUPJcLoX7%DKuNmxVYFbzri1MoDEGzpAXj%?7WIOeijCmXei zFQdX9;BAJ5)8TcpSPe)cSG1#XJf26`a*0 zg9*OtKvFj#9R}a8om20tRefdmdi+87hdGBM`(xv`?R)dA;-Ph{ppcQ<=LTO-=BQ9d zVT6-+%ju+%s*vSJv`1-8AL=iApYGGRq8W}UNuOufiH%no&Q%J-g49_WhR4blCmPiky4q9Lh+Zz6y-(({gx}`eLYm#~aiC_ag#&5I zczrGj&Y7@kqvzH=a)7Wez1`iHkx%Cn+jo1P<@BPo#H8zKo&-}$9Y%T&Z9kF5y8km0 zpDhIDKmnmALDwkDBe>>|&;XPJ`TMhJND-*4t`75jf*x}Psdmq8I|a?}T8vbG<;WJ9 zC+iJEX-tt>!a?C^n)9gt><*y4D}EWVBLg0!J%=-d{lVg@s@Gq#4p{;fiFGiINXlHNdp>+mSIPrKLKQ}IRoCN0g;%58}Sm$@V(C;HvFX4BD#-*Ns(&= zqD;#YFVn+^Q~W-pJO_GqNyo;i{mVIOwonPiqE{MjeYQ3Mm7~^?$16V+Pbu}n>?!Ov zqU!woI&*SJ;J+MjOq>8@^*)Z-1RRvi%f?~7 zK(w$?kTf_wMl?0EN8_$%+XBO?Worb)^ngM+$vC#k+?BiX266>&5wRZbpw7U7YP~y; zz}`~)`~iC|jX92oA9Nn+`3B9CfX~*7RHVZ7TIf>}gXPR(ByaiCJZ1-IuvDQT>=YWQ zkEdlavUz(V9!gM1ligiQtm<-^q7%y%0blN@R3PuYiE>G!k(g1-q+l*Z+AWB5m0i|9@({ixP=W+M(m@!Bb^uFU_0c5F0^Zaq?Bd} z_iU8h151wrH`~SZ8?>hoL(0N%6_kY9NH3=01wdEe+2zs$t=Dp~e|m9au=ML!QFieW zV25g^q*>Y;3u3JTnmGjHhVZ zz~&|#(=fP#iA);7B;a9N+{>}5$BipH!7hL|sI6^ZzZt1JgDk%#+V1+@AnFGHLD1bOTI(Yg(M`!Nu9IRYya&b+O-{2hRc zz2W&yW#M;;A45R$`uj@MAEsg77vMEpxf4uaL9wKeeN}KD8L7%lsFP(IBG0c;=4H`YpQD^auKF-Y9lxhs9xi3S#;&0^RQP(_q zTKw}?LNhsjVpmW}P+BL!S7TxOs+8C9&Zsbfba1P!rqUNEa+u`ll20Wy_ENk3r{BXD zk($)1NX&}I+Im~RetwS#WPhEQ!^p@^K)Ue4U$9-g7NpF8b26U`L@CIxc!KBlJ1vT6 zX$SWxaToszphrux($=U99HV+C z#q;7^AP#xddk{5>Al`su*6h;5A3Jz$5*XRQ4FC=Ipvn6Tbi(GyGJD_kT=A$0W)<+1 zfo6`LxJ;;>KtFGa!&>H2(SxxpWl{L)5F+W^Z6LNBuEO@hr?X_)fuOY?9!_a*x;h4Z zKQ;)zlc!YK-!!jYM@xx#szDFiygw#cLR*4_}HR1>^Q)iFl$2HK0!qSjoBFIzv( zrY69Qd*Aw)cqYTy`PfsCmwlK8kKll6*bJ2Ff%Z!J9j_`%0M64dz@)g@Z%BmHK7Hyk zuw_G}0O{65BJ9(jLMpnuyx3Ro9*6fWc`9~WbxpYt(t$l_Gk(Ws{k53z+h>2ot zua3w3ly1$u87RUuJ^v>q$Um*6p5u`Zy*JYSiru|u!)|kgQ9{tigYB#yia0d`+HjUv zyc7Z&QUgPlAm{VVtda_d2Wt%kgEZ~TfX~wJn4AQJ3F`!XfofVY?X@Di?KHcq z$)*AdPW01tuLu0KTtJ4k0}fsYEr#e^-wfp^>_TAyqER)}44dzC^L|}?O?UP zAd>*hY$r$8t-rc&9q_nYpgj5I+CFmBA{-=3G5bh^JVrP4!TS|U$ljF~ zn3C9ygk`T%jOJ<9FC2eq0&?-XZ43Sv!&y8Gj5BRy@Jl~TWbt!tJ3C@tkDaQ&FBj;A z%wSnC$p(hh)sm?AQ()Oa1U$NC=e50=D@74~)@-t^Ib(!`IS-9bA#*$wX8K$?nIL~N za2ln>y1zW)=-CaT#^K;vpK&5k58VPvo^7kZk6O{-8vb5&!b>m@tXO~$-AJ1x<#x zSy4{IrL$8S&pJV66V3m}n3F8ez&oMqXy^b{gmp6;EXJHp&q=-2RgUhz$jtZuJenCjORkIx8G z2a1-)PxLD1U$`frG^uG#;24s>4M6De3)I4yAd@d7W0%eMn0_niz0I%I{2oSP9-8f3 zBgXEEe|N)BxorGeS*kM=KS(mSJ-3G{<|vYTadXE>M=gn6V-L=PWxod*^Hg<*S{8Bt zm6kIyDKq2kB8)lyTQ~0btm(QjmMYAyDTqlP4ok%pt})LFc}`t;G}qEV*cGHj9!hzj z5IAi^MBL3kre;3C(nxSFJsU{DjRMjk(OVopmi+4LoAq@Vhpr%VWQhAgo&SYF~JP{W$}q_TCbo#SXi-a>lo&&?z@6 za;WIjgUp?dVQfLLRK|<{L3=OB5uSqw05wiQtK~{Kv%=N>J81Ll1-L%)84Z5|4zep) z&K{Gb5)F645NeLTnQI2(mys50K)?HjopgAXLkk)+03ztl%vRryUQmQ)lb!t`;FXPp z)r%$INC{qO5g3oKJ($%(otE^*mq3@;z|O1CqpKtR5APh;xmFC|EG*!B1niiM^$4l$ zr%nZpOsH8t8kxB&4Ub9v_RFWgZf)V!YDuVrM&Lk7#mYGvnkYoYg%lhu@e~SLv&(H* zuE-{-if}X5+m(uUMF@ece)E8nn|Rd%(p7qT?}&F^KD>+8eSU%tl*0Qw3D##( z8E#>#orPbl{itrx&krGQfE{kuNU!}2tUaCt`+k?9vw0BG!M1;@ftJ|Fjh!WBX6vY|YFIM{ zE}lROXI8?~Vf-kX!+`Mw4Q659LQ!EtOhD_7|Nbi2-RSU}O;mdXBYPBG4R^1jouR*~+%;eu|p0!t{v_hvu5D7FA#KPCX zh9!SozPdT=AANTk8ArxNbd$g@B*RZeN&}>Yr&S#oIJPQ7ePtSQ^M%_f#pfVtFH@nK 
zU8VOYRchh;3)u2sBeAjZ_fN%<$=jSHR>4967|uTlP<9hTVQs z)dI|+&ovWw!r0tVS-vjpwY14g>nT|}vyMI147?ZsMvay0zMqj%->tp}9jJcNlSe

[Binary patch payload omitted: base85-encoded git binary-patch data, not human-readable.]