From 56a1e4027aff511f2733c1310a3274b087a65619 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Mon, 4 Nov 2019 19:40:07 +0200 Subject: [PATCH 01/41] fix(admin-api) change detection of foreign entity endpoints to explicit (#5186) The original detection worked like this: ``` local inverse = not foreign_schema.fields[foreign_field_name] and schema.fields[foreign_field_name] ``` This worked great, but it held an assumption that can go wrong. Example: Routes DAO has foreign key to service, specified by the field with name `service`. So `routes_schema.fields["service"]` is specified (see schema.fields[foreign_field_name]). All good so far. But then there is another check that I would say is flaky: `not service_schema.fields["service"]`. Service schema does not currently have a field called `service`, but if we ever add a field to service schema with a name of `service` this assumption turns upside-down, and we have introduced a bug. I thought about whether we could make this more clever, but I ended up with an explicit function argument that specifies whether the autogenerator should generate the foreign entity endpoint or entity endpoint. foreign entity endpoint: - `/routes/:routes/service` entity endpoints: - `/routes/:routes` - `/services/:services/routes/:routes` This PR fixes this. 
--- kong/api/endpoints.lua | 45 ++- .../04-admin_api/17-foreign-entity_spec.lua | 256 ++++++++++++++++++ .../kong/plugins/foreign-entity/daos.lua | 27 ++ .../kong/plugins/foreign-entity/handler.lua | 3 + .../migrations/000_base_foreign_entity.lua | 45 +++ .../foreign-entity/migrations/init.lua | 3 + .../kong/plugins/foreign-entity/schema.lua | 12 + 7 files changed, 362 insertions(+), 29 deletions(-) create mode 100644 spec/02-integration/04-admin_api/17-foreign-entity_spec.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/foreign-entity/daos.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/foreign-entity/handler.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/000_base_foreign_entity.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/init.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/foreign-entity/schema.lua diff --git a/kong/api/endpoints.lua b/kong/api/endpoints.lua index e374eabb1b4d..eb49756a417d 100644 --- a/kong/api/endpoints.lua +++ b/kong/api/endpoints.lua @@ -396,7 +396,7 @@ end -- -- /services/:services -- /services/:services/routes/:routes -local function get_entity_endpoint(schema, foreign_schema, foreign_field_name, method) +local function get_entity_endpoint(schema, foreign_schema, foreign_field_name, method, is_foreign_entity_endpoint) return function(self, db, helpers) local entity, _, err_t if foreign_schema then @@ -414,11 +414,8 @@ local function get_entity_endpoint(schema, foreign_schema, foreign_field_name, m end if foreign_schema then - local inverse = not foreign_schema.fields[foreign_field_name] and - schema.fields[foreign_field_name] - local pk - if inverse then + if is_foreign_entity_endpoint then pk = entity[foreign_field_name] if not pk or pk == null then return not_found() @@ -439,7 +436,7 @@ local function get_entity_endpoint(schema, foreign_schema, foreign_field_name, m return not_found() end - if not 
inverse then + if not is_foreign_entity_endpoint then local fk = entity[foreign_field_name] if not fk or fk == null then return not_found() @@ -470,7 +467,7 @@ end -- -- /services/:services -- /services/:services/routes/:routes -local function put_entity_endpoint(schema, foreign_schema, foreign_field_name, method) +local function put_entity_endpoint(schema, foreign_schema, foreign_field_name, method, is_foreign_entity_endpoint) return not foreign_schema and function(self, db, helpers) local entity, _, err_t = upsert_entity(self, db, schema, method) if err_t then @@ -494,10 +491,8 @@ local function put_entity_endpoint(schema, foreign_schema, foreign_field_name, m end local associate - local inverse = not foreign_schema.fields[foreign_field_name] and - schema.fields[foreign_field_name] - if inverse then + if is_foreign_entity_endpoint then local pk = entity[foreign_field_name] if pk and pk ~= null then self.params[foreign_schema.name] = pk @@ -555,7 +550,7 @@ end -- -- /services/:services -- /services/:services/routes/:routes -local function patch_entity_endpoint(schema, foreign_schema, foreign_field_name, method) +local function patch_entity_endpoint(schema, foreign_schema, foreign_field_name, method, is_foreign_entity_endpoint) return not foreign_schema and function(self, db, helpers) local entity, _, err_t = update_entity(self, db, schema, method) if err_t then @@ -578,10 +573,7 @@ local function patch_entity_endpoint(schema, foreign_schema, foreign_field_name, return not_found() end - local inverse = not foreign_schema.fields[foreign_field_name] and - schema.fields[foreign_field_name] - - if inverse then + if is_foreign_entity_endpoint then local pk = entity[foreign_field_name] if not pk or pk == null then return not_found() @@ -642,7 +634,7 @@ end -- -- /services/:services -- /services/:services/routes/:routes -local function delete_entity_endpoint(schema, foreign_schema, foreign_field_name, method) +local function delete_entity_endpoint(schema, foreign_schema, 
foreign_field_name, method, is_foreign_entity_endpoint) return not foreign_schema and function(self, db, helpers) local _, _, err_t = delete_entity(self, db, schema, method) if err_t then @@ -657,10 +649,7 @@ local function delete_entity_endpoint(schema, foreign_schema, foreign_field_name return handle_error(err_t) end - local inverse = not foreign_schema.fields[foreign_field_name] and - schema.fields[foreign_field_name] - - if inverse then + if is_foreign_entity_endpoint then local id = entity and entity[foreign_field_name] if not id or id == null then return not_found() @@ -751,12 +740,10 @@ end -- -- /services/:services -- /services/:services/routes/:routes -local function generate_entity_endpoints(endpoints, schema, foreign_schema, foreign_field_name) +local function generate_entity_endpoints(endpoints, schema, foreign_schema, foreign_field_name, is_foreign_entity_endpoint) local entity_path if foreign_schema then - local inverse = not foreign_schema.fields[foreign_field_name] and - schema.fields[foreign_field_name] - if inverse then + if is_foreign_entity_endpoint then entity_path = fmt("/%s/:%s/%s", schema.admin_api_name or schema.name, schema.name, @@ -782,11 +769,11 @@ local function generate_entity_endpoints(endpoints, schema, foreign_schema, fore methods = { --OPTIONS = method_not_allowed, --HEAD = method_not_allowed, - GET = get_entity_endpoint(schema, foreign_schema, foreign_field_name), + GET = get_entity_endpoint(schema, foreign_schema, foreign_field_name, nil, is_foreign_entity_endpoint), --POST = method_not_allowed, - PUT = put_entity_endpoint(schema, foreign_schema, foreign_field_name), - PATCH = patch_entity_endpoint(schema, foreign_schema, foreign_field_name), - DELETE = delete_entity_endpoint(schema, foreign_schema, foreign_field_name), + PUT = put_entity_endpoint(schema, foreign_schema, foreign_field_name, nil, is_foreign_entity_endpoint), + PATCH = patch_entity_endpoint(schema, foreign_schema, foreign_field_name, nil, 
is_foreign_entity_endpoint), + DELETE = delete_entity_endpoint(schema, foreign_schema, foreign_field_name, nil, is_foreign_entity_endpoint), }, } end @@ -816,7 +803,7 @@ local function generate_endpoints(schema, endpoints) for foreign_field_name, foreign_field in schema:each_field() do if foreign_field.type == "foreign" and not foreign_field.schema.legacy then -- e.g. /routes/:routes/service - generate_entity_endpoints(endpoints, schema, foreign_field.schema, foreign_field_name) + generate_entity_endpoints(endpoints, schema, foreign_field.schema, foreign_field_name, true) -- e.g. /services/:services/routes generate_collection_endpoints(endpoints, schema, foreign_field.schema, foreign_field_name) diff --git a/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua b/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua new file mode 100644 index 000000000000..928da9c6923a --- /dev/null +++ b/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua @@ -0,0 +1,256 @@ +local helpers = require "spec.helpers" +local cjson = require "cjson" +local utils = require "kong.tools.utils" +local Errors = require "kong.db.errors" + + +local function it_content_types(title, fn) + local test_form_encoded = fn("application/x-www-form-urlencoded") + local test_multipart = fn("multipart/form-data") + local test_json = fn("application/json") + + it(title .. " with application/www-form-urlencoded", test_form_encoded) + it(title .. " with multipart/form-data", test_multipart) + it(title .. " with application/json", test_json) +end + + +for _, strategy in helpers.each_strategy() do + describe("Admin API #" .. strategy, function() + local client + local db + + lazy_setup(function() + local env = {} + env.database = strategy + env.plugins = env.plugins or "foreign-entity" + + local lua_path = [[ KONG_LUA_PATH_OVERRIDE="./spec/fixtures/migrations/?.lua;]] .. + [[./spec/fixtures/migrations/?/init.lua;]].. + [[./spec/fixtures/custom_plugins/?.lua;]].. 
+ [[./spec/fixtures/custom_plugins/?/init.lua;" ]] + + local cmdline = "migrations up -c " .. helpers.test_conf_path + local _, code, _, stderr = helpers.kong_exec(cmdline, env, true, lua_path) + assert.same(0, code) + assert.equal("", stderr) + + local _ + _, db = helpers.get_db_utils(strategy, { + "foreign_entities", + "foreign_references", + }, { + "foreign-entity", + }) + + assert(helpers.start_kong { + database = strategy, + nginx_conf = "spec/fixtures/custom_nginx.template", + plugins = "foreign-entity", + }) + end) + + lazy_teardown(function() + helpers.stop_kong(nil, true) + end) + + before_each(function() + client = assert(helpers.admin_client()) + end) + + after_each(function() + if client then + client:close() + end + end) + + describe("/foreign-references/{foreign-reference}/same", function() + describe("GET", function() + it("retrieves by id", function() + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" }, { nulls = true })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + + local res = client:get("/foreign-references/" .. foreign_reference.id .. 
"/same") + local body = assert.res_status(200, res) + + local json = cjson.decode(body) + assert.same(foreign_entity, json) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end) + + it("retrieves by name", function() + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" }, { nulls = true })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + + local res = client:get("/foreign-references/foreign-reference/same") + local body = assert.res_status(200, res) + + local json = cjson.decode(body) + assert.same(foreign_entity, json) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end) + + it("returns 404 if not found", function() + local res = client:get("/foreign-references/" .. utils.uuid() .. "/same") + assert.res_status(404, res) + end) + + it("returns 404 if not found by name", function() + local res = client:get("/foreign-references/my-in-existent-foreign-reference/same") + assert.res_status(404, res) + end) + + it("ignores an invalid body", function() + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" }, { nulls = true })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + + local res = client:get("/foreign-references/" .. foreign_reference.id .. 
"/same", { + headers = { + ["Content-Type"] = "application/json" + }, + body = "this fails if decoded as json", + }) + assert.res_status(200, res) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end) + end) + + describe("PATCH", function() + it_content_types("updates if found", function(content_type) + return function() + if content_type == "multipart/form-data" then + -- the client doesn't play well with this + return + end + + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" }, { nulls = true })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + + local edited_name = "name-" .. foreign_entity.name + local res = client:patch("/foreign-references/" .. foreign_reference.id .. "/same", { + headers = { + ["Content-Type"] = content_type + }, + body = { + name = edited_name, + }, + }) + local body = assert.res_status(200, res) + local json = cjson.decode(body) + assert.equal(edited_name, json.name) + + local in_db = assert(db.foreign_entities:select({ id = foreign_entity.id }, { nulls = true })) + assert.same(json, in_db) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end + end) + + it_content_types("updates if found by name", function(content_type) + return function() + if content_type == "multipart/form-data" then + -- the client doesn't play well with this + return + end + + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" }, { nulls = true })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + local edited_name = "name-" .. 
foreign_entity.name + local res = client:patch("/foreign-references/foreign-reference/same", { + headers = { + ["Content-Type"] = content_type + }, + body = { + name = edited_name, + }, + }) + local body = assert.res_status(200, res) + local json = cjson.decode(body) + assert.equal(edited_name, json.name) + + local in_db = assert(db.foreign_entities:select({ id = foreign_entity.id }, { nulls = true })) + assert.same(json, in_db) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end + end) + + describe("errors", function() + it_content_types("returns 404 if not found", function(content_type) + return function() + local res = client:patch("/foreign-references/" .. utils.uuid() .. "/same", { + headers = { + ["Content-Type"] = content_type + }, + body = { + name = "edited", + }, + }) + assert.res_status(404, res) + end + end) + + it_content_types("handles invalid input", function(content_type) + return function() + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + local res = client:patch("/foreign-references/" .. foreign_reference.id .. 
"/same", { + headers = { + ["Content-Type"] = content_type + }, + body = { + same = "foobar" + }, + }) + local body = assert.res_status(400, res) + assert.same({ + code = Errors.codes.SCHEMA_VIOLATION, + name = "schema violation", + message = "schema violation (same: expected a valid UUID)", + fields = { + same = "expected a valid UUID", + }, + }, cjson.decode(body)) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end + end) + end) + end) + + describe("DELETE", function() + describe("errors", function() + it("returns HTTP 405 when trying to delete a foreign entity that is referenced", function() + local foreign_entity = assert(db.foreign_entities:insert({ name = "foreign-entity" })) + local foreign_reference = assert(db.foreign_references:insert({ name = "foreign-reference", same = foreign_entity })) + local res = client:delete("/foreign-references/" .. foreign_reference.id .. "/same") + local body = assert.res_status(405, res) + assert.same({ message = 'Method not allowed' }, cjson.decode(body)) + + assert(db.foreign_references:delete({ id = foreign_reference.id })) + assert(db.foreign_entities:delete({ id = foreign_entity.id })) + end) + + it("returns HTTP 404 with non-existing foreign reference", function() + local res = client:delete("/foreign-references/" .. utils.uuid() .. 
"/same") + assert.res_status(404, res) + end) + + it("returns HTTP 404 with non-existing foreign reference by name", function() + local res = client:delete("/foreign-references/in-existent-route/same") + assert.res_status(404, res) + end) + end) + end) + end) + end) +end diff --git a/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/daos.lua b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/daos.lua new file mode 100644 index 000000000000..a5285ef9fd73 --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/daos.lua @@ -0,0 +1,27 @@ +local typedefs = require "kong.db.schema.typedefs" + + +return { + { + name = "foreign_entities", + primary_key = { "id" }, + endpoint_key = "name", + admin_api_name = "foreign-entities", + fields = { + { id = typedefs.uuid }, + { name = { type = "string", unique = true } }, + { same = typedefs.uuid }, + }, + }, + { + name = "foreign_references", + primary_key = { "id" }, + endpoint_key = "name", + admin_api_name = "foreign-references", + fields = { + { id = typedefs.uuid }, + { name = { type = "string", unique = true } }, + { same = { type = "foreign", reference = "foreign_entities", on_delete = "cascade" } }, + }, + }, +} diff --git a/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/handler.lua b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/handler.lua new file mode 100644 index 000000000000..553fae644fbf --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/handler.lua @@ -0,0 +1,3 @@ +return { + PRIORITY = 1 +} diff --git a/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/000_base_foreign_entity.lua b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/000_base_foreign_entity.lua new file mode 100644 index 000000000000..312b753f82f2 --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/000_base_foreign_entity.lua @@ -0,0 +1,45 @@ +return { + postgres = { + up = [[ + CREATE TABLE IF NOT 
EXISTS "foreign_entities" ( + "id" UUID PRIMARY KEY, + "name" TEXT UNIQUE, + "same" UUID + ); + + CREATE TABLE IF NOT EXISTS "foreign_references" ( + "id" UUID PRIMARY KEY, + "name" TEXT UNIQUE, + "same_id" UUID REFERENCES "foreign_entities" ("id") ON DELETE CASCADE + ); + + DO $$ + BEGIN + CREATE INDEX IF NOT EXISTS "foreign_references_fkey_same" ON "foreign_references" ("same_id"); + EXCEPTION WHEN UNDEFINED_COLUMN THEN + -- Do nothing, accept existing state + END$$; + ]], + }, + + cassandra = { + up = [[ + CREATE TABLE IF NOT EXISTS foreign_entities ( + id uuid PRIMARY KEY, + name text, + same uuid + ); + + CREATE INDEX IF NOT EXISTS ON foreign_entities(name); + + CREATE TABLE IF NOT EXISTS foreign_references ( + id uuid PRIMARY KEY, + name text, + same_id uuid + ); + + CREATE INDEX IF NOT EXISTS ON foreign_references (name); + CREATE INDEX IF NOT EXISTS ON foreign_references (same_id); + ]], + }, +} diff --git a/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/init.lua b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/init.lua new file mode 100644 index 000000000000..ad8ebbc58d9a --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/migrations/init.lua @@ -0,0 +1,3 @@ +return { + "000_base_foreign_entity", +} diff --git a/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/schema.lua b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/schema.lua new file mode 100644 index 000000000000..e29843642608 --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/foreign-entity/schema.lua @@ -0,0 +1,12 @@ +return { + name = "foreign-entity", + fields = { + { + config = { + type = "record", + fields = { + }, + }, + }, + }, +} From acc68c7fd60273688a6e4da3a89a8f58ec5256e3 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Fri, 8 Nov 2019 09:54:29 -0500 Subject: [PATCH 02/41] chore(ci) remove daily build task from travis-ci (#5209) --- .travis.yml | 68 
----------------------------------------------------- 1 file changed, 68 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0fa7666e3a78..23930357062c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -144,73 +144,5 @@ jobs: - make release env: PACKAGE_TYPE=rpm RESTY_IMAGE_BASE=amazonlinux RESTY_IMAGE_TAG=1 KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=${TRAVIS_TAG} if: tag IS present AND tag ~= 1. - - stage: deploy daily build - install: skip - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=deb RESTY_IMAGE_BASE=ubuntu RESTY_IMAGE_TAG=trusty KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=deb RESTY_IMAGE_BASE=ubuntu RESTY_IMAGE_TAG=xenial KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=deb RESTY_IMAGE_BASE=ubuntu RESTY_IMAGE_TAG=bionic KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=deb RESTY_IMAGE_BASE=debian RESTY_IMAGE_TAG=stretch KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=deb RESTY_IMAGE_BASE=debian RESTY_IMAGE_TAG=jessie 
KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=rpm RESTY_IMAGE_BASE=centos RESTY_IMAGE_TAG=6 KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=rpm RESTY_IMAGE_BASE=centos RESTY_IMAGE_TAG=7 KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=apk RESTY_IMAGE_BASE=alpine RESTY_IMAGE_TAG=latest KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=rpm RESTY_IMAGE_BASE=rhel RESTY_IMAGE_TAG=6 KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=rpm RESTY_IMAGE_BASE=rhel RESTY_IMAGE_TAG=7 KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make nightly-release - env: PACKAGE_TYPE=rpm RESTY_IMAGE_BASE=amazonlinux 
RESTY_IMAGE_TAG=latest KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=`date +%Y-%m-%d` REPOSITORY_NAME=${PWD##*/}-nightly REPOSITORY_OS_NAME=$TRAVIS_BRANCH - if: type=cron script: - .ci/run_tests.sh From 0ff3dea2dec36085ddf90bcf3dc87de2303cc49d Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Mon, 11 Nov 2019 09:23:32 -0300 Subject: [PATCH 03/41] docs(api) document check_hash query argument for /config (#5210) --- autodoc/data/admin-api.lua | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/autodoc/data/admin-api.lua b/autodoc/data/admin-api.lua index 1c0454724174..1c986bafe818 100644 --- a/autodoc/data/admin-api.lua +++ b/autodoc/data/admin-api.lua @@ -1758,6 +1758,12 @@ return { `config`
**required** | The config data (in YAML or JSON format) to be loaded. ]], + request_query = [[ + Attributes | Description + ---:| --- + `check_hash`
*optional* | If set to 1, Kong will compare the hash of the input config data against that of the previous one. If the configuration is identical, it will not reload it and will return HTTP 304. + ]], + description = [[ This endpoint allows resetting a DB-less Kong with a new declarative configuration data file. All previous contents From 57491bc461d0db2ad4684a71bd994e841a26a79b Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Thu, 14 Nov 2019 06:27:57 -0800 Subject: [PATCH 04/41] chore(deps) bump lua-resty-dns-client to 4.1.1 (#5221) --- kong-1.4.0-0.rockspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong-1.4.0-0.rockspec b/kong-1.4.0-0.rockspec index f8c5b2fe85e6..64957651dddc 100644 --- a/kong-1.4.0-0.rockspec +++ b/kong-1.4.0-0.rockspec @@ -30,7 +30,7 @@ dependencies = { "luaossl == 20190731", "luasyslog == 1.0.0", "lua_pack == 1.0.5", - "lua-resty-dns-client == 4.1.0", + "lua-resty-dns-client == 4.1.1", "lua-resty-worker-events == 1.0.0", "lua-resty-mediador == 0.1.2", "lua-resty-healthcheck == 1.1.0", From 5cdce7c19dd7d65e02794c782baf96efbcf4a0f8 Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Thu, 14 Nov 2019 15:51:09 -0800 Subject: [PATCH 05/41] fix(grpc) preserve_host behavior Fix issue preventing gRPC preventing `preserve_host` behavior from taking effect. Fixes #5053, #5217. 
--- kong/router.lua | 9 +- kong/templates/nginx_kong.lua | 18 +++- spec/01-unit/08-router_spec.lua | 149 ++++++++++++++++++++++++++++++++ 3 files changed, 168 insertions(+), 8 deletions(-) diff --git a/kong/router.lua b/kong/router.lua index e722d0cd6297..597c24f48b24 100644 --- a/kong/router.lua +++ b/kong/router.lua @@ -1,3 +1,4 @@ +local constants = require "kong.constants" local lrucache = require "resty.lrucache" local utils = require "kong.tools.utils" local px = require "resty.mediador.proxy" @@ -156,12 +157,8 @@ local function has_capturing_groups(subj) end -local protocol_subsystem = { - http = "http", - https = "http", - tcp = "stream", - tls = "stream", -} +local protocol_subsystem = constants.PROTOCOLS_WITH_SUBSYSTEM + local function marshall_route(r) local route = r.route diff --git a/kong/templates/nginx_kong.lua b/kong/templates/nginx_kong.lua index 6e96a82ef603..5c6c1db03874 100644 --- a/kong/templates/nginx_kong.lua +++ b/kong/templates/nginx_kong.lua @@ -176,15 +176,29 @@ server { location @grpc { internal; - set $kong_proxy_mode 'grpc'; + + grpc_set_header Host $upstream_host; + grpc_set_header X-Forwarded-For $upstream_x_forwarded_for; + grpc_set_header X-Forwarded-Proto $upstream_x_forwarded_proto; + grpc_set_header X-Forwarded-Host $upstream_x_forwarded_host; + grpc_set_header X-Forwarded-Port $upstream_x_forwarded_port; + grpc_set_header X-Real-IP $remote_addr; + grpc_pass grpc://kong_upstream; } location @grpcs { internal; - set $kong_proxy_mode 'grpc'; + + grpc_set_header Host $upstream_host; + grpc_set_header X-Forwarded-For $upstream_x_forwarded_for; + grpc_set_header X-Forwarded-Proto $upstream_x_forwarded_proto; + grpc_set_header X-Forwarded-Host $upstream_x_forwarded_host; + grpc_set_header X-Forwarded-Port $upstream_x_forwarded_port; + grpc_set_header X-Real-IP $remote_addr; + grpc_pass grpcs://kong_upstream; } diff --git a/spec/01-unit/08-router_spec.lua b/spec/01-unit/08-router_spec.lua index a042c7c78b2f..1372c274266c 100644 --- 
a/spec/01-unit/08-router_spec.lua +++ b/spec/01-unit/08-router_spec.lua @@ -2362,6 +2362,155 @@ describe("Router", function() end) end) + describe("preserve Host header #grpc", function() + local router + local use_case_routes = { + -- use the request's Host header + { + service = { + name = "service-invalid", + host = "example.org", + protocol = "grpc" + }, + route = { + preserve_host = true, + hosts = { "preserve.com" }, + }, + }, + -- use the route's upstream_url's Host + { + service = { + name = "service-invalid", + host = "example.org", + protocol = "grpc" + }, + route = { + preserve_host = false, + hosts = { "discard.com" }, + }, + }, + } + + lazy_setup(function() + router = assert(Router.new(use_case_routes)) + end) + + describe("when preserve_host is true", function() + local host = "preserve.com" + + it("uses the request's Host header", function() + local _ngx = mock_ngx("GET", "/", { host = host }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[1].route, match_t.route) + assert.equal(host, match_t.upstream_host) + assert.equal("grpc", match_t.service.protocol) + end) + + it("uses the request's Host header incl. port", function() + local _ngx = mock_ngx("GET", "/", { host = host .. ":123" }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[1].route, match_t.route) + assert.equal(host .. 
":123", match_t.upstream_host) + assert.equal("grpc", match_t.service.protocol) + end) + + it("does not change the target upstream", function() + local _ngx = mock_ngx("GET", "/", { host = host }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[1].route, match_t.route) + assert.equal("example.org", match_t.upstream_url_t.host) + assert.equal("grpc", match_t.service.protocol) + end) + + it("uses the request's Host header when `grab_header` is disabled", function() + local use_case_routes = { + { + service = { + name = "service-invalid", + protocol = "grpc", + }, + route = { + name = "route-1", + preserve_host = true, + paths = { "/foo" }, + }, + upstream_url = "http://example.org", + }, + } + + local router = assert(Router.new(use_case_routes)) + local _ngx = mock_ngx("GET", "/foo", { host = "preserve.com" }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[1].route, match_t.route) + assert.equal("preserve.com", match_t.upstream_host) + assert.equal("grpc", match_t.service.protocol) + end) + + it("uses the request's Host header if an route with no host was cached", function() + -- This is a regression test for: + -- https://github.com/Kong/kong/issues/2825 + -- Ensure cached routes (in the LRU cache) still get proxied with the + -- correct Host header when preserve_host = true and no registered + -- route has a `hosts` property. 
+ + local use_case_routes = { + { + service = { + name = "service-invalid", + protocol = "grpc", + }, + route = { + name = "no-host", + paths = { "/nohost" }, + preserve_host = true, + }, + }, + } + + local router = assert(Router.new(use_case_routes)) + local _ngx = mock_ngx("GET", "/nohost", { host = "domain1.com" }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[1].route, match_t.route) + assert.equal("domain1.com", match_t.upstream_host) + assert.equal("grpc", match_t.service.protocol) + + _ngx = mock_ngx("GET", "/nohost", { host = "domain2.com" }) + router._set_ngx(_ngx) + match_t = router.exec() + assert.equal(use_case_routes[1].route, match_t.route) + assert.equal("domain2.com", match_t.upstream_host) + assert.equal("grpc", match_t.service.protocol) + end) + end) + + describe("when preserve_host is false", function() + local host = "discard.com" + + it("does not change the target upstream", function() + local _ngx = mock_ngx("GET", "/", { host = host }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[2].route, match_t.route) + assert.equal("example.org", match_t.upstream_url_t.host) + assert.equal("grpc", match_t.service.protocol) + end) + + it("does not set the host_header", function() + local _ngx = mock_ngx("GET", "/", { host = host }) + router._set_ngx(_ngx) + local match_t = router.exec() + assert.equal(use_case_routes[2].route, match_t.route) + assert.is_nil(match_t.upstream_host) + assert.equal("grpc", match_t.service.protocol) + end) + end) + end) + describe("slash handling", function() local checks = { From eaa204eae962a881213db3837b7ee0c3c960e39d Mon Sep 17 00:00:00 2001 From: Travis Raines Date: Wed, 20 Nov 2019 12:28:30 -0800 Subject: [PATCH 06/41] fix(cmd) create globals before config subcommands (#5230) Initialize the kong global and database before running any config subcommand. Remove an existing, now superfluous initialization specific to db_export. 
db_import and parse invoke parse_file, which runs entity checks. Entity checks can require database access, and the rbac_user Enterprise entity does currently. If the kong global is not present and/or kong.db is not initialized, db_import and parse will fail. --- kong/cmd/config.lua | 30 ++++++++++-------------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/kong/cmd/config.lua b/kong/cmd/config.lua index 87b27f891118..d885b78f876c 100644 --- a/kong/cmd/config.lua +++ b/kong/cmd/config.lua @@ -23,16 +23,6 @@ local function db_export(filename, conf) error(filename .. " already exists. Will not overwrite it.") end - _G.kong = kong_global.new() - kong_global.init_pdk(_G.kong, conf, nil) -- nil: latest PDK - - local db = assert(DB.new(conf)) - assert(db:init_connector()) - assert(db:connect()) - assert(db.plugins:load_plugin_schemas(conf.loaded_plugins)) - - _G.kong.db = db - local fd, err = io.open(filename, "w") if not fd then return nil, err @@ -96,6 +86,16 @@ local function execute(args) error(err) end + _G.kong = kong_global.new() + kong_global.init_pdk(_G.kong, conf, nil) -- nil: latest PDK + + local db = assert(DB.new(conf)) + assert(db:init_connector()) + assert(db:connect()) + assert(db.plugins:load_plugin_schemas(conf.loaded_plugins)) + + _G.kong.db = db + if args.command == "db_export" then return db_export(args[1] or "kong.yml", conf) end @@ -114,16 +114,6 @@ local function execute(args) if args.command == "db_import" then log("parse successful, beginning import") - _G.kong = kong_global.new() - kong_global.init_pdk(_G.kong, conf, nil) -- nil: latest PDK - - local db = assert(DB.new(conf)) - assert(db:init_connector()) - assert(db:connect()) - assert(db.plugins:load_plugin_schemas(conf.loaded_plugins)) - - _G.kong.db = db - local ok, err = declarative.load_into_db(dc_table) if not ok then error("Failed importing:\n" .. 
err) From d8dbaf7bc7cc96b0babe104d7b252738ab0d2e96 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Thu, 21 Nov 2019 16:44:08 -0300 Subject: [PATCH 07/41] chore(ci) build nightly releases from jenkins instead of travis-ci (#167) --- .travis.yml | 1 - Jenkinsfile | 236 ++++++++++++++++++++++++++++++++++++++++++++++++++++ Makefile | 6 +- 3 files changed, 239 insertions(+), 4 deletions(-) create mode 100644 Jenkinsfile diff --git a/.travis.yml b/.travis.yml index 23930357062c..e07261296584 100644 --- a/.travis.yml +++ b/.travis.yml @@ -58,7 +58,6 @@ cache: stages: - lint and unit - test - - Deploy daily build - Release jobs: diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 000000000000..806ec5f3c32a --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,236 @@ +pipeline { + agent none + environment { + UPDATE_CACHE = "true" + DOCKER_CREDENTIALS = credentials('dockerhub') + DOCKER_USERNAME = "${env.DOCKER_CREDENTIALS_USR}" + DOCKER_PASSWORD = "${env.DOCKER_CREDENTIALS_PSW}" + KONG_PACKAGE_NAME = 'kong' + REPOSITORY_OS_NAME = "${env.BRANCH_NAME}" + } + triggers { + cron('@daily') + } + stages { + stage('Build Kong') { + agent { + node { + label 'docker-compose' + } + } + environment { + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + } + steps { + sh 'make setup-kong-build-tools' + sh 'echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin || true' + dir('../kong-build-tools') { sh 'make kong-test-container' } + } + } + stage('Integration Tests') { + parallel { + stage('dbless') { + agent { + node { + label 'docker-compose' + } + } + environment { + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + TEST_DATABASE = "off" + TEST_SUITE = "dbless" + } + steps { + sh 'make setup-kong-build-tools' + dir('../kong-build-tools') { + sh 'make test-kong' + } + } + } + stage('postgres') { + agent { + node { + label 
'docker-compose' + } + } + environment { + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + TEST_DATABASE = 'postgres' + } + steps { + sh 'make setup-kong-build-tools' + dir('../kong-build-tools') { + sh 'make test-kong' + } + } + } + stage('postgres plugins') { + agent { + node { + label 'docker-compose' + } + } + environment { + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + TEST_DATABASE = 'postgres' + TEST_SUITE = 'plugins' + } + steps { + sh 'make setup-kong-build-tools' + dir('../kong-build-tools'){ + sh 'make test-kong' + } + } + } + stage('cassandra') { + agent { + node { + label 'docker-compose' + } + } + environment { + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + TEST_DATABASE = 'cassandra' + } + steps { + sh 'make setup-kong-build-tools' + dir('../kong-build-tools'){ + sh 'make test-kong' + } + } + } + } + } + stage('Nightly Releases') { + when { + allOf { + triggeredBy 'TimerTrigger' + anyOf { branch 'master'; branch 'next' } + } + } + parallel { + stage('Ubuntu Xenial Release') { + agent { + node { + label 'docker-compose' + } + } + options { + retry(2) + } + environment { + PACKAGE_TYPE = 'deb' + RESTY_IMAGE_BASE = 'ubuntu' + RESTY_IMAGE_TAG = 'xenial' + CACHE = 'false' + UPDATE_CACHE = 'true' + USER = 'travis' + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + BINTRAY_USR = 'kong-inc_travis-ci@kong' + BINTRAY_KEY = credentials('bintray_travis_key') + AWS_ACCESS_KEY = credentials('AWS_ACCESS_KEY') + AWS_SECRET_ACCESS_KEY = credentials('AWS_SECRET_ACCESS_KEY') + DOCKER_MACHINE_ARM64_NAME = "jenkins-kong-${env.BUILD_NUMBER}" + } + steps { + sh 'make setup-kong-build-tools' + sh 'mkdir -p $HOME/bin' + sh 'sudo ln -s $HOME/bin/kubectl /usr/local/bin/kubectl' + sh 'sudo ln -s $HOME/bin/kind 
/usr/local/bin/kind' + dir('../kong-build-tools'){ sh 'make setup-ci' } + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` make nightly-release' + } + post { + always { + dir('../kong-build-tools'){ sh 'make cleanup_build' } + } + } + } + stage('Ubuntu Releases') { + agent { + node { + label 'docker-compose' + } + } + environment { + PACKAGE_TYPE = 'deb' + RESTY_IMAGE_BASE = 'ubuntu' + RESTY_IMAGE_TAG = 'xenial' + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + BINTRAY_USR = 'kong-inc_travis-ci@kong' + BINTRAY_KEY = credentials('bintray_travis_key') + DOCKER_MACHINE_ARM64_NAME = "jenkins-kong-${env.BUILD_NUMBER}" + } + steps { + sh 'make setup-kong-build-tools' + sh 'mkdir -p $HOME/bin' + sh 'sudo ln -s $HOME/bin/kubectl /usr/local/bin/kubectl' + sh 'sudo ln -s $HOME/bin/kind /usr/local/bin/kind' + dir('../kong-build-tools'){ sh 'make setup-ci' } + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=trusty BUILDX=false make nightly-release' + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=bionic BUILDX=false make nightly-release' + } + } + stage('Centos Releases') { + agent { + node { + label 'docker-compose' + } + } + environment { + PACKAGE_TYPE = 'rpm' + RESTY_IMAGE_BASE = 'centos' + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + REDHAT_CREDENTIALS = credentials('redhat') + REDHAT_USERNAME = "${env.REDHAT_USR}" + REDHAT_PASSWORD = "${env.REDHAT_PSW}" + BINTRAY_USR = 'kong-inc_travis-ci@kong' + BINTRAY_KEY = credentials('bintray_travis_key') + } + steps { + sh 'make setup-kong-build-tools' + sh 'mkdir -p $HOME/bin' + sh 'sudo ln -s $HOME/bin/kubectl /usr/local/bin/kubectl' + sh 'sudo ln -s $HOME/bin/kind /usr/local/bin/kind' + dir('../kong-build-tools'){ sh 'make setup-ci' } + sh 
'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=6 make nightly-release' + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=7 make nightly-release' + } + } + stage('Debian Releases') { + agent { + node { + label 'docker-compose' + } + } + environment { + PACKAGE_TYPE = 'deb' + RESTY_IMAGE_BASE = 'debian' + KONG_SOURCE_LOCATION = "${env.WORKSPACE}" + KONG_BUILD_TOOLS_LOCATION = "${env.WORKSPACE}/../kong-build-tools" + BINTRAY_USR = 'kong-inc_travis-ci@kong' + BINTRAY_KEY = credentials('bintray_travis_key') + } + steps { + sh 'make setup-kong-build-tools' + sh 'mkdir -p $HOME/bin' + sh 'sudo ln -s $HOME/bin/kubectl /usr/local/bin/kubectl' + sh 'sudo ln -s $HOME/bin/kind /usr/local/bin/kind' + dir('../kong-build-tools'){ sh 'make setup-ci' } + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=jessie make nightly-release' + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=stretch make nightly-release' + sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=buster make nightly-release' + } + } + } + } + } +} diff --git a/Makefile b/Makefile index 7135f9189786..33bfe782772d 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ RESTY_VERSION ?= `grep RESTY_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk RESTY_LUAROCKS_VERSION ?= `grep RESTY_LUAROCKS_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'` RESTY_OPENSSL_VERSION ?= `grep RESTY_OPENSSL_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'` RESTY_PCRE_VERSION ?= `grep RESTY_PCRE_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'` -KONG_BUILD_TOOLS ?= '2.0.3' +KONG_BUILD_TOOLS ?= '2.0.5' KONG_VERSION ?= `cat $(KONG_SOURCE_LOCATION)/kong-*.rockspec | grep tag | awk '{print $$3}' | sed 's/"//g'` 
OPENRESTY_PATCHES_BRANCH ?= master KONG_NGINX_MODULE_BRANCH ?= master @@ -43,8 +43,8 @@ setup-ci: .ci/setup_env.sh setup-kong-build-tools: - -rm -rf kong-build-tools; \ - git clone https://github.com/Kong/kong-build-tools.git $(KONG_BUILD_TOOLS_LOCATION); fi + -rm -rf $(KONG_BUILD_TOOLS_LOCATION) + -git clone https://github.com/Kong/kong-build-tools.git $(KONG_BUILD_TOOLS_LOCATION) cd $(KONG_BUILD_TOOLS_LOCATION); \ git reset --hard $(KONG_BUILD_TOOLS); \ From 9b9008cc31ad8246acd8f5007ab717f223a47834 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Thu, 21 Nov 2019 16:46:10 -0300 Subject: [PATCH 08/41] tests(helpers) be more resilient to port availability --- spec/helpers.lua | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/spec/helpers.lua b/spec/helpers.lua index f51322828d3d..3e47cc342b5a 100644 --- a/spec/helpers.lua +++ b/spec/helpers.lua @@ -783,7 +783,14 @@ local function mock_reports_server(opts) local server = assert(socket.tcp()) server:settimeout(360) assert(server:setoption("reuseaddr", true)) - assert(server:bind(host, port)) + local counter = 0 + while not server:bind(host, port) do + counter = counter + 1 + if counter > 5 then + error('could not bind successfully') + end + socket.sleep(1) + end assert(server:listen()) local data = {} local handshake_done = false @@ -839,7 +846,9 @@ local function mock_reports_server(opts) local sock = ngx.socket.tcp() sock:settimeout(0.01) while true do - if sock:connect(localhost, server_port) then + if not thread:alive() then + error('the reports thread died') + elseif sock:connect(localhost, server_port) then sock:send("\\START\n") local ok = sock:receive() sock:close() From e7eae9d8dbfbfb3a8ce4b913a63e5f6a455caadc Mon Sep 17 00:00:00 2001 From: "zee.shen" Date: Mon, 25 Nov 2019 22:23:07 +0800 Subject: [PATCH 09/41] fix(balancer) remove old balancer from upstream_ids (#5229) --- kong/runloop/balancer.lua | 1 + 1 file changed, 1 insertion(+) diff --git 
a/kong/runloop/balancer.lua b/kong/runloop/balancer.lua index 3fee08038dd7..e9b63286efe7 100644 --- a/kong/runloop/balancer.lua +++ b/kong/runloop/balancer.lua @@ -68,6 +68,7 @@ local function set_balancer(upstream_id, balancer) healthcheckers[prev] = nil healthchecker_callbacks[prev] = nil target_histories[prev] = nil + upstream_ids[prev] = nil end balancers[upstream_id] = balancer end From 8b67d52c924a0857878e3ecf56e7365c1e5d62d7 Mon Sep 17 00:00:00 2001 From: suijaa <56831512+suijaa@users.noreply.github.com> Date: Mon, 25 Nov 2019 20:33:17 +0530 Subject: [PATCH 10/41] docs(upgrade) fix spelling (#5232) Corrected the spelling of "decommission" in the whole document --- UPGRADE.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/UPGRADE.md b/UPGRADE.md index 2756ba667ef2..52ad3e86fc0d 100644 --- a/UPGRADE.md +++ b/UPGRADE.md @@ -88,7 +88,7 @@ The migrations are designed so that there is no need to fully copy the data, but this also means that they are designed in such a way so that the new version of Kong is able to use the data as it is migrated, and to do it in a way so that the old Kong cluster keeps working until it is finally -time to decomission it. For this reason, the full migration is now split into +time to decommission it. For this reason, the full migration is now split into two steps, which are performed via commands `kong migrations up` (which does only non-destructive operations) and `kong migrations finish` (which puts the database in the final expected state for Kong 1.4.0). @@ -435,7 +435,7 @@ The migrations are designed so that there is no need to fully copy the data, but this also means that they are designed in such a way so that the new version of Kong is able to use the data as it is migrated, and to do it in a way so that the old Kong cluster keeps working until it is finally -time to decomission it. For this reason, the full migration is now split into +time to decommission it. 
For this reason, the full migration is now split into two steps, which are performed via commands `kong migrations up` (which does only non-destructive operations) and `kong migrations finish` (which puts the database in the final expected state for Kong 1.2). @@ -530,7 +530,7 @@ The migrations are designed so that there is no need to fully copy the data, but this also means that they are designed in such a way so that the new version of Kong is able to use the data as it is migrated, and to do it in a way so that the old Kong cluster keeps working until it is finally -time to decomission it. For this reason, the full migration is now split into +time to decommission it. For this reason, the full migration is now split into two steps, which are performed via commands `kong migrations up` (which does only non-destructive operations) and `kong migrations finish` (which puts the database in the final expected state for Kong 1.1). @@ -801,7 +801,7 @@ The migrations are designed so that there is no need to fully copy the data, but this also means that they are designed in such a way so that the new version of Kong is able to use the data as it is migrated, and to do it in a way so that the old Kong cluster keeps working until it is finally -time to decomission it. For this reason, the full migration is now split into +time to decommission it. For this reason, the full migration is now split into two steps, which are performed via commands `kong migrations up` (which does only non-destructive operations) and `kong migrations finish` (which puts the database in the final expected state for Kong 1.0). From 4d51c52ea067baf494139fbe9d40e69fddb955b5 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Mon, 25 Nov 2019 12:06:31 -0300 Subject: [PATCH 11/41] docs(autodoc) document endpoint-key-based notation for Admin API calls (#5245) Let the user know that they can use `service.name` instead of `service.id` when referring to a foreign key, etc. 
--- scripts/autodoc-admin-api | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/scripts/autodoc-admin-api b/scripts/autodoc-admin-api index 05a8c5aba552..0c8afb74ef78 100755 --- a/scripts/autodoc-admin-api +++ b/scripts/autodoc-admin-api @@ -322,10 +322,28 @@ local function gen_notation(fname, finfo, field_data) table.concat(form_example, "&") .. [[`. With JSON, use an Array.]] elseif finfo.type == "foreign" then - return [[ With form-encoded, the notation is `]] .. - fname .. [[.id=<]] .. fname .. - [[_id>`. With JSON, use `"]] .. fname .. - [[":{"id":"<]] .. fname .. [[_id>"}`.]] + local fschema = assert(require("kong.db.schema.entities." .. finfo.reference)) + local ek = fschema.endpoint_key + if ek then + return ([[With form-encoded, the notation is ]] .. + [[`$FNAME.id=<$FNAME id>` or ]] .. + [[`$FNAME.$ENDPOINT_KEY=<$FNAME $ENDPOINT_KEY>`. ]] .. + [[With JSON, use "]] .. + [[`"$FNAME":{"id":"<$FNAME id>"}` or ]] .. + [[`"$FNAME":{"$ENDPOINT_KEY":"<$FNAME $ENDPOINT_KEY>"}`.]]): + gsub("$([A-Z_]*)", { + FNAME = fname, + ENDPOINT_KEY = ek, + }) + else + return ([[With form-encoded, the notation is ]] .. + [[`$FNAME.id=<$FNAME id>`. ]] .. + [[With JSON, use "]] .. + [[`"$FNAME":{"id":"<$FNAME id>"}`.]]): + gsub("$([A-Z_]*)", { + FNAME = fname, + }) + end else return "" end From 6ddc95a27901f6a217f951d6d8eb15d4db1bb2f4 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Mon, 25 Nov 2019 22:08:51 +0200 Subject: [PATCH 12/41] fix(migrations) oauth2 tokens ttl was incorrectly migrated (#5253) ### Summary This PR was originally send as #4588, but merge conflict was probably wrongly resolved, so this change got missing. Here is another try to get this in. 
### Issues resolved Fix #4572, #5242 --- kong/plugins/oauth2/migrations/001_14_to_15.lua | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kong/plugins/oauth2/migrations/001_14_to_15.lua b/kong/plugins/oauth2/migrations/001_14_to_15.lua index 1362186f5397..fc2de630edbb 100644 --- a/kong/plugins/oauth2/migrations/001_14_to_15.lua +++ b/kong/plugins/oauth2/migrations/001_14_to_15.lua @@ -43,7 +43,8 @@ return { DO $$ BEGIN UPDATE "oauth2_tokens" - SET "ttl" = "created_at" + (COALESCE("expires_in", 0)::TEXT || ' seconds')::INTERVAL; + SET "ttl" = "created_at" + (COALESCE("expires_in", 0)::TEXT || ' seconds')::INTERVAL + WHERE "expires_in" > 0; EXCEPTION WHEN UNDEFINED_COLUMN OR UNDEFINED_TABLE THEN -- Do nothing, accept existing state END$$; From 4fb2284712dd7093224cbd522052561c08b5990e Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Tue, 26 Nov 2019 09:06:51 -0500 Subject: [PATCH 13/41] tests(integration) reduce the flakiness of the start stop test (#5250) --- .../02-cmd/02-start_stop_spec.lua | 27 ++++++++----------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/spec/02-integration/02-cmd/02-start_stop_spec.lua b/spec/02-integration/02-cmd/02-start_stop_spec.lua index a29b97005755..4277388171af 100644 --- a/spec/02-integration/02-cmd/02-start_stop_spec.lua +++ b/spec/02-integration/02-cmd/02-start_stop_spec.lua @@ -208,24 +208,19 @@ describe("kong start/stop #" .. 
strategy, function() if not ok then error(stderr) end + + helpers.wait_until(function() + local cmd = string.format("%s health -p ./servroot", helpers.bin_path) + return pl_utils.executeex(cmd) + end, 10) - do - local proxy_client - - -- get a connection, retry until kong starts - helpers.wait_until(function() - local pok - pok, proxy_client = pcall(helpers.proxy_client) - return pok - end, 10) - - local res = assert(proxy_client:send { - method = "GET", - path = "/hello", - }) - assert.res_status(404, res) -- no Route configured - end + local proxy_client = assert(helpers.proxy_client()) + local res = assert(proxy_client:send { + method = "GET", + path = "/hello", + }) + assert.res_status(404, res) -- no Route configured assert(helpers.stop_kong(helpers.test_conf.prefix)) -- TEST: since nginx started in the foreground, the 'kong start' command From ecd378129717357fa4a841c797f86c82f7ecf1f1 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Tue, 26 Nov 2019 09:07:05 -0500 Subject: [PATCH 14/41] chore(dependency) bump kong-build-tools pinned version (#5251) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 33bfe782772d..dab3f756056f 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ RESTY_VERSION ?= `grep RESTY_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk RESTY_LUAROCKS_VERSION ?= `grep RESTY_LUAROCKS_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'` RESTY_OPENSSL_VERSION ?= `grep RESTY_OPENSSL_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'` RESTY_PCRE_VERSION ?= `grep RESTY_PCRE_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'` -KONG_BUILD_TOOLS ?= '2.0.5' +KONG_BUILD_TOOLS ?= '2.0.8' KONG_VERSION ?= `cat $(KONG_SOURCE_LOCATION)/kong-*.rockspec | grep tag | awk '{print $$3}' | sed 's/"//g'` OPENRESTY_PATCHES_BRANCH ?= master KONG_NGINX_MODULE_BRANCH ?= master From e7be8f619009d0f4f7070c528746bc5c47fee174 Mon Sep 17 00:00:00 2001 
From: Colin Hutchinson Date: Tue, 26 Nov 2019 23:47:17 -0500 Subject: [PATCH 15/41] feat(release) setup signed rpm packages on daily releases (#5262) --- Jenkinsfile | 3 +++ Makefile | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 806ec5f3c32a..e44278126822 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -194,6 +194,8 @@ pipeline { REDHAT_PASSWORD = "${env.REDHAT_PSW}" BINTRAY_USR = 'kong-inc_travis-ci@kong' BINTRAY_KEY = credentials('bintray_travis_key') + PRIVATE_KEY_FILE = credentials('kong.private.gpg-key.asc') + PRIVATE_KEY_PASSPHRASE = credentials('kong.private.gpg-key.asc.password') } steps { sh 'make setup-kong-build-tools' @@ -201,6 +203,7 @@ pipeline { sh 'sudo ln -s $HOME/bin/kubectl /usr/local/bin/kubectl' sh 'sudo ln -s $HOME/bin/kind /usr/local/bin/kind' dir('../kong-build-tools'){ sh 'make setup-ci' } + sh 'cp $PRIVATE_KEY_FILE ../kong-build-tools/kong.private.gpg-key.asc' sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=6 make nightly-release' sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=7 make nightly-release' } diff --git a/Makefile b/Makefile index dab3f756056f..e75917c8e7a3 100644 --- a/Makefile +++ b/Makefile @@ -54,7 +54,7 @@ functional-tests: setup-kong-build-tools $(MAKE) build-kong && \ $(MAKE) test -nightly-release: setup-kong-build-tools +nightly-release: sed -i -e '/return string\.format/,/\"\")/c\return "$(KONG_VERSION)\"' kong/meta.lua && \ cd $(KONG_BUILD_TOOLS_LOCATION); \ $(MAKE) package-kong && \ From f8793e751ef55f53f2f96fb45b5d22f251ea9807 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Thu, 28 Nov 2019 12:16:46 -0500 Subject: [PATCH 16/41] chore(ci) drop trusty as a release target (#5272) --- .travis.yml | 6 ------ Jenkinsfile | 1 - 2 files changed, 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index e07261296584..dfbf6f11ad8b 100644 --- a/.travis.yml +++ 
b/.travis.yml @@ -77,12 +77,6 @@ jobs: - make release env: PACKAGE_TYPE=src RESTY_IMAGE_BASE=src KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=${TRAVIS_TAG} if: tag IS present AND tag ~= 1. - - script: - - make setup-kong-build-tools - - pushd ../kong-build-tools && make setup-ci && popd - - make release - env: PACKAGE_TYPE=deb RESTY_IMAGE_BASE=ubuntu RESTY_IMAGE_TAG=trusty KONG_PACKAGE_NAME=${PWD##*/} KONG_VERSION=${TRAVIS_TAG} - if: tag IS present AND tag ~= 1. - script: - make setup-kong-build-tools - pushd ../kong-build-tools && make setup-ci && popd diff --git a/Jenkinsfile b/Jenkinsfile index e44278126822..c23c36feaac1 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -174,7 +174,6 @@ pipeline { sh 'sudo ln -s $HOME/bin/kubectl /usr/local/bin/kubectl' sh 'sudo ln -s $HOME/bin/kind /usr/local/bin/kind' dir('../kong-build-tools'){ sh 'make setup-ci' } - sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=trusty BUILDX=false make nightly-release' sh 'REPOSITORY_NAME=`basename ${GIT_URL%.*}`-nightly KONG_VERSION=`date +%Y-%m-%d` RESTY_IMAGE_TAG=bionic BUILDX=false make nightly-release' } } From 32040b5c4bd43ee9643794a5068fae35a07f0574 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Mon, 2 Dec 2019 17:41:30 -0300 Subject: [PATCH 17/41] tests(http-log) make http-log tests faster (#5274) --- spec/03-plugins/03-http-log/01-log_spec.lua | 48 +++++++++++---------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/spec/03-plugins/03-http-log/01-log_spec.lua b/spec/03-plugins/03-http-log/01-log_spec.lua index b0b30cb36a85..9ddeda3b3b5d 100644 --- a/spec/03-plugins/03-http-log/01-log_spec.lua +++ b/spec/03-plugins/03-http-log/01-log_spec.lua @@ -113,6 +113,7 @@ for _, strategy in helpers.each_strategy() do name = "http-log", config = { queue_size = 5, + flush_timeout = 0.1, http_endpoint = "http://" .. helpers.mock_upstream_host .. ":" .. 
helpers.mock_upstream_port @@ -324,6 +325,7 @@ for _, strategy in helpers.each_strategy() do name = "http-log", config = { queue_size = 5, + flush_timeout = 0.1, http_endpoint = "http://" .. helpers.mock_upstream_host .. ":" .. helpers.mock_upstream_port @@ -341,6 +343,7 @@ for _, strategy in helpers.each_strategy() do name = "http-log", config = { queue_size = 5, + flush_timeout = 0.1, http_endpoint = "http://" .. helpers.mock_upstream_host .. ":" .. helpers.mock_upstream_port @@ -397,10 +400,7 @@ for _, strategy in helpers.each_strategy() do it("logs to HTTP with a buffer", function() reset_log("http_queue") - local n = 100 - if workers > 1 then - n = 500 - end + local n = 200 for i = 1, n do local client = helpers.proxy_client() @@ -427,21 +427,19 @@ for _, strategy in helpers.each_strategy() do }) local count = assert.res_status(200, res) + client:close() if tonumber(count, 10) >= n then return true end - end, 30) + end, 60) end) it("does not mix buffers", function() reset_log("http_queue") reset_log("http_queue2") - local n = 100 - if workers > 1 then - n = 500 - end + local n = 200 for i = 1, n do local client = helpers.proxy_client() @@ -479,6 +477,7 @@ for _, strategy in helpers.each_strategy() do }) local raw = assert.res_status(200, res) local body = cjson.decode(raw) + client:close() local client2 = assert(helpers.http_client(helpers.mock_upstream_host, helpers.mock_upstream_port)) @@ -491,19 +490,20 @@ for _, strategy in helpers.each_strategy() do }) local raw2 = assert.res_status(200, res2) local body2 = cjson.decode(raw2) + client2:close() if body.count < n or body2.count < n then return false end + table.sort(body.entries, function(a, b) + return a.response.status < b.response.status + end) + local i = 0 for _, entry in ipairs(body.entries) do assert.same("127.0.0.1", entry.client_ip) - - -- we only get a deterministic order with workers == 1 - if workers == 1 then - assert.same(200 + ((i + 1) % 10), entry.response.status) - end + assert.same(200 + 
math.floor(i / (n / 10)), entry.response.status) i = i + 1 end @@ -511,14 +511,14 @@ for _, strategy in helpers.each_strategy() do return false end - local i = 0 + table.sort(body2.entries, function(a, b) + return a.response.status < b.response.status + end) + + i = 0 for _, entry in ipairs(body2.entries) do assert.same("127.0.0.1", entry.client_ip) - - -- we only get a deterministic order with workers == 1 - if workers == 1 then - assert.same(300 + ((i + 1) % 10), entry.response.status) - end + assert.same(300 + math.floor(i / (n / 10)), entry.response.status) i = i + 1 end @@ -527,7 +527,7 @@ for _, strategy in helpers.each_strategy() do end return true - end, 30) + end, 60) end) end) @@ -538,7 +538,11 @@ for _, strategy in helpers.each_strategy() do local proxy_client lazy_setup(function() - local bp = helpers.get_db_utils(strategy) + local bp = helpers.get_db_utils(strategy, { + "routes", + "services", + "plugins", + }) bp.plugins:insert { name = "http-log", From 97f5a21c190e982d9926b7f943d5d3b6aa385ad7 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 3 Dec 2019 13:36:58 -0300 Subject: [PATCH 18/41] fix(http-log) do not impose a retry delay on successful sends (#5282) The exponential backoff should only be applied in case of failures to send. Starting the delay value from 1 was forcing a 1 second delay between sends (even in non-batched mode). 
--- kong/tools/batch_queue.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kong/tools/batch_queue.lua b/kong/tools/batch_queue.lua index 4517976b053f..95266dcafec0 100644 --- a/kong/tools/batch_queue.lua +++ b/kong/tools/batch_queue.lua @@ -173,9 +173,9 @@ process = function(premature, self, batch) local next_retry_delay local ok, err = self.process(batch.entries) - if ok then -- success, set retry_delays to 1 + if ok then -- success, reset retry delays self.retry_delay = 1 - next_retry_delay = 1 + next_retry_delay = 0 else batch.retries = batch.retries + 1 From a46904b90d9092e69dbb7878b517d0f583677dc9 Mon Sep 17 00:00:00 2001 From: Harry Date: Tue, 3 Dec 2019 09:28:01 -0800 Subject: [PATCH 19/41] fix(reports) do not log error on TCP connection issues (#5281) --- kong/reports.lua | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/kong/reports.lua b/kong/reports.lua index 71971016ef12..ce0313423f2b 100644 --- a/kong/reports.lua +++ b/kong/reports.lua @@ -89,7 +89,7 @@ local function serialize_report_value(v) end --- UDP logger +-- TCP logger local function send_report(signal_type, t, host, port) @@ -124,21 +124,16 @@ local function send_report(signal_type, t, host, port) local sock = tcp_sock() sock:settimeouts(30000, 30000, 30000) - local ok, err = sock:connect(host, port) - if not ok then - log(WARN, "could not connect to TCP socket: ", err) - return - end + -- errors are not logged to avoid false positives for users + -- who run Kong in an air-gapped environments - local ok, err = sock:send(concat(_buffer, ";", 1, mutable_idx) .. "\n") + local ok = sock:connect(host, port) if not ok then - log(WARN, "could not send data: ", err) + return end - ok, err = sock:setkeepalive() - if not ok then - log(WARN, "could not setkeepalive socket: ", err) - end + sock:send(concat(_buffer, ";", 1, mutable_idx) .. 
"\n") + sock:setkeepalive() end From a5a75927e8bd205d036429ef79d4c5cf6ade86f9 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 3 Dec 2019 14:36:39 -0300 Subject: [PATCH 20/41] chore(deps) bump lua-resty-healthcheck to 1.1.1 (#5287) --- kong-1.4.0-0.rockspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong-1.4.0-0.rockspec b/kong-1.4.0-0.rockspec index 64957651dddc..3cb51680d38a 100644 --- a/kong-1.4.0-0.rockspec +++ b/kong-1.4.0-0.rockspec @@ -33,7 +33,7 @@ dependencies = { "lua-resty-dns-client == 4.1.1", "lua-resty-worker-events == 1.0.0", "lua-resty-mediador == 0.1.2", - "lua-resty-healthcheck == 1.1.0", + "lua-resty-healthcheck == 1.1.1", "lua-resty-cookie == 0.1.0", "lua-resty-mlcache == 2.4.0", -- external Kong plugins From c536ec845f987bc990861b8e01fbf7128ffdc427 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 3 Dec 2019 14:45:48 -0300 Subject: [PATCH 21/41] fix(balancer) handle errors when creating balancer properly (#5284) * Do not leave the `creating` flag behind when the balancer fails to create. This includes a refactor to ensure that the mutually-exclusive section of the balancer creation always releases the flag, so that this doesn't happen again. * Make more explicit the fact that we carry on if the healthchecker fails, by logging it on `create_balancer` * Add a sanity check in the ring balancer callback to ensure balancers and healthcheckers are in sync, as already happens in other parts of the code. 
Fix #5189 Fix #5283 --- kong/runloop/balancer.lua | 80 +++++++++++++++++++++++---------------- 1 file changed, 48 insertions(+), 32 deletions(-) diff --git a/kong/runloop/balancer.lua b/kong/runloop/balancer.lua index e9b63286efe7..01307057e6a0 100644 --- a/kong/runloop/balancer.lua +++ b/kong/runloop/balancer.lua @@ -161,7 +161,6 @@ end -- @param rb ring balancer object -- @param history list of targets/transactions to be applied -- @param start the index where to start in the `history` parameter --- @return true local function apply_history(rb, history, start) for i = start, #history do @@ -180,8 +179,6 @@ local function apply_history(rb, history, start) order = target.order, } end - - return true end @@ -231,6 +228,9 @@ do -- @param hostname string local function ring_balancer_callback(balancer, action, address, ip, port, hostname) local healthchecker = healthcheckers[balancer] + if not healthchecker then + return + end if action == "health" then local balancer_status @@ -356,7 +356,6 @@ do }) if not healthchecker then - log(ERR, "[healthchecks] error creating health checker: ", err) return nil, err end @@ -366,6 +365,8 @@ do -- only enable the callback after the target history has been replayed. balancer:setCallback(ring_balancer_callback) + + return true end end @@ -390,50 +391,31 @@ do return nil, "timeout" end - local function invalidate_upstream_caches(upstream_id) - singletons.cache:invalidate_local("balancer:upstreams:" .. upstream_id) - singletons.cache:invalidate_local("balancer:targets:" .. upstream_id) - end - ------------------------------------------------------------------------------ + -- The mutually-exclusive section used internally by the + -- 'create_balancer' operation. 
-- @param upstream (table) A db.upstreams entity - -- @param recreate (boolean, optional) create new balancer even if one exists -- @param history (table, optional) history of target updates -- @param start (integer, optional) from where to start reading the history -- @return The new balancer object, or nil+error - create_balancer = function(upstream, recreate, history, start) - - if balancers[upstream.id] and not recreate then - return balancers[upstream.id] - end - - if creating[upstream.id] then - local ok = wait(upstream.id) - if not ok then - return nil, "timeout waiting for balancer for " .. upstream.id - end - return balancers[upstream.id] - end - - creating[upstream.id] = true - + local function create_balancer_exclusive(upstream, history, start) local balancer, err = balancer_types[upstream.algorithm].new({ wheelSize = upstream.slots, -- will be ignored by least-connections dns = dns_client, }) - if not balancer then - return nil, err + return nil, "failed creating balancer:" .. err end - invalidate_upstream_caches(upstream.id) + singletons.cache:invalidate_local("balancer:upstreams:" .. upstream.id) + singletons.cache:invalidate_local("balancer:targets:" .. upstream.id) target_histories[balancer] = {} if not history then history, err = fetch_target_history(upstream) if not history then - return nil, err + return nil, "failed fetching target history:" .. err end start = 1 end @@ -442,15 +424,49 @@ do upstream_ids[balancer] = upstream.id - create_healthchecker(balancer, upstream) + local ok, err = create_healthchecker(balancer, upstream) + if not ok then + log(ERR, "[healthchecks] error creating health checker: ", err) + end -- only make the new balancer available for other requests after it -- is fully set up. set_balancer(upstream.id, balancer) + return balancer + end + + ------------------------------------------------------------------------------ + -- Create a balancer object, its healthchecker and attach them to the + -- necessary data structures. 
The creation of the balancer happens in a + -- per-worker mutual exclusion section, such that no two requests create the + -- same balancer at the same time. + -- @param upstream (table) A db.upstreams entity + -- @param recreate (boolean, optional) create new balancer even if one exists + -- @param history (table, optional) history of target updates + -- @param start (integer, optional) from where to start reading the history + -- @return The new balancer object, or nil+error + create_balancer = function(upstream, recreate, history, start) + + if balancers[upstream.id] and not recreate then + return balancers[upstream.id] + end + + if creating[upstream.id] then + local ok = wait(upstream.id) + if not ok then + return nil, "timeout waiting for balancer for " .. upstream.id + end + return balancers[upstream.id] + end + + creating[upstream.id] = true + + local balancer, err = create_balancer_exclusive(upstream, history, start) + creating[upstream.id] = nil - return balancer + return balancer, err end end From 3b5694b148650803490f028e30db99cb2cfb6a95 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 3 Dec 2019 15:48:28 -0300 Subject: [PATCH 22/41] docs(changelog) add 1.4.1 changes --- CHANGELOG.md | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1fed253a1fc5..231e17c64b9a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Table of Contents +- [1.4.1](#141) - [1.4.0](#140) - [1.3.0](#130) - [1.2.2](#122) @@ -32,6 +33,52 @@ - [0.10.0](#0100---20170307) - [0.9.9 and prior](#099---20170202) + +## [1.4.1] + +> Released 2019/12/03 + +This is a patch release in the 1.4 series, and as such, strictly contains +bugfixes. There are no new features nor breaking changes. + +### Fixes + +##### Core + + - Fixed a memory leak in the balancer + [#5229](https://github.com/Kong/kong/pull/5229) -- + Thanks [zeeshen](https://github.com/zeeshen) for the patch! 
+ - Removed arbitrary limit on worker connections. + [#5148](https://github.com/Kong/kong/pull/5148) + - Fixed `preserve_host` behavior for gRPC routes + [#5225](https://github.com/Kong/kong/pull/5225) + - Fix migrations for ttl for OAuth2 tokens + [#5253](https://github.com/Kong/kong/pull/5253) + - Improve handling of errors when creating balancers + [#5284](https://github.com/Kong/kong/pull/5284) + +##### CLI + + - Fixed an issue with `kong config db_export` when reading + entities that are ttl-enabled and whose ttl value is `null`. + [#5185](https://github.com/Kong/kong/pull/5185) + +##### Admin API + + - Various fixes for Admin API behavior + [#5174](https://github.com/Kong/kong/pull/5174), + [#5178](https://github.com/Kong/kong/pull/5178), + [#5191](https://github.com/Kong/kong/pull/5191), + [#5186](https://github.com/Kong/kong/pull/5186) + +##### Plugins + + - http-log: do not impose a retry delay on successful sends + [#5282](https://github.com/Kong/kong/pull/5282) + + +[Back to TOC](#table-of-contents) + ## [1.4.0] > Released on 2019/10/22 @@ -4218,6 +4265,7 @@ First version running with Cassandra. 
[Back to TOC](#table-of-contents) +[1.4.1]: https://github.com/Kong/kong/compare/1.4.0...1.4.1 [1.4.0]: https://github.com/Kong/kong/compare/1.3.0...1.4.0 [1.3.0]: https://github.com/Kong/kong/compare/1.2.2...1.3.0 [1.2.2]: https://github.com/Kong/kong/compare/1.2.1...1.2.2 From ccbf8976f364b7bce0f0e08294343eaf1d237d3a Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 3 Dec 2019 16:03:12 -0300 Subject: [PATCH 23/41] release: 1.4.1 --- kong-1.4.0-0.rockspec => kong-1.4.1-0.rockspec | 4 ++-- kong/meta.lua | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename kong-1.4.0-0.rockspec => kong-1.4.1-0.rockspec (99%) diff --git a/kong-1.4.0-0.rockspec b/kong-1.4.1-0.rockspec similarity index 99% rename from kong-1.4.0-0.rockspec rename to kong-1.4.1-0.rockspec index 3cb51680d38a..f33ad662936a 100644 --- a/kong-1.4.0-0.rockspec +++ b/kong-1.4.1-0.rockspec @@ -1,9 +1,9 @@ package = "kong" -version = "1.4.0-0" +version = "1.4.1-0" supported_platforms = {"linux", "macosx"} source = { url = "git://github.com/Kong/kong", - tag = "1.4.0" + tag = "1.4.1" } description = { summary = "Kong is a scalable and customizable API Management Layer built on top of Nginx.", diff --git a/kong/meta.lua b/kong/meta.lua index 4fef51853f34..f4a9ab9f7cdd 100644 --- a/kong/meta.lua +++ b/kong/meta.lua @@ -1,7 +1,7 @@ local version = setmetatable({ major = 1, minor = 4, - patch = 0, + patch = 1, -- suffix = "", }, { __tostring = function(t) From e13cae639afdce3bf9c2e9f908706e74947723b0 Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Wed, 4 Dec 2019 06:02:04 -0800 Subject: [PATCH 24/41] tests(grpc) add logging plugin tests (#5276) Add test cases for grpc/grpcs on logging plugins. 
- tcp-log - udp-log - http-log - file-log --- .../03-plugins/01-tcp-log/01-tcp-log_spec.lua | 46 ++++- .../03-plugins/02-udp-log/01-udp-log_spec.lua | 167 ++++++++++++++++++ spec/03-plugins/03-http-log/01-log_spec.lua | 124 ++++++++++++- spec/03-plugins/04-file-log/01-log_spec.lua | 99 +++++++++++ 4 files changed, 432 insertions(+), 4 deletions(-) diff --git a/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua b/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua index 1675125a1b8a..4a209c144072 100644 --- a/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua +++ b/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua @@ -129,7 +129,7 @@ for _, strategy in helpers.each_strategy() do assert.is_nil(log_message.request.tls) end) - it("logs to TCP (grpc)", function() + it("logs to TCP (#grpc)", function() local thread = helpers.tcp_server(TCP_PORT) -- Starting the mock TCP server -- Making the request @@ -192,7 +192,7 @@ for _, strategy in helpers.each_strategy() do assert.True(is_latencies_sum_adding_up) end) - it("logs proper latencies (grpc)", function() + it("logs proper latencies (#grpc)", function() local tcp_thread = helpers.tcp_server(TCP_PORT) -- Starting the mock TCP server -- Making the request @@ -216,6 +216,44 @@ for _, strategy in helpers.each_strategy() do -- Making sure it's alright local log_message = cjson.decode(res) + assert.equal("grpc", log_message.service.protocol) + assert.True(log_message.latencies.proxy < 3000) + + -- Sometimes there's a split milisecond that makes numbers not + -- add up by 1. Adding an artificial 1 to make the test + -- resilient to those. 
+ local is_latencies_sum_adding_up = + 1 + log_message.latencies.request >= log_message.latencies.kong + + log_message.latencies.proxy + + assert.True(is_latencies_sum_adding_up) + end) + + it("logs proper latencies (#grpcs)", function() + local tcp_thread = helpers.tcp_server(TCP_PORT) -- Starting the mock TCP server + + -- Making the request + local ok, resp = proxy_client_grpcs({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-authority"] = "tcp_logging_grpcs.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + -- Getting back the TCP server input + local ok, res = tcp_thread:join() + assert.True(ok) + assert.is_string(res) + + -- Making sure it's alright + local log_message = cjson.decode(res) + + assert.equal("grpcs", log_message.service.protocol) assert.True(log_message.latencies.proxy < 3000) -- Sometimes there's a split milisecond that makes numbers not @@ -277,7 +315,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("NONE", log_message.request.tls.client_verify) end) - it("logs TLS info (grpcs)", function() + it("logs TLS info (#grpcs)", function() local thread = helpers.tcp_server(TCP_PORT) -- Starting the mock TCP server -- Making the request @@ -300,6 +338,8 @@ for _, strategy in helpers.each_strategy() do -- Making sure it's alright local log_message = cjson.decode(res) + + assert.equal("grpcs", log_message.service.protocol) assert.equal("TLSv1.2", log_message.request.tls.version) assert.is_string(log_message.request.tls.cipher) assert.equal("NONE", log_message.request.tls.client_verify) diff --git a/spec/03-plugins/02-udp-log/01-udp-log_spec.lua b/spec/03-plugins/02-udp-log/01-udp-log_spec.lua index 7b0607363801..3af637186984 100644 --- a/spec/03-plugins/02-udp-log/01-udp-log_spec.lua +++ b/spec/03-plugins/02-udp-log/01-udp-log_spec.lua @@ -8,6 +8,7 @@ local UDP_PORT = 35001 for _, strategy in helpers.each_strategy() do describe("Plugin: udp-log (log) [#" .. strategy .. 
"]", function() local proxy_client + local proxy_client_grpc, proxy_client_grpcs lazy_setup(function() local bp = helpers.get_db_utils(strategy, { @@ -29,12 +30,54 @@ for _, strategy in helpers.each_strategy() do }, } + local grpc_service = assert(bp.services:insert { + name = "grpc-service", + url = "grpc://localhost:15002", + }) + + local route2 = assert(bp.routes:insert { + service = grpc_service, + protocols = { "grpc" }, + hosts = { "udp_logging_grpc.test" }, + }) + + bp.plugins:insert { + route = { id = route2.id }, + name = "udp-log", + config = { + host = "127.0.0.1", + port = UDP_PORT + }, + } + + local grpcs_service = assert(bp.services:insert { + name = "grpcs-service", + url = "grpcs://localhost:15003", + }) + + local route3 = assert(bp.routes:insert { + service = grpcs_service, + protocols = { "grpcs" }, + hosts = { "udp_logging_grpcs.test" }, + }) + + bp.plugins:insert { + route = { id = route3.id }, + name = "udp-log", + config = { + host = "127.0.0.1", + port = UDP_PORT + }, + } + assert(helpers.start_kong({ database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", })) proxy_client = helpers.proxy_client() + proxy_client_grpc = helpers.proxy_client_grpc() + proxy_client_grpcs = helpers.proxy_client_grpcs() end) lazy_teardown(function() @@ -78,6 +121,78 @@ for _, strategy in helpers.each_strategy() do assert.True(is_latencies_sum_adding_up) end) + it("logs proper latencies (#grpc)", function() + local udp_thread = helpers.udp_server(UDP_PORT) + + -- Making the request + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" 
+ }, + opts = { + ["-authority"] = "udp_logging_grpc.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + -- Getting back the UDP server input + local ok, res = udp_thread:join() + assert.True(ok) + assert.is_string(res) + + -- Making sure it's alright + local log_message = cjson.decode(res) + + assert.True(log_message.latencies.proxy < 3000) + + -- Sometimes there's a split milisecond that makes numbers not + -- add up by 1. Adding an artificial 1 to make the test + -- resilient to those. + local is_latencies_sum_adding_up = + 1+log_message.latencies.request >= log_message.latencies.kong + + log_message.latencies.proxy + + assert.True(is_latencies_sum_adding_up) + end) + + it("logs proper latencies (#grpcs)", function() + local udp_thread = helpers.udp_server(UDP_PORT) + + -- Making the request + local ok, resp = proxy_client_grpcs({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-authority"] = "udp_logging_grpcs.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + -- Getting back the UDP server input + local ok, res = udp_thread:join() + assert.True(ok) + assert.is_string(res) + + -- Making sure it's alright + local log_message = cjson.decode(res) + + assert.True(log_message.latencies.proxy < 3000) + + -- Sometimes there's a split milisecond that makes numbers not + -- add up by 1. Adding an artificial 1 to make the test + -- resilient to those. 
+ local is_latencies_sum_adding_up = + 1+log_message.latencies.request >= log_message.latencies.kong + + log_message.latencies.proxy + + assert.True(is_latencies_sum_adding_up) + end) + it("logs to UDP", function() local thread = helpers.udp_server(UDP_PORT) -- Starting the mock UDP server @@ -100,5 +215,57 @@ for _, strategy in helpers.each_strategy() do local log_message = cjson.decode(res) assert.equal("127.0.0.1", log_message.client_ip) end) + + it("logs to UDP (#grpc)", function() + local thread = helpers.udp_server(UDP_PORT) -- Starting the mock UDP server + + -- Making the request + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-authority"] = "udp_logging_grpc.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + -- Getting back the TCP server input + local ok, res = thread:join() + assert.True(ok) + assert.is_string(res) + + -- Making sure it's alright + local log_message = cjson.decode(res) + assert.equal("127.0.0.1", log_message.client_ip) + end) + + it("logs to UDP (#grpcs)", function() + local thread = helpers.udp_server(UDP_PORT) -- Starting the mock UDP server + + -- Making the request + local ok, resp = proxy_client_grpcs({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" 
+ }, + opts = { + ["-authority"] = "udp_logging_grpcs.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + -- Getting back the TCP server input + local ok, res = thread:join() + assert.True(ok) + assert.is_string(res) + + -- Making sure it's alright + local log_message = cjson.decode(res) + assert.equal("127.0.0.1", log_message.client_ip) + end) end) end diff --git a/spec/03-plugins/03-http-log/01-log_spec.lua b/spec/03-plugins/03-http-log/01-log_spec.lua index 9ddeda3b3b5d..961d2e7984d8 100644 --- a/spec/03-plugins/03-http-log/01-log_spec.lua +++ b/spec/03-plugins/03-http-log/01-log_spec.lua @@ -5,6 +5,7 @@ local helpers = require "spec.helpers" for _, strategy in helpers.each_strategy() do describe("Plugin: http-log (log) [#" .. strategy .. "]", function() local proxy_client + local proxy_client_grpc, proxy_client_grpcs lazy_setup(function() local bp = helpers.get_db_utils(strategy, { @@ -141,10 +142,59 @@ for _, strategy in helpers.each_strategy() do local test_error_log_path = helpers.test_conf.nginx_err_logs os.execute(":> " .. test_error_log_path) + local grpc_service = assert(bp.services:insert { + name = "grpc-service", + url = "grpc://localhost:15002", + }) + + local route7 = assert(bp.routes:insert { + service = grpc_service, + protocols = { "grpc" }, + hosts = { "http_logging_grpc.test" }, + }) + + bp.plugins:insert { + route = { id = route7.id }, + name = "http-log", + config = { + http_endpoint = "http://" .. helpers.mock_upstream_host + .. ":" + .. helpers.mock_upstream_port + .. "/post_log/grpc", + timeout = 1 + }, + } + + local grpcs_service = assert(bp.services:insert { + name = "grpcs-service", + url = "grpcs://localhost:15003", + }) + + local route8 = assert(bp.routes:insert { + service = grpcs_service, + protocols = { "grpcs" }, + hosts = { "http_logging_grpcs.test" }, + }) + + bp.plugins:insert { + route = { id = route8.id }, + name = "http-log", + config = { + http_endpoint = "http://" .. helpers.mock_upstream_host + .. ":" + .. 
helpers.mock_upstream_port + .. "/post_log/grpcs", + timeout = 1 + }, + } + assert(helpers.start_kong({ database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", })) + + proxy_client_grpc = helpers.proxy_client_grpc() + proxy_client_grpcs = helpers.proxy_client_grpcs() end) lazy_teardown(function() @@ -191,6 +241,78 @@ for _, strategy in helpers.each_strategy() do end, 10) end) + it("logs to HTTP (#grpc)", function() + -- Making the request + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-authority"] = "http_logging_grpc.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + helpers.wait_until(function() + local client = assert(helpers.http_client(helpers.mock_upstream_host, + helpers.mock_upstream_port)) + local res = assert(client:send { + method = "GET", + path = "/read_log/grpc", + headers = { + Accept = "application/json" + } + }) + local raw = assert.res_status(200, res) + local body = cjson.decode(raw) + + if #body.entries == 1 then + assert.same("127.0.0.1", body.entries[1].client_ip) + assert.same("application/grpc", body.entries[1].request.headers["content-type"]) + assert.same("application/grpc", body.entries[1].response.headers["content-type"]) + return true + end + end, 10) + end) + + it("logs to HTTP (#grpcs)", function() + -- Making the request + local ok, resp = proxy_client_grpcs({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" 
+ }, + opts = { + ["-authority"] = "http_logging_grpcs.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + helpers.wait_until(function() + local client = assert(helpers.http_client(helpers.mock_upstream_host, + helpers.mock_upstream_port)) + local res = assert(client:send { + method = "GET", + path = "/read_log/grpcs", + headers = { + Accept = "application/json" + } + }) + local raw = assert.res_status(200, res) + local body = cjson.decode(raw) + + if #body.entries == 1 then + assert.same("127.0.0.1", body.entries[1].client_ip) + assert.same("application/grpc", body.entries[1].request.headers["content-type"]) + assert.same("application/grpc", body.entries[1].response.headers["content-type"]) + return true + end + end, 10) + end) + it("logs to HTTPS", function() local res = assert(proxy_client:send({ method = "GET", @@ -221,7 +343,7 @@ for _, strategy in helpers.each_strategy() do end) it("gracefully handles layer 4 failures", function() - -- setup: cleanup logs + -- setup: cleanup logs local test_error_log_path = helpers.test_conf.nginx_err_logs os.execute(":> " .. test_error_log_path) diff --git a/spec/03-plugins/04-file-log/01-log_spec.lua b/spec/03-plugins/04-file-log/01-log_spec.lua index efb44ea28fdf..c92a8a6681a7 100644 --- a/spec/03-plugins/04-file-log/01-log_spec.lua +++ b/spec/03-plugins/04-file-log/01-log_spec.lua @@ -12,6 +12,7 @@ local FILE_LOG_PATH = os.tmpname() for _, strategy in helpers.each_strategy() do describe("Plugin: file-log (log) [#" .. strategy .. 
"]", function() local proxy_client + local proxy_client_grpc, proxy_client_grpcs lazy_setup(function() local bp = helpers.get_db_utils(strategy, { @@ -33,11 +34,55 @@ for _, strategy in helpers.each_strategy() do }, } + local grpc_service = assert(bp.services:insert { + name = "grpc-service", + url = "grpc://localhost:15002", + }) + + local route2 = assert(bp.routes:insert { + service = grpc_service, + protocols = { "grpc" }, + hosts = { "tcp_logging_grpc.test" }, + }) + + bp.plugins:insert { + route = { id = route2.id }, + name = "file-log", + config = { + path = FILE_LOG_PATH, + reopen = true, + }, + } + + local grpcs_service = assert(bp.services:insert { + name = "grpcs-service", + url = "grpcs://localhost:15003", + }) + + local route3 = assert(bp.routes:insert { + service = grpcs_service, + protocols = { "grpcs" }, + hosts = { "tcp_logging_grpcs.test" }, + }) + + bp.plugins:insert { + route = { id = route3.id }, + name = "file-log", + config = { + path = FILE_LOG_PATH, + reopen = true, + }, + } + assert(helpers.start_kong({ database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", })) + + proxy_client_grpc = helpers.proxy_client_grpc() + proxy_client_grpcs = helpers.proxy_client_grpcs() end) + lazy_teardown(function() helpers.stop_kong() end) @@ -78,6 +123,60 @@ for _, strategy in helpers.each_strategy() do assert.same(uuid, log_message.request.headers["file-log-uuid"]) end) + it("logs to file #grpc", function() + local uuid = utils.random_string() + + -- Making the request + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-H"] = "'file-log-uuid: " .. uuid .. 
"'", + ["-authority"] = "tcp_logging_grpc.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + helpers.wait_until(function() + return pl_path.exists(FILE_LOG_PATH) and pl_path.getsize(FILE_LOG_PATH) > 0 + end, 10) + + local file_log = pl_file.read(FILE_LOG_PATH) + local log_message = cjson.decode(pl_stringx.strip(file_log)) + assert.same("127.0.0.1", log_message.client_ip) + assert.same(uuid, log_message.request.headers["file-log-uuid"]) + end) + + it("logs to file #grpcs", function() + local uuid = utils.random_string() + + -- Making the request + local ok, resp = proxy_client_grpcs({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-H"] = "'file-log-uuid: " .. uuid .. "'", + ["-authority"] = "tcp_logging_grpcs.test", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + helpers.wait_until(function() + return pl_path.exists(FILE_LOG_PATH) and pl_path.getsize(FILE_LOG_PATH) > 0 + end, 10) + + local file_log = pl_file.read(FILE_LOG_PATH) + local log_message = cjson.decode(pl_stringx.strip(file_log)) + assert.same("127.0.0.1", log_message.client_ip) + assert.same(uuid, log_message.request.headers["file-log-uuid"]) + end) + it("reopens file on each request", function() local uuid1 = utils.uuid() From 0cf5bfa2c9ba7b24e52e8904981233b8adce3eac Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Thu, 5 Dec 2019 10:46:38 -0500 Subject: [PATCH 25/41] chore(ci) small Makefile command typo (#5304) --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index c23c36feaac1..72d49149f726 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -148,7 +148,7 @@ pipeline { } post { always { - dir('../kong-build-tools'){ sh 'make cleanup_build' } + dir('../kong-build-tools'){ sh 'make cleanup-build' } } } } From b481e38e817ec945aa508372fce7721717d91166 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Enrique=20Garc=C3=ADa=20Cota?= Date: Thu, 5 Dec 2019 18:11:37 +0100 Subject: [PATCH 26/41] 
fix(admin-api) allow plugin api.lua with schema/method style (#5303) This change fixes a regression introduced in #5174. Said regression consisted on an error which was raised when Kong attempted to parse a plugin `api.lua` file with `schema` and `methods` entries, such as: ``` [""] = { schema = , methods = { GET = function(self) ... end, ... } ``` The regression only allowed "lapis-style" entries, such as this one: ``` [""] = { GET = function(self) ... end, ... } ``` This made the Prometheus plugin stop working with Kong, which used the first style for its `api.lua`. This makes Kong accept both styles again. Fixes #5291 --- kong/api/init.lua | 38 ++++++++++--------- .../04-admin_api/09-routes_routes_spec.lua | 9 +++++ .../kong/plugins/api-override/api.lua | 9 +++++ 3 files changed, 39 insertions(+), 17 deletions(-) diff --git a/kong/api/init.lua b/kong/api/init.lua index 0adff0229570..aeef819dd9fc 100644 --- a/kong/api/init.lua +++ b/kong/api/init.lua @@ -44,26 +44,30 @@ do -- the autogenerated endpoints the plugin's own DAOs introduced). 
local function customize_routes(routes, custom_endpoints, schema) for route_pattern, verbs in pairs(custom_endpoints) do - if routes[route_pattern] ~= nil and type(verbs) == "table" then - for verb, handler in pairs(verbs) do - local parent = routes[route_pattern]["methods"][verb] - if parent ~= nil and type(handler) == "function" then - routes[route_pattern]["methods"][verb] = function(self, db, helpers) - return handler(self, db, helpers, function(post_process) - return parent(self, db, helpers, post_process) - end) - end + if type(verbs) == "table" then + local methods = verbs.methods or verbs + + if routes[route_pattern] == nil then + routes[route_pattern] = { + schema = verbs.schema or schema, + methods = methods + } - else - routes[route_pattern]["methods"][verb] = handler + else + for method, handler in pairs(methods) do + local parent = routes[route_pattern]["methods"][method] + if parent ~= nil and type(handler) == "function" then + routes[route_pattern]["methods"][method] = function(self, db, helpers) + return handler(self, db, helpers, function(post_process) + return parent(self, db, helpers, post_process) + end) + end + + else + routes[route_pattern]["methods"][method] = handler + end end end - - else - routes[route_pattern] = { - schema = verbs.schema or schema, - methods = verbs, - } end end end diff --git a/spec/02-integration/04-admin_api/09-routes_routes_spec.lua b/spec/02-integration/04-admin_api/09-routes_routes_spec.lua index 0544fc172d8a..279b8fdfabb3 100644 --- a/spec/02-integration/04-admin_api/09-routes_routes_spec.lua +++ b/spec/02-integration/04-admin_api/09-routes_routes_spec.lua @@ -1910,6 +1910,7 @@ for _, strategy in helpers.each_strategy() do describe("GET", function() describe("with data", function() lazy_setup(function() + db:truncate("services") db:truncate("routes") for i = 1, 10 do bp.routes:insert({ paths = { "/route-" .. 
i } }) @@ -1924,7 +1925,15 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(200, res) local json = cjson.decode(body) assert.equal(10, #json.data) + assert.equal("ok", res.headers["Kong-Api-Override"]) + local res = assert(client:send { + method = "GET", + path = "/services" + }) + local body = assert.res_status(200, res) + local json = cjson.decode(body) + assert.equal(10, #json.data) assert.equal("ok", res.headers["Kong-Api-Override"]) end) end) diff --git a/spec/fixtures/custom_plugins/kong/plugins/api-override/api.lua b/spec/fixtures/custom_plugins/kong/plugins/api-override/api.lua index b1af1291115d..9b83637eca89 100644 --- a/spec/fixtures/custom_plugins/kong/plugins/api-override/api.lua +++ b/spec/fixtures/custom_plugins/kong/plugins/api-override/api.lua @@ -13,4 +13,13 @@ return { return parent() end, }, + ["/services"] = { + schema = kong.db.services.schema, + methods = { + GET = function(_, _, _, parent) + kong.response.set_header("Kong-Api-Override", "ok") + return parent() + end + } + } } From 04fcf38d1868bd55c080855b38985026681517c4 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Mon, 9 Dec 2019 09:09:07 -0500 Subject: [PATCH 27/41] chore(ci) changed a make task and explicitly defined some variables (#5271) * chore(ci) changed a make task and explicitely defined some variables for CI purposes * chore(dependencies) bumped libyaml and removed uneeded variable --- .requirements | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.requirements b/.requirements index b80c4b8b8dff..dafcdc41d9d0 100644 --- a/.requirements +++ b/.requirements @@ -1,5 +1,10 @@ +KONG_PACKAGE_NAME=kong +KONG_CONFLICTS=kong-enterprise-edition +KONG_LICENSE="ASL 2.0" + KONG_GMP_VERSION=6.1.2 RESTY_VERSION=1.15.8.2 RESTY_LUAROCKS_VERSION=3.2.1 RESTY_OPENSSL_VERSION=1.1.1d RESTY_PCRE_VERSION=8.43 +LIBYAML_VERSION=0.2.2 \ No newline at end of file From 616b135d181b232c3b85460f8536a3b642093009 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: 
Mon, 9 Dec 2019 13:36:13 -0300 Subject: [PATCH 28/41] tests(declarative) verify that validation errors are caught (#5312) Increase our coverage of declarative config testing: verify that validation errors on a valid YAML are caught. --- spec/02-integration/02-cmd/11-config_spec.lua | 43 +++++++++++++++++++ .../04-admin_api/15-off_spec.lua | 35 +++++++++++++++ 2 files changed, 78 insertions(+) diff --git a/spec/02-integration/02-cmd/11-config_spec.lua b/spec/02-integration/02-cmd/11-config_spec.lua index 3f420891a4f0..04247c9dfeac 100644 --- a/spec/02-integration/02-cmd/11-config_spec.lua +++ b/spec/02-integration/02-cmd/11-config_spec.lua @@ -4,6 +4,11 @@ local cjson = require "cjson" local lyaml = require "lyaml" +local function trim(s) + return s:gsub("%s+", " "):gsub("^%s*", ""):gsub("%s*$", "") +end + + local function sort_by_name(a, b) return a.name < b.name end @@ -150,6 +155,44 @@ describe("kong config", function() })) end) + it("#db config db_import catches errors in input", function() + assert(helpers.start_kong({ + nginx_conf = "spec/fixtures/custom_nginx.template", + })) + + finally(function() + helpers.stop_kong() + end) + + local filename = helpers.make_yaml_file([[ + _format_version: "1.1" + services: + - name: foobar + host: [] + port: -23 + protocol: https + _comment: my comment + _ignore: + - foo: bar + routes: 123 + ]]) + + local ok, err = helpers.kong_exec("config db_import " .. 
filename, { + prefix = helpers.test_conf.prefix, + }) + assert.falsy(ok) + + assert.same(trim([[ + Error: Failed parsing: + in 'services': + - in entry 1 of 'services': + in 'host': expected a string + in 'port': value should be between 0 and 65535 + in 'routes': expected an array + Run with --v (verbose) or --vv (debug) for more details + ]]), trim(err)) + end) + it("#db config db_import is idempotent based on endpoint_key and cache_key", function() assert(db.plugins:truncate()) assert(db.routes:truncate()) diff --git a/spec/02-integration/04-admin_api/15-off_spec.lua b/spec/02-integration/04-admin_api/15-off_spec.lua index e35ebb8bb3a8..d2c50cdda1fa 100644 --- a/spec/02-integration/04-admin_api/15-off_spec.lua +++ b/spec/02-integration/04-admin_api/15-off_spec.lua @@ -449,6 +449,41 @@ describe("Admin API #off", function() }, json) end) + it("returns 400 on a validation error", function() + local res = assert(client:send { + method = "POST", + path = "/config", + body = { + config = [[ + _format_version: "1.1" + services: + - port: -12 + ]], + }, + headers = { + ["Content-Type"] = "application/json" + } + }) + + local body = assert.response(res).has.status(400) + local json = cjson.decode(body) + assert.same({ + code = 14, + fields = { + services = { + { + host = "required field missing", + port = "value should be between 0 and 65535", + } + } + }, + message = [[declarative config is invalid: ]] .. + [[{services={{host="required field missing",]] .. 
+ [[port="value should be between 0 and 65535"}}}]], + name = "invalid declarative configuration", + }, json) + end) + it("returns 400 when given no input", function() local res = assert(client:send { method = "POST", From 95cb157f55e043018b19039635b84f379ac6aeb8 Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Mon, 9 Dec 2019 17:25:02 -0800 Subject: [PATCH 29/41] tests(plugins) add test cases for statsd on grpc --- spec/03-plugins/06-statsd/01-log_spec.lua | 85 +++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/spec/03-plugins/06-statsd/01-log_spec.lua b/spec/03-plugins/06-statsd/01-log_spec.lua index 7de156969d2a..0ae5a48c4aa2 100644 --- a/spec/03-plugins/06-statsd/01-log_spec.lua +++ b/spec/03-plugins/06-statsd/01-log_spec.lua @@ -10,6 +10,7 @@ local UDP_PORT = 20000 for _, strategy in helpers.each_strategy() do describe("Plugin: statsd (log) [#" .. strategy .. "]", function() local proxy_client + local proxy_client_grpc lazy_setup(function() local bp = helpers.get_db_utils(strategy, { @@ -235,12 +236,49 @@ for _, strategy in helpers.each_strategy() do }, } + -- grpc + local grpc_routes = {} + for i = 1, 2 do + local service = bp.services:insert { + url = "grpc://localhost:15002", + name = fmt("grpc_statsd%s", i) + } + grpc_routes[i] = bp.routes:insert { + hosts = { fmt("grpc_logging%d.com", i) }, + service = service + } + end + + bp.statsd_plugins:insert { + route = { id = grpc_routes[1].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + }, + } + + bp.statsd_plugins:insert { + route = { id = grpc_routes[2].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "latency", + stat_type = "gauge", + sample_rate = 1, + } + }, + }, + } + assert(helpers.start_kong({ database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", })) proxy_client = helpers.proxy_client() + proxy_client_grpc = helpers.proxy_client_grpc() end) lazy_teardown(function() @@ -475,5 +513,52 @@ for _, strategy in 
helpers.each_strategy() do assert.matches("kong%.statsd12%.latency:%d+|g", res) end) end) + describe("metrics #grpc", function() + it("logs over UDP with default metrics", function() + local thread = helpers.udp_server(UDP_PORT, 8) + + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-authority"] = "grpc_logging1.com", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + local ok, metrics = thread:join() + assert.True(ok) + assert.contains("kong.grpc_statsd1.request.count:1|c", metrics) + assert.contains("kong.grpc_statsd1.latency:%d+|ms", metrics, true) + assert.contains("kong.grpc_statsd1.request.size:%d+|ms", metrics, true) + assert.contains("kong.grpc_statsd1.request.status.200:1|c", metrics) + assert.contains("kong.grpc_statsd1.request.status.total:1|c", metrics) + assert.contains("kong.grpc_statsd1.response.size:%d+|ms", metrics, true) + assert.contains("kong.grpc_statsd1.upstream_latency:%d*|ms", metrics, true) + assert.contains("kong.grpc_statsd1.kong_latency:%d*|ms", metrics, true) + end) + it("latency as gauge", function() + local thread = helpers.udp_server(UDP_PORT) + + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" 
+ }, + opts = { + ["-authority"] = "grpc_logging2.com", + } + }) + assert.truthy(ok) + assert.truthy(resp) + + local ok, res = thread:join() + assert.True(ok) + assert.matches("kong%.grpc_statsd2%.latency:%d+|g", res) + end) + end) end) end From 7a9baf536882e7a0a37c0c7591bb741b0b4ca5ca Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Mon, 9 Dec 2019 17:45:06 -0800 Subject: [PATCH 30/41] tests(plugins) add test cases for syslog on grpc --- spec/03-plugins/05-syslog/01-log_spec.lua | 104 +++++++++++++++++++--- 1 file changed, 94 insertions(+), 10 deletions(-) diff --git a/spec/03-plugins/05-syslog/01-log_spec.lua b/spec/03-plugins/05-syslog/01-log_spec.lua index b862c44fbbb8..e396b4b1f615 100644 --- a/spec/03-plugins/05-syslog/01-log_spec.lua +++ b/spec/03-plugins/05-syslog/01-log_spec.lua @@ -7,6 +7,7 @@ local pl_stringx = require "pl.stringx" for _, strategy in helpers.each_strategy() do describe("#flaky Plugin: syslog (log) [#" .. strategy .. "]", function() local proxy_client + local proxy_client_grpc local platform lazy_setup(function() @@ -61,6 +62,61 @@ for _, strategy in helpers.each_strategy() do }, } + -- grpc [[ + local grpc_service = bp.services:insert { + name = "grpc-service", + url = "grpc://localhost:15002", + } + + local grpc_route1 = bp.routes:insert { + service = grpc_service, + hosts = { "grpc_logging.com" }, + } + + local grpc_route2 = bp.routes:insert { + service = grpc_service, + hosts = { "grpc_logging2.com" }, + } + + local grpc_route3 = bp.routes:insert { + service = grpc_service, + hosts = { "grpc_logging3.com" }, + } + + bp.plugins:insert { + route = { id = grpc_route1.id }, + name = "syslog", + config = { + log_level = "info", + successful_severity = "warning", + client_errors_severity = "warning", + server_errors_severity = "warning", + }, + } + + bp.plugins:insert { + route = { id = grpc_route2.id }, + name = "syslog", + config = { + log_level = "err", + successful_severity = "warning", + client_errors_severity = "warning", + 
server_errors_severity = "warning", + }, + } + + bp.plugins:insert { + route = { id = grpc_route3.id }, + name = "syslog", + config = { + log_level = "warning", + successful_severity = "warning", + client_errors_severity = "warning", + server_errors_severity = "warning", + }, + } + -- grpc ]] + local ok, _, stdout = helpers.execute("uname") assert(ok, "failed to retrieve platform name") platform = pl_stringx.strip(stdout) @@ -69,6 +125,8 @@ for _, strategy in helpers.each_strategy() do database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", })) + + proxy_client_grpc = helpers.proxy_client_grpc() end) lazy_teardown(function() helpers.stop_kong() @@ -81,18 +139,34 @@ for _, strategy in helpers.each_strategy() do if proxy_client then proxy_client:close() end end) - local function do_test(host, expecting_same) + local function do_test(host, expecting_same, grpc) local uuid = utils.uuid() - local response = assert(proxy_client:send { - method = "GET", - path = "/request", - headers = { - host = host, - sys_log_uuid = uuid, - } - }) - assert.res_status(200, response) + if not grpc then + local response = assert(proxy_client:send { + method = "GET", + path = "/request", + headers = { + host = host, + sys_log_uuid = uuid, + } + }) + assert.res_status(200, response) + + else + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" + }, + opts = { + ["-H"] = "'sys-log-uuid: " .. uuid .. 
"'", + ["-authority"] = ("%s"):format(host), + } + }) + assert.truthy(ok) + assert.truthy(resp) + end if platform == "Darwin" then local _, _, stdout = assert(helpers.execute("syslog -k Sender kong | tail -1")) @@ -119,5 +193,15 @@ for _, strategy in helpers.each_strategy() do it("logs to syslog if log_level is the same", function() do_test("logging3.com", true) end) + + it("logs to syslog if log_level is lower #grpc", function() + do_test("grpc_logging.com", true, true) + end) + it("does not log to syslog if log_level is higher #grpc", function() + do_test("grpc_logging2.com", false, true) + end) + it("logs to syslog if log_level is the same #grpc", function() + do_test("grpc_logging3.com", true, true) + end) end) end From 2784ac7f3dbc77275971e0268933e62f322fe0c9 Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Mon, 9 Dec 2019 18:09:11 -0800 Subject: [PATCH 31/41] tests(plugins) add test cases for loggly on grpc --- spec/03-plugins/07-loggly/01-log_spec.lua | 107 ++++++++++++++++++++++ 1 file changed, 107 insertions(+) diff --git a/spec/03-plugins/07-loggly/01-log_spec.lua b/spec/03-plugins/07-loggly/01-log_spec.lua index cbdd402484f5..2d15d16e1f17 100644 --- a/spec/03-plugins/07-loggly/01-log_spec.lua +++ b/spec/03-plugins/07-loggly/01-log_spec.lua @@ -8,6 +8,7 @@ local UDP_PORT = 20000 for _, strategy in helpers.each_strategy() do describe("Plugin: loggly (log) [#" .. strategy .. 
"]", function() local proxy_client + local proxy_client_grpc lazy_setup(function() local bp = helpers.get_db_utils(strategy, { @@ -80,10 +81,73 @@ for _, strategy in helpers.each_strategy() do } } + -- grpc [[ + local grpc_service = bp.services:insert { + name = "grpc-service", + url = "grpc://localhost:15002", + } + + local grpc_route1 = bp.routes:insert { + service = grpc_service, + hosts = { "grpc_logging.com" }, + } + + local grpc_route2 = bp.routes:insert { + service = grpc_service, + hosts = { "grpc_logging1.com" }, + } + + local grpc_route3 = bp.routes:insert { + service = grpc_service, + hosts = { "grpc_logging2.com" }, + } + + bp.plugins:insert { + route = { id = grpc_route1.id }, + name = "loggly", + config = { + host = "127.0.0.1", + port = UDP_PORT, + key = "123456789", + log_level = "info", + successful_severity = "warning" + } + } + + bp.plugins:insert { + route = { id = grpc_route2.id }, + name = "loggly", + config = { + host = "127.0.0.1", + port = UDP_PORT, + key = "123456789", + log_level = "debug", + timeout = 2000, + successful_severity = "info", + } + } + + bp.plugins:insert { + route = { id = grpc_route3.id }, + name = "loggly", + config = { + host = "127.0.0.1", + port = UDP_PORT, + key = "123456789", + log_level = "crit", + successful_severity = "crit", + client_errors_severity = "warning", + } + } + + -- grpc ]] + assert(helpers.start_kong({ database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", })) + + proxy_client_grpc = helpers.proxy_client_grpc() end) lazy_teardown(function() @@ -119,6 +183,31 @@ for _, strategy in helpers.each_strategy() do return pri, cjson.decode(json) end + local function run_grpc(host) + local thread = assert(helpers.udp_server(UDP_PORT)) + + local ok, resp = proxy_client_grpc({ + service = "hello.HelloService.SayHello", + body = { + greeting = "world!" 
+ }, + opts = { + ["-authority"] = host, + } + }) + assert.truthy(ok) + assert.truthy(resp) + + local ok, res = thread:join() + assert.truthy(ok) + assert.truthy(res) + + local pri = assert(res:match("^<(%d-)>")) + local json = assert(res:match("{.*}")) + + return pri, cjson.decode(json) + end + it("logs to UDP when severity is warning and log level info", function() local pri, message = run({ method = "GET", @@ -131,6 +220,12 @@ for _, strategy in helpers.each_strategy() do assert.equal("127.0.0.1", message.client_ip) end) + it("logs to UDP when severity is warning and log level info #grpc", function() + local pri, message = run_grpc("grpc_logging.com") + assert.equal("12", pri) + assert.equal("127.0.0.1", message.client_ip) + end) + it("logs to UDP when severity is info and log level debug", function() local pri, message = run({ method = "GET", @@ -143,6 +238,12 @@ for _, strategy in helpers.each_strategy() do assert.equal("127.0.0.1", message.client_ip) end) + it("logs to UDP when severity is info and log level debug #grpc", function() + local pri, message = run_grpc("grpc_logging1.com") + assert.equal("14", pri) + assert.equal("127.0.0.1", message.client_ip) + end) + it("logs to UDP when severity is critical and log level critical", function() local pri, message = run({ method = "GET", @@ -155,6 +256,12 @@ for _, strategy in helpers.each_strategy() do assert.equal("127.0.0.1", message.client_ip) end) + it("logs to UDP when severity is critical and log level critical #grpc", function() + local pri, message = run_grpc("grpc_logging2.com") + assert.equal("10", pri) + assert.equal("127.0.0.1", message.client_ip) + end) + it("logs to UDP when severity and log level are default values", function() local pri, message = run({ method = "GET", From 7a4a996e711a93c56af396281833dbe2a349ca54 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Tue, 10 Dec 2019 14:27:06 -0500 Subject: [PATCH 32/41] tests(plugins) add capability to test externally hosted but bundled Kong 
plugins (#5305) All bundled Kong plugins are inferred from the rockspec as always being `kong-` and `~`. From there we determine git tag from the result of `luarocks show`. Once we have the plugin repository and git tag we clone the plugin source code, copy the necessary fixtures and then run the plugin spec tests. * test(plugins) add capability to test externally hosted but bundled Kong plugins * chore(ci) enable azure-functions and report status if any failed * chore(ci) combine external plugins into the same CI job * chore(ci) nicer output for plugin tests * chore(ci) remove the added stage we no longer use * chore(dependency) bump the zipkin plugin * chore(tests) exclude zipkin from the CI tests until next release * chore(test) dont delete the plugin code we used for testing --- .ci/run_tests.sh | 53 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/.ci/run_tests.sh b/.ci/run_tests.sh index 99aef58e5438..ba16951a48ea 100755 --- a/.ci/run_tests.sh +++ b/.ci/run_tests.sh @@ -1,6 +1,13 @@ #!/usr/bin/env bash set -e +function cyan() { + echo -e "\033[1;36m$*\033[0m" +} +function red() { + echo -e "\033[1;31m$*\033[0m" +} + export BUSTED_ARGS="-o gtest -v --exclude-tags=flaky,ipv6" if [ "$KONG_TEST_DATABASE" == "postgres" ]; then @@ -23,7 +30,51 @@ if [ "$TEST_SUITE" == "dbless" ]; then spec/02-integration/04-admin_api/15-off_spec.lua fi if [ "$TEST_SUITE" == "plugins" ]; then - eval "$TEST_CMD" spec/03-plugins/ + set +e + rm -f .failed + + for p in spec/03-plugins/*; do + echo + cyan "--------------------------------------" + cyan $(basename $p) + cyan "--------------------------------------" + echo + + $TEST_CMD $p || echo "* $p" >> .failed + done + + cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | while read line ; do + REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'` + VERSION=`luarocks show $REPOSITORY | grep $REPOSITORY | head -1 | awk -F" " '{print 
$2}' | cut -f1 -d"-"` + REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-prometheus-plugin/kong-plugin-prometheus/g'` + REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-proxy-cache-plugin/kong-plugin-proxy-cache/g'` + + echo + cyan "--------------------------------------" + cyan $REPOSITORY $VERSION + cyan "--------------------------------------" + echo + + git clone https://github.com/Kong/$REPOSITORY.git --branch $VERSION --single-branch /tmp/test-$REPOSITORY + cp -R /tmp/test-$REPOSITORY/spec/fixtures/* spec/fixtures/ || true + pushd /tmp/test-$REPOSITORY + luarocks make + popd + + $TEST_CMD /tmp/test-$REPOSITORY/spec/ || echo "* $REPOSITORY" >> .failed + + done + + if [ -f .failed ]; then + echo + red "--------------------------------------" + red "Plugin tests failed:" + red "--------------------------------------" + cat .failed + exit 1 + else + exit 0 + fi fi if [ "$TEST_SUITE" == "pdk" ]; then TEST_NGINX_RANDOMIZE=1 prove -I. -j$JOBS -r t/01-pdk From d20594b869b2be934438cd346bb7c7a0774c61e8 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 10 Dec 2019 14:48:12 -0300 Subject: [PATCH 33/41] fix(balancer) fix event propagation for post_health --- kong/db/dao/targets.lua | 2 +- kong/runloop/handler.lua | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/kong/db/dao/targets.lua b/kong/db/dao/targets.lua index 32942264a4da..3b6d5242792e 100644 --- a/kong/db/dao/targets.lua +++ b/kong/db/dao/targets.lua @@ -354,7 +354,7 @@ function _TARGETS:post_health(upstream_pk, target, address, is_healthy) end local health = is_healthy and 1 or 0 - local packet = ("%s|%d|%d|%s|%s"):format(ip, port, health, + local packet = ("%s|%s|%d|%d|%s|%s"):format(hostname, ip or "", port, health, upstream.id, upstream.name) diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index f80dbfa8d52f..0d84005eb25a 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -356,11 +356,14 @@ local function register_events() -- manual health 
updates cluster_events:subscribe("balancer:post_health", function(data) - local pattern = "([^|]+)|([^|]+)|([^|]+)|([^|]+)|(.*)" - local ip, port, health, id, name = data:match(pattern) + local pattern = "([^|]+)|([^|]*)|([^|]+)|([^|]+)|([^|]+)|(.*)" + local hostname, ip, port, health, id, name = data:match(pattern) port = tonumber(port) local upstream = { id = id, name = name } - local _, err = balancer.post_health(upstream, ip, port, health == "1") + if ip == "" then + ip = nil + end + local _, err = balancer.post_health(upstream, hostname, ip, port, health == "1") if err then log(ERR, "failed posting health of ", name, " to workers: ", err) end From 708a87709f6328bf4a3e6ef310154aae6ce76d12 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 10 Dec 2019 14:49:08 -0300 Subject: [PATCH 34/41] fix(balancer) fix behavior after target history auto-cleanup When the Targets DAO decides to cleanup history, this causes new_history size to be zero; this needs to trigger the recreation of the balancer. --- kong/runloop/balancer.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong/runloop/balancer.lua b/kong/runloop/balancer.lua index 01307057e6a0..bd22572a20f1 100644 --- a/kong/runloop/balancer.lua +++ b/kong/runloop/balancer.lua @@ -505,7 +505,7 @@ local function check_target_history(upstream, balancer) end end - if last_equal_index == new_size then + if last_equal_index == new_size and new_size > 0 then -- No history update is necessary in the balancer object. 
return true elseif last_equal_index == old_size then From 3a2072154ce5e2baac1f5a939ff3889c5fad6e07 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Tue, 10 Dec 2019 16:57:55 -0300 Subject: [PATCH 35/41] chore(deps) bump lua-resty-dns-client to 4.1.2 (#5319) --- kong-1.4.1-0.rockspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong-1.4.1-0.rockspec b/kong-1.4.1-0.rockspec index f33ad662936a..c3f698b8039a 100644 --- a/kong-1.4.1-0.rockspec +++ b/kong-1.4.1-0.rockspec @@ -30,7 +30,7 @@ dependencies = { "luaossl == 20190731", "luasyslog == 1.0.0", "lua_pack == 1.0.5", - "lua-resty-dns-client == 4.1.1", + "lua-resty-dns-client == 4.1.2", "lua-resty-worker-events == 1.0.0", "lua-resty-mediador == 0.1.2", "lua-resty-healthcheck == 1.1.1", From 0fcc8cc9636d94186ef69503eeb75daae69c8f08 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Wed, 11 Dec 2019 11:17:14 -0300 Subject: [PATCH 36/41] fix(http-log) do not use a queue when queue_size is 1 (#5323) This avoids memory issues with an ever growing queue in Lua memory when the http-log queue handling is slower than the number of incoming requests, essentially restoring the pre-queueing behavior of http-log when queuing settings are not enabled. Without this patch, running http-log under load (e.g. testing with `wrk`) displays an ever-increasing memory consumption, up until the Lua VM panics with an "out of memory" error. With this patch, running the same test with `wrk` shows stable memory consumption. We still need to address the memory growth issues for when queueing is enabled and the flushing is too slow, but for now this is a useful fix to make the default settings usable. 
--- kong/tools/batch_queue.lua | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/kong/tools/batch_queue.lua b/kong/tools/batch_queue.lua index 95266dcafec0..8eaf5ae56ef3 100644 --- a/kong/tools/batch_queue.lua +++ b/kong/tools/batch_queue.lua @@ -267,6 +267,13 @@ function Queue:add(entry) return nil, "entry must be a non-nil Lua value" end + if self.batch_max_size == 1 then + -- no batching + local batch = { entries = { entry }, retries = 0 } + schedule_process(self, batch, 0) + return true + end + local cb = self.current_batch local new_size = #cb.entries + 1 cb.entries[new_size] = entry From 540e628dee47102b43e1068b92e6e3ebbfb0b15a Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Tue, 10 Dec 2019 20:09:22 +0000 Subject: [PATCH 37/41] docs(changelog) add 1.4.2 changes --- CHANGELOG.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 231e17c64b9a..eef939ffc993 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Table of Contents +- [1.4.2](#142) - [1.4.1](#141) - [1.4.0](#140) - [1.3.0](#130) @@ -34,6 +35,32 @@ - [0.9.9 and prior](#099---20170202) +## [1.4.2] + +> Released 2019/12/10 + +This is another patch release in the 1.4 series, and as such, strictly +contains bugfixes. There are no new features nor breaking changes. + +### Fixes + +##### Core + + - Fixes some corner cases in the balancer behavior + [#5318](https://github.com/Kong/kong/pull/5318) + +##### Plugins + + - http-log: disable queueing when using the default + settings, to avoid memory consumption issues + [#5323](https://github.com/Kong/kong/pull/5323) + - prometheus: restore compatibility with version 0.6.0 + [#5303](https://github.com/Kong/kong/pull/5303) + + +[Back to TOC](#table-of-contents) + + ## [1.4.1] > Released 2019/12/03 @@ -4265,6 +4292,7 @@ First version running with Cassandra. 
[Back to TOC](#table-of-contents) +[1.4.2]: https://github.com/Kong/kong/compare/1.4.1...1.4.2 [1.4.1]: https://github.com/Kong/kong/compare/1.4.0...1.4.1 [1.4.0]: https://github.com/Kong/kong/compare/1.3.0...1.4.0 [1.3.0]: https://github.com/Kong/kong/compare/1.2.2...1.3.0 From 0ce8e3402871f8c585918d889015d16877d57de5 Mon Sep 17 00:00:00 2001 From: Colin Hutchinson Date: Tue, 10 Dec 2019 20:11:39 +0000 Subject: [PATCH 38/41] release: 1.4.2 --- kong-1.4.1-0.rockspec => kong-1.4.2-0.rockspec | 4 ++-- kong/meta.lua | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename kong-1.4.1-0.rockspec => kong-1.4.2-0.rockspec (99%) diff --git a/kong-1.4.1-0.rockspec b/kong-1.4.2-0.rockspec similarity index 99% rename from kong-1.4.1-0.rockspec rename to kong-1.4.2-0.rockspec index c3f698b8039a..d29c8c3c3ade 100644 --- a/kong-1.4.1-0.rockspec +++ b/kong-1.4.2-0.rockspec @@ -1,9 +1,9 @@ package = "kong" -version = "1.4.1-0" +version = "1.4.2-0" supported_platforms = {"linux", "macosx"} source = { url = "git://github.com/Kong/kong", - tag = "1.4.1" + tag = "1.4.2" } description = { summary = "Kong is a scalable and customizable API Management Layer built on top of Nginx.", diff --git a/kong/meta.lua b/kong/meta.lua index f4a9ab9f7cdd..2ea8080113c8 100644 --- a/kong/meta.lua +++ b/kong/meta.lua @@ -1,7 +1,7 @@ local version = setmetatable({ major = 1, minor = 4, - patch = 1, + patch = 2, -- suffix = "", }, { __tostring = function(t) From c9b62748fa67dc186663d7590cb6d187ea671537 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 11 Dec 2019 22:05:19 +0200 Subject: [PATCH 39/41] fix(admin-api) incorrect PUT behavior for /certificates (#5321) ### Summary @hbagdi reported issue on `PUT` behavior on `/certificates` with #5309. This PR fixes the `PUT` behavior. It also fixes several other issues found while fixing the original issue. 
### Issues Resolver Fix #5309 --- kong/api/routes/certificates.lua | 126 ++++++++++++------ kong/db/dao/certificates.lua | 3 - .../06-certificates_routes_spec.lua | 32 ++++- 3 files changed, 118 insertions(+), 43 deletions(-) diff --git a/kong/api/routes/certificates.lua b/kong/api/routes/certificates.lua index 9b0b3f3ae0cc..13d6d7362a88 100644 --- a/kong/api/routes/certificates.lua +++ b/kong/api/routes/certificates.lua @@ -1,72 +1,122 @@ local endpoints = require "kong.api.endpoints" +local arguments = require "kong.api.arguments" local utils = require "kong.tools.utils" -local Set = require "pl.Set" +local ngx = ngx local kong = kong +local type = type +local find = string.find +local lower = string.lower local unescape_uri = ngx.unescape_uri -local function get_cert_id_from_sni(self, db, helpers) +local function prepare_params(self) local id = unescape_uri(self.params.certificates) - if utils.is_valid_uuid(id) then - return + local method = self.req.method + local name + if not utils.is_valid_uuid(id) then + name = id + + local sni, _, err_t = kong.db.snis:select_by_name(name) + if err_t then + return endpoints.handle_error(err_t) + end + + if sni then + id = sni.certificate.id + + else + if method ~= "PUT" then + return kong.response.exit(404, { message = "SNI not found" }) + end + + id = utils.uuid() + end end - local sni, _, err_t = db.snis:select_by_name(id) - if err_t then - return endpoints.handle_error(err_t) + self.params.certificates = id + self.params.name = name +end + + +local function prepare_args(self) + local name_field + do + local content_type = ngx.var.content_type + if content_type then + content_type = lower(content_type) + if find(content_type, "application/x-www-form-urlencoded", 1, true) == 1 or + find(content_type, "multipart/form-data", 1, true) == 1 then + name_field = kong.db.snis.schema.fields.name + end + end end - if sni then - self.params.certificates = sni.certificate.id - return + local method = self.req.method + local snis = 
self.args.post.snis + local name = self.params.name + + if type(snis) == "table" then + local count = #snis + + if name and method == "PUT" then + count = count + 1 + snis[count] = name + end + + if name_field then + for i=1, count do + snis[i] = arguments.infer_value(snis[i], name_field) + end + end + + elseif type(snis) == "string" then + if name_field then + snis = arguments.infer_value(snis, name_field) + end + + if name and method == "PUT" and name ~= snis then + snis = { snis, name } + else + snis = { snis } + end end - if self.req.method == "PUT" then - return + if not snis and method == "PUT" then + snis = ngx.null end - return kong.response.exit(404, { message = "SNI not found" }) + self.params.name = nil + self.args.post.snis = snis end return { ["/certificates/:certificates"] = { - before = get_cert_id_from_sni, + before = prepare_params, -- override to include the snis list when getting an individual certificate GET = endpoints.get_entity_endpoint(kong.db.certificates.schema, nil, nil, "select_with_name_list"), - -- override to create a new SNI in the PUT /certificates/foo.com (create) case - PUT = function(self, db, helpers) - local cert, err_t, _ - local id = unescape_uri(self.params.certificates) - - -- cert was found via id or sni inside `before` section - if utils.is_valid_uuid(id) then - cert, _, err_t = endpoints.upsert_entity(self, db, db.certificates.schema) - - else -- create a new cert. 
Add extra sni if provided on url - self.args.post.snis = Set.values(Set(self.args.post.snis or {}) + id) - cert, _, err_t = endpoints.insert_entity(self, db, db.certificates.schema) - end - - if err_t then - return endpoints.handle_error(err_t) - end - - if not cert then - return kong.response.exit(404, { message = "Not found" }) - end - - return kong.response.exit(200, cert) + PUT = function(self, _, _, parent) + prepare_args(self) + return parent() end, + + PATCH = function(self, _, _, parent) + prepare_args(self) + return parent() + end }, ["/certificates/:certificates/snis"] = { - before = get_cert_id_from_sni, + before = prepare_params, + }, + + ["/certificates/:certificates/snis/:snis"] = { + before = prepare_params, }, } diff --git a/kong/db/dao/certificates.lua b/kong/db/dao/certificates.lua index 7fc0b202a05e..80d23ae6ae4a 100644 --- a/kong/db/dao/certificates.lua +++ b/kong/db/dao/certificates.lua @@ -99,14 +99,11 @@ function _Certificates:update(cert_pk, cert, options) end end - -- update certificate if necessary - if cert.key or cert.cert then cert.snis = nil cert, err, err_t = self.super.update(self, cert_pk, cert, options) if err then return nil, err, err_t end - end if name_list then cert.snis = name_list diff --git a/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua b/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua index 58030983ee35..10ad994f43ab 100644 --- a/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua +++ b/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua @@ -326,7 +326,7 @@ describe("Admin API: #" .. strategy, function() assert.same(json, in_db) end) - it("updates if found", function() + it("upserts if found", function() local res = client:put("/certificates/" .. certificate.id, { body = { cert = ssl_fixtures.cert_alt, key = ssl_fixtures.key_alt }, headers = { ["Content-Type"] = "application/json" }, @@ -336,7 +336,7 @@ describe("Admin API: #" .. 
strategy, function() local json = cjson.decode(body) assert.same(ssl_fixtures.cert_alt, json.cert) assert.same(ssl_fixtures.key_alt, json.key) - assert.same({"bar.com", "foo.com"}, json.snis) + assert.same({}, json.snis) json.snis = nil @@ -440,6 +440,20 @@ describe("Admin API: #" .. strategy, function() end end) + it_content_types("update by id returns full certificate", function(content_type) + return function() + local res = client:patch("/certificates/" .. cert_foo.id, { + body = {}, + headers = { ["Content-Type"] = content_type } + }) + + local body = assert.res_status(200, res) + local json = cjson.decode(body) + + assert.same(cert_foo, json) + end + end) + it_content_types("updates a certificate by sni", function(content_type) return function() local body @@ -467,6 +481,20 @@ describe("Admin API: #" .. strategy, function() end end) + it_content_types("update by sni returns full certificate", function(content_type) + return function() + local res = client:patch("/certificates/foo.com", { + body = {}, + headers = { ["Content-Type"] = content_type } + }) + + local body = assert.res_status(200, res) + local json = cjson.decode(body) + + assert.same(cert_foo, json) + end + end) + it("returns 404 for a random non-existing id", function() local res = client:patch("/certificates/" .. utils.uuid(), { body = { From 7d11d3657635dd15aca04a15c9a9fbf913f3c7b2 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Thu, 12 Dec 2019 13:23:17 +0200 Subject: [PATCH 40/41] fix(acl) get acl by group fails when more consumers share the same group (#5322) ### Summary @albertored reported issue here #5280. This PR removes `endpoint_key` from `acls` dao, as it is not `unique`, and using non-unique endpoint keys leads to strange results. 
And of course it fixes the issue reported: ``` http POST :8001/consumers username=alice http POST :8001/consumers username=bob http POST :8001/consumers/alice/acls group=foo http POST :8001/consumers/bob/acls group=foo http GET :8001/consumers/alice/acls/foo http GET :8001/consumers/bob/acls/foo ``` All returning `200` as expected. ### Issues Resolved Fix #5280 --- kong-1.4.2-0.rockspec | 1 + kong/plugins/acl/api.lua | 60 ++++++++ kong/plugins/acl/daos.lua | 1 - spec/03-plugins/18-acl/01-api_spec.lua | 192 +++++++++++++++++++------ 4 files changed, 206 insertions(+), 48 deletions(-) create mode 100644 kong/plugins/acl/api.lua diff --git a/kong-1.4.2-0.rockspec b/kong-1.4.2-0.rockspec index d29c8c3c3ade..fd71e28142be 100644 --- a/kong-1.4.2-0.rockspec +++ b/kong-1.4.2-0.rockspec @@ -297,6 +297,7 @@ build = { ["kong.plugins.acl.daos"] = "kong/plugins/acl/daos.lua", ["kong.plugins.acl.groups"] = "kong/plugins/acl/groups.lua", ["kong.plugins.acl.acls"] = "kong/plugins/acl/acls.lua", + ["kong.plugins.acl.api"] = "kong/plugins/acl/api.lua", ["kong.plugins.correlation-id.handler"] = "kong/plugins/correlation-id/handler.lua", ["kong.plugins.correlation-id.schema"] = "kong/plugins/correlation-id/schema.lua", diff --git a/kong/plugins/acl/api.lua b/kong/plugins/acl/api.lua new file mode 100644 index 000000000000..c04b601fe77e --- /dev/null +++ b/kong/plugins/acl/api.lua @@ -0,0 +1,60 @@ +local endpoints = require "kong.api.endpoints" +local utils = require "kong.tools.utils" + + +local ngx = ngx +local kong = kong +local escape_uri = ngx.escape_uri +local unescape_uri = ngx.unescape_uri + + +return { + ["/consumers/:consumers/acls/:acls"] = { + schema = kong.db.acls.schema, + before = function(self, db, helpers) + local group = unescape_uri(self.params.acls) + if not utils.is_valid_uuid(group) then + local consumer_id = unescape_uri(self.params.consumers) + + if not utils.is_valid_uuid(consumer_id) then + local consumer, _, err_t = endpoints.select_entity(self, db, 
db.consumers.schema) + if err_t then + return endpoints.handle_error(err_t) + end + + if not consumer then + return kong.response.exit(404, { message = "Not found" }) + end + + consumer_id = consumer.id + end + + local cache_key = db.acls:cache_key(consumer_id, group) + local acl, _, err_t = db.acls:select_by_cache_key(cache_key) + if err_t then + return endpoints.handle_error(err_t) + end + + if acl then + self.params.acls = escape_uri(acl.id) + else + if self.req.method ~= "PUT" then + return kong.response.exit(404, { message = "Not found" }) + end + + self.params.acls = utils.uuid() + end + + self.params.group = group + end + end, + + PUT = function(self, db, helpers, parent) + if not self.args.post.group and self.params.group then + self.args.post.group = self.params.group + end + + return parent() + end + } +} diff --git a/kong/plugins/acl/daos.lua b/kong/plugins/acl/daos.lua index 9f597bf2d213..1565a9c6d9b0 100644 --- a/kong/plugins/acl/daos.lua +++ b/kong/plugins/acl/daos.lua @@ -5,7 +5,6 @@ return { dao = "kong.plugins.acl.acls", name = "acls", primary_key = { "id" }, - endpoint_key = "group", cache_key = { "consumer", "group" }, fields = { { id = typedefs.uuid }, diff --git a/spec/03-plugins/18-acl/01-api_spec.lua b/spec/03-plugins/18-acl/01-api_spec.lua index 6fd23581d5f2..11ae3d0ff5e8 100644 --- a/spec/03-plugins/18-acl/01-api_spec.lua +++ b/spec/03-plugins/18-acl/01-api_spec.lua @@ -1,3 +1,4 @@ +local utils = require "kong.tools.utils" local cjson = require "cjson" local helpers = require "spec.helpers" @@ -46,21 +47,21 @@ for _, strategy in helpers.each_strategy() do describe("POST", function() it("creates an ACL association", function() - local res = assert(admin_client:post("/consumers/bob/acls", { + local res = admin_client:post("/consumers/bob/acls", { body = { group = "admin" }, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(201, res) local json = cjson.decode(body) assert.equal(consumer.id, 
json.consumer.id) assert.equal("admin", json.group) end) it("creates an ACL association with tags", function() - local res = assert(admin_client:send { + local res = admin_client:send { method = "POST", path = "/consumers/bob/acls/", body = { @@ -70,7 +71,7 @@ for _, strategy in helpers.each_strategy() do headers = { ["Content-Type"] = "application/json" } - }) + } local body = assert.res_status(201, res) local json = cjson.decode(body) assert.equal(consumer.id, json.consumer.id) @@ -79,12 +80,12 @@ for _, strategy in helpers.each_strategy() do end) describe("errors", function() it("returns bad request", function() - local res = assert(admin_client:post("/consumers/bob/acls", { + local res = admin_client:post("/consumers/bob/acls", { body = {}, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(400, res) local json = cjson.decode(body) assert.same({ group = "required field missing" }, json.fields) @@ -99,7 +100,7 @@ for _, strategy in helpers.each_strategy() do it("retrieves the first page", function() bp.acls:insert_n(3, { consumer = { id = consumer.id } }) - local res = assert(admin_client:get("/consumers/bob/acls")) + local res = admin_client:get("/consumers/bob/acls") local body = assert.res_status(200, res) local json = cjson.decode(body) assert.is_table(json.data) @@ -123,13 +124,13 @@ for _, strategy in helpers.each_strategy() do end) describe("GET", function() it("retrieves by id", function() - local res = assert(admin_client:get("/consumers/bob/acls/" .. acl.id)) + local res = admin_client:get("/consumers/bob/acls/" .. acl.id) local body = assert.res_status(200, res) local json = cjson.decode(body) assert.equal(acl.id, json.id) end) it("retrieves by group", function() - local res = assert(admin_client:get("/consumers/bob/acls/" .. acl.group)) + local res = admin_client:get("/consumers/bob/acls/" .. 
acl.group) local body = assert.res_status(200, res) local json = cjson.decode(body) assert.equal(acl.id, json.id) @@ -139,29 +140,133 @@ for _, strategy in helpers.each_strategy() do username = "alice" } - local res = assert(admin_client:get("/consumers/bob/acls/" .. acl.id)) + local res = admin_client:get("/consumers/bob/acls/" .. acl.id) assert.res_status(200, res) - res = assert(admin_client:get("/consumers/alice/acls/" .. acl.id)) + res = admin_client:get("/consumers/alice/acls/" .. acl.id) assert.res_status(404, res) end) it("retrieves ACL by group only if the ACL belongs to the specified consumer", function() - local res = assert(admin_client:get("/consumers/bob/acls/" .. acl.group)) + local res = admin_client:get("/consumers/bob/acls/" .. acl.group) assert.res_status(200, res) - res = assert(admin_client:get("/consumers/alice/acls/" .. acl.group)) + res = admin_client:get("/consumers/alice/acls/" .. acl.group) assert.res_status(404, res) end) + it("retrieves right ACL by group when multiple consumers share the same group name created with POST", function() + local res = admin_client:post("/consumers", { + body = { + username = "anna", + }, + headers = { + ["Content-Type"] = "application/json", + }, + }) + assert.res_status(201, res) + assert.response(res).has.jsonbody() + + local res = admin_client:post("/consumers", { + body = { + username = "jack", + }, + headers = { + ["Content-Type"] = "application/json", + }, + }) + assert.res_status(201, res) + assert.response(res).has.jsonbody() + + local res = admin_client:post("/consumers/anna/acls", { + body = { + group = "foo" + }, + headers = { + ["Content-Type"] = "application/json" + }, + }) + local body = assert.res_status(201, res) + local ag = cjson.decode(body) + + local res = admin_client:post("/consumers/jack/acls", { + body = { + group = "foo" + }, + headers = { + ["Content-Type"] = "application/json" + }, + }) + local body = assert.res_status(201, res) + local jg = cjson.decode(body) + + local res = 
admin_client:get("/consumers/anna/acls/foo") + local body = assert.res_status(200, res) + local ag2 = cjson.decode(body) + + local res = admin_client:get("/consumers/jack/acls/foo") + local body = assert.res_status(200, res) + local jg2 = cjson.decode(body) + + assert.same(ag, ag2) + assert.same(jg, jg2) + assert.not_same(jg, ag) + assert.not_same(jg2, ag2) + assert.not_same(jg, ag2) + assert.not_same(jg2, ag) + + local res = admin_client:delete("/consumers/anna") + local _ = assert.res_status(204, res) + + local res = admin_client:delete("/consumers/jack") + local _ = assert.res_status(204, res) + end) + it("retrieves right ACL by group when multiple consumers share the same group name created with PUT", function() + local res = admin_client:put("/consumers/anna") + assert.res_status(200, res) + assert.response(res).has.jsonbody() + + local res = admin_client:put("/consumers/jack") + assert.res_status(200, res) + assert.response(res).has.jsonbody() + + local res = admin_client:put("/consumers/anna/acls/foo") + local body = assert.res_status(200, res) + local ag = cjson.decode(body) + + local res = admin_client:put("/consumers/jack/acls/foo") + local body = assert.res_status(200, res) + local jg = cjson.decode(body) + + local res = admin_client:get("/consumers/anna/acls/foo") + local body = assert.res_status(200, res) + local ag2 = cjson.decode(body) + + local res = admin_client:get("/consumers/jack/acls/foo") + local body = assert.res_status(200, res) + local jg2 = cjson.decode(body) + + assert.same(ag, ag2) + assert.same(jg, jg2) + assert.not_same(jg, ag) + assert.not_same(jg2, ag2) + assert.not_same(jg, ag2) + assert.not_same(jg2, ag) + + local res = admin_client:delete("/consumers/anna") + assert.res_status(204, res) + + local res = admin_client:delete("/consumers/jack") + assert.res_status(204, res) + end) end) describe("PUT", function() - it("updates an ACL's groupname", function() - local res = assert(admin_client:put("/consumers/bob/acls/pro", { + 
it("upserts an ACL's groupname", function() + local res = admin_client:put("/consumers/bob/acls/pro", { body = {}, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(200, res) local json = cjson.decode(body) assert.equal(consumer.id, json.consumer.id) @@ -169,14 +274,14 @@ for _, strategy in helpers.each_strategy() do end) describe("errors", function() it("returns bad request", function() - local res = assert(admin_client:put("/consumers/bob/acls/f7852533-9160-4f5a-ae12-1ab99219ea95", { + local res = admin_client:put("/consumers/bob/acls/f7852533-9160-4f5a-ae12-1ab99219ea95", { body = { group = 123, }, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(400, res) local json = cjson.decode(body) assert.same({ group = "expected a string" }, json.fields) @@ -188,14 +293,14 @@ for _, strategy in helpers.each_strategy() do it("updates an ACL group by id", function() local previous_group = acl.group - local res = assert(admin_client:patch("/consumers/bob/acls/" .. acl.id, { + local res = admin_client:patch("/consumers/bob/acls/" .. acl.id, { body = { group = "updatedGroup" }, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(200, res) local json = cjson.decode(body) assert.not_equal(previous_group, json.group) @@ -203,28 +308,28 @@ for _, strategy in helpers.each_strategy() do it("updates an ACL group by group", function() local previous_group = acl.group - local res = assert(admin_client:patch("/consumers/bob/acls/" .. acl.group, { + local res = admin_client:patch("/consumers/bob/acls/" .. 
acl.group, { body = { group = "updatedGroup2" }, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(200, res) local json = cjson.decode(body) assert.not_equal(previous_group, json.group) end) describe("errors", function() it("handles invalid input", function() - local res = assert(admin_client:patch("/consumers/bob/acls/" .. acl.id, { + local res = admin_client:patch("/consumers/bob/acls/" .. acl.id, { body = { group = 123, }, headers = { ["Content-Type"] = "application/json" } - })) + }) local body = assert.res_status(400, res) local json = cjson.decode(body) assert.same({ group = "expected a string" }, json.fields) @@ -234,20 +339,20 @@ for _, strategy in helpers.each_strategy() do describe("DELETE", function() it("deletes an ACL group by id", function() - local res = assert(admin_client:delete("/consumers/bob/acls/" .. acl.id)) + local res = admin_client:delete("/consumers/bob/acls/" .. acl.id) assert.res_status(204, res) end) it("deletes an ACL group by group", function() - local res = assert(admin_client:delete("/consumers/bob/acls/" .. acl2.group)) + local res = admin_client:delete("/consumers/bob/acls/" .. 
acl2.group) assert.res_status(204, res) end) describe("errors", function() it("returns 404 on missing group", function() - local res = assert(admin_client:delete("/consumers/bob/acls/blah")) + local res = admin_client:delete("/consumers/bob/acls/blah") assert.res_status(404, res) end) it("returns 404 if not found", function() - local res = assert(admin_client:delete("/consumers/bob/acls/00000000-0000-0000-0000-000000000000")) + local res = admin_client:delete("/consumers/bob/acls/00000000-0000-0000-0000-000000000000") assert.res_status(404, res) end) end) @@ -281,32 +386,32 @@ for _, strategy in helpers.each_strategy() do end) it("retrieves all the acls with trailing slash", function() - local res = assert(admin_client:get("/acls/")) + local res = admin_client:get("/acls/") local body = assert.res_status(200, res) local json = cjson.decode(body) assert.is_table(json.data) assert.equal(6, #json.data) end) it("retrieves all the acls without trailing slash", function() - local res = assert(admin_client:get("/acls")) + local res = admin_client:get("/acls") local body = assert.res_status(200, res) local json = cjson.decode(body) assert.is_table(json.data) assert.equal(6, #json.data) end) it("paginates through the acls", function() - local res = assert(admin_client:get("/acls?size=3")) + local res = admin_client:get("/acls?size=3") local body = assert.res_status(200, res) local json_1 = cjson.decode(body) assert.is_table(json_1.data) assert.equal(3, #json_1.data) - res = assert(admin_client:get("/acls", { + res = admin_client:get("/acls", { query = { size = 3, offset = json_1.offset, } - })) + }) body = assert.res_status(200, res) local json_2 = cjson.decode(body) assert.is_table(json_2.data) @@ -326,9 +431,7 @@ for _, strategy in helpers.each_strategy() do end) it("does not create acl when missing consumer", function() - local res = assert(admin_client:send { - method = "POST", - path = "/acls", + local res = admin_client:post("/acls", { body = { group = "test-group", 
}, @@ -342,9 +445,7 @@ for _, strategy in helpers.each_strategy() do end) it("creates acl", function() - local res = assert(admin_client:send { - method = "POST", - path = "/acls", + local res = admin_client:post("/acls", { body = { group = "test-group", consumer = { @@ -369,10 +470,8 @@ for _, strategy in helpers.each_strategy() do end) it("does not create acl when missing consumer", function() - local res = assert(admin_client:send { - method = "PUT", - path = "/acls/test-group", - body = { }, + local res = admin_client:put("/acls/" .. utils.uuid(), { + body = { group = "test-group" }, headers = { ["Content-Type"] = "application/json" } @@ -383,10 +482,9 @@ for _, strategy in helpers.each_strategy() do end) it("creates acl", function() - local res = assert(admin_client:send { - method = "PUT", - path = "/acls/test-group", + local res = admin_client:put("/acls/" .. utils.uuid(), { body = { + group = "test-group", consumer = { id = consumer.id } @@ -414,7 +512,7 @@ for _, strategy in helpers.each_strategy() do } end) it("retrieves a Consumer from an acl's id", function() - local res = assert(admin_client:get("/acls/" .. credential.id .. "/consumer")) + local res = admin_client:get("/acls/" .. credential.id .. "/consumer") local body = assert.res_status(200, res) local json = cjson.decode(body) assert.same(consumer, json) From f4661ec1b6d5d75bd7cb624c5436e78e688c5b36 Mon Sep 17 00:00:00 2001 From: Hisham Muhammad Date: Thu, 12 Dec 2019 16:29:40 -0300 Subject: [PATCH 41/41] feat(scripts) make-patch-release script (#5331) New script to automate most of the release process of a patch release (x.y.(z+1)). 
--- scripts/make-patch-release | 601 +++++++++++++++++++++++++++++++++++++ 1 file changed, 601 insertions(+) create mode 100755 scripts/make-patch-release diff --git a/scripts/make-patch-release b/scripts/make-patch-release new file mode 100755 index 000000000000..e51b3ed29f1f --- /dev/null +++ b/scripts/make-patch-release @@ -0,0 +1,601 @@ +#!/usr/bin/env bash + +red="\033[0;31m" +green="\033[0;32m" +cyan="\033[0;36m" +bold="\033[1m" +nocolor="\033[0m" + +#------------------------------------------------------------------------------- +function step() { + box=" " + color="$nocolor" + if [ "$version" != "" ] + then + if [ -e "/tmp/.step-$1-$version" ] + then + color="$green" + box="[x]" + else + color="$bold" + box="[ ]" + fi + fi + echo -e "$color $box Step $c) $2" + echo " $0 $version $1 $3" + echo -e "$nocolor" + c="$[c+1]" +} + +#------------------------------------------------------------------------------- +function usage() { + echo "Make a Kong patch release using this script:" + echo "" + echo "Usage:" + if [ "$version" = "" ] + then + echo " List executed steps for a given release" + echo " $0 $version $1 $3" + echo + fi + c=1 + step "create" "create the branch" + step "write_changelog" "prepare the changelog" + step "commit_changelog" "commit the changelog" + step "version_bump" "bump and commit the version number" + step "submit" "push and submit a release PR" + step "docs_pr" "push and submit a docs.konghq.com PR for the release" + step "merge" "merge, tag and sign the release" + step "update_docker" "update and submit a PR to Kong's docker-kong repo" + step "merge_docker" "merge, tag and sign Kong's docker-kong PR" + step "submit_docker" "submit a PR to docker-library/official-images" + step "homebrew" "bump version and submit a PR to homebrew-kong" + step "luarocks" "upload to LuaRocks" "" + step "vagrant" "bump version and submit a PR to kong-vagrant" + exit 0 +} + +#------------------------------------------------------------------------------- 
+function die() { + echo + echo -e "$red$bold*** $@$nocolor" + echo "See also: $0 --help" + echo + exit 1 +} + +#------------------------------------------------------------------------------- +function SUCCESS() { + echo + echo -e "$green$bold****************************************$nocolor$bold" + for line in "$@" + do + echo "$line" + done + echo -e "$green$bold****************************************$nocolor" + echo + touch /tmp/.step-$step-$version + exit 0 +} + +#------------------------------------------------------------------------------- +function CONFIRM() { + echo + echo -e "$cyan$bold----------------------------------------$nocolor$bold" + for line in "$@" + do + echo "$line" + done + echo -e "$cyan$bold----------------------------------------$nocolor" + read +} + +#------------------------------------------------------------------------------- +# Dependency checks +#------------------------------------------------------------------------------- + +hub --version &> /dev/null || die "hub is not in PATH. Get it from https://github.com/github/hub" + +if resty -v &> /dev/null +then + LUA=resty +elif lua -v &> /dev/null +then + LUA=lua +else + die "Lua interpreter is not in PATH. Install any Lua or OpenResty to run this script." +fi + +#------------------------------------------------------------------------------- +# Default help +#------------------------------------------------------------------------------- + +if [ "$1" = "-h" ] || [ "$1" = "--help" ] || ! [ "$1" ] +then + version="" + usage +fi + +#------------------------------------------------------------------------------- +# Variables +#------------------------------------------------------------------------------- + +version="$1" +step="$2" + +major=${version%%.*} +rest=${version#*.} +minor=${rest%%.*} +patch=${rest##*.} +rockspec="kong-$version-0.rockspec" +branch="release/$version" + +if ! 
[[ "$version" =~ ^[0-9]+.[0-9]+.[0-9]$ ]] +then + die "first argument must be a version in x.y.z format" +fi + +if [ "$step" = "" ] +then + usage +fi + +EDITOR="${EDITOR-$VISUAL}" + +#------------------------------------------------------------------------------- +function prepare_changelog() { + $LUA -e ' + local fd_in = io.open("CHANGELOG.md", "r") + local fd_out = io.open("CHANGELOG.md.new", "w") + local version = "'$version'" + + local state = "start" + for line in fd_in:lines() do + if state == "start" then + if line:match("^%- %[") then + fd_out:write("- [" .. version .. "](#" .. version:gsub("%.", "") .. ")\n") + state = "toc" + end + elseif state == "toc" then + if not line:match("^%- %[") then + state = "start_log" + end + elseif state == "start_log" then + fd_out:write("\n") + fd_out:write("## [" .. version .. "]\n") + fd_out:write("\n") + local today = os.date("*t") + fd_out:write(("> Released %04d/%02d/%02d\n"):format(today.year, today.month, today.day)) + fd_out:write("\n") + fd_out:write("<<< TODO Introduction, plus any sections below >>>\n") + fd_out:write("\n") + fd_out:write("### Fixes\n") + fd_out:write("\n") + fd_out:write("##### Core\n") + fd_out:write("\n") + fd_out:write("##### CLI\n") + fd_out:write("\n") + fd_out:write("##### Configuration\n") + fd_out:write("\n") + fd_out:write("##### Admin API\n") + fd_out:write("\n") + fd_out:write("##### PDK\n") + fd_out:write("\n") + fd_out:write("##### Plugins\n") + fd_out:write("\n") + fd_out:write("\n") + fd_out:write("[Back to TOC](#table-of-contents)\n") + fd_out:write("\n") + state = "log" + elseif state == "log" then + local prev_version = line:match("^%[(%d+%.%d+%.%d+)%]: ") + if prev_version then + fd_out:write("[" .. version .. "]: https://github.com/Kong/kong/compare/" .. prev_version .."..." .. version .. "\n") + state = "last" + end + end + + fd_out:write(line .. 
"\n") + end + fd_in:close() + fd_out:close() + ' + mv CHANGELOG.md.new CHANGELOG.md +} + +#------------------------------------------------------------------------------- +function bump_docs_kong_versions() { + $LUA -e ' + local fd_in = io.open("app/_data/kong_versions.yml", "r") + local fd_out = io.open("app/_data/kong_versions.yml.new", "w") + local version = "'$version'" + + local state = "start" + for line in fd_in:lines() do + if state == "start" then + if line:match("^ release: \"'$major'.'$minor'.x\"") then + state = "version" + end + fd_out:write(line .. "\n") + elseif state == "version" then + if line:match("^ version: \"") then + fd_out:write(" version: \"'$version'\"\n") + state = "wait_for_luarocks_version" + else + fd_out:write(line .. "\n") + end + elseif state == "wait_for_luarocks_version" then + if line:match("^ luarocks_version: \"") then + fd_out:write(" luarocks_version: \"'$version'-0\"\n") + state = "last" + else + fd_out:write(line .. "\n") + end + elseif state == "last" then + fd_out:write(line .. "\n") + end + end + fd_in:close() + fd_out:close() + ' + mv app/_data/kong_versions.yml.new app/_data/kong_versions.yml +} + +#------------------------------------------------------------------------------- +function make_github_release_file() { + versionlink=$(echo $version | tr -d .) 
+   cat <<EOF > release-$version.txt
+$version
+
+**Download Kong $version and run it now:**
+
+- https://konghq.com/install/
+- [Docker Image](https://hub.docker.com/_/kong/)
+
+Links:
+- [$version Changelog](https://github.com/Kong/kong/blob/master/CHANGELOG.md#$versionlink)
+EOF
+}
+
+#-------------------------------------------------------------------------------
+function bump_homebrew() {
+   curl -L -o "kong-$version.tar.gz" "https://bintray.com/kong/kong-src/download_file?file_path=kong-$version.tar.gz"
+   sum=$(sha256sum "kong-$version.tar.gz" | awk '{print $1}')
+   sed -i 's/kong-[0-9.]*.tar.gz/kong-'$version'.tar.gz/' Formula/kong.rb
+   sed -i 's/sha256 ".*"/sha256 "'$sum'"/' Formula/kong.rb
+}
+
+#-------------------------------------------------------------------------------
+function bump_vagrant() {
+   sed -i 's/version = "[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*"/version = "'$version'"/' Vagrantfile
+   sed -i 's/`[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*`/`'$version'`/' README.md
+}
+
+#-------------------------------------------------------------------------------
+function ensure_recent_luarocks() {
+   if ! ( luarocks upload --help | grep -q temp-key )
+   then
+      if [ `uname -s` = "Linux" ]
+      then
+         set -e
+         source .requirements
+         lv=3.2.1
+         pushd /tmp
+         rm -rf luarocks-$lv
+         mkdir -p luarocks-$lv
+         cd luarocks-$lv
+         curl -L -o "luarocks-$lv-linux-x86_64.zip" https://luarocks.github.io/luarocks/releases/luarocks-$lv-linux-x86_64.zip
+         unzip luarocks-$lv-linux-x86_64.zip
+         export PATH=/tmp/luarocks-$lv/luarocks-$lv-linux-x86_64:$PATH
+         popd
+      else
+         die "Your LuaRocks version is too old. Please upgrade LuaRocks."
+      fi
+   fi
+}
+
+case "$step" in
+   #---------------------------------------------------------------------------
+   create)
+      if [ $(git status --untracked-files=no --porcelain | wc -l) != "0" ]
+      then
+         die "Local tree is not clean, please commit or stash before running this."
+      fi
+
+      set -e
+      git checkout master
+      git pull
+      git checkout -B "$branch"
+
+      SUCCESS "Release branch was created locally." \
+              "You are ready to run the next step:" \
+              " $0 $version write_changelog"
+      ;;
+   #---------------------------------------------------------------------------
+   write_changelog)
+      if ! grep -q "\[$version\]" CHANGELOG.md
+      then
+         prepare_changelog
+      fi
+
+      CONFIRM "Press Enter to open your text editor ($EDITOR) to edit CHANGELOG.md" \
+              "or Ctrl-C to cancel."
+
+      $EDITOR CHANGELOG.md
+
+      SUCCESS "If you need to further edit the changelog," \
+              "you can run this step again." \
+              "If it is ready, you can proceed to the next step" \
+              "which will commit it:" \
+              " $0 $version commit_changelog"
+      ;;
+   #---------------------------------------------------------------------------
+   commit_changelog)
+      if ! git status CHANGELOG.md | grep -q "modified:"
+      then
+         die "No changes in CHANGELOG.md to commit. Did you write the changelog?"
+      fi
+
+      git diff
+
+      CONFIRM "If everything looks all right, press Enter to commit" \
+              "or Ctrl-C to cancel."
+
+      set -e
+      git add CHANGELOG.md
+      git commit -m "docs(changelog) add $version changes"
+      git log -n 1
+
+      SUCCESS "The changelog is now committed locally." \
+              "You are ready to run the next step:" \
+              " $0 $version version_bump"
+      ;;
+   #---------------------------------------------------------------------------
+   version_bump)
+      if ! grep -q "patch = $patch" kong/meta.lua
+      then
+         sed -i 's/patch = [0-9]*/patch = '$patch'/' kong/meta.lua
+         git add kong/meta.lua
+      fi
+      if ! [ -f "$rockspec" ]
+      then
+         git mv kong-*-0.rockspec "$rockspec"
+         sed -i 's/^version = ".*"/version = "'$version'-0"/' "$rockspec"
+         sed -i 's/^ tag = ".*"/ tag = "'$version'"/' "$rockspec"
+      fi
+
+      git status
+      git diff
+
+      CONFIRM "If everything looks all right, press Enter to make the release commit" \
+              "or Ctrl-C to cancel."
+ + git add $rockspec + + git commit -m "release: $version" + git log -n 1 + + SUCCESS "Version bump for the release is now committed locally." \ + "You are ready to run the next step:" \ + " $0 $version submit" + ;; + #--------------------------------------------------------------------------- + submit) + if ! git log -n 1 | grep -q "release: $version" + then + die "Release commit is not at the top of the current branch. Did you commit the version bump?" + fi + + git log + + CONFIRM "Press Enter to push the branch and open the release PR" \ + "or Ctrl-C to cancel." + + set -e + git push --set-upstream origin "$branch" + hub pull-request -b master -h "$branch" -m "Release: $version" -l "pr/please review,pr/do not merge" + + SUCCESS "Now get the above PR reviewed and approved." \ + "Once it is approved, you can continue to the 'merge' step." \ + "In the mean time, you can run the 'docs_pr' step:" \ + " $0 $version docs_pr" + ;; + #--------------------------------------------------------------------------- + docs_pr) + if [ -d ../docs.konghq.com ] + then + cd ../docs.konghq.com + else + cd .. + git clone https://github.com/kong/docs.konghq.com + cd docs.konghq.com + fi + git checkout master + git pull + git checkout -B "$branch" + bump_docs_kong_versions + + git diff + + CONFIRM "If everything looks all right, press Enter to commit and send a PR to https://github.com/kong/docs.konghq.com" \ + "or Ctrl-C to cancel." + + set -e + git add app/_data/kong_versions.yml + git commit -m "chore(*) update release metadata for $version" + + git push --set-upstream origin "$branch" + hub pull-request -b master -h "$branch" -m "Release: $version" -l "pr/please review,pr/do not merge" + + SUCCESS "Make sure you give Team Docs a heads-up" \ + "once the release is pushed to the main repo." 
\ + "When the main release PR is approved, you can proceed to:" \ + " $0 $version merge" + ;; + #--------------------------------------------------------------------------- + merge) + CONFIRM "Press Enter to merge the PR into master and push the tag and Github release" \ + "or Ctrl-C to cancel." + + set -e + git checkout "$branch" + git pull + git checkout master + git pull + git merge "$branch" + git push + git tag -s "$version" -m "$version" + git push origin "$version" + + make_github_release_file + + hub release create -F release-$version.txt "$version" + rm -f release-$version.txt + + SUCCESS "Make sure the packages are built and available on Bintray" \ + "before continuing to the following steps." \ + "Once they are built, you may run the following steps in parallel:" \ + "* 'homebrew'" \ + "* 'luarocks'" \ + "* 'vagrant'" \ + "* 'update_docker', then 'merge_docker', then 'submit_docker'" + ;; + #--------------------------------------------------------------------------- + update_docker) + if [ -d ../docker-kong ] + then + cd ../docker-kong + else + cd .. + git clone https://github.com/kong/docker-kong + cd docker-kong + fi + + set -e + ./update.sh "$version" + + SUCCESS "Make sure you get the PR above approved and merged" \ + "before continuing to the step 'merge_docker'." + ;; + #--------------------------------------------------------------------------- + merge_docker) + if [ -d ../docker-kong ] + then + cd ../docker-kong + else + cd .. + git clone https://github.com/kong/docker-kong + cd docker-kong + fi + + set -e + git checkout "$branch" + git pull + git checkout master + git pull + git merge "$branch" + git push + git tag -s "$version" -m "$version" + git push origin "$version" + + SUCCESS "Now you can run the next step:" \ + " $0 $version submit_docker" + ;; + #--------------------------------------------------------------------------- + submit_docker) + if [ -d ../docker-kong ] + then + cd ../docker-kong + else + cd .. 
+         git clone https://github.com/kong/docker-kong
+         cd docker-kong
+      fi
+
+      set -e
+      ./submit.sh -p "$version"
+
+      SUCCESS "Once this is approved in the main repo," \
+              "run the procedure for generating the RedHat container."
+      ;;
+   #---------------------------------------------------------------------------
+   homebrew)
+      if [ -d ../homebrew-kong ]
+      then
+         cd ../homebrew-kong
+      else
+         cd ..
+         git clone https://github.com/kong/homebrew-kong
+         cd homebrew-kong
+      fi
+
+      git checkout master
+      git pull
+      git checkout -B "$branch"
+      bump_homebrew
+
+      git diff
+
+      CONFIRM "If everything looks all right, press Enter to commit and send a PR to https://github.com/kong/homebrew-kong" \
+              "or Ctrl-C to cancel."
+
+      set -e
+      git add Formula/kong.rb
+      git commit -m "chore(kong) bump kong to $version"
+
+      git push --set-upstream origin "$branch"
+      hub pull-request -b master -h "$branch" -m "Release: $version"
+
+      SUCCESS "Make sure you get the PR above approved and merged."
+      ;;
+   #---------------------------------------------------------------------------
+   vagrant)
+      if [ -d ../kong-vagrant ]
+      then
+         cd ../kong-vagrant
+      else
+         cd ..
+         git clone https://github.com/kong/kong-vagrant
+         cd kong-vagrant
+      fi
+
+      git checkout master
+      git pull
+      git checkout -B "$branch"
+      bump_vagrant
+
+      git diff
+
+      CONFIRM "If everything looks all right, press Enter to commit and send a PR to https://github.com/kong/kong-vagrant" \
+              "or Ctrl-C to cancel."
+
+      set -e
+      git add README.md Vagrantfile
+      git commit -m "chore(*) bump Kong to $version"
+
+      git push --set-upstream origin "$branch"
+      hub pull-request -b master -h "$branch" -m "Release: $version"
+
+      SUCCESS "Make sure you get the PR above approved and merged."
+      ;;
+   #---------------------------------------------------------------------------
+   luarocks)
+      if ! [ "$3" ]
+      then
+         die "Kong API key for LuaRocks is required as an argument."
+ fi + + set -e + ensure_recent_luarocks + + luarocks --version + + luarocks upload --temp-key="$3" "$rockspec" --force + + SUCCESS "The LuaRocks entry is now up!" + ;; + #--------------------------------------------------------------------------- + *) + die "Unknown step!" + ;; +esac