mirror of https://github.com/valitydev/token-keeper.git (synced 2024-11-06 02:15:21 +00:00)

ED-25: MVP Erlang implementation (#4)

commit feb6e92fb7
parent f4fbf800b7
.gitignore (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
.rebar3
_*
.eunit
*.o
*.beam
*.plt
*.swp
*.swo
.erlang.cookie
ebin
log
erl_crash.dump
.rebar
logs
_build
.idea
*.iml
rebar3.crashdump
*~
Dockerfile
docker-compose.yml
*.sublime-workspace
.DS_Store

.gitmodules (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
[submodule "build_utils"]
	path = build_utils
	url = https://github.com/rbkmoney/build_utils.git

Dockerfile.sh (executable, new file, 24 lines)
@@ -0,0 +1,24 @@
#!/bin/bash
cat <<EOF
FROM $BASE_IMAGE
LABEL maintainer="Alexey Shaporin <a.shaporin@rbk.money>"
RUN mkdir -p /var/log/token_keeper
COPY ./_build/prod/rel/token_keeper /opt/token_keeper
WORKDIR /opt/token_keeper
CMD /opt/token_keeper/bin/token_keeper foreground
EXPOSE 8022
LABEL com.rbkmoney.$SERVICE_NAME.parent=$BASE_IMAGE_NAME \
      com.rbkmoney.$SERVICE_NAME.parent_tag=$BASE_IMAGE_TAG \
      com.rbkmoney.$SERVICE_NAME.build_img=build \
      com.rbkmoney.$SERVICE_NAME.build_img_tag=$BUILD_IMAGE_TAG \
      com.rbkmoney.$SERVICE_NAME.commit_id=$(git rev-parse HEAD) \
      com.rbkmoney.$SERVICE_NAME.commit_number=$(git rev-list --count HEAD) \
      com.rbkmoney.$SERVICE_NAME.branch=$( \
        if [ "HEAD" != $(git rev-parse --abbrev-ref HEAD) ]; then \
          echo $(git rev-parse --abbrev-ref HEAD); \
        elif [ -n "$BRANCH_NAME" ]; then \
          echo $BRANCH_NAME; \
        else \
          echo $(git name-rev --name-only HEAD); \
        fi)
EOF

Jenkinsfile (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
#!groovy
// -*- mode: groovy -*-

def finalHook = {
    runStage('store CT logs') {
        archive '_build/test/logs/'
    }
}

build('token_keeper', 'docker-host', finalHook) {
    checkoutRepo()
    loadBuildUtils()

    def pipeErlangService
    runStage('load pipeline') {
        env.JENKINS_LIB = "build_utils/jenkins_lib"
        env.SH_TOOLS = "build_utils/sh"
        pipeErlangService = load("${env.JENKINS_LIB}/pipeErlangService.groovy")
    }

    pipeErlangService.runPipe(false, true)
}

LICENSE (new file, 201 lines)
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

Makefile (new file, 75 lines)
@@ -0,0 +1,75 @@
REBAR := $(shell which rebar3 2>/dev/null || which ./rebar3)
SUBMODULES = build_utils
SUBTARGETS = $(patsubst %,%/.git,$(SUBMODULES))

UTILS_PATH := build_utils
TEMPLATES_PATH := .

# Name of the service
SERVICE_NAME := token-keeper
# Service image default tag
SERVICE_IMAGE_TAG ?= $(shell git rev-parse HEAD)
# The tag for service image to be pushed with
SERVICE_IMAGE_PUSH_TAG ?= $(SERVICE_IMAGE_TAG)

# Base image for the service
BASE_IMAGE_NAME := service-erlang
BASE_IMAGE_TAG := 51bd5f25d00cbf75616e2d672601dfe7351dcaa4

BUILD_IMAGE_NAME := build-erlang
BUILD_IMAGE_TAG := 61a001bbb48128895735a3ac35b0858484fdb2eb

CALL_ANYWHERE := \
	submodules \
	all compile xref lint dialyze cover release clean distclean \
	check_format format

CALL_W_CONTAINER := $(CALL_ANYWHERE) test

.PHONY: $(CALL_W_CONTAINER) all

all: compile

-include $(UTILS_PATH)/make_lib/utils_container.mk
-include $(UTILS_PATH)/make_lib/utils_image.mk

$(SUBTARGETS): %/.git: %
	git submodule update --init $<
	touch $@

submodules: $(SUBTARGETS)

compile: submodules
	$(REBAR) compile

xref:
	$(REBAR) xref

lint:
	$(REBAR) lint

check_format:
	$(REBAR) fmt -c

format:
	$(REBAR) fmt -w

dialyze:
	$(REBAR) dialyzer

release: submodules
	$(REBAR) as prod release

clean:
	$(REBAR) cover -r
	$(REBAR) clean

distclean:
	$(REBAR) clean
	rm -rf _build

cover:
	$(REBAR) cover

# CALL_W_CONTAINER
test: submodules
	$(REBAR) do eunit, ct

build_utils (submodule, new, 1 line)
@@ -0,0 +1 @@
Subproject commit e1318727d4d0c3e48f5122bf3197158b6695f50e

config/sys.config (new file, 94 lines)
@@ -0,0 +1,94 @@
[

    {token_keeper, [

        {ip, "::"},
        {port, 8022},
        {services, #{
            token_keeper => #{
                path => <<"/v1/token-keeper">>
            }
        }},
        {protocol_opts, #{
            % How long to wait for another request before closing a keepalive connection (ms)
            request_timeout => 5000
        }},
        {transport_opts, #{
            % Maximum number of simultaneous connections. (default = 1024)
            max_connections => 8000,
            % Size of the acceptor pool. (default = 10)
            num_acceptors => 100
        }},
        % How long to wait for outstanding requests to complete when asked to shut down (ms)
        {shutdown_timeout, 1000},

        {woody_event_handlers, [
            hay_woody_event_handler,
            {scoper_woody_event_handler, #{
                event_handler_opts => #{
                    formatter_opts => #{
                        max_length => 1000,
                        max_printable_string_length => 80
                    }
                }
            }}
        ]},

        {health_check, #{
            % disk => {erl_health, disk , ["/", 99]},
            % memory => {erl_health, cg_memory, [99]},
            % service => {erl_health, service , [<<"bouncer">>]}
        }},

        {tokens, #{
            jwt => #{
                keyset => #{
                    test => #{
                        source => {pem_file, "keys/local/private.pem"},
                        metadata => #{
                            authority => <<"test.rbkmoney.keycloak">>,
                            metadata_ns => <<"test.rbkmoney.token-keeper">>,
                            auth_method => detect,
                            user_realm => <<"external">>
                        }
                    }
                }
            }
        }},

        {user_session_token_origins, [<<"http://localhost">>]}

    ]},

    {kernel, [
        {logger_level, debug},
        {logger, [
            {handler, default, logger_std_h, #{
                config => #{
                    type => {file, "/var/log/token-keeper/log.json"}
                },
                formatter => {logger_logstash_formatter, #{}}
            }}
        ]}
    ]},

    % {how_are_you, [
    %     {metrics_publishers, [
    %         {hay_statsd_publisher, #{
    %             key_prefix => <<"bender.">>,
    %             host => "localhost",
    %             port => 8125
    %         }}
    %     ]}
    % ]},

    {scoper, [
        {storage, scoper_storage_logger}
    ]},

    {snowflake, [
        {max_backward_clock_moving, 1000} % 1 second
        % {machine_id, hostname_hash}
    ]}

].
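
For orientation, everything under `token_keeper` above lands in that application's environment. A minimal sketch of reading one of these options, mirroring what `tk_handler` does further down; the module and function here are hypothetical, for illustration only:

-module(tk_config_example).

-export([user_session_token_origins/0]).

%% Reads `user_session_token_origins` from the `token_keeper` application
%% environment, defaulting to [] when the key is unset, exactly as the
%% handler code does.
-spec user_session_token_origins() -> [binary()].
user_session_token_origins() ->
    application:get_env(token_keeper, user_session_token_origins, []).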

config/vm.args (new file, 3 lines)
@@ -0,0 +1,3 @@
-sname token_keeper

-setcookie token_keeper_cookie

rebar.config (new file, 233 lines)
@@ -0,0 +1,233 @@
%% Common project erlang options.
{erl_opts, [

    % mandatory
    debug_info,
    warnings_as_errors,
    warn_export_all,
    warn_missing_spec,
    warn_untyped_record,
    warn_export_vars,

    % by default
    warn_unused_record,
    warn_bif_clash,
    warn_obsolete_guard,
    warn_unused_vars,
    warn_shadow_vars,
    warn_unused_import,
    warn_unused_function,
    warn_deprecated_function

    % at will
    % bin_opt_info
    % no_auto_import
    % warn_missing_spec_all
]}.

%% Common project dependencies.
{deps, [
    {jsx, "3.0.0"},
    {jose, "1.11.1"},
    {thrift, {git, "https://github.com/rbkmoney/thrift_erlang.git", {branch, "master"}}},
    {genlib, {git, "https://github.com/rbkmoney/genlib.git", {branch, "master"}}},
    {woody, {git, "https://github.com/rbkmoney/woody_erlang.git", {branch, "master"}}},
    {woody_user_identity,
        {git, "https://github.com/rbkmoney/woody_erlang_user_identity.git", {branch, "master"}}},
    {token_keeper_proto, {git, "git@github.com:rbkmoney/token-keeper-proto.git", {branch, "master"}}},
    {scoper, {git, "https://github.com/rbkmoney/scoper.git", {branch, "master"}}},
    {erl_health, {git, "https://github.com/rbkmoney/erlang-health.git", {branch, "master"}}},
    {bouncer_client, {git, "https://github.com/rbkmoney/bouncer_client_erlang.git", {branch, "master"}}},

    % Production-only deps.
    % Defined here for the sake of rebar-locking.
    {recon, "2.5.1"},
    {logger_logstash_formatter,
        {git, "https://github.com/rbkmoney/logger_logstash_formatter.git", {branch, "master"}}},
    {how_are_you, {git, "https://github.com/rbkmoney/how_are_you.git", {branch, "master"}}}

]}.

%% Helpful plugins.
{plugins, [
    {rebar3_lint, "0.3.0"},
    {erlfmt, "0.10.0"}
]}.

%% Linter config.
{elvis, [
    #{
        dirs => ["src"],
        filter => "*.erl",
        ruleset => erl_files,
        rules => [
            {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
            {elvis_text_style, no_tabs},
            {elvis_text_style, no_trailing_whitespace},
            {elvis_style, macro_module_names},
            {elvis_style, operator_spaces, #{rules => [{right, ","}, {right, "++"}, {left, "++"}]}},
            {elvis_style, nesting_level, #{level => 4}},
            {elvis_style, god_modules, #{limit => 30, ignore => []}},
            {elvis_style, no_if_expression},
            {elvis_style, invalid_dynamic_call, #{ignore => []}},
            {elvis_style, used_ignored_variable},
            {elvis_style, no_behavior_info},
            {elvis_style, module_naming_convention, #{regex => "^[a-z]([a-z0-9]*_?)*(_SUITE)?$"}},
            {elvis_style, function_naming_convention, #{regex => "^[a-z]([a-z0-9]*_?)*$"}},
            {elvis_style, state_record_and_type, #{ignore => []}},
            {elvis_style, no_spec_with_records},
            {elvis_style, dont_repeat_yourself, #{min_complexity => 30}},
            {elvis_style, no_debug_call, #{}}
        ]
    },
    #{
        dirs => ["test"],
        filter => "*.erl",
        ruleset => erl_files,
        rules => [
            {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
            % We want to use `ct:pal/2` and friends in test code.
            {elvis_style, no_debug_call, disable},
            % Assert macros can trigger use of ignored binding, yet we want them for better
            % readability.
            {elvis_style, used_ignored_variable, disable},
            % Tests are usually more comprehensible when a bit more verbose.
            {elvis_style, dont_repeat_yourself, #{min_complexity => 20}},
            % Too opinionated.
            {elvis_style, state_record_and_type, disable},
            {elvis_style, god_modules, #{ignore => []}}
        ]
    },
    #{
        dirs => ["."],
        filter => "Makefile",
        ruleset => makefiles
    },
    #{
        dirs => ["."],
        filter => "rebar.config",
        rules => [
            {elvis_text_style, line_length, #{limit => 100, skip_comments => false}},
            {elvis_text_style, no_tabs},
            {elvis_text_style, no_trailing_whitespace}
        ]
    },
    #{
        dirs => ["src"],
        filter => "*.app.src",
        rules => [
            {elvis_text_style, line_length, #{limit => 100, skip_comments => false}},
            {elvis_text_style, no_tabs},
            {elvis_text_style, no_trailing_whitespace}
        ]
    }
]}.

{elvis_output_format, colors}.

%% XRef checks
{xref_checks, [
    undefined_function_calls,
    undefined_functions,
    deprecated_function_calls,
    deprecated_functions
]}.
% at will
% {xref_warnings, true}.

%% Tests
{cover_enabled, true}.

%% Relx configuration
{relx, [
    {release, {token_keeper, "0.1.0"}, [
        % tools for introspection
        {recon, load},
        % debugger
        {runtime_tools, load},
        % profiler
        {tools, load},
        % logger formatter
        {logger_logstash_formatter, load},
        how_are_you,
        token_keeper
    ]},
    {sys_config, "./config/sys.config"},
    {vm_args, "./config/vm.args"},
    {dev_mode, true},
    {include_erts, false},
    {extended_start_script, true}
]}.

%% Dialyzer static analyzing
{dialyzer, [
    {warnings, [
        % mandatory
        unmatched_returns,
        error_handling,
        race_conditions,
        unknown
    ]},
    {plt_apps, all_deps}
]}.

{profiles, [

    {prod, [
        {relx, [
            {dev_mode, false},
            {include_erts, true},
            {overlay, []}
        ]}
    ]},

    {test, [
        {cover_enabled, true},
        {deps, []}
    ]}

]}.

{shell, [
    % {config, "config/sys.config"},
    {apps, [token_keeper]}
]}.

{erlfmt, [
    {print_width, 120},
    {files, "{src,test}/*.{hrl,erl,src}"}
]}.

rebar.lock (new file, 116 lines)
@@ -0,0 +1,116 @@
{"1.2.0",
[{<<"bear">>,{pkg,<<"bear">>,<<"0.8.7">>},2},
 {<<"bouncer_client">>,
  {git,"https://github.com/rbkmoney/bouncer_client_erlang.git",
       {ref,"36cb53a7d4fea4861d5ea5cf7e2f572eba941fde"}},
  0},
 {<<"bouncer_proto">>,
  {git,"git@github.com:rbkmoney/bouncer-proto.git",
       {ref,"7ac88717904c6bab73096198b308380e006ed42c"}},
  1},
 {<<"cache">>,{pkg,<<"cache">>,<<"2.3.3">>},1},
 {<<"certifi">>,{pkg,<<"certifi">>,<<"2.5.3">>},2},
 {<<"cg_mon">>,
  {git,"https://github.com/rbkmoney/cg_mon.git",
       {ref,"5a87a37694e42b6592d3b4164ae54e0e87e24e18"}},
  1},
 {<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.8.0">>},1},
 {<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.9.1">>},2},
 {<<"erl_health">>,
  {git,"https://github.com/rbkmoney/erlang-health.git",
       {ref,"982af88738ca062eea451436d830eef8c1fbe3f9"}},
  0},
 {<<"folsom">>,
  {git,"https://github.com/folsom-project/folsom.git",
       {ref,"eeb1cc467eb64bd94075b95b8963e80d8b4df3df"}},
  1},
 {<<"genlib">>,
  {git,"https://github.com/rbkmoney/genlib.git",
       {ref,"4565a8d73f34a0b78cca32c9cd2b97d298bdadf8"}},
  0},
 {<<"gproc">>,{pkg,<<"gproc">>,<<"0.8.0">>},1},
 {<<"hackney">>,{pkg,<<"hackney">>,<<"1.17.0">>},1},
 {<<"how_are_you">>,
  {git,"https://github.com/rbkmoney/how_are_you.git",
       {ref,"29f9d3d7c35f7a2d586c8571f572838df5ec91dd"}},
  0},
 {<<"idna">>,{pkg,<<"idna">>,<<"6.1.1">>},2},
 {<<"jose">>,{pkg,<<"jose">>,<<"1.11.1">>},0},
 {<<"jsx">>,{pkg,<<"jsx">>,<<"3.0.0">>},0},
 {<<"logger_logstash_formatter">>,
  {git,"https://github.com/rbkmoney/logger_logstash_formatter.git",
       {ref,"87e52c755cf9e64d651e3ddddbfcd2ccd1db79db"}},
  0},
 {<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},2},
 {<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.2.0">>},2},
 {<<"org_management_proto">>,
  {git,"git@github.com:rbkmoney/org-management-proto.git",
       {ref,"06c5c8430e445cb7874e54358e457cbb5697fc32"}},
  1},
 {<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.1">>},2},
 {<<"ranch">>,{pkg,<<"ranch">>,<<"1.7.1">>},2},
 {<<"recon">>,{pkg,<<"recon">>,<<"2.5.1">>},0},
 {<<"scoper">>,
  {git,"https://github.com/rbkmoney/scoper.git",
       {ref,"89a973bf3cedc5a48c9fd89d719d25e79fe10027"}},
  0},
 {<<"snowflake">>,
  {git,"https://github.com/rbkmoney/snowflake.git",
       {ref,"de159486ef40cec67074afe71882bdc7f7deab72"}},
  1},
 {<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.6">>},2},
 {<<"thrift">>,
  {git,"https://github.com/rbkmoney/thrift_erlang.git",
       {ref,"846a0819d9b6d09d0c31f160e33a78dbad2067b4"}},
  0},
 {<<"token_keeper_proto">>,
  {git,"git@github.com:rbkmoney/token-keeper-proto.git",
       {ref,"48a18d87ea2443540272c80213f7612beea6af9c"}},
  0},
 {<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.7.0">>},2},
 {<<"woody">>,
  {git,"https://github.com/rbkmoney/woody_erlang.git",
       {ref,"f2cd30883d58eb1c3ab2172556956f757bc27e23"}},
  0},
 {<<"woody_user_identity">>,
  {git,"https://github.com/rbkmoney/woody_erlang_user_identity.git",
       {ref,"a480762fea8d7c08f105fb39ca809482b6cb042e"}},
  0}]}.
[
{pkg_hash,[
 {<<"bear">>, <<"16264309AE5D005D03718A5C82641FCC259C9E8F09ADEB6FD79CA4271168656F">>},
 {<<"cache">>, <<"B23A5FE7095445A88412A6E614C933377E0137B44FFED77C9B3FEF1A731A20B2">>},
 {<<"certifi">>, <<"70BDD7E7188C804F3A30EE0E7C99655BC35D8AC41C23E12325F36AB449B70651">>},
 {<<"cowboy">>, <<"F3DC62E35797ECD9AC1B50DB74611193C29815401E53BAC9A5C0577BD7BC667D">>},
 {<<"cowlib">>, <<"61A6C7C50CF07FDD24B2F45B89500BB93B6686579B069A89F88CB211E1125C78">>},
 {<<"gproc">>, <<"CEA02C578589C61E5341FCE149EA36CCEF236CC2ECAC8691FBA408E7EA77EC2F">>},
 {<<"hackney">>, <<"717EA195FD2F898D9FE9F1CE0AFCC2621A41ECFE137FAE57E7FE6E9484B9AA99">>},
 {<<"idna">>, <<"8A63070E9F7D0C62EB9D9FCB360A7DE382448200FBBD1B106CC96D3D8099DF8D">>},
 {<<"jose">>, <<"59DA64010C69AAD6CDE2F5B9248B896B84472E99BD18F246085B7B9FE435DCDB">>},
 {<<"jsx">>, <<"20A170ABD4335FC6DB24D5FAD1E5D677C55DADF83D1B20A8A33B5FE159892A39">>},
 {<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
 {<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
 {<<"parse_trans">>, <<"16328AB840CC09919BD10DAB29E431DA3AF9E9E7E7E6F0089DD5A2D2820011D8">>},
 {<<"ranch">>, <<"6B1FAB51B49196860B733A49C07604465A47BDB78AA10C1C16A3D199F7F8C881">>},
 {<<"recon">>, <<"430FFA60685AC1EFDFB1FE4C97B8767C92D0D92E6E7C3E8621559BA77598678A">>},
 {<<"ssl_verify_fun">>, <<"CF344F5692C82D2CD7554F5EC8FD961548D4FD09E7D22F5B62482E5AEAEBD4B0">>},
 {<<"unicode_util_compat">>, <<"BC84380C9AB48177092F43AC89E4DFA2C6D62B40B8BD132B1059ECC7232F9A78">>}]},
{pkg_hash_ext,[
 {<<"bear">>, <<"534217DCE6A719D59E54FB0EB7A367900DBFC5F85757E8C1F94269DF383F6D9B">>},
 {<<"cache">>, <<"44516CE6FA03594D3A2AF025DD3A87BFE711000EB730219E1DDEFC816E0AA2F4">>},
 {<<"certifi">>, <<"ED516ACB3929B101208A9D700062D520F3953DA3B6B918D866106FFA980E1C10">>},
 {<<"cowboy">>, <<"4643E4FBA74AC96D4D152C75803DE6FAD0B3FA5DF354C71AFDD6CBEEB15FAC8A">>},
 {<<"cowlib">>, <<"E4175DC240A70D996156160891E1C62238EDE1729E45740BDD38064DAD476170">>},
 {<<"gproc">>, <<"580ADAFA56463B75263EF5A5DF4C86AF321F68694E7786CB057FD805D1E2A7DE">>},
 {<<"hackney">>, <<"64C22225F1EA8855F584720C0E5B3CD14095703AF1C9FBC845BA042811DC671C">>},
 {<<"idna">>, <<"92376EB7894412ED19AC475E4A86F7B413C1B9FBB5BD16DCCD57934157944CEA">>},
 {<<"jose">>, <<"078F6C9FB3CD2F4CFAFC972C814261A7D1E8D2B3685C0A76EB87E158EFFF1AC5">>},
 {<<"jsx">>, <<"37BECA0435F5CA8A2F45F76A46211E76418FBEF80C36F0361C249FC75059DC6D">>},
 {<<"metrics">>, <<"69B09ADDDC4F74A40716AE54D140F93BEB0FB8978D8636EADED0C31B6F099F16">>},
 {<<"mimerl">>, <<"F278585650AA581986264638EBF698F8BB19DF297F66AD91B18910DFC6E19323">>},
 {<<"parse_trans">>, <<"07CD9577885F56362D414E8C4C4E6BDF10D43A8767ABB92D24CBE8B24C54888B">>},
 {<<"ranch">>, <<"451D8527787DF716D99DC36162FCA05934915DB0B6141BBDAC2EA8D3C7AFC7D7">>},
 {<<"recon">>, <<"5721C6B6D50122D8F68CCCAC712CAA1231F97894BAB779EFF5FF0F886CB44648">>},
 {<<"ssl_verify_fun">>, <<"BDB0D2471F453C88FF3908E7686F86F9BE327D065CC1EC16FA4540197EA04680">>},
 {<<"unicode_util_compat">>, <<"25EEE6D67DF61960CF6A794239566599B09E17E668D3700247BC498638152521">>}]}
].

src/tk_audit_log.erl (new file, 264 lines)
@@ -0,0 +1,264 @@
-module(tk_audit_log).

-export([init/1]).
-export([stop/1]).

-behaviour(tk_pulse).
-export([handle_beat/3]).

-define(DEFAULT_LOG_LEVEL, notice).
-define(DEFAULT_FLUSH_QLEN, 10000).
-define(LOG_DOMAIN, [audit]).

-type opts() :: #{
    log => log_opts() | disabled
}.

% NOTE
% Keep in sync with `opts()`.
-define(OPTS, [log]).

-type log_opts() :: #{
    % Which log level to use for audit events? Defaults to `notice`.
    level => logger:level(),
    backend => logger_backend_opts(),
    % http://erlang.org/doc/man/logger.html#type-formatter_config
    formatter => {module(), logger:formatter_config()}
}.

% NOTE
% Keep in sync with `log_opts()`.
-define(LOG_OPTS, [level, backend, formatter]).

-type logger_backend_opts() :: #{
    % Where to log? Defaults to `standard_io`.
    type => standard_io | standard_error | file,
    % Log file location. No default, MUST be set if `type` is `file`.
    file => file:filename(),
    % http://erlang.org/doc/man/logger_std_h.html
    max_no_bytes => pos_integer() | infinity,
    max_no_files => non_neg_integer(),
    % Maximum number of events to queue for writing. Defaults to 10000.
    % http://erlang.org/doc/apps/kernel/logger_chapter.html#message-queue-length
    flush_qlen => non_neg_integer()
}.

% NOTE
% Keep in sync with `logger_backend_opts()`.
-define(LOGGER_BACKEND_OPTS, [type, file, max_no_bytes, max_no_files, flush_qlen]).

-export_type([opts/0]).

%%

-type st() ::
    {log, logger:level()}.

-spec init(opts()) -> tk_pulse:handlers(st()).
init(Opts) ->
    _ = assert_strict_opts(?OPTS, Opts),
    init_log_handler(maps:get(log, Opts, #{})).

init_log_handler(LogOpts = #{}) ->
    _ = assert_strict_opts(?LOG_OPTS, LogOpts),
    Level = validate_log_level(maps:get(level, LogOpts, ?DEFAULT_LOG_LEVEL)),
    BackendConfig = mk_logger_backend_config(maps:get(backend, LogOpts, #{})),
    HandlerConfig0 = maps:with([formatter], LogOpts),
    HandlerConfig1 = HandlerConfig0#{
        config => BackendConfig,
        % NOTE
        % These two options together ensure that _only_ audit logs will flow through to the
        % backend.
        filters => [{domain, {fun logger_filters:domain/2, {log, sub, ?LOG_DOMAIN}}}],
        filter_default => stop
    },
    ok = logger:add_handler(
        ?MODULE,
        logger_std_h,
        HandlerConfig1
    ),
    % TODO
    % Validate that global logger level doesn't suppress ours?
    ok = log(Level, "audit log started", #{}),
    [{?MODULE, {log, Level}}];
init_log_handler(disabled) ->
    [].

validate_log_level(Level) ->
    eq = logger:compare_levels(Level, Level),
    Level.

mk_logger_backend_config(BackendOpts) ->
    _ = assert_strict_opts(?LOGGER_BACKEND_OPTS, BackendOpts),
    Type = validate_log_type(maps:get(type, BackendOpts, standard_io)),
    mk_logger_backend_config(Type, BackendOpts).

validate_log_type(Type) when
    Type == standard_io;
    Type == standard_error;
    Type == file
->
    Type;
validate_log_type(Type) ->
    erlang:error(badarg, [Type]).

mk_logger_backend_config(file = Type, Opts) ->
    Defaults = get_default_backend_config(Type, Opts),
    Filename = maps:get(file, Opts),
    Config0 = maps:with([max_no_bytes, max_no_files], Opts),
    Config = maps:merge(Defaults, Config0),
    Config#{
        type => Type,
        file => Filename
    };
mk_logger_backend_config(Type, Opts) ->
    Defaults = get_default_backend_config(Type, Opts),
    Defaults#{
        type => Type
    }.

get_default_backend_config(file, Opts) ->
    % NOTE
    % All these options are chosen to push message loss probability as close to zero as
    % possible. Zero doesn't seem reachable with standard logger infrastructure because of
    % various safeguards around unexpected backend and formatter errors.
    Config = get_default_backend_config(Opts),
    Config#{
        % Protects against accidental write loss upon file rotation.
        file_check => 0
    };
get_default_backend_config(_Type, Opts) ->
    get_default_backend_config(Opts).

get_default_backend_config(Opts) ->
    FlushQLen = maps:get(flush_qlen, Opts, ?DEFAULT_FLUSH_QLEN),
    #{
        % No need to set this up here since we sync on EVERY write ourselves.
        filesync_repeat_interval => no_repeat,

        % http://erlang.org/doc/apps/kernel/logger_chapter.html#message-queue-length
        sync_mode_qlen => 0,
        drop_mode_qlen => FlushQLen,
        flush_qlen => FlushQLen,

        % http://erlang.org/doc/apps/kernel/logger_chapter.html#controlling-bursts-of-log-requests
        burst_limit_enable => false,

        % http://erlang.org/doc/apps/kernel/logger_chapter.html#terminating-an-overloaded-handler
        overload_kill_enable => false
    }.

assert_strict_opts(Ks, Opts) ->
    case maps:without(Ks, Opts) of
        Empty when map_size(Empty) == 0 ->
            ok;
        Unrecognized ->
            erlang:error({unrecognized_opts, Unrecognized})
    end.

%%

-spec stop(opts()) -> ok.
stop(Opts = #{}) ->
    stop_log_handler(maps:get(log, Opts, #{})).

-spec stop_log_handler(log_opts()) -> ok.
stop_log_handler(LogOpts = #{}) ->
    Level = maps:get(level, LogOpts, ?DEFAULT_LOG_LEVEL),
    ok = log(Level, "audit log stopped", #{}),
    _ = logger:remove_handler(?MODULE),
    ok;
stop_log_handler(disabled) ->
    ok.

%%

-type beat() :: tk_pulse:beat().
-type metadata() :: tk_pulse:metadata().

-spec handle_beat(beat(), metadata(), st()) -> ok.
handle_beat(Beat, Metadata, {log, Level}) ->
    log(
        get_severity(Beat, Level),
        get_message(Beat),
        extract_metadata(Metadata, get_beat_metadata(Beat))
    ).

log(Severity, Message, Metadata) ->
    DefaultMetadata = #{
        type => audit,
        domain => ?LOG_DOMAIN
    },
    % NOTE
    % Matching on `ok` here is crucial. Logger may decide to flush the queue behind the scenes
    % so we need to ensure that is not happening.
    ok = logger:log(Severity, Message, maps:merge(Metadata, DefaultMetadata)),
    ok = logger_std_h:filesync(?MODULE),
    ok.

get_severity({get_by_token, started}, _Level) -> debug;
get_severity(_, Level) -> Level.

get_message({get_by_token, started}) -> <<"get_by_token started">>;
get_message({get_by_token, succeeded}) -> <<"get_by_token succeeded">>;
get_message({get_by_token, {failed, _}}) -> <<"get_by_token failed">>.

get_beat_metadata({get_by_token, Event}) ->
    #{
        get_by_token =>
            case Event of
                started ->
                    #{
                        event => started
                    };
                succeeded ->
                    #{
                        event => succeeded
                    };
                {failed, Error} ->
                    #{
                        event => failed,
                        error => encode_error(Error)
                    }
            end
    }.

encode_error({Class, Details}) when is_atom(Class) ->
    #{class => Class, details => genlib:format(Details)};
encode_error(Class) when is_atom(Class) ->
    #{class => Class};
encode_error(Other) ->
    #{details => genlib:format(Other)}.

extract_metadata(Metadata, Acc) ->
    Acc1 = extract_opt_meta(token, Metadata, fun encode_token/1, Acc),
    Acc2 = extract_opt_meta(source, Metadata, fun encode_token_source/1, Acc1),
    extract_woody_ctx(maps:get(woody_ctx, Metadata, undefined), Acc2).

extract_opt_meta(K, Metadata, EncodeFun, Acc) ->
    case maps:find(K, Metadata) of
        {ok, V} -> Acc#{K => EncodeFun(V)};
        error -> Acc
    end.

encode_token({JTI, Claims, TokenMetadata}) ->
    #{
        jti => JTI,
        claims => Claims,
        metadata => TokenMetadata
    }.

encode_token_source(TokenSourceContext = #{}) ->
    TokenSourceContext.

extract_woody_ctx(WoodyCtx = #{rpc_id := RpcID}, Acc) ->
    extract_woody_meta(WoodyCtx, extract_woody_rpc_id(RpcID, Acc));
extract_woody_ctx(undefined, Acc) ->
    Acc.

extract_woody_rpc_id(RpcID = #{span_id := _, trace_id := _, parent_id := _}, Acc) ->
    maps:merge(Acc, RpcID).

extract_woody_meta(#{meta := Meta}, Acc) when map_size(Meta) > 0 ->
    Acc#{woody => #{metadata => Meta}};
extract_woody_meta(#{}, Acc) ->
    Acc.
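
A sketch of wiring this up at startup: `init/1` returns pulse handlers that can be passed straight into the woody handler options. The option values below are illustrative, not taken from this commit:

%% Illustrative only: initialize the audit log and hand the resulting pulse
%% handlers to the woody handler options (see `tk_handler:opts()` below).
AuditPulse = tk_audit_log:init(#{
    log => #{
        level => notice,
        backend => #{type => file, file => "/var/log/token-keeper/audit.json"}
    }
}),
HandlerOpts = #{pulse => AuditPulse}.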

src/tk_bouncer_context.erl (new file, 151 lines)
@@ -0,0 +1,151 @@
-module(tk_bouncer_context).

-include_lib("token_keeper_proto/include/tk_context_thrift.hrl").

-export([extract_context_fragment/2]).

-type encoded_context_fragment() :: tk_context_thrift:'ContextFragment'().

%%

-spec extract_context_fragment(tk_token_jwt:t(), token_keeper:token_type()) -> encoded_context_fragment() | undefined.
extract_context_fragment(TokenInfo, TokenType) ->
    extract_context_fragment([claim, metadata], TokenInfo, TokenType).

extract_context_fragment([Method | Rest], TokenInfo, TokenType) ->
    case extract_context_fragment_by(Method, TokenInfo, TokenType) of
        Fragment when Fragment =/= undefined ->
            Fragment;
        undefined ->
            extract_context_fragment(Rest, TokenInfo, TokenType)
    end;
extract_context_fragment([], _, _) ->
    undefined.

%%

extract_context_fragment_by(claim, TokenInfo, _TokenType) ->
    % TODO
    % We deliberately do not handle decoding errors here since we extract claims from verified
    % tokens only, hence they must be well-formed here.
    Claims = tk_token_jwt:get_claims(TokenInfo),
    case get_claim(Claims) of
        {ok, ClaimFragment} ->
            ClaimFragment;
        undefined ->
            undefined
    end;
extract_context_fragment_by(metadata, TokenInfo, TokenType) ->
    case tk_token_jwt:get_metadata(TokenInfo) of
        #{auth_method := detect} ->
            AuthMethod = get_auth_method(TokenType),
            build_auth_context_fragment(AuthMethod, TokenInfo);
        #{auth_method := AuthMethod} ->
            build_auth_context_fragment(AuthMethod, TokenInfo);
        #{} ->
            undefined
    end.

get_auth_method(TokenType) ->
    TokenType.

-spec build_auth_context_fragment(
    tk_token_jwt:auth_method(),
    tk_token_jwt:t()
) -> encoded_context_fragment().
build_auth_context_fragment(api_key_token, TokenInfo) ->
    UserID = tk_token_jwt:get_subject_id(TokenInfo),
    Acc0 = bouncer_context_helpers:empty(),
    Acc1 = bouncer_context_helpers:add_auth(
        #{
            method => <<"ApiKeyToken">>,
            token => #{id => tk_token_jwt:get_token_id(TokenInfo)},
            scope => [#{party => #{id => UserID}}]
        },
        Acc0
    ),
    encode_context_fragment(Acc1);
build_auth_context_fragment(user_session_token, TokenInfo) ->
    Metadata = tk_token_jwt:get_metadata(TokenInfo),
    UserID = tk_token_jwt:get_subject_id(TokenInfo),
    Expiration = tk_token_jwt:get_expires_at(TokenInfo),
    Acc0 = bouncer_context_helpers:empty(),
    Acc1 = bouncer_context_helpers:add_user(
        #{
            id => UserID,
            email => tk_token_jwt:get_subject_email(TokenInfo),
            realm => #{id => maps:get(user_realm, Metadata, undefined)}
        },
        Acc0
    ),
    Acc2 = bouncer_context_helpers:add_auth(
        #{
            method => <<"SessionToken">>,
            expiration => make_auth_expiration(Expiration),
            token => #{id => tk_token_jwt:get_token_id(TokenInfo)}
        },
        Acc1
    ),
    encode_context_fragment(Acc2).

make_auth_expiration(Timestamp) when is_integer(Timestamp) ->
    genlib_rfc3339:format(Timestamp, second);
make_auth_expiration(unlimited) ->
    undefined.

%%

-define(CLAIM_BOUNCER_CTX, <<"bouncer_ctx">>).
-define(CLAIM_CTX_TYPE, <<"ty">>).
-define(CLAIM_CTX_CONTEXT, <<"ct">>).

-define(CLAIM_CTX_TYPE_V1_THRIFT_BINARY, <<"v1_thrift_binary">>).

-type claim() :: tk_token_jwt:claim().
-type claims() :: tk_token_jwt:claims().

-spec get_claim(claims()) ->
    {ok, encoded_context_fragment()} | {error, {unsupported, claim()} | {malformed, binary()}} | undefined.
get_claim(Claims) ->
    case maps:get(?CLAIM_BOUNCER_CTX, Claims, undefined) of
        Claim when Claim /= undefined ->
            decode_claim(Claim);
        undefined ->
            undefined
    end.

-spec decode_claim(claim()) ->
    {ok, encoded_context_fragment()} | {error, {unsupported, claim()} | {malformed, binary()}}.
decode_claim(#{
    ?CLAIM_CTX_TYPE := ?CLAIM_CTX_TYPE_V1_THRIFT_BINARY,
    ?CLAIM_CTX_CONTEXT := Content
}) ->
    try
        {ok, #bctx_ContextFragment{
            type = v1_thrift_binary,
            content = base64:decode(Content)
        }}
    catch
        % NOTE
        % The `base64:decode/1` fails in unpredictable ways.
        error:_ ->
            {error, {malformed, Content}}
    end;
decode_claim(Ctx) ->
    {error, {unsupported, Ctx}}.

%%

encode_context_fragment(ContextFragment) ->
    #bctx_ContextFragment{
        type = v1_thrift_binary,
        content = encode_context_fragment_content(ContextFragment)
    }.

encode_context_fragment_content(ContextFragment) ->
    Type = {struct, struct, {bouncer_context_v1_thrift, 'ContextFragment'}},
    Codec = thrift_strict_binary_codec:new(),
    case thrift_strict_binary_codec:write(Codec, Type, ContextFragment) of
        {ok, Codec1} ->
            thrift_strict_binary_codec:close(Codec1)
    end.
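
For reference, this is the shape of a `bouncer_ctx` claim that `decode_claim/1` accepts, with `Content` standing in for a thrift-encoded `ContextFragment` binary. The value is illustrative, not taken from this commit:

%% Illustrative claim shape; `Content` is assumed to be a binary produced
%% by encode_context_fragment_content/1 or an equivalent thrift encoder.
BouncerCtxClaim = #{
    <<"bouncer_ctx">> => #{
        <<"ty">> => <<"v1_thrift_binary">>,
        <<"ct">> => base64:encode(Content)
    }
}.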

src/tk_handler.erl (new file, 138 lines)
@@ -0,0 +1,138 @@
-module(tk_handler).

-include_lib("token_keeper_proto/include/tk_context_thrift.hrl").
-include_lib("token_keeper_proto/include/tk_token_keeper_thrift.hrl").

%% Woody handler

-behaviour(woody_server_thrift_handler).
-export([handle_function/4]).

%% Internal types

-type opts() :: #{
    pulse => tk_pulse:handlers()
}.

-record(state, {
    woody_context :: woody_context:ctx(),
    pulse :: tk_pulse:handlers(),
    pulse_metadata :: tk_pulse:metadata()
}).

%%

-spec handle_function(woody:func(), woody:args(), woody_context:ctx(), opts()) -> {ok, woody:result()}.
handle_function(Op, Args, WoodyCtx, Opts) ->
    State = make_state(WoodyCtx, Opts),
    do_handle_function(Op, Args, State).

do_handle_function('Create', _, _State) ->
    erlang:error(not_implemented);
do_handle_function('CreateEphemeral', _, _State) ->
    erlang:error(not_implemented);
do_handle_function('AddExistingToken', _, _State) ->
    erlang:error(not_implemented);
do_handle_function('GetByToken' = Op, {Token, TokenSourceContext}, State) ->
    _ = handle_beat(Op, started, State),
    case tk_token_jwt:verify(Token) of
        {ok, TokenInfo} ->
            TokenSourceContextDecoded = decode_source_context(TokenSourceContext),
            State1 = save_pulse_metadata(#{token => TokenInfo, source => TokenSourceContextDecoded}, State),
            case extract_auth_data(TokenInfo, TokenSourceContextDecoded) of
                {ok, AuthDataPrototype} ->
                    EncodedAuthData = encode_auth_data(AuthDataPrototype#{token => Token}),
                    _ = handle_beat(Op, succeeded, State1),
                    {ok, EncodedAuthData};
                {error, Reason} ->
                    _ = handle_beat(Op, {failed, {context_creation, Reason}}, State1),
                    woody_error:raise(business, #token_keeper_ContextCreationFailed{})
            end;
        {error, Reason} ->
            _ = handle_beat(Op, {failed, {token_verification, Reason}}, State),
            woody_error:raise(business, #token_keeper_InvalidToken{})
    end;
do_handle_function('Get', _, _State) ->
    erlang:error(not_implemented);
do_handle_function('Revoke', _, _State) ->
    erlang:error(not_implemented).

%% Internal functions

make_state(WoodyCtx, Opts) ->
    #state{
        woody_context = WoodyCtx,
        pulse = maps:get(pulse, Opts, []),
        pulse_metadata = #{woody_ctx => WoodyCtx}
    }.

extract_auth_data(TokenInfo, TokenSourceContext) ->
    TokenType = determine_token_type(TokenSourceContext),
    case tk_bouncer_context:extract_context_fragment(TokenInfo, TokenType) of
        ContextFragment when ContextFragment =/= undefined ->
            AuthDataPrototype = genlib_map:compact(#{
                context => ContextFragment,
                metadata => extract_token_metadata(TokenType, TokenInfo),
                authority => get_authority(TokenInfo)
            }),
            {ok, AuthDataPrototype};
        undefined ->
            {error, unable_to_infer_auth_data}
    end.

determine_token_type(#{request_origin := Origin}) ->
    UserTokenOrigins = application:get_env(token_keeper, user_session_token_origins, []),
    case lists:member(Origin, UserTokenOrigins) of
        true ->
            user_session_token;
        false ->
            api_key_token
    end;
determine_token_type(#{}) ->
    api_key_token.

get_authority(TokenInfo) ->
    Metadata = tk_token_jwt:get_metadata(TokenInfo),
    maps:get(authority, Metadata).

extract_token_metadata(api_key_token, TokenInfo) ->
    case tk_token_jwt:get_subject_id(TokenInfo) of
        PartyID when PartyID =/= undefined ->
            wrap_metadata(#{<<"party_id">> => PartyID}, TokenInfo);
        _ ->
            undefined
    end;
extract_token_metadata(user_session_token, _TokenInfo) ->
    undefined.

wrap_metadata(Metadata, TokenInfo) ->
    TokenMetadata = tk_token_jwt:get_metadata(TokenInfo),
    MetadataNS = maps:get(metadata_ns, TokenMetadata),
    #{MetadataNS => Metadata}.

encode_auth_data(AuthData) ->
    #token_keeper_AuthData{
        id = maps:get(id, AuthData, undefined),
        token = maps:get(token, AuthData),
        %% Assume active?
        status = maps:get(status, AuthData, active),
        context = maps:get(context, AuthData),
        metadata = maps:get(metadata, AuthData, #{}),
        authority = maps:get(authority, AuthData)
    }.

decode_source_context(TokenSourceContext) ->
    genlib_map:compact(#{
        request_origin => TokenSourceContext#token_keeper_TokenSourceContext.request_origin
    }).

%%

handle_beat(Op, Event, State) ->
    tk_pulse:handle_beat({encode_pulse_op(Op), Event}, State#state.pulse_metadata, State#state.pulse).

save_pulse_metadata(Metadata, State = #state{pulse_metadata = PulseMetadata}) ->
    State#state{pulse_metadata = maps:merge(Metadata, PulseMetadata)}.

encode_pulse_op('GetByToken') ->
    get_by_token.
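
The handler is meant to be mounted on a woody server under the path configured in `config/sys.config`. The supervisor wiring is not part of this commit, so the child spec below is a sketch only; it assumes the thrift service definition is `{tk_token_keeper_thrift, 'TokenKeeper'}` from token_keeper_proto and reuses the `AuditPulse` from the audit log sketch above:

%% Illustrative only: mounting tk_handler on a woody server, using the path
%% and port from `config/sys.config`. The event handler choice mirrors the
%% `woody_event_handlers` entry there.
ChildSpec = woody_server:child_spec(
    token_keeper_thrift_service_sup,
    #{
        ip => {0, 0, 0, 0, 0, 0, 0, 0},
        port => 8022,
        event_handler => scoper_woody_event_handler,
        handlers => [
            {<<"/v1/token-keeper">>,
                {{tk_token_keeper_thrift, 'TokenKeeper'}, {tk_handler, #{pulse => AuditPulse}}}}
        ]
    }
).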

src/tk_pulse.erl (new file, 43 lines)
@@ -0,0 +1,43 @@
-module(tk_pulse).

-type beat() ::
    {get_by_token,
        started
        | succeeded
        | {failed, _Reason}}.

-type metadata() :: #{
    token => tk_token_jwt:t(),
    source => token_keeper:token_source(),
    woody_ctx => woody_context:ctx()
}.

-export_type([beat/0]).
-export_type([metadata/0]).

%%

-type handler() :: {module(), _Opts}.
-type handler(St) :: {module(), St}.
-type handlers() :: [handler()].
-type handlers(St) :: [handler(St)].
-export_type([handler/0]).
-export_type([handler/1]).
-export_type([handlers/0]).
-export_type([handlers/1]).

-callback handle_beat(beat(), metadata(), _Opts) -> ok.

-export([handle_beat/3]).

-spec handle_beat(beat(), metadata(), handlers()) -> ok.
handle_beat(Beat, Metadata, [{Mod, Opts} | Rest]) ->
    % NOTE
    % Generally, we don't want a fault to propagate from an event handler into the business
    % logic and cause it to fail. However, here we deem it required, because we actually need
    % this kind of behaviour when doing audit logging: inability to append to the audit log
    % should cause the whole operation to fail.
    _ = Mod:handle_beat(Beat, Metadata, Opts),
    handle_beat(Beat, Metadata, Rest);
handle_beat(_Beat, _Metadata, []) ->
    ok.
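
Any module implementing the `handle_beat/3` callback can be added to the handlers list next to the audit log. A minimal sketch, with a hypothetical module name:

%% Minimal sketch of a custom tk_pulse handler; the module name is
%% hypothetical. It would be registered as {tk_pulse_debug, []} in the
%% handlers list passed to tk_handler.
-module(tk_pulse_debug).

-behaviour(tk_pulse).
-export([handle_beat/3]).

-spec handle_beat(tk_pulse:beat(), tk_pulse:metadata(), _Opts) -> ok.
handle_beat(Beat, Metadata, _Opts) ->
    %% Note: a crash here propagates into the operation by design (see above).
    logger:debug("tk beat: ~p, metadata: ~p", [Beat, Metadata]),
    ok.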

src/tk_token_jwt.erl (new file, 430 lines)
@@ -0,0 +1,430 @@
|
||||
-module(tk_token_jwt).
|
||||
|
||||
-include_lib("jose/include/jose_jwk.hrl").
|
||||
-include_lib("jose/include/jose_jwt.hrl").
|
||||
|
||||
%% API
|
||||
|
||||
-export([issue/3]).
|
||||
-export([verify/1]).
|
||||
|
||||
-export([get_token_id/1]).
|
||||
-export([get_subject_id/1]).
|
||||
-export([get_subject_email/1]).
|
||||
-export([get_expires_at/1]).
|
||||
-export([get_claims/1]).
|
||||
-export([get_claim/2]).
|
||||
-export([get_claim/3]).
|
||||
-export([get_metadata/1]).
|
||||
|
||||
-export([create_claims/2]).
|
||||
-export([set_subject_email/2]).
|
||||
|
||||
%% Supervisor callbacks
|
||||
|
||||
-export([init/1]).
|
||||
-export([child_spec/1]).
|
||||
|
||||
%% API types
|
||||
|
||||
-type t() :: {token_id(), claims(), metadata()}.
|
||||
-type claim() :: expiration() | term().
|
||||
-type claims() :: #{binary() => claim()}.
|
||||
-type token() :: binary().
|
||||
-type expiration() :: unlimited | non_neg_integer().
|
||||
-type options() :: #{
|
||||
%% The set of keys used to sign issued tokens and verify signatures on such
|
||||
%% tokens.
|
||||
keyset => keyset()
|
||||
}.
|
||||
|
||||
%@TODO Separate classification parameters from jwt decoder logic
|
||||
-type auth_method() ::
|
||||
user_session_token | api_key_token | detect.
|
||||
-type metadata() :: #{
|
||||
authority := binary(),
|
||||
metadata_ns => binary(),
|
||||
auth_method => auth_method(),
|
||||
user_realm => realm()
|
||||
}.
|
||||
|
||||
-export_type([t/0]).
|
||||
-export_type([claim/0]).
|
||||
-export_type([claims/0]).
|
||||
-export_type([token/0]).
|
||||
-export_type([expiration/0]).
|
||||
-export_type([metadata/0]).
|
||||
-export_type([auth_method/0]).
|
||||
-export_type([options/0]).
|
||||
|
||||
%% Internal types
|
||||
|
||||
-type keyname() :: term().
|
||||
-type kid() :: binary().
|
||||
-type key() :: #jose_jwk{}.
|
||||
|
||||
-type subject_id() :: binary().
|
||||
-type token_id() :: binary().
|
||||
|
||||
-type keyset() :: #{
|
||||
keyname() => key_opts()
|
||||
}.
|
||||
|
||||
-type key_opts() :: #{
|
||||
source := keysource(),
|
||||
metadata => metadata()
|
||||
}.
|
||||
|
||||
-type keysource() ::
|
||||
{pem_file, file:filename()}.
|
||||
|
||||
-type realm() :: binary().

%%

-define(CLAIM_TOKEN_ID, <<"jti">>).
-define(CLAIM_SUBJECT_ID, <<"sub">>).
-define(CLAIM_SUBJECT_EMAIL, <<"email">>).
-define(CLAIM_EXPIRES_AT, <<"exp">>).

%%
%% API functions
%%

-spec get_token_id(t()) -> token_id().
get_token_id({TokenId, _Claims, _Metadata}) ->
    TokenId.

-spec get_subject_id(t()) -> subject_id() | undefined.
get_subject_id(T) ->
    get_claim(?CLAIM_SUBJECT_ID, T, undefined).

-spec get_subject_email(t()) -> binary() | undefined.
get_subject_email(T) ->
    get_claim(?CLAIM_SUBJECT_EMAIL, T, undefined).

-spec get_expires_at(t()) -> expiration().
get_expires_at({_TokenId, Claims, _Metadata}) ->
    case maps:get(?CLAIM_EXPIRES_AT, Claims) of
        0 -> unlimited;
        V -> V
    end.

-spec get_claims(t()) -> claims().
get_claims({_TokenId, Claims, _Metadata}) ->
    Claims.

-spec get_claim(binary(), t()) -> claim().
get_claim(ClaimName, {_TokenId, Claims, _Metadata}) ->
    maps:get(ClaimName, Claims).

-spec get_claim(binary(), t(), claim()) -> claim().
get_claim(ClaimName, {_TokenId, Claims, _Metadata}, Default) ->
    maps:get(ClaimName, Claims, Default).

-spec get_metadata(t()) -> metadata().
get_metadata({_TokenId, _Claims, Metadata}) ->
    Metadata.

-spec create_claims(claims(), expiration()) -> claims().
create_claims(Claims, Expiration) ->
    Claims#{?CLAIM_EXPIRES_AT => Expiration}.

-spec set_subject_email(binary(), claims()) -> claims().
set_subject_email(SubjectEmail, Claims) ->
    false = maps:is_key(?CLAIM_SUBJECT_EMAIL, Claims),
    Claims#{?CLAIM_SUBJECT_EMAIL => SubjectEmail}.

%%

-spec issue(token_id(), claims(), keyname()) ->
    {ok, token()}
    | {error, nonexistent_key}
    | {error, {invalid_signee, Reason :: atom()}}.
issue(JTI, Claims, Signer) ->
    case try_get_key_for_sign(Signer) of
        {ok, Key} ->
            FinalClaims = construct_final_claims(Claims, JTI),
            sign(Key, FinalClaims);
        {error, Error} ->
            {error, Error}
    end.
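
%% NOTE (editorial sketch, not part of the original module): a typical
%% issue/verify round trip, assuming a signing-capable key registered under the
%% name `test`; the identifiers are illustrative.
%%
%%   Claims = tk_token_jwt:create_claims(#{<<"sub">> => <<"PARTY">>}, unlimited),
%%   {ok, Token} = tk_token_jwt:issue(<<"JTI">>, Claims, test),
%%   {ok, {<<"JTI">>, _Claims, _Metadata}} = tk_token_jwt:verify(Token).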

try_get_key_for_sign(Keyname) ->
    case get_key_by_name(Keyname) of
        #{can_sign := true} = Key ->
            {ok, Key};
        #{} ->
            {error, {invalid_signee, signing_not_allowed}};
        undefined ->
            {error, nonexistent_key}
    end.

construct_final_claims(Claims, JTI) ->
    Token0 = #{?CLAIM_TOKEN_ID => JTI},
    EncodedClaims = maps:map(fun encode_claim/2, Claims),
    maps:merge(EncodedClaims, Token0).

encode_claim(?CLAIM_EXPIRES_AT, Expiration) ->
    mk_expires_at(Expiration);
encode_claim(_, Value) ->
    Value.

mk_expires_at(unlimited) ->
    0;
mk_expires_at(Dl) ->
    Dl.

sign(#{kid := KID, jwk := JWK, signer := #{} = JWS}, Claims) ->
    JWT = jose_jwt:sign(JWK, JWS#{<<"kid">> => KID}, Claims),
    {_Modules, Token} = jose_jws:compact(JWT),
    {ok, Token}.

%%

-spec verify(token()) ->
    {ok, t()}
    | {error,
        {invalid_token,
            badarg
            | {badarg, term()}
            | {missing, atom()}}
        | {nonexistent_key, kid()}
        | {invalid_operation, term()}
        | invalid_signature}.
verify(Token) ->
    try
        {_, ExpandedToken} = jose_jws:expand(Token),
        #{<<"protected">> := ProtectedHeader} = ExpandedToken,
        Header = base64url_to_map(ProtectedHeader),
        Alg = get_alg(Header),
        KID = get_kid(Header),
        verify(KID, Alg, ExpandedToken)
    catch
        %% from get_alg and get_kid
        throw:Reason ->
            {error, Reason};
        %% TODO we're losing error information here, e.g. stacktrace
        error:Reason ->
            {error, {invalid_token, Reason}}
    end.

base64url_to_map(Base64) when is_binary(Base64) ->
    {ok, Json} = jose_base64url:decode(Base64),
    jsx:decode(Json, [return_maps]).

verify(KID, Alg, ExpandedToken) ->
    case get_key_by_kid(KID) of
        #{jwk := JWK, verifier := Algs, metadata := Metadata} ->
            _ = lists:member(Alg, Algs) orelse throw({invalid_operation, Alg}),
            verify_with_key(JWK, ExpandedToken, Metadata);
        undefined ->
            {error, {nonexistent_key, KID}}
    end.

verify_with_key(JWK, ExpandedToken, Metadata) ->
    case jose_jwt:verify(JWK, ExpandedToken) of
        {true, #jose_jwt{fields = Claims}, _JWS} ->
            _ = validate_claims(Claims),
            get_result(Claims, Metadata);
        {false, _JWT, _JWS} ->
            {error, invalid_signature}
    end.

validate_claims(Claims) ->
    validate_claims(Claims, get_validators()).

validate_claims(Claims, [{Name, Claim, Validator} | Rest]) ->
    _ = Validator(Name, maps:get(Claim, Claims, undefined)),
    validate_claims(Claims, Rest);
validate_claims(Claims, []) ->
    Claims.

get_result(#{?CLAIM_TOKEN_ID := TokenID} = Claims, Metadata) ->
    {ok, {TokenID, maps:without([?CLAIM_TOKEN_ID], Claims), Metadata}}.

get_kid(#{<<"kid">> := KID}) when is_binary(KID) ->
    KID;
get_kid(#{}) ->
    throw({invalid_token, {missing, kid}}).

get_alg(#{<<"alg">> := Alg}) when is_binary(Alg) ->
    Alg;
get_alg(#{}) ->
    throw({invalid_token, {missing, alg}}).

get_validators() ->
    [
        {token_id, ?CLAIM_TOKEN_ID, fun check_presence/2},
        {expires_at, ?CLAIM_EXPIRES_AT, fun check_presence/2}
    ].

check_presence(_, V) when is_binary(V) ->
    V;
check_presence(_, V) when is_integer(V) ->
    V;
check_presence(C, undefined) ->
    throw({invalid_token, {missing, C}}).

%%
%% Supervisor callbacks
%%

-spec child_spec(options()) -> supervisor:child_spec() | no_return().
child_spec(Options) ->
    #{
        id => ?MODULE,
        start => {supervisor, start_link, [?MODULE, parse_options(Options)]},
        type => supervisor
    }.

parse_options(Options) ->
    Keyset = maps:get(keyset, Options, #{}),
    _ = is_map(Keyset) orelse exit({invalid_option, keyset, Keyset}),
    _ = genlib_map:foreach(
        fun(KeyName, KeyOpts = #{source := Source}) ->
            Metadata = maps:get(metadata, KeyOpts),
            Authority = maps:get(authority, Metadata),
            AuthMethod = maps:get(auth_method, Metadata, undefined),
            UserRealm = maps:get(user_realm, Metadata, <<>>),
            MetadataNS = maps:get(metadata_ns, Metadata, <<>>),
            _ =
                is_keysource(Source) orelse
                    exit({invalid_source, KeyName, Source}),
            _ =
                is_auth_method(AuthMethod) orelse
                    exit({invalid_auth_method, KeyName, AuthMethod}),
            %% NOTE: the next two exit reasons originally reported AuthMethod,
            %% an apparent copy-paste slip; they now carry the offending value.
            _ =
                is_binary(UserRealm) orelse
                    exit({invalid_user_realm, KeyName, UserRealm}),
            _ =
                is_binary(Authority) orelse
                    exit({invalid_authority, KeyName, Authority}),
            _ =
                is_binary(MetadataNS) orelse
                    exit({invalid_metadata_ns, KeyName, MetadataNS})
        end,
        Keyset
    ),
    Keyset.
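
%% NOTE (editorial sketch, not part of the original module): `parse_options/1`
%% is evaluated while the child spec is being built, so malformed configuration
%% fails fast. For instance, an unsupported keysource (hypothetical values):
%%
%%   tk_token_jwt:child_spec(#{keyset => #{bad => #{
%%       source => {vault, <<"secret/key">>},
%%       metadata => #{authority => <<"com.rbkmoney.keycloak">>}
%%   }}})
%%
%% exits with {invalid_source, bad, {vault, <<"secret/key">>}}.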

is_keysource({pem_file, Fn}) ->
    is_list(Fn) orelse is_binary(Fn);
is_keysource(_) ->
    false.

is_auth_method(user_session_token) ->
    true;
is_auth_method(api_key_token) ->
    true;
is_auth_method(detect) ->
    true;
is_auth_method(undefined) ->
    true;
is_auth_method(_) ->
    false.

%%

-spec init(keyset()) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init(Keyset) ->
    ok = create_table(),
    _ = maps:map(fun ensure_store_key/2, Keyset),
    {ok, {#{}, []}}.

ensure_store_key(KeyName, KeyOpts) ->
    Source = maps:get(source, KeyOpts),
    Metadata = maps:get(metadata, KeyOpts, #{}),
    case store_key(KeyName, Source, Metadata) of
        ok ->
            ok;
        {error, Reason} ->
            exit({import_error, KeyName, Source, Reason})
    end.

-spec store_key(keyname(), {pem_file, file:filename()}, metadata()) -> ok | {error, file:posix() | {unknown_key, _}}.
store_key(Keyname, {pem_file, Filename}, Metadata) ->
    store_key(Keyname, {pem_file, Filename}, Metadata, #{
        kid => fun derive_kid_from_public_key_pem_entry/1
    }).

derive_kid_from_public_key_pem_entry(JWK) ->
    JWKPublic = jose_jwk:to_public(JWK),
    {_Module, PublicKey} = JWKPublic#jose_jwk.kty,
    {_PemEntry, Data, _} = public_key:pem_entry_encode('SubjectPublicKeyInfo', PublicKey),
    jose_base64url:encode(crypto:hash(sha256, Data)).

-type store_opts() :: #{
    kid => fun((key()) -> kid())
}.

-spec store_key(keyname(), {pem_file, file:filename()}, metadata(), store_opts()) ->
    ok | {error, file:posix() | {unknown_key, _}}.
store_key(Keyname, {pem_file, Filename}, Metadata, Opts) ->
    case jose_jwk:from_pem_file(Filename) of
        JWK = #jose_jwk{} ->
            Key = construct_key(derive_kid(JWK, Opts), JWK),
            ok = insert_key(Keyname, Key#{metadata => Metadata});
        Error = {error, _} ->
            Error
    end.

derive_kid(JWK, #{kid := DeriveFun}) when is_function(DeriveFun, 1) ->
    DeriveFun(JWK).

construct_key(KID, JWK) ->
    Signer =
        try
            jose_jwk:signer(JWK)
        catch
            error:_ -> undefined
        end,
    Verifier =
        try
            jose_jwk:verifier(JWK)
        catch
            error:_ -> undefined
        end,
    #{
        jwk => JWK,
        kid => KID,
        signer => Signer,
        can_sign => Signer /= undefined,
        verifier => Verifier,
        can_verify => Verifier /= undefined
    }.

insert_key(Keyname, KeyInfo = #{kid := KID}) ->
    insert_values(#{
        {keyname, Keyname} => KeyInfo,
        {kid, KID} => KeyInfo
    }).

%%
%% Internal functions
%%

get_key_by_name(Keyname) ->
    lookup_value({keyname, Keyname}).

get_key_by_kid(KID) ->
    lookup_value({kid, KID}).

-define(TABLE, ?MODULE).

create_table() ->
    _ = ets:new(?TABLE, [set, public, named_table, {read_concurrency, true}]),
    ok.

insert_values(Values) ->
    true = ets:insert(?TABLE, maps:to_list(Values)),
    ok.

lookup_value(Key) ->
    case ets:lookup(?TABLE, Key) of
        [{Key, Value}] ->
            Value;
        [] ->
            undefined
    end.
21
src/token_keeper.app.src
Normal file
@ -0,0 +1,21 @@
{application, token_keeper, [
    {description, "Keeps tokens"},
    {vsn, "0.1.0"},
    {registered, []},
    {mod, {token_keeper, []}},
    {applications, [
        kernel,
        stdlib,
        genlib,
        woody,
        jose,
        token_keeper_proto,
        bouncer_client,
        how_are_you,
        erl_health
    ]},
    {env, []},
    {modules, []},
    {licenses, ["Apache 2.0"]},
    {links, []}
]}.
149
src/token_keeper.erl
Normal file
@ -0,0 +1,149 @@
-module(token_keeper).

%% Application callbacks
-behaviour(application).

-export([start/2]).
-export([prep_stop/1]).
-export([stop/1]).

%% Supervisor callbacks
-behaviour(supervisor).

-export([start_link/0]).
-export([init/1]).

%% API Types

-type token() :: binary().
-type token_type() :: api_key_token | user_session_token.
-type token_source() :: #{
    request_origin => binary()
}.

-export_type([token/0]).
-export_type([token_type/0]).
-export_type([token_source/0]).

%%

-define(SERVER, ?MODULE).

%%
%% Application callbacks
%%

-spec start(normal, any()) -> {ok, pid()} | {error, any()}.
start(_StartType, _StartArgs) ->
    token_keeper:start_link().

-spec prep_stop(State) -> State.
prep_stop(State) ->
    % NOTE
    % We have to do this in the magic `prep_stop/1` callback because, for some inexplicable
    % reason, the usual `stop/1` callback doesn't get called in common_test runs.
    ok = tk_audit_log:stop(genlib_app:env(?MODULE, audit, #{})),
    State.

-spec stop(any()) -> ok.
stop(_State) ->
    ok.

%%
%% Supervisor callbacks
%%

-spec start_link() -> genlib_gen:start_ret().
start_link() ->
    supervisor:start_link({local, ?SERVER}, ?MODULE, []).

-spec init(Args :: term()) -> genlib_gen:supervisor_ret().
init([]) ->
    AuditPulse = tk_audit_log:init(genlib_app:env(?MODULE, audit, #{})),
    ServiceOpts = genlib_app:env(?MODULE, services, #{}),
    EventHandlers = genlib_app:env(?MODULE, woody_event_handlers, [woody_event_handler_default]),
    Healthcheck = enable_health_logging(genlib_app:env(?MODULE, health_check, #{})),
    HandlerChildSpec = woody_server:child_spec(
        ?MODULE,
        #{
            ip => get_ip_address(),
            port => get_port(),
            protocol_opts => get_protocol_opts(),
            transport_opts => get_transport_opts(),
            shutdown_timeout => get_shutdown_timeout(),
            event_handler => EventHandlers,
            handlers => get_handler_specs(ServiceOpts, AuditPulse),
            additional_routes => [erl_health_handle:get_route(Healthcheck)]
        }
    ),
    TokensOpts = genlib_app:env(?MODULE, tokens, #{}),
    TokensChildSpecs = get_tokens_specs(TokensOpts),
    {ok,
        {
            #{strategy => one_for_all, intensity => 6, period => 30},
            [HandlerChildSpec | TokensChildSpecs]
        }}.
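
%% NOTE (editorial sketch, not part of the original module): the environment read
%% above could be supplied via sys.config along these lines; the path and values
%% are illustrative only.
%%
%%   {token_keeper, [
%%       {ip, "::"},
%%       {port, 8022},
%%       {services, #{token_keeper => #{path => <<"/v1/token-keeper">>}}},
%%       {tokens, #{jwt => #{keyset => #{test => #{
%%           source => {pem_file, "/var/lib/token_keeper/private.pem"},
%%           metadata => #{authority => <<"com.rbkmoney.keycloak">>}
%%       }}}}}
%%   ]}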

-spec get_ip_address() -> inet:ip_address().
get_ip_address() ->
    {ok, Address} = inet:parse_address(genlib_app:env(?MODULE, ip, "::")),
    Address.

-spec get_port() -> inet:port_number().
get_port() ->
    genlib_app:env(?MODULE, port, 8022).

-spec get_protocol_opts() -> woody_server_thrift_http_handler:protocol_opts().
get_protocol_opts() ->
    genlib_app:env(?MODULE, protocol_opts, #{}).

-spec get_transport_opts() -> woody_server_thrift_http_handler:transport_opts().
get_transport_opts() ->
    genlib_app:env(?MODULE, transport_opts, #{}).

-spec get_shutdown_timeout() -> timeout().
get_shutdown_timeout() ->
    genlib_app:env(?MODULE, shutdown_timeout, 0).

-spec get_handler_specs(map(), tk_pulse:handlers()) -> [woody:http_handler(woody:th_handler())].
get_handler_specs(ServiceOpts, AuditPulse) ->
    TokenKeeperService = maps:get(token_keeper, ServiceOpts, #{}),
    TokenKeeperPulse = maps:get(pulse, TokenKeeperService, []),
    TokenKeeperOpts = #{pulse => AuditPulse ++ TokenKeeperPulse},
    [
        {
            maps:get(path, TokenKeeperService, <<"/v1/token-keeper">>),
            {{tk_token_keeper_thrift, 'TokenKeeper'}, {tk_handler, TokenKeeperOpts}}
        }
    ].

%%

-spec enable_health_logging(erl_health:check()) -> erl_health:check().
enable_health_logging(Check) ->
    EvHandler = {erl_health_event_handler, []},
    maps:map(
        fun(_, Runner) -> #{runner => Runner, event_handler => EvHandler} end,
        Check
    ).

%%

get_tokens_specs(TokensOpts) ->
    maps:fold(
        fun(K, V, Acc) ->
            [get_token_spec(K, V) | Acc]
        end,
        [],
        TokensOpts
    ).

get_token_spec(jwt, JWTOptions) ->
    tk_token_jwt:child_spec(JWTOptions).
366
test/tk_tests_SUITE.erl
Normal file
@ -0,0 +1,366 @@
-module(tk_tests_SUITE).

-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-include_lib("jose/include/jose_jwk.hrl").

-include_lib("token_keeper_proto/include/tk_token_keeper_thrift.hrl").
-include_lib("token_keeper_proto/include/tk_context_thrift.hrl").

-include_lib("bouncer_proto/include/bouncer_context_v1_thrift.hrl").

-export([all/0]).
-export([groups/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_group/2]).
-export([end_per_group/2]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).

-export([detect_api_key_test/1]).
-export([detect_user_session_token_test/1]).
-export([detect_dummy_token_test/1]).
-export([no_token_metadata_test/1]).
-export([bouncer_context_from_claims_test/1]).

-type config() :: ct_helper:config().
-type group_name() :: atom().
-type test_case_name() :: atom().

-define(CONFIG(Key, C), (element(2, lists:keyfind(Key, 1, C)))).

-define(TK_AUTHORITY_TOKEN_KEEPER, <<"com.rbkmoney.token-keeper">>).
-define(TK_AUTHORITY_KEYCLOAK, <<"com.rbkmoney.keycloak">>).

-define(PARTY_METADATA(Authority, SubjectID), #{Authority := #{<<"party_id">> := SubjectID}}).

-define(TOKEN_SOURCE_CONTEXT(), ?TOKEN_SOURCE_CONTEXT(<<"http://spanish.inquisition">>)).
-define(TOKEN_SOURCE_CONTEXT(SourceURL), #token_keeper_TokenSourceContext{request_origin = SourceURL}).

-define(USER_TOKEN_SOURCE, <<"https://dashboard.rbk.money">>).

-define(CTX_ENTITY(ID), #bctx_v1_Entity{id = ID}).

%%

-spec all() -> [atom()].
all() ->
    [
        {group, detect_token_type},
        {group, no_token_metadata}
    ].

-spec groups() -> [{group_name(), list(), [test_case_name()]}].
groups() ->
    [
        {detect_token_type, [parallel], [
            detect_api_key_test,
            detect_user_session_token_test,
            detect_dummy_token_test
        ]},
        {no_token_metadata, [parallel], [
            no_token_metadata_test,
            bouncer_context_from_claims_test
        ]}
    ].

-spec init_per_suite(config()) -> config().
init_per_suite(C) ->
    Apps =
        genlib_app:start_application(woody) ++
            genlib_app:start_application_with(scoper, [
                {storage, scoper_storage_logger}
            ]),
    [{suite_apps, Apps} | C].

-spec end_per_suite(config()) -> ok.
end_per_suite(C) ->
    genlib_app:stop_unload_applications(?CONFIG(suite_apps, C)).

-spec init_per_group(group_name(), config()) -> config().
init_per_group(detect_token_type = Name, C) ->
    start_keeper([
        {tokens, #{
            jwt => #{
                keyset => #{
                    test => #{
                        source => {pem_file, get_keysource("keys/local/private.pem", C)},
                        metadata => #{
                            authority => ?TK_AUTHORITY_KEYCLOAK,
                            metadata_ns => ?TK_AUTHORITY_TOKEN_KEEPER,
                            auth_method => detect,
                            user_realm => <<"external">>
                        }
                    }
                }
            }
        }},
        {user_session_token_origins, [?USER_TOKEN_SOURCE]}
    ]) ++
        [{groupname, Name} | C];
init_per_group(no_token_metadata = Name, C) ->
    start_keeper([
        {tokens, #{
            jwt => #{
                keyset => #{
                    test => #{
                        source => {pem_file, get_keysource("keys/local/private.pem", C)},
                        metadata => #{
                            authority => ?TK_AUTHORITY_KEYCLOAK,
                            metadata_ns => ?TK_AUTHORITY_TOKEN_KEEPER
                        }
                    }
                }
            }
        }}
    ]) ++
        [{groupname, Name} | C];
init_per_group(Name, C) ->
    [{groupname, Name} | C].

-spec end_per_group(group_name(), config()) -> _.
end_per_group(GroupName, C) when
    GroupName =:= detect_token_type;
    GroupName =:= no_token_metadata
->
    ok = stop_keeper(C),
    ok;
end_per_group(_Name, _C) ->
    ok.

-spec init_per_testcase(atom(), config()) -> config().
init_per_testcase(Name, C) ->
    [{testcase, Name} | C].

-spec end_per_testcase(atom(), config()) -> config().
end_per_testcase(_Name, _C) ->
    ok.

start_keeper(Env) ->
    IP = "127.0.0.1",
    Port = 8022,
    Path = <<"/v1/token-keeper">>,
    Apps = genlib_app:start_application_with(
        token_keeper,
        [
            {ip, IP},
            {port, Port},
            {services, #{
                token_keeper => #{
                    path => Path
                }
            }}
        ] ++ Env
    ),
    Services = #{
        token_keeper => mk_url(IP, Port, Path)
    },
    [{group_apps, Apps}, {service_urls, Services}].

mk_url(IP, Port, Path) ->
    iolist_to_binary(["http://", IP, ":", genlib:to_binary(Port), Path]).

stop_keeper(C) ->
    genlib_app:stop_unload_applications(?CONFIG(group_apps, C)).

%%

-spec detect_api_key_test(config()) -> ok.
detect_api_key_test(C) ->
    Client = mk_client(C),
    JTI = unique_id(),
    SubjectID = <<"TEST">>,
    {ok, Token} = issue_token(JTI, #{<<"sub">> => SubjectID}, unlimited),
    AuthData = call_get_by_token(Token, ?TOKEN_SOURCE_CONTEXT(), Client),
    ?assertEqual(undefined, AuthData#token_keeper_AuthData.id),
    ?assertEqual(Token, AuthData#token_keeper_AuthData.token),
    ?assertEqual(active, AuthData#token_keeper_AuthData.status),
    ?assert(assert_context({api_key_token, JTI, SubjectID}, AuthData#token_keeper_AuthData.context)),
    ?assertMatch(?PARTY_METADATA(?TK_AUTHORITY_TOKEN_KEEPER, SubjectID), AuthData#token_keeper_AuthData.metadata),
    ?assertEqual(?TK_AUTHORITY_KEYCLOAK, AuthData#token_keeper_AuthData.authority).

-spec detect_user_session_token_test(config()) -> ok.
detect_user_session_token_test(C) ->
    Client = mk_client(C),
    JTI = unique_id(),
    SubjectID = <<"TEST">>,
    SubjectEmail = <<"test@test.test">>,
    {ok, Token} = issue_token(JTI, #{<<"sub">> => SubjectID, <<"email">> => SubjectEmail}, unlimited),
    AuthData = call_get_by_token(Token, ?TOKEN_SOURCE_CONTEXT(?USER_TOKEN_SOURCE), Client),
    ?assertEqual(undefined, AuthData#token_keeper_AuthData.id),
    ?assertEqual(Token, AuthData#token_keeper_AuthData.token),
    ?assertEqual(active, AuthData#token_keeper_AuthData.status),
    ?assert(
        assert_context(
            {user_session_token, JTI, SubjectID, SubjectEmail, unlimited},
            AuthData#token_keeper_AuthData.context
        )
    ),
    ?assertMatch(#{}, AuthData#token_keeper_AuthData.metadata),
    ?assertEqual(?TK_AUTHORITY_KEYCLOAK, AuthData#token_keeper_AuthData.authority).

-spec detect_dummy_token_test(config()) -> ok.
detect_dummy_token_test(C) ->
    Client = mk_client(C),
    {ok, Token} = issue_dummy_token(C),
    #token_keeper_InvalidToken{} =
        (catch call_get_by_token(Token, ?TOKEN_SOURCE_CONTEXT(), Client)).

-spec no_token_metadata_test(config()) -> ok.
no_token_metadata_test(C) ->
    Client = mk_client(C),
    JTI = unique_id(),
    SubjectID = <<"TEST">>,
    {ok, Token} = issue_token(JTI, #{<<"sub">> => SubjectID}, unlimited),
    #token_keeper_ContextCreationFailed{} =
        (catch call_get_by_token(Token, ?TOKEN_SOURCE_CONTEXT(), Client)).

-spec bouncer_context_from_claims_test(config()) -> ok.
bouncer_context_from_claims_test(C) ->
    Client = mk_client(C),
    JTI = unique_id(),
    SubjectID = <<"TEST">>,
    {ok, Token} = issue_token_with_context(JTI, SubjectID),
    AuthData = call_get_by_token(Token, ?TOKEN_SOURCE_CONTEXT(), Client),
    ?assertEqual(undefined, AuthData#token_keeper_AuthData.id),
    ?assertEqual(Token, AuthData#token_keeper_AuthData.token),
    ?assertEqual(active, AuthData#token_keeper_AuthData.status),
    ?assert(assert_context({api_key_token, JTI, SubjectID}, AuthData#token_keeper_AuthData.context)),
    ?assertMatch(?PARTY_METADATA(?TK_AUTHORITY_TOKEN_KEEPER, SubjectID), AuthData#token_keeper_AuthData.metadata),
    ?assertEqual(?TK_AUTHORITY_KEYCLOAK, AuthData#token_keeper_AuthData.authority).

%%

mk_client(C) ->
    WoodyCtx = woody_context:new(genlib:to_binary(?CONFIG(testcase, C))),
    ServiceURLs = ?CONFIG(service_urls, C),
    {WoodyCtx, ServiceURLs}.

call_get_by_token(Token, TokenSourceContext, Client) ->
    call_token_keeper('GetByToken', {Token, TokenSourceContext}, Client).

call_token_keeper(Operation, Args, Client) ->
    call(token_keeper, Operation, Args, Client).

call(ServiceName, Fn, Args, {WoodyCtx, ServiceURLs}) ->
    Service = get_service_spec(ServiceName),
    Opts = #{
        url => maps:get(ServiceName, ServiceURLs),
        event_handler => scoper_woody_event_handler
    },
    case woody_client:call({Service, Fn, Args}, Opts, WoodyCtx) of
        {ok, Response} ->
            Response;
        {exception, Exception} ->
            throw(Exception)
    end.

get_service_spec(token_keeper) ->
    {tk_token_keeper_thrift, 'TokenKeeper'}.

%%

assert_context(TokenInfo, EncodedContextFragment) ->
    #bctx_v1_ContextFragment{auth = Auth, user = User} = decode_bouncer_fragment(EncodedContextFragment),
    ?assert(assert_auth(TokenInfo, Auth)),
    ?assert(assert_user(TokenInfo, User)),
    true.

assert_auth({api_key_token, JTI, SubjectID}, Auth) ->
    ?assertEqual(<<"ApiKeyToken">>, Auth#bctx_v1_Auth.method),
    ?assertMatch(#bctx_v1_Token{id = JTI}, Auth#bctx_v1_Auth.token),
    ?assertMatch([#bctx_v1_AuthScope{party = ?CTX_ENTITY(SubjectID)}], Auth#bctx_v1_Auth.scope),
    true;
assert_auth({user_session_token, JTI, _SubjectID, _SubjectEmail, Exp}, Auth) ->
    ?assertEqual(<<"SessionToken">>, Auth#bctx_v1_Auth.method),
    ?assertMatch(#bctx_v1_Token{id = JTI}, Auth#bctx_v1_Auth.token),
    ?assertEqual(make_auth_expiration(Exp), Auth#bctx_v1_Auth.expiration),
    true.

assert_user({api_key_token, _, _}, undefined) ->
    true;
assert_user({user_session_token, _JTI, SubjectID, SubjectEmail, _Exp}, User) ->
    ?assertEqual(SubjectID, User#bctx_v1_User.id),
    ?assertEqual(SubjectEmail, User#bctx_v1_User.email),
    ?assertEqual(?CTX_ENTITY(<<"external">>), User#bctx_v1_User.realm),
    true.

%%

make_auth_expiration(Timestamp) when is_integer(Timestamp) ->
    genlib_rfc3339:format(Timestamp, second);
make_auth_expiration(unlimited) ->
    undefined.

%%

issue_token(JTI, Claims0, Expiration) ->
    Claims = tk_token_jwt:create_claims(Claims0, Expiration),
    tk_token_jwt:issue(JTI, Claims, test).

issue_token_with_context(JTI, SubjectID) ->
    Acc0 = bouncer_context_helpers:empty(),
    Acc1 = bouncer_context_helpers:add_auth(
        #{
            method => <<"ApiKeyToken">>,
            token => #{id => JTI},
            scope => [#{party => #{id => SubjectID}}]
        },
        Acc0
    ),
    FragmentContent = encode_context_fragment_content(Acc1),
    issue_token(
        JTI,
        #{
            <<"sub">> => SubjectID,
            <<"bouncer_ctx">> => #{
                <<"ty">> => <<"v1_thrift_binary">>,
                <<"ct">> => base64:encode(FragmentContent)
            }
        },
        unlimited
    ).

issue_dummy_token(Config) ->
    Claims = #{
        <<"jti">> => unique_id(),
        <<"sub">> => <<"TEST">>,
        <<"exp">> => 0
    },
    BadPemFile = get_keysource("keys/local/dummy.pem", Config),
    BadJWK = jose_jwk:from_pem_file(BadPemFile),
    GoodPemFile = get_keysource("keys/local/private.pem", Config),
    GoodJWK = jose_jwk:from_pem_file(GoodPemFile),
    JWKPublic = jose_jwk:to_public(GoodJWK),
    {_Module, PublicKey} = JWKPublic#jose_jwk.kty,
    {_PemEntry, Data, _} = public_key:pem_entry_encode('SubjectPublicKeyInfo', PublicKey),
    KID = jose_base64url:encode(crypto:hash(sha256, Data)),
    JWT = jose_jwt:sign(BadJWK, #{<<"alg">> => <<"RS256">>, <<"kid">> => KID}, Claims),
    {_Modules, Token} = jose_jws:compact(JWT),
    {ok, Token}.

get_keysource(Key, Config) ->
    filename:join(?config(data_dir, Config), Key).

unique_id() ->
    <<ID:64>> = snowflake:new(),
    genlib_format:format_int_base(ID, 62).

decode_bouncer_fragment(#bctx_ContextFragment{type = v1_thrift_binary, content = Content}) ->
    Type = {struct, struct, {bouncer_context_v1_thrift, 'ContextFragment'}},
    Codec = thrift_strict_binary_codec:new(Content),
    {ok, Fragment, _} = thrift_strict_binary_codec:read(Codec, Type),
    Fragment.

encode_context_fragment_content(ContextFragment) ->
    Type = {struct, struct, {bouncer_context_v1_thrift, 'ContextFragment'}},
    Codec = thrift_strict_binary_codec:new(),
    case thrift_strict_binary_codec:write(Codec, Type, ContextFragment) of
        {ok, Codec1} ->
            thrift_strict_binary_codec:close(Codec1)
    end.
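
%% NOTE (editorial sketch, not part of the original suite): the two helpers above
%% are inverses over the thrift strict binary encoding, i.e. for a fragment CF
%% built with bouncer_context_helpers:
%%
%%   Encoded = encode_context_fragment_content(CF),
%%   CF = decode_bouncer_fragment(#bctx_ContextFragment{
%%       type = v1_thrift_binary,
%%       content = Encoded
%%   })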
13
test/tk_tests_SUITE_data/keys/local/dummy.pem
Normal file
@ -0,0 +1,13 @@
-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQCqGKukO1De7zhZj6+H0qtjTkVxwTCpvKe4eCZ0FPqri0cb2JZfXJ/DgYSF6vUp
wmJG8wVQZKjeGcjDOL5UlsuusFncCzWBQ7RKNUSesmQRMSGkVb1/3j+skZ6UtW+5u09lHNsj6tQ5
1s1SPrCBkedbNf0Tp0GbMJDyR4e9T04ZZwIDAQABAoGAFijko56+qGyN8M0RVyaRAXz++xTqHBLh
3tx4VgMtrQ+WEgCjhoTwo23KMBAuJGSYnRmoBZM3lMfTKevIkAidPExvYCdm5dYq3XToLkkLv5L2
pIIVOFMDG+KESnAFV7l2c+cnzRMW0+b6f8mR1CJzZuxVLL6Q02fvLi55/mbSYxECQQDeAw6fiIQX
GukBI4eMZZt4nscy2o12KyYner3VpoeE+Np2q+Z3pvAMd/aNzQ/W9WaI+NRfcxUJrmfPwIGm63il
AkEAxCL5HQb2bQr4ByorcMWm/hEP2MZzROV73yF41hPsRC9m66KrheO9HPTJuo3/9s5p+sqGxOlF
L0NDt4SkosjgGwJAFklyR1uZ/wPJjj611cdBcztlPdqoxssQGnh85BzCj/u3WqBpE2vjvyyvyI5k
X6zk7S0ljKtt2jny2+00VsBerQJBAJGC1Mg5Oydo5NwD6BiROrPxGo2bpTbu/fhrT8ebHkTz2epl
U9VQQSQzY1oZMVX8i1m5WUTLPz2yLJIBQVdXqhMCQBGoiuSoSjafUhV7i1cEGpb88h5NBYZzWXGZ
37sJ5QsW+sJyoNde3xH8vdXhzU7eT82D6X/scw9RZz+/6rCJ4p0=
-----END RSA PRIVATE KEY-----
9
test/tk_tests_SUITE_data/keys/local/private.pem
Normal file
@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOwIBAAJBAK9fx7qOJT7Aoseu7KKgaLagBh3wvDzg7F/ZMtGbPFikJnnvRWvF
B5oEGbMPblvtF0/fjqfu+eqjP3Z1tUSn7TkCAwEAAQJABUY5KIgr4JZEjwLYxQ9T
9uIbLP1Xe/E7yqoqmBk2GGhSrPY0OeRkYnUVLcP96UPQhF63iuG8VF6uZ7oAPsq+
gQIhANZy3jSCzPjXYHRU1kRqQzpt2S+OqoEiqQ6YG1HrC/VxAiEA0Vq6JlQK2tOX
37SS00dK0Qog4Qi8dN73GliFQNP18EkCIQC4epSA48zkfJMzQBAbRraSuxDNApPX
BzQbo+pMrEDbYQIgY4AncQgIkLB4Qk5kah48JNYXglzQlQtTjiX8Ty9ueGECIQCM
GD3UbQKiA0gf5plBA24I4wFVKxxa4wXbW/7SfP6XmQ==
-----END RSA PRIVATE KEY-----
4
test/tk_tests_SUITE_data/keys/local/public.pem
Normal file
@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAK9fx7qOJT7Aoseu7KKgaLagBh3wvDzg
7F/ZMtGbPFikJnnvRWvFB5oEGbMPblvtF0/fjqfu+eqjP3Z1tUSn7TkCAwEAAQ==
-----END PUBLIC KEY-----
116
test/tk_token_jwt_tests_SUITE.erl
Normal file
@ -0,0 +1,116 @@
-module(tk_token_jwt_tests_SUITE).

-include_lib("stdlib/include/assert.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("jose/include/jose_jwk.hrl").

-export([all/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).

-export([
    verify_test/1,
    bad_token_test/1,
    bad_signee_test/1
]).

-type test_case_name() :: atom().
-type config() :: [{atom(), any()}].

-spec all() -> [test_case_name()].
all() ->
    [
        verify_test,
        bad_token_test,
        bad_signee_test
    ].

-spec init_per_suite(config()) -> config().
init_per_suite(Config) ->
    Apps =
        genlib_app:start_application(woody) ++
            genlib_app:start_application_with(scoper, [
                {storage, scoper_storage_logger}
            ]) ++
            genlib_app:start_application_with(
                token_keeper,
                [
                    {ip, "127.0.0.1"},
                    {port, 8022},
                    {services, #{
                        token_keeper => #{
                            path => <<"/v1/token-keeper">>
                        }
                    }},
                    {tokens, #{
                        jwt => #{
                            keyset => #{
                                test => #{
                                    source => {pem_file, get_keysource("keys/local/private.pem", Config)},
                                    metadata => #{
                                        authority => <<"TEST">>,
                                        auth_method => user_session_token,
                                        user_realm => <<"external">>
                                    }
                                }
                            }
                        }
                    }}
                ]
            ),
    [{apps, Apps}] ++ Config.

-spec end_per_suite(config()) -> _.
end_per_suite(Config) ->
    Config.

%%

-spec verify_test(config()) -> _.
verify_test(_) ->
    JTI = unique_id(),
    PartyID = <<"TEST">>,
    {ok, Token} = issue_token(JTI, #{<<"sub">> => PartyID, <<"TEST">> => <<"TEST">>}, unlimited),
    {ok, {JTI, #{<<"sub">> := PartyID, <<"TEST">> := <<"TEST">>}, #{}}} = tk_token_jwt:verify(Token).

-spec bad_token_test(config()) -> _.
bad_token_test(Config) ->
    {ok, Token} = issue_dummy_token(Config),
    {error, invalid_signature} = tk_token_jwt:verify(Token).

-spec bad_signee_test(config()) -> _.
bad_signee_test(_) ->
    Claims = tk_token_jwt:create_claims(#{}, unlimited),
    {error, nonexistent_key} =
        tk_token_jwt:issue(unique_id(), Claims, random).

%%

issue_token(JTI, Claims0, Expiration) ->
    Claims = tk_token_jwt:create_claims(Claims0, Expiration),
    tk_token_jwt:issue(JTI, Claims, test).

issue_dummy_token(Config) ->
    Claims = #{
        <<"jti">> => unique_id(),
        <<"sub">> => <<"TEST">>,
        <<"exp">> => 0
    },
    BadPemFile = get_keysource("keys/local/dummy.pem", Config),
    BadJWK = jose_jwk:from_pem_file(BadPemFile),
    GoodPemFile = get_keysource("keys/local/private.pem", Config),
    GoodJWK = jose_jwk:from_pem_file(GoodPemFile),
    JWKPublic = jose_jwk:to_public(GoodJWK),
    {_Module, PublicKey} = JWKPublic#jose_jwk.kty,
    {_PemEntry, Data, _} = public_key:pem_entry_encode('SubjectPublicKeyInfo', PublicKey),
    KID = jose_base64url:encode(crypto:hash(sha256, Data)),
    JWT = jose_jwt:sign(BadJWK, #{<<"alg">> => <<"RS256">>, <<"kid">> => KID}, Claims),
    {_Modules, Token} = jose_jws:compact(JWT),
    {ok, Token}.

get_keysource(Key, Config) ->
    filename:join(?config(data_dir, Config), Key).

unique_id() ->
    <<ID:64>> = snowflake:new(),
    genlib_format:format_int_base(ID, 62).
13
test/tk_token_jwt_tests_SUITE_data/keys/local/dummy.pem
Normal file
@ -0,0 +1,13 @@
-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQCqGKukO1De7zhZj6+H0qtjTkVxwTCpvKe4eCZ0FPqri0cb2JZfXJ/DgYSF6vUp
wmJG8wVQZKjeGcjDOL5UlsuusFncCzWBQ7RKNUSesmQRMSGkVb1/3j+skZ6UtW+5u09lHNsj6tQ5
1s1SPrCBkedbNf0Tp0GbMJDyR4e9T04ZZwIDAQABAoGAFijko56+qGyN8M0RVyaRAXz++xTqHBLh
3tx4VgMtrQ+WEgCjhoTwo23KMBAuJGSYnRmoBZM3lMfTKevIkAidPExvYCdm5dYq3XToLkkLv5L2
pIIVOFMDG+KESnAFV7l2c+cnzRMW0+b6f8mR1CJzZuxVLL6Q02fvLi55/mbSYxECQQDeAw6fiIQX
GukBI4eMZZt4nscy2o12KyYner3VpoeE+Np2q+Z3pvAMd/aNzQ/W9WaI+NRfcxUJrmfPwIGm63il
AkEAxCL5HQb2bQr4ByorcMWm/hEP2MZzROV73yF41hPsRC9m66KrheO9HPTJuo3/9s5p+sqGxOlF
L0NDt4SkosjgGwJAFklyR1uZ/wPJjj611cdBcztlPdqoxssQGnh85BzCj/u3WqBpE2vjvyyvyI5k
X6zk7S0ljKtt2jny2+00VsBerQJBAJGC1Mg5Oydo5NwD6BiROrPxGo2bpTbu/fhrT8ebHkTz2epl
U9VQQSQzY1oZMVX8i1m5WUTLPz2yLJIBQVdXqhMCQBGoiuSoSjafUhV7i1cEGpb88h5NBYZzWXGZ
37sJ5QsW+sJyoNde3xH8vdXhzU7eT82D6X/scw9RZz+/6rCJ4p0=
-----END RSA PRIVATE KEY-----
9
test/tk_token_jwt_tests_SUITE_data/keys/local/private.pem
Normal file
@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOwIBAAJBAK9fx7qOJT7Aoseu7KKgaLagBh3wvDzg7F/ZMtGbPFikJnnvRWvF
B5oEGbMPblvtF0/fjqfu+eqjP3Z1tUSn7TkCAwEAAQJABUY5KIgr4JZEjwLYxQ9T
9uIbLP1Xe/E7yqoqmBk2GGhSrPY0OeRkYnUVLcP96UPQhF63iuG8VF6uZ7oAPsq+
gQIhANZy3jSCzPjXYHRU1kRqQzpt2S+OqoEiqQ6YG1HrC/VxAiEA0Vq6JlQK2tOX
37SS00dK0Qog4Qi8dN73GliFQNP18EkCIQC4epSA48zkfJMzQBAbRraSuxDNApPX
BzQbo+pMrEDbYQIgY4AncQgIkLB4Qk5kah48JNYXglzQlQtTjiX8Ty9ueGECIQCM
GD3UbQKiA0gf5plBA24I4wFVKxxa4wXbW/7SfP6XmQ==
-----END RSA PRIVATE KEY-----
4
test/tk_token_jwt_tests_SUITE_data/keys/local/public.pem
Normal file
@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAK9fx7qOJT7Aoseu7KKgaLagBh3wvDzg
7F/ZMtGbPFikJnnvRWvFB5oEGbMPblvtF0/fjqfu+eqjP3Z1tUSn7TkCAwEAAQ==
-----END PUBLIC KEY-----