TD48: Update keeper (#5)
Repository: https://github.com/valitydev/token-keeper.git
Parent: ab71c75552 · Commit: 1f526eb70e
.editorconfig (new file, 8 lines)
@@ -0,0 +1,8 @@
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_size = 4
indent_style = space
trim_trailing_whitespace = true
max_line_length = 120
.env (new file, 7 lines)
@@ -0,0 +1,7 @@
# NOTE
# You SHOULD specify point releases here so that build time and run time Erlang/OTPs
# are the same. See: https://github.com/erlware/relx/pull/902
SERVICE_NAME=token-keeper
OTP_VERSION=24.2.0
REBAR_VERSION=3.18
THRIFT_VERSION=0.14.2.2
.github/codecov.yml (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
coverage:
  status:
    project:
      default:
        # Don't allow total coverage to drop
        target: auto
        threshold: 0%
    patch:
      default:
        # Force new code to be at least 80% covered
        target: 80%
.github/workflows/build-image.yaml (vendored, deleted, 38 lines)
@@ -1,38 +0,0 @@
name: Build Docker image
on:
  push:
    branches: [master]
  pull_request:
    branches: ["*"]

env:
  REGISTRY: ghcr.io

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Log in to the Container registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Construct tags / labels for an image
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: |
            ${{ env.REGISTRY }}/${{ github.repository }}
          tags: |
            type=sha
      - name: Build and push Docker image
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        with:
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
.github/workflows/build-image.yml (vendored, new file, 53 lines)
@@ -0,0 +1,53 @@
name: Build Docker image

on:
  push:
    branches:
      - 'master'
      - 'epic/**'
  pull_request:
    branches: [ '**' ]

env:
  REGISTRY: ghcr.io

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Setup Buildx
        uses: docker/setup-buildx-action@v1

      # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#setting-an-environment-variable
      - name: Update environment variables
        run: grep -v '^#' .env >> $GITHUB_ENV

      - name: Log in to the Container registry
        uses: docker/login-action@v1.12.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Construct tags / labels for an image
        id: meta
        uses: docker/metadata-action@v3.6.2
        with:
          images: |
            ${{ env.REGISTRY }}/${{ github.repository }}
          tags: |
            type=sha

      - name: Build and push Docker image
        uses: docker/build-push-action@v2.9.0
        with:
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            SERVICE_NAME=${{ env.SERVICE_NAME }}
            OTP_VERSION=${{ env.OTP_VERSION }}
            THRIFT_VERSION=${{ env.THRIFT_VERSION }}
.github/workflows/erlang-checks.yml (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
name: Erlang CI Checks

on:
  push:
    branches:
      - 'master'
      - 'epic/**'
  pull_request:
    branches: [ '**' ]

jobs:
  setup:
    name: Load .env
    runs-on: ubuntu-latest
    outputs:
      otp-version: ${{ steps.otp-version.outputs.version }}
      rebar-version: ${{ steps.rebar-version.outputs.version }}
      thrift-version: ${{ steps.thrift-version.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      - run: grep -v '^#' .env >> $GITHUB_ENV
      - id: otp-version
        run: echo "::set-output name=version::$OTP_VERSION"
      - id: rebar-version
        run: echo "::set-output name=version::$REBAR_VERSION"
      - id: thrift-version
        run: echo "::set-output name=version::$THRIFT_VERSION"

  run:
    name: Run checks
    needs: setup
    uses: valitydev/erlang-workflows/.github/workflows/erlang-parallel-build.yml@v1.0.1
    with:
      otp-version: ${{ needs.setup.outputs.otp-version }}
      rebar-version: ${{ needs.setup.outputs.rebar-version }}
      use-thrift: true
      thrift-version: ${{ needs.setup.outputs.thrift-version }}
      run-ct-with-compose: true
.gitignore (vendored, 3 lines added)
@@ -19,3 +19,6 @@ rebar3.crashdump
 *~
 *.sublime-workspace
 .DS_Store
+# make stuff
+/.image.*
+Makefile.env
Dockerfile (40 lines changed)
@@ -1,17 +1,43 @@
-FROM ghcr.io/rbkmoney/build-erlang:785d48cbfa7e7f355300c08ba9edc6f0e78810cb AS builder
+ARG OTP_VERSION
+
+# Build the release
+FROM docker.io/library/erlang:${OTP_VERSION} AS builder
+
+ARG BUILDARCH
+
+# Install thrift compiler
+ARG THRIFT_VERSION
+
+RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${BUILDARCH}.tar.gz" \
+    | tar -xvz -C /usr/local/bin/
+
+# Copy sources
 RUN mkdir /build
 COPY . /build/

+# Build the release
 WORKDIR /build
 RUN rebar3 compile
 RUN rebar3 as prod release

-# Keep in sync with Erlang/OTP version in build image
-FROM erlang:24.1.3.0-slim
-ENV SERVICE=token-keeper
+# Make a runner image
+FROM docker.io/library/erlang:${OTP_VERSION}-slim
+
+ARG SERVICE_NAME
+
+# Set env
 ENV CHARSET=UTF-8
 ENV LANG=C.UTF-8
-COPY --from=builder /build/_build/prod/rel/${SERVICE} /opt/${SERVICE}
-WORKDIR /opt/${SERVICE}
+
+# Expose SERVICE_NAME as env so CMD expands properly on start
+ENV SERVICE_NAME=${SERVICE_NAME}
+
+# Set runtime
+WORKDIR /opt/${SERVICE_NAME}
+
+COPY --from=builder /build/_build/prod/rel/${SERVICE_NAME} /opt/${SERVICE_NAME}
+
 ENTRYPOINT []
-CMD /opt/${SERVICE}/bin/${SERVICE} foreground
+CMD /opt/${SERVICE_NAME}/bin/${SERVICE_NAME} foreground

 EXPOSE 8022
Dockerfile.dev (new file, 18 lines)
@@ -0,0 +1,18 @@
ARG OTP_VERSION

FROM docker.io/library/erlang:${OTP_VERSION}

ARG BUILDARCH

# Install thrift compiler
ARG THRIFT_VERSION

RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${BUILDARCH}.tar.gz" \
    | tar -xvz -C /usr/local/bin/

# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8

# Set runtime
CMD /bin/bash
LICENSE (25 lines removed)
@@ -174,28 +174,3 @@
 of your accepting any such warranty or additional liability.

 END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
Makefile (127 lines changed)
@@ -1,44 +1,72 @@
-REBAR := $(shell which rebar3 2>/dev/null || which ./rebar3)
-SUBMODULES = build_utils
-SUBTARGETS = $(patsubst %,%/.git,$(SUBMODULES))
+# HINT
+# Use this file to override variables here.
+# For example, to run with podman put `DOCKER=podman` there.
+-include Makefile.env

-UTILS_PATH := build_utils
-TEMPLATES_PATH := .
+# NOTE
+# Variables specified in `.env` file are used to pick and setup specific
+# component versions, both when building a development image and when running
+# CI workflows on GH Actions. This ensures that tasks run with `wc-` prefix
+# (like `wc-dialyze`) are reproducible between local machine and CI runners.
+DOTENV := $(shell grep -v '^\#' .env)

-# Name of the service
-SERVICE_NAME := token-keeper
-# Service image default tag
-SERVICE_IMAGE_TAG ?= $(shell git rev-parse HEAD)
-# The tag for service image to be pushed with
-SERVICE_IMAGE_PUSH_TAG ?= $(SERVICE_IMAGE_TAG)
+# Development images
+DEV_IMAGE_TAG = $(TEST_CONTAINER_NAME)-dev
+DEV_IMAGE_ID = $(file < .image.dev)

-# Base image for the service
-BASE_IMAGE_NAME := service-erlang
-BASE_IMAGE_TAG := 5ea1e10733d806e40761b6c8eec93fc0c9657992
-
-BUILD_IMAGE_NAME := build-erlang
-BUILD_IMAGE_TAG := 785d48cbfa7e7f355300c08ba9edc6f0e78810cb
-CALL_ANYWHERE := \
-	submodules \
-	all compile xref lint dialyze cover release clean distclean \
-	check_format format
-
-CALL_W_CONTAINER := $(CALL_ANYWHERE) test
-
-.PHONY: $(CALL_W_CONTAINER) all
+DOCKER ?= docker
+DOCKERCOMPOSE ?= docker-compose
+DOCKERCOMPOSE_W_ENV = DEV_IMAGE_TAG=$(DEV_IMAGE_TAG) $(DOCKERCOMPOSE)
+REBAR ?= rebar3
+TEST_CONTAINER_NAME ?= testrunner

 all: compile

--include $(UTILS_PATH)/make_lib/utils_container.mk
--include $(UTILS_PATH)/make_lib/utils_image.mk
+.PHONY: dev-image clean-dev-image wc-shell test

-$(SUBTARGETS): %/.git: %
-	git submodule update --init $<
-	touch $@
+dev-image: .image.dev

-submodules: $(SUBTARGETS)
+.image.dev: Dockerfile.dev .env
+	env $(DOTENV) $(DOCKERCOMPOSE_W_ENV) build $(TEST_CONTAINER_NAME)
+	$(DOCKER) image ls -q -f "reference=$(DEV_IMAGE_ID)" | head -n1 > $@

-compile: submodules
+clean-dev-image:
+ifneq ($(DEV_IMAGE_ID),)
+	$(DOCKER) image rm -f $(DEV_IMAGE_TAG)
+	rm .image.dev
+endif
+
+DOCKER_WC_OPTIONS := -v $(PWD):$(PWD) --workdir $(PWD)
+DOCKER_WC_EXTRA_OPTIONS ?= --rm
+DOCKER_RUN = $(DOCKER) run -t $(DOCKER_WC_OPTIONS) $(DOCKER_WC_EXTRA_OPTIONS)
+
+DOCKERCOMPOSE_RUN = $(DOCKERCOMPOSE_W_ENV) run --rm $(DOCKER_WC_OPTIONS)
+
+# Utility tasks
+
+wc-shell: dev-image
+	$(DOCKER_RUN) --interactive --tty $(DEV_IMAGE_TAG)
+
+wc-%: dev-image
+	$(DOCKER_RUN) $(DEV_IMAGE_TAG) make $*
+
+# TODO docker compose down doesn't work yet
+wdeps-shell: dev-image
+	$(DOCKERCOMPOSE_RUN) $(TEST_CONTAINER_NAME) su; \
+	$(DOCKERCOMPOSE_W_ENV) down
+
+wdeps-%: dev-image
+	$(DOCKERCOMPOSE_RUN) -T $(TEST_CONTAINER_NAME) make $*; \
+	res=$$?; \
+	$(DOCKERCOMPOSE_W_ENV) down; \
+	exit $$res
+
+# Rebar tasks
+
+rebar-shell:
+	$(REBAR) shell
+
+compile:
+	$(REBAR) compile

 xref:
@@ -47,29 +75,34 @@ xref:
 lint:
 	$(REBAR) lint

-check_format:
+check-format:
 	$(REBAR) fmt -c

+dialyze:
+	$(REBAR) as test dialyzer
+
+release:
+	$(REBAR) as prod release
+
+eunit:
+	$(REBAR) eunit --cover
+
+common-test:
+	$(REBAR) ct --cover
+
+cover:
+	$(REBAR) covertool generate
+
 format:
 	$(REBAR) fmt -w

-dialyze:
-	$(REBAR) dialyzer
-
-release: submodules
-	$(REBAR) as prod release
-
 clean:
-	$(REBAR) cover -r
 	$(REBAR) clean

-distclean:
-	$(REBAR) clean
+distclean: clean-build-image
 	rm -rf _build

-cover:
-	$(REBAR) cover
+test: eunit common-test

-# CALL_W_CONTAINER
-test: submodules
-	$(REBAR) do eunit, ct
+cover-report:
+	$(REBAR) cover
README.md (54 lines changed)
@@ -1,9 +1,51 @@
-token_keeper
-=====
+# Token Keeper

-An OTP application
+![GitHub branch checks state](https://img.shields.io/github/checks-status/valitydev/token-keeper/master) ![Codecov](https://img.shields.io/codecov/c/github/valitydev/token-keeper) ![License](https://img.shields.io/github/license/valitydev/token-keeper)

-Build
------
+Access token authentication and management service.

-    $ rebar3 compile
+## Building
+
+To build the project, run the following command:
+
+```bash
+$ make compile
+```
+
+## Running
+
+To enter the [Erlang shell][1] with the project running, run the following command:
+
+```bash
+$ make rebar-shell
+```
+
+## Development environment
+
+### Run in a docker container
+
+You can run any of the tasks defined in the Makefile from inside of a docker container (defined in `Dockerfile.dev`) by prefixing the task name with `wc-`. To successfully build the dev container you need `Docker BuildKit` enabled. This can be accomplished by either installing [docker-buildx](https://docs.docker.com/buildx/working-with-buildx/) locally, or exporting the `DOCKER_BUILDKIT=1` environment variable.
+
+#### Example
+
+* This command will run the `compile` task in a docker container:
+```bash
+$ make wc-compile
+```
+
+### Run in a docker-compose environment
+
+Similarly, you can run any of the tasks defined in the Makefile from inside of a docker-compose environment (defined in `docker-compose.yaml`) by prefixing the task name with `wdeps-`. To successfully build the dev container you need `Docker BuildKit` enabled (see `Run in a docker container` section). It *may* also be necessary to export a `COMPOSE_DOCKER_CLI_BUILD=1` environment variable for `docker-compose` container builds to work properly.
+
+#### Example
+
+* This command will run the `test` task in a docker-compose environment:
+```bash
+$ make wdeps-test
+```
+
+## Documentation
+
+@TODO Please write a couple of words about what your project does and how it does it.
+
+[1]: http://erlang.org/doc/man/shell.html
compose.yaml (new file, 29 lines)
@@ -0,0 +1,29 @@
services:
  testrunner:
    image: $DEV_IMAGE_TAG
    build:
      dockerfile: Dockerfile.dev
      context: .
      args:
        OTP_VERSION: $OTP_VERSION
        THRIFT_VERSION: $THRIFT_VERSION
    volumes:
      - .:$PWD
    hostname: $SERVICE_NAME
    depends_on:
      machinegun:
        condition: service_healthy
    working_dir: $PWD
    command: /sbin/init

  machinegun:
    image: docker.io/rbkmoney/machinegun:c05a8c18cd4f7966d70b6ad84cac9429cdfe37ae
    command: /opt/machinegun/bin/machinegun foreground
    volumes:
      - ./var/machinegun/config.yaml:/opt/machinegun/etc/config.yaml
      - ./var/machinegun/cookie:/opt/machinegun/etc/cookie
    healthcheck:
      test: "curl http://localhost:8022/"
      interval: 5s
      timeout: 1s
      retries: 20
@@ -79,14 +79,6 @@
 <<"com.rbkmoney.keycloak">> =>
     #{
         sources => [
-            %% Fetch from claim
-            {legacy_claim, #{
-                %% Where to put metadata
-                metadata_mappings => #{
-                    party_id => <<"test.rbkmoney.party.id">>,
-                    consumer => <<"test.rbkmoney.capi.consumer">>
-                }
-            }},
             %% Create a new bouncer context using token data
             {extract, #{
                 %% Configuration for how to extract said context
@@ -130,6 +122,7 @@
 <<"com.rbkmoney.apikeymgmt">> =>
     #{
         sources => [
+            %% Fetch from storage
             {storage, #{
                 name => <<"com.rbkmoney.apikeymgmt">>
             }}
@@ -138,6 +131,7 @@
 <<"com.rbkmoney.access.capi">> =>
     #{
         sources => [
+            %% Fetch from claim
             {claim, #{}}
         ]
     }
elvis.config (31 lines changed)
@@ -1,20 +1,25 @@
 [
     {elvis, [
         {verbose, true},
         {config, [
             #{
-                dirs => ["src"],
+                dirs => ["src", "include"],
                 filter => "*.erl",
                 ruleset => erl_files,
                 rules => [
-                    {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
-                    % Too opinionated
-                    {elvis_style, state_record_and_type, disable},
+                    {elvis_text_style, line_length, #{limit => 120}},
+                    {elvis_style, nesting_level, #{level => 3}},
+                    {elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
+                    {elvis_style, no_if_expression, disable},
                     %% Project rules
                     {elvis_style, invalid_dynamic_call, #{
                         ignore => [
                             % Implements parts of logger duties, including message formatting.
                             tk_audit_log
                         ]
-                    }}
+                    }},
+                    % Too opinionated
+                    {elvis_style, state_record_and_type, disable}
                 ]
             },
             #{
@@ -22,7 +27,11 @@
                 filter => "*.erl",
                 ruleset => erl_files,
                 rules => [
-                    {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
+                    {elvis_text_style, line_length, #{limit => 120}},
+                    {elvis_style, nesting_level, #{level => 3}},
+                    {elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
+                    {elvis_style, no_if_expression, disable},
                     %% Test rules
                     % We want to use `ct:pal/2` and friends in test code.
                     {elvis_style, no_debug_call, disable},
                     % Assert macros can trigger use of ignored binding, yet we want them for better
@@ -40,13 +49,21 @@
                 filter => "Makefile",
                 ruleset => makefiles
             },
+            #{
+                dirs => ["."],
+                filter => "elvis.config",
+                ruleset => elvis_config
+            },
             #{
                 dirs => ["."],
                 filter => "rebar.config",
                 ruleset => rebar_config,
                 rules => [
                     {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
                     {elvis_text_style, no_tabs},
-                    {elvis_text_style, no_trailing_whitespace}
+                    {elvis_text_style, no_trailing_whitespace},
+                    %% Temporarily disabled till regex pattern is available
+                    {elvis_project, no_deps_master_rebar, disable}
                 ]
             },
             #{
rebar.config
39
rebar.config
@ -27,32 +27,27 @@
|
||||
%% Common project dependencies.
|
||||
{deps, [
|
||||
{jsx, "3.1.0"},
|
||||
{jose, "1.11.2"},
|
||||
%% NOTE
|
||||
%% Pinning to version "1.11.2" from hex here causes constant upgrading and recompilation of the entire project
|
||||
{jose, {git, "https://github.com/potatosalad/erlang-jose.git", {tag, "1.11.2"}}},
|
||||
{yamerl, "0.8.1"},
|
||||
{thrift, {git, "https://github.com/valitydev/thrift_erlang.git", {branch, "master"}}},
|
||||
{genlib, {git, "https://github.com/valitydev/genlib.git", {branch, "master"}}},
|
||||
{snowflake, {git, "https://github.com/valitydev/snowflake.git", {branch, "master"}}},
|
||||
{woody, {git, "https://github.com/valitydev/woody_erlang.git", {branch, "master"}}},
|
||||
{woody_user_identity, {git, "https://github.com/valitydev/woody_erlang_user_identity.git", {branch, "master"}}},
|
||||
{token_keeper_proto, {git, "https://github.com/valitydev/token-keeper-proto.git", {branch, "master"}}},
|
||||
{scoper, {git, "https://github.com/valitydev/scoper.git", {branch, "master"}}},
|
||||
{erl_health, {git, "https://github.com/valitydev/erlang-health.git", {branch, "master"}}},
|
||||
%% Only needed for some utility functions
|
||||
{bouncer_client, {git, "https://github.com/valitydev/bouncer_client_erlang.git", {branch, master}}},
|
||||
{machinery, {git, "https://github.com/valitydev/machinery.git", {branch, "master"}}},
|
||||
|
||||
% Production-only deps.
|
||||
% Defined here for the sake of rebar-locking.
|
||||
{recon, "2.5.2"},
|
||||
{iosetopts, {git, "https://github.com/valitydev/iosetopts.git", {ref, "edb445c"}}},
|
||||
{logger_logstash_formatter, {git, "https://github.com/valitydev/logger_logstash_formatter.git", {branch, "master"}}},
|
||||
{how_are_you, {git, "https://github.com/valitydev/how_are_you.git", {branch, "master"}}}
|
||||
{bouncer_client, {git, "https://github.com/valitydev/bouncer_client_erlang.git", {branch, "master"}}},
|
||||
{machinery, {git, "https://github.com/valitydev/machinery.git", {branch, "master"}}}
|
||||
]}.
|
||||
|
||||
%% Helpful plugins.
|
||||
{plugins, [
|
||||
{rebar3_lint, "1.0.1"},
|
||||
{erlfmt, "1.0.0"}
|
||||
{erlfmt, "1.0.0"},
|
||||
{covertool, "2.0.4"}
|
||||
]}.
|
||||
|
||||
%% Linter config.
|
||||
@ -85,6 +80,13 @@
|
||||
|
||||
{profiles, [
|
||||
{prod, [
|
||||
{deps, [
|
||||
% Introspect a node running in production
|
||||
{recon, "2.5.2"},
|
||||
{logger_logstash_formatter,
|
||||
{git, "https://github.com/valitydev/logger_logstash_formatter.git", {ref, "2c7b716"}}},
|
||||
{iosetopts, {git, "https://github.com/valitydev/iosetopts.git", {ref, "edb445c"}}}
|
||||
]},
|
||||
%% Relx configuration
|
||||
{relx, [
|
||||
{release, {'token-keeper', "0.1.0"}, [
|
||||
@ -97,7 +99,6 @@
|
||||
{tools, load},
|
||||
% logger formatter
|
||||
{logger_logstash_formatter, load},
|
||||
how_are_you,
|
||||
token_keeper
|
||||
]},
|
||||
{sys_config, "./config/sys.config"},
|
||||
@ -108,7 +109,10 @@
|
||||
]},
|
||||
{test, [
|
||||
{cover_enabled, true},
|
||||
{deps, []}
|
||||
{deps, []},
|
||||
{dialyzer, [
|
||||
{plt_extra_apps, [eunit, common_test]}
|
||||
]}
|
||||
]}
|
||||
]}.
|
||||
|
||||
@ -121,3 +125,10 @@
|
||||
{print_width, 120},
|
||||
{files, ["{src,test}/*.{hrl,erl,app.src}", "rebar.config", "elvis.config", "config/sys.config"]}
|
||||
]}.
|
||||
|
||||
{covertool, [
|
||||
{coverdata_files, [
|
||||
"eunit.coverdata",
|
||||
"ct.coverdata"
|
||||
]}
|
||||
]}.
|
||||
|
rebar.lock (34 lines changed)
@@ -1,6 +1,5 @@
 {"1.2.0",
-[{<<"bear">>,{pkg,<<"bear">>,<<"0.9.0">>},2},
- {<<"bouncer_client">>,
+[{<<"bouncer_client">>,
   {git,"https://github.com/valitydev/bouncer_client_erlang.git",
    {ref,"535449a459b70643836c440a863b42656f2a1409"}},
   0},
@@ -20,31 +19,15 @@
   {git,"https://github.com/valitydev/erlang-health.git",
    {ref,"5958e2f35cd4d09f40685762b82b82f89b4d9333"}},
   0},
- {<<"folsom">>,
-  {git,"https://github.com/folsom-project/folsom.git",
-   {ref,"62fd0714e6f0b4e7833880afe371a9c882ea0fc2"}},
-  1},
 {<<"genlib">>,
  {git,"https://github.com/valitydev/genlib.git",
   {ref,"82c5ff3866e3019eb347c7f1d8f1f847bed28c10"}},
  0},
 {<<"gproc">>,{pkg,<<"gproc">>,<<"0.9.0">>},1},
 {<<"hackney">>,{pkg,<<"hackney">>,<<"1.18.0">>},1},
- {<<"how_are_you">>,
-  {git,"https://github.com/valitydev/how_are_you.git",
-   {ref,"2fd8013420328464c2c84302af2781b86577b39f"}},
-  0},
 {<<"idna">>,{pkg,<<"idna">>,<<"6.1.1">>},2},
- {<<"iosetopts">>,
-  {git,"https://github.com/valitydev/iosetopts.git",
-   {ref,"edb445c4a32b15a9b432dc66db5da4371ad71b69"}},
-  0},
-{<<"jose">>,{pkg,<<"jose">>,<<"1.11.2">>},0},
+{<<"jose">>,{pkg,<<"jose">>,<<"1.11.1">>},0},
 {<<"jsx">>,{pkg,<<"jsx">>,<<"3.1.0">>},0},
- {<<"logger_logstash_formatter">>,
-  {git,"https://github.com/valitydev/logger_logstash_formatter.git",
-   {ref,"2c7b71630527a932f2a1aef4edcec66863c1367a"}},
-  0},
 {<<"machinery">>,
  {git,"https://github.com/valitydev/machinery.git",
   {ref,"db7c94b9913451e9558afa19f2fe77bf48d391da"}},
@@ -61,7 +44,6 @@
  1},
 {<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.1">>},2},
 {<<"ranch">>,{pkg,<<"ranch">>,<<"1.8.0">>},2},
-{<<"recon">>,{pkg,<<"recon">>,<<"2.5.2">>},0},
 {<<"scoper">>,
  {git,"https://github.com/valitydev/scoper.git",
   {ref,"7f3183df279bc8181efe58dafd9cae164f495e6f"}},
@@ -84,14 +66,9 @@
  {git,"https://github.com/valitydev/woody_erlang.git",
   {ref,"6f818c57e3b19f96260b1f968115c9bc5bcad4d2"}},
  0},
- {<<"woody_user_identity">>,
-  {git,"https://github.com/valitydev/woody_erlang_user_identity.git",
-   {ref,"a480762fea8d7c08f105fb39ca809482b6cb042e"}},
-  0},
 {<<"yamerl">>,{pkg,<<"yamerl">>,<<"0.8.1">>},0}]}.
 [
 {pkg_hash,[
- {<<"bear">>, <<"A31CCF5361791DD5E708F4789D67E2FEF496C4F05935FC59ADC11622F834D128">>},
 {<<"cache">>, <<"B23A5FE7095445A88412A6E614C933377E0137B44FFED77C9B3FEF1A731A20B2">>},
 {<<"certifi">>, <<"D4FB0A6BB20B7C9C3643E22507E42F356AC090A1DCEA9AB99E27E0376D695EBA">>},
 {<<"cowboy">>, <<"865DD8B6607E14CF03282E10E934023A1BD8BE6F6BACF921A7E2A96D800CD452">>},
@@ -99,18 +76,16 @@
 {<<"gproc">>, <<"853CCB7805E9ADA25D227A157BA966F7B34508F386A3E7E21992B1B484230699">>},
 {<<"hackney">>, <<"C4443D960BB9FBA6D01161D01CD81173089686717D9490E5D3606644C48D121F">>},
 {<<"idna">>, <<"8A63070E9F7D0C62EB9D9FCB360A7DE382448200FBBD1B106CC96D3D8099DF8D">>},
-{<<"jose">>, <<"F4C018CCF4FDCE22C71E44D471F15F723CB3EFAB5D909AB2BA202B5BF35557B3">>},
+{<<"jose">>, <<"59DA64010C69AAD6CDE2F5B9248B896B84472E99BD18F246085B7B9FE435DCDB">>},
 {<<"jsx">>, <<"D12516BAA0BB23A59BB35DCCAF02A1BD08243FCBB9EFE24F2D9D056CCFF71268">>},
 {<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
 {<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
 {<<"parse_trans">>, <<"16328AB840CC09919BD10DAB29E431DA3AF9E9E7E7E6F0089DD5A2D2820011D8">>},
 {<<"ranch">>, <<"8C7A100A139FD57F17327B6413E4167AC559FBC04CA7448E9BE9057311597A1D">>},
-{<<"recon">>, <<"CBA53FA8DB83AD968C9A652E09C3ED7DDCC4DA434F27C3EAA9CA47FFB2B1FF03">>},
 {<<"ssl_verify_fun">>, <<"CF344F5692C82D2CD7554F5EC8FD961548D4FD09E7D22F5B62482E5AEAEBD4B0">>},
 {<<"unicode_util_compat">>, <<"BC84380C9AB48177092F43AC89E4DFA2C6D62B40B8BD132B1059ECC7232F9A78">>},
 {<<"yamerl">>, <<"07DA13FFA1D8E13948943789665C62CCD679DFA7B324A4A2ED3149DF17F453A4">>}]},
 {pkg_hash_ext,[
- {<<"bear">>, <<"47F71F098F2E3CD05E124A896C5EC2F155967A2B6FF6731E0D627312CCAB7E28">>},
 {<<"cache">>, <<"44516CE6FA03594D3A2AF025DD3A87BFE711000EB730219E1DDEFC816E0AA2F4">>},
 {<<"certifi">>, <<"6AC7EFC1C6F8600B08D625292D4BBF584E14847CE1B6B5C44D983D273E1097EA">>},
 {<<"cowboy">>, <<"2C729F934B4E1AA149AFF882F57C6372C15399A20D54F65C8D67BEF583021BDE">>},
@@ -118,13 +93,12 @@
 {<<"gproc">>, <<"587E8AF698CCD3504CF4BA8D90F893EDE2B0F58CABB8A916E2BF9321DE3CF10B">>},
 {<<"hackney">>, <<"9AFCDA620704D720DB8C6A3123E9848D09C87586DC1C10479C42627B905B5C5E">>},
 {<<"idna">>, <<"92376EB7894412ED19AC475E4A86F7B413C1B9FBB5BD16DCCD57934157944CEA">>},
-{<<"jose">>, <<"98143FBC48D55F3A18DABA82D34FE48959D44538E9697C08F34200FA5F0947D2">>},
+{<<"jose">>, <<"078F6C9FB3CD2F4CFAFC972C814261A7D1E8D2B3685C0A76EB87E158EFFF1AC5">>},
 {<<"jsx">>, <<"0C5CC8FDC11B53CC25CF65AC6705AD39E54ECC56D1C22E4ADB8F5A53FB9427F3">>},
 {<<"metrics">>, <<"69B09ADDDC4F74A40716AE54D140F93BEB0FB8978D8636EADED0C31B6F099F16">>},
 {<<"mimerl">>, <<"F278585650AA581986264638EBF698F8BB19DF297F66AD91B18910DFC6E19323">>},
 {<<"parse_trans">>, <<"07CD9577885F56362D414E8C4C4E6BDF10D43A8767ABB92D24CBE8B24C54888B">>},
 {<<"ranch">>, <<"49FBCFD3682FAB1F5D109351B61257676DA1A2FDBE295904176D5E521A2DDFE5">>},
-{<<"recon">>, <<"2C7523C8DEE91DFF41F6B3D63CBA2BD49EB6D2FE5BF1EEC0DF7F87EB5E230E1C">>},
 {<<"ssl_verify_fun">>, <<"BDB0D2471F453C88FF3908E7686F86F9BE327D065CC1EC16FA4540197EA04680">>},
 {<<"unicode_util_compat">>, <<"25EEE6D67DF61960CF6A794239566599B09E17E668D3700247BC498638152521">>},
 {<<"yamerl">>, <<"96CB30F9D64344FED0EF8A92E9F16F207DE6C04DFFF4F366752CA79F5BCEB23F">>}]}
@@ -233,18 +233,20 @@ log_allowed(Level) ->

 %%

-get_level({authenticate, started}, _Level) -> log_allowed(debug);
+get_level({{authenticator, authenticate}, started}, _Level) -> log_allowed(debug);
 get_level(_, Level) -> Level.

 get_message({Op, {failed, _}}) ->
     get_message({Op, failed});
 get_message({Op, Event}) ->
-    EncodedOp = iolist_to_binary(encode_op(Op)),
+    EncodedOp = encode_op(Op),
     EncodedEvent = atom_to_binary(Event),
     <<EncodedOp/binary, " ", EncodedEvent/binary>>.

-get_beat_metadata({Op, Event}) ->
-    #{Op => build_event(Event)}.
+get_beat_metadata({Op, Event}) when is_atom(Op) ->
+    #{Op => build_event(Event)};
+get_beat_metadata({{Op, Sub}, Event}) when is_atom(Op) ->
+    #{Op => get_beat_metadata({Sub, Event})}.

 build_event({failed, Error}) ->
     #{
@@ -255,9 +257,9 @@ build_event(Event) ->
     #{event => Event}.

 encode_op(Op) when is_atom(Op) ->
-    [atom_to_binary(Op)];
-encode_op({Namespace, Sub}) ->
-    [atom_to_binary(Namespace), <<":">> | encode_op(Sub)].
+    atom_to_binary(Op);
+encode_op({Namespace, Sub}) when is_atom(Namespace) ->
+    iolist_to_binary([atom_to_binary(Namespace), <<".">>, encode_op(Sub)]).

 encode_error({Class, Details}) when is_atom(Class) ->
     #{class => Class, details => genlib:format(Details)};
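The two hunks above change how audit beats are rendered: operation tuples now nest, encode to a single dot-joined binary, and produce nested metadata maps. A small sketch of the resulting values (an illustration added in editing, not part of the commit; it assumes these private helpers are reachable, e.g. from an in-module test):

```erlang
%% Illustration only: expected values for the reworked helpers above.
-include_lib("eunit/include/eunit.hrl").

encode_op_example_test() ->
    %% Nested operations are joined with "." into one binary.
    ?assertEqual(<<"authenticator.authenticate">>,
                 encode_op({authenticator, authenticate})),
    %% The log message appends the event name after a space.
    ?assertEqual(<<"authenticator.authenticate started">>,
                 get_message({{authenticator, authenticate}, started})),
    %% Beat metadata mirrors the operation nesting as nested maps.
    ?assertEqual(#{authenticator => #{authenticate => #{event => started}}},
                 get_beat_metadata({{authenticator, authenticate}, started})).
```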
@@ -10,13 +10,12 @@

 %% API Types

--type authdata_source() :: claim_source() | storage_source() | legacy_claim_source() | extractor_source().
+-type authdata_source() :: claim_source() | storage_source() | extractor_source().

 -type opts() ::
     tk_authdata_source_claim:opts()
     | tk_authdata_source_storage:opts()
-    | tk_authdata_source_context_extractor:opts()
-    | tk_authdata_source_legacy_claim:opts().
+    | tk_authdata_source_context_extractor:opts().

 -export_type([authdata_source/0]).

@@ -26,7 +25,6 @@

 -type claim_source() :: {claim, tk_authdata_source_claim:opts()}.
 -type storage_source() :: {storage, tk_authdata_source_storage:opts()}.
--type legacy_claim_source() :: {legacy_claim, tk_authdata_source_legacy_claim:opts()}.
 -type extractor_source() :: {extract_context, tk_authdata_source_context_extractor:opts()}.

 %% API functions
@@ -45,7 +43,5 @@ get_source_handler(claim) ->
     tk_authdata_source_claim;
 get_source_handler(storage) ->
     tk_authdata_source_storage;
-get_source_handler(legacy_claim) ->
-    tk_authdata_source_legacy_claim;
 get_source_handler(extract_context) ->
     tk_authdata_source_context_extractor.
tk_authdata_source_legacy_claim.erl (deleted, 61 lines)
@@ -1,61 +0,0 @@
-module(tk_authdata_source_legacy_claim).
-behaviour(tk_authdata_source).

%% Behaviour

-export([get_authdata/3]).

%% API types

-type opts() :: #{
    metadata_mappings := #{
        party_id := binary(),
        token_consumer := binary()
    }
}.
-export_type([opts/0]).

%% Internal types

-type authdata() :: tk_authdata:prototype().

%%

-define(CLAIM_BOUNCER_CTX, <<"bouncer_ctx">>).
-define(CLAIM_PARTY_ID, <<"sub">>).
-define(CLAIM_CONSUMER_TYPE, <<"cons">>).

%% Behaviour functions

-spec get_authdata(tk_token:token_data(), opts(), woody_context:ctx()) -> authdata() | undefined.
get_authdata(#{payload := TokenPayload}, Opts, _Context) ->
    case decode_bouncer_claim(TokenPayload) of
        {ok, ContextFragment} ->
            create_authdata(ContextFragment, create_metadata(TokenPayload, Opts));
        {error, Reason} ->
            _ = logger:warning("Failed attempt to decode bouncer context from legacy claims: ~p", [Reason]),
            undefined
    end.

%%

decode_bouncer_claim(#{?CLAIM_BOUNCER_CTX := BouncerClaim}) ->
    tk_claim_utils:decode_bouncer_claim(BouncerClaim);
decode_bouncer_claim(_Claims) ->
    {error, bouncer_claim_not_found}.

create_authdata(ContextFragment, Metadata) ->
    genlib_map:compact(#{
        status => active,
        context => ContextFragment,
        metadata => Metadata
    }).

create_metadata(TokenPayload, Opts) ->
    Metadata = #{
        %% TODO: This is a temporary hack.
        %% When some external services will stop requiring woody user identity to be present it must be removed too
        party_id => maps:get(?CLAIM_PARTY_ID, TokenPayload, undefined),
        consumer => maps:get(?CLAIM_CONSUMER_TYPE, TokenPayload, undefined)
    },
    tk_utils:remap(genlib_map:compact(Metadata), maps:get(metadata_mappings, Opts)).
@@ -14,8 +14,7 @@
 -type method_opts() ::
     {detect_token, tk_context_extractor_detect_token:opts()}
     | {phony_api_key, tk_context_extractor_phony_api_key:opts()}
-    | {user_session_token, tk_context_extractor_user_session_token:opts()}
-    | {invoice_template_access_token, tk_context_extractor_invoice_tpl_token:opts()}.
+    | {user_session_token, tk_context_extractor_user_session_token:opts()}.
 -type extracted_context() :: {context_fragment(), tk_authdata:metadata() | undefined}.

 -export_type([methods/0]).
@@ -29,8 +28,7 @@
 -type opts() ::
     tk_context_extractor_detect_token:opts()
     | tk_context_extractor_phony_api_key:opts()
-    | tk_context_extractor_user_session_token:opts()
-    | tk_context_extractor_invoice_tpl_token:opts().
+    | tk_context_extractor_user_session_token:opts().

 %% API functions

@@ -40,8 +38,6 @@ extract_context({detect_token, Opts}, TokenData) ->
 extract_context({phony_api_key, Opts}, TokenData) ->
     tk_context_extractor_phony_api_key:extract_context(TokenData, Opts);
 extract_context({user_session_token, Opts}, TokenData) ->
-    tk_context_extractor_user_session_token:extract_context(TokenData, Opts);
-extract_context({invoice_template_access_token, Opts}, TokenData) ->
-    tk_context_extractor_invoice_tpl_token:extract_context(TokenData, Opts).
+    tk_context_extractor_user_session_token:extract_context(TokenData, Opts).

 %% Internal functions
tk_context_extractor_invoice_tpl_token.erl (deleted, 181 lines)
@@ -1,181 +0,0 @@
-module(tk_context_extractor_invoice_tpl_token).

%% NOTE:
%% This is here because of a historical decision to make InvoiceTemplateAccessToken(s) never expire,
%% therefore a lot of them do not have a standart bouncer context claim built-in.
%% It is advisable to get rid of this exctractor when this issue will be solved.

-behaviour(tk_context_extractor).

-export([extract_context/2]).

%%

-type opts() :: #{
    domain := binary(),
    metadata_mappings := #{
        party_id := binary()
    }
}.

-export_type([opts/0]).

%%

-define(CLAIM_PARTY_ID, <<"sub">>).
-define(CLAIM_RESOURCE_ACCESS, <<"resource_access">>).

%% API functions

-spec extract_context(tk_token:token_data(), opts()) -> tk_context_extractor:extracted_context() | undefined.
extract_context(#{id := TokenID, payload := Payload}, Opts) ->
    PartyID = maps:get(?CLAIM_PARTY_ID, Payload),
    case extract_invoice_template_rights(Payload, Opts) of
        {ok, InvoiceTemplateID} ->
            BCtx = create_bouncer_ctx(TokenID, PartyID, InvoiceTemplateID),
            {BCtx,
                make_metadata(
                    #{
                        %% @TEMP: This is a temporary hack.
                        %% When some external services will stop requiring woody user
                        %% identity to be present it must be removed too
                        party_id => PartyID
                    },
                    Opts
                )};
        {error, Reason} ->
            _ = logger:warning("Failed to extract invoice template rights: ~p", [Reason]),
            undefined
    end.

%%

extract_invoice_template_rights(TokenPayload, Opts) ->
    Domain = maps:get(domain, Opts),
    case get_acl(Domain, get_resource_hierarchy(), TokenPayload) of
        {ok, TokenACL} ->
            match_invoice_template_acl(TokenACL);
        {error, Reason} ->
            {error, {acl, Reason}}
    end.

match_invoice_template_acl(TokenACL) ->
    Patterns = [
        fun({[party, {invoice_templates, ID}], [read]}) -> ID end,
        fun({[party, {invoice_templates, ID}, invoice_template_invoices], [write]}) -> ID end
    ],
    case match_acl(Patterns, TokenACL) of
        [[InvoiceTemplateID], [InvoiceTemplateID]] ->
            {ok, InvoiceTemplateID};
        Matches ->
            {error, {acl_mismatch, Matches}}
    end.

match_acl(Patterns, TokenACL) ->
    [match_acl_pattern(TokenACL, Pat) || Pat <- Patterns].

match_acl_pattern(TokenACL, Pat) ->
    lists:usort([Match || Entry <- TokenACL, Match <- run_pattern(Entry, Pat)]).

run_pattern(Entry, Pat) when is_function(Pat, 1) ->
    try
        [Pat(Entry)]
    catch
        error:function_clause -> []
    end.

get_acl(Domain, Hierarchy, TokenPayload) ->
    case maps:get(?CLAIM_RESOURCE_ACCESS, TokenPayload, undefined) of
        #{Domain := #{<<"roles">> := Roles}} ->
            try
                TokenACL = tk_legacy_acl:decode(Roles, Hierarchy),
                {ok, tk_legacy_acl:to_list(TokenACL)}
            catch
                throw:Reason -> {error, {invalid, Reason}}
            end;
        _ ->
            {error, missing}
    end.

create_bouncer_ctx(TokenID, PartyID, InvoiceTemplateID) ->
    bouncer_context_helpers:add_auth(
        #{
            method => <<"InvoiceTemplateAccessToken">>,
            token => #{id => TokenID},
            scope => [
                #{
                    party => #{id => PartyID},
                    invoice_template => #{id => InvoiceTemplateID}
                }
            ]
        },
        bouncer_context_helpers:empty()
    ).

make_metadata(Metadata, ExtractorOpts) ->
    Mappings = maps:get(metadata_mappings, ExtractorOpts),
    tk_utils:remap(genlib_map:compact(Metadata), Mappings).

get_resource_hierarchy() ->
    #{
        party => #{invoice_templates => #{invoice_template_invoices => #{}}}
    }.

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

-spec test() -> _.

-define(TEST_ACL, [
    {some_other_stuff, 123, <<"abc">>},
    {second, <<"abc">>},
    {doubles, 123},
    more_stuff,
    {test_acl, 123},
    {doubles, 456},
    {first, 123}
]).

-spec match_acl_base_test() -> _.
match_acl_base_test() ->
    [[123]] = match_acl(
        [
            fun({test_acl, Int}) -> Int end
        ],
        ?TEST_ACL
    ).

-spec match_acl_dupes_test() -> _.
match_acl_dupes_test() ->
    [[123, 456]] = match_acl(
        [
            fun({doubles, Int}) -> Int end
        ],
        ?TEST_ACL
    ).

-spec match_acl_order_test() -> _.
match_acl_order_test() ->
    [[123], [<<"abc">>]] = match_acl(
        [
            fun({first, Int}) -> Int end,
            fun({second, Bin}) -> Bin end
        ],
        ?TEST_ACL
    ).

-spec match_acl_no_match_test() -> _.
match_acl_no_match_test() ->
    [[], []] = match_acl(
        [
            fun({foo, _}) -> wait end,
            fun({bar, _, _}) -> no end
        ],
        ?TEST_ACL
    ).

-endif.
@@ -21,25 +21,45 @@

 -spec extract_context(tk_token:token_data(), opts()) -> tk_context_extractor:extracted_context() | undefined.
 extract_context(#{id := TokenID, payload := Payload}, Opts) ->
-    PartyID = maps:get(?CLAIM_PARTY_ID, Payload),
-    ContextFragment = bouncer_context_helpers:add_auth(
+    case extract_party_data(Payload) of
+        {ok, PartyID} ->
+            create_context_and_metadata(TokenID, PartyID, Opts);
+        {error, Reason} ->
+            _ = logger:warning("Could not extract phony_api_key context, reason: ~p", [Reason]),
+            undefined
+    end.
+
+%%
+
+create_context_and_metadata(TokenID, PartyID, Opts) ->
+    {
+        create_context(TokenID, PartyID),
+        wrap_metadata(
+            create_metadata(PartyID),
+            Opts
+        )
+    }.
+
+extract_party_data(#{
+    ?CLAIM_PARTY_ID := PartyID
+}) ->
+    {ok, PartyID};
+extract_party_data(_) ->
+    {error, {missing, ?CLAIM_PARTY_ID}}.
+
+create_context(TokenID, PartyID) ->
+    bouncer_context_helpers:add_auth(
         #{
             method => <<"ApiKeyToken">>,
             token => #{id => TokenID},
             scope => [#{party => #{id => PartyID}}]
         },
         bouncer_context_helpers:empty()
-    ),
-    {ContextFragment,
-        make_metadata(
-            #{
-                party_id => PartyID
-            },
-            Opts
-        )}.
+    ).

 %%

+create_metadata(PartyID) ->
+    #{party_id => PartyID}.
+
-make_metadata(Metadata, Opts) ->
+wrap_metadata(Metadata, Opts) ->
     Mappings = maps:get(metadata_mappings, Opts),
     tk_utils:remap(genlib_map:compact(Metadata), Mappings).
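Both extractors reworked in this commit funnel their metadata through tk_utils:remap/2, which renames internal metadata keys to the externally configured names. A hedged sketch of the effect (the mapping value is borrowed from the keycloak config earlier in this diff; the exact remap semantics are inferred from its usage here, not shown by this commit):

```erlang
%% Hypothetical values, for illustration.
Opts = #{metadata_mappings => #{party_id => <<"test.rbkmoney.party.id">>}},
Metadata = genlib_map:compact(#{party_id => <<"PARTY-42">>}),
%% Expected result: #{<<"test.rbkmoney.party.id">> => <<"PARTY-42">>}
Mapped = tk_utils:remap(Metadata, maps:get(metadata_mappings, Opts)).
```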
@@ -25,43 +25,66 @@

 -spec extract_context(tk_token:token_data(), opts()) -> tk_context_extractor:extracted_context() | undefined.
 extract_context(#{id := TokenID, expiration := Expiration, payload := Payload}, Opts) ->
-    UserID = maps:get(?CLAIM_USER_ID, Payload),
-    Email = maps:get(?CLAIM_USER_EMAIL, Payload),
-    UserRealm = maps:get(user_realm, Opts, undefined),
+    case extract_user_data(Payload) of
+        {ok, {UserID, UserEmail}} ->
+            create_context_and_metadata(TokenID, Expiration, UserID, UserEmail, Opts);
+        {error, Reason} ->
+            _ = logger:warning("Could not extract user_session_token context, reason: ~p", [Reason]),
+            undefined
+    end.
+
+%% Internal functions
+
+create_context_and_metadata(TokenID, TokenExpiration, UserID, UserEmail, Opts) ->
+    UserRealm = maps:get(user_realm, Opts),
+    {
+        create_context(TokenID, TokenExpiration, UserID, UserEmail, UserRealm),
+        wrap_metadata(
+            create_metadata(UserID, UserEmail, UserRealm),
+            Opts
+        )
+    }.
+
+extract_user_data(#{
+    ?CLAIM_USER_ID := UserID,
+    ?CLAIM_USER_EMAIL := UserEmail
+}) ->
+    {ok, {UserID, UserEmail}};
+extract_user_data(Payload) ->
+    RequiredKeys = [?CLAIM_USER_ID, ?CLAIM_USER_EMAIL],
+    {error, {missing, RequiredKeys -- maps:keys(Payload)}}.
+
+create_context(TokenID, TokenExpiration, UserID, UserEmail, UserRealm) ->
     Acc0 = bouncer_context_helpers:empty(),
     Acc1 = bouncer_context_helpers:add_user(
         #{
             id => UserID,
-            email => Email,
+            email => UserEmail,
             realm => #{id => UserRealm}
         },
         Acc0
     ),
-    Acc2 = bouncer_context_helpers:add_auth(
+    bouncer_context_helpers:add_auth(
         #{
             method => <<"SessionToken">>,
-            expiration => make_auth_expiration(Expiration),
+            expiration => make_auth_expiration(TokenExpiration),
             token => #{id => TokenID}
         },
         Acc1
-    ),
-    {Acc2,
-        make_metadata(
-            #{
-                user_id => UserID,
-                user_email => Email,
-                user_realm => UserRealm
-            },
-            Opts
-        )}.
-
-%% Internal functions
+    ).

 make_auth_expiration(Timestamp) when is_integer(Timestamp) ->
     genlib_rfc3339:format(Timestamp, second);
 make_auth_expiration(Expiration) when Expiration =:= unlimited ->
     undefined.

-make_metadata(Metadata, ExtractorOpts) ->
+create_metadata(UserID, UserEmail, UserRealm) ->
+    #{
+        user_id => UserID,
+        user_email => UserEmail,
+        user_realm => UserRealm
+    }.
+
+wrap_metadata(Metadata, ExtractorOpts) ->
     Mappings = maps:get(metadata_mappings, ExtractorOpts),
     tk_utils:remap(genlib_map:compact(Metadata), Mappings).
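make_auth_expiration/1, kept unchanged above, converts an integer Unix timestamp into an RFC 3339 string and maps the unlimited atom to undefined. A quick worked example (editorial sketch; the exact output string assumes genlib's seconds-precision UTC formatting):

```erlang
%% Sketch of the two clauses of make_auth_expiration/1:
%%   make_auth_expiration(1640995200) -> genlib_rfc3339:format(1640995200, second)
%%                                    -> <<"2022-01-01T00:00:00Z">>
%%   make_auth_expiration(unlimited)  -> undefined   % treated as "no expiration"
```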
@@ -50,7 +50,7 @@ handle_function('Create' = Op, {ContextFragment, Metadata}, Opts, State) ->
     _ = pulse_op_stated(Op, State),
     AuthDataPrototype = create_auth_data(ContextFragment, Metadata),
     Claims = tk_claim_utils:encode_authdata(AuthDataPrototype),
-    {ok, Token} = tk_token_jwt:issue(create_token_data(Claims, Opts)),
+    {ok, Token} = tk_token:issue(create_token_data(Claims, Opts)),
     EncodedAuthData = encode_auth_data(AuthDataPrototype#{token => Token}),
     _ = pulse_op_succeeded(Op, State),
     {ok, EncodedAuthData}.
@@ -65,7 +65,7 @@ handle_function('Create' = Op, {ID, ContextFragment, Metadata}, Opts, State) ->
     AuthData = create_auth_data(ID, ContextFragment, Metadata),
     case store(AuthData, Opts, get_context(State1)) of
         ok ->
-            {ok, Token} = tk_token_jwt:issue(create_token_data(ID, Opts)),
+            {ok, Token} = tk_token:issue(create_token_data(ID, Opts)),
             EncodedAuthData = encode_auth_data(AuthData#{token => Token}),
             _ = pulse_op_succeeded(Op, State1),
             {ok, EncodedAuthData};
tk_legacy_acl.erl (deleted, 141 lines)
@@ -1,141 +0,0 @@
-module(tk_legacy_acl).

%%

-opaque t() :: [{{priority(), scope()}, [permission()]}].

-type priority() :: integer().
-type unknown_scope() :: {unknown, binary()}.
-type known_scope() :: [resource() | {resource(), resource_id()}, ...].
-type scope() :: known_scope() | unknown_scope().
-type resource() :: atom().
-type resource_id() :: binary().
-type permission() :: read | write.
-type resource_hierarchy() :: map().

-export_type([t/0]).
-export_type([scope/0]).
-export_type([known_scope/0]).
-export_type([resource/0]).
-export_type([permission/0]).
-export_type([resource_hierarchy/0]).

-export([to_list/1]).
-export([decode/2]).

%%

-spec to_list(t()) -> [{scope(), [permission()]}].
to_list(ACL) ->
    [{S, P} || {{_, S}, P} <- ACL].

%%

-spec decode([binary()], resource_hierarchy()) -> t().
decode(BinaryACL, ResourceHierarchy) ->
    lists:foldl(
        fun(V, ACL) ->
            decode_entry(V, ACL, ResourceHierarchy)
        end,
        [],
        BinaryACL
    ).

decode_entry(V, ACL, ResourceHierarchy) ->
    case binary:split(V, <<":">>, [global]) of
        [V1, V2] ->
            Scope = decode_scope(V1, ResourceHierarchy),
            Permission = decode_permission(V2),
            insert_scope(Scope, Permission, ACL, ResourceHierarchy);
        _ ->
            throw({badarg, {role, V}})
    end.

decode_scope(V, ResourceHierarchy) ->
    try
        decode_scope_frags(binary:split(V, <<".">>, [global]), ResourceHierarchy)
    catch
        error:{badarg, _} ->
            {unknown, V}
    end.

decode_scope_frags([V1, V2 | Vs], H) ->
    {Resource, H1} = decode_scope_frag_resource(V1, V2, H),
    [Resource | decode_scope_frags(Vs, H1)];
decode_scope_frags([V], H) ->
    decode_scope_frags([V, <<"*">>], H);
decode_scope_frags([], _) ->
    [].

decode_scope_frag_resource(V, <<"*">>, H) ->
    R = decode_resource(V),
    {R, delve(R, H)};
decode_scope_frag_resource(V, ID, H) ->
    R = decode_resource(V),
    {{R, ID}, delve(R, H)}.

decode_resource(V) ->
    try
        binary_to_existing_atom(V, utf8)
    catch
        error:badarg ->
            throw({badarg, {resource, V}})
    end.

decode_permission(<<"read">>) ->
    read;
decode_permission(<<"write">>) ->
    write;
decode_permission(V) ->
    throw({badarg, {permission, V}}).

%%

-spec insert_scope(scope(), permission(), t(), resource_hierarchy()) -> t().
insert_scope({unknown, _} = Scope, Permission, ACL, _ResourceHierarchy) ->
    insert({{0, Scope}, [Permission]}, ACL);
insert_scope(Scope, Permission, ACL, ResourceHierarchy) ->
    Priority = compute_priority(Scope, ResourceHierarchy),
    insert({{Priority, Scope}, [Permission]}, ACL).

insert({PS, _} = V, [{PS0, _} = V0 | Vs]) when PS < PS0 ->
    [V0 | insert(V, Vs)];
insert({PS, Perms}, [{PS, Perms0} | Vs]) ->
    % NOTE squashing permissions of entries with the same scope
    [{PS, lists:usort(Perms ++ Perms0)} | Vs];
insert({PS, _} = V, [{PS0, _} | _] = Vs) when PS > PS0 ->
    [V | Vs];
insert(V, []) ->
    [V].

%%

compute_priority(Scope, ResourceHierarchy) ->
    % NOTE
    % Scope priority depends on the following attributes, in the order of decreasing
    % importance:
    % 1. Depth, deeper is more important
    % 2. Scope element specificity, element marked with an ID is more important
    compute_scope_priority(Scope, ResourceHierarchy).

compute_scope_priority(Scope, ResourceHierarchy) when length(Scope) > 0 ->
    compute_scope_priority(Scope, ResourceHierarchy, 0);
compute_scope_priority(Scope, _ResourceHierarchy) ->
    throw({badarg, {scope, Scope}}).

compute_scope_priority([{Resource, _ID} | Rest], H, P) ->
    compute_scope_priority(Rest, delve(Resource, H), P * 10 + 2);
compute_scope_priority([Resource | Rest], H, P) ->
    compute_scope_priority(Rest, delve(Resource, H), P * 10 + 1);
compute_scope_priority([], _, P) ->
    P * 10.

%%

delve(Resource, Hierarchy) ->
    case maps:find(Resource, Hierarchy) of
        {ok, Sub} ->
            Sub;
        error ->
            throw({badarg, {resource, Resource}})
    end.
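To make the priority NOTE in compute_priority/2 concrete, here is a worked example (added in editing, not part of the commit), using the hierarchy the invoice template extractor passed to this module:

```erlang
%% H = #{party => #{invoice_templates => #{invoice_template_invoices => #{}}}}.
%%
%% compute_scope_priority([party], H)
%%   -> (0 * 10 + 1) * 10            = 10   % single wildcard element
%% compute_scope_priority([party, invoice_templates], H)
%%   -> ((0 * 10 + 1) * 10 + 1) * 10 = 110  % deeper scope dominates
%% compute_scope_priority([party, {invoice_templates, <<"42">>}], H)
%%   -> ((0 * 10 + 1) * 10 + 2) * 10 = 120  % ID-qualified element scores 2, not 1
%%
%% insert/2 then keeps entries sorted by this priority in descending order,
%% squashing permissions for entries that share the same scope.
```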
@@ -119,6 +119,11 @@ deserialize(Type, Data) ->
 -spec marshal_unmarshal_created_test() -> _.
 -spec marshal_unmarshal_status_changed_test() -> _.

+-define(CONTEXT, #{
+    machine_ref => <<"TEST">>,
+    machine_ns => test
+}).
+
 marshal_unmarshal_created_test() ->
     Event =
         {created, #tk_events_AuthDataCreated{
@@ -127,8 +132,8 @@ marshal_unmarshal_created_test() ->
             context = #bctx_ContextFragment{type = v1_thrift_binary, content = <<"STUFF">>},
             metadata = #{}
         }},
-    {Marshaled, _} = marshal_event(1, Event, {}),
-    {Unmarshaled, _} = unmarshal_event(1, Marshaled, {}),
+    {Marshaled, _} = marshal_event(1, Event, ?CONTEXT),
+    {Unmarshaled, _} = unmarshal_event(1, Marshaled, ?CONTEXT),
     ?assertEqual(Event, Unmarshaled).

 marshal_unmarshal_status_changed_test() ->
@@ -136,8 +141,8 @@ marshal_unmarshal_status_changed_test() ->
         {status_changed, #tk_events_AuthDataStatusChanged{
             status = revoked
         }},
-    {Marshaled, _} = marshal_event(1, Event, {}),
-    {Unmarshaled, _} = unmarshal_event(1, Marshaled, {}),
+    {Marshaled, _} = marshal_event(1, Event, ?CONTEXT),
+    {Unmarshaled, _} = unmarshal_event(1, Marshaled, ?CONTEXT),
     ?assertEqual(Event, Unmarshaled).

 -endif.
@@ -27,7 +27,7 @@
     source := keysource()
 }.

--type authority_bindings() :: #{authority_id() => key_name()}.
+-type authority_bindings() :: #{key_name() => authority_id()}.
 -type keyset() :: #{key_name() => key_opts()}.

 -export_type([opts/0]).
@@ -83,35 +83,34 @@ init(#{keyset := KeySet, authority_bindings := AuthorityBindings}) ->
     | {error,
         {alg_not_supported, Alg :: atom()}
         | {key_not_found, KID :: atom()}
+        | {no_authority_for_keyname, KeyName :: binary()}
         | {invalid_token, Reason :: term()}
         | invalid_signature}.
 verify(Token, SourceContext) ->
     case do_verify(Token) of
         {ok, {Claims, KeyName}} ->
-            {ok, construct_token_data(Claims, SourceContext, get_authority_of_key_name(KeyName))};
+            case get_authority_of_key_name(KeyName) of
+                AuthorityID when AuthorityID =/= undefined ->
+                    {ok, construct_token_data(Claims, SourceContext, AuthorityID)};
+                undefined ->
+                    {error, {no_authority_for_keyname, KeyName}}
+            end;
         {error, _} = Error ->
             Error
     end.

 -spec issue(token_data()) ->
     {ok, token_string()}
-    | {error, issuing_not_supported | key_does_not_exist | authority_does_not_exist}.
+    | {error,
+        issuing_not_supported
+        | {key_does_not_exist, KeyName :: binary()}
+        | {authority_does_not_exist, AuthorityID :: binary()}}.
 issue(#{authority_id := AuthorityID} = TokenData) ->
     case get_key_name_of_authority(AuthorityID) of
         KeyName when KeyName =/= undefined ->
-            case get_key_by_name(KeyName) of
-                #{} = KeyInfo ->
-                    case key_supports_signing(KeyInfo) of
-                        true ->
-                            {ok, issue_with_key(KeyInfo, TokenData)};
-                        false ->
-                            {error, issuing_not_supported}
-                    end;
-                undefined ->
-                    {error, key_does_not_exist}
-            end;
+            issue_with_key(KeyName, TokenData);
         undefined ->
-            {error, authority_does_not_exist}
+            {error, {authority_does_not_exist, AuthorityID}}
     end.

 %% Internal functions
@@ -259,16 +258,18 @@ decode_expiration(Expiration) when is_integer(Expiration) ->

 %% Signing

-key_supports_signing(#{signer := #{}}) ->
-    true;
-key_supports_signing(#{signer := undefined}) ->
-    false.
-
-issue_with_key(#{key_id := KeyID, jwk := JWK, signer := #{} = JWS}, TokenData) ->
-    Claims = construct_claims(TokenData),
-    JWT = jose_jwt:sign(JWK, JWS#{<<"kid">> => KeyID}, Claims),
-    {_Modules, Token} = jose_jws:compact(JWT),
-    Token.
+issue_with_key(KeyName, TokenData) ->
+    case get_key_by_name(KeyName) of
+        #{key_id := KeyID, jwk := JWK, signer := #{} = JWS} ->
+            Claims = construct_claims(TokenData),
+            JWT = jose_jwt:sign(JWK, JWS#{<<"kid">> => KeyID}, Claims),
+            {_Modules, Token} = jose_jws:compact(JWT),
+            {ok, Token};
+        #{key_id := _, jwk := _, signer := undefined} ->
+            {error, {issuing_not_supported, KeyName}};
+        undefined ->
+            {error, {key_does_not_exist, KeyName}}
+    end.

 construct_claims(#{id := TokenID, expiration := Expiration, payload := Claims}) ->
     maps:map(fun encode_claim/2, Claims#{
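Note that authority_bindings is now keyed by key name rather than by authority ID. A configuration sketch of the new shape (names reuse the test suite's ?TK_KEY_*/?TK_AUTHORITY_* values; the keysource() value shape is an assumption, not defined anywhere in this diff):

```erlang
%% Illustrative options for this module, under the assumptions above.
#{
    keyset => #{
        %% key name => key_opts(); the `source` value shape is hypothetical
        <<"test.rbkmoney.key.capi">> => #{source => {pem_file, "var/keys/capi.pem"}}
    },
    %% key name => authority ID (previously authority ID => key name)
    authority_bindings => #{
        <<"test.rbkmoney.key.capi">> => <<"test.rbkmoney.capi">>
    }
}.
```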
@@ -12,7 +12,6 @@
         snowflake,
         token_keeper_proto,
         bouncer_client,
-        how_are_you,
         erl_health,
         machinery,
         yamerl
@@ -20,13 +20,10 @@

-export([authenticate_invalid_token_type_fail/1]).
-export([authenticate_invalid_token_key_fail/1]).
-export([authenticate_no_payload_claims_fail/1]).
-export([authenticate_user_session_token_no_payload_claims_fail/1]).
-export([authenticate_phony_api_key_token_ok/1]).
-export([authenticate_user_session_token_ok/1]).
-export([authenticate_invoice_template_access_token_ok/1]).
-export([authenticate_invoice_template_access_token_no_access/1]).
-export([authenticate_invoice_template_access_token_invalid_access/1]).
-export([authenticate_claim_token_no_context_fail/1]).
-export([authenticate_legacy_claim_token_ok/1]).
-export([authenticate_blacklisted_jti_fail/1]).
-export([authenticate_non_blacklisted_jti_ok/1]).
-export([authenticate_ephemeral_claim_token_ok/1]).
@@ -41,7 +38,7 @@
-export([revoke_authdata_by_id_ok/1]).
-export([revoke_authdata_by_id_not_found_fail/1]).

-type config() :: ct_helper:config().
-type config() :: [{atom(), any()}].
-type group_name() :: atom().
-type test_case_name() :: atom().

@@ -63,17 +60,19 @@
-define(TK_AUTHORITY_APIKEYMGMT, <<"test.rbkmoney.apikeymgmt">>).
-define(TK_AUTHORITY_CAPI, <<"test.rbkmoney.capi">>).

-define(TK_KEY_KEYCLOAK, <<"test.rbkmoney.key.keycloak">>).
-define(TK_KEY_APIKEYMGMT, <<"test.rbkmoney.key.apikeymgmt">>).
-define(TK_KEY_CAPI, <<"test.rbkmoney.key.capi">>).

-define(TK_RESOURCE_DOMAIN, <<"test-domain">>).

%%

-spec all() -> [atom()].
-spec all() -> [{group, group_name()}].

all() ->
    [
        {group, external_detect_token},
        {group, external_invoice_template_access_token},
        {group, external_legacy_claim},
        {group, blacklist},
        {group, ephemeral},
        {group, offline}
@@ -85,26 +84,22 @@ groups() ->
    {external_detect_token, [parallel], [
        authenticate_invalid_token_type_fail,
        authenticate_invalid_token_key_fail,
        authenticate_no_payload_claims_fail,
        authenticate_user_session_token_no_payload_claims_fail,
        authenticate_phony_api_key_token_ok,
        authenticate_user_session_token_ok
    ]},
    {external_invoice_template_access_token, [parallel], [
    {ephemeral, [parallel], [
        authenticate_invalid_token_type_fail,
        authenticate_invalid_token_key_fail,
        authenticate_invoice_template_access_token_ok,
        authenticate_invoice_template_access_token_no_access,
        authenticate_invoice_template_access_token_invalid_access
    ]},
    {external_legacy_claim, [parallel], [
        authenticate_claim_token_no_context_fail,
        authenticate_legacy_claim_token_ok
    ]},
    {ephemeral, [parallel], [
        authenticate_claim_token_no_context_fail,
        authenticate_no_payload_claims_fail,
        authenticate_ephemeral_claim_token_ok,
        issue_ephemeral_token_ok
    ]},
    {offline, [parallel], [
        authenticate_invalid_token_type_fail,
        authenticate_invalid_token_key_fail,
        authenticate_no_payload_claims_fail,
        authenticate_offline_token_not_found_fail,
        authenticate_offline_token_revoked_fail,
        authenticate_offline_token_ok,
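The suite is now organized purely around these groups (all/0 returns {group, _} tuples only). For instance, a single group can be exercised from an Erlang shell via common_test; the suite module name here is an assumption, since the file name is not visible in this diff:

    %% Run only the ephemeral group of the (assumed) suite module.
    ct:run_test([{suite, token_keeper_SUITE}, {group, ephemeral}]).
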
@@ -133,14 +128,14 @@ init_per_suite(C) ->
-spec end_per_suite(config()) -> ok.
end_per_suite(C) ->
    genlib_app:stop_unload_applications(?CONFIG(suite_apps, C)).

%% @TODO Pending configurator
% @TODO Pending configurator
-spec init_per_group(group_name(), config()) -> config().
init_per_group(external_detect_token = Name, C) ->
    AuthenticatorPath = <<"/v2/authenticator">>,
    C0 = start_keeper([
        {authenticator, #{
            service => #{
                path => <<"/v2/authenticator">>
                path => AuthenticatorPath
            },
            authorities => #{
                ?TK_AUTHORITY_KEYCLOAK =>
@@ -152,10 +147,10 @@ init_per_group(external_detect_token = Name, C) ->
        {tokens, #{
            jwt => #{
                authority_bindings => #{
                    ?TK_AUTHORITY_KEYCLOAK => ?TK_AUTHORITY_KEYCLOAK
                    ?TK_KEY_KEYCLOAK => ?TK_AUTHORITY_KEYCLOAK
                },
                keyset => #{
                    ?TK_AUTHORITY_KEYCLOAK => #{
                    ?TK_KEY_KEYCLOAK => #{
                        source => {pem_file, get_filename("keys/local/public.pem", C)}
                    }
                }
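This hunk decouples key names from authority IDs in the token config: keyset entries are now keyed by key name, and authority_bindings maps each key name to the authority it authenticates for. A sketch of the resulting shape with placeholder values (the binaries and the PEM path are illustrative only):

    {tokens, #{
        jwt => #{
            authority_bindings => #{
                <<"some.key.name">> => <<"some.authority.id">>
            },
            keyset => #{
                <<"some.key.name">> => #{
                    source => {pem_file, "path/to/key.pem"}
                }
            }
        }
    }}
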
@@ -163,122 +158,58 @@ init_per_group(external_detect_token = Name, C) ->
        }}
    ]),
    ServiceUrls = #{
        token_authenticator => mk_url(<<"/v2/authenticator">>)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C];
init_per_group(external_invoice_template_access_token = Name, C) ->
    C0 = start_keeper([
        {authenticator, #{
            service => #{
                path => <<"/v2/authenticator">>
            },
            authorities => #{
                ?TK_AUTHORITY_CAPI =>
                    #{
                        sources => [extract_method_invoice_tpl_token()]
                    }
            }
        }},
        {tokens, #{
            jwt => #{
                authority_bindings => #{
                    ?TK_AUTHORITY_CAPI => ?TK_AUTHORITY_CAPI
                },
                keyset => #{
                    ?TK_AUTHORITY_CAPI => #{
                        source => {pem_file, get_filename("keys/local/public.pem", C)}
                    }
                }
            }
        }}
    ]),
    ServiceUrls = #{
        token_authenticator => mk_url(<<"/v2/authenticator">>)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C];
init_per_group(external_legacy_claim = Name, C) ->
    C0 = start_keeper([
        {authenticator, #{
            service => #{
                path => <<"/v2/authenticator">>
            },
            authorities => #{
                ?TK_AUTHORITY_CAPI =>
                    #{
                        sources => [
                            {legacy_claim, #{
                                metadata_mappings => #{
                                    party_id => ?META_PARTY_ID,
                                    consumer => ?META_CAPI_CONSUMER
                                }
                            }}
                        ]
                    }
            }
        }},
        {tokens, #{
            jwt => #{
                authority_bindings => #{
                    ?TK_AUTHORITY_CAPI => ?TK_AUTHORITY_CAPI
                },
                keyset => #{
                    ?TK_AUTHORITY_CAPI => #{
                        source => {pem_file, get_filename("keys/local/public.pem", C)}
                    }
                }
            }
        }}
    ]),
    ServiceUrls = #{
        token_authenticator => mk_url(<<"/v2/authenticator">>)
        token_authenticator => mk_url(AuthenticatorPath)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C];
init_per_group(blacklist = Name, C) ->
    C0 = start_keeper(
        [
            {authenticator, #{
                service => #{
                    path => <<"/v2/authenticator">>
                },
                authorities => #{
                    <<"blacklisting_authority">> =>
                        #{
                            sources => [extract_method_detect_token()]
                        },
                    ?TK_AUTHORITY_CAPI =>
                        #{
                            sources => [extract_method_detect_token()]
                        }
                }
            }},
            {tokens, #{
                jwt => #{
                    authority_bindings => #{
                        <<"blacklisting_authority">> => <<"blacklisting_authority">>,
                        ?TK_AUTHORITY_CAPI => ?TK_AUTHORITY_CAPI
                    },
                    keyset => #{
                        <<"blacklisting_authority">> => #{
                            source => {pem_file, get_filename("keys/local/private.pem", C)}
                        },
                        ?TK_AUTHORITY_CAPI => #{
                            source => {pem_file, get_filename("keys/secondary/private.pem", C)}
                        }
                    }
                }
            }}
        ],
        get_filename("blacklisted_keys.yaml", C)
    ),
    ServiceUrls = #{
        token_authenticator => mk_url(<<"/v2/authenticator">>)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C];
init_per_group(ephemeral = Name, C) ->
    AuthenticatorPath = <<"/v2/authenticator">>,
    C0 = start_keeper([
        {authenticator, #{
            service => #{
                path => <<"/v2/authenticator">>
                path => AuthenticatorPath
            },
            authorities => #{
                <<"blacklisting_authority">> =>
                    #{
                        sources => [extract_method_detect_token()]
                    },
                ?TK_AUTHORITY_CAPI =>
                    #{
                        sources => [extract_method_detect_token()]
                    }
            }
        }},
        {tokens, #{
            jwt => #{
                authority_bindings => #{
                    <<"blacklisting_authority.key">> => <<"blacklisting_authority">>,
                    ?TK_KEY_CAPI => ?TK_AUTHORITY_CAPI
                },
                keyset => #{
                    <<"blacklisting_authority.key">> => #{
                        source => {pem_file, get_filename("keys/local/private.pem", C)}
                    },
                    ?TK_KEY_CAPI => #{
                        source => {pem_file, get_filename("keys/secondary/private.pem", C)}
                    }
                }
            }
        }},
        {blacklist, #{
            path => get_filename("blacklisted_keys.yaml", C)
        }}
    ]),
    ServiceUrls = #{
        token_authenticator => mk_url(AuthenticatorPath)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C];
init_per_group(ephemeral = Name, C) ->
    AuthenticatorPath = <<"/v2/authenticator">>,
    AuthorityPath = <<"/v2/authority/com.rbkmoney.access.capi">>,
    C0 = start_keeper([
        {authenticator, #{
            service => #{
                path => AuthenticatorPath
            },
            authorities => #{
                ?TK_AUTHORITY_CAPI => #{
@@ -292,7 +223,7 @@ init_per_group(ephemeral = Name, C) ->
        ?TK_AUTHORITY_CAPI =>
            #{
                service => #{
                    path => <<"/v2/authority/com.rbkmoney.access.capi">>
                    path => AuthorityPath
                },
                type =>
                    {ephemeral, #{
@@ -305,10 +236,10 @@ init_per_group(ephemeral = Name, C) ->
        {tokens, #{
            jwt => #{
                authority_bindings => #{
                    ?TK_AUTHORITY_CAPI => ?TK_AUTHORITY_CAPI
                    ?TK_KEY_CAPI => ?TK_AUTHORITY_CAPI
                },
                keyset => #{
                    ?TK_AUTHORITY_CAPI => #{
                    ?TK_KEY_CAPI => #{
                        source => {pem_file, get_filename("keys/local/private.pem", C)}
                    }
                }
@@ -316,15 +247,17 @@ init_per_group(ephemeral = Name, C) ->
        }}
    ]),
    ServiceUrls = #{
        token_authenticator => mk_url(<<"/v2/authenticator">>),
        {token_ephemeral_authority, ?TK_AUTHORITY_CAPI} => mk_url(<<"/v2/authority/com.rbkmoney.access.capi">>)
        token_authenticator => mk_url(AuthenticatorPath),
        {token_ephemeral_authority, ?TK_AUTHORITY_CAPI} => mk_url(AuthorityPath)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C];
init_per_group(offline = Name, C) ->
    AuthenticatorPath = <<"/v2/authenticator">>,
    AuthorityPath = <<"/v2/authority/com.rbkmoney.apikemgmt">>,
    C0 = start_keeper([
        {authenticator, #{
            service => #{
                path => <<"/v2/authenticator">>
                path => AuthenticatorPath
            },
            authorities => #{
                ?TK_AUTHORITY_APIKEYMGMT =>
@@ -341,7 +274,7 @@ init_per_group(offline = Name, C) ->
        ?TK_AUTHORITY_APIKEYMGMT =>
            #{
                service => #{
                    path => <<"/v2/authority/com.rbkmoney.apikemgmt">>
                    path => AuthorityPath
                },
                type =>
                    {offline, #{
@@ -357,10 +290,10 @@ init_per_group(offline = Name, C) ->
        {tokens, #{
            jwt => #{
                authority_bindings => #{
                    ?TK_AUTHORITY_APIKEYMGMT => ?TK_AUTHORITY_APIKEYMGMT
                    ?TK_KEY_APIKEYMGMT => ?TK_AUTHORITY_APIKEYMGMT
                },
                keyset => #{
                    ?TK_AUTHORITY_APIKEYMGMT => #{
                    ?TK_KEY_APIKEYMGMT => #{
                        source => {pem_file, get_filename("keys/local/private.pem", C)}
                    }
                }
@@ -379,8 +312,8 @@ init_per_group(offline = Name, C) ->
        }}
    ]),
    ServiceUrls = #{
        token_authenticator => mk_url(<<"/v2/authenticator">>),
        {token_authority, ?TK_AUTHORITY_APIKEYMGMT} => mk_url(<<"/v2/authority/com.rbkmoney.apikemgmt">>)
        token_authenticator => mk_url(AuthenticatorPath),
        {token_authority, ?TK_AUTHORITY_APIKEYMGMT} => mk_url(AuthorityPath)
    },
    [{groupname, Name}, {service_urls, ServiceUrls} | C0 ++ C].

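Endpoint paths are now bound once per group and reused for both the keeper config and the service URL map, so the two can no longer drift apart. Per-authority endpoints live under composite keys such as {token_authority, AuthorityID}. A hypothetical lookup, assuming service_urls is read back from the test config assembled above (the helper itself is not in this diff):

    %% Assumed helper: resolve the URL of an offline authority endpoint.
    authority_url(AuthorityID, C) ->
        maps:get({token_authority, AuthorityID}, ?CONFIG(service_urls, C)).
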
@@ -393,7 +326,7 @@ end_per_group(_GroupName, C) ->
init_per_testcase(Name, C) ->
    [{testcase, Name} | C].

-spec end_per_testcase(atom(), config()) -> config().
-spec end_per_testcase(atom(), config()) -> ok.
end_per_testcase(_Name, _C) ->
    ok.

@@ -409,6 +342,13 @@ authenticate_invalid_token_key_fail(C) ->
    Token = issue_dummy_token(C),
    ?assertThrow(#token_keeper_InvalidToken{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).

-spec authenticate_no_payload_claims_fail(config()) -> _.
authenticate_no_payload_claims_fail(C) ->
    JTI = unique_id(),
    Claims = get_base_claims(JTI),
    Token = issue_token(Claims, C),
    ?assertThrow(#token_keeper_AuthDataNotFound{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).

-spec authenticate_phony_api_key_token_ok(config()) -> _.
authenticate_phony_api_key_token_ok(C) ->
    JTI = unique_id(),
@@ -446,45 +386,15 @@ authenticate_user_session_token_ok(C) ->
    } = call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT(?USER_TOKEN_SOURCE), C),
    _ = assert_context({user_session_token, JTI, SubjectID, SubjectEmail, unlimited}, Context).

-spec authenticate_invoice_template_access_token_ok(config()) -> _.
authenticate_invoice_template_access_token_ok(C) ->
-spec authenticate_user_session_token_no_payload_claims_fail(config()) -> _.
authenticate_user_session_token_no_payload_claims_fail(C) ->
    JTI = unique_id(),
    InvoiceTemplateID = unique_id(),
    SubjectID = unique_id(),
    Claims = get_invoice_access_template_token_claims(JTI, SubjectID, InvoiceTemplateID),
    Claims = get_base_claims(JTI),
    Token = issue_token(Claims, C),
    #token_keeper_AuthData{
        id = undefined,
        token = Token,
        status = active,
        context = Context,
        metadata = #{?META_PARTY_ID := SubjectID},
        authority = ?TK_AUTHORITY_CAPI
    } = call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C),
    _ = assert_context({invoice_template_access_token, JTI, SubjectID, InvoiceTemplateID}, Context).

-spec authenticate_invoice_template_access_token_no_access(config()) -> _.
authenticate_invoice_template_access_token_no_access(C) ->
    JTI = unique_id(),
    SubjectID = unique_id(),
    Claims = get_resource_access_claims(JTI, SubjectID, #{}),
    Token = issue_token(Claims, C),
    ?assertThrow(#token_keeper_AuthDataNotFound{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).

-spec authenticate_invoice_template_access_token_invalid_access(config()) -> _.
authenticate_invoice_template_access_token_invalid_access(C) ->
    JTI = unique_id(),
    InvoiceID = unique_id(),
    SubjectID = unique_id(),
    Claims = get_resource_access_claims(JTI, SubjectID, #{
        ?TK_RESOURCE_DOMAIN => #{
            <<"roles">> => [
                <<"invoices.", InvoiceID/binary, ":read">>
            ]
        }
    }),
    Token = issue_token(Claims, C),
    ?assertThrow(#token_keeper_AuthDataNotFound{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).
    ?assertThrow(
        #token_keeper_AuthDataNotFound{},
        call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT(?USER_TOKEN_SOURCE), C)
    ).

-spec authenticate_blacklisted_jti_fail(config()) -> _.
authenticate_blacklisted_jti_fail(C) ->
@@ -502,32 +412,6 @@ authenticate_non_blacklisted_jti_ok(C) ->
    Token = issue_token_with(Claims, get_filename("keys/secondary/private.pem", C)),
    ?assertMatch(#token_keeper_AuthData{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).

-spec authenticate_claim_token_no_context_fail(config()) -> _.
authenticate_claim_token_no_context_fail(C) ->
    JTI = unique_id(),
    SubjectID = unique_id(),
    Claims = get_base_claims(JTI, SubjectID),
    Token = issue_token(Claims, C),
    ?assertThrow(#token_keeper_AuthDataNotFound{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).

-spec authenticate_legacy_claim_token_ok(config()) -> _.
authenticate_legacy_claim_token_ok(C) ->
    JTI = unique_id(),
    SubjectID = unique_id(),
    ContextFragment = create_encoded_bouncer_context(JTI),
    Consumer = <<"client">>,
    Claims = get_claim_token_claims(JTI, SubjectID, ContextFragment, undefined, Consumer),
    Token = issue_token(Claims, C),
    #token_keeper_AuthData{
        id = undefined,
        token = Token,
        status = active,
        context = Context,
        metadata = #{?META_PARTY_ID := SubjectID, ?META_CAPI_CONSUMER := Consumer},
        authority = ?TK_AUTHORITY_CAPI
    } = call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C),
    _ = assert_context({claim_token, JTI}, Context).

-spec authenticate_ephemeral_claim_token_ok(config()) -> _.
authenticate_ephemeral_claim_token_ok(C) ->
    JTI = unique_id(),
@@ -566,8 +450,7 @@ issue_ephemeral_token_ok(C) ->
-spec authenticate_offline_token_not_found_fail(config()) -> _.
authenticate_offline_token_not_found_fail(C) ->
    JTI = unique_id(),
    SubjectID = unique_id(),
    Claims = get_base_claims(JTI, SubjectID),
    Claims = get_base_claims(JTI),
    Token = issue_token(Claims, C),
    ?assertThrow(#token_keeper_AuthDataNotFound{}, call_authenticate(Token, ?TOKEN_SOURCE_CONTEXT, C)).

@@ -687,35 +570,17 @@ revoke_authdata_by_id_not_found_fail(C) ->

%%

get_base_claims(JTI, SubjectID) ->
get_base_claims(JTI) ->
    #{
        <<"jti">> => JTI,
        <<"sub">> => SubjectID,
        <<"exp">> => 0
    }.

get_phony_api_key_claims(JTI, SubjectID) ->
    get_base_claims(JTI, SubjectID).
    maps:merge(#{<<"sub">> => SubjectID}, get_base_claims(JTI)).

get_user_session_token_claims(JTI, SubjectID, SubjectEmail) ->
    maps:merge(#{<<"email">> => SubjectEmail}, get_base_claims(JTI, SubjectID)).

get_resource_access_claims(JTI, SubjectID, ResourceAccess) ->
    maps:merge(#{<<"resource_access">> => ResourceAccess}, get_base_claims(JTI, SubjectID)).

get_invoice_access_template_token_claims(JTI, SubjectID, InvoiceTemplateID) ->
    get_resource_access_claims(
        JTI,
        SubjectID,
        #{
            ?TK_RESOURCE_DOMAIN => #{
                <<"roles">> => [
                    <<"party.*.invoice_templates.", InvoiceTemplateID/binary, ".invoice_template_invoices:write">>,
                    <<"party.*.invoice_templates.", InvoiceTemplateID/binary, ":read">>
                ]
            }
        }
    ).
    maps:merge(#{<<"sub">> => SubjectID, <<"email">> => SubjectEmail}, get_base_claims(JTI)).

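get_base_claims/1 now produces only the claims every token shares (jti and exp); subject-specific claims are merged in per token flavour with maps:merge/2. For example, with the helpers above, get_user_session_token_claims(<<"42">>, <<"party">>, <<"u@example">>) evaluates to:

    #{
        <<"jti">> => <<"42">>,
        <<"sub">> => <<"party">>,
        <<"email">> => <<"u@example">>,
        <<"exp">> => 0
    }
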
create_bouncer_context(JTI) ->
    bouncer_context_helpers:add_auth(
@@ -733,19 +598,6 @@ create_encoded_bouncer_context(JTI) ->
        content = encode_context_fragment_content(Fragment)
    }.

get_claim_token_claims(JTI, SubjectID, #bctx_ContextFragment{content = FragmentContent}, Metadata, Consumer) ->
    genlib_map:compact(#{
        <<"jti">> => JTI,
        <<"sub">> => SubjectID,
        <<"bouncer_ctx">> => #{
            <<"ty">> => <<"v1_thrift_binary">>,
            <<"ct">> => base64:encode(FragmentContent)
        },
        <<"tk_metadata">> => Metadata,
        <<"cons">> => Consumer,
        <<"exp">> => 0
    }).

%%

mk_client(C) ->
@@ -827,18 +679,6 @@ assert_auth({api_key_token, JTI, SubjectID}, Auth) ->
    ?assertEqual(<<"ApiKeyToken">>, Auth#bctx_v1_Auth.method),
    ?assertMatch(#bctx_v1_Token{id = JTI}, Auth#bctx_v1_Auth.token),
    ?assertMatch([#bctx_v1_AuthScope{party = ?CTX_ENTITY(SubjectID)}], Auth#bctx_v1_Auth.scope);
assert_auth({invoice_template_access_token, JTI, SubjectID, InvoiceTemplateID}, Auth) ->
    ?assertEqual(<<"InvoiceTemplateAccessToken">>, Auth#bctx_v1_Auth.method),
    ?assertMatch(#bctx_v1_Token{id = JTI}, Auth#bctx_v1_Auth.token),
    ?assertMatch(
        [
            #bctx_v1_AuthScope{
                party = ?CTX_ENTITY(SubjectID),
                invoice_template = ?CTX_ENTITY(InvoiceTemplateID)
            }
        ],
        Auth#bctx_v1_Auth.scope
    );
assert_auth({user_session_token, JTI, _SubjectID, _SubjectEmail, Exp}, Auth) ->
    ?assertEqual(<<"SessionToken">>, Auth#bctx_v1_Auth.method),
    ?assertMatch(#bctx_v1_Token{id = JTI}, Auth#bctx_v1_Auth.token),
@@ -848,15 +688,11 @@ assert_user({claim_token, _}, undefined) ->
    ok;
assert_user({api_key_token, _, _}, undefined) ->
    ok;
assert_user({invoice_template_access_token, _, _, _}, undefined) ->
    ok;
assert_user({user_session_token, _JTI, SubjectID, SubjectEmail, _Exp}, User) ->
    ?assertEqual(SubjectID, User#bctx_v1_User.id),
    ?assertEqual(SubjectEmail, User#bctx_v1_User.email),
    ?assertEqual(?CTX_ENTITY(<<"external">>), User#bctx_v1_User.realm).

make_auth_expiration(Timestamp) when is_integer(Timestamp) ->
    genlib_rfc3339:format(Timestamp, second);
make_auth_expiration(unlimited) ->
    undefined.

@@ -905,19 +741,12 @@ unique_id() ->
    genlib_format:format_int_base(ID, 62).

%%

start_keeper(Authorities) ->
    start_keeper(Authorities, undefined).

start_keeper(Env, BlacklistPath) ->
start_keeper(Env) ->
    Port = 8022,
    Apps = genlib_app:start_application_with(
        token_keeper,
        [
            {port, Port},
            {blacklist, #{
                path => BlacklistPath
            }},
            {machinegun, #{
                processor => #{
                    path => <<"/v2/stateproc">>
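start_keeper/1 drops the separate blacklist argument: when a group needs a blacklist, it now rides along as an ordinary Env entry, exactly like the other application settings. A call then looks like the one in the ephemeral group above, e.g.:

    C0 = start_keeper([
        {blacklist, #{
            path => get_filename("blacklisted_keys.yaml", C)
        }}
    ]),
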
@@ -957,15 +786,3 @@ extract_method_detect_token() ->
            }}
        ]
    }}.

extract_method_invoice_tpl_token() ->
    {extract_context, #{
        methods => [
            {invoice_template_access_token, #{
                domain => ?TK_RESOURCE_DOMAIN,
                metadata_mappings => #{
                    party_id => ?META_PARTY_ID
                }
            }}
        ]
    }}.