Mirror of https://github.com/valitydev/api-key-mgmt-v2.git, synced 2024-11-06 02:15:19 +00:00

TD-635: Add Api Key Management implementation (#1)

* TD-635: Add Api Key Management implementation
* Start working on issuing api keys
* IssueApiKey implementation
* Continue work on IssueApiKey
* Review fixes

parent 2ff5c62a0c
commit 670db5c97a
.dockerignore (new file, 7 lines)
@@ -0,0 +1,7 @@
/_build/
/.git/
/.github/
/.vscode/
/.idea/
erl_crash.dump
rebar3.crashdump
.env (new file, 5 lines)
@@ -0,0 +1,5 @@
SERVICE_NAME=api-key-mgmt-v2
OTP_VERSION=24.2.0
REBAR_VERSION=3.18
THRIFT_VERSION=0.14.2.3
DATABASE_URL=postgresql://postgres:postgres@db/apikeymgmtv2
.github/workflows/CODEOWNERS (vendored, new file, 1 line)
@@ -0,0 +1 @@
* @empayre/appsec
.github/workflows/basic-linters.yml (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
name: Vality basic linters

on:
  pull_request:
    branches:
      - "*"

jobs:
  lint:
    uses: valitydev/base-workflows/.github/workflows/basic-linters.yml@v1
.github/workflows/build-image.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
name: Build and publish Docker image

on:
  push:
    branches:
      - 'master'
      - 'epic/**'
  pull_request:
    branches: ['**']

env:
  REGISTRY: ghcr.io

jobs:
  build-push:
    runs-on: ubuntu-latest
    steps:
      - uses: valitydev/action-deploy-docker@v2
        with:
          registry-username: ${{ github.actor }}
          registry-access-token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/erlang-checks.yml (vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
name: Erlang CI Checks

on:
  push:
    branches:
      - 'master'
      - 'epic/**'
  pull_request:
    branches: ['**']

jobs:
  setup:
    name: Load .env
    runs-on: ubuntu-latest
    outputs:
      otp-version: ${{ steps.otp-version.outputs.version }}
      rebar-version: ${{ steps.rebar-version.outputs.version }}
      thrift-version: ${{ steps.thrift-version.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - run: grep -v '^#' .env >> $GITHUB_ENV
      - id: otp-version
        run: echo "::set-output name=version::$OTP_VERSION"
      - id: rebar-version
        run: echo "::set-output name=version::$REBAR_VERSION"
      - id: thrift-version
        run: echo "::set-output name=version::$THRIFT_VERSION"

  run:
    name: Run checks
    needs: setup
    uses: valitydev/erlang-workflows/.github/workflows/erlang-parallel-build.yml@v1.0.12
    with:
      otp-version: ${{ needs.setup.outputs.otp-version }}
      rebar-version: ${{ needs.setup.outputs.rebar-version }}
      use-thrift: true
      thrift-version: ${{ needs.setup.outputs.thrift-version }}
      run-ct-with-compose: true
      cache-version: v1
.gitignore (vendored, modified)
@@ -1,17 +1,18 @@
.eunit
*.o
*.beam
*.plt
# general
log
/_build/
/_checkouts/
*~
erl_crash.dump
.concrete/DEV_MODE
rebar3.crashdump
.tags*
*.sublime-workspace
.edts
.DS_Store
/.idea/
*.beam
/test/log/

# rebar 2.x
.rebar
rel/example_project
ebin/*.beam
deps

# rebar 3
.rebar3
_build/
_checkouts/
tags
.image.dev
bin
.gitmodules (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
[submodule "psql-migration"]
	path = psql-migration
	url = https://github.com/helium/psql-migration.git
Dockerfile (new file, 44 lines)
@@ -0,0 +1,44 @@
ARG OTP_VERSION

# Build the release
FROM docker.io/library/erlang:${OTP_VERSION} AS builder
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install thrift compiler
ARG THRIFT_VERSION
ARG TARGETARCH
RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${TARGETARCH}.tar.gz" \
    | tar -xvz -C /usr/local/bin/

# Hack ssh fetch and copy sources
ARG FETCH_TOKEN
RUN git config --global url."https://${FETCH_TOKEN}@github.com/".insteadOf ssh://git@github.com/ ;\
    mkdir /build
COPY . /build/

# Build the release
WORKDIR /build
RUN rebar3 compile && \
    rebar3 as prod release

# Make a runner image
FROM docker.io/library/erlang:${OTP_VERSION}-slim

ARG SERVICE_NAME

# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8

# Set runtime
WORKDIR /opt/${SERVICE_NAME}

COPY --from=builder /build/_build/prod/rel/${SERVICE_NAME} /opt/${SERVICE_NAME}

RUN echo "#!/bin/sh" >> /entrypoint.sh && \
    echo "exec /opt/${SERVICE_NAME}/bin/${SERVICE_NAME} foreground" >> /entrypoint.sh && \
    chmod +x /entrypoint.sh
ENTRYPOINT []
CMD ["/entrypoint.sh"]

EXPOSE 8022
Dockerfile.dev (new file, 17 lines)
@@ -0,0 +1,17 @@
ARG OTP_VERSION

FROM docker.io/library/erlang:${OTP_VERSION}
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install thrift compiler
ARG THRIFT_VERSION
ARG TARGETARCH
RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${TARGETARCH}.tar.gz" \
    | tar -xvz -C /usr/local/bin/

# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8

# Set runtime
CMD ["/bin/bash"]
LICENSE (modified)
@@ -174,28 +174,3 @@
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Makefile (new file, 120 lines)
@@ -0,0 +1,120 @@
# HINT
# Put overrides for the variables below into `Makefile.env`.
# For example, to run with podman, put `DOCKER=podman` there.
-include Makefile.env

# NOTE
# Variables specified in the `.env` file are used to pick and set up specific
# component versions, both when building a development image and when running
# CI workflows on GH Actions. This ensures that tasks run with the `wc-` prefix
# (like `wc-dialyze`) are reproducible between a local machine and CI runners.
DOTENV := $(shell grep -v '^\#' .env)

# Development images
DEV_IMAGE_TAG = $(TEST_CONTAINER_NAME)-dev
DEV_IMAGE_ID = $(file < .image.dev)

DOCKER ?= docker
DOCKERCOMPOSE ?= docker-compose
DOCKERCOMPOSE_W_ENV = DEV_IMAGE_TAG=$(DEV_IMAGE_TAG) $(DOCKERCOMPOSE)
REBAR ?= rebar3
TEST_CONTAINER_NAME ?= testrunner

all: compile

.PHONY: dev-image clean-dev-image wc-shell test

dev-image: .image.dev

get-submodules:
	git submodule init
	git submodule update

.image.dev: get-submodules Dockerfile.dev .env
	env $(DOTENV) $(DOCKERCOMPOSE_W_ENV) build $(TEST_CONTAINER_NAME)
	$(DOCKER) image ls -q -f "reference=$(DEV_IMAGE_ID)" | head -n1 > $@

clean-dev-image:
ifneq ($(DEV_IMAGE_ID),)
	$(DOCKER) image rm -f $(DEV_IMAGE_TAG)
	rm .image.dev
endif

DOCKER_WC_OPTIONS := -v $(PWD):$(PWD) --workdir $(PWD)
DOCKER_WC_EXTRA_OPTIONS ?= --rm
DOCKER_RUN = $(DOCKER) run -t $(DOCKER_WC_OPTIONS) $(DOCKER_WC_EXTRA_OPTIONS)

DOCKERCOMPOSE_RUN = $(DOCKERCOMPOSE_W_ENV) run --rm $(DOCKER_WC_OPTIONS)

# Utility tasks

wc-shell: dev-image
	$(DOCKER_RUN) --interactive --tty $(DEV_IMAGE_TAG)

wc-%: dev-image
	$(DOCKER_RUN) $(DEV_IMAGE_TAG) make $*

wdeps-shell: dev-image
	$(DOCKERCOMPOSE_RUN) $(TEST_CONTAINER_NAME) su; \
	$(DOCKERCOMPOSE_W_ENV) down

wdeps-%: dev-image
	$(DOCKERCOMPOSE_RUN) -T $(TEST_CONTAINER_NAME) make $(if $(MAKE_ARGS),$(MAKE_ARGS) $*,$*); \
	res=$$?; \
	$(DOCKERCOMPOSE_W_ENV) down; \
	exit $$res

# Submodules tasks

make_psql_migration:
	make -C psql-migration/
	mkdir -p bin
	mkdir -p migrations
	cp ./psql-migration/_build/default/bin/psql_migration ./bin

# Rebar tasks

rebar-shell:
	$(REBAR) shell

compile:
	$(REBAR) compile

xref:
	$(REBAR) xref

lint:
	$(REBAR) lint

check-format:
	$(REBAR) fmt -c

dialyze:
	$(REBAR) as test dialyzer

release:
	$(REBAR) as prod release

eunit:
	$(REBAR) eunit --cover

common-test:
	$(REBAR) ct --cover

cover:
	$(REBAR) covertool generate

format:
	$(REBAR) fmt -w

clean:
	$(REBAR) clean

distclean: clean-dev-image
	rm -rf _build

test: eunit common-test

cover-report:
	$(REBAR) cover
README.md (modified)
@@ -1 +1,31 @@
# api-key-mgmt-v2

## Migration

First, compile the migration script:

```shell
make wc-make_psql_migration
```

Then you can use the script with:

```shell
bin/psql_migration -e .env
```

```shell
Usage: psql_migration [-h] [-d [<dir>]] [-e [<env>]] <command>

  -h, --help     Print this help text
  -d, --dir      Migration folder [default: migrations]
  -e, --env      Environment file to search for DATABASE_URL [default: .env]
  new <name>     Create a new migration
  list           List migrations indicating which have been applied
  run            Run all migrations
  revert         Revert the last migration
  reset          Resets your database by dropping the database in your
                 DATABASE_URL and then runs `setup`
  setup          Creates the database specified in your DATABASE_URL, and
                 runs any existing migrations.
```
apps/akm/src/akm.app.src (new file, 23 lines)
@@ -0,0 +1,23 @@
{application, akm, [
    {description, "Api Key Management"},
    {vsn, "0.1.0"},
    {registered, []},
    {mod, {akm, []}},
    {applications, [
        kernel,
        stdlib,
        public_key,
        genlib,
        erl_health,
        oas_server_akm,
        scoper,
        jose,
        jsx,
        cowboy_draining_server,
        cowboy_cors,
        cowboy_access_log,
        snowflake,
        woody_user_identity
    ]},
    {env, []}
]}.
apps/akm/src/akm.erl (new file, 20 lines)
@@ -0,0 +1,20 @@
%% @doc Public API and application startup.
%% @end

-module(akm).

-behaviour(application).

%% Application callbacks
-export([start/2]).
-export([stop/1]).

%%

-spec start(normal, any()) -> {ok, pid()} | {error, any()}.
start(_StartType, _StartArgs) ->
    akm_sup:start_link().

-spec stop(any()) -> ok.
stop(_State) ->
    ok.
apps/akm/src/akm_apikeys_handler.erl (new file, 79 lines)
@@ -0,0 +1,79 @@
-module(akm_apikeys_handler).

-export([prepare/4]).

%% Types

-type request_data() :: #{atom() | binary() => term()}.
-type status_code() :: 200..599.
-type headers() :: cowboy:http_headers().
-type response_data() :: map() | [map()] | undefined.
-type response() :: {status_code(), headers(), response_data()}.
-type request_result() :: {ok | error, response()}.
-type request_state() :: #{
    authorize := fun(() -> {ok, akm_auth:resolution()} | request_result()),
    process := fun(() -> request_result())
}.

-type operation_id() :: atom().
-type swag_schema() :: map().
-type operation_spec() :: map().
-type swag_server_get_schema_fun() :: fun(() -> swag_schema()).
-type swag_server_get_operation_fun() :: fun((operation_id()) -> operation_spec()).

-type client_peer() :: #{
    ip_address => IP :: inet:ip_address(),
    port_number => Port :: inet:port_number()
}.
-type auth_context() :: any().
-type req() :: cowboy_req:req().
-type request_context() :: #{
    auth_context => AuthContext :: auth_context(),
    peer => client_peer(),
    cowboy_req => req()
}.

-type handler_opts() :: _.
-type handler_context() :: #{
    operation_id := operation_id(),
    woody_context := woody_context:ctx(),
    swagger_context := request_context(),
    swag_server_get_schema_fun := swag_server_get_schema_fun(),
    swag_server_get_operation_fun := swag_server_get_operation_fun()
}.

-export_type([request_data/0]).
-export_type([request_result/0]).

-export_type([handler_opts/0]).
-export_type([status_code/0]).
-export_type([headers/0]).
-export_type([response_data/0]).
-export_type([request_context/0]).
-export_type([operation_id/0]).
-export_type([handler_context/0]).
-export_type([swag_server_get_schema_fun/0]).
-export_type([swag_server_get_operation_fun/0]).

%% Providers
-spec prepare(operation_id(), request_data(), handler_context(), handler_opts()) -> {ok, request_state()}.
prepare(OperationID = 'IssueApiKey', #{'partyId' := PartyID, 'ApiKey' := ApiKey}, Context, _Opts) ->
    Authorize = fun() ->
        Prototypes = [{operation, #{id => OperationID, party_id => PartyID}}],
        Resolution = akm_auth:authorize_operation(Prototypes, Context),
        {ok, Resolution}
    end,
    Process = fun() ->
        #{woody_context := WoodyContext} = Context,
        case akm_apikeys_processing:issue_api_key(PartyID, ApiKey, WoodyContext) of
            {ok, Resp} ->
                akm_handler_utils:reply_ok(200, Resp);
            {error, already_exists} ->
                akm_handler_utils:reply_ok(400, #{
                    <<"errorType">> => <<"AlreadyExists">>,
                    <<"description">> => <<"This AccessToken already exists">>
                })
        end
    end,
    {ok, #{authorize => Authorize, process => Process}}.
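For orientation, a minimal sketch (not part of this commit) of how a prepared request state is driven: `prepare/4` returns two closures, and the caller decides when to authorize and when to run the side-effecting part. `akm_handler:process_request/5` further down does exactly this:

```erlang
%% Driver sketch; `run/4` is a hypothetical name for illustration.
run(OperationID, Req, Context, Opts) ->
    {ok, #{authorize := Authorize, process := Process}} =
        akm_apikeys_handler:prepare(OperationID, Req, Context, Opts),
    case Authorize() of
        {ok, allowed} -> Process();
        {ok, forbidden} -> akm_handler_utils:reply_ok(401)
    end.
```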
apps/akm/src/akm_apikeys_processing.erl (new file, 72 lines)
@@ -0,0 +1,72 @@
-module(akm_apikeys_processing).

-include_lib("bouncer_proto/include/bouncer_ctx_v1_thrift.hrl").

-export([issue_api_key/3]).

-spec issue_api_key(_, _, _) -> _.
issue_api_key(PartyID, ApiKey, WoodyContext) ->
    #{
        <<"name">> := Name,
        <<"metadata">> := Metadata
    } = ApiKey,
    %% REWORK: this ID scheme is not adequate, rework it
    ID = akm_id:generate_snowflake_id(),
    ContextFragment = bouncer_context_helpers:make_auth_fragment(#{
        method => <<"IssueApiKey">>,
        scope => [#{party => #{id => PartyID}}],
        token => #{id => ID}
    }),
    Status = "active",
    %% NB: named separately so it does not shadow the request metadata above
    TokenMetadata = #{
        <<"party.id">> => PartyID
    },
    Client = token_keeper_client:offline_authority(get_authority_id(), WoodyContext),
    case token_keeper_authority_offline:create(ID, ContextFragment, TokenMetadata, Client) of
        {ok, #{token := Token}} ->
            {ok, _Columns, [{CreatedAt}]} = epgsql_pool:query(
                main_pool,
                "INSERT INTO apikeys (id, name, party_id, status, metadata)"
                "VALUES ($1, $2, $3, $4, $5) RETURNING created_at",
                [ID, Name, PartyID, Status, Metadata]
            ),
            ApiKeyData = #{
                id => ID,
                name => Name,
                created_at => CreatedAt,
                status => Status,
                metadata => Metadata
            },
            Resp = #{
                <<"AccessToken">> => marshall_access_token(Token),
                <<"ApiKey">> => marshall_api_key(ApiKeyData)
            },
            {ok, Resp};
        {error, {auth_data, already_exists}} ->
            {error, already_exists}
    end.

get_authority_id() ->
    {ok, AuthorityID} = application:get_env(akm, authority_id),
    AuthorityID.

%% Marshalling

marshall_api_key(#{
    id := ID,
    name := Name,
    created_at := CreatedAt,
    status := Status,
    metadata := Metadata
}) ->
    #{
        <<"id">> => ID,
        <<"createdAt">> => CreatedAt,
        <<"name">> => Name,
        <<"status">> => Status,
        <<"metadata">> => Metadata
    }.

marshall_access_token(Token) ->
    #{
        <<"accessToken">> => Token
    }.
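For reference, a successful `issue_api_key/3` call yields a term of roughly this shape; all values below are made up for illustration, and the `createdAt` representation depends on how epgsql returns timestamps:

```erlang
{ok, #{
    <<"AccessToken">> => #{<<"accessToken">> => <<"eyJhbGciOi...">>},
    <<"ApiKey">> => #{
        <<"id">> => <<"1KgIYBGsCgq">>,        % snowflake id in base 62
        <<"createdAt">> => {{2023, 1, 1}, {0, 0, 0}},
        <<"name">> => <<"live-site-integration">>,
        <<"status">> => "active",
        <<"metadata">> => #{<<"env">> => <<"prod">>}
    }
}}
```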
apps/akm/src/akm_auth.erl (new file, 200 lines)
@@ -0,0 +1,200 @@
-module(akm_auth).

-define(APP, akm).

-export([get_subject_id/1]).
-export([get_party_id/1]).
-export([get_user_id/1]).
-export([get_user_email/1]).

-export([preauthorize_api_key/1]).
-export([authorize_api_key/3]).
-export([authorize_operation/2]).

-export_type([resolution/0]).
-export_type([preauth_context/0]).
-export_type([auth_context/0]).
-export_type([api_key/0]).

%%

-type token_type() :: bearer.
-type auth_context() :: {authorized, token_keeper_client:auth_data()}.
-type preauth_context() :: {unauthorized, {token_type(), token_keeper_client:token()}}.
-type api_key() :: binary().

-type resolution() ::
    allowed
    | forbidden
    | {forbidden, _Reason}.

-define(AUTHORIZED(Ctx), {authorized, Ctx}).
-define(UNAUTHORIZED(Ctx), {unauthorized, Ctx}).

%%

-spec get_subject_id(auth_context()) -> binary() | undefined.
get_subject_id(AuthContext) ->
    case get_party_id(AuthContext) of
        PartyId when is_binary(PartyId) ->
            PartyId;
        undefined ->
            get_user_id(AuthContext)
    end.

-spec get_party_id(auth_context()) -> binary() | undefined.
get_party_id(?AUTHORIZED(#{metadata := Metadata})) ->
    get_metadata(get_metadata_mapped_key(party_id), Metadata).

-spec get_user_id(auth_context()) -> binary() | undefined.
get_user_id(?AUTHORIZED(#{metadata := Metadata})) ->
    get_metadata(get_metadata_mapped_key(user_id), Metadata).

-spec get_user_email(auth_context()) -> binary() | undefined.
get_user_email(?AUTHORIZED(#{metadata := Metadata})) ->
    get_metadata(get_metadata_mapped_key(user_email), Metadata).

%%

-spec preauthorize_api_key(api_key()) -> {ok, preauth_context()} | {error, _Reason}.
preauthorize_api_key(ApiKey) ->
    case parse_api_key(ApiKey) of
        {ok, Token} ->
            {ok, ?UNAUTHORIZED(Token)};
        {error, Error} ->
            {error, Error}
    end.

-spec authorize_api_key(preauth_context(), token_keeper_client:token_context(), woody_context:ctx()) ->
    {ok, auth_context()} | {error, _Reason}.
authorize_api_key(?UNAUTHORIZED({TokenType, Token}), TokenContext, WoodyContext) ->
    authorize_token_by_type(TokenType, Token, TokenContext, WoodyContext).

authorize_token_by_type(bearer, Token, TokenContext, WoodyContext) ->
    Authenticator = token_keeper_client:authenticator(WoodyContext),
    case token_keeper_authenticator:authenticate(Token, TokenContext, Authenticator) of
        {ok, AuthData} ->
            {ok, ?AUTHORIZED(AuthData)};
        {error, TokenKeeperError} ->
            _ = logger:warning("Token keeper authorization failed: ~p", [TokenKeeperError]),
            {error, {auth_failed, TokenKeeperError}}
    end.

-spec authorize_operation(
    Prototypes :: akm_bouncer_context:prototypes(),
    Context :: akm_handler_utils:handler_context()
) -> resolution().
authorize_operation(Prototypes, Context) ->
    AuthContext = extract_auth_context(Context),
    #{swagger_context := SwagContext, woody_context := WoodyContext} = Context,
    IPAddress = get_ip_address(SwagContext),
    Fragments = akm_bouncer:gather_context_fragments(
        get_token_keeper_fragment(AuthContext),
        get_user_id(AuthContext),
        IPAddress,
        WoodyContext
    ),
    Fragments1 = akm_bouncer_context:build(Prototypes, Fragments),
    akm_bouncer:judge(Fragments1, WoodyContext).

%%

get_token_keeper_fragment(?AUTHORIZED(#{context := Context})) ->
    Context.

extract_auth_context(#{swagger_context := #{auth_context := AuthContext}}) ->
    AuthContext.

parse_api_key(<<"Bearer ", Token/binary>>) ->
    {ok, {bearer, Token}};
parse_api_key(_) ->
    {error, unsupported_auth_scheme}.

%%

get_metadata(Key, Metadata) ->
    maps:get(Key, Metadata, undefined).

get_metadata_mapped_key(Key) ->
    maps:get(Key, get_meta_mappings()).

get_meta_mappings() ->
    AuthConfig = genlib_app:env(?APP, auth_config),
    maps:get(metadata_mappings, AuthConfig).

get_ip_address(SwagContext) ->
    Request = maps:get(cowboy_req, SwagContext, #{}),
    case get_ip_address_from_request(Request) of
        {ok, IPAddress} ->
            IPAddress;
        {error, _Error} ->
            %% Ignore error, add logging if needed
            undefined
    end.

get_ip_address_from_request(Request) ->
    IPAddressHeader = genlib_app:env(akm, ip_address_header, <<"x-forwarded-for">>),
    case Request of
        #{headers := #{IPAddressHeader := IPAddress}} ->
            parse_header_ip_address(IPAddress);
        #{peer := {IPAddress, _Port}} ->
            {ok, IPAddress};
        _ ->
            {error, no_req_in_swag_context}
    end.

parse_header_ip_address(IPAddress0) ->
    IPAddress1 = erlang:binary_to_list(IPAddress0),
    IPs = string:lexemes(IPAddress1, ", "),
    Valid = lists:all(fun check_ip/1, IPs),
    case IPs of
        [ClientIP | _Proxies] when Valid ->
            inet:parse_strict_address(ClientIP);
        _ ->
            % empty or malformed value
            {error, malformed}
    end.

check_ip(IP) ->
    case inet:parse_strict_address(IP) of
        {ok, _} ->
            true;
        _Error ->
            % unparseable ip address
            false
    end.

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

-spec test() -> _.

-spec determine_peer_test_() -> [_TestGen].
determine_peer_test_() ->
    [
        ?_assertEqual(
            {ok, {10, 10, 10, 10}},
            parse_header_ip_address(<<"10.10.10.10">>)
        ),
        ?_assertEqual(
            {ok, {17, 71, 0, 1}},
            parse_header_ip_address(<<"17.71.0.1">>)
        ),
        ?_assertEqual(
            {ok, {17, 71, 0, 1}},
            parse_header_ip_address(<<" 17.71.0.1,123.123.123.123 ">>)
        ),
        ?_assertEqual(
            {error, malformed},
            parse_header_ip_address(<<",,,,">>)
        ),
        ?_assertEqual(
            {ok, {1, 1, 1, 1}},
            parse_header_ip_address(<<"1.1.1.1,,, ,,,">>)
        ),
        ?_assertEqual(
            {error, malformed},
            parse_header_ip_address(<<"1.,1.,1.1,">>)
        )
    ].

-endif.
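The bearer-scheme parsing above is easy to exercise from a shell; expected results, given the two clauses of `parse_api_key/1` (the token value is made up):

```erlang
1> akm_auth:preauthorize_api_key(<<"Bearer v1.some-token">>).
{ok,{unauthorized,{bearer,<<"v1.some-token">>}}}
2> akm_auth:preauthorize_api_key(<<"Basic dXNlcjpwYXNz">>).
{error,unsupported_auth_scheme}
```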
apps/akm/src/akm_bouncer.erl (new file, 43 lines)
@@ -0,0 +1,43 @@
-module(akm_bouncer).

-export([gather_context_fragments/4]).
-export([judge/2]).

%%

-spec gather_context_fragments(
    TokenContextFragment :: token_keeper_client:context_fragment(),
    UserID :: binary() | undefined,
    IPAddress :: inet:ip_address(),
    WoodyContext :: woody_context:ctx()
) -> akm_bouncer_context:fragments().
gather_context_fragments(TokenContextFragment, UserID, IPAddress, WoodyCtx) ->
    {Base, External0} = akm_bouncer_context:new(),
    External1 = External0#{<<"token-keeper">> => {encoded_fragment, TokenContextFragment}},
    {add_requester_context(IPAddress, Base), maybe_add_userorg(UserID, External1, WoodyCtx)}.

-spec judge(akm_bouncer_context:fragments(), woody_context:ctx()) -> akm_auth:resolution().
judge({Acc, External}, WoodyCtx) ->
    % TODO error out early?
    {ok, RulesetID} = application:get_env(akm, bouncer_ruleset_id),
    JudgeContext = #{fragments => External#{<<"akm">> => Acc}},
    bouncer_client:judge(RulesetID, JudgeContext, WoodyCtx).

%%

maybe_add_userorg(undefined, External, _WoodyCtx) ->
    External;
maybe_add_userorg(UserID, External, WoodyCtx) ->
    case bouncer_context_helpers:get_user_orgs_fragment(UserID, WoodyCtx) of
        {ok, UserOrgsFragment} ->
            External#{<<"userorg">> => UserOrgsFragment};
        {error, {user, notfound}} ->
            External
    end.

-spec add_requester_context(inet:ip_address(), akm_bouncer_context:acc()) -> akm_bouncer_context:acc().
add_requester_context(IPAddress, FragmentAcc) ->
    bouncer_context_helpers:add_requester(
        #{ip => IPAddress},
        FragmentAcc
    ).
apps/akm/src/akm_bouncer_context.erl (new file, 80 lines)
@@ -0,0 +1,80 @@
-module(akm_bouncer_context).

-include_lib("bouncer_proto/include/bouncer_ctx_v1_thrift.hrl").
-include_lib("bouncer_proto/include/bouncer_base_thrift.hrl").

-type fragment() :: bouncer_client:context_fragment().
-type acc() :: bouncer_context_helpers:context_fragment().

-type fragments() :: {acc(), _ExternalFragments :: #{_ID => fragment()}}.

-export_type([fragment/0]).
-export_type([acc/0]).
-export_type([fragments/0]).

-type operation_id() :: akm_handler_utils:operation_id().
-type prototypes() :: [
    {operation, prototype_operation()}
].

-type prototype_operation() :: #{
    id => operation_id(),
    party_id => maybe_undefined(entity_id()),
    api_key => maybe_undefined(term())
}.

-type entity_id() :: binary().
-type maybe_undefined(Type) :: Type | undefined.

-export_type([prototypes/0]).
-export_type([prototype_operation/0]).

-export([new/0]).
-export([build/2]).

%%

-spec new() -> fragments().
new() ->
    {mk_base_fragment(), #{}}.

mk_base_fragment() ->
    bouncer_context_helpers:make_env_fragment(#{
        now => genlib_rfc3339:format(genlib_time:unow(), second),
        deployment => #{id => genlib_app:env(akm, deployment, undefined)}
    }).

-spec build(prototypes(), fragments()) -> fragments().
build(Prototypes, {Acc0, External}) ->
    Acc1 = lists:foldl(fun({T, Params}, Acc) -> build(T, Params, Acc) end, Acc0, Prototypes),
    {Acc1, External}.

build(operation, Params = #{id := OperationID}, Acc) ->
    Acc#ctx_v1_ContextFragment{
        apikeymgmt = #ctx_v1_ContextApiKeyMgmt{
            op = #ctx_v1_ApiKeyMgmtOperation{
                id = operation_id_to_binary(OperationID),
                party = maybe_entity(party_id, Params),
                api_key = maybe(api_key, Params)
            }
        }
    }.

%%

maybe(_Name, undefined) ->
    undefined;
maybe(Name, Params) ->
    maps:get(Name, Params, undefined).

maybe_entity(_Name, undefined) ->
    undefined;
maybe_entity(Name, Params) ->
    case maps:get(Name, Params, undefined) of
        undefined ->
            undefined;
        Value ->
            #base_Entity{id = Value}
    end.

operation_id_to_binary(V) ->
    erlang:atom_to_binary(V, utf8).
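Put together, assembling a judgement context for the one operation in this commit looks roughly like this sketch (the party id is made up, and `WoodyContext` is assumed to be in scope):

```erlang
%% Build fragments for an IssueApiKey judgement call.
{Base, External} = akm_bouncer_context:new(),
Fragments = akm_bouncer_context:build(
    [{operation, #{id => 'IssueApiKey', party_id => <<"party-1">>}}],
    {Base, External}
),
Resolution = akm_bouncer:judge(Fragments, WoodyContext).
```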
apps/akm/src/akm_cors_policy.erl (new file, 35 lines)
@@ -0,0 +1,35 @@
-module(akm_cors_policy).

-behaviour(cowboy_cors_policy).

-export([policy_init/1]).
-export([allowed_origins/2]).
-export([allowed_headers/2]).
-export([allowed_methods/2]).

-spec policy_init(cowboy_req:req()) -> {ok, cowboy_req:req(), any()}.
policy_init(Req) ->
    {ok, Req, undefined}.

-spec allowed_origins(cowboy_req:req(), any()) -> {'*', any()}.
allowed_origins(_Req, State) ->
    {'*', State}.

-spec allowed_headers(cowboy_req:req(), any()) -> {[binary()], any()}.
allowed_headers(_Req, State) ->
    {
        [
            <<"access-control-allow-headers">>,
            <<"origin">>,
            <<"x-requested-with">>,
            <<"content-type">>,
            <<"accept">>,
            <<"authorization">>,
            <<"x-request-id">>
        ],
        State
    }.

-spec allowed_methods(cowboy_req:req(), any()) -> {[binary()], any()}.
allowed_methods(_Req, State) ->
    {[<<"GET">>, <<"POST">>, <<"PUT">>, <<"DELETE">>, <<"OPTIONS">>], State}.
apps/akm/src/akm_cowboy_kitten.erl (new file, 86 lines)
@@ -0,0 +1,86 @@
-module(akm_cowboy_kitten).

-behaviour(cowboy_stream).

-define(APP, akm).

%% callback exports

-export([init/3]).
-export([data/4]).
-export([info/3]).
-export([terminate/3]).
-export([early_error/5]).

-type state() :: #{
    next := any()
}.

%% callbacks

-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) -> {cowboy_stream:commands(), state()}.
init(StreamID, Req, Opts) ->
    {Commands0, Next} = cowboy_stream:init(StreamID, Req, Opts),
    {Commands0, #{next => Next}}.

-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) ->
    {cowboy_stream:commands(), State}
when
    State :: state().
data(StreamID, IsFin, Data, #{next := Next0} = State) ->
    {Commands0, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0),
    {Commands0, State#{next => Next}}.

-spec info(cowboy_stream:streamid(), any(), State) -> {cowboy_stream:commands(), State} when State :: state().
info(StreamID, {response, _, _, _} = Info, #{next := Next0} = State) ->
    Resp1 = handle_response(Info),
    {Commands0, Next} = cowboy_stream:info(StreamID, Resp1, Next0),
    {Commands0, State#{next => Next}};
info(StreamID, Info, #{next := Next0} = State) ->
    {Commands0, Next} = cowboy_stream:info(StreamID, Info, Next0),
    {Commands0, State#{next => Next}}.

-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), state()) -> any().
terminate(StreamID, Reason, #{next := Next}) ->
    cowboy_stream:terminate(StreamID, Reason, Next).

-spec early_error(
    cowboy_stream:streamid(),
    cowboy_stream:reason(),
    cowboy_stream:partial_req(),
    Resp,
    cowboy:opts()
) -> Resp when
    Resp :: cowboy_stream:resp_command().
early_error(StreamID, Reason, PartialReq, Resp, Opts) ->
    Resp1 = handle_response(Resp),
    cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp1, Opts).

%% private functions

handle_response({response, Code, Headers, Body}) when Code >= 500 ->
    send_oops_resp(Code, Headers, get_oops_body_safe(Code), Body);
handle_response({response, _, _, _} = Resp) ->
    Resp.

send_oops_resp(Code, Headers, undefined, Req) ->
    {response, Code, Headers, Req};
send_oops_resp(Code, Headers0, File, _) ->
    FileSize = filelib:file_size(File),
    Headers = maps:merge(Headers0, #{
        <<"content-type">> => <<"text/plain; charset=utf-8">>,
        <<"content-length">> => integer_to_list(FileSize)
    }),
    {response, Code, Headers, {sendfile, 0, FileSize, File}}.

get_oops_body_safe(Code) ->
    try
        get_oops_body(Code)
    catch
        Error:Reason ->
            _ = logger:warning("Invalid oops body config for code: ~p. Error: ~p:~p", [Code, Error, Reason]),
            undefined
    end.

get_oops_body(Code) ->
    genlib_map:get(Code, genlib_app:env(?APP, oops_bodies, #{}), undefined).
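The `oops_bodies` environment consulted above maps status codes to files on disk whose contents replace the body of any 5xx response. A hypothetical `sys.config` entry (the path is illustrative, not part of this commit):

```erlang
{akm, [
    {oops_bodies, #{
        %% file whose contents replace the body of any 500 response
        500 => "/opt/api-key-mgmt-v2/oops-bodies/oops-500.txt"
    }}
]}
```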
apps/akm/src/akm_handler.erl (new file, 190 lines)
@@ -0,0 +1,190 @@
-module(akm_handler).

-behaviour(swag_server_apikeys_logic_handler).

%% swag_server_apikeys_logic_handler callbacks
-export([map_error/2]).
-export([authorize_api_key/4]).
-export([handle_request/4]).

-type opts() :: swag_server_apikeys:handler_opts(_).

%% API

%% @WARNING Must be refactored in case of different classes of users using this API
%% See CAPI capi_handler
%% https://github.com/valitydev/capi-v2/blob/2de9367561a511f0dc1448881201de48e9004c54/apps/capi/src/capi_handler.erl#L62
-define(REALM, <<"external">>).

-spec map_error(atom(), swag_server_apikeys_validation:error()) -> swag_server_apikeys:error_reason().
map_error(validation_error, Error) ->
    Type = map_error_type(maps:get(type, Error)),
    Name = genlib:to_binary(maps:get(param_name, Error)),
    Message =
        case maps:get(description, Error, undefined) of
            undefined ->
                <<"Request parameter: ", Name/binary, ", error type: ", Type/binary>>;
            Description ->
                DescriptionBin = genlib:to_binary(Description),
                <<"Request parameter: ", Name/binary, ", error type: ", Type/binary, ", description: ",
                    DescriptionBin/binary>>
        end,
    jsx:encode(#{
        <<"errorType">> => Type,
        <<"name">> => Name,
        <<"description">> => Message
    }).

-spec map_error_type(swag_server_apikeys_validation:error_type()) -> binary().
map_error_type(no_match) -> <<"NoMatch">>;
map_error_type(not_found) -> <<"NotFound">>;
map_error_type(not_in_range) -> <<"NotInRange">>;
map_error_type(wrong_length) -> <<"WrongLength">>;
map_error_type(wrong_size) -> <<"WrongSize">>;
map_error_type(schema_violated) -> <<"SchemaViolated">>;
map_error_type(wrong_type) -> <<"WrongType">>;
map_error_type(wrong_format) -> <<"WrongFormat">>;
map_error_type(wrong_body) -> <<"WrongBody">>.

-spec authorize_api_key(
    swag_server_apikeys:operation_id(),
    swag_server_apikeys:api_key(),
    swag_server_apikeys:request_context(),
    opts()
) ->
    Result :: false | {true, akm_auth:preauth_context()}.
authorize_api_key(OperationID, ApiKey, _Context, _HandlerOpts) ->
    %% Since we require the request id field to create a woody context for our trip to token_keeper
    %% it seems it is no longer possible to perform any authorization in this method.
    %% To gain this ability back we would need to rewrite the swagger generator to perform its
    %% request validation checks before this stage.
    %% But since a decent chunk of authorization logic is already defined in the handler function
    %% it is probably easier to move it there in its entirety.
    ok = scoper:add_scope('swag.server', #{api => wallet, operation_id => OperationID}),
    case akm_auth:preauthorize_api_key(ApiKey) of
        {ok, Context} ->
            {true, Context};
        {error, Error} ->
            _ = logger:info("API Key preauthorization failed for ~p due to ~p", [OperationID, Error]),
            false
    end.

-spec handle_request(
    swag_server_apikeys:operation_id(),
    akm_apikeys_handler:request_data(),
    swag_server_apikeys:request_context(),
    opts()
) ->
    akm_apikeys_handler:request_result().
handle_request(OperationID, Req, SwagContext, Opts) ->
    #{'X-Request-Deadline' := Header} = Req,
    case akm_utils:parse_deadline(Header) of
        {ok, Deadline} ->
            WoodyContext = attach_deadline(Deadline, create_woody_context(Req)),
            process_request(OperationID, Req, SwagContext, Opts, WoodyContext);
        _ ->
            akm_handler_utils:reply_ok(400, #{
                <<"errorType">> => <<"SchemaViolated">>,
                <<"name">> => <<"X-Request-Deadline">>,
                <<"description">> => <<"Invalid data in X-Request-Deadline header">>
            })
    end.

process_request(OperationID, Req, SwagContext0, Opts, WoodyContext0) ->
    _ = logger:info("Processing request ~p", [OperationID]),
    try
        SwagContext = do_authorize_api_key(SwagContext0, WoodyContext0),
        WoodyContext = put_user_identity(WoodyContext0, get_auth_context(SwagContext)),
        Context = create_handler_context(OperationID, SwagContext, WoodyContext),
        ok = set_context_meta(Context),
        {ok, RequestState} = akm_apikeys_handler:prepare(OperationID, Req, Context, Opts),
        #{authorize := Authorize, process := Process} = RequestState,
        {ok, Resolution} = Authorize(),
        case Resolution of
            allowed ->
                ok = logger:debug("Operation ~p authorized", [OperationID]),
                Process();
            forbidden ->
                _ = logger:info("Authorization failed"),
                akm_handler_utils:reply_ok(401)
        end
    catch
        throw:{token_auth_failed, Reason} ->
            _ = logger:info("API Key authorization failed for ~p due to ~p", [OperationID, Reason]),
            akm_handler_utils:reply_ok(401);
        error:{woody_error, {Source, Class, Details}} ->
            process_woody_error(Source, Class, Details)
    end.

-spec create_woody_context(akm_apikeys_handler:request_data()) -> woody_context:ctx().
create_woody_context(#{'X-Request-ID' := RequestID}) ->
    RpcID = #{trace_id := TraceID} = woody_context:new_rpc_id(genlib:to_binary(RequestID)),
    ok = scoper:add_meta(#{request_id => RequestID, trace_id => TraceID}),
    woody_context:new(RpcID, undefined, akm_woody_client:get_service_deadline(wallet)).

put_user_identity(WoodyContext, AuthContext) ->
    woody_user_identity:put(collect_user_identity(AuthContext), WoodyContext).

get_auth_context(#{auth_context := AuthContext}) ->
    AuthContext.

collect_user_identity(AuthContext) ->
    genlib_map:compact(#{
        id => akm_auth:get_subject_id(AuthContext),
        %% TODO: Store user realm in authdata meta and extract it here
        realm => ?REALM,
        email => akm_auth:get_user_email(AuthContext)
    }).

-spec set_context_meta(akm_handler_utils:handler_context()) -> ok.
set_context_meta(Context) ->
    AuthContext = akm_handler_utils:get_auth_context(Context),
    Meta = #{
        metadata => #{
            'user-identity' => collect_user_identity(AuthContext)
        }
    },
    scoper:add_meta(Meta).

attach_deadline(undefined, Context) ->
    Context;
attach_deadline(Deadline, Context) ->
    woody_context:set_deadline(Deadline, Context).

do_authorize_api_key(SwagContext = #{auth_context := PreAuthContext}, WoodyContext) ->
    case akm_auth:authorize_api_key(PreAuthContext, make_token_context(SwagContext), WoodyContext) of
        {ok, AuthContext} ->
            SwagContext#{auth_context => AuthContext};
        {error, Error} ->
            throw({token_auth_failed, Error})
    end.

make_token_context(#{cowboy_req := CowboyReq}) ->
    case cowboy_req:header(<<"origin">>, CowboyReq) of
        Origin when is_binary(Origin) ->
            #{request_origin => Origin};
        undefined ->
            #{}
    end.

-spec create_handler_context(
    swag_server_apikeys:operation_id(), swag_server_apikeys:request_context(), woody_context:ctx()
) -> akm_handler_utils:handler_context().
create_handler_context(OpID, SwagContext, WoodyContext) ->
    #{
        operation_id => OpID,
        woody_context => WoodyContext,
        swagger_context => SwagContext,
        swag_server_get_schema_fun => fun swag_server_apikeys_schema:get/0,
        swag_server_get_operation_fun => fun(OperationID) -> swag_server_apikeys_router:get_operation(OperationID) end
    }.

process_woody_error(_Source, result_unexpected, _Details) ->
    akm_handler_utils:reply_error(500);
process_woody_error(_Source, resource_unavailable, _Details) ->
    % Return a 504 since it is unknown whether the state of the system has been altered
    % @TODO Implement some sort of tagging for operations that mutate the state,
    % so we can still return 503s for those that don't
    akm_handler_utils:reply_error(504);
process_woody_error(_Source, result_unknown, _Details) ->
    akm_handler_utils:reply_error(504).
apps/akm/src/akm_handler_utils.erl (new file, 122 lines)
@@ -0,0 +1,122 @@
-module(akm_handler_utils).

-export([get_error_msg/1]).

-export([reply_ok/1]).
-export([reply_ok/2]).
-export([reply_ok/3]).

-export([reply_error/1]).
-export([reply_error/2]).
-export([reply_error/3]).

-export([logic_error/2]).

-export([service_call/2]).

-export([get_owner/1]).
-export([get_auth_context/1]).

-export([get_location/3]).
-export([maybe_with/3]).

-define(APP, akm).

-type status_code() :: akm_apikeys_handler:status_code().
-type headers() :: akm_apikeys_handler:headers().
-type response_data() :: akm_apikeys_handler:response_data().
-type handler_context() :: akm_apikeys_handler:handler_context().
-type handler_opts() :: akm_apikeys_handler:handler_opts().
-type request_context() :: akm_apikeys_handler:request_context().
-type operation_id() :: akm_apikeys_handler:operation_id().

-type error_message() :: binary() | io_lib:chars().

-type error_type() :: external_id_conflict.
-type error_params() :: {ID :: binary(), ExternalID :: binary()}.

-type owner() :: binary() | undefined.

-export_type([owner/0]).

-export_type([handler_context/0]).
-export_type([request_context/0]).
-export_type([response_data/0]).
-export_type([operation_id/0]).

%% API

-spec get_owner(handler_context()) -> owner().
get_owner(Context) ->
    akm_auth:get_subject_id(get_auth_context(Context)).

-spec get_auth_context(akm_handler_utils:handler_context()) -> any().
get_auth_context(#{swagger_context := #{auth_context := AuthContext}}) ->
    AuthContext.

-spec get_error_msg(error_message()) -> response_data().
get_error_msg(Message) ->
    #{<<"message">> => genlib:to_binary(Message)}.

-spec logic_error(error_type(), error_params()) -> {error, {status_code(), #{}, response_data()}}.
logic_error(external_id_conflict, {ID, ExternalID}) ->
    Data = #{
        <<"externalID">> => ExternalID,
        <<"id">> => ID,
        <<"message">> => <<"This 'externalID' has been used by another request">>
    },
    reply_error(409, Data).

-spec reply_ok(status_code()) -> {ok, {status_code(), #{}, undefined}}.
reply_ok(Code) ->
    reply_ok(Code, undefined).

-spec reply_ok(status_code(), response_data()) -> {ok, {status_code(), #{}, response_data()}}.
reply_ok(Code, Data) ->
    reply_ok(Code, Data, #{}).

-spec reply_ok(status_code(), response_data(), headers()) -> {ok, {status_code(), #{}, response_data()}}.
reply_ok(Code, Data, Headers) ->
    reply(ok, Code, Data, Headers).

-spec reply_error(status_code()) -> {error, {status_code(), #{}, undefined}}.
reply_error(Code) ->
    reply_error(Code, undefined).

-spec reply_error(status_code(), response_data()) -> {error, {status_code(), #{}, response_data()}}.
reply_error(Code, Data) ->
    reply_error(Code, Data, #{}).

-spec reply_error(status_code(), response_data(), headers()) -> {error, {status_code(), #{}, response_data()}}.
reply_error(Code, Data, Headers) ->
    reply(error, Code, Data, Headers).

reply(Status, Code, Data, Headers) ->
    {Status, {Code, Headers, Data}}.

-spec get_location(akm_utils:route_match(), [binary()], handler_opts()) -> headers().
get_location(PathSpec, Params, _Opts) ->
    BaseUrl = genlib_app:env(?APP, public_endpoint),
    #{<<"Location">> => akm_utils:get_url(BaseUrl, PathSpec, Params)}.

-spec service_call(
    {
        akm_woody_client:service_name(),
        woody:func(),
        woody:args()
    },
    handler_context()
) -> woody:result().
service_call({ServiceName, Function, Args}, #{woody_context := WoodyContext}) ->
    akm_woody_client:call_service(ServiceName, Function, Args, WoodyContext).

-spec maybe_with(term(), map(), fun((_Value) -> Result)) -> Result | undefined.
maybe_with(_Name, undefined, _Then) ->
    undefined;
maybe_with(Name, Params, Then) ->
    case maps:get(Name, Params, undefined) of
        V when V /= undefined ->
            Then(V);
        undefined ->
            undefined
    end.
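The reply helpers only wrap their arguments into the `{Status, {Code, Headers, Data}}` shape consumed by the swagger layer, as a shell session shows:

```erlang
1> akm_handler_utils:reply_ok(200, #{<<"id">> => <<"42">>}).
{ok,{200,#{},#{<<"id">> => <<"42">>}}}
2> akm_handler_utils:reply_error(409, #{<<"message">> => <<"conflict">>}).
{error,{409,#{},#{<<"message">> => <<"conflict">>}}}
3> akm_handler_utils:maybe_with(name, #{name => <<"n">>}, fun(V) -> V end).
<<"n">>
```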
apps/akm/src/akm_id.erl (new file, 19 lines)
@@ -0,0 +1,19 @@
%%
%% Identifier-related utils

-module(akm_id).

-export([generate_snowflake_id/0]).

%% Types

-type binary_id() :: binary().

-export_type([binary_id/0]).

%% API

-spec generate_snowflake_id() -> binary_id().
generate_snowflake_id() ->
    <<ID:64>> = snowflake:new(),
    genlib_format:format_int_base(ID, 62).
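A 64-bit snowflake rendered in base 62 stays short and URL-safe: 62^11 exceeds 2^64, so the result is at most 11 characters. Illustrative shell output (the id value is made up):

```erlang
1> akm_id:generate_snowflake_id().
<<"1KgIYBGsCgq">>
```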
apps/akm/src/akm_sup.erl (new file, 57 lines)
@@ -0,0 +1,57 @@
%% @doc Top level supervisor.
%% @end

-module(akm_sup).

-behaviour(supervisor).

%% API
-export([start_link/0]).

%% Supervisor callbacks
-export([init/1]).

%%

-spec start_link() -> {ok, pid()} | {error, {already_started, pid()}}.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%%

-spec init([]) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init([]) ->
    {LogicHandlers, LogicHandlerSpecs} = get_logic_handler_info(),
    HealthCheck = enable_health_logging(genlib_app:env(akm, health_check, #{})),
    AdditionalRoutes = [{'_', [erl_health_handle:get_route(HealthCheck), get_prometheus_route()]}],
    SwaggerHandlerOpts = genlib_app:env(akm, swagger_handler_opts, #{}),
    SwaggerSpec = akm_swagger_server:child_spec(AdditionalRoutes, LogicHandlers, SwaggerHandlerOpts),
    ok = start_epgsql_pooler(),
    {ok, {
        {one_for_all, 0, 1},
        LogicHandlerSpecs ++ [SwaggerSpec]
    }}.

-spec get_logic_handler_info() -> {akm_swagger_server:logic_handlers(), [supervisor:child_spec()]}.
get_logic_handler_info() ->
    {
        #{
            keys => {akm_handler, #{}}
        },
        []
    }.

-spec enable_health_logging(erl_health:check()) -> erl_health:check().
enable_health_logging(Check) ->
    EvHandler = {erl_health_event_handler, []},
    maps:map(fun(_, V = {_, _, _}) -> #{runner => V, event_handler => EvHandler} end, Check).

start_epgsql_pooler() ->
    Params = genlib_app:env(akm, epsql_connection, #{}),
    ok = epgsql_pool:validate_connection_params(Params),
    {ok, _} = epgsql_pool:start(main_pool, 10, 20, Params),
    ok.

-spec get_prometheus_route() -> {iodata(), module(), _Opts :: any()}.
get_prometheus_route() ->
    {"/metrics/[:registry]", prometheus_cowboy2_handler, []}.
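`start_epgsql_pooler/0` reads connection parameters from the application environment under the `epsql_connection` key. A hypothetical entry matching the `DATABASE_URL` from `.env`; the exact parameter names expected by `epgsql_pool` are an assumption here, following common epgsql conventions:

```erlang
{akm, [
    {epsql_connection, #{
        %% assumed key names; values mirror
        %% postgresql://postgres:postgres@db/apikeymgmtv2
        host => "db",
        port => 5432,
        username => "postgres",
        password => "postgres",
        database => "apikeymgmtv2"
    }}
]}
```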
apps/akm/src/akm_swagger_server.erl (new file, 103 lines)
@@ -0,0 +1,103 @@
-module(akm_swagger_server).

-export([child_spec/3]).

-export_type([logic_handler/0]).
-export_type([logic_handlers/0]).

-type logic_handler() :: swag_server_apikeys:logic_handler(_).
-type logic_handlers() :: #{atom() => logic_handler()}.

-type swagger_handler_opts() :: swag_server_apikeys_router:swagger_handler_opts().

-define(APP, akm).
-define(DEFAULT_ACCEPTORS_POOLSIZE, 100).
-define(DEFAULT_IP_ADDR, "::").
-define(DEFAULT_PORT, 8080).
-define(RANCH_REF, ?MODULE).

-spec child_spec(cowboy_router:routes(), logic_handlers(), swagger_handler_opts()) -> supervisor:child_spec().
child_spec(AdditionalRoutes, LogicHandlers, SwaggerHandlerOpts) ->
    {Transport, TransportOpts} = get_socket_transport(),
    CowboyOpts = get_cowboy_config(AdditionalRoutes, LogicHandlers, SwaggerHandlerOpts),
    GsTimeout = genlib_app:env(?APP, graceful_shutdown_timeout, 5000),
    Protocol = cowboy_clear,
    cowboy_draining_server:child_spec(
        ?RANCH_REF,
        Transport,
        TransportOpts,
        Protocol,
        CowboyOpts,
        GsTimeout
    ).

get_socket_transport() ->
    {ok, IP} = inet:parse_address(genlib_app:env(?APP, ip, ?DEFAULT_IP_ADDR)),
    Port = genlib_app:env(?APP, port, ?DEFAULT_PORT),
    AcceptorsPool = genlib_app:env(?APP, acceptors_poolsize, ?DEFAULT_ACCEPTORS_POOLSIZE),
    {ranch_tcp, #{socket_opts => [{ip, IP}, {port, Port}], num_acceptors => AcceptorsPool}}.

get_cowboy_config(AdditionalRoutes, LogicHandlers, SwaggerHandlerOpts) ->
    Dispatch =
        cowboy_router:compile(
            squash_routes(
                AdditionalRoutes ++
                    swag_server_apikeys_router:get_paths(
                        maps:get(keys, LogicHandlers),
                        SwaggerHandlerOpts
                    )
            )
        ),
    CowboyOpts = #{
        env => #{
            dispatch => Dispatch,
            cors_policy => akm_cors_policy
        },
        middlewares => [
            cowboy_router,
            cowboy_cors,
            cowboy_handler
        ],
        stream_handlers => [
            cowboy_access_log_h,
            akm_cowboy_kitten,
            cowboy_stream_h
        ]
    },
    cowboy_access_log_h:set_extra_info_fun(
        mk_operation_id_getter(CowboyOpts),
        CowboyOpts
    ).

squash_routes(Routes) ->
    orddict:to_list(
        lists:foldl(
            fun({K, V}, D) -> orddict:update(K, fun(V0) -> V0 ++ V end, V, D) end,
            orddict:new(),
            Routes
        )
    ).

mk_operation_id_getter(#{env := Env}) ->
    fun(Req) ->
        get_operation_id(Req, Env)
    end.

%% Ensure that request has host and path required for
%% cowboy_router:execute/2.
%% NOTE: Be careful when upgrading cowboy in this project
%% because the cowboy_router:execute/2 call can change.
get_operation_id(Req = #{host := _Host, path := _Path}, Env) ->
    case cowboy_router:execute(Req, Env) of
        {ok, _, #{handler_opts := {_Operations, _LogicHandler, _SwaggerHandlerOpts} = HandlerOpts}} ->
            case swag_server_apikeys_utils:get_operation_id(Req, HandlerOpts) of
                undefined ->
                    #{};
                OperationID ->
                    #{operation_id => OperationID}
            end;
        _ ->
            #{}
    end;
get_operation_id(_Req, _Env) ->
    #{}.
apps/akm/src/akm_utils.erl (new file, 307 lines)
@@ -0,0 +1,307 @@
-module(akm_utils).

-type deadline() :: woody:deadline().

-export_type([deadline/0]).

-export([deadline_to_binary/1]).
-export([deadline_from_binary/1]).
-export([deadline_from_timeout/1]).
-export([deadline_is_reached/1]).
-export([parse_lifetime/1]).

-export([base64url_to_map/1]).
-export([map_to_base64url/1]).

-export([to_universal_time/1]).

-export([unwrap/1]).
-export([define/2]).

-export([get_path/2]).
-export([get_url/2]).
-export([get_url/3]).

-export([get_last_pan_digits/1]).

-export([parse_deadline/1]).

-export([get_unique_id/0]).
-export([get_random_id/0]).

-type binding_value() :: binary().
-type url() :: binary().
-type path() :: binary().
% cowboy_router:route_match()
-type route_match() :: '_' | iodata().

-export_type([route_match/0]).

%% API

-spec deadline_to_binary(deadline()) -> binary() | undefined.
deadline_to_binary(undefined) ->
    undefined;
deadline_to_binary(Deadline) ->
    woody_deadline:to_binary(Deadline).

-spec deadline_from_binary(binary()) -> deadline() | undefined.
deadline_from_binary(undefined) ->
    undefined;
deadline_from_binary(Binary) ->
    woody_deadline:from_binary(Binary).

-spec deadline_from_timeout(timeout()) -> deadline().
deadline_from_timeout(Timeout) ->
    woody_deadline:from_timeout(Timeout).

-spec deadline_is_reached(deadline()) -> boolean().
deadline_is_reached(Deadline) ->
    woody_deadline:is_reached(Deadline).

-spec parse_lifetime
    (undefined) -> {error, bad_lifetime};
    (binary()) -> {ok, timeout()} | {error, bad_lifetime}.
parse_lifetime(undefined) ->
    {error, bad_lifetime};
parse_lifetime(Bin) ->
    %% lifetime string like '1ms', '30s', '2.6m' etc.
    %% default unit is millisecond
    case re:split(Bin, <<"^(\\d+\\.\\d+|\\d+)([a-z]*)$">>) of
        [<<>>, NumberStr, <<>>, <<>>] ->
            {ok, genlib:to_int(NumberStr)};
        [<<>>, NumberStr, Unit, <<>>] ->
            Number = genlib:to_float(NumberStr),
            case unit_factor(Unit) of
                {ok, Factor} ->
                    {ok, erlang:round(Number * Factor)};
                {error, _Reason} ->
                    {error, bad_lifetime}
            end;
        _Other ->
            {error, bad_lifetime}
    end.
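Expected behaviour of `parse_lifetime/1`, assuming `unit_factor/1` (not shown in this diff) maps `<<"ms">>`, `<<"s">>` and `<<"m">>` to 1, 1000 and 60000 respectively:

```erlang
1> akm_utils:parse_lifetime(<<"500">>).   % unitless, taken as milliseconds
{ok,500}
2> akm_utils:parse_lifetime(<<"2.6m">>).  % round(2.6 * 60000)
{ok,156000}
3> akm_utils:parse_lifetime(undefined).
{error,bad_lifetime}
```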
||||
|
||||
-spec base64url_to_map(binary()) -> map() | no_return().
|
||||
base64url_to_map(Base64) when is_binary(Base64) ->
|
||||
try
|
||||
{ok, Json} = jose_base64url:decode(Base64),
|
||||
jsx:decode(Json, [return_maps])
|
||||
catch
|
||||
Class:Reason ->
|
||||
_ = logger:debug("decoding base64 ~p to map failed with ~p:~p", [Base64, Class, Reason]),
|
||||
erlang:error(badarg)
|
||||
end.
|
||||
|
||||
-spec map_to_base64url(map()) -> binary() | no_return().
|
||||
map_to_base64url(Map) when is_map(Map) ->
|
||||
try
|
||||
jose_base64url:encode(jsx:encode(Map))
|
||||
catch
|
||||
Class:Reason ->
|
||||
_ = logger:debug("encoding map ~p to base64 failed with ~p:~p", [Map, Class, Reason]),
|
||||
erlang:error(badarg)
|
||||
end.
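
%% Round-trip example (illustrative): map_to_base64url(#{<<"a">> => 1}) should
%% yield <<"eyJhIjoxfQ">>, and base64url_to_map/1 maps it back to #{<<"a">> => 1};
%% malformed input is logged at debug level and raises badarg.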

-spec to_universal_time(Timestamp :: binary()) -> TimestampUTC :: binary().
to_universal_time(Timestamp) ->
    TimestampMS = genlib_rfc3339:parse(Timestamp, microsecond),
    genlib_rfc3339:format_relaxed(TimestampMS, microsecond).

-spec unwrap(ok | {ok, Value} | {error, _Error}) -> Value | no_return().
unwrap(ok) ->
    ok;
unwrap({ok, Value}) ->
    Value;
unwrap({error, Error}) ->
    erlang:error({unwrap_error, Error}).

-spec define(undefined | T, T) -> T.
define(undefined, V) ->
    V;
define(V, _Default) ->
    V.

-spec get_path(route_match(), [binding_value()]) -> path().
get_path(PathSpec, Params) when is_list(PathSpec) ->
    get_path(genlib:to_binary(PathSpec), Params);
get_path(Path, []) ->
    Path;
get_path(PathSpec, [Value | Rest]) ->
    [P1, P2] = split(PathSpec),
    P3 = get_next(P2),
    get_path(<<P1/binary, Value/binary, P3/binary>>, Rest).
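
%% Example (illustrative): get_path(<<"/keys/:keyID">>, [<<"42">>]) -> <<"/keys/42">>.
%% Surplus bindings fall through to split/1, which errors with param_mismatch.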

split(PathSpec) ->
    case binary:split(PathSpec, <<":">>) of
        Res = [_, _] -> Res;
        [_] -> erlang:error(param_mismatch)
    end.

get_next(PathSpec) ->
    case binary:split(PathSpec, <<"/">>) of
        [_, Next] -> <<"/", Next/binary>>;
        [_] -> <<>>
    end.

-spec get_url(url(), path()) -> url().
get_url(BaseUrl, Path) ->
    <<BaseUrl/binary, Path/binary>>.

-spec get_url(url(), route_match(), [binding_value()]) -> url().
get_url(BaseUrl, PathSpec, Params) ->
    get_url(BaseUrl, get_path(PathSpec, Params)).

-define(MASKED_PAN_MAX_LENGTH, 4).

-spec get_last_pan_digits(binary()) -> binary().
get_last_pan_digits(MaskedPan) when byte_size(MaskedPan) > ?MASKED_PAN_MAX_LENGTH ->
    binary:part(MaskedPan, {byte_size(MaskedPan), -?MASKED_PAN_MAX_LENGTH});
get_last_pan_digits(MaskedPan) ->
    MaskedPan.
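
%% Example (illustrative): get_last_pan_digits(<<"4242**4242">>) -> <<"4242">>;
%% inputs of four bytes or fewer are returned unchanged.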

-spec parse_deadline
    (binary()) -> {ok, woody:deadline()} | {error, bad_deadline};
    (undefined) -> {ok, undefined}.
parse_deadline(undefined) ->
    {ok, undefined};
parse_deadline(DeadlineStr) ->
    Parsers = [
        fun try_parse_woody_default/1,
        fun try_parse_relative/1
    ],
    try_parse_deadline(DeadlineStr, Parsers).

%%
%% Internals
%%
try_parse_deadline(_DeadlineStr, []) ->
    {error, bad_deadline};
try_parse_deadline(DeadlineStr, [P | Parsers]) ->
    case P(DeadlineStr) of
        {ok, _Deadline} = Result ->
            Result;
        {error, bad_deadline} ->
            try_parse_deadline(DeadlineStr, Parsers)
    end.

try_parse_woody_default(DeadlineStr) ->
    try
        Deadline = woody_deadline:from_binary(to_universal_time(DeadlineStr)),
        NewDeadline = clamp_max_request_deadline(woody_deadline:to_timeout(Deadline)),
        {ok, woody_deadline:from_timeout(NewDeadline)}
    catch
        error:{bad_deadline, _Reason} ->
            {error, bad_deadline};
        error:{badmatch, _} ->
            {error, bad_deadline};
        error:deadline_reached ->
            {error, bad_deadline}
    end.

try_parse_relative(DeadlineStr) ->
    %% deadline string like '1ms', '30s', '2.6m' etc (see unit_factor/1 for the supported units)
    case re:split(DeadlineStr, <<"^(\\d+\\.\\d+|\\d+)([a-z]+)$">>) of
        [<<>>, NumberStr, Unit, <<>>] ->
            Number = genlib:to_float(NumberStr),
            try_parse_relative(Number, Unit);
        _Other ->
            {error, bad_deadline}
    end.

try_parse_relative(Number, Unit) ->
    case unit_factor(Unit) of
        {ok, Factor} ->
            Timeout = erlang:round(Number * Factor),
            {ok, woody_deadline:from_timeout(clamp_max_request_deadline(Timeout))};
        {error, _Reason} ->
            {error, bad_deadline}
    end.

unit_factor(<<"ms">>) ->
    {ok, 1};
unit_factor(<<"s">>) ->
    {ok, 1000};
unit_factor(<<"m">>) ->
    {ok, 1000 * 60};
unit_factor(_Other) ->
    {error, unknown_unit}.
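
%% Examples (illustrative, matching the eunit cases below):
%%   parse_deadline(<<"15s">>) -> {ok, Deadline}         % relative form, clamped
%%   parse_deadline(<<"15h">>) -> {error, bad_deadline}  % no 'h' in unit_factor/1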

% 1 min
-define(MAX_REQUEST_DEADLINE_TIME, timer:minutes(1)).

clamp_max_request_deadline(Value) when is_integer(Value) ->
    MaxDeadline = genlib_app:env(akm, max_request_deadline, ?MAX_REQUEST_DEADLINE_TIME),
    case Value > MaxDeadline of
        true ->
            MaxDeadline;
        false ->
            Value
    end.
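
%% Illustrative effect of the clamp: with the default max_request_deadline of
%% one minute, parse_deadline(<<"5m">>) yields a deadline only ~60s away.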

-spec get_unique_id() -> binary().
get_unique_id() ->
    akm_id:generate_snowflake_id().

-spec get_random_id() -> binary().
get_random_id() ->
    Random = crypto:strong_rand_bytes(16),
    genlib_format:format_int_base(binary:decode_unsigned(Random), 62).
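
%% Illustrative: 16 random bytes rendered in base 62 give an id of at most
%% 22 characters; uniqueness relies on crypto:strong_rand_bytes/1 entropy.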

%%

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

-spec test() -> _.

-spec to_universal_time_test() -> _.
to_universal_time_test() ->
    ?assertEqual(<<"2017-04-19T13:56:07Z">>, to_universal_time(<<"2017-04-19T13:56:07Z">>)),
    ?assertEqual(<<"2017-04-19T13:56:07.530Z">>, to_universal_time(<<"2017-04-19T13:56:07.53Z">>)),
    ?assertEqual(<<"2017-04-19T10:36:07.530Z">>, to_universal_time(<<"2017-04-19T13:56:07.53+03:20">>)),
    ?assertEqual(<<"2017-04-19T17:16:07.530Z">>, to_universal_time(<<"2017-04-19T13:56:07.53-03:20">>)).

-spec get_path_test() -> _.
get_path_test() ->
    ?assertEqual(
        <<"/wallet/v0/deposits/11/events/42">>,
        get_path(
            <<"/wallet/v0/deposits/:depositID/events/:eventID">>,
            [<<"11">>, <<"42">>]
        )
    ),
    ?assertEqual(
        <<"/wallet/v0/deposits/11/events/42">>,
        get_path(
            "/wallet/v0/deposits/:depositID/events/:eventID",
            [<<"11">>, <<"42">>]
        )
    ),
    ?assertError(
        param_mismatch,
        get_path(
            "/wallet/v0/deposits/:depositID/events/:eventID",
            [<<"11">>, <<"42">>, <<"0">>]
        )
    ).

-spec parse_deadline_test() -> _.
parse_deadline_test() ->
    Deadline = woody_deadline:from_timeout(3000),
    BinDeadline = woody_deadline:to_binary(Deadline),
    {ok, {_, _}} = parse_deadline(BinDeadline),
    ?assertEqual({error, bad_deadline}, parse_deadline(<<"2017-04-19T13:56:07.53Z">>)),
    {ok, {_, _}} = parse_deadline(<<"15s">>),
    {ok, {_, _}} = parse_deadline(<<"15m">>),
    {error, bad_deadline} = parse_deadline(<<"15h">>).

-spec parse_lifetime_test() -> _.
parse_lifetime_test() ->
    {ok, 16 * 1000} = parse_lifetime(<<"16s">>),
    {ok, 32 * 60 * 1000} = parse_lifetime(<<"32m">>),
    {error, bad_lifetime} = parse_lifetime(undefined),
    {error, bad_lifetime} = parse_lifetime(<<"64h">>).

-endif.
96 apps/akm/src/akm_woody_client.erl Normal file
@@ -0,0 +1,96 @@
-module(akm_woody_client).

-export([call_service/4]).
-export([call_service/5]).

-export([get_service_modname/1]).
-export([get_service_deadline/1]).

%%
-define(APP, akm).

-type service_name() :: atom().

-export_type([service_name/0]).

-spec call_service(service_name(), woody:func(), woody:args(), woody_context:ctx()) -> woody:result().
call_service(ServiceName, Function, Args, Context) ->
    call_service(ServiceName, Function, Args, Context, scoper_woody_event_handler).

-spec call_service(service_name(), woody:func(), woody:args(), woody_context:ctx(), woody:ev_handler()) ->
    woody:result().
call_service(ServiceName, Function, Args, Context0, EventHandler) ->
    Deadline = get_service_deadline(ServiceName),
    Context1 = set_deadline(Deadline, Context0),
    Retry = get_service_retry(ServiceName, Function),
    call_service(ServiceName, Function, Args, Context1, EventHandler, Retry).

call_service(ServiceName, Function, Args, Context, EventHandler, Retry) ->
    Url = get_service_url(ServiceName),
    Service = get_service_modname(ServiceName),
    Request = {Service, Function, Args},
    try
        woody_client:call(
            Request,
            #{url => Url, event_handler => EventHandler},
            Context
        )
    catch
        error:{woody_error, {_Source, Class, _Details}} = Error when
            Class =:= resource_unavailable orelse Class =:= result_unknown
        ->
            NextRetry = apply_retry_strategy(Retry, Error, Context),
            call_service(ServiceName, Function, Args, Context, EventHandler, NextRetry)
    end.

apply_retry_strategy(Retry, Error, Context) ->
    apply_retry_step(genlib_retry:next_step(Retry), woody_context:get_deadline(Context), Error).

apply_retry_step(finish, _, Error) ->
    erlang:error(Error);
apply_retry_step({wait, Timeout, Retry}, undefined, _) ->
    ok = timer:sleep(Timeout),
    Retry;
apply_retry_step({wait, Timeout, Retry}, Deadline0, Error) ->
    Deadline1 = woody_deadline:from_unixtime_ms(
        woody_deadline:to_unixtime_ms(Deadline0) - Timeout
    ),
    case woody_deadline:is_reached(Deadline1) of
        true ->
            % no more time for retries
            erlang:error(Error);
        false ->
            ok = timer:sleep(Timeout),
            Retry
    end.

get_service_url(ServiceName) ->
    maps:get(ServiceName, genlib_app:env(?APP, service_urls)).

-spec get_service_modname(service_name()) -> woody:service().
get_service_modname(token_storage) ->
    {tds_storage_thrift, 'TokenStorage'}.

-spec get_service_deadline(service_name()) -> undefined | woody_deadline:deadline().
get_service_deadline(ServiceName) ->
    ServiceDeadlines = genlib_app:env(?APP, service_deadlines, #{}),
    case maps:get(ServiceName, ServiceDeadlines, undefined) of
        Timeout when is_integer(Timeout) andalso Timeout >= 0 ->
            woody_deadline:from_timeout(Timeout);
        undefined ->
            undefined
    end.

set_deadline(Deadline, Context) ->
    case woody_context:get_deadline(Context) of
        undefined ->
            woody_context:set_deadline(Deadline, Context);
        _AlreadySet ->
            Context
    end.

get_service_retry(ServiceName, Function) ->
    ServiceRetries = genlib_app:env(?APP, service_retries, #{}),
    FunctionRetries = maps:get(ServiceName, ServiceRetries, #{}),
    DefaultRetry = maps:get('_', FunctionRetries, finish),
    maps:get(Function, FunctionRetries, DefaultRetry).
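
%% Illustrative `service_retries` entry (hypothetical function name and values,
%% mirroring the retry maps used elsewhere in sys.config):
%%   {service_retries, #{
%%       token_storage => #{
%%           'PutToken' => {linear, 3, 1000},
%%           '_' => finish
%%       }
%%   }}
%% Absent any entry, the strategy defaults to `finish`: a failed call is not retried.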
55 compose.yaml Normal file
@@ -0,0 +1,55 @@
services:

  testrunner:
    image: $DEV_IMAGE_TAG
    build:
      dockerfile: Dockerfile.dev
      context: .
      args:
        OTP_VERSION: $OTP_VERSION
        THRIFT_VERSION: $THRIFT_VERSION
    volumes:
      - .:$PWD
    hostname: akm
    depends_on:
      db:
        condition: service_healthy
    working_dir: $PWD

  db:
    image: postgres
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: apikeymgmtv2
    ports:
      - 5432:5432
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5

  token-keeper:
    image: ghcr.io/valitydev/token-keeper:sha-42d4a27
    command: /opt/token-keeper/bin/token-keeper foreground
    depends_on:
      machinegun:
        condition: service_healthy
    healthcheck:
      test: "/opt/token-keeper/bin/token-keeper ping"
      interval: 10s
      timeout: 5s
      retries: 10

  machinegun:
    image: ghcr.io/valitydev/machinegun:sha-058bada
    command: /opt/machinegun/bin/machinegun foreground
    volumes:
      - ./test/machinegun/config.yaml:/opt/machinegun/etc/config.yaml
      - ./test/machinegun/cookie:/opt/machinegun/etc/cookie
    healthcheck:
      test: "/opt/machinegun/bin/machinegun ping"
      interval: 5s
      timeout: 1s
      retries: 20
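
# Illustrative usage, not part of the original file (assumes the standard
# Docker Compose CLI and the variables from .env exported in the shell):
#   DEV_IMAGE_TAG=akm-dev docker compose run --rm testrunner rebar3 ct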
142 config/sys.config Normal file
@@ -0,0 +1,142 @@
[
    {kernel, [
        {log_level, info},
        {logger, [
            {handler, default, logger_std_h, #{
                level => debug,
                config => #{
                    type => {file, "/var/log/api-key-mgmt-v2/console.json"},
                    sync_mode_qlen => 20
                },
                formatter => {logger_logstash_formatter, #{}}
            }}
        ]}
    ]},

    {scoper, [
        {storage, scoper_storage_logger}
    ]},

    {dmt_client, [
        % milliseconds
        {cache_update_interval, 5000},
        {max_cache_size, #{
            elements => 20,
            % 50Mb
            memory => 52428800
        }},
        {woody_event_handlers, [
            {scoper_woody_event_handler, #{
                event_handler_opts => #{
                    formatter_opts => #{
                        max_length => 1000
                    }
                }
            }}
        ]},
        {service_urls, #{
            'Repository' => <<"http://dominant:8022/v1/domain/repository">>,
            'RepositoryClient' => <<"http://dominant:8022/v1/domain/repository_client">>
        }}
    ]},

    {akm, [
        {ip, "::"},
        {port, 8080},
        %% To send ASCII text in 5xx replies
        %% {oops_bodies, #{
        %%     500 => "oops_bodies/500_body"
        %% }},
        {transport, thrift},
        {access_conf, #{
            jwt => #{
                keyset => #{
                    akm => {pem_file, "var/keys/api-key-mgmt-v2/private.pem"}
                }
            }
        }},
        {health_check, #{
            disk => {erl_health, disk, ["/", 99]},
            memory => {erl_health, cg_memory, [99]},
            service => {erl_health, service, [<<"api-key-mgmt-v2">>]}
        }},
        % milliseconds
        {max_request_deadline, 60000},
        % seconds
        {file_storage_url_lifetime, 60},
        {lechiffre_opts, #{
            encryption_source => {json, {file, <<"path/to/pub.secret">>}},
            decryption_sources => [{json, {file, <<"path/to/priv.secret">>}}]
        }},

        {epsql_connection, #{
            host => "db",
            port => 5432,
            username => "postgres",
            password => "postgres",
            database => "apikeymgmtv2"
        }}
    ]},

    {how_are_you, [
        {metrics_publishers, [
            % {hay_statsd_publisher, #{
            %     key_prefix => <<"api-key-mgmt-v2.">>,
            %     host => "localhost",
            %     port => 8125
            % }}
        ]}
    ]},

    {snowflake, [
        % {machine_id, 42}
    ]},

    {bender_client, [
        {services, #{
            'Bender' => <<"http://bender:8022/v1/bender">>,
            'Generator' => <<"http://bender:8022/v1/generator">>
        }},
        {deadline, 60000}
        %{retries, #{
        %    'GenerateID' => finish,
        %    'GetInternalID' => finish,
        %    '_' => finish
        %}}
    ]},

    {bouncer_client, [
        {service_clients, #{
            bouncer => #{
                url => <<"http://bouncer:8022/">>,
                timeout => 500,
                retries => #{
                    'Judge' => {linear, 1, 100},
                    '_' => finish
                }
            },
            org_management => #{
                url => <<"http://orgmgmt:8022/">>,
                retries => #{
                    'GetUserContext' => {linear, 2, 500},
                    '_' => finish
                }
            }
        }}
    ]},

    {token_keeper_client, [
        {service_client, #{
            url => <<"http://token-keeper:8022/">>,
            timeout => 1000,
            retries => #{
                'GetByToken' => {linear, 3, 100},
                '_' => finish
            }
        }}
    ]},

    {prometheus, [
        {collectors, [default]}
    ]}
].
6 config/vm.args Normal file
@@ -0,0 +1,6 @@
-sname ffsrv

-setcookie ffsrv

+K true
+A 10
60 elvis.config Normal file
@@ -0,0 +1,60 @@
[
    {elvis, [
        {verbose, true},
        {config, [
            #{
                dirs => ["apps/**/src", "apps/**/include"],
                filter => "*.erl",
                ruleset => erl_files,
                rules => [
                    %% Common settings
                    {elvis_text_style, line_length, #{limit => 120}},
                    {elvis_style, nesting_level, #{level => 3}},
                    {elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
                    {elvis_style, no_if_expression, disable},
                    %% Project settings
                    % Verbose authorization code triggers this otherwise
                    {elvis_style, dont_repeat_yourself, #{min_complexity => 35}},
                    {elvis_style, atom_naming_convention, #{}}
                ]
            },
            #{
                dirs => ["apps/**/test"],
                filter => "*.erl",
                ruleset => erl_files,
                rules => [
                    {elvis_text_style, line_length, #{limit => 120}},
                    {elvis_style, nesting_level, #{level => 3}},
                    {elvis_style, no_if_expression, disable},
                    % We want to use `ct:pal/2` and friends in test code.
                    {elvis_style, no_debug_call, disable},
                    % Assert macros can trigger use of ignored binding, yet we want them for better
                    % readability.
                    {elvis_style, used_ignored_variable, disable},
                    % Tests are usually more comprehensible when a bit more verbose.
                    {elvis_style, dont_repeat_yourself, #{min_complexity => 50}},
                    {elvis_style, god_modules, disable}
                ]
            },
            #{
                dirs => ["."],
                filter => "Makefile",
                ruleset => makefiles
            },
            #{
                dirs => ["."],
                filter => "elvis.config",
                ruleset => elvis_config
            },
            #{
                dirs => ["apps/*/src"],
                filter => "*.app.src",
                rules => [
                    {elvis_text_style, line_length, #{limit => 120}},
                    {elvis_text_style, no_tabs},
                    {elvis_text_style, no_trailing_whitespace}
                ]
            }
        ]}
    ]}
].
19 migrations/1686524106-create_api_keys.sql Normal file
@@ -0,0 +1,19 @@
-- migrations/1686524106-create_api_keys.sql
-- :up
-- Up migration
CREATE TYPE apikeys_status AS ENUM ('active', 'revoked');

CREATE TABLE apikeys (
    id TEXT,
    name TEXT,
    party_id TEXT,
    status apikeys_status,
    revoke_token TEXT,
    metadata TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- :down
-- Down migration
DROP TABLE apikeys;

DROP TYPE apikeys_status;
1 psql-migration Submodule
@@ -0,0 +1 @@
Subproject commit c84b06fc7e1603783eb81eaad214552151ed9066
156 rebar.config Normal file
@@ -0,0 +1,156 @@
%% Common project erlang options.
{erl_opts, [
    % mandatory
    debug_info,
    warnings_as_errors,
    warn_export_all,
    warn_missing_spec,
    warn_untyped_record,
    warn_export_vars,

    % by default
    warn_unused_record,
    warn_bif_clash,
    warn_obsolete_guard,
    warn_unused_vars,
    warn_shadow_vars,
    warn_unused_import,
    warn_unused_function,
    warn_deprecated_function

    % at will
    % bin_opt_info
    % no_auto_import
    % warn_missing_spec_all
]}.

% Common project dependencies.
{deps, [
    {genlib, {git, "https://github.com/valitydev/genlib.git", {branch, "master"}}},
    {cowboy_draining_server, {git, "https://github.com/valitydev/cowboy_draining_server.git", {branch, "master"}}},
    {uuid, {git, "https://github.com/okeuday/uuid.git", {branch, "master"}}},
    {scoper, {git, "https://github.com/valitydev/scoper.git", {branch, "master"}}},
    {erl_health, {git, "https://github.com/valitydev/erlang-health.git", {branch, "master"}}},
    {cowboy_cors, {git, "https://github.com/valitydev/cowboy_cors.git", {branch, master}}},
    {cowboy_access_log, {git, "https://github.com/valitydev/cowboy_access_log.git", {branch, "master"}}},
    {woody_user_identity, {git, "https://github.com/valitydev/woody_erlang_user_identity.git", {branch, "master"}}},
    {bouncer_proto, {git, "https://github.com/valitydev/bouncer-proto.git", {branch, "master"}}},
    {bouncer_client, {git, "https://github.com/valitydev/bouncer-client-erlang", {branch, "master"}}},
    {epgsql_pool, {git, "https://github.com/wgnet/epgsql_pool", {branch, "master"}}},
    {token_keeper_client, {git, "https://github.com/valitydev/token-keeper-client", {branch, "master"}}},

    %% Libraries generated with swagger-codegen-erlang from valitydev/swag-api-keys
    {swag_server_apikeys,
        {git, "https://github.com/valitydev/swag-api-keys-v2.git", {branch, "release/erlang/server/master"}}},
    {swag_client_apikeys,
        {git, "https://github.com/valitydev/swag-api-keys-v2.git", {branch, "release/erlang/client/master"}}},

    %% NOTE
    %% Pinning to version "1.11.2" from hex here causes constant upgrading and recompilation of the entire project
    {jose, {git, "https://github.com/potatosalad/erlang-jose.git", {tag, "1.11.2"}}}
]}.

%% XRef checks
{xref_checks, [
    undefined_function_calls,
    undefined_functions,
    deprecated_functions_calls,
    deprecated_functions
]}.

%% Dialyzer static analyzing
{dialyzer, [
    {warnings, [
        % mandatory
        unmatched_returns,
        error_handling,
        race_conditions,
        unknown
    ]},
    {plt_apps, all_deps}
]}.

{profiles, [
    {prod, [
        {deps, [
            % NOTE
            % Because of a dependency conflict, prometheus libs are only included in production build for now
            % https://github.com/project-fifo/rebar3_lint/issues/42
            % https://github.com/valitydev/hellgate/pull/2/commits/884724c1799703cee4d1033850fe32c17f986d9e
            {prometheus, "4.8.1"},
            {prometheus_cowboy, "0.1.8"},
            % Introspect a node running in production
            {recon, "2.5.2"},
            {logger_logstash_formatter,
                {git, "https://github.com/valitydev/logger_logstash_formatter.git", {ref, "2c7b716"}}},
            {iosetopts, {git, "https://github.com/valitydev/iosetopts.git", {ref, "edb445c"}}}
        ]},
        {relx, [
            {release, {'api-key-mgmt-v2', "0.1.0"}, [
                iosetopts,
                % debugger
                {runtime_tools, load},
                % profiler
                {tools, load},
                {recon, load},
                {logger_logstash_formatter, load},
                prometheus,
                prometheus_cowboy,
                sasl,
                akm
            ]},
            {sys_config, "./config/sys.config"},
            {vm_args, "./config/vm.args"},
            {mode, minimal},
            {extended_start_script, true},
            %% api-key-mgmt-v2
            {overlay, [
                {mkdir, "var/keys/api-key-mgmt-v2"},
                {copy,
                    "apps/api-key-mgmt-v2/var/keys/api-key-mgmt-v2/private.pem",
                    "var/keys/api-key-mgmt-v2/private.pem"}
            ]}
        ]}
    ]},

    {test, [
        {deps, [
            {meck, "0.9.2"}
        ]},
        {cover_enabled, true},
        {cover_excl_apps, [
            swag_client_payres,
            swag_client_wallet,
            swag_server_apikeys
        ]},
        {dialyzer, [{plt_extra_apps, [eunit, common_test, meck, swag_client_wallet]}]}
    ]}
]}.

{project_plugins, [
    {rebar3_lint, "1.0.1"},
    {erlfmt, "1.0.0"},
    {covertool, "2.0.4"}
]}.

%% Linter config.
{elvis_output_format, colors}.

{erlfmt, [
    {print_width, 120},
    {files, [
        "apps/api-key-mgmt*/{src,include,test}/*.{hrl,erl,app.src}",
        "rebar.config",
        "elvis.config",
        "config/sys.config",
        "test/*/sys.config"
    ]}
]}.

{covertool, [
    {coverdata_files, [
        "eunit.coverdata",
        "ct.coverdata"
    ]}
]}.
200 rebar.lock Normal file
@@ -0,0 +1,200 @@
{"1.2.0",
[{<<"bender_client">>,
  {git,"https://github.com/valitydev/bender-client-erlang.git",
       {ref,"4e15070a194ed2f3f033891eb2da935982a06c30"}},
  0},
 {<<"bender_proto">>,
  {git,"https://github.com/valitydev/bender-proto.git",
       {ref,"71c56878c1cf154cdfab9bbc563ddba25abe7259"}},
  0},
 {<<"bouncer_client">>,
  {git,"https://github.com/valitydev/bouncer-client-erlang",
       {ref,"79d9d0144ed66537ec25302aeba8f133bddb05d7"}},
  0},
 {<<"bouncer_proto">>,
  {git,"https://github.com/valitydev/bouncer-proto.git",
       {ref,"b23c905db51915737fdab80c2a3af4c546b32799"}},
  0},
 {<<"cache">>,{pkg,<<"cache">>,<<"2.3.3">>},1},
 {<<"certifi">>,{pkg,<<"certifi">>,<<"2.6.1">>},2},
 {<<"cg_mon">>,
  {git,"https://github.com/rbkmoney/cg_mon.git",
       {ref,"5a87a37694e42b6592d3b4164ae54e0e87e24e18"}},
  1},
 {<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.9.0">>},1},
 {<<"cowboy_access_log">>,
  {git,"https://github.com/valitydev/cowboy_access_log.git",
       {ref,"04da359e022cf05c5c93812504d5791d6bc97453"}},
  0},
 {<<"cowboy_cors">>,
  {git,"https://github.com/valitydev/cowboy_cors.git",
       {ref,"5a3b084fb8c5a4ff58e3c915a822d709d6023c3b"}},
  0},
 {<<"cowboy_draining_server">>,
  {git,"https://github.com/valitydev/cowboy_draining_server.git",
       {ref,"186cf4d0722d4ad79afe73d371df6b1371e51905"}},
  0},
 {<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.11.0">>},2},
 {<<"damsel">>,
  {git,"https://github.com/valitydev/damsel.git",
       {ref,"bfedcb9dbb0bfdbd7a06a86417b49be6e807b98d"}},
  0},
 {<<"dmt_client">>,
  {git,"https://github.com/valitydev/dmt-client.git",
       {ref,"ce6678af1499230fe13f8b34258aabe8b92ac722"}},
  0},
 {<<"dmt_core">>,
  {git,"https://github.com/valitydev/dmt-core.git",
       {ref,"75841332fe0b40a77da0c12ea8d5dbb994da8e82"}},
  1},
 {<<"email_validator">>,{pkg,<<"email_validator">>,<<"1.1.0">>},1},
 {<<"epgsql">>,{pkg,<<"epgsql">>,<<"4.5.0">>},1},
 {<<"epgsql_pool">>,
  {git,"https://github.com/wgnet/epgsql_pool",
       {ref,"f5e492f73752950aab932a1662536e22fc00c717"}},
  0},
 {<<"erl_health">>,
  {git,"https://github.com/valitydev/erlang-health.git",
       {ref,"5958e2f35cd4d09f40685762b82b82f89b4d9333"}},
  0},
 {<<"file_storage_proto">>,
  {git,"https://github.com/valitydev/file-storage-proto.git",
       {ref,"1dbc0067db68780660b4f691ea6ca6d5f68d56aa"}},
  0},
 {<<"fistful_proto">>,
  {git,"https://github.com/valitydev/fistful-proto.git",
       {ref,"f19e383fa596e7b672616858b56d8ff6a1022994"}},
  0},
 {<<"fistful_reporter_proto">>,
  {git,"https://github.com/valitydev/fistful-reporter-proto.git",
       {ref,"69565e48f036ded9b5ecc337b4f631d0e2fa6f8d"}},
  0},
 {<<"genlib">>,
  {git,"https://github.com/valitydev/genlib.git",
       {ref,"b08ef4d61e0dde98995ec3d2f69a4447255e79ef"}},
  0},
 {<<"gproc">>,{pkg,<<"gproc">>,<<"0.9.0">>},1},
 {<<"gun">>,
  {git,"https://github.com/ninenines/gun.git",
       {ref,"e7dd9f227e46979d8073e71c683395a809b78cb4"}},
  1},
 {<<"hackney">>,{pkg,<<"hackney">>,<<"1.17.4">>},1},
 {<<"herd">>,
  {git,"https://github.com/wgnet/herd.git",
       {ref,"934847589dcf5a6d2b02a1f546ffe91c04066f17"}},
  1},
 {<<"identdocstore_proto">>,
  {git,"https://github.com/valitydev/identdocstore-proto.git",
       {ref,"0ab676da2bb23eb04c42e02325c40c413d74856e"}},
  0},
 {<<"idna">>,{pkg,<<"idna">>,<<"6.1.1">>},2},
 {<<"jesse">>,
  {git,"https://github.com/valitydev/jesse.git",
       {ref,"f4ff58e79ebe65650f9c445e730ad4c8d7f463a0"}},
  1},
 {<<"jose">>,
  {git,"https://github.com/potatosalad/erlang-jose.git",
       {ref,"991649695aaccd92c8effb1c1e88e6159fe8e9a6"}},
  0},
 {<<"jsx">>,{pkg,<<"jsx">>,<<"3.1.0">>},1},
 {<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},2},
 {<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.2.0">>},2},
 {<<"msgpack_proto">>,
  {git,"https://github.com/valitydev/msgpack-proto.git",
       {ref,"7e447496aa5df4a5f1ace7ef2e3c31248b2a3ed0"}},
  1},
 {<<"org_management_proto">>,
  {git,"https://github.com/valitydev/org-management-proto",
       {ref,"03a269df4805fa604e8fd2d04241619a739e2ae3"}},
  1},
 {<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.4.1">>},1},
 {<<"pooler">>,{pkg,<<"pooler">>,<<"1.5.3">>},1},
 {<<"quickrand">>,
  {git,"https://github.com/okeuday/quickrand.git",
       {ref,"7fe89e9cfcc1378b7164e9dac4e7f02119110b68"}},
  1},
 {<<"ranch">>,{pkg,<<"ranch">>,<<"1.8.0">>},2},
 {<<"scoper">>,
  {git,"https://github.com/valitydev/scoper.git",
       {ref,"87110f5bd72c0e39ba9b7d6eca88fea91b8cd357"}},
  0},
 {<<"snowflake">>,
  {git,"https://github.com/valitydev/snowflake.git",
       {ref,"de159486ef40cec67074afe71882bdc7f7deab72"}},
  1},
 {<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.6">>},2},
 {<<"swag_client_apikeys">>,
  {git,"https://github.com/valitydev/swag-api-keys-v2.git",
       {ref,"9e981fbc7b59468ad0c710cf7743b0e42adccd00"}},
  0},
 {<<"swag_server_apikeys">>,
  {git,"https://github.com/valitydev/swag-api-keys-v2.git",
       {ref,"06febd46dceee06633f34af82dfe798b34fecf09"}},
  0},
 {<<"tds_proto">>,
  {git,"https://github.com/valitydev/tds-proto.git",
       {ref,"48bca4905215ac492ab4764441c04db7cf1f3394"}},
  0},
 {<<"thrift">>,
  {git,"https://github.com/valitydev/thrift-erlang.git",
       {ref,"3f3e11246d90aefa8f58b35e4f2eab14c0c28bd2"}},
  0},
 {<<"token_keeper_client">>,
  {git,"https://github.com/valitydev/token-keeper-client",
       {ref,"de4f666c93a71c8fcab2cfb3b77f2e91225b650c"}},
  0},
 {<<"token_keeper_proto">>,
  {git,"https://github.com/valitydev/token-keeper-proto.git",
       {ref,"094b4f05a4e220df79911c25093feffea1cb868b"}},
  1},
 {<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.7.0">>},2},
 {<<"uuid">>,
  {git,"https://github.com/okeuday/uuid.git",
       {ref,"965c76b7343530cf940a808f497eef37d0a332e6"}},
  0},
 {<<"woody">>,
  {git,"https://github.com/valitydev/woody_erlang.git",
       {ref,"68b191ed3655dbf40d0ba687f17f75ddd74e82da"}},
  0},
 {<<"woody_user_identity">>,
  {git,"https://github.com/valitydev/woody_erlang_user_identity.git",
       {ref,"a480762fea8d7c08f105fb39ca809482b6cb042e"}},
  0}]}.
[
{pkg_hash,[
 {<<"cache">>, <<"B23A5FE7095445A88412A6E614C933377E0137B44FFED77C9B3FEF1A731A20B2">>},
 {<<"certifi">>, <<"DBAB8E5E155A0763EEA978C913CA280A6B544BFA115633FA20249C3D396D9493">>},
 {<<"cowboy">>, <<"865DD8B6607E14CF03282E10E934023A1BD8BE6F6BACF921A7E2A96D800CD452">>},
 {<<"cowlib">>, <<"0B9FF9C346629256C42EBE1EEB769A83C6CB771A6EE5960BD110AB0B9B872063">>},
 {<<"email_validator">>, <<"7E09A862E9AA99AE2CA6FD2A718D2B94360E32940A1339B53DFEE6B774BCDB03">>},
 {<<"epgsql">>, <<"CA863EE3A771E7696AE58EC924A29DF8435CDAFFA64DBA70C02DD2571AD2122D">>},
 {<<"gproc">>, <<"853CCB7805E9ADA25D227A157BA966F7B34508F386A3E7E21992B1B484230699">>},
 {<<"hackney">>, <<"99DA4674592504D3FB0CFEF0DB84C3BA02B4508BAE2DFF8C0108BAA0D6E0977C">>},
 {<<"idna">>, <<"8A63070E9F7D0C62EB9D9FCB360A7DE382448200FBBD1B106CC96D3D8099DF8D">>},
 {<<"jsx">>, <<"D12516BAA0BB23A59BB35DCCAF02A1BD08243FCBB9EFE24F2D9D056CCFF71268">>},
 {<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
 {<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
 {<<"parse_trans">>, <<"6E6AA8167CB44CC8F39441D05193BE6E6F4E7C2946CB2759F015F8C56B76E5FF">>},
 {<<"pooler">>, <<"898CD1FA301FC42D4A8ED598CE139B71CA85B54C16AB161152B5CC5FBDCFA1A8">>},
 {<<"ranch">>, <<"8C7A100A139FD57F17327B6413E4167AC559FBC04CA7448E9BE9057311597A1D">>},
 {<<"ssl_verify_fun">>, <<"CF344F5692C82D2CD7554F5EC8FD961548D4FD09E7D22F5B62482E5AEAEBD4B0">>},
 {<<"unicode_util_compat">>, <<"BC84380C9AB48177092F43AC89E4DFA2C6D62B40B8BD132B1059ECC7232F9A78">>}]},
{pkg_hash_ext,[
 {<<"cache">>, <<"44516CE6FA03594D3A2AF025DD3A87BFE711000EB730219E1DDEFC816E0AA2F4">>},
 {<<"certifi">>, <<"524C97B4991B3849DD5C17A631223896272C6B0AF446778BA4675A1DFF53BB7E">>},
 {<<"cowboy">>, <<"2C729F934B4E1AA149AFF882F57C6372C15399A20D54F65C8D67BEF583021BDE">>},
 {<<"cowlib">>, <<"2B3E9DA0B21C4565751A6D4901C20D1B4CC25CBB7FD50D91D2AB6DD287BC86A9">>},
 {<<"email_validator">>, <<"2B1E6DF7BB14155C8D7D131F1C95CF4676200BC056EEBA82123396833FF94DA2">>},
 {<<"epgsql">>, <<"0A02D338CC1426C5873B412FED9D694F7B5143933C5F85F244655A5E77B23078">>},
 {<<"gproc">>, <<"587E8AF698CCD3504CF4BA8D90F893EDE2B0F58CABB8A916E2BF9321DE3CF10B">>},
 {<<"hackney">>, <<"DE16FF4996556C8548D512F4DBE22DD58A587BF3332E7FD362430A7EF3986B16">>},
 {<<"idna">>, <<"92376EB7894412ED19AC475E4A86F7B413C1B9FBB5BD16DCCD57934157944CEA">>},
 {<<"jsx">>, <<"0C5CC8FDC11B53CC25CF65AC6705AD39E54ECC56D1C22E4ADB8F5A53FB9427F3">>},
 {<<"metrics">>, <<"69B09ADDDC4F74A40716AE54D140F93BEB0FB8978D8636EADED0C31B6F099F16">>},
 {<<"mimerl">>, <<"F278585650AA581986264638EBF698F8BB19DF297F66AD91B18910DFC6E19323">>},
 {<<"parse_trans">>, <<"620A406CE75DADA827B82E453C19CF06776BE266F5A67CFF34E1EF2CBB60E49A">>},
 {<<"pooler">>, <<"058D85C5081289B90E97E4DDDBC3BB5A3B4A19A728AB3BC88C689EFCC36A07C7">>},
 {<<"ranch">>, <<"49FBCFD3682FAB1F5D109351B61257676DA1A2FDBE295904176D5E521A2DDFE5">>},
 {<<"ssl_verify_fun">>, <<"BDB0D2471F453C88FF3908E7686F86F9BE327D065CC1EC16FA4540197EA04680">>},
 {<<"unicode_util_compat">>, <<"25EEE6D67DF61960CF6A794239566599B09E17E668D3700247BC498638152521">>}]}
].