Implement a thrift service around opa policy solver (#1)

Co-authored-by: Andrey Fadeev <me@ciiol.net>
Andrew Mayorov 2020-10-14 12:09:26 +03:00 committed by GitHub
parent c928a722c4
commit 9004e3fe50
25 changed files with 3746 additions and 0 deletions

15
.gitignore vendored Normal file

@@ -0,0 +1,15 @@
log
/_build/
*~
erl_crash.dump
.tags*
*.sublime-workspace
.DS_Store
*~
\#*
.\#*
.tags*
Dockerfile
docker-compose.yml
rebar3.crashdump
/_checkouts/

3
.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "build-utils"]
path = build_utils
url = git@github.com:rbkmoney/build_utils

24
Dockerfile.sh Executable file

@@ -0,0 +1,24 @@
#!/bin/bash
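# Renders a Dockerfile to stdout. Presumably invoked by the build_utils image tooling with
# BASE_IMAGE, BASE_IMAGE_NAME, BASE_IMAGE_TAG, SERVICE_NAME, BUILD_IMAGE_TAG and (optionally)
# BRANCH_NAME exported; treat that as an assumption about the caller, not a contract.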
cat <<EOF
FROM $BASE_IMAGE
LABEL maintainer="Andrey Mayorov <a.mayorov@rbkmoney.com>"
RUN mkdir -p /var/log/bouncer
COPY ./_build/prod/rel/bouncer /opt/bouncer
WORKDIR /opt/bouncer
CMD /opt/bouncer/bin/bouncer foreground
EXPOSE 8022
LABEL com.rbkmoney.$SERVICE_NAME.parent=$BASE_IMAGE_NAME \
com.rbkmoney.$SERVICE_NAME.parent_tag=$BASE_IMAGE_TAG \
com.rbkmoney.$SERVICE_NAME.build_img=build \
com.rbkmoney.$SERVICE_NAME.build_img_tag=$BUILD_IMAGE_TAG \
com.rbkmoney.$SERVICE_NAME.commit_id=$(git rev-parse HEAD) \
com.rbkmoney.$SERVICE_NAME.commit_number=$(git rev-list --count HEAD) \
com.rbkmoney.$SERVICE_NAME.branch=$( \
if [ "HEAD" != $(git rev-parse --abbrev-ref HEAD) ]; then \
echo $(git rev-parse --abbrev-ref HEAD); \
elif [ -n "$BRANCH_NAME" ]; then \
echo $BRANCH_NAME; \
else \
echo $(git name-rev --name-only HEAD); \
fi)
EOF

23
Jenkinsfile vendored Normal file

@@ -0,0 +1,23 @@
#!groovy
// -*- mode: groovy -*-
def finalHook = {
runStage('store CT logs') {
archive '_build/test/logs/'
}
}
build('bouncer', 'docker-host', finalHook) {
checkoutRepo()
loadBuildUtils()
def pipeErlangService
runStage('load pipeline') {
env.JENKINS_LIB = "build_utils/jenkins_lib"
env.SH_TOOLS = "build_utils/sh"
pipeErlangService = load("${env.JENKINS_LIB}/pipeErlangService.groovy")
}
pipeErlangService.runPipe(true, true)
}

67
Makefile Normal file

@@ -0,0 +1,67 @@
REBAR := $(shell which rebar3 2>/dev/null || which ./rebar3)
SUBMODULES = build_utils
SUBTARGETS = $(patsubst %,%/.git,$(SUBMODULES))
UTILS_PATH := build_utils
TEMPLATES_PATH := .
# Name of the service
SERVICE_NAME := bouncer
# Service image default tag
SERVICE_IMAGE_TAG ?= $(shell git rev-parse HEAD)
# The tag for service image to be pushed with
SERVICE_IMAGE_PUSH_TAG ?= $(SERVICE_IMAGE_TAG)
# Base image for the service
BASE_IMAGE_NAME := service-erlang
BASE_IMAGE_TAG := b5bbfa829d0e504e2755f529282a20876c86ec61
BUILD_IMAGE_TAG := 917afcdd0c0a07bf4155d597bbba72e962e1a34a
CALL_ANYWHERE := \
submodules \
all compile xref lint dialyze cover release clean distclean
CALL_W_CONTAINER := $(CALL_ANYWHERE) test
.PHONY: $(CALL_W_CONTAINER) all
all: compile
-include $(UTILS_PATH)/make_lib/utils_container.mk
-include $(UTILS_PATH)/make_lib/utils_image.mk
$(SUBTARGETS): %/.git: %
git submodule update --init $<
touch $@
submodules: $(SUBTARGETS)
compile: submodules
$(REBAR) compile
xref:
$(REBAR) xref
lint:
$(REBAR) lint
dialyze:
$(REBAR) dialyzer
release: submodules
$(REBAR) as prod release
clean:
$(REBAR) cover -r
$(REBAR) clean
distclean:
$(REBAR) clean
rm -rf _build
cover:
$(REBAR) cover
# CALL_W_CONTAINER
test: submodules
$(REBAR) do eunit, ct

1
build_utils Submodule

@@ -0,0 +1 @@
Subproject commit 2c4c2289ad7919ef953603f70d5cc967419ec2dd

92
config/sys.config Normal file

@@ -0,0 +1,92 @@
[
{bouncer, [
{ip, "::"},
{port, 8022},
{services, #{
arbiter => #{
path => <<"/v1/arbiter">>
}
}},
{protocol_opts, #{
% How long to wait for another request before closing a keepalive connection? (ms)
request_timeout => 5000
}},
{transport_opts, #{
% Maximum number of simultaneous connections. (default = 1024)
max_connections => 8000,
% Size of the acceptor pool. (default = 10)
num_acceptors => 100
}},
% How long to wait for outstanding requests to complete when asked to shut down? (ms)
{shutdown_timeout, 1000},
{opa, #{
endpoint => {"opa", 8181},
% Which transport to use? (tcp | tls)
transport => tcp,
% Which `gen_tcp:connect_option()`s to use? Relevant only for `tcp` transport.
tcp_opts => [inet6],
% Which `ssl:tls_client_option()`s to use? Relevant only for `tls` transport.
tls_opts => [{verify, verify_peer}],
% Total timeout for establishing a connection. (ms)
connect_timeout => 1000,
% Timeout for domain lookup query. (ms)
domain_lookup_timeout => 1000,
% Timeout for making request and receiving response. (ms)
request_timeout => 1000
}},
{woody_event_handlers, [
hay_woody_event_handler,
{scoper_woody_event_handler, #{
event_handler_opts => #{
formatter_opts => #{
max_length => 1000,
max_printable_string_length => 80
}
}
}}
]},
{health_check, #{
% disk => {erl_health, disk , ["/", 99]},
% memory => {erl_health, cg_memory, [99]},
% service => {erl_health, service , [<<"bouncer">>]}
}}
]},
{kernel, [
{logger_level, debug},
{logger, [
{handler, default, logger_std_h, #{
config => #{
type => {file, "/var/log/bouncer/log.json"}
},
formatter => {logger_logstash_formatter, #{}}
}}
]}
]},
% {how_are_you, [
% {metrics_publishers, [
% {hay_statsd_publisher, #{
% key_prefix => <<"bender.">>,
% host => "localhost",
% port => 8125
% }}
% ]}
% ]},
{scoper, [
{storage, scoper_storage_logger}
]},
{snowflake, [
{max_backward_clock_moving, 1000} % 1 second
% {machine_id, hostname_hash}
]}
].

41
docker-compose.sh Executable file

@@ -0,0 +1,41 @@
#!/bin/bash
OPA_VSN="0.23.2-debug"
OPA_IMAGE="openpolicyagent/opa:${OPA_VSN}"
POLICY_DIR="/var/opa/policies"
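# Renders a compose file to stdout, presumably consumed by the build_utils container tooling:
# the service build container plus an OPA sidecar which bundles and serves the policies mounted
# from ./test/policies, so the test suite has a live policy engine to talk to.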
cat <<EOF
version: '2.1'
services:
${SERVICE_NAME}:
image: ${BUILD_IMAGE}
volumes:
- .:$PWD
- $HOME/.cache:/home/$UNAME/.cache
working_dir: $PWD
command: /sbin/init
depends_on:
opa:
condition: service_healthy
opa:
image: ${OPA_IMAGE}
entrypoint: ["sh", "-c"]
command: ["cd ${POLICY_DIR} &&
/opa build . &&
/opa run
--server
--addr :8181
--set decision_logs.console=true
--bundle bundle.tar.gz
"]
volumes:
- ./test/policies:${POLICY_DIR}:rw
healthcheck:
test: ["CMD", "sh", "-c", "wget -q --tries=1 --spider http://localhost:8181/"]
interval: 5s
timeout: 1s
retries: 5
EOF

204
rebar.config Normal file

@@ -0,0 +1,204 @@
%% Common project erlang options.
{erl_opts, [
% mandatory
debug_info,
warnings_as_errors,
warn_export_all,
warn_missing_spec,
warn_untyped_record,
warn_export_vars,
% by default
warn_unused_record,
warn_bif_clash,
warn_obsolete_guard,
warn_unused_vars,
warn_shadow_vars,
warn_unused_import,
warn_unused_function,
warn_deprecated_function
% at will
% bin_opt_info
% no_auto_import
% warn_missing_spec_all
]}.
%% Common project dependencies.
{deps, [
{cowboy, "2.8.0"},
{jsx, "3.0.0"},
{jesse, "1.5.5"},
{gun,
{git, "https://github.com/ninenines/gun.git",
{branch, "master"}
}},
{genlib,
{git, "https://github.com/rbkmoney/genlib.git",
{branch, "master"}
}
},
{thrift,
{git, "https://github.com/rbkmoney/thrift_erlang.git",
{branch, "master"}
}
},
{woody,
{git, "https://github.com/rbkmoney/woody_erlang.git",
{branch, "master"}
}
},
{woody_user_identity,
{git, "https://github.com/rbkmoney/woody_erlang_user_identity.git",
{branch, "master"}
}
},
{bouncer_proto,
{git, "git@github.com:rbkmoney/bouncer-proto.git",
{branch, "master"}
}
},
{scoper,
{git, "https://github.com/rbkmoney/scoper.git",
{branch, "master"}
}
},
{erl_health,
{git, "https://github.com/rbkmoney/erlang-health.git",
{branch, "master"}
}
},
% Production-only deps.
% Defined here for the sake of rebar-locking.
{recon, "2.5.1"},
{logger_logstash_formatter,
{git, "https://github.com/rbkmoney/logger_logstash_formatter.git",
{branch, "master"}
}
},
{how_are_you,
{git, "https://github.com/rbkmoney/how_are_you.git",
{branch, "master"}
}
}
]}.
%% Helpful plugins.
{plugins, [
rebar3_lint
]}.
%% Linter config.
{elvis, [
#{
dirs => ["src"],
filter => "*.erl",
ruleset => erl_files,
rules => [
{elvis_style, invalid_dynamic_call, #{ignore => [
% Uses thrift reflection through `struct_info/1`.
bouncer_context_v1
]}}
]
},
#{
dirs => ["test"],
filter => "*.erl",
ruleset => erl_files,
rules => [
% We want to use `ct:pal/2` and friends in test code.
{elvis_style, no_debug_call, disable},
% Tests are usually more comprehensible when a bit more verbose.
{elvis_style, dont_repeat_yourself, #{min_complexity => 20}},
{elvis_style, god_modules, #{ignore => [ct_gun_event_h]}}
]
},
#{
dirs => ["."],
filter => "Makefile",
ruleset => makefiles
},
#{
dirs => ["."],
filter => "rebar.config",
rules => [
{elvis_style, line_length, #{limit => 100, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace}
]
},
#{
dirs => ["src"],
filter => "*.app.src",
rules => [
{elvis_style, line_length, #{limit => 100, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace}
]
}
]}.
{elvis_output_format, colors}.
%% XRef checks
{xref_checks, [
undefined_function_calls,
undefined_functions,
deprecated_functions_calls,
deprecated_functions
]}.
% at will
% {xref_warnings, true}.
%% Tests
{cover_enabled, true}.
%% Relx configuration
{relx, [
{release, {bouncer , "0.1.0"}, [
{recon , load}, % tools for introspection
{runtime_tools, load}, % debugger
{tools , load}, % profiler
{logger_logstash_formatter, load}, % logger formatter
how_are_you,
bouncer
]},
{sys_config, "./config/sys.config"},
{vm_args, "./config/vm.args"},
{dev_mode, true},
{include_erts, false},
{extended_start_script, true}
]}.
%% Dialyzer static analyzing
{dialyzer, [
{warnings, [
% mandatory
unmatched_returns,
error_handling,
race_conditions,
unknown
]},
{plt_apps, all_deps}
]}.
{profiles, [
{prod, [
{relx, [
{dev_mode, false},
{include_erts, true},
{overlay, []}
]}
]},
{test, [
{cover_enabled, true},
{deps, []}
]}
]}.

92
rebar.lock Normal file

@@ -0,0 +1,92 @@
{"1.1.0",
[{<<"bear">>,{pkg,<<"bear">>,<<"0.8.7">>},2},
{<<"bouncer_proto">>,
{git,"git@github.com:rbkmoney/bouncer-proto.git",
{ref,"97dcad6f2f2706f269188722bc2419246d38823b"}},
0},
{<<"cache">>,{pkg,<<"cache">>,<<"2.2.0">>},1},
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.5.1">>},2},
{<<"cg_mon">>,
{git,"https://github.com/rbkmoney/cg_mon.git",
{ref,"5a87a37694e42b6592d3b4164ae54e0e87e24e18"}},
1},
{<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.8.0">>},0},
{<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.9.1">>},1},
{<<"erl_health">>,
{git,"https://github.com/rbkmoney/erlang-health.git",
{ref,"982af88738ca062eea451436d830eef8c1fbe3f9"}},
0},
{<<"folsom">>,
{git,"https://github.com/folsom-project/folsom.git",
{ref,"eeb1cc467eb64bd94075b95b8963e80d8b4df3df"}},
1},
{<<"genlib">>,
{git,"https://github.com/rbkmoney/genlib.git",
{ref,"1ca08793ad8af0beb26eda8cd00687c69f7ef8b4"}},
0},
{<<"gproc">>,{pkg,<<"gproc">>,<<"0.8.0">>},1},
{<<"gun">>,
{git,"https://github.com/ninenines/gun.git",
{ref,"f2e8d103dd7827251fa726c42e307e42cef8a3dc"}},
0},
{<<"hackney">>,{pkg,<<"hackney">>,<<"1.15.2">>},1},
{<<"how_are_you">>,
{git,"https://github.com/rbkmoney/how_are_you.git",
{ref,"8f11d17eeb6eb74096da7363a9df272fd3099718"}},
0},
{<<"idna">>,{pkg,<<"idna">>,<<"6.0.0">>},2},
{<<"jesse">>,{pkg,<<"jesse">>,<<"1.5.5">>},0},
{<<"jsx">>,{pkg,<<"jsx">>,<<"3.0.0">>},0},
{<<"logger_logstash_formatter">>,
{git,"https://github.com/rbkmoney/logger_logstash_formatter.git",
{ref,"013525713c0d2a1ff0a0daff5db872793f5e6012"}},
0},
{<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},2},
{<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.2.0">>},2},
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.0">>},3},
{<<"ranch">>,{pkg,<<"ranch">>,<<"1.7.1">>},1},
{<<"recon">>,{pkg,<<"recon">>,<<"2.5.1">>},0},
{<<"rfc3339">>,{pkg,<<"rfc3339">>,<<"0.2.2">>},1},
{<<"scoper">>,
{git,"https://github.com/rbkmoney/scoper.git",
{ref,"23b1625bf2c6940a56cfc30389472a5e384a229f"}},
0},
{<<"snowflake">>,
{git,"https://github.com/rbkmoney/snowflake.git",
{ref,"7f379ad5e389e1c96389a8d60bae8117965d6a6d"}},
1},
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.5">>},2},
{<<"thrift">>,
{git,"https://github.com/rbkmoney/thrift_erlang.git",
{ref,"846a0819d9b6d09d0c31f160e33a78dbad2067b4"}},
0},
{<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.4.1">>},3},
{<<"woody">>,
{git,"https://github.com/rbkmoney/woody_erlang.git",
{ref,"3b766220542ea49965e99a4c4352727d6e9e0cbf"}},
0},
{<<"woody_user_identity">>,
{git,"https://github.com/rbkmoney/woody_erlang_user_identity.git",
{ref,"d6d8c570e6aaae7adfd2737e47007da43728c1b3"}},
0}]}.
[
{pkg_hash,[
{<<"bear">>, <<"16264309AE5D005D03718A5C82641FCC259C9E8F09ADEB6FD79CA4271168656F">>},
{<<"cache">>, <<"3C11DBF4CD8FCD5787C95A5FB2A04038E3729CFCA0386016EEA8C953AB48A5AB">>},
{<<"certifi">>, <<"867CE347F7C7D78563450A18A6A28A8090331E77FA02380B4A21962A65D36EE5">>},
{<<"cowboy">>, <<"F3DC62E35797ECD9AC1B50DB74611193C29815401E53BAC9A5C0577BD7BC667D">>},
{<<"cowlib">>, <<"61A6C7C50CF07FDD24B2F45B89500BB93B6686579B069A89F88CB211E1125C78">>},
{<<"gproc">>, <<"CEA02C578589C61E5341FCE149EA36CCEF236CC2ECAC8691FBA408E7EA77EC2F">>},
{<<"hackney">>, <<"07E33C794F8F8964EE86CEBEC1A8ED88DB5070E52E904B8F12209773C1036085">>},
{<<"idna">>, <<"689C46CBCDF3524C44D5F3DDE8001F364CD7608A99556D8FBD8239A5798D4C10">>},
{<<"jesse">>, <<"ECFD2C1634C49052CA907B4DFDE1D1F44B7FD7862D933F4590807E42759B8072">>},
{<<"jsx">>, <<"20A170ABD4335FC6DB24D5FAD1E5D677C55DADF83D1B20A8A33B5FE159892A39">>},
{<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
{<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
{<<"parse_trans">>, <<"09765507A3C7590A784615CFD421D101AEC25098D50B89D7AA1D66646BC571C1">>},
{<<"ranch">>, <<"6B1FAB51B49196860B733A49C07604465A47BDB78AA10C1C16A3D199F7F8C881">>},
{<<"recon">>, <<"430FFA60685AC1EFDFB1FE4C97B8767C92D0D92E6E7C3E8621559BA77598678A">>},
{<<"rfc3339">>, <<"1552DF616ACA368D982E9F085A0E933B6688A3F4938A671798978EC2C0C58730">>},
{<<"ssl_verify_fun">>, <<"6EAF7AD16CB568BB01753DBBD7A95FF8B91C7979482B95F38443FE2C8852A79B">>},
{<<"unicode_util_compat">>, <<"D869E4C68901DD9531385BB0C8C40444EBF624E60B6962D95952775CAC5E90CD">>}]}
].

20
src/bouncer.app.src Normal file

@@ -0,0 +1,20 @@
{application, bouncer, [
{description, "Does someone look like a troublemaker?"},
{vsn, "0.1.0"},
{registered, []},
{mod, {bouncer, []}},
{applications, [
kernel,
stdlib,
genlib,
woody,
scoper,
thrift,
gun,
jesse,
bouncer_proto,
erl_health
]},
{env, []},
{licenses, ["Apache 2.0"]}
]}.

109
src/bouncer.erl Normal file

@@ -0,0 +1,109 @@
-module(bouncer).
%% Application callbacks
-behaviour(application).
-export([start/2]).
-export([stop/1]).
%% Supervisor callbacks
-behaviour(supervisor).
-export([init/1]).
%%
-spec start(normal, any()) ->
{ok, pid()} | {error, any()}.
start(_StartType, _StartArgs) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec stop(any()) ->
ok.
stop(_State) ->
ok.
%%
-spec init([]) ->
{ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init([]) ->
ServiceOpts = genlib_app:env(?MODULE, services, #{}),
EventHandlers = genlib_app:env(?MODULE, woody_event_handlers, [woody_event_handler_default]),
Healthcheck = enable_health_logging(genlib_app:env(?MODULE, health_check, #{})),
ChildSpec = woody_server:child_spec(
?MODULE,
#{
ip => get_ip_address(),
port => get_port(),
protocol_opts => get_protocol_opts(),
transport_opts => get_transport_opts(),
shutdown_timeout => get_shutdown_timeout(),
event_handler => EventHandlers,
handlers => get_handler_specs(ServiceOpts),
additional_routes => [erl_health_handle:get_route(Healthcheck)]
}
),
{ok, {
#{strategy => one_for_all, intensity => 6, period => 30},
[ChildSpec]
}}.
-spec get_ip_address() ->
inet:ip_address().
get_ip_address() ->
{ok, Address} = inet:parse_address(genlib_app:env(?MODULE, ip, "::")),
Address.
-spec get_port() ->
inet:port_number().
get_port() ->
genlib_app:env(?MODULE, port, 8022).
-spec get_protocol_opts() ->
woody_server_thrift_http_handler:protocol_opts().
get_protocol_opts() ->
genlib_app:env(?MODULE, protocol_opts, #{}).
-spec get_transport_opts() ->
woody_server_thrift_http_handler:transport_opts().
get_transport_opts() ->
genlib_app:env(?MODULE, transport_opts, #{}).
-spec get_shutdown_timeout() ->
timeout().
get_shutdown_timeout() ->
genlib_app:env(?MODULE, shutdown_timeout, 0).
-spec get_handler_specs(map()) ->
[woody:http_handler(woody:th_handler())].
get_handler_specs(ServiceOpts) ->
ArbiterService = maps:get(arbiter, ServiceOpts, #{}),
ArbiterOpts = maps:with([pulse], ArbiterService),
[
{
maps:get(path, ArbiterService, <<"/v1/arbiter">>),
{{bouncer_decisions_thrift, 'Arbiter'}, {bouncer_arbiter_handler, ArbiterOpts}}
}
].
%%
-spec enable_health_logging(erl_health:check()) ->
erl_health:check().
enable_health_logging(Check) ->
EvHandler = {erl_health_event_handler, []},
maps:map(
fun (_, Runner) -> #{runner => Runner, event_handler => EvHandler} end,
Check
).

242
src/bouncer_arbiter.erl Normal file

@@ -0,0 +1,242 @@
-module(bouncer_arbiter).
%% TODOs
%% * Enable connection pooling, to lower IO resource usage.
%% * Respect external woody deadlines where applicable.
%% NOTE
%% This must be a path to some document with the subdocument of the following form:
%% ```
%% "assertions": {
%% "allowed": [["code", "description"], ...] // 0 or more, may be unset
%% "forbidden": [["code", "description"], ...] // 0 or more, may be unset
%% }
%% ```
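%% For illustration (values are hypothetical): given ruleset id "authz/api", the arbiter fetches
%% the OPA document at "authz/api/assertions"; a response document such as
%% ```
%% {"forbidden": [["auth_expired", "authorization is expired"]]}
%% ```
%% maps to the judgement `{forbidden, [{<<"auth_expired">>, <<"authorization is expired">>}]}`.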
-type ruleset_id() :: iodata().
-type judgement() :: {resolution(), [assertion()]}.
-type resolution() :: allowed | forbidden.
-type assertion() :: {_Code :: binary(), _Description :: binary() | undefined}.
-export_type([judgement/0]).
-export_type([resolution/0]).
-export([judge/2]).
%%
-spec judge(ruleset_id(), bouncer_context:ctx()) ->
{ok, judgement()} |
{error,
ruleset_notfound |
{ruleset_invalid, _Details} |
{unavailable | unknown, _Reason}
}.
judge(RulesetID, Context) ->
case mk_opa_client() of
{ok, Client} ->
Location = join_path(RulesetID, <<"/assertions">>),
case request_opa_document(Location, Context, Client) of
{ok, Document} ->
infer_judgement(Document);
{error, notfound} ->
{error, ruleset_notfound};
{error, _} = Error ->
Error
end;
{error, _} = Error ->
Error
end.
-spec infer_judgement(document()) ->
{ok, judgement()} | {error, {ruleset_invalid, _Details}}.
infer_judgement(Document) ->
case jesse:validate_with_schema(get_judgement_schema(), Document) of
{ok, _} ->
Allowed = maps:get(<<"allowed">>, Document, []),
Forbidden = maps:get(<<"forbidden">>, Document, []),
{ok, infer_judgement(Forbidden, Allowed)};
{error, Reason} ->
{error, {ruleset_invalid, Reason}}
end.
infer_judgement(Forbidden = [_ | _], _Allowed) ->
{forbidden, extract_assertions(Forbidden)};
infer_judgement(_Forbidden = [], Allowed = [_ | _]) ->
{allowed, extract_assertions(Allowed)};
infer_judgement(_Forbidden = [], _Allowed = []) ->
{forbidden, []}.
extract_assertions(Assertions) ->
[extract_assertion(E) || E <- Assertions].
extract_assertion([Code, Description]) ->
{Code, Description};
extract_assertion(Code) when is_binary(Code) ->
{Code, undefined}.
-spec get_judgement_schema() ->
jesse:schema().
get_judgement_schema() ->
% TODO
% Worth declaring in a separate file? Should be helpful w/ CI-like activities.
CodeSchema = [{<<"type">>, <<"string">>}, {<<"minLength">>, 1}],
AssertionsSchema = [
{<<"type">>, <<"array">>},
{<<"items">>, [{<<"oneOf">>, [
CodeSchema,
[
{<<"type">>, <<"array">>},
{<<"items">>, CodeSchema},
{<<"minItems">>, 1},
{<<"maxItems">>, 2}
]
]}]}
],
[
{<<"type">>, <<"object">>},
{<<"properties">>, [
{<<"allowed">>, AssertionsSchema},
{<<"forbidden">>, AssertionsSchema}
]},
{<<"additionalProperties">>, false}
].
%%
-type endpoint() :: {inet:hostname() | inet:ip_address(), inet:port_number()}.
-type client_opts() :: #{
endpoint := endpoint(),
transport => tcp | tls,
tcp_opts => [gen_tcp:connect_option()],
tls_opts => [ssl:tls_client_option()],
connect_timeout => timeout(),
domain_lookup_timeout => timeout(),
request_timeout => timeout(),
http_opts => gun:http_opts(),
http2_opts => gun:http2_opts(),
% TODO
% Pulse over gun event handler mechanic.
event_handler => {module(), _State}
}.
-define(DEFAULT_CLIENT_OPTS, #{
domain_lookup_timeout => 1000,
connect_timeout => 1000,
request_timeout => 1000
}).
-type client() :: {pid(), client_opts()}.
-type document() ::
null |
binary() |
number() |
boolean() |
#{atom() | binary() => document()} |
[document()].
-spec mk_opa_client() ->
{ok, client()} | {error, {unavailable, _Reason}}.
mk_opa_client() ->
Opts = get_opa_client_opts(),
{Host, Port} = maps:get(endpoint, Opts),
GunOpts = maps:with(
[
transport,
tcp_opts,
tls_opts,
connect_timeout,
domain_lookup_timeout,
http_opts,
http2_opts,
event_handler
],
Opts
),
% TODO
% We might want to mask intermittent service unavailability here with retries. For the time
% being we leave that up to our clients, in the name of configuration simplicity and
% predictability.
case gun:open(Host, Port, GunOpts#{retry => 0}) of
{ok, Client} ->
Timeout = maps:get(connect_timeout, Opts),
case gun:await_up(Client, Timeout) of
{ok, _} ->
{ok, {Client, Opts}};
{error, Reason} ->
{error, {unavailable, Reason}}
end;
{error, Reason = {options, _}} ->
erlang:error({invalid_client_options, Reason, Opts})
end.
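%% NOTE (illustrative)
%% The exchange below follows OPA's Data API: `POST /v1/data/<id>` with a JSON body of the form
%% `{"input": <context>}`. A successful response carries `{"result": <document>}`; a response
%% body without a "result" key is treated as "no such document".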
-spec request_opa_document(_ID :: iodata(), _Input :: document(), client()) ->
{ok, document()} |
{error,
notfound |
{unknown, _Reason}
}.
request_opa_document(ID, Input, {Client, Opts}) ->
Path = join_path(<<"/v1/data">>, ID),
% TODO
% A bit hacky: ordsets are allowed in context and are supposed to be opaque, at least by design.
% We probably need something like `bouncer_context:to_json/1`.
Body = jsx:encode(#{input => Input}),
CType = <<"application/json; charset=utf-8">>,
Headers = #{
<<"content-type">> => CType,
<<"accept">> => CType
},
Timeout = maps:get(request_timeout, Opts),
StreamRef = gun:post(Client, Path, Headers, Body),
Deadline = erlang:monotonic_time(millisecond) + Timeout, % TODO think about implications
case gun:await(Client, StreamRef, Timeout) of
{response, nofin, 200, _Headers} ->
TimeoutLeft = Deadline - erlang:monotonic_time(millisecond),
case gun:await_body(Client, StreamRef, TimeoutLeft) of
{ok, Response, _Trailers} ->
decode_document(Response);
{ok, Response} ->
decode_document(Response);
{error, Reason} ->
{error, {unknown, Reason}}
end;
{response, fin, 404, _Headers} ->
{error, notfound};
{error, Reason} ->
{error, {unknown, Reason}}
end.
-spec decode_document(binary()) ->
{ok, document()} | {error, notfound}.
decode_document(Response) ->
case jsx:decode(Response) of
#{<<"result">> := Result} ->
{ok, Result};
#{} ->
{error, notfound}
end.
-spec get_opa_client_opts() ->
client_opts().
get_opa_client_opts() ->
maps:merge(
?DEFAULT_CLIENT_OPTS,
application:get_env(bouncer, opa, #{})
).
%%
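%% Path joining sketch (illustrative): `join_path(<<"authz/api">>, <<"/assertions">>)` evaluates
%% to `<<"/authz/api/assertions">>`; each fragment is first normalized to carry a leading slash
%% and no trailing one.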
join_path(F1, F2) when is_binary(F1), is_binary(F2) ->
normalize_path(genlib_string:cat(normalize_path(F1), normalize_path(F2))).
normalize_path(P = <<$/, P1/binary>>) ->
S1 = byte_size(P1),
case S1 > 0 andalso binary:last(P1) of
$/ -> binary:part(P, 0, S1);
_ -> P
end;
normalize_path(P) when is_binary(P) ->
normalize_path(<<$/, P/binary>>);
normalize_path(P) ->
normalize_path(iolist_to_binary(P)).

182
src/bouncer_arbiter_handler.erl Normal file

@@ -0,0 +1,182 @@
-module(bouncer_arbiter_handler).
-include_lib("bouncer_proto/include/bouncer_decisions_thrift.hrl").
%% Woody handler
-behaviour(woody_server_thrift_handler).
-export([handle_function/4]).
%%
-type opts() :: #{
pulse => bouncer_arbiter_pulse:handler()
}.
-record(st, {
pulse :: bouncer_arbiter_pulse:handler() | undefined,
pulse_metadata :: bouncer_arbiter_pulse:metadata()
}).
-type st() :: #st{}.
-spec handle_function(woody:func(), woody:args(), woody_context:ctx(), opts()) ->
{ok, woody:result()}.
handle_function(Fn, Args, WoodyCtx, Opts) ->
scoper:scope(arbiter, fun() ->
do_handle_function(Fn, Args, WoodyCtx, Opts)
end).
do_handle_function('Judge', {RulesetID, ContextIn}, WoodyCtx, Opts) ->
St = #st{
pulse = maps:get(pulse, Opts, undefined),
pulse_metadata = #{woody_ctx => WoodyCtx}
},
try handle_judge(RulesetID, ContextIn, St) catch
throw:{woody, Class, Details} ->
woody_error:raise(Class, Details);
C:R:S ->
ok = handle_judgement_beat({failed, {unexpected_exception, {C, R, S}}}, St),
erlang:raise(C, R, S)
end.
%%
handle_judge(RulesetID, ContextIn, St0) ->
St1 = append_pulse_metadata(#{ruleset => RulesetID}, St0),
ok = handle_judgement_beat(started, St1),
{Context, St2} = decode_context(ContextIn, St1),
case bouncer_arbiter:judge(RulesetID, Context) of
{ok, Judgement} ->
ok = handle_judgement_beat({completed, Judgement}, St2),
{ok, encode_judgement(Judgement)};
{error, ruleset_notfound = Reason} ->
ok = handle_judgement_beat({failed, Reason}, St2),
throw({woody, business, #bdcs_RulesetNotFound{}});
{error, {ruleset_invalid, _} = Reason} ->
ok = handle_judgement_beat({failed, Reason}, St2),
throw({woody, business, #bdcs_InvalidRuleset{}});
{error, Reason} ->
handle_network_error(Reason, St2)
end.
-spec handle_network_error(_Reason, st()) ->
no_return().
handle_network_error({unavailable, Reason} = Error, St) ->
ok = handle_judgement_beat({failed, Error}, St),
throw({woody, system, {external, resource_unavailable, genlib:format(Reason)}});
handle_network_error({unknown, Reason} = Error, St) ->
ok = handle_judgement_beat({failed, Error}, St),
throw({woody, system, {external, result_unknown, genlib:format(Reason)}}).
%%
-type fragment_id() :: binary().
-type fragment_metadata() :: #{atom() => _}.
-type thrift_judgement() :: bouncer_decisions_thrift:'Judgement'().
-type thrift_context() :: bouncer_decisions_thrift:'Context'().
-type thrift_fragment() :: bouncer_context_thrift:'ContextFragment'().
-type thrift_fragment_type() :: bouncer_context_thrift:'ContextFragmentType'().
-spec encode_judgement(bouncer_arbiter:judgement()) ->
thrift_judgement().
encode_judgement({Resolution, _Assertions}) ->
#bdcs_Judgement{
resolution = encode_resolution(Resolution)
}.
encode_resolution(allowed) ->
allowed;
encode_resolution(forbidden) ->
forbidden.
-spec decode_context(thrift_context(), st()) ->
{bouncer_context:ctx(), st()}.
decode_context(#bdcs_Context{fragments = FragmentsIn}, St0) ->
% 1. Decode each fragment.
{Fragments, St1} = decode_fragments(FragmentsIn, St0),
% 2. Merge each decoded context into an empty context. Accumulate conflicts associated with
% corresponding fragment id.
{Ctx, Conflicts} = maps:fold(
fun (ID, Ctx, {CtxAcc, DiscardAcc}) ->
case bouncer_context:merge(CtxAcc, Ctx) of
{CtxAcc1, undefined} ->
{CtxAcc1, DiscardAcc};
{CtxAcc1, Discard} ->
{CtxAcc1, DiscardAcc#{ID => Discard}}
end
end,
{bouncer_context:empty(), #{}},
Fragments
),
% 3. Return the merged context if there were no conflicts.
case map_size(Conflicts) of
0 ->
St2 = append_pulse_metadata(#{context => Ctx}, St1),
{Ctx, St2};
_ ->
% TODO
% We could hand responsibility for checking the absence of conflicts over to the policies
% themselves. Since conflicts are possible (I suspect that sooner or later, as the system
% evolves, they will show up somewhere), perhaps it's worth doing that right away?
ok = handle_judgement_beat({failed, {conflicting_context, Conflicts}}, St1),
throw({woody, business, #bdcs_InvalidContext{}})
end.
-spec decode_fragments(#{fragment_id() => thrift_fragment()}, st()) ->
{#{fragment_id() => bouncer_context:ctx()}, st()}.
decode_fragments(Fragments, St0) ->
{Ctxs, Errors, PulseMeta} = maps:fold(
fun (ID, Fragment, {CtxAcc, ErrorAcc, PulseMetaAcc}) ->
Type = Fragment#bctx_ContextFragment.type,
Content = genlib:define(Fragment#bctx_ContextFragment.content, <<>>),
case decode_fragment(Type, Content) of
{ok, Ctx, Meta} ->
PulseMeta = #{
type => Type,
context => Ctx,
metadata => Meta
},
{
CtxAcc#{ID => Ctx},
ErrorAcc,
PulseMetaAcc#{ID => PulseMeta}
};
{error, Reason} ->
{
CtxAcc,
ErrorAcc#{ID => {Type, Reason}},
PulseMetaAcc
}
end
end,
{#{}, #{}, #{}},
Fragments
),
St1 = append_pulse_metadata(#{fragments => PulseMeta}, St0),
case map_size(Errors) of
0 ->
{Ctxs, St1};
_ ->
ok = handle_judgement_beat({failed, {malformed_context, Errors}}, St1),
throw({woody, business, #bdcs_InvalidContext{}})
end.
-spec decode_fragment(thrift_fragment_type(), _Content :: binary()) ->
{ok, bouncer_context:ctx(), fragment_metadata()} | {error, _Reason}.
decode_fragment(v1_thrift_binary, Content) ->
bouncer_context_v1:decode(thrift, Content).
%%
-spec append_pulse_metadata(bouncer_arbiter_pulse:metadata(), st()) ->
st().
append_pulse_metadata(Metadata, St = #st{pulse_metadata = MetadataWas}) ->
St#st{pulse_metadata = maps:merge(MetadataWas, Metadata)}.
-spec handle_judgement_beat(_Beat, st()) ->
ok.
handle_judgement_beat(Beat, #st{pulse = Pulse, pulse_metadata = Metadata}) ->
bouncer_arbiter_pulse:handle_beat({judgement, Beat}, Metadata, Pulse).

47
src/bouncer_arbiter_pulse.erl Normal file

@@ -0,0 +1,47 @@
-module(bouncer_arbiter_pulse).
-type beat() ::
{judgement,
started |
{completed, bouncer_arbiter:judgement()} |
{failed, _Reason}
}.
-type metadata() :: #{
ruleset => id(),
context => bouncer_context:ctx(),
fragments => #{id() => fragment()},
woody_ctx => woody_context:ctx()
}.
-type id() :: binary().
-type fragment() :: #{
type => atom(),
context => bouncer_context:ctx(),
metadata => map()
}.
-export_type([beat/0]).
-export_type([metadata/0]).
%%
-type handler() :: {module(), _Opts}.
-export_type([handler/0]).
-callback handle_beat(beat(), metadata(), _Opts) ->
ok.
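%% A minimal handler sketch (module name is hypothetical, for illustration only):
%% ```
%% -module(my_audit_pulse).
%% -behaviour(bouncer_arbiter_pulse).
%% -export([handle_beat/3]).
%% handle_beat(Beat, Metadata, _Opts) ->
%%     logger:notice("judgement beat: ~0p, metadata: ~0p", [Beat, Metadata]).
%% ```
%% Wired up as `pulse => {my_audit_pulse, #{}}` under the arbiter service options.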
-export([handle_beat/3]).
-spec handle_beat(beat(), metadata(), handler() | undefined) ->
ok.
handle_beat(Beat, Metadata, {Mod, Opts}) ->
% NOTE
% Generally, we don't want a fault in an event handler to propagate into the business logic
% and cause it to fail. However, here we deem that behaviour required: when doing audit
% logging, an inability to append to the audit log should fail the whole operation.
Mod:handle_beat(Beat, Metadata, Opts);
handle_beat(_Beat, _Metadata, undefined) ->
ok.

92
src/bouncer_context.erl Normal file

@@ -0,0 +1,92 @@
-module(bouncer_context).
-type set(T) :: ordsets:ordset(T).
%% TODO
%% Currently there's no notion of generic maps in contexts. This means, for example, that
%% (given the `context_v1` model) a client may submit fragments like these:
%% ```
%% "frag1": {user: {id: "bla", orgs: [{id: "org42", owner: {id: "blarg"}}]}}
%% "frag2": {user: {id: "bla", orgs: [{id: "org42", owner: {id: "goodwyn"}}]}}
%% ```
%% which we won't consider conflicting, even though they obviously refer to the same
%% «organization».
-type ctx() :: #{
atom() => number() | binary() | ctx() | set(ctx())
}.
-export([empty/0]).
-export([merge/2]).
-export_type([ctx/0]).
%%
-spec empty() ->
ctx().
empty() ->
#{}.
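%% Merge semantics in a nutshell (examples are illustrative):
%% * nested maps are merged recursively;
%% * ordsets are unioned, with a non-empty intersection reported as a conflict;
%% * differing scalars conflict: the value from the second context wins and the overwritten one
%%   is reported, e.g. `merge(#{user => #{email => <<"a">>}}, #{user => #{email => <<"b">>}})`
%%   yields `{#{user => #{email => <<"b">>}}, #{user => #{email => <<"a">>}}}`.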
-spec merge(ctx(), ctx()) ->
{_Merged :: ctx(), _Conflicting :: ctx() | undefined}.
merge(Ctx1, Ctx2) ->
maps:fold(
fun (K, V2, {CtxAcc, ConflictAcc}) ->
case maps:find(K, CtxAcc) of
{ok, V1} ->
{VM, Conflict} = merge_values(V1, V2),
CtxAcc1 = CtxAcc#{K => VM},
DiscardAcc1 = append_conflict(K, Conflict, ConflictAcc),
{CtxAcc1, DiscardAcc1};
error ->
{CtxAcc#{K => V2}, ConflictAcc}
end
end,
{Ctx1, undefined},
Ctx2
).
merge_values(V1 = #{}, V2 = #{}) ->
merge(V1, V2);
merge_values(V1, V2) ->
case ordsets:is_set(V1) and ordsets:is_set(V2) of
true ->
Intersection = ordsets:intersection(V1, V2),
MaybeConflict = case ordsets:size(Intersection) of
0 -> undefined;
_ -> Intersection
end,
{ordsets:union(V1, V2), MaybeConflict};
false when V1 =:= V2 ->
{V2, undefined};
false ->
{V2, V1}
end.
append_conflict(_, undefined, Acc) ->
Acc;
append_conflict(K, Conflict, undefined) ->
append_conflict(K, Conflict, #{});
append_conflict(K, Conflict, Acc) ->
Acc#{K => Conflict}.
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-spec test() -> _.
-spec merge_test_() -> [_TestGen].
merge_test_() ->
C1 = #{lol => #{yeah => 1337}},
[
?_assertEqual({empty(), undefined}, merge(empty(), empty())),
?_assertEqual({C1, undefined}, merge(empty(), C1)),
?_assertEqual({C1, undefined}, merge(C1, empty()))
].
-endif.

154
src/bouncer_context_v1.erl Normal file

@@ -0,0 +1,154 @@
-module(bouncer_context_v1).
-include_lib("bouncer_proto/include/bouncer_context_v1_thrift.hrl").
-type vsn() :: integer().
-type format() :: thrift.
-type metadata() :: #{
version := #{
current := vsn(),
original := vsn(),
latest := vsn()
}
}.
-export([decode/2]).
-export([encode/2]).
%%
-define(THRIFT_TYPE,
{struct, struct, {bouncer_context_v1_thrift, 'ContextFragment'}}).
-type thrift_ctx_fragment() :: bouncer_context_v1_thrift:'ContextFragment'().
-spec decode(format(), _Content :: binary()) ->
{ok, bouncer_context:ctx(), metadata()} | {error, _Reason}.
decode(thrift, Content) ->
Codec = thrift_strict_binary_codec:new(Content),
case thrift_strict_binary_codec:read(Codec, ?THRIFT_TYPE) of
{ok, CtxThrift, Codec1} ->
case thrift_strict_binary_codec:close(Codec1) of
<<>> ->
from_thrift(CtxThrift);
Leftovers ->
{error, {excess_binary_data, Leftovers}}
end;
Error ->
Error
end.
-spec from_thrift(thrift_ctx_fragment()) ->
{ok, bouncer_context:ctx(), metadata()}.
from_thrift(#bctx_v1_ContextFragment{} = Ctx0) ->
Ctx1 = try_upgrade(Ctx0),
Metadata = #{
version => #{
current => Ctx1#bctx_v1_ContextFragment.vsn,
original => Ctx0#bctx_v1_ContextFragment.vsn,
latest => ?BCTX_V1_HEAD
}
},
{ok, from_thrift_context(Ctx1), Metadata}.
from_thrift_context(Ctx) ->
{struct, _, [_VsnField | StructDef]} =
bouncer_context_v1_thrift:struct_info('ContextFragment'),
from_thrift_struct(StructDef, Ctx, 3, #{}).
from_thrift_struct(StructDef, Struct) ->
from_thrift_struct(StructDef, Struct, 2, #{}).
from_thrift_struct([{_, _Req, Type, Name, _Default} | Rest], Struct, Idx, Acc) ->
Acc1 = case element(Idx, Struct) of
V when V /= undefined ->
Acc#{Name => from_thrift_value(Type, V)};
undefined ->
Acc
end,
from_thrift_struct(Rest, Struct, Idx + 1, Acc1);
from_thrift_struct([], _Struct, _, Acc) ->
Acc.
from_thrift_value({struct, struct, {Mod, Name}}, V) ->
{struct, _, StructDef} = Mod:struct_info(Name),
from_thrift_struct(StructDef, V);
from_thrift_value({set, Type}, Vs) ->
ordsets:from_list([from_thrift_value(Type, V) || V <- ordsets:to_list(Vs)]);
from_thrift_value(string, V) ->
V;
from_thrift_value(i64, V) ->
V;
from_thrift_value(i32, V) ->
V;
from_thrift_value(i16, V) ->
V;
from_thrift_value(byte, V) ->
V.
-spec try_upgrade(thrift_ctx_fragment()) ->
thrift_ctx_fragment().
try_upgrade(#bctx_v1_ContextFragment{vsn = ?BCTX_V1_HEAD} = Ctx) ->
Ctx.
%%
-spec encode(format(), bouncer_context:ctx()) ->
{ok, _Content} | {error, _}.
encode(thrift, Context) ->
Codec = thrift_strict_binary_codec:new(),
try to_thrift(Context) of
CtxThrift ->
case thrift_strict_binary_codec:write(Codec, ?THRIFT_TYPE, CtxThrift) of
{ok, Codec1} ->
{ok, thrift_strict_binary_codec:close(Codec1)};
{error, _} = Error ->
Error
end
catch throw:{?MODULE, Reason} ->
{error, Reason}
end.
-spec to_thrift(bouncer_context:ctx()) ->
thrift_ctx_fragment() | no_return().
to_thrift(Context) ->
{struct, _, StructDef} = bouncer_context_v1_thrift:struct_info('ContextFragment'),
to_thrift_struct(StructDef, Context, #bctx_v1_ContextFragment{}).
to_thrift_struct([{Idx, _Req, Type, Name, Default} | Rest], Map, Acc) ->
case maps:take(Name, Map) of
{V, MapLeft} ->
Acc1 = erlang:setelement(Idx + 1, Acc, to_thrift_value(Type, V)),
to_thrift_struct(Rest, MapLeft, Acc1);
error when Default /= undefined ->
Acc1 = erlang:setelement(Idx + 1, Acc, Default),
to_thrift_struct(Rest, Map, Acc1);
error ->
to_thrift_struct(Rest, Map, Acc)
end;
to_thrift_struct([], MapLeft, Acc) ->
case map_size(MapLeft) of
0 ->
Acc;
_ ->
throw({?MODULE, {excess_context_data, MapLeft}})
end.
to_thrift_value({struct, struct, {Mod, Name}}, V = #{}) ->
{struct, _, StructDef} = Mod:struct_info(Name),
Acc = erlang:make_tuple(length(StructDef) + 1, undefined, [{1, Mod:record_name(Name)}]),
to_thrift_struct(StructDef, V, Acc);
to_thrift_value({set, Type}, Vs) ->
ordsets:from_list([to_thrift_value(Type, V) || V <- ordsets:to_list(Vs)]);
to_thrift_value(string, V) ->
V;
to_thrift_value(i64, V) ->
V;
to_thrift_value(i32, V) ->
V;
to_thrift_value(i16, V) ->
V;
to_thrift_value(byte, V) ->
V.
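%% Round-trip sketch (illustrative): `encode(thrift, #{user => #{id => <<"blarg">>}})` yields a
%% binary `B` such that `decode(thrift, B)` returns `{ok, #{user => #{id => <<"blarg">>}}, M}`,
%% where `M` carries the original, current and latest schema versions.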

607
test/bouncer_tests_SUITE.erl Normal file

@@ -0,0 +1,607 @@
-module(bouncer_tests_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-export([all/0]).
-export([groups/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_group/2]).
-export([end_per_group/2]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([missing_ruleset_notfound/1]).
-export([incorrect_ruleset_invalid/1]).
-export([missing_content_invalid_context/1]).
-export([junk_content_invalid_context/1]).
-export([conflicting_context_invalid/1]).
-export([distinct_sets_context_valid/1]).
-export([allowed_create_invoice_shop_manager/1]).
-export([forbidden_w_empty_context/1]).
-export([forbidden_expired/1]).
-export([forbidden_blacklisted_ip/1]).
-export([connect_failed_means_unavailable/1]).
-export([connect_timeout_means_unavailable/1]).
-export([request_timeout_means_unknown/1]).
-behaviour(bouncer_arbiter_pulse).
-export([handle_beat/3]).
-include_lib("bouncer_proto/include/bouncer_decisions_thrift.hrl").
-type config() :: [{atom(), term()}].
-type group_name() :: atom().
-type test_case_name() :: atom().
-define(CONFIG(Key, C), (element(2, lists:keyfind(Key, 1, C)))).
%%
-define(OPA_HOST, "opa").
-define(OPA_ENDPOINT, {?OPA_HOST, 8181}).
-define(API_RULESET_ID, "authz/api").
-spec all() ->
[atom()].
all() ->
[
{group, general},
{group, rules_authz_api},
{group, network_error_mapping}
].
-spec groups() ->
[{group_name(), list(), [test_case_name()]}].
groups() ->
[
{general, [parallel], [
missing_ruleset_notfound,
incorrect_ruleset_invalid,
missing_content_invalid_context,
junk_content_invalid_context,
conflicting_context_invalid,
distinct_sets_context_valid
]},
{rules_authz_api, [parallel], [
allowed_create_invoice_shop_manager,
forbidden_expired,
forbidden_blacklisted_ip,
forbidden_w_empty_context
]},
{network_error_mapping, [], [
connect_failed_means_unavailable,
connect_timeout_means_unavailable,
request_timeout_means_unknown
]}
].
-spec init_per_suite(config()) ->
config().
init_per_suite(C) ->
Apps =
genlib_app:start_application(woody) ++
genlib_app:start_application_with(scoper, [
{storage, scoper_storage_logger}
]),
[{suite_apps, Apps} | C].
-spec end_per_suite(config()) ->
ok.
end_per_suite(C) ->
genlib_app:stop_unload_applications(?CONFIG(suite_apps, C)).
-spec init_per_group(group_name(), config()) ->
config().
init_per_group(Name, C) when
Name == general;
Name == rules_authz_api
->
start_bouncer([], [{groupname, Name} | C]);
init_per_group(Name, C) ->
[{groupname, Name} | C].
start_bouncer(Env, C) ->
IP = "127.0.0.1",
Port = 8022,
ArbiterPath = <<"/v1/arbiter">>,
{ok, StashPid} = ct_stash:start(),
Apps = genlib_app:start_application_with(bouncer, [
{ip, IP},
{port, Port},
{services, #{
arbiter => #{
path => ArbiterPath,
pulse => {?MODULE, StashPid}
}
}},
{transport_opts, #{
max_connections => 1000,
num_acceptors => 4
}},
{opa, #{
endpoint => ?OPA_ENDPOINT,
transport => tcp
}}
] ++ Env),
Services = #{
arbiter => mk_url(IP, Port, ArbiterPath)
},
[{group_apps, Apps}, {service_urls, Services}, {stash, StashPid} | C].
mk_url(IP, Port, Path) ->
iolist_to_binary(["http://", IP, ":", genlib:to_binary(Port), Path]).
-spec end_per_group(group_name(), config()) ->
_.
end_per_group(_Name, C) ->
stop_bouncer(C).
stop_bouncer(C) ->
with_config(stash, C, fun (Pid) -> ?assertEqual(ok, ct_stash:destroy(Pid)) end),
with_config(group_apps, C, fun (Apps) -> genlib_app:stop_unload_applications(Apps) end).
-spec init_per_testcase(atom(), config()) ->
config().
init_per_testcase(Name, C) ->
[{testcase, Name} | C].
-spec end_per_testcase(atom(), config()) ->
config().
end_per_testcase(_Name, _C) ->
ok.
%%
-define(CONTEXT(Fragments), #bdcs_Context{fragments = Fragments}).
-define(JUDGEMENT(Resolution), #bdcs_Judgement{resolution = Resolution}).
-spec missing_ruleset_notfound(config()) -> ok.
-spec incorrect_ruleset_invalid(config()) -> ok.
-spec missing_content_invalid_context(config()) -> ok.
-spec junk_content_invalid_context(config()) -> ok.
-spec conflicting_context_invalid(config()) -> ok.
-spec distinct_sets_context_valid(config()) -> ok.
missing_ruleset_notfound(C) ->
Client = mk_client(C),
MissingRulesetID = "missing_ruleset",
?assertThrow(
#bdcs_RulesetNotFound{},
call_judge(MissingRulesetID, ?CONTEXT(#{}), Client)
),
?assertMatch(
{judgement, {failed, ruleset_notfound}},
lists:last(flush_beats(Client, C))
).
incorrect_ruleset_invalid(C) ->
Client1 = mk_client(C),
?assertThrow(
#bdcs_InvalidRuleset{},
call_judge("trivial/incorrect1", ?CONTEXT(#{}), Client1)
),
?assertMatch(
{judgement, {failed, {ruleset_invalid, [
{data_invalid, _, no_extra_properties_allowed, _, [<<"fordibben">>]}
]}}},
lists:last(flush_beats(Client1, C))
),
Client2 = mk_client(C),
?assertThrow(
#bdcs_InvalidRuleset{},
call_judge("trivial/incorrect2", ?CONTEXT(#{}), Client2)
),
?assertMatch(
{judgement, {failed, {ruleset_invalid, [
{data_invalid, _, wrong_type, _, [<<"allowed">>]}
]}}},
lists:last(flush_beats(Client2, C))
).
missing_content_invalid_context(C) ->
Client = mk_client(C),
NoContentFragment = #bctx_ContextFragment{type = v1_thrift_binary},
Context = ?CONTEXT(#{<<"missing">> => NoContentFragment}),
?assertThrow(
#bdcs_InvalidContext{},
call_judge(?API_RULESET_ID, Context, Client)
),
?assertMatch(
{judgement, {failed, {malformed_context, #{
<<"missing">> := {v1_thrift_binary, {unexpected, _, _, _}}
}}}},
lists:last(flush_beats(Client, C))
).
junk_content_invalid_context(C) ->
Client = mk_client(C),
Junk = <<"STOP RIGHT THERE YOU CRIMINAL SCUM!">>,
JunkFragment = #bctx_ContextFragment{type = v1_thrift_binary, content = Junk},
Context = ?CONTEXT(#{<<"missing">> => JunkFragment}),
?assertThrow(
#bdcs_InvalidContext{},
call_judge(?API_RULESET_ID, Context, Client)
),
?assertMatch(
{judgement, {failed, {malformed_context, #{
<<"missing">> := {v1_thrift_binary, {unexpected, _, _, _}}
}}}},
lists:last(flush_beats(Client, C))
).
conflicting_context_invalid(C) ->
Client = mk_client(C),
Fragment1 = #{
user => #{
id => <<"joeblow">>,
email => Email1 = <<"deadinside69@example.org">>
},
requester => #{
ip => <<"1.2.3.4">>
}
},
Fragment2 = #{
user => #{
id => <<"joeblow">>,
email => <<"deadinside420@example.org">>
},
requester => #{
ip => <<"1.2.3.4">>
}
},
Context = ?CONTEXT(#{
<<"frag1">> => mk_ctx_v1_fragment(Fragment1),
<<"frag2">> => mk_ctx_v1_fragment(Fragment2)
}),
?assertThrow(
#bdcs_InvalidContext{},
call_judge(?API_RULESET_ID, Context, Client)
),
?assertEqual(
{judgement, {failed, {conflicting_context, #{
<<"frag2">> => #{user => #{email => Email1}}
}}}},
lists:last(flush_beats(Client, C))
).
distinct_sets_context_valid(C) ->
Client = mk_client(C),
Fragment1 = #{
user => #{
id => <<"joeblow">>,
orgs => mk_ordset([
#{
id => <<"hoolie">>,
roles => mk_ordset([#{id => <<"Administrator">>}])
},
#{
id => <<"weewrok">>,
roles => mk_ordset([#{id => <<"Administrator">>}])
}
])
}
},
Fragment2 = #{
user => #{
id => <<"joeblow">>,
orgs => mk_ordset([
#{
id => <<"hoolie">>,
roles => mk_ordset([#{id => <<"Nobody">>}])
},
#{
id => <<"blooply">>,
roles => mk_ordset([#{id => <<"Nobody">>}])
}
])
}
},
Context = ?CONTEXT(#{
<<"frag1">> => mk_ctx_v1_fragment(Fragment1),
<<"frag2">> => mk_ctx_v1_fragment(Fragment2)
}),
?assertMatch(
#bdcs_Judgement{},
call_judge(?API_RULESET_ID, Context, Client)
),
?assertMatch(
{judgement, {completed, _}},
lists:last(flush_beats(Client, C))
).
%%
-spec allowed_create_invoice_shop_manager(config()) -> ok.
-spec forbidden_expired(config()) -> ok.
-spec forbidden_blacklisted_ip(config()) -> ok.
-spec forbidden_w_empty_context(config()) -> ok.
allowed_create_invoice_shop_manager(C) ->
Client = mk_client(C),
Fragment = lists:foldl(fun maps:merge/2, #{}, [
mk_auth_session_token(),
mk_env(),
mk_op_create_invoice(<<"BLARG">>, <<"SHOP">>, <<"PARTY">>),
mk_user(<<"USER">>, mk_ordset([
mk_user_org(<<"PARTY">>, <<"OWNER">>, mk_ordset([
mk_role(<<"Manager">>, <<"SHOP">>)
]))
]))
]),
Context = ?CONTEXT(#{<<"root">> => mk_ctx_v1_fragment(Fragment)}),
?assertMatch(
?JUDGEMENT(allowed),
call_judge(?API_RULESET_ID, Context, Client)
),
?assertMatch(
{judgement, {completed, {allowed, [{<<"user_has_role">>, _}]}}},
lists:last(flush_beats(Client, C))
).
forbidden_expired(C) ->
Client = mk_client(C),
% Would be funny if this fails on some system too deep in the past.
Fragment = maps:merge(mk_env(), #{
auth => #{
method => <<"AccessToken">>,
expiration => <<"1991-12-26T17:00:00Z">> % 😢
}
}),
Context = ?CONTEXT(#{<<"root">> => mk_ctx_v1_fragment(Fragment)}),
?assertMatch(
?JUDGEMENT(forbidden),
call_judge(?API_RULESET_ID, Context, Client)
),
?assertMatch(
{judgement, {completed, {forbidden, [{<<"auth_expired">>, _}]}}},
lists:last(flush_beats(Client, C))
).
forbidden_blacklisted_ip(C) ->
Client = mk_client(C),
Fragment = lists:foldl(fun maps:merge/2, #{}, [
mk_auth_session_token(),
mk_env(),
% See test/policies/authz/blacklists/source-ip-range/data.json#L42
#{requester => #{ip => <<"91.41.147.55">>}}
]),
Context = ?CONTEXT(#{<<"root">> => mk_ctx_v1_fragment(Fragment)}),
?assertMatch(
?JUDGEMENT(forbidden),
call_judge(?API_RULESET_ID, Context, Client)
),
?assertMatch(
{judgement, {completed, {forbidden, [{<<"ip_range_blacklisted">>, _}]}}},
lists:last(flush_beats(Client, C))
).
forbidden_w_empty_context(C) ->
Client1 = mk_client(C),
EmptyFragment = mk_ctx_v1_fragment(#{}),
?assertMatch(
?JUDGEMENT(forbidden),
call_judge(?API_RULESET_ID, ?CONTEXT(#{}), Client1)
),
?assertMatch(
{judgement, {completed, {forbidden, [{<<"auth_required">>, _}]}}},
lists:last(flush_beats(Client1, C))
),
Client2 = mk_client(C),
?assertMatch(
?JUDGEMENT(forbidden),
call_judge(?API_RULESET_ID, ?CONTEXT(#{<<"empty">> => EmptyFragment}), Client2)
),
?assertMatch(
{judgement, {completed, {forbidden, [{<<"auth_required">>, _}]}}},
lists:last(flush_beats(Client2, C))
).
mk_user(UserID, UserOrgs) ->
#{user => #{
id => UserID,
orgs => UserOrgs
}}.
mk_user_org(OrgID, OwnerID, Roles) ->
#{
id => OrgID,
owner => #{id => OwnerID},
roles => Roles
}.
mk_role(RoleID, ShopID) ->
#{id => RoleID, scope => #{shop => #{id => ShopID}}}.
mk_auth_session_token() ->
mk_auth_session_token(erlang:system_time(second) + 3600).
mk_auth_session_token(ExpiresAt) ->
#{auth => #{
method => <<"SessionToken">>,
expiration => format_ts(ExpiresAt, second)
}}.
mk_op_create_invoice(InvoiceID, ShopID, PartyID) ->
#{capi => #{
op => #{
id => <<"CreateInvoice">>,
invoice => #{id => InvoiceID},
shop => #{id => ShopID},
party => #{id => PartyID}
}
}}.
mk_env() ->
#{env => #{
now => format_now()
}}.
format_now() ->
USec = erlang:system_time(second),
format_ts(USec, second).
format_ts(Ts, Unit) ->
Str = calendar:system_time_to_rfc3339(Ts, [{unit, Unit}, {offset, "Z"}]),
erlang:list_to_binary(Str).
%%
-spec connect_failed_means_unavailable(config()) -> ok.
-spec connect_timeout_means_unavailable(config()) -> ok.
-spec request_timeout_means_unknown(config()) -> ok.
connect_failed_means_unavailable(C) ->
C1 = start_bouncer([{opa, #{
endpoint => {?OPA_HOST, 65535},
transport => tcp,
event_handler => {ct_gun_event_h, []}
}}], C),
Client = mk_client(C1),
try
?assertError(
{woody_error, {external, resource_unavailable, _}},
call_judge(?API_RULESET_ID, ?CONTEXT(#{}), Client)
),
?assertMatch(
[
{judgement, started},
{judgement, {failed, {unavailable, {down, {shutdown, econnrefused}}}}}
],
flush_beats(Client, C1)
)
after
stop_bouncer(C1)
end.
connect_timeout_means_unavailable(C) ->
{ok, Proxy} = ct_proxy:start_link(?OPA_ENDPOINT, #{listen => ignore}),
C1 = start_proxy_bouncer(Proxy, C),
Client = mk_client(C1),
try
?assertError(
% NOTE
% Turns out it's somewhat hard to simulate a connection timeout when connecting to
% localhost. This is why we expect `result_unknown` here instead of
% `resource_unavailable`.
{woody_error, {external, result_unknown, _}},
call_judge(?API_RULESET_ID, ?CONTEXT(#{}), Client)
),
?assertMatch(
[
{judgement, started},
{judgement, {failed, {unknown, timeout}}}
],
flush_beats(Client, C1)
)
after
stop_bouncer(C1)
end.
request_timeout_means_unknown(C) ->
{ok, Proxy} = ct_proxy:start_link(?OPA_ENDPOINT),
C1 = start_proxy_bouncer(Proxy, C),
Client = mk_client(C1),
ok = change_proxy_mode(Proxy, connection, ignore, C1),
try
?assertError(
{woody_error, {external, result_unknown, _}},
call_judge(?API_RULESET_ID, ?CONTEXT(#{}), Client)
),
?assertMatch(
[
{judgement, started},
{judgement, {failed, {unknown, timeout}}}
],
flush_beats(Client, C1)
)
after
stop_bouncer(C1)
end.
start_proxy_bouncer(Proxy, C) ->
start_bouncer([{opa, #{
endpoint => ct_proxy:endpoint(Proxy),
transport => tcp,
event_handler => {ct_gun_event_h, []}
}}], C).
change_proxy_mode(Proxy, Scope, Mode, C) ->
ModeWas = ct_proxy:mode(Proxy, Scope, Mode),
_ = ct:pal(debug, "[~p] set proxy ~p from '~p' to '~p'",
[?CONFIG(testcase, C), Scope, ModeWas, Mode]),
ok.
%%
mk_ordset(L) ->
ordsets:from_list(L).
mk_ctx_v1_fragment(Context) ->
{ok, Content} = bouncer_context_v1:encode(thrift, Context),
#bctx_ContextFragment{type = v1_thrift_binary, content = Content}.
%%
mk_client(C) ->
WoodyCtx = woody_context:new(genlib:to_binary(?CONFIG(testcase, C))),
ServiceURLs = ?CONFIG(service_urls, C),
{WoodyCtx, ServiceURLs}.
call_judge(RulesetID, Context, Client) ->
call(arbiter, 'Judge', {genlib:to_binary(RulesetID), Context}, Client).
call(ServiceName, Fn, Args, {WoodyCtx, ServiceURLs}) ->
Service = get_service_spec(ServiceName),
Opts = #{
url => maps:get(ServiceName, ServiceURLs),
event_handler => scoper_woody_event_handler
},
case woody_client:call({Service, Fn, Args}, Opts, WoodyCtx) of
{ok, Response} ->
Response;
{exception, Exception} ->
throw(Exception)
end.
get_service_spec(arbiter) ->
{bouncer_decisions_thrift, 'Arbiter'}.
%%
-spec handle_beat(bouncer_arbiter_pulse:beat(), bouncer_arbiter_pulse:metadata(), pid()) ->
ok.
handle_beat(Beat, Metadata, StashPid) ->
_ = stash_beat(Beat, Metadata, StashPid),
ct:pal("~p [arbiter] ~0p:~nmetadata=~p", [self(), Beat, Metadata]).
%%
stash_beat(Beat, Metadata = #{woody_ctx := WoodyCtx}, StashPid) ->
ct_stash:append(StashPid, get_trace_id(WoodyCtx), {Beat, Metadata}).
flush_beats({WoodyCtx, _}, C) ->
StashPid = ?CONFIG(stash, C),
{ok, Entries} = ct_stash:flush(StashPid, get_trace_id(WoodyCtx)),
[Beat || {Beat, _Metadata} <- Entries].
get_trace_id(WoodyCtx) ->
RpcID = woody_context:get_rpc_id(WoodyCtx),
maps:get(trace_id, RpcID).
%%
-spec with_config(atom(), config(), fun ((_) -> R)) ->
R | undefined.
with_config(Name, C, Fun) ->
case lists:keyfind(Name, 1, C) of
{_, V} -> Fun(V);
false -> undefined
end.

208
test/ct_gun_event_h.erl Normal file

@@ -0,0 +1,208 @@
-module(ct_gun_event_h).
-behavior(gun_event).
-export([init/2]).
-export([domain_lookup_start/2]).
-export([domain_lookup_end/2]).
-export([connect_start/2]).
-export([connect_end/2]).
-export([tls_handshake_start/2]).
-export([tls_handshake_end/2]).
-export([request_start/2]).
-export([request_headers/2]).
-export([request_end/2]).
-export([push_promise_start/2]).
-export([push_promise_end/2]).
-export([response_start/2]).
-export([response_inform/2]).
-export([response_headers/2]).
-export([response_trailers/2]).
-export([response_end/2]).
-export([ws_upgrade/2]).
-export([ws_recv_frame_start/2]).
-export([ws_recv_frame_header/2]).
-export([ws_recv_frame_end/2]).
-export([ws_send_frame_start/2]).
-export([ws_send_frame_end/2]).
-export([protocol_changed/2]).
-export([transport_changed/2]).
-export([origin_changed/2]).
-export([cancel/2]).
-export([disconnect/2]).
-export([terminate/2]).
-type st() :: _.
-spec init(gun_event:init_event(), st()) ->
st().
init(Event, State) ->
_ = ct:pal("~p [gun] init: ~p", [self(), Event]),
State.
-spec domain_lookup_start(gun_event:domain_lookup_event(), st()) ->
st().
domain_lookup_start(Event, State) ->
_ = ct:pal("~p [gun] domain lookup start: ~p", [self(), Event]),
State.
-spec domain_lookup_end(gun_event:domain_lookup_event(), st()) ->
st().
domain_lookup_end(Event, State) ->
_ = ct:pal("~p [gun] domain lookup end: ~p", [self(), Event]),
State.
-spec connect_start(gun_event:connect_event(), st()) ->
st().
connect_start(Event, State) ->
_ = ct:pal("~p [gun] connect start: ~p", [self(), Event]),
State.
-spec connect_end(gun_event:connect_event(), st()) ->
st().
connect_end(Event, State) ->
_ = ct:pal("~p [gun] connect end: ~p", [self(), Event]),
State.
-spec tls_handshake_start(gun_event:tls_handshake_event(), st()) ->
st().
tls_handshake_start(Event, State) ->
_ = ct:pal("~p [gun] tls handshake start: ~p", [self(), Event]),
State.
-spec tls_handshake_end(gun_event:tls_handshake_event(), st()) ->
st().
tls_handshake_end(Event, State) ->
_ = ct:pal("~p [gun] tls handshake end: ~p", [self(), Event]),
State.
-spec request_start(gun_event:request_start_event(), st()) ->
st().
request_start(Event, State) ->
_ = ct:pal("~p [gun] request start: ~p", [self(), Event]),
State.
-spec request_headers(gun_event:request_start_event(), st()) ->
st().
request_headers(Event, State) ->
_ = ct:pal("~p [gun] request headers: ~p", [self(), Event]),
State.
-spec request_end(gun_event:request_end_event(), st()) ->
st().
request_end(Event, State) ->
_ = ct:pal("~p [gun] request end: ~p", [self(), Event]),
State.
-spec push_promise_start(gun_event:push_promise_start_event(), st()) ->
st().
push_promise_start(Event, State) ->
_ = ct:pal("~p [gun] push promise start: ~p", [self(), Event]),
State.
-spec push_promise_end(gun_event:push_promise_end_event(), st()) ->
st().
push_promise_end(Event, State) ->
_ = ct:pal("~p [gun] push promise end: ~p", [self(), Event]),
State.
-spec response_start(gun_event:response_start_event(), st()) ->
st().
response_start(Event, State) ->
_ = ct:pal("~p [gun] response start: ~p", [self(), Event]),
State.
-spec response_inform(gun_event:response_headers_event(), st()) ->
st().
response_inform(Event, State) ->
_ = ct:pal("~p [gun] response inform: ~p", [self(), Event]),
State.
-spec response_headers(gun_event:response_headers_event(), st()) ->
st().
response_headers(Event, State) ->
_ = ct:pal("~p [gun] response headers: ~p", [self(), Event]),
State.
-spec response_trailers(gun_event:response_trailers_event(), st()) ->
st().
response_trailers(Event, State) ->
_ = ct:pal("~p [gun] response trailers: ~p", [self(), Event]),
State.
-spec response_end(gun_event:response_end_event(), st()) ->
st().
response_end(Event, State) ->
_ = ct:pal("~p [gun] response end: ~p", [self(), Event]),
State.
-spec ws_upgrade(gun_event:ws_upgrade_event(), st()) ->
st().
ws_upgrade(Event, State) ->
_ = ct:pal("~p [gun] ws upgrade: ~p", [self(), Event]),
State.
-spec ws_recv_frame_start(gun_event:ws_recv_frame_start_event(), st()) ->
st().
ws_recv_frame_start(Event, State) ->
_ = ct:pal("~p [gun] ws recv frame start: ~p", [self(), Event]),
State.
-spec ws_recv_frame_header(gun_event:ws_recv_frame_header_event(), st()) ->
st().
ws_recv_frame_header(Event, State) ->
_ = ct:pal("~p [gun] ws recv frame header: ~p", [self(), Event]),
State.
-spec ws_recv_frame_end(gun_event:ws_recv_frame_end_event(), st()) ->
st().
ws_recv_frame_end(Event, State) ->
_ = ct:pal("~p [gun] ws recv frame end: ~p", [self(), Event]),
State.
-spec ws_send_frame_start(gun_event:ws_send_frame_event(), st()) ->
st().
ws_send_frame_start(Event, State) ->
_ = ct:pal("~p [gun] ws send frame start: ~p", [self(), Event]),
State.
-spec ws_send_frame_end(gun_event:ws_send_frame_event(), st()) ->
st().
ws_send_frame_end(Event, State) ->
_ = ct:pal("~p [gun] ws send frame end: ~p", [self(), Event]),
State.
-spec protocol_changed(gun_event:protocol_changed_event(), st()) ->
st().
protocol_changed(Event, State) ->
_ = ct:pal("~p [gun] protocol changed: ~p", [self(), Event]),
State.
-spec transport_changed(gun_event:transport_changed_event(), st()) ->
st().
transport_changed(Event, State) ->
_ = ct:pal("~p [gun] transport changed: ~p", [self(), Event]),
State.
-spec origin_changed(gun_event:origin_changed_event(), st()) ->
st().
origin_changed(Event, State) ->
_ = ct:pal("~p [gun] origin changed: ~p", [self(), Event]),
State.
-spec cancel(gun_event:cancel_event(), st()) ->
st().
cancel(Event, State) ->
_ = ct:pal("~p [gun] cancel: ~p", [self(), Event]),
State.
-spec disconnect(gun_event:disconnect_event(), st()) ->
st().
disconnect(Event, State) ->
_ = ct:pal("~p [gun] disconnect: ~p", [self(), Event]),
State.
-spec terminate(gun_event:terminate_event(), st()) ->
st().
terminate(Event, State) ->
_ = ct:pal("~p [gun] terminate: ~p", [self(), Event]),
State.

281
test/ct_proxy.erl Normal file

@@ -0,0 +1,281 @@
-module(ct_proxy).
-export([start_link/1]).
-export([start_link/2]).
-export([unlink/1]).
-export([endpoint/1]).
-export([mode/2]).
-export([mode/3]).
-export([stop/1]).
%%
-behaviour(gen_server).
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
%%
-include_lib("kernel/include/inet.hrl").
-type endpoint() :: {inet:hostname(), inet:port_number()}.
-type scope() :: listen | connection.
-type mode() :: ignore | stop | relay.
-type modes() :: #{scope() => mode()}.
-type proxy() :: pid().
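%% Scopes control two independent aspects of the proxy:
%%  * `listen`: `stop` keeps the listening socket closed, `ignore` opens it
%%    without accepting connections, and `relay` additionally runs the acceptor;
%%  * `connection`: consulted on every chunk received from a client, deciding
%%    whether to close the connection (`stop`), keep reading while discarding
%%    the data (`ignore`), or connect to the upstream and relay traffic (`relay`).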
-spec start_link(endpoint()) ->
{ok, proxy()}.
-spec start_link(endpoint(), modes()) ->
{ok, proxy()}.
-spec start_link(endpoint(), modes(), ranch_tcp:opts()) ->
{ok, proxy()}.
-spec unlink(proxy()) ->
proxy().
start_link(Upstream) ->
start_link(Upstream, #{}).
start_link(Upstream, Modes) ->
start_link(Upstream, Modes, [{ip, {127, 0, 0, 1}}, {backlog, 1}]).
start_link(Upstream, Modes, SocketOpts) ->
Args = {resolve_endpoint(Upstream), Modes, SocketOpts},
gen_server:start_link(?MODULE, Args, []).
resolve_endpoint({Host, Port}) ->
{ok, #hostent{h_addr_list = [Address | _Rest]}} = inet:gethostbyname(Host),
{Address, Port}.
unlink(Proxy) when is_pid(Proxy) ->
true = erlang:unlink(Proxy),
Proxy.
-spec endpoint(proxy()) ->
endpoint().
endpoint(Proxy) when is_pid(Proxy) ->
gen_server:call(Proxy, endpoint).
-spec mode(proxy(), scope()) ->
{mode(), _Upstream :: endpoint()}.
mode(Proxy, Scope) when is_pid(Proxy) ->
gen_server:call(Proxy, {mode, Scope}).
-spec mode(proxy(), scope(), mode()) ->
mode().
mode(Proxy, Scope, Mode) when is_pid(Proxy) ->
gen_server:call(Proxy, {mode, Scope, Mode}).
-spec stop(proxy()) ->
ok.
stop(Proxy) when is_pid(Proxy) ->
proc_lib:stop(Proxy, shutdown, infinity).
%%
-record(st, {
lsock :: _Socket | undefined,
lsockopts :: list(),
acceptor :: pid() | undefined,
modes :: #{scope() => mode()},
upstream :: {inet:ip_address(), inet:port_number()}
}).
-type st() :: #st{}.
-spec init(_) ->
{ok, st()}.
init({Upstream, Modes0, SocketOpts}) ->
Modes = maps:merge(#{listen => relay, connection => relay}, Modes0),
St = #st{
lsockopts = SocketOpts,
modes = Modes,
upstream = Upstream
},
{ok, sync_mode(listen, stop, maps:get(listen, Modes), St)}.
-spec handle_call(_Call, _From, st()) ->
{reply, _, st()} | {noreply, st()}.
handle_call(endpoint, _From, St = #st{}) ->
{reply, get_endpoint(St), St};
handle_call({mode, Scope, Mode}, _From, St = #st{modes = Modes}) ->
ModeWas = maps:get(Scope, Modes),
StNext = sync_mode(Scope, ModeWas, Mode, St),
{reply, ModeWas, StNext#st{modes = Modes#{Scope := Mode}}};
handle_call({mode, Scope}, _From, St = #st{modes = Modes, upstream = Endpoint}) ->
{reply, {maps:get(Scope, Modes), Endpoint}, St};
handle_call(_Call, _From, St) ->
{noreply, St}.
-spec handle_cast(_Cast, st()) ->
{noreply, st()}.
handle_cast(_Cast, St) ->
{noreply, St}.
-spec handle_info(_Info, st()) ->
{noreply, st()}.
handle_info(_Info, St) ->
{noreply, St}.
-spec terminate(_Reason, st()) ->
_.
terminate(_Reason, _St) ->
ok.
-spec code_change(_Vsn | {down, _Vsn}, st(), _Extra) ->
{ok, st()}.
code_change(_Vsn, St, _Extra) ->
{ok, St}.
%%
get_endpoint(#st{lsock = undefined}) ->
undefined;
get_endpoint(#st{lsock = LSock}) ->
{ok, {IP, Port}} = ranch_tcp:sockname(LSock),
{inet:ntoa(IP), Port}.
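%% sync_mode(Scope, From, To, St) applies the side effects needed to move the
%% `listen` scope between modes: stop -> ignore opens the listening socket,
%% ignore -> relay starts the acceptor, and the reverse transitions tear them
%% down; stop <-> relay is performed as two hops through `ignore`. The
%% `connection` scope needs no side effects here, it is only read per connection.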
sync_mode(listen, Mode, Mode, St) ->
St;
sync_mode(listen = Scope, stop, relay, St) ->
St1 = sync_mode(Scope, stop, ignore, St),
St2 = sync_mode(Scope, ignore, relay, St1),
St2;
sync_mode(listen, stop, ignore, St) ->
start_listener(St);
sync_mode(listen, ignore, relay, St) ->
start_acceptor(St);
sync_mode(listen = Scope, relay, stop, St) ->
St1 = sync_mode(Scope, relay, ignore, St),
St2 = sync_mode(Scope, ignore, stop, St1),
St2;
sync_mode(listen, relay, ignore, St) ->
stop_acceptor(St);
sync_mode(listen, ignore, stop, St) ->
stop_listener(St);
sync_mode(connection, _, _, St) ->
St.
start_listener(St = #st{lsock = undefined, lsockopts = SocketOpts}) ->
ct:pal("start_listener @ ~p", [St]),
{ok, LSock} = ranch_tcp:listen(SocketOpts),
St#st{lsock = LSock}.
stop_listener(St = #st{lsock = LSock}) when LSock /= undefined ->
ct:pal("stop_listener @ ~p", [St]),
ok = ranch_tcp:close(LSock),
St#st{lsock = undefined}.
%%
start_acceptor(St = #st{acceptor = undefined, lsock = LSock}) ->
ct:pal("start_acceptor @ ~p", [St]),
Parent = self(),
Pid = erlang:spawn_link(fun () -> loop_acceptor(Parent, LSock) end),
St#st{acceptor = Pid}.
stop_acceptor(St = #st{acceptor = Pid}) when is_pid(Pid) ->
ct:pal("stop_acceptor @ ~p", [St]),
MRef = erlang:monitor(process, Pid),
true = erlang:unlink(Pid),
true = erlang:exit(Pid, shutdown),
receive {'DOWN', MRef, process, Pid, _Reason} ->
St#st{acceptor = undefined}
end.
loop_acceptor(Parent, LSock) ->
_ = case ranch_tcp:accept(LSock, infinity) of
{ok, CSock} ->
_ = ct:pal("accepted ~p from ~p", [CSock, ranch_tcp:peername(CSock)]),
_ = spawn_proxy_connection(Parent, CSock),
loop_acceptor(Parent, LSock);
{error, Reason} ->
exit(Reason)
end.
%%
-define(PROXY_RECV_TIMEOUT, 1000).
-define(PROXY_SOCKET_OPTS, [{packet, 0}, {active, once}]).
-record(proxy, {
insock :: _Socket,
upsock :: _Socket | undefined,
parent :: pid(),
upstream :: endpoint() | undefined,
buffer = <<>> :: binary(),
timeout = ?PROXY_RECV_TIMEOUT :: timeout()
}).
spawn_proxy_connection(Parent, CSock) ->
ProxySt = #proxy{insock = CSock, parent = Parent},
erlang:spawn_link(fun () -> loop_proxy_connection(ProxySt) end).
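%% Each accepted connection reads from the client and re-checks the parent's
%% `connection` mode on every chunk: `stop` closes the client socket, `ignore`
%% keeps reading while discarding data, and `relay` connects to the upstream,
%% flushes whatever has been buffered so far, and then shuttles traffic in both
%% directions until either side closes.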
loop_proxy_connection(St = #proxy{insock = InSock, parent = Parent, buffer = Buffer}) ->
case ranch_tcp:recv(InSock, 0, ?PROXY_RECV_TIMEOUT) of
{ok, Data} ->
Buffer1 = <<Buffer/binary, Data/binary>>,
{Mode, Endpoint} = mode(Parent, connection),
case Mode of
stop ->
terminate(St);
ignore ->
loop_proxy_connection(St);
relay ->
loop_proxy_relay(St#proxy{buffer = Buffer1, upstream = Endpoint})
end;
_ ->
terminate(St)
end.
loop_proxy_relay(St = #proxy{upsock = undefined, upstream = Endpoint, buffer = Buffer}) ->
case remote_connect(Endpoint) of
{ok, Socket} ->
ok = ranch_tcp:send(Socket, Buffer),
loop_proxy_relay(St#proxy{upsock = Socket, buffer = <<>>});
{error, _Error} ->
terminate(St)
end;
loop_proxy_relay(St = #proxy{insock = InSock, upsock = UpSock}) ->
ok = ranch_tcp:setopts(InSock, ?PROXY_SOCKET_OPTS),
ok = ranch_tcp:setopts(UpSock, ?PROXY_SOCKET_OPTS),
receive
{_, InSock, Data} ->
ranch_tcp:send(UpSock, Data),
loop_proxy_relay(St);
{_, UpSock, Data} ->
ranch_tcp:send(InSock, Data),
loop_proxy_relay(St);
{tcp_closed, UpSock} ->
terminate(St);
{tcp_closed, InSock} ->
ranch_tcp:close(UpSock);
_ ->
_ = ranch_tcp:close(UpSock),
terminate(St)
end.
remote_connect({IP, Port}) ->
gen_tcp:connect(IP, Port, [binary, {packet, 0}, {delay_send, true}]).
terminate(#proxy{insock = InSock}) ->
ranch_tcp:close(InSock).
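Not part of the diff: a minimal sketch of how a test case might drive ct_proxy. The upstream endpoint {"localhost", 8022} and the helper name are illustrative assumptions only.

%% Hypothetical helper, for illustration only.
proxy_usage_sketch() ->
    {ok, Proxy} = ct_proxy:start_link({"localhost", 8022}),
    {_Host, _Port} = ct_proxy:endpoint(Proxy),
    %% mode/3 returns the previous mode; connections default to `relay`.
    relay = ct_proxy:mode(Proxy, connection, ignore),
    %% ... point the client under test at endpoint(Proxy) here ...
    ok = ct_proxy:stop(Proxy).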

96
test/ct_stash.erl Normal file
View File

@@ -0,0 +1,96 @@
-module(ct_stash).
-behaviour(gen_server).
-export([start/0]).
-export([destroy/1]).
-export([append/3]).
-export([flush/2]).
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-define(CALL_TIMEOUT, 1000).
%%% API
-type key() :: _.
-type entry() :: _.
-spec start() ->
{ok, pid()}.
start() ->
gen_server:start(?MODULE, [], []).
-spec destroy(pid()) ->
ok | {error, {nonempty, _Left :: #{key() => entry()}}}.
destroy(Pid) ->
call(Pid, destroy).
-spec append(pid(), key(), entry()) ->
ok.
append(Pid, Key, Entry) ->
call(Pid, {append, Key, Entry}).
-spec flush(pid(), key()) ->
{ok, [entry()]} | error.
flush(Pid, Key) ->
call(Pid, {flush, Key}).
call(Pid, Msg) ->
gen_server:call(Pid, Msg, ?CALL_TIMEOUT).
%%% gen_server callbacks
-spec init(term()) ->
{ok, map()}.
init(_) ->
{ok, #{}}.
-spec handle_call(term(), {pid(), term()}, map()) ->
{reply, term(), map()} | {stop, term(), term(), map()}.
handle_call({append, Key, Entry}, _From, State) ->
Entries = maps:get(Key, State, []),
State1 = maps:put(Key, [Entry | Entries], State),
{reply, ok, State1};
handle_call({flush, Key}, _From, State) ->
case maps:take(Key, State) of
{Entries, State1} ->
{reply, {ok, lists:reverse(Entries)}, State1};
error ->
{reply, error, State}
end;
handle_call(destroy, _From, State) ->
case maps:size(State) of
0 ->
{stop, shutdown, ok, State};
_ ->
Left = maps:map(fun (_, Entries) -> lists:reverse(Entries) end, State),
Reason = {error, {nonempty, Left}},
{stop, Reason, Reason, State}
end.
-spec handle_cast(term(), map()) -> {noreply, map()}.
handle_cast(_Msg, State) ->
{noreply, State}.
-spec handle_info(term(), map()) -> {noreply, map()}.
handle_info(_Info, State) ->
{noreply, State}.
-spec terminate(term(), map()) -> ok.
terminate(_Reason, _State) ->
ok.
-spec code_change(term(), map(), term()) -> {ok, map()}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
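Not part of the diff: a minimal sketch of the stash's expected call sequence; the key and entries are invented for illustration.

%% Hypothetical usage, for illustration only.
stash_usage_sketch() ->
    {ok, Stash} = ct_stash:start(),
    ok = ct_stash:append(Stash, events, first),
    ok = ct_stash:append(Stash, events, second),
    %% Entries come back in insertion order, and the key is removed.
    {ok, [first, second]} = ct_stash:flush(Stash, events),
    error = ct_stash:flush(Stash, events),
    %% destroy/1 succeeds only if nothing was left unflushed.
    ok = ct_stash:destroy(Stash).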

View File

@@ -0,0 +1,119 @@
package authz.api
import data.authz.blacklists
assertions := {
"forbidden" : { why | forbidden[why] },
"allowed" : { why | allowed[why] }
}
# Set of assertions which tell why operation under the input context is forbidden.
# When the set is empty operation is not explicitly forbidden.
# Each element must be a 2-item array of the following form:
# ```
# ["code", "description"]
# ```
forbidden[why] {
input
not input.auth.method
why := [
"auth_required",
"Authorization is required"
]
}
forbidden[why] {
exp := time.parse_rfc3339_ns(input.auth.expiration)
now := time.parse_rfc3339_ns(input.env.now)
now > exp
why := [
"auth_expired",
sprintf("Authorization is expired at: %s", [input.auth.expiration])
]
}
forbidden[why] {
ip := input.requester.ip
blacklist := blacklists["source-ip-range"]
matches := net.cidr_contains_matches(blacklist, ip)
ranges := [ range | matches[_][0] = i; range := blacklist[i] ]
why := [
"ip_range_blacklisted",
sprintf("Requester IP address is blacklisted with ranges: %v", [concat(", ", ranges)])
]
}
# Set of assertions which tell why operation under the input context is allowed.
# When the set is empty operation is not explicitly allowed.
# Each element must be a 2-item array of the following form:
# ```
# ["code", "description"]
# ```
allowed[why] {
input.auth.method == "SessionToken"
input.user
org_allowed[why]
}
org_allowed[why] {
org := org_by_operation
org.owner == input.user.id
why := [
"user_is_owner",
"User is the organisation owner itself"
]
}
org_allowed[why] {
rolename := role_by_operation[_]
org_by_operation.roles[i].id == rolename
scopename := scopename_by_role[i]
why := [
"user_has_role",
sprintf("User has role %s in scope %v", [rolename, scopename])
]
}
scopename_by_role[i] = sprintf("shop:%s", [shop]) {
role := org_by_operation.roles[i]
shop := role.scope.shop.id
shop == input.capi.op.shop.id
}
scopename_by_role[i] = "*" {
role := org_by_operation.roles[i]
not role.scope
}
# Set of roles at least one of which is required to perform the operation in context.
role_by_operation["Manager"]
{ input.capi.op.id == "CreateInvoice" }
{ input.capi.op.id == "GetInvoiceByID" }
{ input.capi.op.id == "GetInvoiceEvents" }
{ input.capi.op.id == "FulfillInvoice" }
{ input.capi.op.id == "RescindInvoice" }
{ input.capi.op.id == "GetPayments" }
{ input.capi.op.id == "GetPaymentByID" }
{ input.capi.op.id == "CancelPayment" }
{ input.capi.op.id == "CapturePayment" }
{ input.capi.op.id == "GetRefunds" }
{ input.capi.op.id == "GetRefundByID" }
{ input.capi.op.id == "CreateRefund" }
{ input.capi.op.id == "CreateInvoiceTemplate" }
{ input.capi.op.id == "GetInvoiceTemplateByID" }
{ input.capi.op.id == "UpdateInvoiceTemplate" }
{ input.capi.op.id == "DeleteInvoiceTemplate" }
role_by_operation["Administrator"]
{ input.orgmgmt.op.id == "ListInvitations" }
{ input.orgmgmt.op.id == "CreateInvitation" }
{ input.orgmgmt.op.id == "GetInvitation" }
{ input.orgmgmt.op.id == "RevokeInvitation" }
# Context of an organisation which is being operated upon.
org_by_operation = org_by_id[id]
{ id = input.capi.op.party.id }
{ id = input.orgmgmt.op.organization.id }
# A mapping of org ids to organizations.
org_by_id := { org.id: org | org := input.user.orgs[_] }
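To make the assertions above concrete, here is a purely illustrative input context, rendered as an Erlang map mirroring the JSON document the policy evaluates. With these invented timestamps the policy would return the "auth_expired" forbidden assertion, since env.now is later than auth.expiration.

%% Illustrative only: an input context this policy would mark as forbidden.
example_forbidden_context() ->
    #{
        <<"auth">> => #{
            <<"method">> => <<"SessionToken">>,
            <<"expiration">> => <<"2020-01-01T00:00:00Z">>
        },
        <<"env">> => #{
            <<"now">> => <<"2020-06-01T00:00:00Z">>
        }
    }.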

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,5 @@
package trivial.incorrect1
assertions := {
"fordibben" : { }
}

View File

@@ -0,0 +1,20 @@
package trivial.incorrect2
assertions := {
"forbidden" : forbidden,
"allowed" : allowed
}
forbidden[why] = description {
input
not input.auth.method
why := "auth_required"
description := "Authorization is required"
}
allowed[why] = description {
input.auth.method == "SessionToken"
input.user
why := "its_a_user"
description := "Then why not?"
}