IMP-284: Implement new DB schema (#1)

* IMP-284: Implement new DB schema

* WIP

* WIP

* SQL request for committing

* Implement RepositoryClient and UserOpManagement

* Raw integration tests

* WIP Testing

* WIP Testing

* Tests WIP

* Insert objects rebuild

* Reworked SQL requests and made tests work

* Extra tests

* Extra tests

* Update/Delete work

* Rename everything from dmt_v2 to dmt
This commit is contained in:
ndiezel0 2024-10-18 03:37:46 +05:00 committed by GitHub
parent 57aab45c92
commit 604d1b5b78
41 changed files with 3917 additions and 81 deletions

.env Normal file

@@ -0,0 +1,5 @@
SERVICE_NAME=dominant-v2
OTP_VERSION=27.1
REBAR_VERSION=3.23
THRIFT_VERSION=0.14.2.3
DATABASE_URL=postgresql://postgres:postgres@db/dmt

.gitignore vendored

@@ -1,20 +1,20 @@
.rebar3
_build
_checkouts
_vendor
.eunit
*.o
*.beam
*.plt
*.swp
*.swo
.erlang.cookie
ebin
# general
log
erl_crash.dump
.rebar
logs
.idea
*.iml
rebar3.crashdump
/_build/
/_checkouts/
*~
erl_crash.dump
rebar3.crashdump
.tags*
*.sublime-workspace
.edts
.DS_Store
/.idea/
*.beam
/test/log/
tags
.image.dev
bin
.codestract

.gitmodules vendored Normal file

@@ -0,0 +1,4 @@
[submodule "psql-migration"]
path = psql-migration
url = git@github.com:valitydev/psql-migration.git
branch = get_rid_of_binary

.tool-versions Normal file

@@ -0,0 +1,2 @@
erlang 27.1
rebar 3.23.0

Dockerfile Normal file

@@ -0,0 +1,59 @@
ARG OTP_VERSION
# Build the release
FROM docker.io/library/erlang:${OTP_VERSION} AS builder
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Install thrift compiler
ARG THRIFT_VERSION
ARG TARGETARCH
RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${TARGETARCH}.tar.gz" \
| tar -xvz -C /usr/local/bin/
# Hack ssh fetch and copy sources
ARG FETCH_TOKEN
RUN git config --global url."https://${FETCH_TOKEN}@github.com/".insteadOf ssh://git@github.com/ ;\
mkdir /build
COPY . /build/
# Build the release
WORKDIR /build
RUN rebar3 compile && \
rebar3 as prod release
# Make a runner image
FROM docker.io/library/erlang:${OTP_VERSION}-slim
ARG SERVICE_NAME
ARG USER_UID=1001
ARG USER_GID=$USER_UID
# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8
# Set runtime
WORKDIR /opt/${SERVICE_NAME}
COPY --from=builder /build/_build/prod/rel/${SERVICE_NAME} /opt/${SERVICE_NAME}
# Set up migration
COPY --from=builder /build/migrations /opt/${SERVICE_NAME}/migrations
COPY --from=builder /build/.env /opt/${SERVICE_NAME}/.env
ENV WORK_DIR=/opt/${SERVICE_NAME}
RUN echo "#!/bin/sh" >> /entrypoint.sh && \
echo "exec /opt/${SERVICE_NAME}/bin/${SERVICE_NAME} foreground" >> /entrypoint.sh && \
chmod +x /entrypoint.sh
RUN groupadd --gid ${USER_GID} ${SERVICE_NAME} && \
mkdir /var/log/${SERVICE_NAME} && \
chown ${USER_UID}:${USER_GID} /var/log/${SERVICE_NAME} && \
useradd --uid ${USER_UID} --gid ${USER_GID} -M ${SERVICE_NAME}
USER ${SERVICE_NAME}
ENTRYPOINT []
CMD ["/entrypoint.sh"]
EXPOSE 8022

Dockerfile.dev Normal file

@@ -0,0 +1,17 @@
ARG OTP_VERSION
FROM docker.io/library/erlang:${OTP_VERSION}
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Install thrift compiler
ARG THRIFT_VERSION
ARG TARGETARCH
RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${TARGETARCH}.tar.gz" \
| tar -xvz -C /usr/local/bin/
# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8
# Set runtime
CMD ["/bin/bash"]

Makefile Normal file

@@ -0,0 +1,119 @@
# HINT
# Use this file to override variables here.
# For example, to run with podman put `DOCKER=podman` there.
-include Makefile.env
# NOTE
# Variables specified in `.env` file are used to pick and setup specific
# component versions, both when building a development image and when running
# CI workflows on GH Actions. This ensures that tasks run with `wc-` prefix
# (like `wc-dialyze`) are reproducible between local machine and CI runners.
DOTENV := $(shell grep -v '^\#' .env)
# Development images
DEV_IMAGE_TAG = $(TEST_CONTAINER_NAME)-dev
DEV_IMAGE_ID = $(file < .image.dev)
DOCKER ?= docker
DOCKERCOMPOSE ?= docker compose
DOCKERCOMPOSE_W_ENV = DEV_IMAGE_TAG=$(DEV_IMAGE_TAG) $(DOCKERCOMPOSE) -f compose.yaml -f compose.tracing.yaml
REBAR ?= rebar3
TEST_CONTAINER_NAME ?= testrunner
all: compile
.PHONY: dev-image clean-dev-image wc-shell test
dev-image: .image.dev
get-submodules:
git submodule init
git submodule update
.image.dev: get-submodules Dockerfile.dev .env
env $(DOTENV) $(DOCKERCOMPOSE_W_ENV) build $(TEST_CONTAINER_NAME)
$(DOCKER) image ls -q -f "reference=$(DEV_IMAGE_TAG)" | head -n1 > $@
clean-dev-image:
ifneq ($(DEV_IMAGE_ID),)
$(DOCKER) image rm -f $(DEV_IMAGE_TAG)
rm .image.dev
endif
DOCKER_WC_OPTIONS := -v $(PWD):$(PWD) --workdir $(PWD)
DOCKER_WC_EXTRA_OPTIONS ?= --rm
DOCKER_RUN = $(DOCKER) run -t $(DOCKER_WC_OPTIONS) $(DOCKER_WC_EXTRA_OPTIONS)
DOCKERCOMPOSE_RUN = $(DOCKERCOMPOSE_W_ENV) run --rm $(DOCKER_WC_OPTIONS)
# Utility tasks
wc-shell: dev-image
$(DOCKER_RUN) --interactive --tty $(DEV_IMAGE_TAG)
wc-%: dev-image
$(DOCKER_RUN) $(DEV_IMAGE_TAG) make $*
wdeps-shell: dev-image
$(DOCKERCOMPOSE_RUN) $(TEST_CONTAINER_NAME) su; \
$(DOCKERCOMPOSE_W_ENV) down
wdeps-%: dev-image
$(DOCKERCOMPOSE_RUN) -T $(TEST_CONTAINER_NAME) make $(if $(MAKE_ARGS),$(MAKE_ARGS) $*,$*); \
res=$$?; \
$(DOCKERCOMPOSE_W_ENV) down; \
exit $$res
# Submodules tasks
make_psql_migration:
make -C psql-migration/
mkdir -p bin
mkdir -p migrations
cp ./psql-migration/_build/default/bin/psql_migration ./bin
# Rebar tasks
rebar-shell:
$(REBAR) shell
compile:
$(REBAR) compile
xref:
$(REBAR) xref
lint:
$(REBAR) lint
check-format:
$(REBAR) fmt -c
dialyze:
$(REBAR) as test dialyzer
release:
$(REBAR) as prod release
eunit:
$(REBAR) eunit --cover
common-test:
$(REBAR) ct --cover
cover:
$(REBAR) covertool generate
format:
$(REBAR) fmt -w
clean:
$(REBAR) clean
distclean: clean-dev-image
rm -rf _build
test: eunit common-test
cover-report:
$(REBAR) cover

@@ -1,9 +1,31 @@
# dominant_v2

An OTP application

## Migration

First, compile the migration script with
```shell
make wc-make_psql_migration
```
Then you can use the script with
```shell
bin/psql_migration -e .env
```
```shell
Usage: psql_migration [-h] [-d [<dir>]] [-e [<env>]] <command>
-h, --help Print this help text
-d, --dir Migration folder [default: migrations]
-e, --env Environment file to search for DATABASE_URL [default: .env]
new <name> Create a new migration
list List migrations indicating which have been applied
run Run all migrations
revert Revert the last migration
reset Resets your database by dropping the database in your
DATABASE_URL and then runs `setup`
setup Creates the database specified in your DATABASE_URL, and
runs any existing migrations.
```

apps/dmt/src/dmt.app.src Normal file

@@ -0,0 +1,30 @@
{application, dmt, [
{description, "An OTP application"},
{vsn, "0.1.0"},
{registered, []},
{mod, {dmt_app, []}},
{applications, [
kernel,
stdlib,
damsel,
epg_connector,
epgsql,
epgsql_pool,
eql,
erl_health,
getopt,
envloader,
woody_user_identity,
jsx,
dmt_core,
dmt_object,
opentelemetry_api,
opentelemetry_exporter,
opentelemetry
]},
{env, []},
{modules, []},
{licenses, ["Apache-2.0"]},
{links, []}
]}.

@@ -0,0 +1,43 @@
-module(dmt_api_woody_utils).
-export([get_woody_client/1]).
-export([ensure_woody_deadline_set/2]).
%% API
-spec get_woody_client(atom()) -> woody_client:options().
get_woody_client(Service) ->
Services = genlib_app:env(dmt_api, services, #{}),
make_woody_client(maps:get(Service, Services)).
-spec make_woody_client(#{atom() => _}) -> woody_client:options().
make_woody_client(#{url := Url} = Service) ->
lists:foldl(
fun(Opt, Acc) ->
case maps:get(Opt, Service, undefined) of
undefined -> Acc;
Value -> Acc#{Opt => Value}
end
end,
#{
url => Url,
event_handler => get_woody_event_handlers()
},
[
transport_opts,
resolver_opts
]
).
-spec get_woody_event_handlers() -> woody:ev_handlers().
get_woody_event_handlers() ->
genlib_app:env(dmt_api, woody_event_handlers, [scoper_woody_event_handler]).
-spec ensure_woody_deadline_set(woody_context:ctx(), woody_deadline:deadline()) -> woody_context:ctx().
ensure_woody_deadline_set(WoodyContext, Default) ->
case woody_context:get_deadline(WoodyContext) of
undefined ->
woody_context:set_deadline(Default, WoodyContext);
_Other ->
WoodyContext
end.
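%% Usage sketch (illustrative; mirrors the handlers later in this commit):
%%   Default = woody_deadline:from_timeout(timer:seconds(30)),
%%   WoodyContext1 = dmt_api_woody_utils:ensure_woody_deadline_set(WoodyContext0, Default),
%% a deadline already set by the caller wins, otherwise the default applies.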

@@ -1,16 +1,16 @@
%%%-------------------------------------------------------------------
%% @doc dmt public API
%% @end
%%%-------------------------------------------------------------------
-module(dmt_app).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
dmt_sup:start_link().
stop(_State) ->
ok.

@@ -0,0 +1,332 @@
-module(dmt_db_migration).
%% API exports
-export([process/1]).
%%====================================================================
%% API functions
%%====================================================================
-define(OPTS, [
{dir, $d, "dir", {string, "migrations"}, "Migration folder"},
{env, $e, "env", {string, ".env"}, "Environment file to search for DATABASE_URL"}
]).
-spec process(list()) -> ok | {error, any()}.
process(Args) ->
handle_command(getopt:parse(?OPTS, Args)).
handle_command({error, Error}) ->
logger:error("~p", [Error]),
{error, Error};
handle_command({ok, {Args, ["new", Name]}}) ->
Dir = target_dir(Args),
Timestamp = erlang:system_time(seconds),
MigrationName = [integer_to_list(Timestamp), "-", Name, ".sql"],
Filename = filename:join(Dir, MigrationName),
C = [
"-- ",
Filename,
"\n",
"-- :up\n",
"-- Up migration\n\n",
"-- :down\n",
"-- Down migration\n"
],
Result =
case file:write_file(Filename, list_to_binary(C), [exclusive]) of
ok -> {ok, "Created migration: ~s~n", [Filename]};
{error, Reason} -> {error, "Migration can not be written to file ~s: ~s~n", [Filename, Reason]}
end,
handle_command_result(Result);
handle_command({ok, {Args, ["run"]}}) ->
Available = available_migrations(Args),
Result = with_connection(
Args,
fun(Conn) ->
Applied = applied_migrations(Conn),
ToApply = lists:filter(fun({Mig, _}) -> not lists:member(Mig, Applied) end, Available),
Results = apply_migrations(up, ToApply, Conn),
report_migrations(up, Results)
end
),
handle_command_result(Result);
handle_command({ok, {Args, ["revert"]}}) ->
Available = available_migrations(Args),
Result = with_connection(
Args,
fun(Conn) ->
case applied_migrations(Conn) of
[] ->
{error, "No applied migrations to revert"};
Applied ->
LastApplied = lists:last(Applied),
case lists:keyfind(LastApplied, 1, Available) of
false ->
{error, "Migration ~p can not be found~n", [LastApplied]};
Migration ->
Results = apply_migrations(down, [Migration], Conn),
report_migrations(down, Results)
end
end
end
),
handle_command_result(Result);
handle_command({ok, {Args, ["reset"]}}) ->
{ok, Opts} = connection_opts(Args),
case maps:take(database, Opts) of
error ->
handle_command_result({error, "No database to reset~n"});
{Database, Opts1} ->
case
with_connection(
Opts1#{database => "postgres"},
fun(Conn) ->
if_ok(epgsql:equery(Conn, "drop database if exists " ++ Database))
end
)
of
ok ->
handle_command({ok, {Args, ["setup"]}});
Other ->
handle_command_result(Other)
end
end;
handle_command({ok, {Args, ["setup"]}}) ->
{ok, Opts} = connection_opts(Args),
case maps:take(database, Opts) of
error ->
handle_command_result({error, "No database to set up~n"});
{Database, Opts1} ->
case
with_connection(
Opts1#{database => "postgres"},
fun(Conn) ->
case
epgsql:squery(
Conn,
"select 1 from pg_database where datname = '" ++ Database ++ "'"
)
of
{ok, _, []} ->
if_ok(epgsql:squery(Conn, "create database " ++ Database));
Other ->
if_ok(Other)
end
end
)
of
ok ->
handle_command({ok, {Args, ["run"]}});
Other ->
handle_command_result(Other)
end
end;
handle_command({ok, {_, _}}) ->
ok.
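%% Example (illustrative): the command handlers above are all driven through
%% process/1, e.g.
%%   ok = dmt_db_migration:process(["-d", "migrations", "new", "init_schema"]),
%%   ok = dmt_db_migration:process(["-d", "migrations", "run"]).
%% "new" writes <unix-timestamp>-init_schema.sql containing "-- :up" and
%% "-- :down" sections; "run" applies every migration not yet recorded in the
%% __migrations table (see apply_migrations/3 below).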
%% Utils
-type command_result() ::
ok
| {ok, io:format(), [term()]}
| {error, string()}
| {error, io:format(), [term()]}.
-spec handle_command_result(command_result()) -> ok | {error, term()}.
handle_command_result(ok) ->
logger:info("All sql migrations completed"),
ok;
handle_command_result({ok, Fmt, Args}) ->
logger:info(Fmt, Args),
ok;
handle_command_result({error, Str}) ->
handle_command_result({error, Str, []});
handle_command_result({error, Fmt, Args}) ->
logger:error(Fmt, Args),
{error, Fmt}.
-spec with_connection(list() | map(), fun((pid()) -> command_result())) ->
command_result().
with_connection(Args, Fun) ->
case open_connection(Args) of
{ok, Conn} ->
Fun(Conn);
{error, Error} ->
{error, "Failed to connect to database: ~p~n", [Error]}
end.
connection_opts(Args) ->
connection_opts(Args, {env, "DATABASE_URL"}).
connection_opts(Args, {env, URLName}) ->
maybe_load_dot_env(dot_env(Args)),
case os:getenv(URLName) of
false -> {error, "Missing DATABASE_URL.~n"};
DatabaseUrl -> connection_opts(Args, {url, DatabaseUrl})
end;
connection_opts(_Args, {url, DatabaseUrl}) ->
case uri_string:parse(string:trim(DatabaseUrl)) of
{error, Error, Term} ->
{error, {Error, Term}};
Map = #{userinfo := UserPass, host := Host, path := Path} ->
{User, Pass} =
case string:split(UserPass, ":") of
[[]] -> {"postgres", ""};
[U] -> {U, ""};
[[], []] -> {"postgres", ""};
[U, P] -> {U, P}
end,
ConnectionOpts = #{
port => maps:get(port, Map, 5432),
username => User,
password => Pass,
host => Host,
database => string:slice(Path, 1)
},
case maps:get(query, Map, []) of
[] ->
{ok, ConnectionOpts};
"?" ++ QueryString ->
case uri_string:dissect_query(QueryString) of
[] ->
{ok, ConnectionOpts};
QueryList ->
case proplists:get_value("ssl", QueryList) of
undefined -> {ok, ConnectionOpts};
[] -> {ok, maps:put(ssl, true, ConnectionOpts)};
Value -> {ok, maps:put(ssl, list_to_atom(Value), ConnectionOpts)}
end
end
end
end.
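%% For the DATABASE_URL shipped in .env, for example,
%%   connection_opts([], {url, "postgresql://postgres:postgres@db/dmt"})
%% returns (port defaulted, leading "/" dropped from the path):
%%   {ok, #{host => "db", port => 5432, username => "postgres",
%%          password => "postgres", database => "dmt"}}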
-spec open_connection(list() | map()) -> {ok, epgsql:connection()} | {error, term()}.
open_connection(Args) when is_list(Args) ->
{ok, Opts} = connection_opts(Args),
open_connection(Opts);
open_connection(Opts) ->
epgsql:connect(Opts).
target_dir(Args) ->
case lists:keyfind(dir, 1, Args) of
false ->
".";
{dir, Dir} ->
_ = filelib:ensure_dir(Dir),
Dir
end.
maybe_load_dot_env(DotEnv) ->
case filelib:is_file(DotEnv) of
true -> envloader:load(DotEnv);
% NB: Ignore the missing file.
false -> ok
end.
dot_env(Args) ->
case lists:keyfind(env, 1, Args) of
false -> ".env";
{env, DotEnv} -> DotEnv
end.
-spec report_migrations(up | down, [{Version :: string(), ok | {error, term()}}]) -> ok.
report_migrations(_, []) ->
logger:warning("No migrations were run");
report_migrations(up, Results) ->
[logger:info("Applied ~s: ~p", [V, R]) || {V, R} <- Results],
ok;
report_migrations(down, Results) ->
[logger:info("Reverted ~s: ~p", [V, R]) || {V, R} <- Results],
ok.
-define(DRIVER, epgsql).
record_migration(up, Conn, V) ->
?DRIVER:equery(Conn, "INSERT INTO __migrations (id) VALUES ($1)", [V]);
record_migration(down, Conn, V) ->
?DRIVER:equery(Conn, "DELETE FROM __migrations WHERE id = $1", [V]).
apply_migrations(Type, Migrations, Conn) ->
Results = lists:foldl(
fun
(_, [{_, {error, _}} | _] = Acc) ->
Acc;
(Migration = {Version, _}, Acc) ->
case apply_migration(Type, Migration, Conn) of
ok -> [{Version, ok} | Acc];
{error, Error} -> [{Version, {error, Error}} | Acc]
end
end,
[],
Migrations
),
lists:reverse(Results).
apply_migration(Type, {Version, Migration}, Conn) ->
case eql:get_query(Type, Migration) of
{ok, Query} ->
case if_ok(?DRIVER:squery(Conn, Query)) of
ok ->
_ = record_migration(Type, Conn, Version),
ok;
{error, Error} ->
{error, Error}
end;
undefined ->
_ = record_migration(Type, Conn, Version),
ok
end.
if_ok(Rs) when is_list(Rs) ->
Result = lists:map(fun(R) -> if_ok(R) end, Rs),
case lists:keyfind(error, 1, Result) of
false -> ok;
Error -> Error
end;
if_ok({ok, _}) ->
ok;
if_ok({ok, _, _}) ->
ok;
if_ok({ok, _, _, _}) ->
ok;
if_ok({error, {error, error, _, _, Descr, _}}) ->
{error, binary_to_list(Descr)};
if_ok(Error) ->
{error, Error}.
available_migrations(Args) ->
Dir = target_dir(Args),
Files = filelib:wildcard(filename:join(Dir, "*.sql")),
lists:map(
fun(Filename) ->
{ok, Migs} = eql:compile(Filename),
{filename:rootname(Filename), Migs}
end,
lists:usort(Files)
).
applied_migrations(Args) when is_list(Args) ->
with_connection(
Args,
fun(Conn) ->
applied_migrations(Conn)
end
);
applied_migrations(Conn) when is_pid(Conn) ->
case ?DRIVER:squery(Conn, "SELECT id FROM __migrations ORDER by id ASC") of
{ok, _, Migs} ->
[binary_to_list(Mig) || {Mig} <- Migs];
{error, {error, error, <<"42P01">>, _, _, _}} ->
%% init migrations and restart
{ok, _, _} = ?DRIVER:squery(
Conn,
"CREATE TABLE __migrations ("
"id VARCHAR(255) PRIMARY KEY,"
"datetime TIMESTAMP DEFAULT CURRENT_TIMESTAMP)"
),
applied_migrations(Conn)
end.

@@ -0,0 +1,654 @@
-module(dmt_repository).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-include_lib("epgsql/include/epgsql.hrl").
%% API
-export([commit/3]).
-export([get_object/2]).
-export([get_local_versions/3]).
-export([get_global_versions/2]).
%%
get_object({version, V}, ObjectRef) ->
case get_target_object(ObjectRef, V) of
{ok, #{
global_version := GlobalVersion,
data := Data,
created_at := CreatedAt
}} ->
io:format("get_object Data ~p~n", [Data]),
{ok, #domain_conf_v2_VersionedObject{
global_version = GlobalVersion,
%% TODO implement local versions
local_version = 0,
object = Data,
created_at = CreatedAt
}};
{error, Reason} ->
{error, Reason}
end;
get_object({head, #domain_conf_v2_Head{}}, ObjectRef) ->
case get_latest_target_object(ObjectRef) of
{ok, #{
global_version := GlobalVersion,
data := Data,
created_at := CreatedAt
}} ->
io:format("get_object head Data ~p~n", [Data]),
{ok, #domain_conf_v2_VersionedObject{
global_version = GlobalVersion,
%% TODO implement local versions
local_version = 0,
object = Data,
created_at = CreatedAt
}};
{error, Reason} ->
{error, Reason}
end.
%% Retrieve local versions with pagination
get_local_versions(_Ref, _Limit, _ContinuationToken) ->
not_impl.
%% Retrieve global versions with pagination
get_global_versions(_Limit, _ContinuationToken) ->
not_impl.
assemble_operations(Commit) ->
try
lists:foldl(
fun assemble_operations_/2,
{[], #{}, []},
Commit#domain_conf_v2_Commit.ops
)
catch
{error, Error} ->
{error, Error}
end.
assemble_operations_(
Operation,
{InsertsAcc, UpdatesAcc, UpdatedObjectsAcc}
) ->
case Operation of
{insert, #domain_conf_v2_InsertOp{} = InsertOp} ->
{ok, NewObject} = dmt_object:new_object(InsertOp),
#{
tmp_id := TmpID,
references := Refers
} = NewObject,
Updates1 = update_objects_added_refs({temporary, TmpID}, Refers, UpdatesAcc),
io:format("~n {insert, #domain_conf_v2_InsertOp{} = InsertOp} ~p ~n", [Refers]),
{[NewObject | InsertsAcc], Updates1, UpdatedObjectsAcc};
{update, #domain_conf_v2_UpdateOp{targeted_ref = Ref} = UpdateOp} ->
case get_original_object_changes(UpdatesAcc, Ref) of
%% TODO Figure out how to stop several updates for the same object happening
Changes ->
{ok, ObjectUpdate} = dmt_object:update_object(UpdateOp, Changes),
io:format("~n {update, #domain_conf_v2_UpdateOp{targeted_ref = Ref} = UpdateOp} ~p ~n", [{Changes, ObjectUpdate}]),
UpdatesAcc1 = update_referenced_objects(Changes, ObjectUpdate, UpdatesAcc),
{InsertsAcc, UpdatesAcc1#{Ref => ObjectUpdate}, [Ref | UpdatedObjectsAcc]}
end;
{remove, #domain_conf_v2_RemoveOp{ref = Ref}} ->
#{
references := OriginalReferences
} = OG = get_original_object_changes(UpdatesAcc, Ref),
UpdatesAcc1 = update_objects_removed_refs(Ref, OriginalReferences, UpdatesAcc),
NewObjectState = dmt_object:remove_object(OG),
io:format("~n UpdatesAcc1#{Ref => NewObjectState} ~p ~n", [UpdatesAcc1#{Ref => NewObjectState}]),
{InsertsAcc, UpdatesAcc1#{Ref => NewObjectState}, [Ref | UpdatedObjectsAcc]}
end.
update_referenced_objects(OriginalObjectChanges, ObjectChanges, Updates) ->
#{
id := ObjectID,
references := OriginalReferences
} = OriginalObjectChanges,
#{references := NewVersionReferences} = ObjectChanges,
ORS = ordsets:from_list(OriginalReferences),
NVRS = ordsets:from_list(NewVersionReferences),
AddedRefs = ordsets:subtract(NVRS, ORS),
RemovedRefs = ordsets:subtract(ORS, NVRS),
Updates1 = update_objects_added_refs(ObjectID, AddedRefs, Updates),
update_objects_removed_refs(ObjectID, RemovedRefs, Updates1).
update_objects_added_refs(ObjectID, AddedRefs, Updates) ->
lists:foldl(
fun(Ref, Acc) ->
#{
id := UpdatedObjectID,
referenced_by := RefdBy0
} = OG = get_original_object_changes(Acc, Ref),
Acc#{
UpdatedObjectID =>
OG#{
referenced_by => [ObjectID | RefdBy0]
}
}
end,
Updates,
AddedRefs
).
update_objects_removed_refs(ObjectID, RemovedRefs, Updates) ->
lists:foldl(
fun(Ref, Acc) ->
#{
id := UpdatedObjectID,
referenced_by := RefdBy0
} = OG = get_original_object_changes(Acc, Ref),
RefdBy1 = ordsets:from_list(RefdBy0),
RefdBy2 = ordsets:del_element(ObjectID, RefdBy1),
Acc#{
UpdatedObjectID =>
OG#{
referenced_by => ordsets:to_list(RefdBy2)
}
}
end,
Updates,
RemovedRefs
).
get_original_object_changes(Updates, Ref) ->
case Updates of
#{Ref := Object} ->
Object;
_ ->
%% {ok, #{
%% id := ID,
%% type := Type,
%% referenced_by := RefdBy,
%% references := Refers,
%% data := Data
%% }} = get_latest_target_object(Ref),
%% %% NOTE this is done in order to decouple object type from object change type
%% #{
%% id => ID,
%% type => Type,
%% referenced_by => RefdBy,
%% references => Refers,
%% data => Data
%% }
{ok, Res} = get_latest_target_object(Ref),
{Type, _} = Ref,
Res#{
type => Type
}
end.
%% NOTE Add new tables here
-define(TABLES, [
category,
currency,
business_schedule,
calendar,
payment_method,
payout_method,
bank,
contract_template,
term_set_hierarchy,
payment_institution,
provider,
terminal,
inspector,
system_account_set,
external_account_set,
proxy,
globals,
cash_register_provider,
routing_rules,
bank_card_category,
criterion,
document_type,
payment_service,
payment_system,
bank_card_token_service,
mobile_op_user,
crypto_currency,
country,
trade_bloc,
identity_provider,
limit_config
]).
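%% NOTE Every table listed above is assumed to share the column layout the
%% queries below rely on: id, global_version, references_to, referenced_by,
%% data, is_active, created_at, plus a sequence column for types whose
%% numeric ids are generated (see check_if_force_id_required/2).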
commit(Version, Commit, CreatedBy) ->
{InsertObjects, UpdateObjects0, ChangedObjectIds} = assemble_operations(Commit),
case
epgsql_pool:transaction(
default_pool,
fun(Worker) ->
ok = check_versions_sql(Worker, ChangedObjectIds, Version),
NewVersion = get_new_version(Worker, CreatedBy),
PermanentIDsMaps = insert_objects(Worker, InsertObjects, NewVersion),
UpdateObjects1 = replace_tmp_ids_in_updates(UpdateObjects0, PermanentIDsMaps),
ok = update_objects(Worker, UpdateObjects1, NewVersion),
{ok, NewVersion, maps:values(PermanentIDsMaps)}
end
)
of
{ok, ResVersion, NewObjectsIDs} ->
NewObjects = lists:map(
fun(#{data := Data}) ->
Data
end,
get_target_objects(NewObjectsIDs, ResVersion)
),
{ok, ResVersion, NewObjects};
{error, {error, error, _, conflict_detected, Msg, _}} ->
{error, {conflict, Msg}};
{error, Error} ->
{error, Error}
end.
replace_tmp_ids_in_updates(UpdateObjects, PermanentIDsMaps) ->
maps:map(
fun(_ID, UpdateObject) ->
#{
referenced_by := ReferencedBy
} = UpdateObject,
NewReferencedBy = lists:map(
fun(Ref) ->
case Ref of
{temporary, TmpID} ->
maps:get(TmpID, PermanentIDsMaps);
_ ->
Ref
end
end,
ReferencedBy
),
UpdateObject#{
referenced_by => NewReferencedBy
}
end,
UpdateObjects
).
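%% Optimistic-concurrency guard: committing against version V is rejected
%% (object_update_too_old) if any changed object already has a row with
%% global_version > V, so concurrent commits cannot silently overwrite each
%% other.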
check_versions_sql(Worker, ChangedObjectIds, Version) ->
lists:foreach(
fun({ChangedObjectType, ChangedObjectRef0} = ChangedObjectId) ->
io:format("ChangedObjectRef0 ~p~n", [ChangedObjectRef0]),
ChangedObjectRef1 = to_string(ChangedObjectRef0),
Query0 =
io_lib:format("""
SELECT id, global_version
FROM ~p
WHERE id = $1
ORDER BY global_version DESC
LIMIT 1
""", [ChangedObjectType]),
case epgsql_pool:query(Worker, Query0, [ChangedObjectRef1]) of
{ok, _Columns, []} ->
throw({unknown_object_update, ChangedObjectId});
{ok, _Columns, [{ChangedObjectRef, MostRecentVersion}]} when MostRecentVersion > Version ->
throw({object_update_too_old, {ChangedObjectRef, MostRecentVersion}});
{ok, _Columns, [{_ChangedObjectRef, _MostRecentVersion}]} ->
ok;
{error, Reason} ->
throw({error, Reason})
end
end,
ChangedObjectIds
),
ok.
get_new_version(Worker, CreatedBy) ->
Query1 =
"""
INSERT INTO GLOBAL_VERSION (CREATED_BY)
VALUES ($1::uuid) RETURNING version;
""",
case epgsql_pool:query(Worker, Query1, [CreatedBy]) of
{ok, 1, _Columns, [{NewVersion}]} ->
NewVersion;
{error, Reason} ->
throw({error, Reason})
end.
insert_objects(Worker, InsertObjects, Version) ->
lists:foldl(
fun(InsertObject, Acc) ->
#{
tmp_id := TmpID,
type := Type,
forced_id := ForcedID,
references := References,
data := Data0
} = InsertObject,
{ID, Sequence} = get_insert_object_id(Worker, ForcedID, Type),
Data1 = give_data_id(Data0, ID),
ID = insert_object(Worker, Type, ID, Sequence, Version, References, Data1),
Acc#{TmpID => {Type, ID}}
end,
#{},
InsertObjects
).
insert_object(Worker, Type, ID0, Sequence, Version, References0, Data0) ->
ID1 = to_string(ID0),
Data1 = to_string(Data0),
References1 = lists:map(fun to_string/1, References0),
{Query, Params} =
case check_if_force_id_required(Worker, Type) of
true ->
Query0 =
io_lib:format("""
INSERT INTO ~p (id, global_version, references_to, referenced_by, data, is_active)
VALUES ($1, $2, $3, $4, $5, TRUE);
""", [Type]),
Params0 = [ID1, Version, References1, [], Data1],
{Query0, Params0};
false ->
Query1 =
io_lib:format("""
INSERT INTO ~p (id, sequence, global_version, references_to, referenced_by, data, is_active)
VALUES ($1, $2, $3, $4, $5, $6, TRUE);
""", [Type]),
Params1 = [ID1, Sequence, Version, References1, [], Data1],
{Query1, Params1}
end,
case epgsql_pool:query(Worker, Query, Params) of
{ok, 1} ->
ID0;
{error, Reason} ->
throw({error, Reason})
end.
give_data_id({Tag, Data}, Ref) ->
{struct, union, DomainObjects} = dmsl_domain_thrift:struct_info('DomainObject'),
{value, {_, _, {_, _, {_, ObjectName}}, Tag, _}} = lists:search(
fun({_, _, _, T, _}) ->
case T of
Tag ->
true;
_ ->
false
end
end,
DomainObjects
),
RecordName = dmsl_domain_thrift:record_name(ObjectName),
{_, _, [
FirstField,
SecondField
]} = dmsl_domain_thrift:struct_info(ObjectName),
First = get_object_field(FirstField, Data, Ref),
Second = get_object_field(SecondField, Data, Ref),
{Tag, {RecordName, First, Second}}.
get_object_field({_, _, _, ref, _}, _Data, Ref) ->
Ref;
get_object_field({_, _, _, data, _}, Data, _Ref) ->
Data.
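%% Illustrative example (record names as in the test suite below):
%%   give_data_id({category, #domain_Category{name = <<"n">>}},
%%                #domain_CategoryRef{id = 1337})
%% resolves 'CategoryObject' from the DomainObject union and returns
%%   {category, #domain_CategoryObject{ref = #domain_CategoryRef{id = 1337},
%%                                     data = #domain_Category{name = <<"n">>}}}.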
update_objects(Worker, UpdateObjects, Version) ->
io:format("~n update_objects UpdateObjects ~p~n", [UpdateObjects]),
maps:foreach(
fun({_, ID}, UpdateObject) ->
io:format("~n update_objects ID ~p~n", [ID]),
#{
id := ID,
type := Type,
references := References,
referenced_by := ReferencedBy,
data := Data,
is_active := IsActive
} = UpdateObject,
ok = update_object(Worker, Type, ID, References, ReferencedBy, IsActive, Data, Version)
end,
UpdateObjects
).
update_object(Worker, Type, ID0, References0, ReferencedBy0, IsActive, Data0, Version) ->
Data1 = to_string(Data0),
ID1 = to_string(ID0),
References1 = lists:map(fun to_string/1, References0),
ReferencedBy1 = lists:map(fun to_string/1, ReferencedBy0),
Query =
io_lib:format("""
INSERT INTO ~p
(id, global_version, references_to, referenced_by, data, is_active)
VALUES ($1, $2, $3, $4, $5, $6);
""", [Type]),
Params = [ID1, Version, References1, ReferencedBy1, Data1, IsActive],
case epgsql_pool:query(Worker, Query, Params) of
{ok, 1} ->
ok;
{error, Reason} ->
throw({error, Reason})
end.
get_insert_object_id(Worker, undefined, Type) ->
%% Check if sequence column exists in table
%% -- if it doesn't, then raise exception
case check_if_force_id_required(Worker, Type) of
true ->
throw({error, {object_type_requires_forced_id, Type}});
false ->
{ok, LastSequenceInType} = get_last_sequence(Worker, Type),
case get_new_object_id(Worker, LastSequenceInType, Type) of
{undefined, Seq} ->
throw({error, {free_id_not_found, Seq, Type}});
{NewID, NewSequence} ->
{NewID, NewSequence}
end
end;
get_insert_object_id(Worker, {Type, ForcedID}, Type) ->
case check_if_id_exists(Worker, ForcedID, Type) of
true ->
throw({error, {forced_id_exists, ForcedID}});
false ->
{ForcedID, null}
end.
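%% A table that has no 'sequence' column cannot generate numeric ids, so
%% inserts into it must carry a forced id; the check below consults
%% information_schema for the column.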
check_if_force_id_required(Worker, Type) ->
Query = """
SELECT column_name
FROM information_schema.columns
WHERE table_name = $1 AND column_name = 'sequence';
""",
case epgsql_pool:query(Worker, Query, [Type]) of
{ok, _Columns, []} ->
true;
{ok, _Columns, Rows} ->
lists:all(
fun(Row) ->
case Row of
{<<"sequence">>} ->
false;
_ ->
true
end
end,
Rows
);
{error, Reason} ->
throw({error, Reason})
end.
get_last_sequence(Worker, Type) ->
Query = io_lib:format("""
SELECT MAX(sequence)
FROM ~p;
""", [Type]),
case epgsql_pool:query(Worker, Query) of
{ok, _Columns, [{null}]} ->
{ok, 0};
{ok, _Columns, [{LastID}]} ->
{ok, LastID};
{error, Reason} ->
throw({error, Reason})
end.
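%% Probes at most 100 candidate sequence values past the current maximum and
%% returns the first generated id that does not collide with an existing row;
%% otherwise the caller fails with free_id_not_found.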
get_new_object_id(Worker, LastSequenceInType, Type) ->
genlib_list:foldl_while(
fun(_I, {ID, Sequence}) ->
NextSequence = Sequence + 1,
NewID = dmt_object_id:get_numerical_object_id(Type, NextSequence),
case check_if_id_exists(Worker, NewID, Type) of
false ->
{halt, {NewID, NextSequence}};
true ->
{cont, {ID, NextSequence}}
end
end,
{undefined, LastSequenceInType},
lists:seq(1, 100)
).
check_if_id_exists(Worker, ID0, Type0) ->
%% Type1 = atom_to_list(Type0),
Query = io_lib:format("""
SELECT id
FROM ~p
WHERE id = $1;
""", [Type0]),
ID1 = to_string(ID0),
case epgsql_pool:query(Worker, Query, [ID1]) of
{ok, _Columns, []} ->
false;
{ok, _Columns, [{ID1}]} ->
true;
{error, Reason} ->
throw({error, Reason})
end.
get_target_objects(Refs, Version) ->
get_target_objects(default_pool, Refs, Version).
get_target_objects(Worker, Refs, Version) ->
lists:map(
fun(Ref) ->
{ok, Obj} = get_target_object(Worker, Ref, Version),
Obj
end,
Refs
).
get_target_object(Ref, Version) ->
get_target_object(default_pool, Ref, Version).
get_target_object(Worker, Ref, Version) ->
{Type, ID} = Ref,
ID0 = to_string(ID),
io:format("~n get_target_object ID ~p ID0 ~p and Version ~p~n", [ID, ID0, Version]),
Request = io_lib:format("""
SELECT id,
global_version,
references_to,
referenced_by,
data,
is_active,
created_at
FROM ~p
WHERE id = $1 AND global_version <= $2
ORDER BY global_version DESC
LIMIT 1
""", [Type]),
case epgsql_pool:query(Worker, Request, [ID0, Version]) of
{ok, _Columns, []} ->
{error, {object_not_found, Ref, Version}};
{ok, Columns, Rows} ->
io:format("get_target_object Res ~p ~n", [{Columns, Rows}]),
[Result | _] = to_marshalled_maps(Columns, Rows),
{ok, Result}
end.
get_latest_target_object(Ref) ->
{Type, ID} = Ref,
ID0 = to_string(ID),
Request = io_lib:format("""
SELECT id,
global_version,
references_to,
referenced_by,
data,
is_active,
created_at
FROM ~p
WHERE id = $1
ORDER BY global_version DESC
LIMIT 1
""", [Type]),
case epgsql_pool:query(default_pool, Request, [ID0]) of
{ok, _Columns, []} ->
{error, {object_not_found, Ref}};
{ok, Columns, Rows} ->
[Result | _] = to_marshalled_maps(Columns, Rows),
{ok, Result}
end.
to_marshalled_maps(Columns, Rows) ->
to_maps(Columns, Rows, fun marshall_object/1).
to_maps(Columns, Rows, TransformRowFun) ->
ColNumbers = erlang:length(Columns),
Seq = lists:seq(1, ColNumbers),
lists:map(
fun(Row) ->
Data = lists:foldl(
fun(Pos, Acc) ->
#column{name = Field, type = Type} = lists:nth(Pos, Columns),
Acc#{Field => convert(Type, erlang:element(Pos, Row))}
end,
#{},
Seq
),
TransformRowFun(Data)
end,
Rows
).
%% for reference https://github.com/epgsql/epgsql#data-representation
convert(timestamp, Value) ->
datetime_to_binary(Value);
convert(timestamptz, Value) ->
datetime_to_binary(Value);
convert(_Type, Value) ->
Value.
datetime_to_binary({Date, {Hour, Minute, Second}}) when is_float(Second) ->
datetime_to_binary({Date, {Hour, Minute, trunc(Second)}});
datetime_to_binary(DateTime) ->
UnixTime = genlib_time:daytime_to_unixtime(DateTime),
genlib_rfc3339:format(UnixTime, second).
marshall_object(#{
<<"id">> := ID,
<<"global_version">> := Version,
<<"references_to">> := ReferencesTo,
<<"referenced_by">> := ReferencedBy,
<<"data">> := Data,
<<"created_at">> := CreatedAt,
<<"is_active">> := IsActive
}) ->
dmt_object:just_object(
from_string(ID),
Version,
lists:map(fun from_string/1, ReferencesTo),
lists:map(fun from_string/1, ReferencedBy),
from_string(Data),
CreatedAt,
IsActive
).
to_string(A0) ->
A1 = term_to_binary(A0),
base64:encode_to_string(A1).
from_string(B0) ->
B1 = base64:decode(B0),
binary_to_term(B1).
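%% NOTE to_string/from_string above mean that ids, reference lists and object
%% data are stored as base64-encoded term_to_binary/1 output, i.e. the table
%% columns hold opaque serialized Erlang terms.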

@@ -0,0 +1,59 @@
-module(dmt_repository_client_handler).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-export([handle_function/4]).
handle_function(Function, Args, WoodyContext0, Options) ->
DefaultDeadline = woody_deadline:from_timeout(default_handling_timeout(Options)),
WoodyContext = dmt_api_woody_utils:ensure_woody_deadline_set(WoodyContext0, DefaultDeadline),
do_handle_function(Function, Args, WoodyContext, Options).
default_handling_timeout(#{default_handling_timeout := Timeout}) ->
Timeout.
do_handle_function('CheckoutObject', {VersionRef, ObjectRef}, _Context, _Options) ->
%% Fetch the object based on VersionReference and Reference
case dmt_repository:get_object(VersionRef, ObjectRef) of
{ok, Object} ->
{ok, Object};
{error, global_version_not_found} ->
woody_error:raise(business, #domain_conf_v2_GlobalVersionNotFound{});
{error, object_not_found} ->
woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end;
do_handle_function('GetLocalVersions', {Request}, _Context, _Options) ->
#domain_conf_v2_GetLocalVersionsRequest{
ref = Ref,
limit = Limit,
continuation_token = ContinuationToken
} = Request,
%% Retrieve local versions with pagination
case dmt_repository:get_local_versions(Ref, Limit, ContinuationToken) of
{ok, Versions, NewToken} ->
{ok, #domain_conf_v2_GetVersionsResponse{
result = Versions,
continuation_token = NewToken
}};
{error, object_not_found} ->
woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end;
do_handle_function('GetGlobalVersions', {Request}, _Context, _Options) ->
#domain_conf_v2_GetGlobalVersionsRequest{
limit = Limit,
continuation_token = ContinuationToken
} = Request,
%% Retrieve global versions with pagination
case dmt_repository:get_global_versions(Limit, ContinuationToken) of
{ok, Versions, NewToken} ->
{ok, #domain_conf_v2_GetVersionsResponse{
result = Versions,
continuation_token = NewToken
}};
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end.

@@ -0,0 +1,61 @@
-module(dmt_repository_handler).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
%% API
-export([handle_function/4]).
handle_function(Function, Args, WoodyContext0, Options) ->
DefaultDeadline = woody_deadline:from_timeout(default_handling_timeout(Options)),
WoodyContext = dmt_api_woody_utils:ensure_woody_deadline_set(WoodyContext0, DefaultDeadline),
do_handle_function(Function, Args, WoodyContext, Options).
do_handle_function('Commit', {Version, Commit, CreatedBy}, _Context, _Options) ->
case dmt_repository:commit(Version, Commit, CreatedBy) of
{ok, NextVersion, NewObjects} ->
{ok, #domain_conf_v2_CommitResponse{
version = NextVersion,
new_objects = ordsets:from_list(NewObjects)
}};
{error, {operation_error, Error}} ->
woody_error:raise(business, handle_operation_error(Error));
{error, version_not_found} ->
woody_error:raise(business, #domain_conf_v2_VersionNotFound{});
{error, {head_mismatch, LatestVersion}} ->
woody_error:raise(business, #domain_conf_v2_ObsoleteCommitVersion{latest_version = LatestVersion});
{error, migration_in_progress} ->
woody_error:raise(system, {internal, resource_unavailable, <<"Migration in progress. Please, stand by.">>})
end.
default_handling_timeout(#{default_handling_timeout := Timeout}) ->
Timeout.
handle_operation_error({conflict, Conflict}) ->
#domain_conf_v2_OperationConflict{
conflict = handle_operation_conflict(Conflict)
};
handle_operation_error({invalid, Invalid}) ->
#domain_conf_v2_OperationInvalid{
errors = handle_operation_invalid(Invalid)
}.
handle_operation_conflict({object_already_exists, Ref}) ->
{object_already_exists, #domain_conf_v2_ObjectAlreadyExistsConflict{object_ref = Ref}};
handle_operation_conflict({object_not_found, Ref}) ->
{object_not_found, #domain_conf_v2_ObjectNotFoundConflict{object_ref = Ref}};
handle_operation_conflict({object_reference_mismatch, Ref}) ->
{object_reference_mismatch, #domain_conf_v2_ObjectReferenceMismatchConflict{object_ref = Ref}}.
handle_operation_invalid({objects_not_exist, Refs}) ->
[
{object_not_exists, #domain_conf_v2_NonexistantObject{
object_ref = Ref,
referenced_by = ReferencedBy
}}
|| {Ref, ReferencedBy} <- Refs
];
handle_operation_invalid({object_reference_cycles, Cycles}) ->
[
{object_reference_cycle, #domain_conf_v2_ObjectReferenceCycle{cycle = Cycle}}
|| Cycle <- Cycles
].

apps/dmt/src/dmt_sup.erl Normal file

@@ -0,0 +1,144 @@
%%%-------------------------------------------------------------------
%% @doc dmt top level supervisor.
%% @end
%%%-------------------------------------------------------------------
-module(dmt_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-export([get_service/1]).
-define(SERVER, ?MODULE).
-define(APP, dmt).
-define(DEFAULT_DB, default_db).
start_link() ->
supervisor:start_link({local, ?APP}, ?MODULE, []).
%% sup_flags() = #{strategy => strategy(), % optional
%% intensity => non_neg_integer(), % optional
%% period => pos_integer()} % optional
%% child_spec() = #{id => child_id(), % mandatory
%% start => mfargs(), % mandatory
%% restart => restart(), % optional
%% shutdown => shutdown(), % optional
%% type => worker(), % optional
%% modules => modules()} % optional
init(_) ->
ok = dbinit(),
{ok, IP} = inet:parse_address(genlib_app:env(?APP, ip, "::")),
HealthCheck = enable_health_logging(genlib_app:env(?APP, health_check, #{})),
EventHandlers = genlib_app:env(?APP, woody_event_handlers, [scoper_woody_event_handler]),
API = woody_server:child_spec(
?MODULE,
#{
ip => IP,
port => genlib_app:env(?APP, port, 8022),
transport_opts => genlib_app:env(?APP, transport_opts, #{}),
protocol_opts => genlib_app:env(?APP, protocol_opts, #{}),
event_handler => EventHandlers,
handlers => get_repository_handlers(),
additional_routes => [
get_prometheus_route(),
erl_health_handle:get_route(HealthCheck)
]
}
),
SupFlags = #{
strategy => one_for_one,
intensity => 10,
period => 60
},
ChildSpecs = [API],
{ok, {SupFlags, ChildSpecs}}.
dbinit() ->
WorkDir = get_env_var("WORK_DIR"),
_ = set_database_url(),
MigrationsPath = WorkDir ++ "/migrations",
Cmd = "run",
case dmt_db_migration:process(["-d", MigrationsPath, Cmd]) of
ok -> ok;
{error, Reason} -> throw({migrations_error, Reason})
end.
set_database_url() ->
{ok, #{
?DEFAULT_DB := #{
host := PgHost,
port := PgPort,
username := PgUser,
password := PgPassword,
database := DbName
}
}} = application:get_env(epg_connector, databases),
%% DATABASE_URL=postgresql://postgres:postgres@db/dmtv2
PgPortStr = erlang:integer_to_list(PgPort),
Value =
"postgresql://" ++ PgUser ++ ":" ++ PgPassword ++ "@" ++ PgHost ++ ":" ++ PgPortStr ++ "/" ++ DbName,
true = os:putenv("DATABASE_URL", Value).
%% internal functions
get_env_var(Name) ->
case os:getenv(Name) of
false -> throw({os_env_required, Name});
V -> V
end.
get_repository_handlers() ->
DefaultTimeout = genlib_app:env(?APP, default_woody_handling_timeout, timer:seconds(30)),
[
get_handler(repository, #{
repository => dmt_repository_handler,
default_handling_timeout => DefaultTimeout
}),
get_handler(repository_client, #{
repository => dmt_repository_client_handler,
default_handling_timeout => DefaultTimeout
}),
get_handler(user_op, #{
repository => dmt_user_op_handler,
default_handling_timeout => DefaultTimeout
})
].
-spec get_handler(repository | repository_client | state_processor, woody:options()) ->
woody:http_handler(woody:th_handler()).
get_handler(repository, Options) ->
{"/v1/domain/repository", {
get_service(repository),
{dmt_repository_handler, Options}
}};
get_handler(repository_client, Options) ->
{"/v1/domain/repository_client", {
get_service(repository_client),
{dmt_repository_client_handler, Options}
}};
get_handler(user_op, Options) ->
{"/v1/domain/user_op", {
get_service(user_op),
{dmt_user_op_handler, Options}
}}.
get_service(repository) ->
{dmsl_domain_conf_v2_thrift, 'Repository'};
get_service(repository_client) ->
{dmsl_domain_conf_v2_thrift, 'RepositoryClient'};
get_service(user_op) ->
{dmsl_domain_conf_v2_thrift, 'UserOpManagement'}.
-spec enable_health_logging(erl_health:check()) -> erl_health:check().
enable_health_logging(Check) ->
EvHandler = {erl_health_event_handler, []},
maps:map(fun(_, V = {_, _, _}) -> #{runner => V, event_handler => EvHandler} end, Check).
-spec get_prometheus_route() -> {iodata(), module(), _Opts :: any()}.
get_prometheus_route() ->
{"/metrics/[:registry]", prometheus_cowboy2_handler, []}.

@@ -0,0 +1,50 @@
-module(dmt_user_op).
%% Existing includes and exports
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-include_lib("epgsql/include/epgsql.hrl").
-define(POOL_NAME, user_op_pool).
-export([
insert_user/2,
get_user/1,
delete_user/1
]).
%% Insert a new user
insert_user(Name, Email) ->
Sql = "INSERT INTO op_user (name, email) VALUES ($1, $2) returning id",
Params = [Name, Email],
case epgsql_pool:query(?POOL_NAME, Sql, Params) of
{ok, 1, _Columns, [{ID}]} ->
{ok, ID};
{error, Reason} ->
{error, Reason}
end.
%% Retrieve a user by ID
get_user(UserOpID) ->
Sql = "SELECT id, name, email FROM op_user WHERE id = $1::uuid",
Params = [UserOpID],
case epgsql_pool:query(?POOL_NAME, Sql, Params) of
{ok, _Columns, [{ID, Name, Email}]} ->
{ok, #domain_conf_v2_UserOp{id = ID, name = Name, email = Email}};
{ok, _, []} ->
{error, user_not_found};
{error, Reason} ->
{error, Reason}
end.
%% Delete a user by ID
delete_user(UserOpID) ->
Sql = "DELETE FROM op_user WHERE id = $1::uuid",
Params = [UserOpID],
case epgsql_pool:query(?POOL_NAME, Sql, Params) of
{ok, 0} ->
{error, user_not_found};
{ok, 1} ->
ok;
{error, Reason} ->
{error, Reason}
end.

@@ -0,0 +1,45 @@
%% File: ./dmt/src/dmt_user_op_handler.erl
-module(dmt_user_op_handler).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
%% API
-export([handle_function/4]).
handle_function(Function, Args, WoodyContext0, Options) ->
DefaultDeadline = woody_deadline:from_timeout(default_handling_timeout(Options)),
WoodyContext = dmt_api_woody_utils:ensure_woody_deadline_set(WoodyContext0, DefaultDeadline),
do_handle_function(Function, Args, WoodyContext, Options).
default_handling_timeout(#{default_handling_timeout := Timeout}) ->
Timeout.
%% Implement the Create function
do_handle_function('Create', {Params}, _Context, _Options) ->
#domain_conf_v2_UserOpParams{email = Email, name = Name} = Params,
%% Insert into op_user table
case dmt_user_op:insert_user(Name, Email) of
{ok, ID} ->
{ok, #domain_conf_v2_UserOp{id = ID, email = Email, name = Name}};
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end;
do_handle_function('Get', {UserOpID}, _Context, _Options) ->
case dmt_user_op:get_user(UserOpID) of
{ok, #domain_conf_v2_UserOp{id = ID, email = Email, name = Name}} ->
{ok, #domain_conf_v2_UserOp{id = ID, email = Email, name = Name}};
{error, user_not_found} ->
woody_error:raise(business, #domain_conf_v2_UserOpNotFound{});
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end;
do_handle_function('Delete', {UserOpID}, _Context, _Options) ->
case dmt_user_op:delete_user(UserOpID) of
ok ->
{ok, ok};
{error, user_not_found} ->
woody_error:raise(business, #domain_conf_v2_UserOpNotFound{});
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end.

@@ -0,0 +1,46 @@
-module(dmt_client).
-include_lib("opentelemetry_api/include/otel_tracer.hrl").
-include_lib("opentelemetry_api/include/opentelemetry.hrl").
%% API
-export([
checkout_object/3,
get_local_versions/2,
get_global_versions/2,
commit/4
]).
-export([
create_user_op/2,
get_user_op/2,
delete_user_op/2
]).
checkout_object(VersionRef, ObjectRef, Client) ->
Args = [VersionRef, ObjectRef],
dmt_client_api:call(repository_client, 'CheckoutObject', Args, Client).
get_local_versions(Request, Client) ->
Args = [Request],
dmt_client_api:call(repository_client, 'GetLocalVersions', Args, Client).
get_global_versions(Request, Client) ->
Args = [Request],
dmt_client_api:call(repository_client, 'GetGlobalVersions', Args, Client).
commit(Version, Commit, Author, Client) ->
Args = [Version, Commit, Author],
dmt_client_api:call(repository, 'Commit', Args, Client).
create_user_op(Params, Client) ->
Args = [Params],
dmt_client_api:call(user_op, 'Create', Args, Client).
get_user_op(ID, Client) ->
Args = [ID],
dmt_client_api:call(user_op, 'Get', Args, Client).
delete_user_op(ID, Client) ->
Args = [ID],
dmt_client_api:call(user_op, 'Delete', Args, Client).
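%% Usage sketch (mirrors the integration suite later in this commit):
%%   Client = dmt_client_api:new(woody_context:new()),
%%   {ok, #domain_conf_v2_VersionedObject{object = Object}} =
%%       dmt_client:checkout_object({version, Version}, {proxy, ProxyRef}, Client).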

@@ -0,0 +1,38 @@
-module(dmt_client_api).
-export([new/1]).
-export([call/4]).
-export_type([t/0]).
%%
-type t() :: woody_context:ctx().
-spec new(woody_context:ctx()) -> t().
new(Context) ->
Context.
-spec call(Name :: atom(), woody:func(), [any()], t()) -> {ok, _Response} | {exception, _} | {error, _}.
call(ServiceName, Function, Args, Context) ->
Service = dmt_sup:get_service(ServiceName),
Request = {Service, Function, list_to_tuple(Args)},
Opts = get_opts(ServiceName),
try
woody_client:call(Request, Opts, Context)
catch
error:Error:ST ->
{error, {Error, ST}}
end.
get_opts(ServiceName) ->
EventHandlerOpts = genlib_app:env(dmt, scoper_event_handler_options, #{}),
Opts0 = #{
event_handler => {scoper_woody_event_handler, EventHandlerOpts}
},
case maps:get(ServiceName, genlib_app:env(dmt, services), undefined) of
#{} = Opts ->
maps:merge(Opts, Opts0);
_ ->
Opts0
end.

@@ -0,0 +1,169 @@
-module(dmt_ct_helper).
-export([start_app/1]).
-export([start_app/2]).
-export([start_apps/1]).
-export([cfg/2]).
-export([create_client/0]).
-export([create_client/1]).
-export([cleanup_db/0]).
-include_lib("damsel/include/dmsl_base_thrift.hrl").
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-export_type([config/0]).
-export_type([test_case_name/0]).
-export_type([group_name/0]).
%%
-type app_name() :: atom().
-spec start_app(app_name()) -> {[app_name()], map()}.
start_app(scoper = AppName) ->
{
start_app(AppName, [
{storage, scoper_storage_logger}
]),
#{}
};
start_app(woody = AppName) ->
{
start_app(AppName, [
{acceptors_pool_size, 4}
]),
#{}
};
start_app(dmt = AppName) ->
{
start_app(AppName, [
{host, <<"dominant-v2">>},
{port, 8022},
{scoper_event_handler_options, #{
event_handler_opts => #{
formatter_opts => #{
max_length => 1000
}
}
}},
{services, #{
repository => #{
url => <<"http://dominant-v2:8022/v1/domain/repository">>
},
repository_client => #{
url => <<"http://dominant-v2:8022/v1/domain/repository_client">>
},
user_op => #{
url => <<"http://dominant-v2:8022/v1/domain/user_op">>
}
}}
]),
#{}
};
start_app(epg_connector = AppName) ->
{
start_app(AppName, [
{databases, #{
default_db => #{
host => "db",
port => 5432,
username => "postgres",
password => "postgres",
database => "dmt"
}
}},
{pools, #{
default_pool => #{
database => default_db,
size => 10
},
user_op_pool => #{
database => default_db,
size => 10
}
}}
]),
#{}
};
start_app(AppName) ->
{genlib_app:start_application(AppName), #{}}.
-spec start_app(app_name(), list()) -> [app_name()].
start_app(cowboy = AppName, Env) ->
#{
listener_ref := Ref,
acceptors_count := Count,
transport_opts := TransOpt,
proto_opts := ProtoOpt
} = Env,
{ok, _} = cowboy:start_clear(Ref, [{num_acceptors, Count} | TransOpt], ProtoOpt),
[AppName];
start_app(AppName, Env) ->
genlib_app:start_application_with(AppName, Env).
-spec start_apps([app_name() | {app_name(), list()}]) -> {[app_name()], map()}.
start_apps(Apps) ->
lists:foldl(
fun
({AppName, Env}, {AppsAcc, RetAcc}) ->
{lists:reverse(start_app(AppName, Env)) ++ AppsAcc, RetAcc};
(AppName, {AppsAcc, RetAcc}) ->
{Apps0, Ret0} = start_app(AppName),
{lists:reverse(Apps0) ++ AppsAcc, maps:merge(Ret0, RetAcc)}
end,
{[], #{}},
Apps
).
-type config() :: [{atom(), term()}].
-type test_case_name() :: atom().
-type group_name() :: atom().
-spec cfg(atom(), config()) -> term().
cfg(Key, Config) ->
case lists:keyfind(Key, 1, Config) of
{Key, V} -> V;
_ -> undefined
end.
%%
-define(ROOT_URL, "http://dominant-v2:8022").
-spec create_client() -> dmt_client_api:t().
create_client() ->
create_client_w_context(woody_context:new()).
%% {?ROOT_URL, create_client_w_context(woody_context:new())}.
-spec create_client(woody:trace_id()) -> dmt_client_api:t().
create_client(TraceID) ->
create_client_w_context(woody_context:new(TraceID)).
%% {?ROOT_URL, create_client_w_context(woody_context:new(TraceID))}.
create_client_w_context(WoodyCtx) ->
dmt_client_api:new(WoodyCtx).
-spec cleanup_db() -> ok.
cleanup_db() ->
Query = """
DO $$
DECLARE
r RECORD;
BEGIN
-- Loop through all tables in the current schema
FOR r IN (
SELECT table_name
FROM information_schema.tables
WHERE table_schema='public'
AND NOT table_name = '__migrations'
) LOOP
-- Execute the TRUNCATE command on each table
EXECUTE 'TRUNCATE TABLE ' || quote_ident(r.table_name) || ' RESTART IDENTITY CASCADE';
END LOOP;
END $$;
""",
{ok, _, _} = epgsql_pool:query(default_pool, Query),
ok.
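%% Truncates every table in the public schema except __migrations, so each
%% test case starts from an empty but fully migrated database.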

@@ -0,0 +1,371 @@
-module(dmt_integration_test_SUITE).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
%% API
-export([
init_per_suite/1,
end_per_suite/1,
init_per_group/2,
end_per_group/2,
end_per_testcase/2,
all/0,
groups/0
]).
-export([
%% UserOpManagement Tests
create_user_op_test/1,
get_user_op_test/1,
delete_user_op_test/1
]).
-export([
%% Repository Tests
insert_object_forced_id_success_test/1,
insert_object_sequence_id_success_test/1,
insert_remove_referencing_object_success_test/1,
update_object_success_test/1
]).
-export([
%% RepositoryClient Tests
]).
%% Setup and Teardown
%% Initialize per suite
init_per_suite(Config) ->
{Apps, _Ret} = dmt_ct_helper:start_apps([woody, scoper, epg_connector, dmt]),
ApiClient = dmt_ct_helper:create_client(),
[{client, ApiClient}, {apps, Apps} | Config].
%% Cleanup after suite
end_per_suite(_Config) ->
dmt_ct_helper:cleanup_db(),
ok.
%% Define all test cases
all() ->
[
{group, create_user_op_test},
{group, repository_tests}
%% {group, repository_client_tests}
].
%% Define test groups
groups() ->
[
{create_user_op_test, [parallel], [
create_user_op_test,
get_user_op_test,
delete_user_op_test
]},
{repository_tests, [], [
insert_object_forced_id_success_test,
insert_object_sequence_id_success_test,
insert_remove_referencing_object_success_test,
update_object_success_test
]},
{repository_client_tests, [], [
]}
].
init_per_group(repository_client_tests, C) ->
Client = dmt_ct_helper:cfg(client, C),
[
{user_op_id, create_user_op(<<"repository_client_tests@tests">>, Client)}
| C
];
init_per_group(_, C) ->
C.
end_per_group(_, _C) ->
ok.
end_per_testcase(_, _C) ->
dmt_ct_helper:cleanup_db(),
ok.
%% Test Cases
%% UserOpManagement Tests
create_user_op_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
UserOpParams = #domain_conf_v2_UserOpParams{
email = <<"create_user_op_test@test">>,
name = <<"some_name">>
},
{ok, _} = dmt_client:create_user_op(UserOpParams, Client).
get_user_op_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
Email = <<"get_user_op_test">>,
UserOpID = create_user_op(Email, Client),
{ok, #domain_conf_v2_UserOp{
email = Email1
}} = dmt_client:get_user_op(UserOpID, Client),
?assertEqual(Email, Email1).
delete_user_op_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
Email = <<"delete_user_op_test">>,
UserOpID = create_user_op(Email, Client),
{ok, ok} = dmt_client:delete_user_op(UserOpID, Client),
{exception, #domain_conf_v2_UserOpNotFound{}} =
dmt_client:get_user_op(UserOpID, Client).
%% Repository tests
insert_remove_referencing_object_success_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
Email = <<"insert_remove_referencing_object_success_test">>,
UserOpID = create_user_op(Email, Client),
Revision1 = 0,
Commit1 = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {proxy, #domain_ProxyDefinition{
name = <<"proxy">>,
description = <<"proxy_description">>,
url = <<"http://someurl">>,
options = #{}
}}
}}
]
},
{ok, #domain_conf_v2_CommitResponse{
version = Revision2,
new_objects = [
{proxy, #domain_ProxyObject{
ref = ProxyRef
}}
]
}} = dmt_client:commit(Revision1, Commit1, UserOpID, Client),
Commit2 = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {provider, #domain_Provider{
name = <<"name">>,
description = <<"description">>,
proxy = #domain_Proxy{
ref = ProxyRef,
additional = #{}
}
}}
}}
]
},
{ok, #domain_conf_v2_CommitResponse{
version = Revision3,
new_objects = [
{provider, #domain_ProviderObject{
ref = _ProviderRef
}}
]
}} = dmt_client:commit(Revision2, Commit2, UserOpID, Client),
%% try to remove proxy
Commit3 = #domain_conf_v2_Commit{
ops = [
{remove, #domain_conf_v2_RemoveOp{
ref = {proxy, ProxyRef}
}}
]
},
{ok, _} = dmt_client:commit(Revision3, Commit3, UserOpID, Client).
%% FIXME reference collecting doesn't work. Need to fix ASAP
%%
%%%% try to remove provider
%% Commit4 = #domain_conf_v2_Commit{
%% ops = [
%% {remove, #domain_conf_v2_RemoveOp{
%% ref = {provider, ProviderRef}
%% }}
%% ]
%% },
%% {ok, #domain_conf_v2_CommitResponse{
%% version = Revision4
%% }} = dmt_client:commit(Revision3, Commit4, UserOpID, Client),
%%
%%%% try to remove proxy again
%% {ok, #domain_conf_v2_CommitResponse{}} =
%% dmt_client:commit(Revision4, Commit3, UserOpID, Client).
insert_object_sequence_id_success_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
Email = <<"insert_object_sequence_id_success_test">>,
UserOpID = create_user_op(Email, Client),
%% Insert a test object
Revision = 0,
Category = #domain_Category{
name = <<"name1">>,
description = <<"description1">>
},
Commit = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {category, Category}
}},
{insert, #domain_conf_v2_InsertOp{
object = {category, Category}
}}
]
},
{ok, #domain_conf_v2_CommitResponse{
new_objects = [
{category, #domain_CategoryObject{
ref = #domain_CategoryRef{id = ID1}
}},
{category, #domain_CategoryObject{
ref = #domain_CategoryRef{id = ID2}
}}
]
}} = dmt_client:commit(Revision, Commit, UserOpID, Client),
?assertMatch(true, is_in_sequence(ID1, ID2)).
is_in_sequence(N1, N2) when N1 + 1 =:= N2 ->
true;
is_in_sequence(N1, N2) when N1 =:= N2 + 1 ->
true;
is_in_sequence(N1, N2) ->
{false, N1, N2}.
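%% Both orderings are accepted: the two categories are inserted in a single
%% commit and new_objects is not asserted to preserve operation order, so the
%% test only checks that the generated ids are adjacent.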
%% Test successful insert with forced id
insert_object_forced_id_success_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
Email = <<"insert_object_forced_id_success_test">>,
UserOpID = create_user_op(Email, Client),
%% Insert a test object
Revision = 0,
CategoryRef = #domain_CategoryRef{id = 1337},
ForcedRef = {category, CategoryRef},
Category = #domain_Category{
name = <<"name">>,
description = <<"description">>
},
Commit = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object =
{category, Category},
force_ref = ForcedRef
}}
]
},
{ok, #domain_conf_v2_CommitResponse{
new_objects = NewObjectsSet
}} = dmt_client:commit(Revision, Commit, UserOpID, Client),
[
{category, #domain_CategoryObject{
ref = Ref,
data = Category
}}
] = ordsets:to_list(NewObjectsSet),
?assertMatch(CategoryRef, Ref).
update_object_success_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
Email = <<"insert_remove_referencing_object_success_test">>,
UserOpID = create_user_op(Email, Client),
Revision1 = 0,
Commit1 = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {proxy, #domain_ProxyDefinition{
name = <<"proxy">>,
description = <<"proxy_description">>,
url = <<"http://someurl">>,
options = #{}
}}
}}
]
},
{ok, #domain_conf_v2_CommitResponse{
version = Revision2,
new_objects = [
{proxy, #domain_ProxyObject{
ref = ProxyRef
}}
]
}} = dmt_client:commit(Revision1, Commit1, UserOpID, Client),
NewObject = {proxy, #domain_ProxyObject{
ref = ProxyRef,
data = #domain_ProxyDefinition{
name = <<"proxy2">>,
description = <<"proxy_description2">>,
url = <<"http://someurl">>,
options = #{}
}
}},
Commit2 = #domain_conf_v2_Commit{
ops = [
{update, #domain_conf_v2_UpdateOp{
targeted_ref = {proxy, ProxyRef},
new_object = NewObject
}}
]
},
{ok, #domain_conf_v2_CommitResponse{
version = Revision3
}} = dmt_client:commit(Revision2, Commit2, UserOpID, Client),
{ok, #domain_conf_v2_VersionedObject{
object = NewObject
}} = dmt_client:checkout_object({version, Revision3}, {proxy, ProxyRef}, Client).
%% RepositoryClient Tests
%% GetLocalVersions
%% GetGlobalVersions
create_user_op(Email, Client) ->
%% Create UserOp
UserOpParams = #domain_conf_v2_UserOpParams{
email = Email,
name = <<"some_name">>
},
{ok, #domain_conf_v2_UserOp{
id = UserOpID
}} = dmt_client:create_user_op(UserOpParams, Client),
UserOpID.

@@ -0,0 +1,14 @@
{application, dmt_core, [
{description, "An OTP application"},
{vsn, "0.1.0"},
{registered, []},
{applications, [
kernel,
stdlib
]},
{env, []},
{modules, []},
{licenses, ["Apache-2.0"]},
{links, []}
]}.

@@ -0,0 +1,155 @@
-module(dmt_domain).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-compile({parse_transform, dmt_domain_pt}).
%%
-export([references/1]).
-define(DOMAIN, dmsl_domain_thrift).
-export_type([operation_error/0]).
%%
-type object_ref() :: dmsl_domain_thrift:'Reference'().
-type domain_object() :: dmsl_domain_thrift:'DomainObject'().
-type nonexistent_object() :: {object_ref(), [object_ref()]}.
-type operation_conflict() ::
{object_already_exists, object_ref()}
| {object_not_found, object_ref()}
| {object_reference_mismatch, object_ref()}.
-type operation_invalid() ::
{objects_not_exist, [nonexistent_object()]}
| {object_reference_cycles, [[object_ref()]]}.
-type operation_error() ::
{conflict, operation_conflict()}
| {invalid, operation_invalid()}.
references(DomainObject) ->
{DataType, Data} = get_data(DomainObject),
references(Data, DataType).
references(Object, DataType) ->
references(Object, DataType, []).
references(undefined, _StructInfo, Refs) ->
Refs;
references({Tag, Object}, StructInfo = {struct, union, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
case get_field_info(Tag, StructInfo) of
false ->
erlang:error({<<"field info not found">>, Tag, StructInfo});
{_, _, Type, _, _} ->
check_reference_type(Object, Type, Refs)
end;
%% what if it's a union?
references(Object, {struct, struct, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
indexfold(
fun(I, {_, _Required, FieldType, _Name, _}, Acc) ->
case element(I, Object) of
undefined ->
Acc;
Field ->
check_reference_type(Field, FieldType, Acc)
end
end,
Refs,
% NOTE
% This `2` gives index of the first significant field in a record tuple.
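        % For example, #rec{a = 1, b = 2} is the tuple {rec, 1, 2}: element 1
        % holds the record name, so the first field sits at element 2.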
2,
FieldsInfo
);
references(Object, {struct, _, {?DOMAIN, StructName}}, Refs) ->
StructInfo = get_struct_info(StructName),
check_reference_type(Object, StructInfo, Refs);
references(Object, {list, FieldType}, Refs) ->
lists:foldl(
fun(O, Acc) ->
check_reference_type(O, FieldType, Acc)
end,
Refs,
Object
);
references(Object, {set, FieldType}, Refs) ->
ListObject = ordsets:to_list(Object),
check_reference_type(ListObject, {list, FieldType}, Refs);
references(Object, {map, KeyType, ValueType}, Refs) ->
maps:fold(
fun(K, V, Acc) ->
check_reference_type(
V,
ValueType,
check_reference_type(K, KeyType, Acc)
)
end,
Refs,
Object
);
references(_DomainObject, _Primitive, Refs) ->
Refs.
indexfold(Fun, Acc, I, [E | Rest]) ->
indexfold(Fun, Fun(I, E, Acc), I + 1, Rest);
indexfold(_Fun, Acc, _I, []) ->
Acc.
check_reference_type(Object, Type, Refs) ->
case is_reference_type(Type) of
{true, Tag} ->
[{Tag, Object} | Refs];
false ->
references(Object, Type, Refs)
end.
-spec get_data(domain_object()) -> any().
get_data(DomainObject) ->
get_domain_object_field(data, DomainObject).
get_domain_object_field(Field, {Tag, Struct}) ->
get_field(Field, Struct, get_domain_object_schema(Tag)).
get_domain_object_schema(Tag) ->
SchemaInfo = get_struct_info('DomainObject'),
{_, _, {struct, _, {_, ObjectStructName}}, _, _} = get_field_info(Tag, SchemaInfo),
get_struct_info(ObjectStructName).
get_field(Field, Struct, StructInfo) when is_atom(Field) ->
{FieldIndex, {_, _, Type, _, _}} = get_field_index(Field, StructInfo),
{Type, element(FieldIndex, Struct)}.
get_struct_info(StructName) ->
dmsl_domain_thrift:struct_info(StructName).
get_field_info(Field, {struct, _StructType, FieldsInfo}) ->
lists:keyfind(Field, 4, FieldsInfo).
get_field_index(Field, {struct, _StructType, FieldsInfo}) ->
% NOTE
% This `2` gives index of the first significant field in a record tuple.
get_field_index(Field, 2, FieldsInfo).
get_field_index(_Field, _, []) ->
false;
get_field_index(Field, I, [F | Rest]) ->
case F of
{_, _, _, Field, _} = Info ->
{I, Info};
_ ->
get_field_index(Field, I + 1, Rest)
end.
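%% At compile time the dmt_domain_pt parse transform replaces
%% is_reference_type/1 with precomputed clauses over the 'Reference' union and
%% drops the /2 helper; the definitions below spell out the intended semantics.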
is_reference_type(Type) ->
{struct, union, StructInfo} = get_struct_info('Reference'),
is_reference_type(Type, StructInfo).
is_reference_type(_Type, []) ->
false;
is_reference_type(Type, [{_, _, Type, Tag, _} | _Rest]) ->
{true, Tag};
is_reference_type(Type, [_ | Rest]) ->
is_reference_type(Type, Rest).

@@ -0,0 +1,92 @@
-module(dmt_domain_pt).
-export([parse_transform/2]).
-spec parse_transform(Forms, [compile:option()]) -> Forms when
Forms :: [erl_parse:abstract_form() | erl_parse:form_info()].
parse_transform(Forms, _Options) ->
[
erl_syntax:revert(FormNext)
|| Form <- Forms,
FormNext <- [erl_syntax_lib:map(fun transform/1, Form)],
FormNext /= delete
].
transform(Form) ->
case erl_syntax:type(Form) of
function ->
Name = erl_syntax:concrete(erl_syntax:function_name(Form)),
Arity = erl_syntax:function_arity(Form),
transform_function(Name, Arity, Form);
_ ->
Form
end.
transform_function(Name = is_reference_type, 1, FormWas) ->
% NOTE
% Replacing `dmt_domain:is_reference_type/1` with a code which does something similar to:
% ```
% is_reference_type({struct, struct, {dmsl_domain_thrift, 'CategoryRef'}}) -> {true, 'category'};
% is_reference_type({struct, struct, {dmsl_domain_thrift, 'CurrencyRef'}}) -> {true, 'currency'};
% ...
% is_reference_type(_) -> false.
% ```
{struct, union, StructInfo} = get_struct_info('Reference'),
ok = validate_reference_struct('Reference', StructInfo),
Clauses =
[
erl_syntax:clause(
[erl_syntax:abstract(Type)],
none,
[erl_syntax:abstract({true, Tag})]
)
|| {_N, _Req, Type, Tag, _Default} <- StructInfo
] ++
[
erl_syntax:clause(
[erl_syntax:underscore()],
none,
[erl_syntax:abstract(false)]
)
],
Form = erl_syntax_lib:map(
fun(F) -> erl_syntax:copy_attrs(FormWas, F) end,
erl_syntax:function(
erl_syntax:abstract(Name),
Clauses
)
),
Form;
transform_function(_Name = is_reference_type, 2, _FormWas) ->
% NOTE
% We need to make `is_reference_type/2` disappear, otherwise it will trigger _unused function_
% warning.
delete;
transform_function(_, _, Form) ->
Form.
get_struct_info(StructName) ->
dmsl_domain_thrift:struct_info(StructName).
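%% If two 'Reference' union fields shared one ref type, the generated
%% is_reference_type/1 clauses would overlap: only the first tag could ever
%% match, silently breaking reference collection. Hence this validation.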
validate_reference_struct(StructName, StructInfo) ->
Mappings = lists:foldl(
fun({N, _Req, Type, Tag, _Default}, Acc) ->
maps:put(Type, [{N, Tag} | maps:get(Type, Acc, [])], Acc)
end,
#{},
StructInfo
),
case maps:filter(fun(_, Tags) -> length(Tags) > 1 end, Mappings) of
Dups when map_size(Dups) > 0 ->
ErrorMessage = format(
"struct_info(~0p): multiple fields share the same reference type, "
"this breaks referential integrity validation",
[StructName]
),
exit({ErrorMessage, Dups});
_ ->
ok
end.
format(Fmt, Args) ->
unicode:characters_to_nfc_list(io_lib:format(Fmt, Args)).

@@ -0,0 +1,50 @@
-module(dmt_history).
-export([head/1]).
-export([head/2]).
-export([travel/3]).
-include_lib("damsel/include/dmsl_domain_conf_thrift.hrl").
-type history() :: dmsl_domain_conf_thrift:'History'().
-type version() :: dmsl_domain_conf_thrift:'Version'().
-type snapshot() :: dmsl_domain_conf_thrift:'Snapshot'().
-spec head(history()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
head(History) ->
head(History, #domain_conf_Snapshot{version = 0, domain = dmt_domain:new()}).
-spec head(history(), snapshot()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
head(History, Snapshot) when map_size(History) =:= 0 ->
{ok, Snapshot};
head(History, Snapshot) ->
Head = lists:max(maps:keys(History)),
travel(Head, History, Snapshot).
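%% Replays commits between the snapshot version and the target: applying
%% operations while moving forward in history, reverting them while moving
%% back.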
-spec travel(version(), history(), snapshot()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
travel(To, _History, #domain_conf_Snapshot{version = From} = Snapshot) when To =:= From ->
{ok, Snapshot};
travel(To, History, #domain_conf_Snapshot{version = From, domain = Domain}) when To > From ->
#domain_conf_Commit{ops = Ops} = maps:get(From + 1, History),
case dmt_domain:apply_operations(Ops, Domain) of
{ok, NewDomain} ->
NextSnapshot = #domain_conf_Snapshot{
version = From + 1,
domain = NewDomain
},
travel(To, History, NextSnapshot);
{error, _} = Error ->
Error
end;
travel(To, History, #domain_conf_Snapshot{version = From, domain = Domain}) when To < From ->
#domain_conf_Commit{ops = Ops} = maps:get(From, History),
case dmt_domain:revert_operations(Ops, Domain) of
{ok, NewDomain} ->
PreviousSnapshot = #domain_conf_Snapshot{
version = From - 1,
domain = NewDomain
},
travel(To, History, PreviousSnapshot);
{error, _} = Error ->
Error
end.

@@ -0,0 +1,14 @@
{application, dmt_object, [
{description, "An OTP application"},
{vsn, "0.1.0"},
{registered, []},
{applications, [
kernel,
stdlib
]},
{env, []},
{modules, []},
{licenses, ["Apache-2.0"]},
{links, []}
]}.

@@ -0,0 +1,125 @@
-module(dmt_object).
-feature(maybe_expr, enable).
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-export([new_object/1]).
-export([update_object/2]).
-export([remove_object/1]).
-export([just_object/7]).
-export_type([insertable_object/0]).
-export_type([object_changes/0]).
-export_type([object/0]).
-type object_type() :: atom().
-type object_id() :: string().
-type timestamp() :: string().
-type insertable_object() :: #{
type := object_type(),
tmp_id := object_id(),
forced_id := string() | undefined,
references := [{object_type(), object_id()}],
data := binary()
}.
-type object_changes() :: #{
id := object_id(),
type := object_type(),
references => [{object_type(), object_id()}],
referenced_by => [{object_type(), object_id()}],
data => binary(),
is_active => boolean()
}.
-type object() :: #{
id := object_id(),
type := object_type(),
global_version := string(),
references := [{object_type(), object_id()}],
referenced_by := [{object_type(), object_id()}],
data := binary(),
created_at := timestamp(),
created_by := string()
}.
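%% NOTE tmp_id appears to serve as a commit-local handle until the real ID is
%% assigned on insert; forced_id, when set, pins the resulting ref instead.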
new_object(#domain_conf_v2_InsertOp{
object = NewObject,
force_ref = ForcedRef
}) ->
case get_checked_type(ForcedRef, NewObject) of
{ok, Type} ->
{ok, #{
tmp_id => uuid:get_v4_urandom(),
type => Type,
forced_id => ForcedRef,
references => list_term_to_binary(dmt_object_reference:refless_object_references(NewObject)),
data => NewObject
}};
{error, Error} ->
{error, Error}
end.
update_object(
#domain_conf_v2_UpdateOp{
targeted_ref = {_, ID} = TargetedRef,
new_object = NewObject
},
ExistingUpdate
) ->
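    %% With no else section, a failed ?= match makes the whole maybe
    %% expression return the non-matching value, so {error, _} from either
    %% check below is passed through unchanged (EEP 49 semantics).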
maybe
{ok, Type} ?= get_checked_type(TargetedRef, NewObject),
ok ?= check_domain_object_refs(TargetedRef, NewObject),
{ok, ExistingUpdate#{
id => ID,
type => Type,
%% NOTE this will just provide all the refs that already exist,
%% it doesn't give us diff, but maybe it's not needed
references => dmt_object_reference:domain_object_references(NewObject),
data => NewObject
}}
end.
remove_object(OG) ->
OG#{
referenced_by => [],
is_active => false
}.
just_object(
ID,
Version,
ReferencesTo,
ReferencedBy,
Data,
CreatedAt,
IsActive
) ->
{Type, _} = ID,
#{
id => ID,
type => Type,
global_version => Version,
references => ReferencesTo,
referenced_by => ReferencedBy,
data => Data,
created_at => CreatedAt,
is_active => IsActive
}.
get_checked_type(undefined, {Type, _}) ->
{ok, Type};
get_checked_type({Type, _}, {Type, _}) ->
{ok, Type};
get_checked_type(Ref, Object) ->
{error, {type_mismatch, Ref, Object}}.
check_domain_object_refs({Type, Ref}, {Type, {_Object, Ref, _Data}}) ->
ok;
check_domain_object_refs(Ref, Object) ->
{error, {reference_mismatch, Ref, Object}}.
list_term_to_binary(Terms) ->
lists:map(fun(Term) -> term_to_binary(Term) end, Terms).

@@ -0,0 +1,45 @@
-module(dmt_object_id).
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
%% API
-export([get_numerical_object_id/2]).
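%% These clauses mirror the tables that carry a `sequence` column in the
%% initial migration; types without a sequence cannot get generated numerical
%% IDs and fall through to the not_supported throw below.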
get_numerical_object_id(category, ID) ->
#domain_CategoryRef{id = ID};
get_numerical_object_id(business_schedule, ID) ->
#domain_BusinessScheduleRef{id = ID};
get_numerical_object_id(calendar, ID) ->
#domain_CalendarRef{id = ID};
get_numerical_object_id(bank, ID) ->
#domain_BankRef{id = ID};
get_numerical_object_id(contract_template, ID) ->
#domain_ContractTemplateRef{id = ID};
get_numerical_object_id(term_set_hierarchy, ID) ->
#domain_TermSetHierarchyRef{id = ID};
get_numerical_object_id(payment_institution, ID) ->
#domain_PaymentInstitutionRef{id = ID};
get_numerical_object_id(provider, ID) ->
#domain_ProviderRef{id = ID};
get_numerical_object_id(terminal, ID) ->
#domain_TerminalRef{id = ID};
get_numerical_object_id(inspector, ID) ->
#domain_InspectorRef{id = ID};
get_numerical_object_id(system_account_set, ID) ->
#domain_SystemAccountSetRef{id = ID};
get_numerical_object_id(external_account_set, ID) ->
#domain_ExternalAccountSetRef{id = ID};
get_numerical_object_id(proxy, ID) ->
#domain_ProxyRef{id = ID};
get_numerical_object_id(cash_register_provider, ID) ->
#domain_CashRegisterProviderRef{id = ID};
get_numerical_object_id(routing_rules, ID) ->
#domain_RoutingRulesetRef{id = ID};
get_numerical_object_id(bank_card_category, ID) ->
#domain_BankCardCategoryRef{id = ID};
get_numerical_object_id(criterion, ID) ->
#domain_CriterionRef{id = ID};
get_numerical_object_id(document_type, ID) ->
#domain_DocumentTypeRef{id = ID};
get_numerical_object_id(Type, _ID) ->
throw({not_supported, Type}).

@@ -0,0 +1,156 @@
-module(dmt_object_reference).
%% API
-export([get_domain_object_ref/1]).
-export([refless_object_references/1]).
-export([domain_object_references/1]).
-define(DOMAIN, dmsl_domain_thrift).
get_domain_object_ref(DomainObject = {Tag, _Struct}) ->
{_Type, Ref} = get_domain_object_field(ref, DomainObject),
{Tag, Ref}.
%% ReflessObject ZONE
%% FIXME doesn't work
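%% Likely cause: get_refless_data/1 returns the schema as a {Name, StructInfo}
%% pair, which no references/3 clause matches, so collection always falls
%% through to the final catch-all and yields [].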
refless_object_references(DomainObject) ->
{Data, DataType} = get_refless_data(DomainObject),
references(Data, DataType).
get_refless_data({Tag, Struct}) ->
{Struct, get_refless_object_schema(Tag)}.
get_refless_object_schema(Tag) ->
SchemaInfo = get_struct_info('ReflessDomainObject'),
{_, _, {struct, _, {_, ObjectStructName}}, _, _} = get_field_info(Tag, SchemaInfo),
{ObjectStructName, get_struct_info(ObjectStructName)}.
%% DomainObject ZONE
domain_object_references(DomainObject) ->
{Data, DataType} = get_domain_object_data(DomainObject),
references(Data, DataType).
get_domain_object_data(DomainObject) ->
get_domain_object_field(data, DomainObject).
get_domain_object_field(Field, {Tag, Struct}) ->
get_field(Field, Struct, get_domain_object_schema(Tag)).
get_domain_object_schema(Tag) ->
SchemaInfo = get_struct_info('DomainObject'),
{_, _, {struct, _, {_, ObjectStructName}}, _, _} = get_field_info(Tag, SchemaInfo),
get_struct_info(ObjectStructName).
get_field(Field, Struct, StructInfo) when is_atom(Field) ->
{FieldIndex, {_, _, Type, _, _}} = get_field_index(Field, StructInfo),
{element(FieldIndex, Struct), Type}.
get_field_index(Field, {struct, _StructType, FieldsInfo}) ->
% NOTE
% This `2` gives index of the first significant field in a record tuple.
get_field_index(Field, 2, FieldsInfo).
get_field_index(_Field, _, []) ->
false;
get_field_index(Field, I, [F | Rest]) ->
case F of
{_, _, _, Field, _} = Info ->
{I, Info};
_ ->
get_field_index(Field, I + 1, Rest)
end.
%% References Gathering ZONE
references(Object, DataType) ->
references(Object, DataType, []).
references(undefined, _StructInfo, Refs) ->
Refs;
references({Tag, Object}, StructInfo = {struct, union, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
case get_field_info(Tag, StructInfo) of
false ->
erlang:error({<<"field info not found">>, Tag, StructInfo});
{_, _, Type, _, _} ->
check_reference_type(Object, Type, Refs)
end;
%% what if it's a union?
references(Object, {struct, struct, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
indexfold(
fun(I, {_, _Required, FieldType, _Name, _}, Acc) ->
case element(I, Object) of
undefined ->
Acc;
Field ->
check_reference_type(Field, FieldType, Acc)
end
end,
Refs,
% NOTE
% This `2` gives index of the first significant field in a record tuple.
2,
FieldsInfo
);
references(Object, {struct, _, {?DOMAIN, StructName}}, Refs) ->
StructInfo = get_struct_info(StructName),
check_reference_type(Object, StructInfo, Refs);
references(Object, {list, FieldType}, Refs) ->
lists:foldl(
fun(O, Acc) ->
check_reference_type(O, FieldType, Acc)
end,
Refs,
Object
);
references(Object, {set, FieldType}, Refs) ->
ListObject = ordsets:to_list(Object),
check_reference_type(ListObject, {list, FieldType}, Refs);
references(Object, {map, KeyType, ValueType}, Refs) ->
maps:fold(
fun(K, V, Acc) ->
check_reference_type(
V,
ValueType,
check_reference_type(K, KeyType, Acc)
)
end,
Refs,
Object
);
references(_DomainObject, _Primitive, Refs) ->
Refs.
check_reference_type(Object, Type, Refs) ->
case is_reference_type(Type) of
{true, Tag} ->
[{Tag, Object} | Refs];
false ->
references(Object, Type, Refs)
end.
is_reference_type(Type) ->
{struct, union, StructInfo} = get_struct_info('Reference'),
is_reference_type(Type, StructInfo).
is_reference_type(_Type, []) ->
false;
is_reference_type(Type, [{_, _, Type, Tag, _} | _Rest]) ->
{true, Tag};
is_reference_type(Type, [_ | Rest]) ->
is_reference_type(Type, Rest).
indexfold(Fun, Acc, I, [E | Rest]) ->
indexfold(Fun, Fun(I, E, Acc), I + 1, Rest);
indexfold(_Fun, Acc, _I, []) ->
Acc.
%% Common
get_struct_info(StructName) ->
dmsl_domain_thrift:struct_info(StructName).
get_field_info(Field, {struct, _StructType, FieldsInfo}) ->
lists:keyfind(Field, 4, FieldsInfo).

@@ -0,0 +1,23 @@
-module(dmt_object_type).
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
%% API
-export([get_refless_object_type/1]).
-export([get_ref_type/1]).
get_refless_object_type(#domain_Category{}) ->
category;
get_refless_object_type(#domain_Currency{}) ->
currency;
get_refless_object_type(_) ->
error(not_impl).
get_ref_type(#domain_CurrencyRef{}) ->
currency;
get_ref_type(#domain_CategoryRef{}) ->
category;
get_ref_type(undefined) ->
undefined;
get_ref_type(_) ->
error(not_impl).

compose.tracing.yaml Normal file
@@ -0,0 +1,27 @@
services:
testrunner:
depends_on:
jaeger:
condition: service_healthy
environment:
- OTEL_SERVICE_NAME=dominant-v2
- OTEL_TRACES_EXPORTER=otlp
- OTEL_TRACES_SAMPLER=parentbased_always_on
- OTEL_EXPORTER_OTLP_PROTOCOL=http_protobuf
- OTEL_EXPORTER_OTLP_ENDPOINT=http://jaeger:4318
jaeger:
image: jaegertracing/all-in-one:1.47
environment:
- COLLECTOR_OTLP_ENABLED=true
healthcheck:
test: "/go/bin/all-in-one-linux status"
interval: 2s
timeout: 1s
retries: 20
ports:
- 4317:4317 # OTLP gRPC receiver
- 4318:4318 # OTLP http receiver
- 5778:5778
- 14250:14250
- 16686:16686

compose.yaml Normal file
@@ -0,0 +1,37 @@
services:
testrunner:
image: $DEV_IMAGE_TAG
environment:
WORK_DIR: $PWD
POSTGRES_HOST: db
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: dmtv2
build:
dockerfile: Dockerfile.dev
context: .
args:
OTP_VERSION: $OTP_VERSION
THRIFT_VERSION: $THRIFT_VERSION
volumes:
- .:$PWD
hostname: dominant-v2
depends_on:
db:
condition: service_healthy
working_dir: $PWD
db:
image: postgres
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: dmt
ports:
- 5432:5432
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 5s
timeout: 5s
retries: 5

config/sys.config Normal file
@@ -0,0 +1,59 @@
[
{kernel, [
{log_level, info},
{logger, [
{handler, default, logger_std_h, #{
    level => debug,
    config => #{
        type => {file, "/var/log/dominant-v2/console.json"},
        sync_mode_qlen => 20
    },
    formatter => {logger_logstash_formatter, #{}}
}}
]}
]},
{dmt, []},
{epg_connector, [
{databases, #{
default_db => #{
host => "db",
port => 5432,
username => "postgres",
password => "postgres",
database => "dmt"
}
}},
{pools, #{
default_pool => #{
database => default_db,
size => 10
}
}}
]},
{scoper, [
{storage, scoper_storage_logger}
]},
{how_are_you, [
{metrics_publishers, [
% {hay_statsd_publisher, #{
% key_prefix => <<"dominant-v2.">>,
% host => "localhost",
% port => 8125
% }}
]}
]},
{prometheus, [
{collectors, [
default
]}
]}
].

config/vm.args Normal file
@@ -0,0 +1,6 @@
-sname dmt
-setcookie dmt
+K true
+A 10

migrations/1722105006-create_initial_tables.sql Normal file
@@ -0,0 +1,515 @@
-- migrations/1722105006-create_initial_tables.sql
-- :up
-- Up migration
CREATE TABLE op_user (
id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
name VARCHAR(255) NOT NULL,
email VARCHAR(255) UNIQUE NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
CREATE TABLE global_version (
version BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
created_by UUID REFERENCES op_user(id)
);
CREATE TABLE category (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
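-- Illustrative sketch (not part of the migration): a commit first allocates a
-- new global_version row, then inserts object revisions that reference it.
--
--   INSERT INTO global_version (created_by)
--   VALUES ('00000000-0000-0000-0000-000000000000')
--   RETURNING version;
--
--   INSERT INTO category (id, global_version, references_to, referenced_by, data)
--   VALUES ('1', 1, '{}', '{}', '<thrift-encoded category>');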
CREATE TABLE currency (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE business_schedule (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE calendar (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE payment_method (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE payout_method (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE bank (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE contract_template (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE term_set_hierarchy (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE payment_institution (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE provider (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE terminal (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE inspector (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE system_account_set (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE external_account_set (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE proxy (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE globals (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE cash_register_provider (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE routing_rules (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE bank_card_category (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE criterion (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE document_type (
id TEXT NOT NULL,
sequence BIGINT,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE payment_service (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE payment_system (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE bank_card_token_service (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE mobile_operator (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE crypto_currency (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE country (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE trade_bloc (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE identity_provider (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE TABLE limit_config (
id TEXT NOT NULL,
global_version BIGINT NOT NULL REFERENCES global_version(version),
references_to TEXT[] NOT NULL,
referenced_by TEXT[] NOT NULL,
data TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
is_active BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id, global_version)
);
CREATE INDEX idx_category_global_version ON category(global_version);
CREATE INDEX idx_currency_global_version ON currency(global_version);
CREATE INDEX idx_business_schedule_global_version ON business_schedule(global_version);
CREATE INDEX idx_calendar_global_version ON calendar(global_version);
CREATE INDEX idx_payment_method_global_version ON payment_method(global_version);
CREATE INDEX idx_payout_method_global_version ON payout_method(global_version);
CREATE INDEX idx_bank_global_version ON bank(global_version);
CREATE INDEX idx_contract_template_global_version ON contract_template(global_version);
CREATE INDEX idx_term_set_hierarchy_global_version ON term_set_hierarchy(global_version);
CREATE INDEX idx_payment_institution_global_version ON payment_institution(global_version);
CREATE INDEX idx_provider_global_version ON provider(global_version);
CREATE INDEX idx_terminal_global_version ON terminal(global_version);
CREATE INDEX idx_inspector_global_version ON inspector(global_version);
CREATE INDEX idx_system_account_set_global_version ON system_account_set(global_version);
CREATE INDEX idx_external_account_set_global_version ON external_account_set(global_version);
CREATE INDEX idx_proxy_global_version ON proxy(global_version);
CREATE INDEX idx_globals_global_version ON globals(global_version);
CREATE INDEX idx_cash_register_provider_global_version ON cash_register_provider(global_version);
CREATE INDEX idx_routing_rules_global_version ON routing_rules(global_version);
CREATE INDEX idx_bank_card_category_global_version ON bank_card_category(global_version);
CREATE INDEX idx_criterion_global_version ON criterion(global_version);
CREATE INDEX idx_document_type_global_version ON document_type(global_version);
CREATE INDEX idx_payment_service_global_version ON payment_service(global_version);
CREATE INDEX idx_payment_system_global_version ON payment_system(global_version);
CREATE INDEX idx_bank_card_token_service_global_version ON bank_card_token_service(global_version);
CREATE INDEX idx_mobile_operator_global_version ON mobile_operator(global_version);
CREATE INDEX idx_crypto_currency_global_version ON crypto_currency(global_version);
CREATE INDEX idx_country_global_version ON country(global_version);
CREATE INDEX idx_trade_bloc_global_version ON trade_bloc(global_version);
CREATE INDEX idx_identity_provider_global_version ON identity_provider(global_version);
CREATE INDEX idx_limit_config_global_version ON limit_config(global_version);
CREATE INDEX idx_category_sequence ON category(sequence);
CREATE INDEX idx_business_schedule_sequence ON business_schedule(sequence);
CREATE INDEX idx_calendar_sequence ON calendar(sequence);
CREATE INDEX idx_bank_sequence ON bank(sequence);
CREATE INDEX idx_contract_template_sequence ON contract_template(sequence);
CREATE INDEX idx_term_set_hierarchy_sequence ON term_set_hierarchy(sequence);
CREATE INDEX idx_payment_institution_sequence ON payment_institution(sequence);
CREATE INDEX idx_provider_sequence ON provider(sequence);
CREATE INDEX idx_terminal_sequence ON terminal(sequence);
CREATE INDEX idx_inspector_sequence ON inspector(sequence);
CREATE INDEX idx_system_account_set_sequence ON system_account_set(sequence);
CREATE INDEX idx_external_account_set_sequence ON external_account_set(sequence);
CREATE INDEX idx_proxy_sequence ON proxy(sequence);
CREATE INDEX idx_cash_register_provider_sequence ON cash_register_provider(sequence);
CREATE INDEX idx_routing_rules_sequence ON routing_rules(sequence);
CREATE INDEX idx_bank_card_category_sequence ON bank_card_category(sequence);
CREATE INDEX idx_criterion_sequence ON criterion(sequence);
CREATE INDEX idx_document_type_sequence ON document_type(sequence);
-- :down
-- Down migration
DROP INDEX IF EXISTS idx_category_global_version;
DROP INDEX IF EXISTS idx_currency_global_version;
DROP INDEX IF EXISTS idx_business_schedule_global_version;
DROP INDEX IF EXISTS idx_calendar_global_version;
DROP INDEX IF EXISTS idx_payment_method_global_version;
DROP INDEX IF EXISTS idx_payout_method_global_version;
DROP INDEX IF EXISTS idx_bank_global_version;
DROP INDEX IF EXISTS idx_contract_template_global_version;
DROP INDEX IF EXISTS idx_term_set_hierarchy_global_version;
DROP INDEX IF EXISTS idx_payment_institution_global_version;
DROP INDEX IF EXISTS idx_provider_global_version;
DROP INDEX IF EXISTS idx_terminal_global_version;
DROP INDEX IF EXISTS idx_inspector_global_version;
DROP INDEX IF EXISTS idx_system_account_set_global_version;
DROP INDEX IF EXISTS idx_external_account_set_global_version;
DROP INDEX IF EXISTS idx_proxy_global_version;
DROP INDEX IF EXISTS idx_globals_global_version;
DROP INDEX IF EXISTS idx_cash_register_provider_global_version;
DROP INDEX IF EXISTS idx_routing_rules_global_version;
DROP INDEX IF EXISTS idx_bank_card_category_global_version;
DROP INDEX IF EXISTS idx_criterion_global_version;
DROP INDEX IF EXISTS idx_document_type_global_version;
DROP INDEX IF EXISTS idx_payment_service_global_version;
DROP INDEX IF EXISTS idx_payment_system_global_version;
DROP INDEX IF EXISTS idx_bank_card_token_service_global_version;
DROP INDEX IF EXISTS idx_mobile_operator_global_version;
DROP INDEX IF EXISTS idx_crypto_currency_global_version;
DROP INDEX IF EXISTS idx_country_global_version;
DROP INDEX IF EXISTS idx_trade_bloc_global_version;
DROP INDEX IF EXISTS idx_identity_provider_global_version;
DROP INDEX IF EXISTS idx_limit_config_global_version;
DROP INDEX IF EXISTS idx_category_sequence;
DROP INDEX IF EXISTS idx_business_schedule_sequence;
DROP INDEX IF EXISTS idx_calendar_sequence;
DROP INDEX IF EXISTS idx_bank_sequence;
DROP INDEX IF EXISTS idx_contract_template_sequence;
DROP INDEX IF EXISTS idx_term_set_hierarchy_sequence;
DROP INDEX IF EXISTS idx_payment_institution_sequence;
DROP INDEX IF EXISTS idx_provider_sequence;
DROP INDEX IF EXISTS idx_terminal_sequence;
DROP INDEX IF EXISTS idx_inspector_sequence;
DROP INDEX IF EXISTS idx_system_account_set_sequence;
DROP INDEX IF EXISTS idx_external_account_set_sequence;
DROP INDEX IF EXISTS idx_proxy_sequence;
DROP INDEX IF EXISTS idx_cash_register_provider_sequence;
DROP INDEX IF EXISTS idx_routing_rules_sequence;
DROP INDEX IF EXISTS idx_bank_card_category_sequence;
DROP INDEX IF EXISTS idx_criterion_sequence;
DROP INDEX IF EXISTS idx_document_type_sequence;
DROP TABLE IF EXISTS category;
DROP TABLE IF EXISTS currency;
DROP TABLE IF EXISTS business_schedule;
DROP TABLE IF EXISTS calendar;
DROP TABLE IF EXISTS payment_method;
DROP TABLE IF EXISTS payout_method;
DROP TABLE IF EXISTS bank;
DROP TABLE IF EXISTS contract_template;
DROP TABLE IF EXISTS term_set_hierarchy;
DROP TABLE IF EXISTS payment_institution;
DROP TABLE IF EXISTS provider;
DROP TABLE IF EXISTS terminal;
DROP TABLE IF EXISTS inspector;
DROP TABLE IF EXISTS system_account_set;
DROP TABLE IF EXISTS external_account_set;
DROP TABLE IF EXISTS proxy;
DROP TABLE IF EXISTS globals;
DROP TABLE IF EXISTS cash_register_provider;
DROP TABLE IF EXISTS routing_rules;
DROP TABLE IF EXISTS bank_card_category;
DROP TABLE IF EXISTS criterion;
DROP TABLE IF EXISTS document_type;
DROP TABLE IF EXISTS payment_service;
DROP TABLE IF EXISTS payment_system;
DROP TABLE IF EXISTS bank_card_token_service;
DROP TABLE IF EXISTS mobile_operator;
DROP TABLE IF EXISTS crypto_currency;
DROP TABLE IF EXISTS country;
DROP TABLE IF EXISTS trade_bloc;
DROP TABLE IF EXISTS identity_provider;
DROP TABLE IF EXISTS limit_config;
DROP TABLE IF EXISTS global_version;
DROP TABLE IF EXISTS op_user;

psql-migration Submodule
@@ -0,0 +1 @@
Subproject commit c84b06fc7e1603783eb81eaad214552151ed9066

rebar.config
@@ -1,7 +1,113 @@
%% Common project erlang options.
{erl_opts, [
% mandatory
%% debug_info,
%% warnings_as_errors,
%% warn_export_all,
%% warn_missing_spec,
%% warn_untyped_record,
%% warn_export_vars,
% by default
%% warn_unused_record,
%% warn_bif_clash,
%% warn_obsolete_guard,
%% warn_unused_vars,
%% warn_shadow_vars,
%% warn_unused_import,
%% warn_unused_function,
%% warn_deprecated_function
% at will
% bin_opt_info
% no_auto_import
% warn_missing_spec_all
]}.
% Common project dependencies.
{deps, [
{genlib, {git, "https://github.com/valitydev/genlib.git", {branch, "master"}}},
{cowboy_draining_server, {git, "https://github.com/valitydev/cowboy_draining_server.git", {branch, "master"}}},
{uuid, {git, "https://github.com/okeuday/uuid.git", {branch, "master"}}},
{scoper, {git, "https://github.com/valitydev/scoper.git", {branch, "master"}}},
{erl_health, {git, "https://github.com/valitydev/erlang-health.git", {branch, "master"}}},
{cowboy_cors, {git, "https://github.com/valitydev/cowboy_cors.git", {branch, master}}},
{cowboy_access_log, {git, "https://github.com/valitydev/cowboy_access_log.git", {branch, "master"}}},
{woody_user_identity, {git, "https://github.com/valitydev/woody_erlang_user_identity.git", {branch, "master"}}},
{woody, {git, "https://github.com/valitydev/woody_erlang.git", {branch, master}}},
{damsel, {git, "git@github.com:valitydev/damsel.git", {branch, "IMP-281/dmt_proto"}}},
%% Libraries for postgres interaction
{epg_connector, {git, "git@github.com:valitydev/epg_connector.git", {branch, master}}},
{epgsql, {git, "https://github.com/epgsql/epgsql.git", {tag, "4.7.1"}}},
{epgsql_pool, {git, "https://github.com/wgnet/epgsql_pool", {branch, "master"}}},
{herd, {git, "https://github.com/wgnet/herd.git", {tag, "1.3.4"}}},
%% For db migrations
{envloader, {git, "https://github.com/nuex/envloader.git", {branch, "master"}}},
eql,
getopt,
{prometheus, "4.6.0"},
{prometheus_cowboy, "0.1.8"},
%% OpenTelemetry deps
{opentelemetry_api, "1.2.1"},
{opentelemetry, "1.3.0"},
{opentelemetry_exporter, "1.3.0"}
]}.
%% XRef checks
{xref_checks, [
undefined_function_calls,
undefined_functions,
deprecated_functions_calls,
deprecated_functions
]}.
%% Dialyzer static analyzing
{dialyzer, [
{warnings, [
% mandatory
unmatched_returns,
error_handling,
% race_conditions,
unknown
]},
{plt_apps, all_deps},
{incremental, true}
]}.
{project_plugins, [
{rebar3_lint, "3.2.6"},
{erlfmt, "1.5.0"},
{covertool, "2.0.6"}
]}.
%% Linter config.
{elvis_output_format, colors}.
{erlfmt, [
{print_width, 120},
{files, [
"apps/dmt*/{src,include,test}/*.{hrl,erl,app.src}",
"rebar.config",
"elvis.config",
"config/sys.config",
"test/*/sys.config"
]}
]}.
{covertool, [
{coverdata_files, [
"eunit.coverdata",
"ct.coverdata"
]}
]}.
{shell, [
    {config, "config/sys.config"},
    {apps, [
        dmt
    ]}
]}.

rebar.lock Normal file
@@ -0,0 +1,191 @@
{"1.2.0",
[{<<"accept">>,{pkg,<<"accept">>,<<"0.3.5">>},2},
{<<"acceptor_pool">>,{pkg,<<"acceptor_pool">>,<<"1.0.0">>},2},
{<<"cache">>,{pkg,<<"cache">>,<<"2.3.3">>},1},
{<<"canal">>,
{git,"https://github.com/valitydev/canal",
{ref,"621d3821cd0a6036fee75d8e3b2d17167f3268e4"}},
1},
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.8.0">>},2},
{<<"cg_mon">>,
{git,"https://github.com/rbkmoney/cg_mon.git",
{ref,"5a87a37694e42b6592d3b4164ae54e0e87e24e18"}},
1},
{<<"chatterbox">>,{pkg,<<"ts_chatterbox">>,<<"0.15.1">>},2},
{<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.9.0">>},1},
{<<"cowboy_access_log">>,
{git,"https://github.com/valitydev/cowboy_access_log.git",
{ref,"04da359e022cf05c5c93812504d5791d6bc97453"}},
0},
{<<"cowboy_cors">>,
{git,"https://github.com/valitydev/cowboy_cors.git",
{ref,"5a3b084fb8c5a4ff58e3c915a822d709d6023c3b"}},
0},
{<<"cowboy_draining_server">>,
{git,"https://github.com/valitydev/cowboy_draining_server.git",
{ref,"186cf4d0722d4ad79afe73d371df6b1371e51905"}},
0},
{<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.11.0">>},2},
{<<"ctx">>,{pkg,<<"ctx">>,<<"0.6.0">>},2},
{<<"damsel">>,
{git,"git@github.com:valitydev/damsel.git",
{ref,"de7ce44874984331f8b180bef3a786bd35573e48"}},
0},
{<<"envloader">>,
{git,"https://github.com/nuex/envloader.git",
{ref,"27a97e04f35c554995467b9236d8ae0188d468c7"}},
0},
{<<"epg_connector">>,
{git,"git@github.com:valitydev/epg_connector.git",
{ref,"7fc3aa1b6d9c8be69a64fefd18f6aaa416dcd572"}},
0},
{<<"epgsql">>,
{git,"https://github.com/epgsql/epgsql.git",
{ref,"7ba52768cf0ea7d084df24d4275a88eef4db13c2"}},
0},
{<<"epgsql_pool">>,
{git,"https://github.com/wgnet/epgsql_pool",
{ref,"f5e492f73752950aab932a1662536e22fc00c717"}},
0},
{<<"eql">>,{pkg,<<"eql">>,<<"0.2.0">>},0},
{<<"erl_health">>,
{git,"https://github.com/valitydev/erlang-health.git",
{ref,"49716470d0e8dab5e37db55d52dea78001735a3d"}},
0},
{<<"genlib">>,
{git,"https://github.com/valitydev/genlib.git",
{ref,"f6074551d6586998e91a97ea20acb47241254ff3"}},
0},
{<<"getopt">>,{pkg,<<"getopt">>,<<"1.0.3">>},0},
{<<"gproc">>,{pkg,<<"gproc">>,<<"0.9.0">>},1},
{<<"grpcbox">>,{pkg,<<"grpcbox">>,<<"0.17.1">>},1},
{<<"gun">>,
{git,"https://github.com/ninenines/gun.git",
{ref,"e7dd9f227e46979d8073e71c683395a809b78cb4"}},
1},
{<<"hackney">>,{pkg,<<"hackney">>,<<"1.18.0">>},1},
{<<"herd">>,
{git,"https://github.com/wgnet/herd.git",
{ref,"934847589dcf5a6d2b02a1f546ffe91c04066f17"}},
0},
{<<"hpack">>,{pkg,<<"hpack_erl">>,<<"0.3.0">>},3},
{<<"idna">>,{pkg,<<"idna">>,<<"6.1.1">>},2},
{<<"jsone">>,{pkg,<<"jsone">>,<<"1.8.0">>},2},
{<<"jsx">>,{pkg,<<"jsx">>,<<"3.1.0">>},1},
{<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},2},
{<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.3.0">>},2},
{<<"opentelemetry">>,{pkg,<<"opentelemetry">>,<<"1.3.0">>},0},
{<<"opentelemetry_api">>,{pkg,<<"opentelemetry_api">>,<<"1.2.1">>},0},
{<<"opentelemetry_exporter">>,
{pkg,<<"opentelemetry_exporter">>,<<"1.3.0">>},
0},
{<<"opentelemetry_semantic_conventions">>,
{pkg,<<"opentelemetry_semantic_conventions">>,<<"0.2.0">>},
1},
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.1">>},2},
{<<"pooler">>,{pkg,<<"pooler">>,<<"1.5.3">>},1},
{<<"prometheus">>,{pkg,<<"prometheus">>,<<"4.6.0">>},0},
{<<"prometheus_cowboy">>,{pkg,<<"prometheus_cowboy">>,<<"0.1.8">>},0},
{<<"prometheus_httpd">>,{pkg,<<"prometheus_httpd">>,<<"2.1.11">>},1},
{<<"quickrand">>,
{git,"https://github.com/okeuday/quickrand.git",
{ref,"65332de501998764f437c3ffe05d744f582d7622"}},
1},
{<<"ranch">>,{pkg,<<"ranch">>,<<"1.8.0">>},2},
{<<"scoper">>,
{git,"https://github.com/valitydev/scoper.git",
{ref,"55a2a32ee25e22fa35f583a18eaf38b2b743429b"}},
0},
{<<"snowflake">>,
{git,"https://github.com/valitydev/snowflake.git",
{ref,"de159486ef40cec67074afe71882bdc7f7deab72"}},
1},
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.7">>},2},
{<<"thrift">>,
{git,"https://github.com/valitydev/thrift_erlang.git",
{ref,"c280ff266ae1c1906fb0dcee8320bb8d8a4a3c75"}},
1},
{<<"tls_certificate_check">>,
{pkg,<<"tls_certificate_check">>,<<"1.23.0">>},
1},
{<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.7.0">>},2},
{<<"uuid">>,
{git,"https://github.com/okeuday/uuid.git",
{ref,"c65307d7991e11dff96064dadf6880108ac0d911"}},
0},
{<<"woody">>,
{git,"https://github.com/valitydev/woody_erlang.git",
{ref,"072825ee7179825a4078feb0649df71303c74157"}},
0},
{<<"woody_user_identity">>,
{git,"https://github.com/valitydev/woody_erlang_user_identity.git",
{ref,"a480762fea8d7c08f105fb39ca809482b6cb042e"}},
0}]}.
[
{pkg_hash,[
{<<"accept">>, <<"B33B127ABCA7CC948BBE6CAA4C263369ABF1347CFA9D8E699C6D214660F10CD1">>},
{<<"acceptor_pool">>, <<"43C20D2ACAE35F0C2BCD64F9D2BDE267E459F0F3FD23DAB26485BF518C281B21">>},
{<<"cache">>, <<"B23A5FE7095445A88412A6E614C933377E0137B44FFED77C9B3FEF1A731A20B2">>},
{<<"certifi">>, <<"D4FB0A6BB20B7C9C3643E22507E42F356AC090A1DCEA9AB99E27E0376D695EBA">>},
{<<"chatterbox">>, <<"5CAC4D15DD7AD61FC3C4415CE4826FC563D4643DEE897A558EC4EA0B1C835C9C">>},
{<<"cowboy">>, <<"865DD8B6607E14CF03282E10E934023A1BD8BE6F6BACF921A7E2A96D800CD452">>},
{<<"cowlib">>, <<"0B9FF9C346629256C42EBE1EEB769A83C6CB771A6EE5960BD110AB0B9B872063">>},
{<<"ctx">>, <<"8FF88B70E6400C4DF90142E7F130625B82086077A45364A78D208ED3ED53C7FE">>},
{<<"eql">>, <<"598ABC19A1CF6AFB8EF89FFEA869F43BAEBB1CEC3260DD5065112FEE7D8CE3E2">>},
{<<"getopt">>, <<"4F3320C1F6F26B2BEC0F6C6446B943EB927A1E6428EA279A1C6C534906EE79F1">>},
{<<"gproc">>, <<"853CCB7805E9ADA25D227A157BA966F7B34508F386A3E7E21992B1B484230699">>},
{<<"grpcbox">>, <<"6E040AB3EF16FE699FFB513B0EF8E2E896DA7B18931A1EF817143037C454BCCE">>},
{<<"hackney">>, <<"C4443D960BB9FBA6D01161D01CD81173089686717D9490E5D3606644C48D121F">>},
{<<"hpack">>, <<"2461899CC4AB6A0EF8E970C1661C5FC6A52D3C25580BC6DD204F84CE94669926">>},
{<<"idna">>, <<"8A63070E9F7D0C62EB9D9FCB360A7DE382448200FBBD1B106CC96D3D8099DF8D">>},
{<<"jsone">>, <<"347FF1FA700E182E1F9C5012FA6D737B12C854313B9AE6954CA75D3987D6C06D">>},
{<<"jsx">>, <<"D12516BAA0BB23A59BB35DCCAF02A1BD08243FCBB9EFE24F2D9D056CCFF71268">>},
{<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
{<<"mimerl">>, <<"D0CD9FC04B9061F82490F6581E0128379830E78535E017F7780F37FEA7545726">>},
{<<"opentelemetry">>, <<"988AC3C26ACAC9720A1D4FB8D9DC52E95B45ECFEC2D5B5583276A09E8936BC5E">>},
{<<"opentelemetry_api">>, <<"7B69ED4F40025C005DE0B74FCE8C0549625D59CB4DF12D15C32FE6DC5076FF42">>},
{<<"opentelemetry_exporter">>, <<"1D8809C0D4F4ACF986405F7700ED11992BCBDB6A4915DD11921E80777FFA7167">>},
{<<"opentelemetry_semantic_conventions">>, <<"B67FE459C2938FCAB341CB0951C44860C62347C005ACE1B50F8402576F241435">>},
{<<"parse_trans">>, <<"16328AB840CC09919BD10DAB29E431DA3AF9E9E7E7E6F0089DD5A2D2820011D8">>},
{<<"pooler">>, <<"898CD1FA301FC42D4A8ED598CE139B71CA85B54C16AB161152B5CC5FBDCFA1A8">>},
{<<"prometheus">>, <<"20510F381DB1CCAB818B4CF2FAC5FA6AB5CC91BC364A154399901C001465F46F">>},
{<<"prometheus_cowboy">>, <<"CFCE0BC7B668C5096639084FCD873826E6220EA714BF60A716F5BD080EF2A99C">>},
{<<"prometheus_httpd">>, <<"F616ED9B85B536B195D94104063025A91F904A4CFC20255363F49A197D96C896">>},
{<<"ranch">>, <<"8C7A100A139FD57F17327B6413E4167AC559FBC04CA7448E9BE9057311597A1D">>},
{<<"ssl_verify_fun">>, <<"354C321CF377240C7B8716899E182CE4890C5938111A1296ADD3EC74CF1715DF">>},
{<<"tls_certificate_check">>, <<"BB7869C629DE4EC72D4652520C1AD2255BB5712AD09A6568C41B0294B3CEC78F">>},
{<<"unicode_util_compat">>, <<"BC84380C9AB48177092F43AC89E4DFA2C6D62B40B8BD132B1059ECC7232F9A78">>}]},
{pkg_hash_ext,[
{<<"accept">>, <<"11B18C220BCC2EAB63B5470C038EF10EB6783BCB1FCDB11AA4137DEFA5AC1BB8">>},
{<<"acceptor_pool">>, <<"0CBCD83FDC8B9AD2EEE2067EF8B91A14858A5883CB7CD800E6FCD5803E158788">>},
{<<"cache">>, <<"44516CE6FA03594D3A2AF025DD3A87BFE711000EB730219E1DDEFC816E0AA2F4">>},
{<<"certifi">>, <<"6AC7EFC1C6F8600B08D625292D4BBF584E14847CE1B6B5C44D983D273E1097EA">>},
{<<"chatterbox">>, <<"4F75B91451338BC0DA5F52F3480FA6EF6E3A2AEECFC33686D6B3D0A0948F31AA">>},
{<<"cowboy">>, <<"2C729F934B4E1AA149AFF882F57C6372C15399A20D54F65C8D67BEF583021BDE">>},
{<<"cowlib">>, <<"2B3E9DA0B21C4565751A6D4901C20D1B4CC25CBB7FD50D91D2AB6DD287BC86A9">>},
{<<"ctx">>, <<"A14ED2D1B67723DBEBBE423B28D7615EB0BDCBA6FF28F2D1F1B0A7E1D4AA5FC2">>},
{<<"eql">>, <<"513BE6B36EE86E8292B2B7475C257ABB66CED5AAD40CBF7AD21E233D0A3BF51D">>},
{<<"getopt">>, <<"7E01DE90AC540F21494FF72792B1E3162D399966EBBFC674B4CE52CB8F49324F">>},
{<<"gproc">>, <<"587E8AF698CCD3504CF4BA8D90F893EDE2B0F58CABB8A916E2BF9321DE3CF10B">>},
{<<"grpcbox">>, <<"4A3B5D7111DAABC569DC9CBD9B202A3237D81C80BF97212FBC676832CB0CEB17">>},
{<<"hackney">>, <<"9AFCDA620704D720DB8C6A3123E9848D09C87586DC1C10479C42627B905B5C5E">>},
{<<"hpack">>, <<"D6137D7079169D8C485C6962DFE261AF5B9EF60FBC557344511C1E65E3D95FB0">>},
{<<"idna">>, <<"92376EB7894412ED19AC475E4A86F7B413C1B9FBB5BD16DCCD57934157944CEA">>},
{<<"jsone">>, <<"08560B78624A12E0B5E7EC0271EC8CA38EF51F63D84D84843473E14D9B12618C">>},
{<<"jsx">>, <<"0C5CC8FDC11B53CC25CF65AC6705AD39E54ECC56D1C22E4ADB8F5A53FB9427F3">>},
{<<"metrics">>, <<"69B09ADDDC4F74A40716AE54D140F93BEB0FB8978D8636EADED0C31B6F099F16">>},
{<<"mimerl">>, <<"A1E15A50D1887217DE95F0B9B0793E32853F7C258A5CD227650889B38839FE9D">>},
{<<"opentelemetry">>, <<"8E09EDC26AAD11161509D7ECAD854A3285D88580F93B63B0B1CF0BAC332BFCC0">>},
{<<"opentelemetry_api">>, <<"6D7A27B7CAD2AD69A09CABF6670514CAFCEC717C8441BEB5C96322BAC3D05350">>},
{<<"opentelemetry_exporter">>, <<"2B40007F509D38361744882FD060A8841AF772AB83BB542AA5350908B303AD65">>},
{<<"opentelemetry_semantic_conventions">>, <<"D61FA1F5639EE8668D74B527E6806E0503EFC55A42DB7B5F39939D84C07D6895">>},
{<<"parse_trans">>, <<"07CD9577885F56362D414E8C4C4E6BDF10D43A8767ABB92D24CBE8B24C54888B">>},
{<<"pooler">>, <<"058D85C5081289B90E97E4DDDBC3BB5A3B4A19A728AB3BC88C689EFCC36A07C7">>},
{<<"prometheus">>, <<"4905FD2992F8038ECCD7AA0CD22F40637ED618C0BED1F75C05AACEC15B7545DE">>},
{<<"prometheus_cowboy">>, <<"BA286BECA9302618418892D37BCD5DC669A6CC001F4EB6D6AF85FF81F3F4F34C">>},
{<<"prometheus_httpd">>, <<"0BBE831452CFDF9588538EB2F570B26F30C348ADAE5E95A7D87F35A5910BCF92">>},
{<<"ranch">>, <<"49FBCFD3682FAB1F5D109351B61257676DA1A2FDBE295904176D5E521A2DDFE5">>},
{<<"ssl_verify_fun">>, <<"FE4C190E8F37401D30167C8C405EDA19469F34577987C76DDE613E838BBC67F8">>},
{<<"tls_certificate_check">>, <<"79D0C84EFFC7C81AC1E85FA38B1C33572FE2976FB8FAFDFB2F0140DE0442D494">>},
{<<"unicode_util_compat">>, <<"25EEE6D67DF61960CF6A794239566599B09E17E668D3700247BC498638152521">>}]}
].

@@ -1,15 +0,0 @@
{application, dominant-v2,
[{description, "An OTP application"},
{vsn, "0.1.0"},
{registered, []},
{mod, {dominant-v2_app, []}},
{applications,
[kernel,
stdlib
]},
{env,[]},
{modules, []},
{licenses, ["Apache-2.0"]},
{links, []}
]}.

@@ -1,35 +0,0 @@
%%%-------------------------------------------------------------------
%% @doc dominant-v2 top level supervisor.
%% @end
%%%-------------------------------------------------------------------
-module(dominant-v2_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%% sup_flags() = #{strategy => strategy(), % optional
%% intensity => non_neg_integer(), % optional
%% period => pos_integer()} % optional
%% child_spec() = #{id => child_id(), % mandatory
%% start => mfargs(), % mandatory
%% restart => restart(), % optional
%% shutdown => shutdown(), % optional
%% type => worker(), % optional
%% modules => modules()} % optional
init([]) ->
SupFlags = #{strategy => one_for_all,
intensity => 0,
period => 1},
ChildSpecs = [],
{ok, {SupFlags, ChildSpecs}}.
%% internal functions