Introduce CI and fix everything that is needed for it (#3)

* Introduce CI and fix everything that is needed for it

* Update damsel

* Update epg_connector

* Update workflow version

* Fix release

* Fix test

* Fix release

* Fix

* Fix 2
ndiezel0 2024-11-02 01:34:09 +03:00 committed by GitHub
parent 604d1b5b78
commit 3d016875de
21 changed files with 388 additions and 286 deletions

.env

@@ -1,4 +1,4 @@
SERVICE_NAME=dominant-v2
SERVICE_NAME=dmt
OTP_VERSION=27.1
REBAR_VERSION=3.23
THRIFT_VERSION=0.14.2.3

.github/workflows/build-image.yml

@@ -0,0 +1,21 @@
name: Build and publish Docker image
on:
push:
branches:
- 'master'
- 'epic/**'
pull_request:
branches: ['**']
env:
REGISTRY: ghcr.io
jobs:
build-push:
runs-on: ubuntu-latest
steps:
- uses: valitydev/action-deploy-docker@v2
with:
registry-username: ${{ github.actor }}
registry-access-token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/erlang-checks.yml

@@ -0,0 +1,40 @@
name: Erlang CI Checks
on:
push:
branches:
- 'master'
- 'epic/**'
pull_request:
branches: ['**']
jobs:
setup:
name: Load .env
runs-on: ubuntu-latest
outputs:
otp-version: ${{ steps.otp-version.outputs.version }}
rebar-version: ${{ steps.rebar-version.outputs.version }}
thrift-version: ${{ steps.thrift-version.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@v2
- run: grep -v '^#' .env >> $GITHUB_ENV
- id: otp-version
run: echo "::set-output name=version::$OTP_VERSION"
- id: rebar-version
run: echo "::set-output name=version::$REBAR_VERSION"
- id: thrift-version
run: echo "::set-output name=version::$THRIFT_VERSION"
run:
name: Run checks
needs: setup
uses: valitydev/erlang-workflows/.github/workflows/erlang-parallel-build.yml@v1.0.15
with:
otp-version: ${{ needs.setup.outputs.otp-version }}
rebar-version: ${{ needs.setup.outputs.rebar-version }}
use-thrift: true
thrift-version: ${{ needs.setup.outputs.thrift-version }}
run-ct-with-compose: true
upload-coverage: false

View File

@@ -16,6 +16,7 @@
envloader,
woody_user_identity,
jsx,
uuid,
dmt_core,
dmt_object,
opentelemetry_api,

View File

@@ -246,16 +246,16 @@ report_migrations(down, Results) ->
-define(DRIVER, epgsql).
record_migration(up, Conn, V) ->
?DRIVER:equery(Conn, "INSERT INTO __migrations (id) VALUES ($1)", [V]);
epgsql:equery(Conn, "INSERT INTO __migrations (id) VALUES ($1)", [V]);
record_migration(down, Conn, V) ->
?DRIVER:equery(Conn, "DELETE FROM __migrations WHERE id = $1", [V]).
epgsql:equery(Conn, "DELETE FROM __migrations WHERE id = $1", [V]).
apply_migrations(Type, Migrations, Conn) ->
Results = lists:foldl(
fun
(_, [{_, {error, _}} | _] = Acc) ->
Acc;
(Migration = {Version, _}, Acc) ->
({Version, _} = Migration, Acc) ->
case apply_migration(Type, Migration, Conn) of
ok -> [{Version, ok} | Acc];
{error, Error} -> [{Version, {error, Error}}]
@@ -269,7 +269,7 @@ apply_migrations(Type, Migrations, Conn) ->
apply_migration(Type, {Version, Migration}, Conn) ->
case eql:get_query(Type, Migration) of
{ok, Query} ->
case if_ok(?DRIVER:squery(Conn, Query)) of
case if_ok(epgsql:squery(Conn, Query)) of
ok ->
_ = record_migration(Type, Conn, Version),
ok;
@@ -317,12 +317,12 @@ applied_migrations(Args) when is_list(Args) ->
end
);
applied_migrations(Conn) when is_pid(Conn) ->
case ?DRIVER:squery(Conn, "SELECT id FROM __migrations ORDER by id ASC") of
case epgsql:squery(Conn, "SELECT id FROM __migrations ORDER by id ASC") of
{ok, _, Migs} ->
[binary_to_list(Mig) || {Mig} <- Migs];
{error, {error, error, <<"42P01">>, _, _, _}} ->
%% init migrations and restart
{ok, _, _} = ?DRIVER:squery(
{ok, _, _} = epgsql:squery(
Conn,
"CREATE TABLE __migrations ("
"id VARCHAR(255) PRIMARY KEY,"

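The hunks above drop the `?DRIVER` macro in favour of calling `epgsql` directly. For context, the two epgsql entry points used here differ in parameter handling; a minimal sketch (the `*_example` wrappers are hypothetical, SQL is taken from the hunks above):

    %% Sketch only: epgsql:equery/3 runs an extended (parameterized) query,
    %% while epgsql:squery/2 runs a simple, unparameterized one.
    record_migration_example(Conn, V) ->
        {ok, _} = epgsql:equery(Conn, "INSERT INTO __migrations (id) VALUES ($1)", [V]).

    applied_migrations_example(Conn) ->
        {ok, _Columns, Rows} = epgsql:squery(Conn, "SELECT id FROM __migrations ORDER by id ASC"),
        [binary_to_list(Id) || {Id} <- Rows].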
View File

@@ -19,7 +19,7 @@ get_object({version, V}, ObjectRef) ->
data := Data,
created_at := CreatedAt
}} ->
io:format("get_object Data ~p~n", [Data]),
% io:format("get_object Data ~p~n", [Data]),
{ok, #domain_conf_v2_VersionedObject{
global_version = GlobalVersion,
%% TODO implement local versions
@@ -37,7 +37,6 @@ get_object({head, #domain_conf_v2_Head{}}, ObjectRef) ->
data := Data,
created_at := CreatedAt
}} ->
io:format("get_object head Data ~p~n", [Data]),
{ok, #domain_conf_v2_VersionedObject{
global_version = GlobalVersion,
%% TODO implement local versions
@@ -82,19 +81,12 @@ assemble_operations_(
} = NewObject,
Updates1 = update_objects_added_refs({temporary, TmpID}, Refers, UpdatesAcc),
io:format("~n {insert, #domain_conf_v2_InsertOp{} = InsertOp} ~p ~n", [Refers]),
{[NewObject | InsertsAcc], Updates1, UpdatedObjectsAcc};
{update, #domain_conf_v2_UpdateOp{targeted_ref = Ref} = UpdateOp} ->
case get_original_object_changes(UpdatesAcc, Ref) of
%% TODO Figure out how to stop several updates for the same object happening
Changes ->
Changes = get_original_object_changes(UpdatesAcc, Ref),
{ok, ObjectUpdate} = dmt_object:update_object(UpdateOp, Changes),
io:format("~n {update, #domain_conf_v2_UpdateOp{targeted_ref = Ref} = UpdateOp} ~p ~n", [{Changes, ObjectUpdate}]),
UpdatesAcc1 = update_referenced_objects(Changes, ObjectUpdate, UpdatesAcc),
{InsertsAcc, UpdatesAcc1#{Ref => ObjectUpdate}, [Ref | UpdatedObjectsAcc]}
end;
{InsertsAcc, UpdatesAcc1#{Ref => ObjectUpdate}, [Ref | UpdatedObjectsAcc]};
{remove, #domain_conf_v2_RemoveOp{ref = Ref}} ->
#{
references := OriginalReferences
@@ -102,7 +94,6 @@ assemble_operations_(
UpdatesAcc1 = update_objects_removed_refs(Ref, OriginalReferences, UpdatesAcc),
NewObjectState = dmt_object:remove_object(OG),
io:format("~n UpdatesAcc1#{Ref => NewObjectState} ~p ~n", [UpdatesAcc1#{Ref => NewObjectState}]),
{InsertsAcc, UpdatesAcc1#{Ref => NewObjectState}, [Ref | UpdatedObjectsAcc]}
end.
@@ -164,21 +155,6 @@ get_original_object_changes(Updates, Ref) ->
#{Ref := Object} ->
Object;
_ ->
%% {ok, #{
%% id := ID,
%% type := Type,
%% referenced_by := RefdBy,
%% references := Refers,
%% data := Data
%% }} = get_latest_target_object(Ref),
%% %% NOTE this is done in order to decouple object type from object change type
%% #{
%% id => ID,
%% type => Type,
%% referenced_by => RefdBy,
%% references => Refers,
%% data => Data
%% }
{ok, Res} = get_latest_target_object(Ref),
{Type, _} = Ref,
Res#{
@@ -186,46 +162,10 @@ get_original_object_changes(Updates, Ref) ->
}
end.
%% NOTE Add new tables here
-define(TABLES, [
category,
currency,
business_schedule,
calendar,
payment_method,
payout_method,
bank,
contract_template,
term_set_hierarchy,
payment_institution,
provider,
terminal,
inspector,
system_account_set,
external_account_set,
proxy,
globals,
cash_register_provider,
routing_rules,
bank_card_category,
criterion,
document_type,
payment_service,
payment_system,
bank_card_token_service,
mobile_op_user,
crypto_currency,
country,
trade_bloc,
identity_provider,
limit_config
]).
commit(Version, Commit, CreatedBy) ->
{InsertObjects, UpdateObjects0, ChangedObjectIds} = assemble_operations(Commit),
case
epgsql_pool:transaction(
Result = epg_pool:transaction(
default_pool,
fun(Worker) ->
ok = check_versions_sql(Worker, ChangedObjectIds, Version),
@@ -235,8 +175,8 @@ commit(Version, Commit, CreatedBy) ->
ok = update_objects(Worker, UpdateObjects1, NewVersion),
{ok, NewVersion, maps:values(PermanentIDsMaps)}
end
)
of
),
case Result of
{ok, ResVersion, NewObjectsIDs} ->
NewObjects = lists:map(
fun(#{data := Data}) ->
@@ -257,7 +197,16 @@ replace_tmp_ids_in_updates(UpdateObjects, PermanentIDsMaps) ->
#{
referenced_by := ReferencedBy
} = UpdateObject,
NewReferencedBy = lists:map(
NewReferencedBy = replace_referenced_by_ids(ReferencedBy, PermanentIDsMaps),
UpdateObject#{
referenced_by => NewReferencedBy
}
end,
UpdateObjects
).
replace_referenced_by_ids(ReferencedBy, PermanentIDsMaps) ->
lists:map(
fun(Ref) ->
case Ref of
{temporary, TmpID} ->
@@ -267,18 +216,11 @@ replace_tmp_ids_in_updates(UpdateObjects, PermanentIDsMaps) ->
end
end,
ReferencedBy
),
UpdateObject#{
referenced_by => NewReferencedBy
}
end,
UpdateObjects
).
check_versions_sql(Worker, ChangedObjectIds, Version) ->
lists:foreach(
fun({ChangedObjectType, ChangedObjectRef0} = ChangedObjectId) ->
io:format("ChangedObjectRef0 ~p~n", [ChangedObjectRef0]),
ChangedObjectRef1 = to_string(ChangedObjectRef0),
Query0 =
io_lib:format("""
@@ -288,7 +230,7 @@ check_versions_sql(Worker, ChangedObjectIds, Version) ->
ORDER BY global_version DESC
LIMIT 1
""", [ChangedObjectType]),
case epgsql_pool:query(Worker, Query0, [ChangedObjectRef1]) of
case epg_pool:query(Worker, Query0, [ChangedObjectRef1]) of
{ok, _Columns, []} ->
throw({unknown_object_update, ChangedObjectId});
{ok, _Columns, [{ChangedObjectRef, MostRecentVersion}]} when MostRecentVersion > Version ->
@@ -309,7 +251,7 @@ get_new_version(Worker, CreatedBy) ->
INSERT INTO GLOBAL_VERSION (CREATED_BY)
VALUES ($1::uuid) RETURNING version;
""",
case epgsql_pool:query(Worker, Query1, [CreatedBy]) of
case epg_pool:query(Worker, Query1, [CreatedBy]) of
{ok, 1, _Columns, [{NewVersion}]} ->
NewVersion;
{error, Reason} ->
@@ -339,7 +281,8 @@ insert_object(Worker, Type, ID0, Sequence, Version, References0, Data0) ->
ID1 = to_string(ID0),
Data1 = to_string(Data0),
References1 = lists:map(fun to_string/1, References0),
{Query, Params} =
Params0 = [Version, References1, [], Data1],
{Query, Params1} =
case check_if_force_id_required(Worker, Type) of
true ->
Query0 =
@@ -347,18 +290,16 @@ insert_object(Worker, Type, ID0, Sequence, Version, References0, Data0) ->
INSERT INTO ~p (id, global_version, references_to, referenced_by, data, is_active)
VALUES ($1, $2, $3, $4, $5, TRUE);
""", [Type]),
Params0 = [ID1, Version, References1, [], Data1],
{Query0, Params0};
{Query0, [ID1 | Params0]};
false ->
Query1 =
io_lib:format("""
INSERT INTO ~p (id, sequence, global_version, references_to, referenced_by, data, is_active)
VALUES ($1, $2, $3, $4, $5, $6, TRUE);
""", [Type]),
Params1 = [ID1, Sequence, Version, References1, [], Data1],
{Query1, Params1}
{Query1, [ID1, Sequence | Params0]}
end,
case epgsql_pool:query(Worker, Query, Params) of
case epg_pool:query(Worker, Query, Params1) of
{ok, 1} ->
ID0;
{error, Reason} ->
@@ -393,11 +334,8 @@ get_object_field({_, _, _, data, _}, Data, _Ref) ->
Data.
update_objects(Worker, UpdateObjects, Version) ->
io:format("~n update_objects UpdateObjects ~p~n", [UpdateObjects]),
maps:foreach(
fun({_, ID}, UpdateObject) ->
io:format("~n update_objects ID ~p~n", [ID]),
#{
id := ID,
type := Type,
@@ -423,7 +361,7 @@ update_object(Worker, Type, ID0, References0, ReferencedBy0, IsActive, Data0, Ve
VALUES ($1, $2, $3, $4, $5, $6);
""", [Type]),
Params = [ID1, Version, References1, ReferencedBy1, Data1, IsActive],
case epgsql_pool:query(Worker, Query, Params) of
case epg_pool:query(Worker, Query, Params) of
{ok, 1} ->
ok;
{error, Reason} ->
@@ -459,10 +397,16 @@ check_if_force_id_required(Worker, Type) ->
FROM information_schema.columns
WHERE table_name = $1 AND column_name = 'sequence';
""",
case epgsql_pool:query(Worker, Query, [Type]) of
case epg_pool:query(Worker, Query, [Type]) of
{ok, _Columns, []} ->
true;
{ok, _Columns, Rows} ->
has_sequence_column(Rows);
{error, Reason} ->
throw({error, Reason})
end.
has_sequence_column(Rows) ->
lists:all(
fun(Row) ->
case Row of
@@ -473,17 +417,14 @@ check_if_force_id_required(Worker, Type) ->
end
end,
Rows
);
{error, Reason} ->
throw({error, Reason})
end.
).
get_last_sequence(Worker, Type) ->
Query = io_lib:format("""
SELECT MAX(sequence)
FROM ~p;
""", [Type]),
case epgsql_pool:query(Worker, Query) of
case epg_pool:query(Worker, Query) of
{ok, _Columns, [{null}]} ->
{ok, 0};
{ok, _Columns, [{LastID}]} ->
@@ -516,7 +457,7 @@ check_if_id_exists(Worker, ID0, Type0) ->
WHERE id = $1;
""", [Type0]),
ID1 = to_string(ID0),
case epgsql_pool:query(Worker, Query, [ID1]) of
case epg_pool:query(Worker, Query, [ID1]) of
{ok, _Columns, []} ->
false;
{ok, _Columns, [{ID1}]} ->
@@ -537,14 +478,39 @@ get_target_objects(Worker, Refs, Version) ->
Refs
).
get_target_object(Ref, Version) ->
get_target_object(default_pool, Ref, Version).
get_target_object(Worker, Ref, Version) ->
% First check if the version exists
case check_version_exists(Worker, Version) of
{ok, exists} ->
fetch_object(Worker, Ref, Version);
{ok, not_exists} ->
{error, global_version_not_found};
Error ->
Error
end.
check_version_exists(Worker, Version) ->
VersionRequest = """
SELECT 1
FROM global_version
WHERE version = $1
LIMIT 1
""",
case epg_pool:query(Worker, VersionRequest, [Version]) of
{ok, _Columns, []} ->
{ok, not_exists};
{ok, _Columns, [_Row]} ->
{ok, exists};
Error ->
Error
end.
fetch_object(Worker, Ref, Version) ->
{Type, ID} = Ref,
ID0 = to_string(ID),
io:format("~n get_target_object ID ~p ID0 ~p and Version ~p~n", [ID, ID0, Version]),
Request = io_lib:format("""
SELECT id,
global_version,
@@ -558,11 +524,10 @@ get_target_object(Worker, Ref, Version) ->
ORDER BY global_version DESC
LIMIT 1
""", [Type]),
case epgsql_pool:query(Worker, Request, [ID0, Version]) of
case epg_pool:query(Worker, Request, [ID0, Version]) of
{ok, _Columns, []} ->
{error, {object_not_found, Ref, Version}};
{error, object_not_found};
{ok, Columns, Rows} ->
io:format("get_target_object Res ~p ~n", [{Columns, Rows}]),
[Result | _] = to_marshalled_maps(Columns, Rows),
{ok, Result}
end.
@@ -583,7 +548,7 @@ get_latest_target_object(Ref) ->
ORDER BY global_version DESC
LIMIT 1
""", [Type]),
case epgsql_pool:query(default_pool, Request, [ID0]) of
case epg_pool:query(default_pool, Request, [ID0]) of
{ok, _Columns, []} ->
{error, {object_not_found, Ref}};
{ok, Columns, Rows} ->

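Every `epgsql_pool:query` and `epgsql_pool:transaction` call in this module is swapped for its `epg_pool` counterpart with the same shape: `query` accepts either a pool name or a checked-out connection, and `transaction` hands a connection to its callback. A minimal sketch of the pattern, assuming `epg_pool` (from valitydev/epg_connector) mirrors the epgsql_pool API as this commit relies on; the `*_example` wrappers are hypothetical and the SQL is taken from surrounding hunks:

    %% Sketch only: epg_pool call shapes as used throughout this commit.
    insert_user_example(Name, Email) ->
        Sql = "INSERT INTO op_user (name, email) VALUES ($1, $2) returning id",
        case epg_pool:query(default_pool, Sql, [Name, Email]) of
            {ok, 1, _Columns, [{ID}]} -> {ok, ID};
            {error, Reason} -> {error, Reason}
        end.

    new_version_example(CreatedBy) ->
        %% The callback receives a connection (Worker) usable with epg_pool:query/3.
        epg_pool:transaction(
            default_pool,
            fun(Worker) ->
                Query = "INSERT INTO GLOBAL_VERSION (CREATED_BY) VALUES ($1::uuid) RETURNING version;",
                {ok, 1, _Columns, [{NewVersion}]} = epg_pool:query(Worker, Query, [CreatedBy]),
                NewVersion
            end
        ).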
View File

@@ -23,37 +23,39 @@ do_handle_function('CheckoutObject', {VersionRef, ObjectRef}, _Context, _Options
woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end;
do_handle_function('GetLocalVersions', {Request}, _Context, _Options) ->
#domain_conf_v2_GetLocalVersionsRequest{
ref = Ref,
limit = Limit,
continuation_token = ContinuationToken
} = Request,
%% Retrieve local versions with pagination
case dmt_repository:get_local_versions(Ref, Limit, ContinuationToken) of
{ok, Versions, NewToken} ->
{ok, #domain_conf_v2_GetVersionsResponse{
result = Versions,
continuation_token = NewToken
}};
{error, object_not_found} ->
woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end;
do_handle_function('GetGlobalVersions', {Request}, _Context, _Options) ->
#domain_conf_v2_GetGlobalVersionsRequest{
limit = Limit,
continuation_token = ContinuationToken
} = Request,
%% Retrieve global versions with pagination
case dmt_repository:get_global_versions(Limit, ContinuationToken) of
{ok, Versions, NewToken} ->
{ok, #domain_conf_v2_GetVersionsResponse{
result = Versions,
continuation_token = NewToken
}};
{error, Reason} ->
woody_error:raise(system, {internal, Reason})
end.
% TODO
% do_handle_function('GetLocalVersions', {Request}, _Context, _Options) ->
% #domain_conf_v2_GetLocalVersionsRequest{
% ref = Ref,
% limit = Limit,
% continuation_token = ContinuationToken
% } = Request,
% %% Retrieve local versions with pagination
% case dmt_repository:get_local_versions(Ref, Limit, ContinuationToken) of
% {ok, Versions, NewToken} ->
% {ok, #domain_conf_v2_GetVersionsResponse{
% result = Versions,
% continuation_token = NewToken
% }};
% {error, object_not_found} ->
% woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
% {error, Reason} ->
% woody_error:raise(system, {internal, Reason})
% end;
% TODO
% do_handle_function('GetGlobalVersions', {Request}, _Context, _Options) ->
% #domain_conf_v2_GetGlobalVersionsRequest{
% limit = Limit,
% continuation_token = ContinuationToken
% } = Request,
% %% Retrieve global versions with pagination
% case dmt_repository:get_global_versions(Limit, ContinuationToken) of
% {ok, Versions, NewToken} ->
% {ok, #domain_conf_v2_GetVersionsResponse{
% result = Versions,
% continuation_token = NewToken
% }};
% {error, Reason} ->
% woody_error:raise(system, {internal, Reason})
% end.

View File

@@ -13,7 +13,6 @@
-export([get_service/1]).
-define(SERVER, ?MODULE).
-define(APP, dmt).
-define(DEFAULT_DB, default_db).
@@ -109,7 +108,7 @@ get_repository_handlers() ->
})
].
-spec get_handler(repository | repository_client | state_processor, woody:options()) ->
-spec get_handler(repository | repository_client | user_op, woody:options()) ->
woody:http_handler(woody:th_handler()).
get_handler(repository, Options) ->
{"/v1/domain/repository", {
@@ -137,7 +136,7 @@ get_service(user_op) ->
-spec enable_health_logging(erl_health:check()) -> erl_health:check().
enable_health_logging(Check) ->
EvHandler = {erl_health_event_handler, []},
maps:map(fun(_, V = {_, _, _}) -> #{runner => V, event_handler => EvHandler} end, Check).
maps:map(fun(_, {_, _, _} = V) -> #{runner => V, event_handler => EvHandler} end, Check).
-spec get_prometheus_route() -> {iodata(), module(), _Opts :: any()}.
get_prometheus_route() ->

View File

@@ -16,7 +16,7 @@
insert_user(Name, Email) ->
Sql = "INSERT INTO op_user (name, email) VALUES ($1, $2) returning id",
Params = [Name, Email],
case epgsql_pool:query(?POOL_NAME, Sql, Params) of
case epg_pool:query(?POOL_NAME, Sql, Params) of
{ok, 1, _Columns, [{ID}]} ->
{ok, ID};
{error, Reason} ->
@@ -27,7 +27,7 @@ insert_user(Name, Email) ->
get_user(UserOpID) ->
Sql = "SELECT id, name, email FROM op_user WHERE id = $1::uuid",
Params = [UserOpID],
case epgsql_pool:query(?POOL_NAME, Sql, Params) of
case epg_pool:query(?POOL_NAME, Sql, Params) of
{ok, _Columns, [{ID, Name, Email}]} ->
{ok, #domain_conf_v2_UserOp{id = ID, name = Name, email = Email}};
{ok, _, []} ->
@@ -40,7 +40,7 @@ get_user(UserOpID) ->
delete_user(UserOpID) ->
Sql = "DELETE FROM op_user WHERE id = $1::uuid",
Params = [UserOpID],
case epgsql_pool:query(?POOL_NAME, Sql, Params) of
case epg_pool:query(?POOL_NAME, Sql, Params) of
{ok, _, Result} when Result =:= [] ->
{error, user_not_found};
{ok, 1} ->

View File

@@ -11,9 +11,6 @@
-export([cleanup_db/0]).
-include_lib("damsel/include/dmsl_base_thrift.hrl").
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-export_type([config/0]).
-export_type([test_case_name/0]).
-export_type([group_name/0]).
@@ -21,6 +18,7 @@
%%
-type app_name() :: atom().
-export_type([app_name/0]).
-spec start_app(app_name()) -> {[app_name()], map()}.
start_app(scoper = AppName) ->
@@ -51,13 +49,13 @@ start_app(dmt = AppName) ->
}},
{services, #{
repository => #{
url => <<"http://dominant-v2:8022/v1/domain/repository">>
url => <<"http://dmt:8022/v1/domain/repository">>
},
repository_client => #{
url => <<"http://dominant-v2:8022/v1/domain/repository_client">>
url => <<"http://dmt:8022/v1/domain/repository_client">>
},
user_op => #{
url => <<"http://dominant-v2:8022/v1/domain/user_op">>
url => <<"http://dmt:8022/v1/domain/user_op">>
}
}}
]),
@@ -131,17 +129,13 @@ cfg(Key, Config) ->
%%
-define(ROOT_URL, "http://dominant-v2:8022").
-spec create_client() -> dmt_client_api:t().
create_client() ->
create_client_w_context(woody_context:new()).
%% {?ROOT_URL, create_client_w_context(woody_context:new())}.
-spec create_client(woody:trace_id()) -> dmt_client_api:t().
create_client(TraceID) ->
create_client_w_context(woody_context:new(TraceID)).
%% {?ROOT_URL, create_client_w_context(woody_context:new(TraceID))}.
create_client_w_context(WoodyCtx) ->
dmt_client_api:new(WoodyCtx).
@@ -165,5 +159,5 @@ cleanup_db() ->
END LOOP;
END $$;
""",
{ok, _, _} = epgsql_pool:query(default_pool, Query),
{ok, _, _} = epg_pool:query(default_pool, Query),
ok.

View File

@@ -2,8 +2,7 @@
-include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("stdlib/include/assert.hrl").
%% API
-export([
@@ -53,7 +52,7 @@ all() ->
[
{group, create_user_op_test},
{group, repository_tests}
%% {group, repository_client_tests}
%% {group, repository_client_tests}
].
%% Define test groups
@@ -70,9 +69,7 @@ groups() ->
insert_remove_referencing_object_success_test,
update_object_success_test
]},
{repository_client_tests, [], [
]}
{repository_client_tests, [], []}
].
init_per_group(repository_client_tests, C) ->
@@ -141,7 +138,8 @@ insert_remove_referencing_object_success_test(Config) ->
Commit1 = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {proxy, #domain_ProxyDefinition{
object =
{proxy, #domain_ProxyDefinition{
name = <<"proxy">>,
description = <<"proxy_description">>,
url = <<"http://someurl">>,
@@ -159,11 +157,11 @@ insert_remove_referencing_object_success_test(Config) ->
]
}} = dmt_client:commit(Revision1, Commit1, UserOpID, Client),
Commit2 = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {provider, #domain_Provider{
object =
{provider, #domain_Provider{
name = <<"name">>,
description = <<"description">>,
proxy = #domain_Proxy{
@@ -184,7 +182,7 @@ insert_remove_referencing_object_success_test(Config) ->
]
}} = dmt_client:commit(Revision2, Commit2, UserOpID, Client),
%% try to remove proxy
%% try to remove proxy
Commit3 = #domain_conf_v2_Commit{
ops = [
{remove, #domain_conf_v2_RemoveOp{
@@ -193,7 +191,6 @@ insert_remove_referencing_object_success_test(Config) ->
]
},
{ok, _} = dmt_client:commit(Revision3, Commit3, UserOpID, Client).
%% FIXME reference collecting doesn't work. Need to fix ASAP
@@ -295,7 +292,6 @@ insert_object_forced_id_success_test(Config) ->
] = ordsets:to_list(NewObjectsSet),
?assertMatch(CategoryRef, Ref).
update_object_success_test(Config) ->
Client = dmt_ct_helper:cfg(client, Config),
@@ -306,7 +302,8 @@ update_object_success_test(Config) ->
Commit1 = #domain_conf_v2_Commit{
ops = [
{insert, #domain_conf_v2_InsertOp{
object = {proxy, #domain_ProxyDefinition{
object =
{proxy, #domain_ProxyDefinition{
name = <<"proxy">>,
description = <<"proxy_description">>,
url = <<"http://someurl">>,
@@ -324,8 +321,8 @@ update_object_success_test(Config) ->
]
}} = dmt_client:commit(Revision1, Commit1, UserOpID, Client),
NewObject = {proxy, #domain_ProxyObject{
NewObject =
{proxy, #domain_ProxyObject{
ref = ProxyRef,
data = #domain_ProxyDefinition{
name = <<"proxy2">>,

View File

@@ -40,7 +40,7 @@ references(Object, DataType) ->
references(undefined, _StructInfo, Refs) ->
Refs;
references({Tag, Object}, StructInfo = {struct, union, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
references({Tag, Object}, {struct, union, FieldsInfo} = StructInfo, Refs) when is_list(FieldsInfo) ->
case get_field_info(Tag, StructInfo) of
false ->
erlang:error({<<"field info not found">>, Tag, StructInfo});

View File

@@ -2,7 +2,7 @@
-export([parse_transform/2]).
-spec parse_transform(Forms, [compile:option()]) -> Forms when
-spec parse_transform(Forms, term()) -> Forms when
Forms :: [erl_parse:abstract_form() | erl_parse:form_info()].
parse_transform(Forms, _Options) ->
[
@@ -22,7 +22,7 @@ transform(Form) ->
Form
end.
transform_function(Name = is_reference_type, 1, FormWas) ->
transform_function(is_reference_type = Name, 1, FormWas) ->
% NOTE
% Replacing `dmt_domain:is_reference_type/1` with a code which does something similar to:
% ```
@@ -57,7 +57,7 @@ transform_function(Name = is_reference_type, 1, FormWas) ->
)
),
Form;
transform_function(_Name = is_reference_type, 2, _FormWas) ->
transform_function(is_reference_type, 2, _FormWas) ->
% NOTE
% We need to make `is_reference_type/2` disappear, otherwise it will trigger _unused function_
% warning.

View File

@@ -1,50 +0,0 @@
-module(dmt_history).
-export([head/1]).
-export([head/2]).
-export([travel/3]).
-include_lib("damsel/include/dmsl_domain_conf_thrift.hrl").
-type history() :: dmsl_domain_conf_thrift:'History'().
-type version() :: dmsl_domain_conf_thrift:'Version'().
-type snapshot() :: dmsl_domain_conf_thrift:'Snapshot'().
-spec head(history()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
head(History) ->
head(History, #domain_conf_Snapshot{version = 0, domain = dmt_domain:new()}).
-spec head(history(), snapshot()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
head(History, Snapshot) when map_size(History) =:= 0 ->
{ok, Snapshot};
head(History, Snapshot) ->
Head = lists:max(maps:keys(History)),
travel(Head, History, Snapshot).
-spec travel(version(), history(), snapshot()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
travel(To, _History, #domain_conf_Snapshot{version = From} = Snapshot) when To =:= From ->
{ok, Snapshot};
travel(To, History, #domain_conf_Snapshot{version = From, domain = Domain}) when To > From ->
#domain_conf_Commit{ops = Ops} = maps:get(From + 1, History),
case dmt_domain:apply_operations(Ops, Domain) of
{ok, NewDomain} ->
NextSnapshot = #domain_conf_Snapshot{
version = From + 1,
domain = NewDomain
},
travel(To, History, NextSnapshot);
{error, _} = Error ->
Error
end;
travel(To, History, #domain_conf_Snapshot{version = From, domain = Domain}) when To < From ->
#domain_conf_Commit{ops = Ops} = maps:get(From, History),
case dmt_domain:revert_operations(Ops, Domain) of
{ok, NewDomain} ->
PreviousSnapshot = #domain_conf_Snapshot{
version = From - 1,
domain = NewDomain
},
travel(To, History, PreviousSnapshot);
{error, _} = Error ->
Error
end.

View File

@@ -8,7 +8,7 @@
-define(DOMAIN, dmsl_domain_thrift).
get_domain_object_ref(DomainObject = {Tag, _Struct}) ->
get_domain_object_ref({Tag, _Struct} = DomainObject) ->
{_Type, Ref} = get_domain_object_field(ref, DomainObject),
{Tag, Ref}.
@@ -24,8 +24,12 @@ get_refless_data({Tag, Struct}) ->
get_refless_object_schema(Tag) ->
SchemaInfo = get_struct_info('ReflessDomainObject'),
{_, _, {struct, _, {_, ObjectStructName}}, _, _} = get_field_info(Tag, SchemaInfo),
{ObjectStructName, get_struct_info(ObjectStructName)}.
case get_field_info(Tag, SchemaInfo) of
{_, _, {struct, _, {_, ObjectStructName}}, _, _} ->
{ObjectStructName, get_struct_info(ObjectStructName)};
false ->
erlang:error({field_info_not_found, Tag, SchemaInfo})
end.
%% DomainObject ZONE
@@ -70,7 +74,7 @@ references(Object, DataType) ->
references(undefined, _StructInfo, Refs) ->
Refs;
references({Tag, Object}, StructInfo = {struct, union, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
references({Tag, Object}, {struct, union, FieldsInfo} = StructInfo, Refs) when is_list(FieldsInfo) ->
case get_field_info(Tag, StructInfo) of
false ->
erlang:error({<<"field info not found">>, Tag, StructInfo});

View File

@@ -4,7 +4,7 @@ services:
jaeger:
condition: service_healthy
environment:
- OTEL_SERVICE_NAME=dominant-v2
- OTEL_SERVICE_NAME=dmt
- OTEL_TRACES_EXPORTER=otlp
- OTEL_TRACES_SAMPLER=parentbased_always_on
- OTEL_EXPORTER_OTLP_PROTOCOL=http_protobuf

View File

@@ -1,5 +1,4 @@
services:
testrunner:
image: $DEV_IMAGE_TAG
environment:
@@ -7,7 +6,7 @@ services:
POSTGRES_HOST: db
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: dmtv2
POSTGRES_DB: dmt
build:
dockerfile: Dockerfile.dev
context: .
@@ -16,7 +15,7 @@ services:
THRIFT_VERSION: $THRIFT_VERSION
volumes:
- .:$PWD
hostname: dominant-v2
hostname: dmt
depends_on:
db:
condition: service_healthy

elvis.config

@@ -0,0 +1,79 @@
[
{elvis, [
{config, [
#{
dirs => ["apps/**/src", "apps/**/include"],
filter => "*.erl",
ruleset => erl_files,
rules => [
{elvis_text_style, line_length, #{limit => 120}},
{elvis_text_style, no_trailing_whitespace, #{ignore_empty_lines => true}},
{elvis_style, nesting_level, #{
level => 3,
ignore => [dmt_db_migration]
}},
{elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
{elvis_style, no_if_expression, disable},
%% Project rules
{elvis_style, atom_naming_convention, disable},
{elvis_style, macro_names, disable},
{elvis_style, no_throw, disable}
]
},
#{
dirs => ["apps/**/test"],
filter => "*.erl",
ruleset => erl_files,
rules => [
{elvis_text_style, line_length, #{limit => 120}},
{elvis_text_style, no_trailing_whitespace, #{ignore_empty_lines => true}},
{elvis_style, nesting_level, #{level => 3}},
{elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
{elvis_style, no_if_expression, disable},
%% Project rules
% We want to use `ct:pal/2` and friends in test code.
{elvis_style, no_debug_call, disable},
% Assert macros can trigger use of ignored binding, yet we want them for better
% readability.
{elvis_style, used_ignored_variable, disable},
% Tests are usually more comprehensible when a bit more verbose.
{elvis_style, dont_repeat_yourself, #{min_complexity => 50}},
{elvis_style, god_modules, disable},
{elvis_style, macro_names, disable}
]
},
#{
dirs => ["."],
filter => "Makefile",
ruleset => makefiles
},
#{
dirs => ["."],
filter => "elvis.config",
ruleset => elvis_config
},
#{
dirs => [".", "apps/*"],
filter => "rebar.config",
ruleset => rebar_config,
rules => [
{elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_text_style, no_tabs},
{elvis_text_style, no_trailing_whitespace},
%% Temporarily disabled till regex pattern is available
{elvis_project, no_deps_master_rebar, disable},
{elvis_project, no_branch_deps, disable}
]
},
#{
dirs => ["apps/**/src"],
filter => "*.app.src",
rules => [
{elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_text_style, no_tabs},
{elvis_text_style, no_trailing_whitespace}
]
}
]}
]}
].

erlang_ls.config

@@ -0,0 +1,2 @@
include_dirs:
- "_build/default/lib"

View File

@@ -35,10 +35,10 @@
{cowboy_access_log, {git, "https://github.com/valitydev/cowboy_access_log.git", {branch, "master"}}},
{woody_user_identity, {git, "https://github.com/valitydev/woody_erlang_user_identity.git", {branch, "master"}}},
{woody, {git, "https://github.com/valitydev/woody_erlang.git", {branch, master}}},
{damsel, {git, "git@github.com:valitydev/damsel.git", {branch, "IMP-281/dmt_proto"}}},
{damsel, {git, "https://github.com/valitydev/damsel.git", {branch, "IMP-281/dmt_v2_proto"}}},
%% Libraries for postgres interaction
{epg_connector, {git, "git@github.com:valitydev/epg_connector.git", {branch, master}}},
{epg_connector, {git, "https://github.com/valitydev/epg_connector.git", {branch, master}}},
{epgsql, {git, "https://github.com/epgsql/epgsql.git", {tag, "4.7.1"}}},
{epgsql_pool, {git, "https://github.com/wgnet/epgsql_pool", {branch, "master"}}},
{herd, {git, "https://github.com/wgnet/herd.git", {tag, "1.3.4"}}},
@@ -48,7 +48,7 @@
eql,
getopt,
{prometheus, "4.6.0"},
{prometheus, "4.11.0"},
{prometheus_cowboy, "0.1.8"},
%% OpenTelemetry deps
@@ -78,6 +78,48 @@
{incremental, true}
]}.
{profiles, [
{prod, [
{deps, [
% for introspection on production
{recon, "2.5.2"},
{logger_logstash_formatter,
{git, "https://github.com/valitydev/logger_logstash_formatter.git", {ref, "08a66a6"}}},
{iosetopts, {git, "https://github.com/valitydev/iosetopts.git", {ref, "edb445c"}}}
]},
{relx, [
{release, {dmt, "0.1"}, [
iosetopts,
{recon, load},
{runtime_tools, load},
{tools, load},
{opentelemetry, temporary},
logger_logstash_formatter,
sasl,
dmt
]},
{mode, minimal},
{sys_config, "./config/sys.config"},
{vm_args, "./config/vm.args"},
{extended_start_script, true}
]}
]},
{test, [
{deps, [
{meck, "0.9.2"}
]},
{dialyzer, [
{plt_extra_apps, [
eunit,
common_test,
runtime_tools,
damsel,
meck
]}
]}
]}
]}.
{project_plugins, [
{rebar3_lint, "3.2.6"},
{erlfmt, "1.5.0"},
@@ -111,3 +153,10 @@
dmt
]}
]}.
% Workaround for the outdated rebar3_elvis_plugin, which otherwise breaks the build.
{overrides, [
{override, rebar3_elvis_plugin, [
{src_dirs, ["unknown"]}
]}
]}.

View File

@@ -28,7 +28,7 @@
{<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.11.0">>},2},
{<<"ctx">>,{pkg,<<"ctx">>,<<"0.6.0">>},2},
{<<"damsel">>,
{git,"git@github.com:valitydev/damsel.git",
{git,"https://github.com/valitydev/damsel.git",
{ref,"de7ce44874984331f8b180bef3a786bd35573e48"}},
0},
{<<"envloader">>,
@@ -36,8 +36,8 @@
{ref,"27a97e04f35c554995467b9236d8ae0188d468c7"}},
0},
{<<"epg_connector">>,
{git,"git@github.com:valitydev/epg_connector.git",
{ref,"7fc3aa1b6d9c8be69a64fefd18f6aaa416dcd572"}},
{git,"https://github.com/valitydev/epg_connector.git",
{ref,"19c1a4bd2cde9823b6576bc446e402b90791c9c0"}},
0},
{<<"epgsql">>,
{git,"https://github.com/epgsql/epgsql.git",