Mirror of https://github.com/valitydev/dominant-v2.git (synced 2024-11-06 00:35:21 +00:00)
Introduce CI and fix everything that is needed for it (#3)

* Introduce CI and fix everything that is needed for it
* Update damsel
* Update epg_connector
* Update workflow version
* Fix release
* Fix test
* Fix release
* Fix
* Fix 2

Parent: 604d1b5b78
Commit: 3d016875de
.env (2 lines changed)

@@ -1,4 +1,4 @@
-SERVICE_NAME=dominant-v2
+SERVICE_NAME=dmt
 OTP_VERSION=27.1
 REBAR_VERSION=3.23
 THRIFT_VERSION=0.14.2.3
.github/workflows/build-image.yml (new file, vendored, 21 lines)

@@ -0,0 +1,21 @@
+name: Build and publish Docker image
+
+on:
+  push:
+    branches:
+      - 'master'
+      - 'epic/**'
+  pull_request:
+    branches: ['**']
+
+env:
+  REGISTRY: ghcr.io
+
+jobs:
+  build-push:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: valitydev/action-deploy-docker@v2
+        with:
+          registry-username: ${{ github.actor }}
+          registry-access-token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/erlang-checks.yml (new file, vendored, 40 lines)

@@ -0,0 +1,40 @@
+name: Erlang CI Checks
+
+on:
+  push:
+    branches:
+      - 'master'
+      - 'epic/**'
+  pull_request:
+    branches: ['**']
+
+jobs:
+  setup:
+    name: Load .env
+    runs-on: ubuntu-latest
+    outputs:
+      otp-version: ${{ steps.otp-version.outputs.version }}
+      rebar-version: ${{ steps.rebar-version.outputs.version }}
+      thrift-version: ${{ steps.thrift-version.outputs.version }}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+      - run: grep -v '^#' .env >> $GITHUB_ENV
+      - id: otp-version
+        run: echo "::set-output name=version::$OTP_VERSION"
+      - id: rebar-version
+        run: echo "::set-output name=version::$REBAR_VERSION"
+      - id: thrift-version
+        run: echo "::set-output name=version::$THRIFT_VERSION"
+
+  run:
+    name: Run checks
+    needs: setup
+    uses: valitydev/erlang-workflows/.github/workflows/erlang-parallel-build.yml@v1.0.15
+    with:
+      otp-version: ${{ needs.setup.outputs.otp-version }}
+      rebar-version: ${{ needs.setup.outputs.rebar-version }}
+      use-thrift: true
+      thrift-version: ${{ needs.setup.outputs.thrift-version }}
+      run-ct-with-compose: true
+      upload-coverage: false
@@ -16,6 +16,7 @@
     envloader,
     woody_user_identity,
     jsx,
+    uuid,
     dmt_core,
     dmt_object,
     opentelemetry_api,
@@ -246,16 +246,16 @@ report_migrations(down, Results) ->
 -define(DRIVER, epgsql).

 record_migration(up, Conn, V) ->
-    ?DRIVER:equery(Conn, "INSERT INTO __migrations (id) VALUES ($1)", [V]);
+    epgsql:equery(Conn, "INSERT INTO __migrations (id) VALUES ($1)", [V]);
 record_migration(down, Conn, V) ->
-    ?DRIVER:equery(Conn, "DELETE FROM __migrations WHERE id = $1", [V]).
+    epgsql:equery(Conn, "DELETE FROM __migrations WHERE id = $1", [V]).

 apply_migrations(Type, Migrations, Conn) ->
     Results = lists:foldl(
         fun
             (_, [{_, {error, _}} | _] = Acc) ->
                 Acc;
-            (Migration = {Version, _}, Acc) ->
+            ({Version, _} = Migration, Acc) ->
                 case apply_migration(Type, Migration, Conn) of
                     ok -> [{Version, ok} | Acc];
                     {error, Error} -> [{Version, {error, Error}}]
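Note: the fold above short-circuits; once the head of the accumulator records a failure, no further migrations are applied. A minimal standalone sketch of the same accumulator pattern (module and function names here are illustrative, not part of the change):

-module(fold_short_circuit).
-export([run_steps/1]).

%% Run each {Name, Fun} step in order; once one returns {error, _},
%% skip the remaining steps and keep the results gathered so far.
run_steps(Steps) ->
    lists:foldl(
        fun
            (_, [{_, {error, _}} | _] = Acc) ->
                %% Head of the accumulator is a failure: skip this step.
                Acc;
            ({Name, Fun}, Acc) ->
                case Fun() of
                    ok -> [{Name, ok} | Acc];
                    {error, Reason} -> [{Name, {error, Reason}} | Acc]
                end
        end,
        [],
        Steps
    ).

For example, run_steps([{a, fun() -> ok end}, {b, fun() -> {error, boom} end}, {c, fun() -> ok end}]) returns [{b, {error, boom}}, {a, ok}] and never runs c.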
@@ -269,7 +269,7 @@ apply_migrations(Type, Migrations, Conn) ->
 apply_migration(Type, {Version, Migration}, Conn) ->
     case eql:get_query(Type, Migration) of
         {ok, Query} ->
-            case if_ok(?DRIVER:squery(Conn, Query)) of
+            case if_ok(epgsql:squery(Conn, Query)) of
                 ok ->
                     _ = record_migration(Type, Conn, Version),
                     ok;
@@ -317,12 +317,12 @@ applied_migrations(Args) when is_list(Args) ->
         end
     );
 applied_migrations(Conn) when is_pid(Conn) ->
-    case ?DRIVER:squery(Conn, "SELECT id FROM __migrations ORDER by id ASC") of
+    case epgsql:squery(Conn, "SELECT id FROM __migrations ORDER by id ASC") of
         {ok, _, Migs} ->
             [binary_to_list(Mig) || {Mig} <- Migs];
         {error, {error, error, <<"42P01">>, _, _, _}} ->
             %% init migrations and restart
-            {ok, _, _} = ?DRIVER:squery(
+            {ok, _, _} = epgsql:squery(
                 Conn,
                 "CREATE TABLE __migrations ("
                 "id VARCHAR(255) PRIMARY KEY,"
@@ -19,7 +19,7 @@ get_object({version, V}, ObjectRef) ->
             data := Data,
             created_at := CreatedAt
         }} ->
-            io:format("get_object Data ~p~n", [Data]),
+            % io:format("get_object Data ~p~n", [Data]),
             {ok, #domain_conf_v2_VersionedObject{
                 global_version = GlobalVersion,
                 %% TODO implement local versions
@@ -37,7 +37,6 @@ get_object({head, #domain_conf_v2_Head{}}, ObjectRef) ->
             data := Data,
             created_at := CreatedAt
         }} ->
-            io:format("get_object head Data ~p~n", [Data]),
             {ok, #domain_conf_v2_VersionedObject{
                 global_version = GlobalVersion,
                 %% TODO implement local versions
@@ -82,19 +81,12 @@ assemble_operations_(
             } = NewObject,

             Updates1 = update_objects_added_refs({temporary, TmpID}, Refers, UpdatesAcc),

-            io:format("~n {insert, #domain_conf_v2_InsertOp{} = InsertOp} ~p ~n", [Refers]),
-
             {[NewObject | InsertsAcc], Updates1, UpdatedObjectsAcc};
         {update, #domain_conf_v2_UpdateOp{targeted_ref = Ref} = UpdateOp} ->
-            case get_original_object_changes(UpdatesAcc, Ref) of
-                %% TODO Figure out how to stop several updates for the same object happening
-                Changes ->
-                    {ok, ObjectUpdate} = dmt_object:update_object(UpdateOp, Changes),
-                    io:format("~n {update, #domain_conf_v2_UpdateOp{targeted_ref = Ref} = UpdateOp} ~p ~n", [{Changes, ObjectUpdate}]),
-                    UpdatesAcc1 = update_referenced_objects(Changes, ObjectUpdate, UpdatesAcc),
-                    {InsertsAcc, UpdatesAcc1#{Ref => ObjectUpdate}, [Ref | UpdatedObjectsAcc]}
-            end;
+            Changes = get_original_object_changes(UpdatesAcc, Ref),
+            {ok, ObjectUpdate} = dmt_object:update_object(UpdateOp, Changes),
+            UpdatesAcc1 = update_referenced_objects(Changes, ObjectUpdate, UpdatesAcc),
+            {InsertsAcc, UpdatesAcc1#{Ref => ObjectUpdate}, [Ref | UpdatedObjectsAcc]};
         {remove, #domain_conf_v2_RemoveOp{ref = Ref}} ->
             #{
                 references := OriginalReferences
@@ -102,7 +94,6 @@ assemble_operations_(
             UpdatesAcc1 = update_objects_removed_refs(Ref, OriginalReferences, UpdatesAcc),

             NewObjectState = dmt_object:remove_object(OG),
-            io:format("~n UpdatesAcc1#{Ref => NewObjectState} ~p ~n", [UpdatesAcc1#{Ref => NewObjectState}]),
             {InsertsAcc, UpdatesAcc1#{Ref => NewObjectState}, [Ref | UpdatedObjectsAcc]}
     end.

@@ -164,21 +155,6 @@ get_original_object_changes(Updates, Ref) ->
         #{Ref := Object} ->
             Object;
         _ ->
-            %% {ok, #{
-            %%     id := ID,
-            %%     type := Type,
-            %%     referenced_by := RefdBy,
-            %%     references := Refers,
-            %%     data := Data
-            %% }} = get_latest_target_object(Ref),
-            %% %% NOTE this is done in order to decouple object type from object change type
-            %% #{
-            %%     id => ID,
-            %%     type => Type,
-            %%     referenced_by => RefdBy,
-            %%     references => Refers,
-            %%     data => Data
-            %% }
             {ok, Res} = get_latest_target_object(Ref),
             {Type, _} = Ref,
             Res#{
@@ -186,46 +162,10 @@ get_original_object_changes(Updates, Ref) ->
             }
     end.

-%% NOTE Add new tables here
--define(TABLES, [
-    category,
-    currency,
-    business_schedule,
-    calendar,
-    payment_method,
-    payout_method,
-    bank,
-    contract_template,
-    term_set_hierarchy,
-    payment_institution,
-    provider,
-    terminal,
-    inspector,
-    system_account_set,
-    external_account_set,
-    proxy,
-    globals,
-    cash_register_provider,
-    routing_rules,
-    bank_card_category,
-    criterion,
-    document_type,
-    payment_service,
-    payment_system,
-    bank_card_token_service,
-    mobile_op_user,
-    crypto_currency,
-    country,
-    trade_bloc,
-    identity_provider,
-    limit_config
-]).
-
 commit(Version, Commit, CreatedBy) ->
     {InsertObjects, UpdateObjects0, ChangedObjectIds} = assemble_operations(Commit),

-    case
-        epgsql_pool:transaction(
+    Result = epg_pool:transaction(
         default_pool,
         fun(Worker) ->
             ok = check_versions_sql(Worker, ChangedObjectIds, Version),
@@ -235,8 +175,8 @@ commit(Version, Commit, CreatedBy) ->
             ok = update_objects(Worker, UpdateObjects1, NewVersion),
             {ok, NewVersion, maps:values(PermanentIDsMaps)}
         end
-    )
-    of
+    ),
+    case Result of
         {ok, ResVersion, NewObjectsIDs} ->
             NewObjects = lists:map(
                 fun(#{data := Data}) ->
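Note: this refactor binds the transaction result to Result first and branches on it afterwards, instead of wrapping the whole call in a case head. A hedged sketch of the shape (pool name and query are placeholders; epg_pool:transaction/2 and epg_pool:query/3 are used the same way elsewhere in this diff):

%% Sketch only: bind the transaction result, then branch on it.
do_commit(Pool) ->
    Result = epg_pool:transaction(
        Pool,
        fun(Conn) ->
            {ok, _, _} = epg_pool:query(Conn, "SELECT 1", []),
            {ok, committed}
        end
    ),
    case Result of
        {ok, committed} -> ok;
        {error, Reason} -> {error, Reason}
    end.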
@@ -257,7 +197,16 @@ replace_tmp_ids_in_updates(UpdateObjects, PermanentIDsMaps) ->
             #{
                 referenced_by := ReferencedBy
             } = UpdateObject,
-            NewReferencedBy = lists:map(
+            NewReferencedBy = replace_referenced_by_ids(ReferencedBy, PermanentIDsMaps),
+            UpdateObject#{
+                referenced_by => NewReferencedBy
+            }
+        end,
+        UpdateObjects
+    ).
+
+replace_referenced_by_ids(ReferencedBy, PermanentIDsMaps) ->
+    lists:map(
         fun(Ref) ->
             case Ref of
                 {temporary, TmpID} ->
|
|||||||
end
|
end
|
||||||
end,
|
end,
|
||||||
ReferencedBy
|
ReferencedBy
|
||||||
),
|
|
||||||
UpdateObject#{
|
|
||||||
referenced_by => NewReferencedBy
|
|
||||||
}
|
|
||||||
end,
|
|
||||||
UpdateObjects
|
|
||||||
).
|
).
|
||||||
|
|
||||||
check_versions_sql(Worker, ChangedObjectIds, Version) ->
|
check_versions_sql(Worker, ChangedObjectIds, Version) ->
|
||||||
lists:foreach(
|
lists:foreach(
|
||||||
fun({ChangedObjectType, ChangedObjectRef0} = ChangedObjectId) ->
|
fun({ChangedObjectType, ChangedObjectRef0} = ChangedObjectId) ->
|
||||||
io:format("ChangedObjectRef0 ~p~n", [ChangedObjectRef0]),
|
|
||||||
ChangedObjectRef1 = to_string(ChangedObjectRef0),
|
ChangedObjectRef1 = to_string(ChangedObjectRef0),
|
||||||
Query0 =
|
Query0 =
|
||||||
io_lib:format("""
|
io_lib:format("""
|
||||||
@@ -288,7 +230,7 @@ check_versions_sql(Worker, ChangedObjectIds, Version) ->
             ORDER BY global_version DESC
             LIMIT 1
             """, [ChangedObjectType]),
-            case epgsql_pool:query(Worker, Query0, [ChangedObjectRef1]) of
+            case epg_pool:query(Worker, Query0, [ChangedObjectRef1]) of
                 {ok, _Columns, []} ->
                     throw({unknown_object_update, ChangedObjectId});
                 {ok, _Columns, [{ChangedObjectRef, MostRecentVersion}]} when MostRecentVersion > Version ->
@@ -309,7 +251,7 @@ get_new_version(Worker, CreatedBy) ->
         INSERT INTO GLOBAL_VERSION (CREATED_BY)
         VALUES ($1::uuid) RETURNING version;
         """,
-    case epgsql_pool:query(Worker, Query1, [CreatedBy]) of
+    case epg_pool:query(Worker, Query1, [CreatedBy]) of
         {ok, 1, _Columns, [{NewVersion}]} ->
             NewVersion;
         {error, Reason} ->
@@ -339,7 +281,8 @@ insert_object(Worker, Type, ID0, Sequence, Version, References0, Data0) ->
     ID1 = to_string(ID0),
     Data1 = to_string(Data0),
     References1 = lists:map(fun to_string/1, References0),
-    {Query, Params} =
+    Params0 = [Version, References1, [], Data1],
+    {Query, Params1} =
         case check_if_force_id_required(Worker, Type) of
             true ->
                 Query0 =
@@ -347,18 +290,16 @@ insert_object(Worker, Type, ID0, Sequence, Version, References0, Data0) ->
                     INSERT INTO ~p (id, global_version, references_to, referenced_by, data, is_active)
                     VALUES ($1, $2, $3, $4, $5, TRUE);
                     """, [Type]),
-                Params0 = [ID1, Version, References1, [], Data1],
-                {Query0, Params0};
+                {Query0, [ID1 | Params0]};
             false ->
                 Query1 =
                     io_lib:format("""
                     INSERT INTO ~p (id, sequence, global_version, references_to, referenced_by, data, is_active)
                     VALUES ($1, $2, $3, $4, $5, $6, TRUE);
                     """, [Type]),
-                Params1 = [ID1, Sequence, Version, References1, [], Data1],
-                {Query1, Params1}
+                {Query1, [ID1, Sequence | Params0]}
         end,
-    case epgsql_pool:query(Worker, Query, Params) of
+    case epg_pool:query(Worker, Query, Params1) of
         {ok, 1} ->
             ID0;
         {error, Reason} ->
@@ -393,11 +334,8 @@ get_object_field({_, _, _, data, _}, Data, _Ref) ->
     Data.

 update_objects(Worker, UpdateObjects, Version) ->
-
-    io:format("~n update_objects UpdateObjects ~p~n", [UpdateObjects]),
     maps:foreach(
         fun({_, ID}, UpdateObject) ->
-            io:format("~n update_objects ID ~p~n", [ID]),
             #{
                 id := ID,
                 type := Type,
@@ -423,7 +361,7 @@ update_object(Worker, Type, ID0, References0, ReferencedBy0, IsActive, Data0, Ve
         VALUES ($1, $2, $3, $4, $5, $6);
         """, [Type]),
     Params = [ID1, Version, References1, ReferencedBy1, Data1, IsActive],
-    case epgsql_pool:query(Worker, Query, Params) of
+    case epg_pool:query(Worker, Query, Params) of
         {ok, 1} ->
             ok;
         {error, Reason} ->
@@ -459,10 +397,16 @@ check_if_force_id_required(Worker, Type) ->
         FROM information_schema.columns
         WHERE table_name = $1 AND column_name = 'sequence';
         """,
-    case epgsql_pool:query(Worker, Query, [Type]) of
+    case epg_pool:query(Worker, Query, [Type]) of
         {ok, _Columns, []} ->
             true;
         {ok, _Columns, Rows} ->
+            has_sequence_column(Rows);
+        {error, Reason} ->
+            throw({error, Reason})
+    end.
+
+has_sequence_column(Rows) ->
     lists:all(
         fun(Row) ->
             case Row of
@@ -473,17 +417,14 @@ check_if_force_id_required(Worker, Type) ->
             end
         end,
         Rows
-    );
-        {error, Reason} ->
-            throw({error, Reason})
-    end.
+    ).

 get_last_sequence(Worker, Type) ->
     Query = io_lib:format("""
     SELECT MAX(sequence)
     FROM ~p;
     """, [Type]),
-    case epgsql_pool:query(Worker, Query) of
+    case epg_pool:query(Worker, Query) of
         {ok, _Columns, [{null}]} ->
             {ok, 0};
         {ok, _Columns, [{LastID}]} ->
@@ -516,7 +457,7 @@ check_if_id_exists(Worker, ID0, Type0) ->
         WHERE id = $1;
         """, [Type0]),
     ID1 = to_string(ID0),
-    case epgsql_pool:query(Worker, Query, [ID1]) of
+    case epg_pool:query(Worker, Query, [ID1]) of
         {ok, _Columns, []} ->
             false;
         {ok, _Columns, [{ID1}]} ->
@@ -537,14 +478,39 @@ get_target_objects(Worker, Refs, Version) ->
         Refs
     ).


 get_target_object(Ref, Version) ->
     get_target_object(default_pool, Ref, Version).

 get_target_object(Worker, Ref, Version) ->
+    % First check if the version exists
+    case check_version_exists(Worker, Version) of
+        {ok, exists} ->
+            fetch_object(Worker, Ref, Version);
+        {ok, not_exists} ->
+            {error, global_version_not_found};
+        Error ->
+            Error
+    end.
+
+check_version_exists(Worker, Version) ->
+    VersionRequest = """
+    SELECT 1
+    FROM global_version
+    WHERE version = $1
+    LIMIT 1
+    """,
+    case epg_pool:query(Worker, VersionRequest, [Version]) of
+        {ok, _Columns, []} ->
+            {ok, not_exists};
+        {ok, _Columns, [_Row]} ->
+            {ok, exists};
+        Error ->
+            Error
+    end.
+
+fetch_object(Worker, Ref, Version) ->
     {Type, ID} = Ref,
     ID0 = to_string(ID),
-    io:format("~n get_target_object ID ~p ID0 ~p and Version ~p~n", [ID, ID0, Version]),
     Request = io_lib:format("""
     SELECT id,
         global_version,
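Note: get_target_object/3 now checks that the requested global version exists before fetching, so a caller can tell a missing version apart from a missing object. A sketch of how the two error paths surface (the error atoms are the ones introduced above; the caller function is illustrative):

%% Sketch only: distinguishing the two lookup failures.
describe_lookup(Worker, Ref, Version) ->
    case get_target_object(Worker, Ref, Version) of
        {ok, Object} ->
            {found, Object};
        {error, global_version_not_found} ->
            %% check_version_exists/2 reported an unknown version.
            no_such_version;
        {error, object_not_found} ->
            %% fetch_object/3 found the version but not the object.
            no_such_object
    end.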
@@ -558,11 +524,10 @@ get_target_object(Worker, Ref, Version) ->
     ORDER BY global_version DESC
     LIMIT 1
     """, [Type]),
-    case epgsql_pool:query(Worker, Request, [ID0, Version]) of
+    case epg_pool:query(Worker, Request, [ID0, Version]) of
         {ok, _Columns, []} ->
-            {error, {object_not_found, Ref, Version}};
+            {error, object_not_found};
         {ok, Columns, Rows} ->
-            io:format("get_target_object Res ~p ~n", [{Columns, Rows}]),
             [Result | _] = to_marshalled_maps(Columns, Rows),
             {ok, Result}
     end.
@@ -583,7 +548,7 @@ get_latest_target_object(Ref) ->
     ORDER BY global_version DESC
     LIMIT 1
     """, [Type]),
-    case epgsql_pool:query(default_pool, Request, [ID0]) of
+    case epg_pool:query(default_pool, Request, [ID0]) of
         {ok, _Columns, []} ->
             {error, {object_not_found, Ref}};
         {ok, Columns, Rows} ->
@@ -23,37 +23,39 @@ do_handle_function('CheckoutObject', {VersionRef, ObjectRef}, _Context, _Options
             woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
         {error, Reason} ->
             woody_error:raise(system, {internal, Reason})
-    end;
-do_handle_function('GetLocalVersions', {Request}, _Context, _Options) ->
-    #domain_conf_v2_GetLocalVersionsRequest{
-        ref = Ref,
-        limit = Limit,
-        continuation_token = ContinuationToken
-    } = Request,
-    %% Retrieve local versions with pagination
-    case dmt_repository:get_local_versions(Ref, Limit, ContinuationToken) of
-        {ok, Versions, NewToken} ->
-            {ok, #domain_conf_v2_GetVersionsResponse{
-                result = Versions,
-                continuation_token = NewToken
-            }};
-        {error, object_not_found} ->
-            woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
-        {error, Reason} ->
-            woody_error:raise(system, {internal, Reason})
-    end;
-do_handle_function('GetGlobalVersions', {Request}, _Context, _Options) ->
-    #domain_conf_v2_GetGlobalVersionsRequest{
-        limit = Limit,
-        continuation_token = ContinuationToken
-    } = Request,
-    %% Retrieve global versions with pagination
-    case dmt_repository:get_global_versions(Limit, ContinuationToken) of
-        {ok, Versions, NewToken} ->
-            {ok, #domain_conf_v2_GetVersionsResponse{
-                result = Versions,
-                continuation_token = NewToken
-            }};
-        {error, Reason} ->
-            woody_error:raise(system, {internal, Reason})
     end.
+% TODO
+% do_handle_function('GetLocalVersions', {Request}, _Context, _Options) ->
+%     #domain_conf_v2_GetLocalVersionsRequest{
+%         ref = Ref,
+%         limit = Limit,
+%         continuation_token = ContinuationToken
+%     } = Request,
+%     %% Retrieve local versions with pagination
+%     case dmt_repository:get_local_versions(Ref, Limit, ContinuationToken) of
+%         {ok, Versions, NewToken} ->
+%             {ok, #domain_conf_v2_GetVersionsResponse{
+%                 result = Versions,
+%                 continuation_token = NewToken
+%             }};
+%         {error, object_not_found} ->
+%             woody_error:raise(business, #domain_conf_v2_ObjectNotFound{});
+%         {error, Reason} ->
+%             woody_error:raise(system, {internal, Reason})
+%     end;
+% TODO
+% do_handle_function('GetGlobalVersions', {Request}, _Context, _Options) ->
+%     #domain_conf_v2_GetGlobalVersionsRequest{
+%         limit = Limit,
+%         continuation_token = ContinuationToken
+%     } = Request,
+%     %% Retrieve global versions with pagination
+%     case dmt_repository:get_global_versions(Limit, ContinuationToken) of
+%         {ok, Versions, NewToken} ->
+%             {ok, #domain_conf_v2_GetVersionsResponse{
+%                 result = Versions,
+%                 continuation_token = NewToken
+%             }};
+%         {error, Reason} ->
+%             woody_error:raise(system, {internal, Reason})
+%     end.
@@ -13,7 +13,6 @@

 -export([get_service/1]).

--define(SERVER, ?MODULE).
 -define(APP, dmt).
 -define(DEFAULT_DB, default_db).

@@ -109,7 +108,7 @@ get_repository_handlers() ->
         })
     ].

--spec get_handler(repository | repository_client | state_processor, woody:options()) ->
+-spec get_handler(repository | repository_client | user_op, woody:options()) ->
     woody:http_handler(woody:th_handler()).
 get_handler(repository, Options) ->
     {"/v1/domain/repository", {
@@ -137,7 +136,7 @@ get_service(user_op) ->
 -spec enable_health_logging(erl_health:check()) -> erl_health:check().
 enable_health_logging(Check) ->
     EvHandler = {erl_health_event_handler, []},
-    maps:map(fun(_, V = {_, _, _}) -> #{runner => V, event_handler => EvHandler} end, Check).
+    maps:map(fun(_, {_, _, _} = V) -> #{runner => V, event_handler => EvHandler} end, Check).

 -spec get_prometheus_route() -> {iodata(), module(), _Opts :: any()}.
 get_prometheus_route() ->
@@ -16,7 +16,7 @@
 insert_user(Name, Email) ->
     Sql = "INSERT INTO op_user (name, email) VALUES ($1, $2) returning id",
     Params = [Name, Email],
-    case epgsql_pool:query(?POOL_NAME, Sql, Params) of
+    case epg_pool:query(?POOL_NAME, Sql, Params) of
         {ok, 1, _Columns, [{ID}]} ->
             {ok, ID};
         {error, Reason} ->
@@ -27,7 +27,7 @@ insert_user(Name, Email) ->
 get_user(UserOpID) ->
     Sql = "SELECT id, name, email FROM op_user WHERE id = $1::uuid",
     Params = [UserOpID],
-    case epgsql_pool:query(?POOL_NAME, Sql, Params) of
+    case epg_pool:query(?POOL_NAME, Sql, Params) of
         {ok, _Columns, [{ID, Name, Email}]} ->
             {ok, #domain_conf_v2_UserOp{id = ID, name = Name, email = Email}};
         {ok, _, []} ->
@@ -40,7 +40,7 @@ get_user(UserOpID) ->
 delete_user(UserOpID) ->
     Sql = "DELETE FROM op_user WHERE id = $1::uuid",
     Params = [UserOpID],
-    case epgsql_pool:query(?POOL_NAME, Sql, Params) of
+    case epg_pool:query(?POOL_NAME, Sql, Params) of
         {ok, _, Result} when Result =:= [] ->
             {error, user_not_found};
         {ok, 1} ->
@@ -11,9 +11,6 @@

 -export([cleanup_db/0]).

--include_lib("damsel/include/dmsl_base_thrift.hrl").
--include_lib("damsel/include/dmsl_domain_thrift.hrl").
-
 -export_type([config/0]).
 -export_type([test_case_name/0]).
 -export_type([group_name/0]).
@@ -21,6 +18,7 @@
 %%

 -type app_name() :: atom().
+-export_type([app_name/0]).

 -spec start_app(app_name()) -> {[app_name()], map()}.
 start_app(scoper = AppName) ->
@@ -51,13 +49,13 @@ start_app(dmt = AppName) ->
         }},
         {services, #{
             repository => #{
-                url => <<"http://dominant-v2:8022/v1/domain/repository">>
+                url => <<"http://dmt:8022/v1/domain/repository">>
             },
             repository_client => #{
-                url => <<"http://dominant-v2:8022/v1/domain/repository_client">>
+                url => <<"http://dmt:8022/v1/domain/repository_client">>
             },
             user_op => #{
-                url => <<"http://dominant-v2:8022/v1/domain/user_op">>
+                url => <<"http://dmt:8022/v1/domain/user_op">>
             }
         }}
     ]),
@@ -131,17 +129,13 @@ cfg(Key, Config) ->

 %%

--define(ROOT_URL, "http://dominant-v2:8022").
-
 -spec create_client() -> dmt_client_api:t().
 create_client() ->
     create_client_w_context(woody_context:new()).
-%% {?ROOT_URL, create_client_w_context(woody_context:new())}.

 -spec create_client(woody:trace_id()) -> dmt_client_api:t().
 create_client(TraceID) ->
     create_client_w_context(woody_context:new(TraceID)).
-%% {?ROOT_URL, create_client_w_context(woody_context:new(TraceID))}.

 create_client_w_context(WoodyCtx) ->
     dmt_client_api:new(WoodyCtx).
@@ -165,5 +159,5 @@ cleanup_db() ->
         END LOOP;
     END $$;
     """,
-    {ok, _, _} = epgsql_pool:query(default_pool, Query),
+    {ok, _, _} = epg_pool:query(default_pool, Query),
     ok.
@@ -2,8 +2,7 @@

 -include_lib("damsel/include/dmsl_domain_conf_v2_thrift.hrl").
 -include_lib("damsel/include/dmsl_domain_thrift.hrl").
--include_lib("common_test/include/ct.hrl").
--include_lib("eunit/include/eunit.hrl").
+-include_lib("stdlib/include/assert.hrl").

 %% API
 -export([
@@ -70,9 +69,7 @@ groups() ->
             insert_remove_referencing_object_success_test,
             update_object_success_test
         ]},
-        {repository_client_tests, [], [
-
-        ]}
+        {repository_client_tests, [], []}
     ].

 init_per_group(repository_client_tests, C) ->
@@ -141,7 +138,8 @@ insert_remove_referencing_object_success_test(Config) ->
     Commit1 = #domain_conf_v2_Commit{
         ops = [
             {insert, #domain_conf_v2_InsertOp{
-                object = {proxy, #domain_ProxyDefinition{
+                object =
+                    {proxy, #domain_ProxyDefinition{
                     name = <<"proxy">>,
                     description = <<"proxy_description">>,
                     url = <<"http://someurl">>,
@@ -159,11 +157,11 @@ insert_remove_referencing_object_success_test(Config) ->
         ]
     }} = dmt_client:commit(Revision1, Commit1, UserOpID, Client),

-
     Commit2 = #domain_conf_v2_Commit{
         ops = [
             {insert, #domain_conf_v2_InsertOp{
-                object = {provider, #domain_Provider{
+                object =
+                    {provider, #domain_Provider{
                     name = <<"name">>,
                     description = <<"description">>,
                     proxy = #domain_Proxy{
@@ -193,7 +191,6 @@ insert_remove_referencing_object_success_test(Config) ->
         ]
     },

-
     {ok, _} = dmt_client:commit(Revision3, Commit3, UserOpID, Client).

 %% FIXME reference collecting doesn't work. Need to fix ASAP
@@ -295,7 +292,6 @@ insert_object_forced_id_success_test(Config) ->
     ] = ordsets:to_list(NewObjectsSet),
     ?assertMatch(CategoryRef, Ref).

-
 update_object_success_test(Config) ->
     Client = dmt_ct_helper:cfg(client, Config),

@@ -306,7 +302,8 @@ update_object_success_test(Config) ->
     Commit1 = #domain_conf_v2_Commit{
         ops = [
             {insert, #domain_conf_v2_InsertOp{
-                object = {proxy, #domain_ProxyDefinition{
+                object =
+                    {proxy, #domain_ProxyDefinition{
                     name = <<"proxy">>,
                     description = <<"proxy_description">>,
                     url = <<"http://someurl">>,
@@ -324,8 +321,8 @@ update_object_success_test(Config) ->
         ]
     }} = dmt_client:commit(Revision1, Commit1, UserOpID, Client),

-    NewObject = {proxy, #domain_ProxyObject{
+    NewObject =
+        {proxy, #domain_ProxyObject{
         ref = ProxyRef,
         data = #domain_ProxyDefinition{
             name = <<"proxy2">>,
@@ -40,7 +40,7 @@ references(Object, DataType) ->

 references(undefined, _StructInfo, Refs) ->
     Refs;
-references({Tag, Object}, StructInfo = {struct, union, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
+references({Tag, Object}, {struct, union, FieldsInfo} = StructInfo, Refs) when is_list(FieldsInfo) ->
     case get_field_info(Tag, StructInfo) of
         false ->
             erlang:error({<<"field info not found">>, Tag, StructInfo});
@@ -2,7 +2,7 @@

 -export([parse_transform/2]).

--spec parse_transform(Forms, [compile:option()]) -> Forms when
+-spec parse_transform(Forms, term()) -> Forms when
     Forms :: [erl_parse:abstract_form() | erl_parse:form_info()].
 parse_transform(Forms, _Options) ->
     [
@@ -22,7 +22,7 @@ transform(Form) ->
             Form
     end.

-transform_function(Name = is_reference_type, 1, FormWas) ->
+transform_function(is_reference_type = Name, 1, FormWas) ->
     % NOTE
     % Replacing `dmt_domain:is_reference_type/1` with a code which does something similar to:
     % ```
@@ -57,7 +57,7 @@ transform_function(Name = is_reference_type, 1, FormWas) ->
         )
     ),
     Form;
-transform_function(_Name = is_reference_type, 2, _FormWas) ->
+transform_function(is_reference_type, 2, _FormWas) ->
     % NOTE
     % We need to make `is_reference_type/2` disappear, otherwise it will trigger _unused function_
     % warning.
@@ -1,50 +0,0 @@
--module(dmt_history).
-
--export([head/1]).
--export([head/2]).
--export([travel/3]).
-
--include_lib("damsel/include/dmsl_domain_conf_thrift.hrl").
-
--type history() :: dmsl_domain_conf_thrift:'History'().
--type version() :: dmsl_domain_conf_thrift:'Version'().
--type snapshot() :: dmsl_domain_conf_thrift:'Snapshot'().
-
--spec head(history()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
-head(History) ->
-    head(History, #domain_conf_Snapshot{version = 0, domain = dmt_domain:new()}).
-
--spec head(history(), snapshot()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
-head(History, Snapshot) when map_size(History) =:= 0 ->
-    {ok, Snapshot};
-head(History, Snapshot) ->
-    Head = lists:max(maps:keys(History)),
-    travel(Head, History, Snapshot).
-
--spec travel(version(), history(), snapshot()) -> {ok, snapshot()} | {error, dmt_domain:operation_error()}.
-travel(To, _History, #domain_conf_Snapshot{version = From} = Snapshot) when To =:= From ->
-    {ok, Snapshot};
-travel(To, History, #domain_conf_Snapshot{version = From, domain = Domain}) when To > From ->
-    #domain_conf_Commit{ops = Ops} = maps:get(From + 1, History),
-    case dmt_domain:apply_operations(Ops, Domain) of
-        {ok, NewDomain} ->
-            NextSnapshot = #domain_conf_Snapshot{
-                version = From + 1,
-                domain = NewDomain
-            },
-            travel(To, History, NextSnapshot);
-        {error, _} = Error ->
-            Error
-    end;
-travel(To, History, #domain_conf_Snapshot{version = From, domain = Domain}) when To < From ->
-    #domain_conf_Commit{ops = Ops} = maps:get(From, History),
-    case dmt_domain:revert_operations(Ops, Domain) of
-        {ok, NewDomain} ->
-            PreviousSnapshot = #domain_conf_Snapshot{
-                version = From - 1,
-                domain = NewDomain
-            },
-            travel(To, History, PreviousSnapshot);
-        {error, _} = Error ->
-            Error
-    end.
@@ -8,7 +8,7 @@

 -define(DOMAIN, dmsl_domain_thrift).

-get_domain_object_ref(DomainObject = {Tag, _Struct}) ->
+get_domain_object_ref({Tag, _Struct} = DomainObject) ->
     {_Type, Ref} = get_domain_object_field(ref, DomainObject),
     {Tag, Ref}.

@@ -24,8 +24,12 @@ get_refless_data({Tag, Struct}) ->

 get_refless_object_schema(Tag) ->
     SchemaInfo = get_struct_info('ReflessDomainObject'),
-    {_, _, {struct, _, {_, ObjectStructName}}, _, _} = get_field_info(Tag, SchemaInfo),
-    {ObjectStructName, get_struct_info(ObjectStructName)}.
+    case get_field_info(Tag, SchemaInfo) of
+        {_, _, {struct, _, {_, ObjectStructName}}, _, _} ->
+            {ObjectStructName, get_struct_info(ObjectStructName)};
+        false ->
+            erlang:error({field_info_not_found, Tag, SchemaInfo})
+    end.

 %% DomainObject ZONE

@@ -70,7 +74,7 @@ references(Object, DataType) ->

 references(undefined, _StructInfo, Refs) ->
     Refs;
-references({Tag, Object}, StructInfo = {struct, union, FieldsInfo}, Refs) when is_list(FieldsInfo) ->
+references({Tag, Object}, {struct, union, FieldsInfo} = StructInfo, Refs) when is_list(FieldsInfo) ->
     case get_field_info(Tag, StructInfo) of
         false ->
             erlang:error({<<"field info not found">>, Tag, StructInfo});
@@ -4,7 +4,7 @@ services:
       jaeger:
         condition: service_healthy
     environment:
-      - OTEL_SERVICE_NAME=dominant-v2
+      - OTEL_SERVICE_NAME=dmt
      - OTEL_TRACES_EXPORTER=otlp
      - OTEL_TRACES_SAMPLER=parentbased_always_on
      - OTEL_EXPORTER_OTLP_PROTOCOL=http_protobuf
@@ -1,5 +1,4 @@
 services:
-
   testrunner:
     image: $DEV_IMAGE_TAG
     environment:
@@ -7,7 +6,7 @@ services:
       POSTGRES_HOST: db
       POSTGRES_USER: postgres
       POSTGRES_PASSWORD: postgres
-      POSTGRES_DB: dmtv2
+      POSTGRES_DB: dmt
     build:
       dockerfile: Dockerfile.dev
       context: .
@@ -16,7 +15,7 @@ services:
         THRIFT_VERSION: $THRIFT_VERSION
     volumes:
       - .:$PWD
-    hostname: dominant-v2
+    hostname: dmt
     depends_on:
       db:
         condition: service_healthy
elvis.config (new file, 79 lines)

@@ -0,0 +1,79 @@
+[
+    {elvis, [
+        {config, [
+            #{
+                dirs => ["apps/**/src", "apps/**/include"],
+                filter => "*.erl",
+                ruleset => erl_files,
+                rules => [
+                    {elvis_text_style, line_length, #{limit => 120}},
+                    {elvis_text_style, no_trailing_whitespace, #{ignore_empty_lines => true}},
+                    {elvis_style, nesting_level, #{
+                        level => 3,
+                        ignore => [dmt_db_migration]
+                    }},
+                    {elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
+                    {elvis_style, no_if_expression, disable},
+                    %% Project rules
+                    {elvis_style, atom_naming_convention, disable},
+                    {elvis_style, macro_names, disable},
+                    {elvis_style, no_throw, disable}
+                ]
+            },
+            #{
+                dirs => ["apps/**/test"],
+                filter => "*.erl",
+                ruleset => erl_files,
+                rules => [
+                    {elvis_text_style, line_length, #{limit => 120}},
+                    {elvis_text_style, no_trailing_whitespace, #{ignore_empty_lines => true}},
+                    {elvis_style, nesting_level, #{level => 3}},
+                    {elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
+                    {elvis_style, no_if_expression, disable},
+                    %% Project rules
+                    % We want to use `ct:pal/2` and friends in test code.
+                    {elvis_style, no_debug_call, disable},
+                    % Assert macros can trigger use of ignored binding, yet we want them for better
+                    % readability.
+                    {elvis_style, used_ignored_variable, disable},
+                    % Tests are usually more comprehensible when a bit more verbose.
+                    {elvis_style, dont_repeat_yourself, #{min_complexity => 50}},
+                    {elvis_style, god_modules, disable},
+                    {elvis_style, macro_names, disable}
+                ]
+            },
+            #{
+                dirs => ["."],
+                filter => "Makefile",
+                ruleset => makefiles
+            },
+            #{
+                dirs => ["."],
+                filter => "elvis.config",
+                ruleset => elvis_config
+            },
+            #{
+                dirs => [".", "apps/*"],
+                filter => "rebar.config",
+                ruleset => rebar_config,
+                rules => [
+                    {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
+                    {elvis_text_style, no_tabs},
+                    {elvis_text_style, no_trailing_whitespace},
+                    %% Temporarily disabled till regex pattern is available
+                    {elvis_project, no_deps_master_rebar, disable},
+                    {elvis_project, no_branch_deps, disable}
+                ]
+            },
+            #{
+                dirs => ["apps/**/src"],
+                filter => "*.app.src",
+                rules => [
+                    {elvis_text_style, line_length, #{limit => 120, skip_comments => false}},
+                    {elvis_text_style, no_tabs},
+                    {elvis_text_style, no_trailing_whitespace}
+                ]
+            }
+        ]}
+    ]}
+].
erlang_ls.config (new file, 2 lines)

@@ -0,0 +1,2 @@
+include_dirs:
+  - "_build/default/lib"
rebar.config (55 lines changed)

@@ -35,10 +35,10 @@
     {cowboy_access_log, {git, "https://github.com/valitydev/cowboy_access_log.git", {branch, "master"}}},
     {woody_user_identity, {git, "https://github.com/valitydev/woody_erlang_user_identity.git", {branch, "master"}}},
     {woody, {git, "https://github.com/valitydev/woody_erlang.git", {branch, master}}},
-    {damsel, {git, "git@github.com:valitydev/damsel.git", {branch, "IMP-281/dmt_proto"}}},
+    {damsel, {git, "https://github.com/valitydev/damsel.git", {branch, "IMP-281/dmt_v2_proto"}}},

     %% Libraries for postgres interaction
-    {epg_connector, {git, "git@github.com:valitydev/epg_connector.git", {branch, master}}},
+    {epg_connector, {git, "https://github.com/valitydev/epg_connector.git", {branch, master}}},
     {epgsql, {git, "https://github.com/epgsql/epgsql.git", {tag, "4.7.1"}}},
     {epgsql_pool, {git, "https://github.com/wgnet/epgsql_pool", {branch, "master"}}},
     {herd, {git, "https://github.com/wgnet/herd.git", {tag, "1.3.4"}}},
@@ -48,7 +48,7 @@
     eql,
     getopt,

-    {prometheus, "4.6.0"},
+    {prometheus, "4.11.0"},
     {prometheus_cowboy, "0.1.8"},

     %% OpenTelemetry deps
@@ -78,6 +78,48 @@
     {incremental, true}
 ]}.

+{profiles, [
+    {prod, [
+        {deps, [
+            % for introspection on production
+            {recon, "2.5.2"},
+            {logger_logstash_formatter,
+                {git, "https://github.com/valitydev/logger_logstash_formatter.git", {ref, "08a66a6"}}},
+            {iosetopts, {git, "https://github.com/valitydev/iosetopts.git", {ref, "edb445c"}}}
+        ]},
+        {relx, [
+            {release, {dmt, "0.1"}, [
+                iosetopts,
+                {recon, load},
+                {runtime_tools, load},
+                {tools, load},
+                {opentelemetry, temporary},
+                logger_logstash_formatter,
+                sasl,
+                dmt
+            ]},
+            {mode, minimal},
+            {sys_config, "./config/sys.config"},
+            {vm_args, "./config/vm.args"},
+            {extended_start_script, true}
+        ]}
+    ]},
+    {test, [
+        {deps, [
+            {meck, "0.9.2"}
+        ]},
+        {dialyzer, [
+            {plt_extra_apps, [
+                eunit,
+                common_test,
+                runtime_tools,
+                damsel,
+                meck
+            ]}
+        ]}
+    ]}
+]}.
+
 {project_plugins, [
     {rebar3_lint, "3.2.6"},
     {erlfmt, "1.5.0"},
@@ -111,3 +153,10 @@
         dmt
     ]}
 ]}.
+
+% Workaround for outdated plugin. It breaks it.
+{overrides, [
+    {override, rebar3_elvis_plugin, [
+        {src_dirs, ["unknown"]}
+    ]}
+]}.
@@ -28,7 +28,7 @@
  {<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.11.0">>},2},
  {<<"ctx">>,{pkg,<<"ctx">>,<<"0.6.0">>},2},
  {<<"damsel">>,
-  {git,"git@github.com:valitydev/damsel.git",
+  {git,"https://github.com/valitydev/damsel.git",
   {ref,"de7ce44874984331f8b180bef3a786bd35573e48"}},
   0},
  {<<"envloader">>,
@@ -36,8 +36,8 @@
   {ref,"27a97e04f35c554995467b9236d8ae0188d468c7"}},
   0},
  {<<"epg_connector">>,
-  {git,"git@github.com:valitydev/epg_connector.git",
-   {ref,"7fc3aa1b6d9c8be69a64fefd18f6aaa416dcd572"}},
+  {git,"https://github.com/valitydev/epg_connector.git",
+   {ref,"19c1a4bd2cde9823b6576bc446e402b90791c9c0"}},
   0},
  {<<"epgsql">>,
   {git,"https://github.com/epgsql/epgsql.git",