A rolling upgrade/downgrade test for TS

A squash of the following commits:

* First cut of upgrade/downgrade. Also includes a spelling fix for 'aggregration'.
* Experimental branch of riak_test that includes the capability to force downgrade and upgrade the client.
* Whitespace cleanup, copyright year update.
* Ensure ts_util:assert brings a ct test down.
* Take care to fuzzy-compare those floats in returned rows.
* Don't bother reloading protobuff, but do reload riak_pb mods & apps.
* Always dumbly drop 'ok' from query returned tuple.
* Trivial code touchups.
* Remove client up/downgrade code, extra io:format's; use 3 nodes, not 5.
Parent: 4b422947a9
Commit: ee7985d4d1
src/rt_load_client.erl (new file, 78 lines)
@@ -0,0 +1,78 @@
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(rt_load_client).

-export([
         load/1
        ]).

load(Version) ->
    LoadPaths = get_load_paths(Version),
    Modules = get_modules(LoadPaths, []),
    ok = unload_riak_client(Modules),
    ok = load_riak_client(Modules),
    ok.

load_riak_client(Modules) ->
    [{module, Mod} = code:load_abs(File) || {File, Mod} <- Modules],
    {ok, _} = application:ensure_all_started(riakc),
    ok.

get_modules([], Acc) ->
    lists:flatten(Acc);
get_modules([H | T], Acc) ->
    Files = filelib:wildcard(filename:join([H, "*.beam"])),
    Files2 = [filename:rootname(File) || File <- Files],
    Mods = extract_mods(Files),
    Zip = lists:zip(Files2, Mods),
    get_modules(T, [Zip | Acc]).

extract_mods(Mods) ->
    [list_to_atom(filename:rootname(filename:basename(Mod))) || Mod <- Mods].

get_load_paths(Version) ->
    Root = rtdev:relpath(Version),
    [
     filename:join([Root, "dev/dev1/lib/riakc*/ebin"]),
     filename:join([Root, "dev/dev1/lib/riak_pb*/ebin"])
    ].

%% we have a problem: the set of files in Version X of the client
%% might not be congruent with that of Version Y
%% so we will hard unload files from the app manifest
%% and then iterate over the beam files in the path and unload any
%% modules with those names - all a bit brute-force-and-ignorance
unload_riak_client(Modules) ->
    %% app first
    ok = application:stop(riak_pb),
    ok = application:stop(riakc),
    [ok = unload(App) || App <- [riakc, riak_pb]],
    [begin code:purge(Mod), code:delete(Mod) end || {_File, Mod} <- Modules],
    ok.

unload(App) ->
    _ = application:load(App),
    case application:get_key(App, modules) of
        {ok, Modules} ->
            [begin code:purge(Mod), code:delete(Mod) end || Mod <- Modules],
            ok;
        _Other ->
            ok
    end.
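A minimal sketch of how this loader is meant to be driven, for orientation only. The pairing below is an assumption rather than part of the commit (the squash notes say the client up/downgrade calls were later removed from the test itself), but rt:upgrade/2 and rt:wait_for_service/2 are the same harness calls used by ts_updown_util:do_node_transition/3 further down.

%% Hypothetical helper, not in this commit: move a node to Version and
%% then swap the loaded riakc/riak_pb beams to the ones shipped with it.
transition_node_and_client(Node, Version) ->
    ok = rt:upgrade(Node, Version),
    ok = rt:wait_for_service(Node, riak_kv),
    ok = rt_load_client:load(Version).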
tests/ts_downgrade_upgrade.erl (new file, 185 lines)
@@ -0,0 +1,185 @@
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ts_downgrade_upgrade).

-compile(export_all).

-include_lib("common_test/include/ct.hrl").

%% Callbacks

suite() ->
    [{timetrap, {seconds, 9000}}].

init_per_suite(Config) ->
    %% get the test meta data from the riak_test runner
    TestMetaData = riak_test_runner:metadata(self()),

    %% set up the cluster that we will be testing
    OldVsn = proplists:get_value(upgrade_version, TestMetaData, current),
    NewVsn = proplists:get_value(upgrade_version, TestMetaData, previous),

    %% build the starting (old) cluster
    Nodes = rt:build_cluster([OldVsn, OldVsn, OldVsn]),

    %% document the configuration of the nodes so that this can be added
    %% to the Config that is passed to all the tests
    NodeConfig = [
                  {nodes, lists:zip(lists:seq(1, 3), Nodes)},
                  {oldvsn, OldVsn},
                  {newvsn, NewVsn}
                 ],

    %% now we are going to write some data to the old cluster
    %% and generate some queries that will operate on it
    %% the query and the expected results will be put in the Config
    %% so that we can rerun them as we walk the upgrade/downgrade ladder
    %% Gonnae do a complex aggregation query and a simple read for functional
    %% coverage
    QueryConfig = ts_updown_util:init_per_suite_data_write(Nodes),

    %% now stuff the config with the expected values
    FullConfig = QueryConfig ++ NodeConfig ++ Config,
    %% ct:pal("CT config: ~p", [FullConfig]),
    FullConfig.

end_per_suite(_Config) ->
    ok.

init_per_group(_GroupName, Config) ->
    Config.

end_per_group(_GroupName, _Config) ->
    ok.

init_per_testcase(_TestCase, Config) ->
    Config.

end_per_testcase(_TestCase, _Config) ->
    ok.

%% we need to break up the read tests into groups to stop the system going into
%% query overload
groups() ->
    [
     {query_group_1, [parallel], [
                                  query_1,
                                  query_2,
                                  query_3
                                 ]},
     {query_group_2, [parallel], [
                                  query_4,
                                  query_5,
                                  query_6
                                 ]},
     {query_group_3, [parallel], [
                                  query_7,
                                  query_8,
                                  query_9
                                 ]},
     {query_group_4, [parallel], [
                                  query_10
                                 ]}
    ].

all() ->
    [
     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4},

     downgrade3,

     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4},

     downgrade2,

     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4},

     downgrade1,

     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4},

     upgrade1,

     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4},

     upgrade2,

     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4},

     upgrade3,

     {group, query_group_1},
     {group, query_group_2},
     {group, query_group_3},
     {group, query_group_4}
    ].

%%%
%%% Tests
%%%

upgrade1(Config) -> ts_updown_util:do_node_transition(Config, 1, oldvsn).
upgrade2(Config) -> ts_updown_util:do_node_transition(Config, 2, oldvsn).
upgrade3(Config) -> ts_updown_util:do_node_transition(Config, 3, oldvsn).

downgrade1(Config) -> ts_updown_util:do_node_transition(Config, 1, newvsn).
downgrade2(Config) -> ts_updown_util:do_node_transition(Config, 2, newvsn).
downgrade3(Config) -> ts_updown_util:do_node_transition(Config, 3, newvsn).

query_1() -> ts_updown_util:run_init_per_suite_queries(1).
query_2() -> ts_updown_util:run_init_per_suite_queries(2).
query_3() -> ts_updown_util:run_init_per_suite_queries(3).
query_4() -> ts_updown_util:run_init_per_suite_queries(4).
query_5() -> ts_updown_util:run_init_per_suite_queries(5).
query_6() -> ts_updown_util:run_init_per_suite_queries(6).
query_7() -> ts_updown_util:run_init_per_suite_queries(7).
query_8() -> ts_updown_util:run_init_per_suite_queries(8).
query_9() -> ts_updown_util:run_init_per_suite_queries(9).
query_10() -> ts_updown_util:run_init_per_suite_queries(10).

query_1(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 1).
query_2(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 2).
query_3(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 3).
query_4(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 4).
query_5(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 5).
query_6(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 6).
query_7(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 7).
query_8(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 8).
query_9(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 9).
query_10(Config) -> ts_updown_util:run_init_per_suite_queries(Config, 10).
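For orientation, this is roughly the shape of the suite Config after init_per_suite/1, i.e. what the ts_updown_util keyfind lookups below operate on. The node names are illustrative rtdev-style names and are an assumption, not taken from the commit.

%% Assumed Config shape (illustration only):
%% [{init_per_suite_queries, [{1, {Qry1, {ok, Expected1}}}, ..., {10, {Qry10, {ok, Expected10}}}]},
%%  {nodes,  [{1, 'dev1@127.0.0.1'}, {2, 'dev2@127.0.0.1'}, {3, 'dev3@127.0.0.1'}]},
%%  {oldvsn, current},
%%  {newvsn, previous}
%%  | RestOfCtConfig]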
tests/ts_updown_util.erl (new file, 181 lines)
@@ -0,0 +1,181 @@
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ts_updown_util).

-include_lib("eunit/include/eunit.hrl").

-export([
         init_per_suite_data_write/1,
         do_node_transition/3,
         run_init_per_suite_queries/2
        ]).

-define(TEMPERATURE_COL_INDEX,   4).
-define(PRESSURE_COL_INDEX,      5).
-define(PRECIPITATION_COL_INDEX, 6).

init_per_suite_data_write(Nodes) ->
    StartingNode = hd(Nodes),
    Conn = rt:pbc(StartingNode),
    ?assert(is_pid(Conn)),

    AggTable = "Aggregation_written_on_old_cluster",
    DDL = ts_util:get_ddl(aggregation, AggTable),

    {ok, _} = ts_util:create_and_activate_bucket_type({Nodes, StartingNode}, DDL, AggTable),

    %% generate data and hoy it into the cluster
    Count = 10,
    Data = ts_util:get_valid_aggregation_data(Count),
    Column4 = [element(?TEMPERATURE_COL_INDEX, X) || X <- Data],
    Column5 = [element(?PRESSURE_COL_INDEX, X) || X <- Data],
    Column6 = [element(?PRECIPITATION_COL_INDEX, X) || X <- Data],
    ok = riakc_ts:put(Conn, AggTable, Data),

    %% now let's create some queries with their expected results
    Where = " WHERE myfamily = 'family1' and myseries = 'seriesX' "
            "and time >= 1 and time <= " ++ integer_to_list(Count),

    Qry1 = "SELECT COUNT(myseries) FROM " ++ AggTable ++ Where,
    Expected1 = {[<<"COUNT(myseries)">>], [{Count}]},

    Qry2 = "SELECT COUNT(time) FROM " ++ AggTable ++ Where,
    Expected2 = {[<<"COUNT(time)">>], [{Count}]},

    Qry3 = "SELECT COUNT(pressure), count(temperature), cOuNt(precipitation) FROM " ++
           AggTable ++ Where,
    Expected3 = {
      [<<"COUNT(pressure)">>,
       <<"COUNT(temperature)">>,
       <<"COUNT(precipitation)">>
      ],
      [{count_non_nulls(Column5),
        count_non_nulls(Column4),
        count_non_nulls(Column6)}]},

    Qry4 = "SELECT SUM(temperature) FROM " ++ AggTable ++ Where,
    Sum4 = lists:sum([X || X <- Column4, is_number(X)]),
    Expected4 = {[<<"SUM(temperature)">>],
                 [{Sum4}]},

    Qry5 = "SELECT SUM(temperature), sum(pressure), sUM(precipitation) FROM " ++
           AggTable ++ Where,
    Sum5 = lists:sum([X || X <- Column5, is_number(X)]),
    Sum6 = lists:sum([X || X <- Column6, is_number(X)]),
    Expected5 = {[<<"SUM(temperature)">>, <<"SUM(pressure)">>, <<"SUM(precipitation)">>],
                 [{Sum4, Sum5, Sum6}]},

    Qry6 = "SELECT MIN(temperature), MIN(pressure) FROM " ++ AggTable ++ Where,
    Min4 = lists:min([X || X <- Column4, is_number(X)]),
    Min5 = lists:min([X || X <- Column5, is_number(X)]),
    Expected6 = {[<<"MIN(temperature)">>, <<"MIN(pressure)">>],
                 [{Min4, Min5}]},

    Qry7 = "SELECT MAX(temperature), MAX(pressure) FROM " ++ AggTable ++ Where,
    Max4 = lists:max([X || X <- Column4, is_number(X)]),
    Max5 = lists:max([X || X <- Column5, is_number(X)]),
    Expected7 = {[<<"MAX(temperature)">>, <<"MAX(pressure)">>],
                 [{Max4, Max5}]},

    C4 = [X || X <- Column4, is_number(X)],
    C5 = [X || X <- Column5, is_number(X)],
    Count4 = length(C4),
    Count5 = length(C5),

    Avg4 = Sum4 / Count4,
    Avg5 = Sum5 / Count5,
    Qry8 = "SELECT AVG(temperature), MEAN(pressure) FROM " ++ AggTable ++ Where,
    Expected8 = {[<<"AVG(temperature)">>, <<"MEAN(pressure)">>],
                 [{Avg4, Avg5}]},

    StdDevFun4 = stddev_fun_builder(Avg4),
    StdDevFun5 = stddev_fun_builder(Avg5),
    StdDev4 = math:sqrt(lists:foldl(StdDevFun4, 0, C4) / Count4),
    StdDev5 = math:sqrt(lists:foldl(StdDevFun5, 0, C5) / Count5),
    Sample4 = math:sqrt(lists:foldl(StdDevFun4, 0, C4) / (Count4-1)),
    Sample5 = math:sqrt(lists:foldl(StdDevFun5, 0, C5) / (Count5-1)),
    Qry9 = "SELECT STDDEV_POP(temperature), STDDEV_POP(pressure),"
           " STDDEV(temperature), STDDEV(pressure), "
           " STDDEV_SAMP(temperature), STDDEV_SAMP(pressure) FROM " ++ AggTable ++ Where,
    Expected9 = {
      [
       <<"STDDEV_POP(temperature)">>, <<"STDDEV_POP(pressure)">>,
       <<"STDDEV(temperature)">>, <<"STDDEV(pressure)">>,
       <<"STDDEV_SAMP(temperature)">>, <<"STDDEV_SAMP(pressure)">>
      ],
      [{StdDev4, StdDev5, Sample4, Sample5, Sample4, Sample5}]
     },

    Qry10 = "SELECT SUM(temperature), MIN(pressure), AVG(pressure) FROM " ++
            AggTable ++ Where,
    Expected10 = {
      [<<"SUM(temperature)">>, <<"MIN(pressure)">>, <<"AVG(pressure)">>],
      [{Sum4, Min5, Avg5}]
     },

    QueryConfig = [
                   {init_per_suite_queries,
                    [
                     {1,  {Qry1,  {ok, Expected1}}},
                     {2,  {Qry2,  {ok, Expected2}}},
                     {3,  {Qry3,  {ok, Expected3}}},
                     {4,  {Qry4,  {ok, Expected4}}},
                     {5,  {Qry5,  {ok, Expected5}}},
                     {6,  {Qry6,  {ok, Expected6}}},
                     {7,  {Qry7,  {ok, Expected7}}},
                     {8,  {Qry8,  {ok, Expected8}}},
                     {9,  {Qry9,  {ok, Expected9}}},
                     {10, {Qry10, {ok, Expected10}}}
                    ]
                   }
                  ],
    QueryConfig.

do_node_transition(Config, N, Version) ->
    {nodes, Nodes} = lists:keyfind(nodes, 1, Config),
    {N, Node} = lists:keyfind(N, 1, Nodes),
    {Version, ToVsn} = lists:keyfind(Version, 1, Config),
    ok = rt:upgrade(Node, ToVsn),
    ok = rt:wait_for_service(Node, riak_kv),
    pass.

run_init_per_suite_queries(Config, TestNo)
  when is_integer(TestNo) andalso TestNo > 0 ->
    {init_per_suite_queries, Queries} = lists:keyfind(init_per_suite_queries, 1, Config),
    {TestNo, {Query, Exp}} = lists:keyfind(TestNo, 1, Queries),
    {nodes, Nodes} = lists:keyfind(nodes, 1, Config),
    {1, Node} = lists:keyfind(1, 1, Nodes),
    Conn = rt:pbc(Node),
    Got = ts_util:single_query(Conn, Query, []),
    case ts_util:assert_float("reading data query " ++ integer_to_list(TestNo), Exp, Got) of
        pass ->
            pass;
        fail ->
            ct:fail("failed query ~b", [TestNo])
    end.

%%
%% helper fns
%%
count_non_nulls(Col) ->
    length([V || V <- Col, V =/= []]).

stddev_fun_builder(Avg) ->
    fun(X, Acc) -> Acc + (Avg-X)*(Avg-X) end.
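A small worked example of the population versus sample split that the Expected9 values above encode, using illustrative numbers rather than the generated test data:

%% For a column C = [1.0, 2.0, 3.0]: Avg = 2.0 and stddev_fun_builder(Avg)
%% folds the squared deviations to 1.0 + 0.0 + 1.0 = 2.0, so
%%   STDDEV_POP  = math:sqrt(2.0 / 3)        %% ~ 0.8165, divides by Count
%%   STDDEV_SAMP = math:sqrt(2.0 / (3 - 1))  %% = 1.0,    divides by Count - 1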
tests/ts_util.erl
@@ -68,6 +68,7 @@
     remove_last/1,
     results/1,
     single_query/2,
+    single_query/3,
     ts_get/6,
     ts_get/7,
     ts_insert/4,
@@ -119,8 +120,11 @@ ts_query({Cluster, Conn}, TestType, DDL, Data, Qry, Bucket) ->
     single_query(Conn, Qry).
 
 single_query(Conn, Qry) ->
+    single_query(Conn, Qry, []).
+
+single_query(Conn, Qry, Opts) ->
     lager:info("3 - Now run the query ~ts", [Qry]),
-    Got = riakc_ts:query(Conn, Qry),
+    Got = riakc_ts:query(Conn, Qry, Opts),
     lager:info("Result is ~p", [Got]),
     Got.
 
@@ -443,7 +447,6 @@ get_ddl(aggregation, Table) ->
     " PRIMARY KEY ((myfamily, myseries, quantum(time, 10, 'm')), "
     " myfamily, myseries, time))".
 
-
 get_data(api) ->
     [{<<"family1">>, <<"seriesX">>, 100, 1, <<"test1">>, 1.0, true}] ++
     [{<<"family1">>, <<"seriesX">>, 200, 2, <<"test2">>, 2.0, false}] ++
@@ -524,28 +527,39 @@ get_optional(N, X) ->
 -define(DELTA, 1.0e-15).
 
-assert_float(String, {_, {Cols, [ValsA]}} = Exp, {_, {Cols, [ValsB]}} = Got) ->
+assert_float(String, {ok, Thing1}, {ok, Thing2}) ->
+    assert_float(String, Thing1, Thing2);
+assert_float(String, {Cols, [ValsA]} = Exp, {Cols, [ValsB]} = Got) ->
     case assertf2(tuple_to_list(ValsA), tuple_to_list(ValsB)) of
-        fail -> lager:info("*****************", []),
+        fail ->
+                lager:info("*****************", []),
                 lager:info("Test ~p failed", [String]),
                 lager:info("Exp ~p", [Exp]),
                 lager:info("Got ~p", [Got]),
                 lager:info("*****************", []),
                 fail;
-        pass -> pass
+        pass ->
+                pass
     end;
-assert_float(String, Exp, Got) -> assert(String, Exp, Got).
+assert_float(String, Exp, Got) ->
+    assert(String, Exp, Got).
 
 assertf2([], []) -> pass;
-assertf2([H1 | T1], [H2 | T2]) ->
+assertf2([H1 | T1], [H2 | T2]) when is_float(H1), is_float(H2) ->
     Diff = H1 - H2,
     Av = (H1 + H2)/2,
     if Diff/Av > ?DELTA -> fail;
-       el/=se -> assertf2(T1, T2)
-    end.
+       el/=se -> assertf2(T1, T2)
+    end;
+assertf2([H | T1], [H | T2]) ->
+    assertf2(T1, T2);
+assertf2(_, _) ->
+    fail.
 
 
 assert(_, X, X) -> pass;
-assert(String, Exp, Got) -> lager:info("*****************", []),
+assert(String, Exp, Got) ->
+    lager:info("*****************", []),
     lager:info("Test ~p failed", [String]),
     lager:info("Exp ~p", [Exp]),
     lager:info("Got ~p", [Got]),
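As background on the "fuzzy-compare those floats" note in the commit message: exact equality on returned float aggregates is fragile because of ordinary IEEE 754 rounding, so assertf2/2 compares the relative difference of each pair of floats against ?DELTA instead. A quick illustration, not part of the diff:

%% 0.1 + 0.2 evaluates to 0.30000000000000004, so
%%   (0.1 + 0.2) =:= 0.3   %% is false,
%% whereas the relative difference, (H1 - H2) / ((H1 + H2) / 2), of the
%% same pair is around 1.8e-16 and does not exceed ?DELTA (1.0e-15).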