Mirror of https://github.com/valitydev/riak_test.git (synced 2024-11-06 08:35:22 +00:00)
Tweak tests to use lists of tuples

Commit: 09ecf74d5e
Parent: 1e995c1d3b
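The hunks below switch the time-series test data from lists of lists to lists of tuples, so rows written with riakc_ts:put have the same shape as the rows that come back from get and query calls; per-row conversion helpers (lists_to_tuples/1, ltot/1) are dropped and column access moves from lists:nth/2 to element/2. A minimal standalone sketch of that pattern follows; the module and function names are hypothetical and not part of the commit.

-module(row_shape_sketch).
-export([rows_as_lists/1, rows_as_tuples/1, demo/0]).

%% Old shape: each record is a list; columns are read with lists:nth/2.
rows_as_lists(N) ->
    [[<<"family1">>, <<"seriesX">>, T, 1.0] || T <- lists:seq(1, N)].

%% New shape: each record is a tuple; columns are read with element/2.
rows_as_tuples(N) ->
    [{<<"family1">>, <<"seriesX">>, T, 1.0} || T <- lists:seq(1, N)].

demo() ->
    Lists  = rows_as_lists(3),
    Tuples = rows_as_tuples(3),
    %% The old list rows convert to the new tuple rows with a single map,
    %% which is what helpers such as lists_to_tuples/1 used to do per test.
    Tuples = lists:map(fun erlang:list_to_tuple/1, Lists),
    %% Column access changes from lists:nth/2 to element/2 (both 1-based).
    ThirdFromLists  = [lists:nth(3, Row) || Row <- Lists],
    ThirdFromTuples = [element(3, Row) || Row <- Tuples],
    ThirdFromLists = ThirdFromTuples,
    ok.

With rows already in tuple form, the tests can compare written data against query results directly (for example ?assertEqual(Data, Got) in the hunks below) instead of converting each row first.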
@@ -96,8 +96,8 @@ confirm_all_from_node(Node, Data, PvalP1, PvalP2) ->
     ok = confirm_get(C, lists:nth(12, Data)),
     ok = confirm_nx_get(C),

-    %% Switch to native mode and repeat a few tests
-    riakc_pb_socket:use_native_encoding(C, true),
+    %% Switch to protocol buffer mode and repeat a few tests
+    %% riakc_pb_socket:use_native_encoding(C, true),

     %% 5 (redux). select
     ok = confirm_select(C, PvalP1, PvalP2),
@@ -106,7 +106,7 @@ confirm_all_from_node(Node, Data, PvalP1, PvalP2) ->
     ok = confirm_get(C, lists:nth(12, Data)),
     ok = confirm_nx_get(C),

-    riakc_pb_socket:use_native_encoding(C, false),
+    %%riakc_pb_socket:use_native_encoding(C, false),

     ok = confirm_delete_all(C, RemainingKeys),
     {ok, []} = confirm_list_keys(C, 0).
@@ -116,10 +116,10 @@ make_data(PvalP1, PvalP2) ->
     lists:reverse(
       lists:foldl(
         fun(T, Q) ->
-                [[PvalP1,
+                [{PvalP1,
                   PvalP2,
                   ?TIMEBASE + ?LIFESPAN - T + 1,
-                  math:sin(float(T) / 100 * math:pi())] | Q]
+                  math:sin(float(T) / 100 * math:pi())} | Q]
         end,
         [], lists:seq(?LIFESPAN, 1, -1))).

@@ -144,7 +144,7 @@ confirm_overwrite(C, Data) ->
     ?assertEqual(ok, Res),
     ok.

-confirm_delete(C, [Pooter1, Pooter2, Timepoint | _] = Record) ->
+confirm_delete(C, {Pooter1, Pooter2, Timepoint, _} = Record) ->
     ResFail = riakc_ts:delete(C, <<"no-bucket-like-this">>, ?BADKEY, []),
     io:format("Nothing deleted from a non-existent bucket: ~p\n", [ResFail]),
     ?assertMatch({error, _}, ResFail),
@@ -194,7 +194,7 @@ confirm_select(C, PvalP1, PvalP2) ->
     ?assertEqual(10 - 1 - 1, length(Rows)),
     ok.

-confirm_get(C, Record = [Pooter1, Pooter2, Timepoint | _]) ->
+confirm_get(C, Record = {Pooter1, Pooter2, Timepoint, _}) ->
     ResFail = riakc_ts:get(C, <<"no-bucket-like-this">>, ?BADKEY, []),
     io:format("Got nothing from a non-existent bucket: ~p\n", [ResFail]),
     ?assertMatch({error, _}, ResFail),
@@ -39,19 +39,14 @@ confirm() ->
     ts_util:create_table(normal, Nodes, DDL, Table),
     ok = riakc_ts:put(rt:pbc(hd(Nodes)), Table, Data),
     %% First test on a small range well within the size of a normal query
-    SmallData = lists:filter(fun([_, _, Time, _, _]) ->
+    SmallData = lists:filter(fun({_, _, Time, _, _}) ->
                                      Time < (4 * QuantumMS)
                              end, Data),
-    test_quanta_range(Table, lists_to_tuples(SmallData), Nodes, 4, QuantumMS),
+    test_quanta_range(Table, SmallData, Nodes, 4, QuantumMS),
     %% Now test the full range
-    test_quanta_range(Table, lists_to_tuples(Data), Nodes, QuantaTally, QuantumMS),
+    test_quanta_range(Table, Data, Nodes, QuantaTally, QuantumMS),
     pass.
-
-
-%% We put data with each record as a list, but in the results it's a tuple
-lists_to_tuples(Rows) ->
-    lists:map(fun erlang:list_to_tuple/1, Rows).

 test_quanta_range(Table, ExpectedData, Nodes, NumQuanta, QuantumMS) ->
     AdminPid = rt:pbc(lists:nth(3, Nodes)),
     OtherPid = rt:pbc(lists:nth(2, Nodes)),
@@ -88,10 +88,12 @@ describe_test(Ctx) ->

 get_put_data_test(Ctx) ->
     C = client_pid(Ctx),
-    Data = [[<<"a">>, <<"b">>, 10101010, <<"not bad">>, 42.24]],
+    Data = [{<<"a">>, <<"b">>, 10101010, <<"not bad">>, 42.24}],
     Key = [<<"a">>, <<"b">>, 10101010],
     ?assertMatch(ok, riakc_ts:put(C, ts_util:get_default_bucket(), Data)),
-    ?assertMatch({ok, {_, Data}}, riakc_ts:get(C, ts_util:get_default_bucket(), Key, [])),
+    Got = riakc_ts:get(C, ts_util:get_default_bucket(), Key, []),
+    lager:info("get_put_data_test Got ~p", [Got]),
+    ?assertMatch({ok, {_, Data}}, Got),
     pass.

 get_set_property_test(Ctx) ->
@@ -54,10 +54,7 @@ run_query(ClusterConn, NVal, NPuts, Q, NSpans) ->
     ok = riakc_ts:put(Conn, Bucket, Data),
     {_, Got} = ts_util:single_query(Conn, Query),

-    %% should get the data back
-    Got2 = [tuple_to_list(X) || X <- Got],
-
-    ?assertEqual(Data, Got2),
+    ?assertEqual(Data, Got),

     true.

@@ -86,9 +83,9 @@ make_data(NPuts, Q, NSpans) ->
     Family = <<"family1">>,
     Series = <<"seriesX">>,
     Times = lists:seq(1, NPuts),
-    [[Family, Series, trunc((X/NPuts) * Multi),
+    [{Family, Series, trunc((X/NPuts) * Multi),
       ts_util:get_varchar(),
-      ts_util:get_float()]
+      ts_util:get_float()}
      || X <- Times].

 get_multi({No, y}) -> 365*24*60*60*1000 * No;
@@ -54,9 +54,9 @@ verify_aggregation(ClusterType) ->
     Count = 10,
     Data = ts_util:get_valid_aggregation_data(Count),
     lager:info("Data is ~p", [Data]),
-    Column4 = [lists:nth(?TEMPERATURE_COL_INDEX, X) || X <- Data],
-    Column5 = [lists:nth(?PRESSURE_COL_INDEX, X) || X <- Data],
-    Column6 = [lists:nth(?PRECIPITATION_COL_INDEX, X) || X <- Data],
+    Column4 = [element(?TEMPERATURE_COL_INDEX, X) || X <- Data],
+    Column5 = [element(?PRESSURE_COL_INDEX, X) || X <- Data],
+    Column6 = [element(?PRECIPITATION_COL_INDEX, X) || X <- Data],
     TestType = normal,
     Bucket = "WeatherData",

@@ -30,9 +30,9 @@ confirm() ->
     DDL = ts_util:get_ddl(aggregration),
     Count = 10,
     Data = ts_util:get_valid_aggregation_data_not_null(Count),
-    Column4 = [lists:nth(4, X) || X <- Data],
-    Column5 = [lists:nth(5, X) || X <- Data],
-    Column6 = [lists:nth(6, X) || X <- Data],
+    Column4 = [element(4, X) || X <- Data],
+    Column5 = [element(5, X) || X <- Data],
+    Column6 = [element(6, X) || X <- Data],
     TestType = normal,
     Bucket = "WeatherData",

@@ -142,10 +142,10 @@ buildList(Acc, Next) ->
 % Given a list of lists, return a list of tuples
 %------------------------------------------------------------

-ltot(Lists) ->
-    lists:foldl(fun(Entry, Acc) ->
-                    buildList(Acc, list_to_tuple(Entry))
-                end, [], Lists).
+%%ltot(Lists) ->
+%%    lists:foldl(fun(Entry, Acc) ->
+%%                    buildList(Acc, list_to_tuple(Entry))
+%%                end, [], Lists).

 %------------------------------------------------------------
 % Return a list of indices corresponding to the passed list of field
@@ -166,7 +166,7 @@ indexOf(Type, FieldNames) ->
 valuesOf(Type, FieldNames, Record) ->
     Indices = indexOf(Type, FieldNames),
     lists:foldl(fun(Index, Acc) ->
-                    buildList(Acc, lists:nth(Index, Record))
+                    buildList(Acc, element(Index, Record))
                 end, [], Indices).

 %------------------------------------------------------------
@@ -174,7 +174,7 @@ valuesOf(Type, FieldNames, Record) ->
 %------------------------------------------------------------

 recordsMatching(Type, Data, FieldNames, CompVals, CompFun) ->
-    ltot(lists:foldl(fun(Record, Acc) ->
+    lists:foldl(fun(Record, Acc) ->
                     Vals = valuesOf(Type, FieldNames, Record),
                     case CompFun(Vals, CompVals) of
                         true ->
@@ -182,7 +182,7 @@ recordsMatching(Type, Data, FieldNames, CompVals, CompFun) ->
                         false ->
                             Acc
                     end
-                end, [], Data)).
+                end, [], Data).

 %------------------------------------------------------------
 % Return the expected data from a query
@@ -32,7 +32,7 @@ confirm() ->
     DDL = ts_util:get_ddl(),
     Data = ts_util:get_valid_select_data(),
     DataRow = hd(Data),
-    Key = lists:sublist(DataRow, 3),
+    Key = lists:sublist(tuple_to_list(DataRow), 3),
     Expected = {ts_util:get_cols(),[DataRow]},
     {ok, Got} = ts_util:ts_get(
                   ts_util:cluster_and_connect(single),
@@ -30,8 +30,8 @@ confirm() ->
     DDL = ts_util:get_ddl(),
     Table = ts_util:get_default_bucket(),
     Data = ts_util:get_valid_select_data(),
-    TooMuchData = [[<<"rubbish">> | Row] || Row <- Data],
-    TooLittleData = [lists:reverse(tl(lists:reverse(Row))) || Row <- Data],
+    TooMuchData = [list_to_tuple([<<"rubbish">> | tuple_to_list(Row)]) || Row <- Data],
+    TooLittleData = [list_to_tuple(lists:reverse(tl(lists:reverse(tuple_to_list(Row))))) || Row <- Data],
     WrongColumns = TooMuchData ++ TooLittleData,
     Columns = ts_util:get_cols(),
     Expected =
@@ -55,7 +55,7 @@ confirm() ->
 make_data(0, _, _, Acc) ->
     Acc;
 make_data(N, F, S, Acc) when is_integer(N) andalso N > 0 ->
-    NewAcc = [
+    NewAcc = {
         F,
         S,
         1 + N * ?SPANNING_STEP,
@@ -64,5 +64,5 @@ make_data(N, F, S, Acc) when is_integer(N) andalso N > 0 ->
         [],
         [],
         []
-    ],
+    },
     make_data(N - 1, F, S, [NewAcc | Acc]).
@@ -41,7 +41,7 @@ confirm() ->
         "AND myseries = 'series' "
         "AND myfamily = 13.777744543543500002342342342342342340000000017777445435435000023423423423423423400000000177774454354350000234234234234234234000000001",
     ?assertEqual(
-        {[<<"myfamily">>, <<"myseries">>, <<"time">>], result_data()},
+        {[<<"myfamily">>, <<"myseries">>, <<"time">>], input_data()},
         ts_util:ts_query(
           ts_util:cluster_and_connect(single), TestType, TableDef, input_data(), Query)),
     pass.
@@ -49,8 +49,5 @@ confirm() ->
 %%
 input_data() ->
     Times = lists:seq(1, 10),
-    [[13.777744543543500002342342342342342340000000017777445435435000023423423423423423400000000177774454354350000234234234234234234000000001, <<"series">>, T] || T <- Times].
+    [{13.777744543543500002342342342342342340000000017777445435435000023423423423423423400000000177774454354350000234234234234234234000000001, <<"series">>, T} || T <- Times].

-%%
-result_data() ->
-    [list_to_tuple(R) || R <- input_data()].
@@ -36,7 +36,7 @@ table_def_1() ->

 create_table_def_1(Pid) ->
     ?assertEqual({[],[]}, riakc_ts:query(Pid, table_def_1())),
-    ok = riakc_ts:put(Pid, <<"table1">>, [[1,2,N,4] || N <- lists:seq(1,200)]).
+    ok = riakc_ts:put(Pid, <<"table1">>, [{1,2,N,4} || N <- lists:seq(1,200)]).

 delete_single_key_def_1_test(Pid) ->
     ?assertEqual(
@@ -81,7 +81,7 @@ table_def_3() ->

 create_table_def_3(Pid) ->
     ?assertEqual({[],[]}, riakc_ts:query(Pid, table_def_3())),
-    ok = riakc_ts:put(Pid, <<"table3">>, [[1,2,3,N,4] || N <- lists:seq(1,200)]).
+    ok = riakc_ts:put(Pid, <<"table3">>, [{1,2,3,N,4} || N <- lists:seq(1,200)]).

 delete_single_key_def_3_test(Pid) ->
     ?assertEqual(
@@ -101,7 +101,7 @@ create_table_def_4(Pid) ->
         "b SINT64 NOT NULL, "
         "c TIMESTAMP NOT NULL, "
         "PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c))")),
-    ok = riakc_ts:put(Pid, <<"table4">>, [[1,2,N] || N <- lists:seq(1,50)]).
+    ok = riakc_ts:put(Pid, <<"table4">>, [{1,2,N} || N <- lists:seq(1,50)]).

 %% query just the key that has been deleted
 query_key_after_it_has_been_deleted_test(Pid) ->
@@ -133,7 +133,7 @@ ts_insert(Conn, Table, Columns, Data) ->
              end,
     TermFn = fun insert_term_format/2,
     ColClause = string:strip(lists:foldl(ColFn, [], Columns), right, $,),
-    ValClause = string:strip(lists:foldl(TermFn, [], Data), right, $,),
+    ValClause = string:strip(lists:foldl(TermFn, [], tuple_to_list(Data)), right, $,),
     SQL = flat_format("INSERT INTO ~s (~s) VALUES (~s)",
                       [Table, ColClause, ValClause]),
     lager:info("~ts", [SQL]),
@@ -143,7 +143,7 @@ ts_insert(Conn, Table, Columns, Data) ->

 ts_insert_no_columns(Conn, Table, Data) ->
     TermFn = fun insert_term_format/2,
-    ValClause = string:strip(lists:foldl(TermFn, [], Data), right, $,),
+    ValClause = string:strip(lists:foldl(TermFn, [], tuple_to_list(Data)), right, $,),
     SQL = flat_format("INSERT INTO ~s VALUES (~s)",
                       [Table, ValClause]),
     lager:info("~ts", [SQL]),
@@ -286,7 +286,7 @@ get_valid_select_data(SeqFun) ->
     Family = <<"family1">>,
     Series = <<"seriesX">>,
     Times = SeqFun(),
-    [[Family, Series, X, get_varchar(), get_float()] || X <- Times].
+    [{Family, Series, X, get_varchar(), get_float()} || X <- Times].


 -define(SPANNING_STEP_BIG, (1000)).
@@ -295,7 +295,7 @@ get_valid_big_data(N) ->
     Family = <<"family1">>,
     Series = <<"seriesX">>,
     Times = lists:seq(1, N),
-    [[
+    [{
       Family,
       Series,
       1 + N * ?SPANNING_STEP_BIG,
@@ -304,25 +304,25 @@ get_valid_big_data(N) ->
       get_bool(X),
       N + 100000,
       get_optional(X, X)
-     ] || X <- Times].
+     } || X <- Times].

 get_valid_aggregation_data(N) ->
     Family = <<"family1">>,
     Series = <<"seriesX">>,
     Times = lists:seq(1, N),
-    [[Family, Series, X,
+    [{Family, Series, X,
       get_optional(X, get_float()),
       get_optional(X+1, get_float()),
-      get_optional(X*3, get_float())] || X <- Times].
+      get_optional(X*3, get_float())} || X <- Times].

 get_valid_aggregation_data_not_null(N) ->
     Family = <<"family1">>,
     Series = <<"seriesX">>,
     Times = lists:seq(1, N),
-    [[Family, Series, X,
+    [{Family, Series, X,
       get_float(),
       get_float(),
-      get_float()] || X <- Times].
+      get_float()} || X <- Times].

 -define(SPANNING_STEP, (1000*60*5)).

@@ -338,7 +338,7 @@ get_valid_select_data_spanning_quanta() ->
     Family = <<"family1">>,
     Series = <<"seriesX">>,
     Times = lists:seq(1 + ?SPANNING_STEP, 1 + ?SPANNING_STEP * 10, ?SPANNING_STEP), %% five-minute intervals, to span 15-min buckets
-    [[Family, Series, X, get_varchar(), get_float()] || X <- Times].
+    [{Family, Series, X, get_varchar(), get_float()} || X <- Times].

 get_cols() ->
     get_cols(small).
@@ -357,12 +357,13 @@ get_cols(api) ->
      <<"myfloat">>,
      <<"mybool">>].

+
 exclusive_result_from_data(Data, Start, Finish) when is_integer(Start) andalso
                                                      is_integer(Finish) andalso
                                                      Start > 0 andalso
                                                      Finish > 0 andalso
                                                      Finish > Start ->
-    [list_to_tuple(X) || X <- lists:sublist(Data, Start, Finish - Start + 1)].
+    lists:sublist(Data, Start, Finish - Start + 1).

 remove_last(Data) ->
     lists:reverse(tl(lists:reverse(Data))).
@@ -425,10 +426,10 @@ get_ddl(aggregration) ->


 get_data(api) ->
-    [[<<"family1">>, <<"seriesX">>, 100, 1, <<"test1">>, 1.0, true]] ++
-    [[<<"family1">>, <<"seriesX">>, 200, 2, <<"test2">>, 2.0, false]] ++
-    [[<<"family1">>, <<"seriesX">>, 300, 3, <<"test3">>, 3.0, true]] ++
-    [[<<"family1">>, <<"seriesX">>, 400, 4, <<"test4">>, 4.0, false]].
+    [{<<"family1">>, <<"seriesX">>, 100, 1, <<"test1">>, 1.0, true}] ++
+    [{<<"family1">>, <<"seriesX">>, 200, 2, <<"test2">>, 2.0, false}] ++
+    [{<<"family1">>, <<"seriesX">>, 300, 3, <<"test3">>, 3.0, true}] ++
+    [{<<"family1">>, <<"seriesX">>, 400, 4, <<"test4">>, 4.0, false}].

 get_map(api) ->
     [{<<"myfamily">>, 1},
@@ -441,32 +442,32 @@ get_map(api) ->


 get_valid_obj() ->
-    [get_varchar(),
+    {get_varchar(),
      get_varchar(),
      get_timestamp(),
      get_varchar(),
-     get_float()].
+     get_float()}.

 get_invalid_obj() ->
-    [get_varchar(),
+    {get_varchar(),
      get_integer(), % this is the duff field
      get_timestamp(),
      get_varchar(),
-     get_float()].
+     get_float()}.

 get_short_obj() ->
-    [get_varchar(),
+    {get_varchar(),
      get_varchar(),
      get_timestamp(),
-     get_varchar()].
+     get_varchar()}.

 get_long_obj() ->
-    [get_varchar(),
+    {get_varchar(),
      get_varchar(),
      get_timestamp(),
      get_varchar(),
      get_float(),
-     get_float()].
+     get_float()}.

 get_varchar() ->
     Len = random:uniform(?MAXVARCHARLEN),