Merge pull request #1117 from basho/feature_gg_group_by_updowngrade

Rejig of updown test suite to add group_by tests
Brett Hazen 2016-08-17 16:20:57 -06:00 committed by GitHub
commit 3ec9ef3d81
4 changed files with 204 additions and 23 deletions

View File

@@ -0,0 +1,167 @@
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ts_cluster_updowngrade_group_by_SUITE).
-export([
sorted_assert/3,
plain_assert/3
]).
-include("ts_updowngrade_test.part").
%% yes, this 1.3 error message is bonkers (about as useful as a chocolate
%% teapot), but it is exactly the error the old version returns
-define(SELECTERROR, {error, {1020, <<"Used group as a measure of time in 1000group. Only s, m, h and d are allowed.">>}}).
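%% (most likely the 1.3 parser, which knows nothing of GROUP BY, lexes the
%% trailing "1000 GROUP" of the WHERE clause as a duration literal "1000group"
%% and then rejects "group" as a time unit - hence the message above)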
make_scenarios() ->
BaseScenarios = [#scenario{table_node_vsn = TableNodeVsn,
query_node_vsn = QueryNodeVsn,
need_table_node_transition = NeedTableNodeTransition,
need_query_node_transition = NeedQueryNodeTransition,
need_pre_cluster_mixed = NeedPreClusterMixed,
need_post_cluster_mixed = NeedPostClusterMixed}
|| TableNodeVsn <- [current, previous],
QueryNodeVsn <- [current, previous],
NeedTableNodeTransition <- [true, false],
NeedQueryNodeTransition <- [true, false],
NeedPreClusterMixed <- [true, false],
NeedPostClusterMixed <- [true, false]],
lists:flatten([add_tests(X) || X <- BaseScenarios]).
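%% the six two-valued generators above give 2 * 2 * 2 * 2 * 2 * 2 = 64 base
%% scenarios; add_tests/1 returns one #scenario{} per input, so the flattened
%% result is a list of 64 fully-populated scenario records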
%% This test suite does not use config invariants; see
%% ts_cluster_updowngrade_select_aggregation_SUITE.erl (and the sketch below)
%% for an example of how to use them
make_scenario_invariants(Config) ->
Config.
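%% a rough sketch of what using the invariants would look like (hypothetical,
%% modelled on the aggregation suite change further down in this commit):
%% build the shared test sets once and stash them in Config for every
%% scenario to reuse
%%
%% make_scenario_invariants(Config) ->
%%     Create  = #create{ddl = DDL, expected = {ok, {[], []}}},
%%     Insert  = #insert{data = Data, expected = ok},
%%     Selects = [#select{qry        = Q,
%%                        expected   = E,
%%                        assert_mod = ts_util,
%%                        assert_fun = assert_float} || {Q, E} <- QueriesVsExpected],
%%     Config ++ [{default_tests, [#test_set{testname = "some_default_test",
%%                                           create   = Create,
%%                                           insert   = Insert,
%%                                           selects  = Selects}]}].
%%
%% (DDL, Data, QueriesVsExpected and the test name are placeholders for
%% suite-specific values)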
%% GROUP BY will always work if
%%   - the query node is 1.4, or
%%   - the query node is queried *AFTER* a transition up to 1.4
%% in this scenario the query node starts at 1.4 and remains there;
%% the cluster is either mixed or all 1.4
add_tests(#scenario{query_node_vsn = current,
need_query_node_transition = false} = Scen) ->
Tests = [
make_select_grouped_field_test(select_passes),
make_group_by_2_test(select_passes)
],
Scen#scenario{tests = Tests};
%% in this scenario the query node starts at 1.3 and transitions to 1.4 before
%% the select happens - the initial cluster is mixed and the final cluster can
%% be mixed or all 1.4
add_tests(#scenario{query_node_vsn = previous,
need_query_node_transition = true} = Scen) ->
Tests = [
make_select_grouped_field_test(select_passes),
make_group_by_2_test(select_passes)
],
Scen#scenario{tests = Tests};
%% in all other scenarios (all 1.3, or mixed with a 1.3 query node) GROUP BY
%% won't work
add_tests(Scen) ->
Tests = [
make_select_grouped_field_test(select_fails),
make_group_by_2_test(select_fails)
],
Scen#scenario{tests = Tests}.
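%% summarising the three clauses above (current = 1.4, previous = 1.3):
%%
%%   query node vsn | query node transitions? | GROUP BY tests
%%   ---------------+-------------------------+------------------------------
%%   current        | false                   | select_passes
%%   previous       | true                    | select_passes
%%   current        | true                    | select_fails (catch-all clause)
%%   previous       | false                   | select_fails (catch-all clause)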
make_select_grouped_field_test(DoesSelectPass) ->
Create = #create{ddl = "CREATE TABLE ~s ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c,1,s)), a,b,c))",
expected = {ok, {[], []}}},
Insert = #insert{data = [{1,B,C} || B <- [1,2,3], C <- [1,2,3]],
expected = ok},
{SelExp, AssertFn}
= case DoesSelectPass of
select_passes ->
{{ok, {[<<"c">>], [{2},{1},{3}]}}, sorted_assert};
select_fails ->
{?SELECTERROR, plain_assert}
end,
Select = #select{qry = "SELECT c FROM ~s "
"WHERE a = 1 AND b = 1 AND c >= 1 AND c <= 1000 "
"GROUP BY c",
expected = SelExp,
assert_mod = ?MODULE,
assert_fun = AssertFn},
#test_set{testname = "grouped_field_test",
create = Create,
insert = Insert,
selects = [Select]}.
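%% worked expectation for the test above: of the nine rows inserted only the
%% three with b = 1 satisfy the WHERE clause, their c values are 1, 2 and 3,
%% so GROUP BY c yields three single-column groups; sorted_assert sorts both
%% expected and actual rows, so the {2},{1},{3} ordering above does not matter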
make_group_by_2_test(DoesSelectPass) ->
Create = #create{ddl = "CREATE TABLE ~s ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"e SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c,1,s)), a,b,c,d))",
expected = {ok, {[], []}}},
Insert = #insert{data = [{1,1,CE,D,CE} || CE <- lists:seq(1,1000),
D <- [1,2,3]],
expected = ok},
{SelExp, AssertFn}
= case DoesSelectPass of
select_passes -> {{ok, {[<<"d">>, <<"AVG(e)">>],
[{2,500.5}, {3,500.5}, {1,500.5}]}},
sorted_assert};
select_fails -> {?SELECTERROR, plain_assert}
end,
Select = #select{qry = "SELECT d, AVG(e) FROM ~s "
"WHERE a = 1 AND b = 1 AND c >= 1 AND c <= 1000 "
"GROUP BY d",
expected = SelExp,
assert_mod = ?MODULE,
assert_fun = AssertFn},
#test_set{testname = "group_by_2",
create = Create,
insert = Insert,
selects = [Select]}.
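%% worked expectation for group_by_2: each d group (1, 2 and 3) contains
%% e = c for c = 1..1000, so AVG(e) = (1 + 1000) / 2 = 500.5 in every group;
%% row order is again normalised by sorted_assert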
sorted_assert(String, {ok, {ECols, Exp}}, {ok, {GCols, Got}}) ->
Exp2 = lists:sort(Exp),
Got2 = lists:sort(Got),
ts_util:assert_float(String, {ECols, Exp2}, {GCols, Got2});
sorted_assert(String, Exp, Got) ->
ok = log_error(String ++ " (sorted_assert)", Exp, Got),
fail.
plain_assert(_String, Exp, Exp) ->
pass;
plain_assert(String, Exp, Got) ->
ok = log_error(String ++ " (plain_assert)", Exp, Got),
fail.
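%% both callbacks follow the assert_mod/assert_fun contract: the shared
%% updowngrade harness applies them as Mod:Fun(Description, Expected, Got)
%% (see run_select/5 in this commit) and they must return pass or fail.
%% sorted_assert exists because GROUP BY makes no ordering guarantee, so both
%% sides are sorted before ts_util:assert_float/3 compares them.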
log_error(String, Exp, Got) ->
lager:info("*****************", []),
lager:info("Test ~p failed", [String]),
lager:info("Exp ~p", [Exp]),
lager:info("Got ~p", [Got]),
lager:info("*****************", []),
ok.

View File

@@ -45,10 +45,14 @@ make_scenario_invariants(Config) ->
{SelectVsExpected, Data} = make_queries_and_data(),
Create = #create{ddl = DDL, expected = {ok, {[], []}}},
Insert = #insert{data = Data, expected = ok},
Selects = [#select{qry = Q, expected = E} || {Q, E} <- SelectVsExpected],
DefaultTestSets = [#test_set{create = Create,
insert = Insert,
selects = Selects}],
Selects = [#select{qry = Q,
expected = E,
assert_mod = ts_util,
assert_fun = assert_float} || {Q, E} <- SelectVsExpected],
DefaultTestSets = [#test_set{testname = "basic_select_aggregation",
create = Create,
insert = Insert,
selects = Selects}],
Config ++ [{default_tests, DefaultTestSets}].
make_queries_and_data() ->

View File

@@ -368,12 +368,13 @@ fmt(F, A) ->
make_tables(#test_set{create = #create{should_skip = true}}, _TableNode) ->
pass;
make_tables(#test_set{timestamp = Timestamp,
create = #create{ddl = DDLFmt,
make_tables(#test_set{testname = Testname,
timestamp = Timestamp,
create = #create{ddl = DDLFmt,
expected = Exp}}, TableNode) ->
%% fast machines:
timer:sleep(1),
Table = get_table_name(Timestamp),
Table = get_table_name(Testname, Timestamp),
DDL = fmt(DDLFmt, [Table]),
Client1 = rt:pbc(TableNode),
case riakc_ts:'query'(Client1, DDL) of
@@ -381,8 +382,8 @@ make_tables(#test_set{timestamp = Timestamp,
ok = wait_until_active_table(Client1, Table, 5),
ct:log("Table ~p created on ~p", [Table, TableNode]),
pass;
Error ->
ct:log("Failed to create table ~p: (~s)", [Table, Error]),
{error, {_No, Error}} ->
ct:log("Failed to create table ~p: (~s) with ~s", [Table, Error, DDL]),
#fail{message = make_msg("Creation of ~s failed", [Table]),
expected = Exp,
got = Error}
@@ -390,11 +391,12 @@ make_tables(#test_set{timestamp = Timestamp,
insert_data(#test_set{insert = #insert{should_skip = true}}, _TableNode) ->
pass;
insert_data(#test_set{timestamp = Timestamp,
insert = #insert{data = Data,
insert_data(#test_set{testname = Testname,
timestamp = Timestamp,
insert = #insert{data = Data,
expected = Exp}}, TableNode) ->
Client1 = rt:pbc(TableNode),
Table = get_table_name(Timestamp),
Table = get_table_name(Testname, Timestamp),
case riakc_ts:put(Client1, Table, Data) of
Exp ->
ct:log("Table ~p on ~p had ~b records successfully inserted)",
@@ -407,19 +409,23 @@ insert_data(#test_set{timestamp = Timestamp,
got = Error}
end.
run_selects(#test_set{timestamp = Timestamp,
run_selects(#test_set{testname = Testname,
timestamp = Timestamp,
selects = Selects}, QueryNode, Config) ->
QryNos = lists:seq(1, length(Selects)),
Zip = lists:zip(Selects, QryNos),
lists:flatten([run_select(S, QN, Timestamp, QueryNode, Config) || {S, QN} <- Zip]).
Tablename = get_table_name(Testname, Timestamp),
lists:flatten([run_select(S, QN, Tablename, QueryNode, Config) || {S, QN} <- Zip]).
run_select(#select{should_skip = true}, _QryNo, _Timestamp, _QueryNode, _Config) ->
run_select(#select{should_skip = true}, _QryNo, _Tablename, _QueryNode, _Config) ->
pass;
run_select(#select{qry = Q, expected = Exp}, QryNo, Timestamp, QueryNode, Config) ->
Table = get_table_name(Timestamp),
SelectQuery = fmt(Q, [Table]),
run_select(#select{qry = Q,
expected = Exp,
assert_mod = Mod,
assert_fun = Fun}, QryNo, Tablename, QueryNode, Config) ->
SelectQuery = fmt(Q, [Tablename]),
Got = query_with_client(SelectQuery, QueryNode, Config),
case ts_util:assert_float(fmt("Query #~p", [QryNo]), Exp, Got) of
case Mod:Fun(fmt("Query #~p", [QryNo]), Exp, Got) of
pass -> pass;
fail -> #fail{message = SelectQuery,
expected = Exp,
@@ -433,8 +439,9 @@ make_timestamp() ->
{_Mega, Sec, Milli} = os:timestamp(),
fmt("~b~b", [Sec, Milli]).
get_table_name(Timestamp) when is_list(Timestamp) ->
"updown_test_" ++ Timestamp.
get_table_name(Testname, Timestamp) when is_list(Testname) andalso
is_list(Timestamp) ->
Testname ++ Timestamp.
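%% e.g. get_table_name("group_by_2", "12345678") -> "group_by_212345678"
%% (plain concatenation of test name and timestamp, with no separator)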
make_msg(Format, Payload) ->
list_to_binary(fmt(Format, Payload)).

View File

@@ -46,11 +46,14 @@
should_skip = false :: boolean(),
%% the select query is an io_lib:format containing a single "~s" placeholder
%% for the table name
qry :: binary(),
expected :: term()
qry :: binary(),
expected :: term(),
assert_mod :: atom(),
assert_fun :: atom()
}).
-record(test_set, {
testname :: string(),
create :: #create{},
insert :: #insert{},
selects = [] :: [#select{}],