From 97159fd9b07475b87311d76cc0093cc1d4205a89 Mon Sep 17 00:00:00 2001 From: Martin Sumner Date: Sun, 19 Apr 2020 23:50:35 +0100 Subject: [PATCH 01/10] Add OTP 22 support --- .gitignore | 1 + rebar.config | 20 +++++++--------- rebar.lock | 34 --------------------------- src/basho_bench_driver_nhs.erl | 36 ++++++++++++++--------------- src/basho_bench_driver_riakc_pb.erl | 24 +++++++++---------- src/basho_bench_valgen.erl | 6 ++--- 6 files changed, 42 insertions(+), 79 deletions(-) delete mode 100644 rebar.lock diff --git a/.gitignore b/.gitignore index a62abb735..92bb3fbf4 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ tests/ /rel/basho_bench package .rebar +rebar.lock *~ #*# diff --git a/rebar.config b/rebar.config index 4cc7a390d..776944442 100644 --- a/rebar.config +++ b/rebar.config @@ -1,10 +1,4 @@ -%% Known library limitations that make support for Erlang/OTP -%% versions R15 or earlier difficult: -%% -%% katja: Defines a function `query()`, which is a reserved word in R15 -%% uuid: Dep on quickrand -%% quickrand: Bogus (IMHO) inability to adapt to R15 environment. -{require_otp_vsn, "20"}. +{minimum_otp_vsn, "20"}. {deps, [ @@ -15,7 +9,7 @@ %% increments. If someone wants to take advantage of a %% new folsom feature, that desire + float incr must be %% weighed. - {folsom, "0.8.7"}, + {folsom, {git, "git://github.com/basho/folsom.git", {branch, "basho_bench_float"}}}, {lager, {git, "git://github.com/erlang-lager/lager", {tag, "3.8.0"}}}, {ibrowse, {git, "git://github.com/basho/ibrowse.git", {branch, "develop-3.0"}}}, {riakc, {git, "git://github.com/basho/riak-erlang-client", {branch, "develop-3.0-292"}}}, @@ -34,16 +28,18 @@ goldrush, ibrowse, lager, - lz4, + %% lz4, %% meck, mochiweb, %% proper, - protobuffs, + %% protobuffs, riak_pb, - riakc, - snappy + riakc %, + %% snappy ]}. +{escript_main_app, basho_bench}. + %% When using the Java client bench driver, please use the -N and -C %% command line options to set the distributed Erlang node name %% and node cookie for the basho_bench VM. diff --git a/rebar.lock b/rebar.lock deleted file mode 100644 index dd7f2f023..000000000 --- a/rebar.lock +++ /dev/null @@ -1,34 +0,0 @@ -{"1.1.0", -[{<<"bear">>,{pkg,<<"bear">>,<<"0.8.7">>},1}, - {<<"folsom">>,{pkg,<<"folsom">>,<<"0.8.7">>},0}, - {<<"getopt">>, - {git,"git://github.com/jcomellas/getopt", - {ref,"388dc95caa7fb97ec7db8cfc39246a36aba61bd8"}}, - 0}, - {<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},1}, - {<<"ibrowse">>, - {git,"git://github.com/basho/ibrowse.git", - {ref,"893864e25573b4ccd31a82b9412639751733ba40"}}, - 0}, - {<<"lager">>, - {git,"git://github.com/erlang-lager/lager", - {ref,"22e62f28e5afabe90a6f31bcde367a2b5799fc94"}}, - 0}, - {<<"mochiweb">>, - {git,"git://github.com/mochi/mochiweb", - {ref,"9090c7942f9dcde4ddbf9f78429e289ff5600c18"}}, - 0}, - {<<"riak_pb">>, - {git,"https://github.com/basho/riak_pb", - {ref,"55fab3a2642edf2293228d5a43748a0fc3c8e554"}}, - 1}, - {<<"riakc">>, - {git,"git://github.com/basho/riak-erlang-client", - {ref,"1bc4e86f2969d7dd8dd05362ea00817435579a0a"}}, - 0}]}. -[ -{pkg_hash,[ - {<<"bear">>, <<"16264309AE5D005D03718A5C82641FCC259C9E8F09ADEB6FD79CA4271168656F">>}, - {<<"folsom">>, <<"A885F0AEEE4C84270954C88A55A5A473D6B2C7493E32FFDC5765412DD555A951">>}, - {<<"goldrush">>, <<"F06E5D5F1277DA5C413E84D5A2924174182FB108DABB39D5EC548B27424CD106">>}]} -]. 
diff --git a/src/basho_bench_driver_nhs.erl b/src/basho_bench_driver_nhs.erl index 1848ffde0..c1b4145b9 100644 --- a/src/basho_bench_driver_nhs.erl +++ b/src/basho_bench_driver_nhs.erl @@ -161,7 +161,7 @@ new(Id) -> pb_timeout = PBTimeout, http_timeout = HTTPTimeout, fold_timeout = FoldTimeout, - query_logfreq = random:uniform(?QUERYLOG_FREQ), + query_logfreq = rand:uniform(?QUERYLOG_FREQ), nominated_id = NominatedID, unique_key_count = 1, alwaysget_key_count = 0, @@ -249,7 +249,7 @@ run(alwaysget_updatewith2i, _KeyGen, ValueGen, State) -> Value = ValueGen(), KeyInt = eightytwenty_keycount(AGKC), ToExtend = - random:uniform(State#state.alwaysget_perworker_maxkeycount) > AGKC, + rand:uniform(State#state.alwaysget_perworker_maxkeycount) > AGKC, {Robj0, NewAGKC} = case ToExtend of @@ -301,7 +301,7 @@ run(alwaysget_updatewithout2i, _KeyGen, ValueGen, State) -> Value = ValueGen(), KeyInt = eightytwenty_keycount(AGKC), ToExtend = - random:uniform(State#state.alwaysget_perworker_maxkeycount) > AGKC, + rand:uniform(State#state.alwaysget_perworker_maxkeycount) > AGKC, {Robj0, NewAGKC} = case ToExtend of @@ -434,7 +434,7 @@ run(get_unique, _KeyGen, _ValueGen, State) -> Bucket = State#state.documentBucket, UKC = State#state.unique_key_count, LKC = State#state.unique_key_lowcount, - Key = generate_uniquekey(LKC + random:uniform(UKC - LKC), + Key = generate_uniquekey(LKC + rand:uniform(UKC - LKC), State#state.keyid, State#state.unique_keyorder), case riakc_pb_socket:get(Pid, Bucket, Key, State#state.pb_timeout) of @@ -475,8 +475,8 @@ run(postcodequery_http, _KeyGen, _ValueGen, State) -> Bucket = State#state.recordBucket, L = length(?POSTCODE_AREAS), - {_, Area} = lists:keyfind(random:uniform(L), 1, ?POSTCODE_AREAS), - District = Area ++ integer_to_list(random:uniform(26)), + {_, Area} = lists:keyfind(rand:uniform(L), 1, ?POSTCODE_AREAS), + District = Area ++ integer_to_list(rand:uniform(26)), StartKey = District ++ "|" ++ "a", EndKey = District ++ "|" ++ "h", URL = io_lib:format("http://~s:~p/buckets/~s/index/postcode_bin/~s/~s", @@ -510,7 +510,7 @@ run(dobquery_http, _KeyGen, _ValueGen, State) -> Port = State#state.http_port, Bucket = State#state.recordBucket, - RandYear = random:uniform(70) + 1950, + RandYear = rand:uniform(70) + 1950, DoBStart = integer_to_list(RandYear) ++ "0101", DoBEnd = integer_to_list(RandYear) ++ "0110", @@ -820,20 +820,20 @@ generate_binary_indexes() -> {{binary_index, "lastmodified"}, lastmodified_index()}]. postcode_index() -> - NotVeryNameLikeThing = base64:encode_to_string(crypto:rand_bytes(4)), + NotVeryNameLikeThing = base64:encode_to_string(crypto:strong_rand_bytes(4)), lists:map(fun(_X) -> L = length(?POSTCODE_AREAS), - {_, Area} = lists:keyfind(random:uniform(L), 1, ?POSTCODE_AREAS), - District = Area ++ integer_to_list(random:uniform(26)), + {_, Area} = lists:keyfind(rand:uniform(L), 1, ?POSTCODE_AREAS), + District = Area ++ integer_to_list(rand:uniform(26)), F = District ++ "|" ++ NotVeryNameLikeThing, list_to_binary(F) end, - lists:seq(1, random:uniform(3))). + lists:seq(1, rand:uniform(3))). dateofbirth_index() -> - Delta = random:uniform(2500000000), + Delta = rand:uniform(2500000000), {{Y, M, D}, _} = calendar:gregorian_seconds_to_datetime(Delta + 61000000000), - F = lists:flatten(io_lib:format(?DATE_FORMAT, [Y, M, D])) ++ "|" ++ base64:encode_to_string(crypto:rand_bytes(4)), + F = lists:flatten(io_lib:format(?DATE_FORMAT, [Y, M, D])) ++ "|" ++ base64:encode_to_string(crypto:strong_rand_bytes(4)), [list_to_binary(F)]. 
lastmodified_index() -> @@ -855,18 +855,18 @@ generate_uniquekey(C, RandBytes, key_order) -> non_compressible_value(Size) -> - crypto:rand_bytes(Size). + crypto:strong_rand_bytes(Size). eightytwenty_keycount(UKC) -> % 80% of the time choose a key in the bottom 20% of the % result range, and 20% of the time in the upper 80% of the range - TwentyPoint = random:uniform(max(1, UKC div 5)), - case random:uniform(max(1, UKC)) < TwentyPoint of + TwentyPoint = rand:uniform(max(1, UKC div 5)), + case rand:uniform(max(1, UKC)) < TwentyPoint of true -> - random:uniform(UKC - TwentyPoint) + max(1, TwentyPoint); + rand:uniform(UKC - TwentyPoint) + max(1, TwentyPoint); false -> - random:uniform(max(1, TwentyPoint)) + rand:uniform(max(1, TwentyPoint)) end. diff --git a/src/basho_bench_driver_riakc_pb.erl b/src/basho_bench_driver_riakc_pb.erl index 8d996c9fd..b7f7f0643 100644 --- a/src/basho_bench_driver_riakc_pb.erl +++ b/src/basho_bench_driver_riakc_pb.erl @@ -417,8 +417,8 @@ run(query_postcode, _KeyGen, _ValueGen, State) -> Pid = State#state.pid, Bucket = State#state.bucket, L = length(?POSTCODE_AREAS), - {_R, Area} = lists:keyfind(random:uniform(L), 1, ?POSTCODE_AREAS), - District = Area ++ integer_to_list(random:uniform(26)), + {_R, Area} = lists:keyfind(rand:uniform(L), 1, ?POSTCODE_AREAS), + District = Area ++ integer_to_list(rand:uniform(26)), StartKey = District ++ "|" ++ "a", EndKey = District ++ "|" ++ "b", case riakc_pb_socket:get_index_range(Pid, @@ -437,9 +437,9 @@ run(query_postcode, _KeyGen, _ValueGen, State) -> run(query_dob, _KeyGen, _ValueGen, State) -> Pid = State#state.pid, Bucket = State#state.bucket, - R = random:uniform(2500000000), + R = rand:uniform(2500000000), DOB_SK = pick_dateofbirth(R), - DOB_EK = pick_dateofbirth(R + random:uniform(86400 * 3)), + DOB_EK = pick_dateofbirth(R + rand:uniform(86400 * 3)), case riakc_pb_socket:get_index_range(Pid, Bucket, <<"dateofbirth_bin">>, @@ -741,22 +741,22 @@ generate_binary_indexes() -> {{binary_index, "lastmodified"}, lastmodified_index()}]. postcode_index() -> - NotVeryNameLikeThing = base64:encode_to_string(crypto:rand_bytes(4)), + NotVeryNameLikeThing = base64:encode_to_string(crypto:strong_rand_bytes(4)), lists:map(fun(_X) -> L = length(?POSTCODE_AREAS), - {_R, Area} = lists:keyfind(random:uniform(L), 1, ?POSTCODE_AREAS), - District = Area ++ integer_to_list(random:uniform(26)), + {_R, Area} = lists:keyfind(rand:uniform(L), 1, ?POSTCODE_AREAS), + District = Area ++ integer_to_list(rand:uniform(26)), F = District ++ "|" ++ NotVeryNameLikeThing, list_to_binary(F) end, - lists:seq(1, random:uniform(3))). + lists:seq(1, rand:uniform(3))). dateofbirth_index() -> F = pick_dateofbirth() ++ "|" ++ - base64:encode_to_string(crypto:rand_bytes(4)), + base64:encode_to_string(crypto:strong_rand_bytes(4)), [list_to_binary(F)]. pick_dateofbirth() -> - pick_dateofbirth(random:uniform(2500000000)). + pick_dateofbirth(rand:uniform(2500000000)). 
pick_dateofbirth(Delta) -> {{Y, M, D}, @@ -781,7 +781,7 @@ record_2i_results(Results, State) -> end, QCount = State#state.twoi_qcount + 1, RCount = State#state.twoi_rcount + RCount_ThisQuery, - case random:uniform(?RANDOMLOG_FREQ) < QCount of + case rand:uniform(?RANDOMLOG_FREQ) < QCount of true -> AvgRSize = RCount / QCount, TS = timer:now_diff(os:timestamp(), @@ -798,7 +798,7 @@ run_listkeys(State) -> lager:info("Commencing listkeys request"), Targets = State#state.singleton_targets, - {TargetIp, TargetPort} = lists:nth(random:uniform(length(Targets)+1), + {TargetIp, TargetPort} = lists:nth(rand:uniform(length(Targets)+1), Targets), ?INFO("Using target ~p:~p for new singleton asyncworker\n", [TargetIp, TargetPort]), diff --git a/src/basho_bench_valgen.erl b/src/basho_bench_valgen.erl index 7c6a41c66..0744542c3 100644 --- a/src/basho_bench_valgen.erl +++ b/src/basho_bench_valgen.erl @@ -123,7 +123,7 @@ init_altsource(Id) -> init_altsource(Id, basho_bench_config:get(?VAL_GEN_BLOB_CFG, undefined)). init_altsource(1, undefined) -> - GenRandStrFun = fun(_X) -> random:uniform(95) + 31 end, + GenRandStrFun = fun(_X) -> rand:uniform(95) + 31 end, RandomStrs = lists:map(fun(X) -> SL = lists:map(GenRandStrFun, lists:seq(1, 128)), @@ -132,7 +132,7 @@ init_altsource(1, undefined) -> lists:seq(1, 16)), ComboBlockFun = fun(_X, Acc) -> - Bin1 = crypto:rand_bytes(4096), + Bin1 = crypto:strong_rand_bytes(4096), Bin2 = create_random_textblock(32, RandomStrs), % Both the compressible and uncompressible parts will be % 4096 bytes in size. zlib will compress the compressible @@ -161,7 +161,7 @@ init_altsource(Id, Path) -> create_random_textblock(BlockLength, RandomStrs) -> GetRandomBlockFun = fun(X, Acc) -> - Rand = random:uniform(min(X, 16)), + Rand = rand:uniform(min(X, 16)), {Rand, Block} = lists:keyfind(Rand, 1, RandomStrs), <> end, From 760e63877d510bff36de92f4c835e5eb00ed34b0 Mon Sep 17 00:00:00 2001 From: Martin Sumner Date: Mon, 20 Apr 2020 14:57:09 +0100 Subject: [PATCH 02/10] Set lager log level in non-deprecated way --- src/basho_bench.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/basho_bench.erl b/src/basho_bench.erl index bcda7554a..5b7a50404 100644 --- a/src/basho_bench.erl +++ b/src/basho_bench.erl @@ -63,7 +63,7 @@ main(Args) -> CrashLog = filename:join([TestDir, "crash.log"]), application:set_env(lager, handlers, - [{lager_console_backend, ConsoleLagerLevel}, + [{lager_console_backend, [{level, ConsoleLagerLevel}]}, {lager_file_backend, [{file, ErrorLog}, {level, error}, {size, 10485760}, {date, "$D0"}, {count, 5}]}, {lager_file_backend, [{file, ConsoleLog}, {level, debug}, {size, 10485760}, {date, "$D0"}, {count, 5}]} ]), From 985cf196fc785ce4aeb3c8208b100a8afdd461ed Mon Sep 17 00:00:00 2001 From: Martin Sumner Date: Mon, 20 Apr 2020 15:35:11 +0100 Subject: [PATCH 03/10] Change seed in worker for OTP compliance --- src/basho_bench_worker.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/basho_bench_worker.erl b/src/basho_bench_worker.erl index ad92480e6..0ab3dbf20 100644 --- a/src/basho_bench_worker.erl +++ b/src/basho_bench_worker.erl @@ -213,7 +213,7 @@ worker_init(State) -> %% Trap exits from linked parent process; use this to ensure the driver %% gets a chance to cleanup process_flag(trap_exit, true), - rand:seed(State#state.rng_seed), + rand:seed(exsp, State#state.rng_seed), worker_idle_loop(State). 
 worker_idle_loop(State) ->

From 97ac1f9a1b9d06e2a986a8479d29626f6a5e5742 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Fri, 24 Apr 2020 17:10:12 +0100
Subject: [PATCH 04/10] Bump mochiweb branch for OTP20+ support

---
 rebar.config | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rebar.config b/rebar.config
index 776944442..f0883fd4b 100644
--- a/rebar.config
+++ b/rebar.config
@@ -13,7 +13,7 @@
         {lager, {git, "git://github.com/erlang-lager/lager", {tag, "3.8.0"}}},
         {ibrowse, {git, "git://github.com/basho/ibrowse.git", {branch, "develop-3.0"}}},
         {riakc, {git, "git://github.com/basho/riak-erlang-client", {branch, "develop-3.0-292"}}},
-        {mochiweb, {git, "git://github.com/martinsumner/mochiweb.git", {branch, "mas-431response"}}},
+        {mochiweb, {git, "git://github.com/martinsumner/mochiweb.git", {branch, "develop-3.0"}}},
        {getopt, ".*", {git, "git://github.com/jcomellas/getopt", {tag, "v0.8.2"}}}
]}.

From ad82439c86e46f01b3b1733a3ff50ad44bec5471 Mon Sep 17 00:00:00 2001
From: Steven Joseph
Date: Tue, 5 May 2020 20:47:05 +1000
Subject: [PATCH 05/10] feat: add driver for pointzi

---
 pointzi.config                           |  67 ++++
 src/basho_bench_driver_https_pointzi.erl | 393 +++++++++++++++++++++++
 src/pointzi.erl                          |  19 ++
 3 files changed, 479 insertions(+)
 create mode 100644 pointzi.config
 create mode 100644 src/basho_bench_driver_https_pointzi.erl
 create mode 100644 src/pointzi.erl

diff --git a/pointzi.config b/pointzi.config
new file mode 100644
index 000000000..ba204479f
--- /dev/null
+++ b/pointzi.config
@@ -0,0 +1,67 @@
+{mode, max}.
+
+{duration, 1}.
+
+{concurrent, 1}.
+
+{driver, basho_bench_driver_https_pointzi}.
+%% Default generators, referenced by the atoms key_generator and value_generator
+{key_generator, uuid_v4}.
+{value_generator, {fixed_bin, 10000}}.
+
+%%% Generators: {Name, KeyGen | ValGen}
+% Name: atom()
+% KeyGen: User or Basho Bench defined key generator
+% ValGen: User or Basho Bench defined value generator
+{generators, [
+        {string_g, {key_generator, {int_to_str, {uniform_int, 50000}}}},
+        {binstring_g, {value_generator, {fixed_bin, 100}}},
+        {install_g, {value_generator, {function, pointzi, generate_install, []}}}
+    ]}.
+
+%%% Values: {Name, Value}
+%%%         {Name, {FormattedValue, Generators}}
+% Name: atom()
+% Value: string() | atom() - named generator, can be key_generator or value_generator for default
+% FormattedValue: string() - formatted with io_lib:format
+% Generators: list() - list of generators, can be key_generator or value_generator for default
+{values, [
+        {json_v, install_g}
+    ]}.
+
+%%% Headers: {Name, Headers}
+% Name: atom()
+% Headers: proplist()
+{headers, [
+        {json_h, [{"Content-Type", "application/json"}, {"Accept", "application/json"}]}
+    ]}.
+
+%%% Targets: {Name, {Host, Port, Path}}
+%%%          {Name, [{Host1, Port1, Path1},{Host2, Port2, Path2},...]}
+%%%          {Name, {Host, Port, {FormattedPath, Generators}}}
+%%%          {Name, [{Host1, Port1, {FormattedPath1, Generators1}},{Host2, Port2, {FormattedPath2, Generators2}},...]}
+% Name: atom()
+% Host: string()
+% Port: integer()
+% Path: string()
+% FormattedPath: string() - formatted with io_lib:format
+% Generators: list() - list of generators, can be key_generator or value_generator for default
+{targets, [
+        %{installs_uri_t, {"localhost", 8000, "/v3/installs"}}
+        {installs_uri_t, {"https://george.pointzi.com", 443, "/v3/installs"}}
+    ]}.
+
+%%% Operations: {{get|delete, Target}, Weight}
+%%%             {{get|delete, Target, Header}, Weight}
+%%%             {{put|post, Target, Value}, Weight}
+%%%             {{put|post, Target, Value, Header}, Weight}
+% Target: atom() - defined target
+% Header: atom() - defined header
+% Value: atom() - defined value
+% Weight: integer() - ratio of this operation to the rest (ThisWeight / TotalWeightSum = % of this Operation)
+
+{operations, [
+        %% Get without a key
+        %%{{get, base_uri_t}, 1},
+        {{post, installs_uri_t, json_v, json_h}, 1}
+    ]}.
diff --git a/src/basho_bench_driver_https_pointzi.erl b/src/basho_bench_driver_https_pointzi.erl
new file mode 100644
index 000000000..df64d9fbc
--- /dev/null
+++ b/src/basho_bench_driver_https_pointzi.erl
@@ -0,0 +1,393 @@
+%% -------------------------------------------------------------------
+%%
+%% basho_bench: Benchmarking Suite
+%%
+%% Copyright (c) 2009-2013 Basho Technologies
+%%
+%% This file is provided to you under the Apache License,
+%% Version 2.0 (the "License"); you may not use this file
+%% except in compliance with the License. You may obtain
+%% a copy of the License at
+%%
+%%   http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing,
+%% software distributed under the License is distributed on an
+%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+%% KIND, either express or implied. See the License for the
+%% specific language governing permissions and limitations
+%% under the License.
+%%
+%% -------------------------------------------------------------------
+-module(basho_bench_driver_https_pointzi).
+
+-export([new/1,
+         run/4]).
+
+-include("basho_bench.hrl").
+
+-record(url, {abspath, host, port, username, password, path, protocol, host_type}).
+
+-record(state, {
+          generators = [],
+          values = [],
+          headers = [],
+          targets = [],
+          target_indexes = []}).
+
+
+%% ====================================================================
+%% API
+%% ====================================================================
+
+new(Id) ->
+    ?DEBUG("ID: ~p\n", [Id]),
+
+    application:start(crypto),
+    application:start(public_key),
+    application:start(ssl),
+    application:start(inets),
+    inets:start(),
+    ssl:start(),
+
+
+
+    Disconnect = basho_bench_config:get(http_disconnect_frequency, infinity),
+
+    case Disconnect of
+        infinity -> ok;
+        Seconds when is_integer(Seconds) -> ok;
+        {ops, Ops} when is_integer(Ops) -> ok;
+        _ -> ?FAIL_MSG("Invalid configuration for http_disconnect_frequency: ~p~n", [Disconnect])
+    end,
+
+    %% Uses pdict to avoid threading state record through lots of functions
+    erlang:put(disconnect_freq, Disconnect),
+
+    %% TODO: Validate these
+    Generators = build_generators(basho_bench_config:get(generators), [], Id),
+    Values = basho_bench_config:get(values),
+    Headers = basho_bench_config:get(headers),
+    TargetsProplist = basho_bench_config:get(targets),
+    TargetIndexes = build_target_list(TargetsProplist, []),
+
+    {ok, #state {
+            generators = Generators,
+            values = Values,
+            headers = Headers,
+            targets = TargetsProplist,
+            target_indexes = TargetIndexes
+           }}.
+ +run({get, Target}, KeyGen, ValueGen, State) -> + run({get, Target, undefined}, KeyGen, ValueGen, State); +run({get, Target, HeaderName}, KeyGen, ValueGen, State) -> + {Url, S2} = next_url(Target, KeyGen, ValueGen, State), + Headers = proplists:get_value(HeaderName, S2#state.headers, []), + + case do_get(Url, Headers) of + {not_found, _Url} -> + {ok, S2}; + {ok, _Url, _Header} -> + {ok, S2}; + {error, Reason} -> + {error, Reason, S2} + end; + +run({put, Target, ValueName}, KeyGen, ValueGen, State) -> + run({put, Target, ValueName, undefined}, KeyGen, ValueGen, State); +run({put, Target, ValueName, HeaderName}, KeyGen, ValueGen, State) -> + {Url, S2} = next_url(Target, KeyGen, ValueGen, State), + Headers = proplists:get_value(HeaderName, S2#state.headers, []), + Data = build_value(ValueName, KeyGen, ValueGen, S2), + + case do_put(Url, Headers, Data) of + ok -> + {ok, S2}; + {error, Reason} -> + {error, Reason, S2} + end; + +run({post, Target, ValueName}, KeyGen, ValueGen, State) -> + run({post, Target, ValueName, undefined}, KeyGen, ValueGen, State); +run({post, Target, ValueName, HeaderName}, KeyGen, ValueGen, State) -> + {Url, S2} = next_url(Target, KeyGen, ValueGen, State), + Headers = proplists:get_value(HeaderName, S2#state.headers, []), + Data = build_value(ValueName, KeyGen, ValueGen, S2), + + case do_post(Url, Headers, Data) of + ok -> + {ok, S2}; + {error, Reason} -> + lager:debug("post failed ~p~n", [Reason]), + {error, Reason, S2} + end; + +run({delete, Target}, KeyGen, ValueGen, State) -> + run({delete, Target, undefined}, KeyGen, ValueGen, State); +run({delete, Target, HeaderName}, KeyGen, ValueGen, State) -> + {Url, S2} = next_url(Target, KeyGen, ValueGen, State), + Headers = proplists:get_value(HeaderName, S2#state.headers, []), + + case do_delete(Url, Headers) of + ok -> + {ok, S2}; + {error, Reason} -> + {error, Reason, S2} + end. + +%% ==================================================================== +%% Internal functions +%% ==================================================================== + +build_generators([{Name, {key_generator, KeyGenSpec}}|Rest], Generators, Id) -> + KeyGen = basho_bench_keygen:new(KeyGenSpec, Id), + build_generators(Rest, [{Name, KeyGen}|Generators], Id); +build_generators([{Name, {value_generator, ValGenSpec}}|Rest], Generators, Id) -> + ValGen = basho_bench_valgen:new(ValGenSpec, Id), + build_generators(Rest, [{Name, ValGen}|Generators], Id); +build_generators([], Generators, _) -> + Generators. + +evaluate_generator(Name, Generators, KeyGen, ValueGen) -> + case Name of + key_generator -> KeyGen(); + value_generator -> ValueGen(); + N when is_atom(N) -> + Fun = proplists:get_value(N, Generators), + Fun(); + Value -> Value + end. + +build_formatted_value(String, GeneratorNames, Generators, KeyGen, ValueGen) -> + lager:debug("GeneratorNames ~p~n", [GeneratorNames]), + Values = lists:map(fun (Name) -> evaluate_generator(Name, Generators, KeyGen, ValueGen) end, GeneratorNames), + lager:debug("Values ~p~n", [Values]), + io_lib:format(String, Values). 
+ +%% Round robin sub-target selection +next_url({TargetName, Index, Targets}, KeyGen, ValueGen, State) + when is_list(Targets), Index > length(Targets) -> + OtherIndexes = proplists:delete(TargetName, State#state.target_indexes), + S2 = State#state{target_indexes = [{TargetName, 1} | OtherIndexes]}, + next_url({TargetName, 1, Targets}, KeyGen, ValueGen, S2); +next_url({TargetName, Index, Targets}, KeyGen, ValueGen, State) + when is_list(Targets) -> + OtherIndexes = proplists:delete(TargetName, State#state.target_indexes), + Url = build_url(lists:nth(Index, Targets), State#state.generators, KeyGen, ValueGen), + S2 = State#state{target_indexes = [{TargetName, Index + 1} | OtherIndexes]}, + {Url, S2}; +next_url({_, _, Target}, KeyGen, ValueGen, State) when is_tuple(Target) -> + Url = build_url(Target, State#state.generators, KeyGen, ValueGen), + {Url, State}; +next_url(TargetName, KeyGen, ValueGen, State) when is_atom(TargetName) -> + Index = proplists:get_value(TargetName, State#state.target_indexes), + Target = proplists:get_value(TargetName, State#state.targets), + next_url({TargetName, Index, Target}, KeyGen, ValueGen, State). + +build_url({Host, Port, {FormattedPath, GeneratorNames}}, Generators, KeyGen, ValueGen) -> + Path = build_formatted_value(FormattedPath, GeneratorNames, Generators, KeyGen, ValueGen), + #url{host=Host, port=Port, path=Path}; +build_url({Host, Port, Path}, _, _, _) -> + #url{host=Host, port=Port, path=Path}. + +build_target_list([], TargetIndexes) -> + TargetIndexes; +build_target_list([{Name, _}|Rest], TargetIndexes) -> + build_target_list(Rest, [{Name, 1} | TargetIndexes]). + +build_value(ValueName, KeyGen, ValueGen, State) -> + case proplists:get_value(ValueName, State#state.values) of + {FormattedValue, GeneratorNames} -> + build_formatted_value(FormattedValue, GeneratorNames, State#state.generators, KeyGen, ValueGen); + V -> evaluate_generator(V, State#state.generators, KeyGen, ValueGen) + end. + +do_get(Url, Headers) -> + case send_request(Url, Headers, get, [], [{response_format, binary}]) of + {ok, 404, _Header, _Body} -> + {not_found, Url}; + {ok, 300, Header, _Body} -> + {ok, Url, Header}; + {ok, 200, Header, _Body} -> + {ok, Url, Header}; + {ok, Code, _Header, _Body} -> + {error, {http_error, Code}}; + {error, Reason} -> + {error, Reason} + end. + +do_put(Url, Headers, ValueGen) -> + Val = if is_function(ValueGen) -> + ValueGen(); + true -> + ValueGen + end, + case send_request(Url, Headers, + put, Val, [{response_format, binary}]) of + {ok, 200, _Header, _Body} -> + ok; + {ok, 201, _Header, _Body} -> + ok; + {ok, 202, _Header, _Body} -> + ok; + {ok, 204, _Header, _Body} -> + ok; + {ok, Code, _Header, _Body} -> + {error, {http_error, Code}}; + {error, Reason} -> + {error, Reason} + end. + +do_post(Url, Headers, ValueGen) -> + Val = if is_function(ValueGen) -> + ValueGen(); + true -> + ValueGen + end, + %lager:debug("send_request ~1p~n", [iolist_to_binary(Val)]), + case send_request(Url, Headers, + post, Val, [{response_format, binary}, {is_ssl, true}, {ssl_options, []}]) of + {ok, 200, _Header, _Body} -> + ok; + {ok, 201, _Header, _Body} -> + ok; + {ok, 202, _Header, _Body} -> + ok; + {ok, 204, _Header, _Body} -> + ok; + {ok, Code, _Header, _Body} -> + lager:debug("send_request error ~p~n", [_Body]), + {error, {http_error, Code}}; + {error, Reason} -> + {error, Reason} + end. 
+ +do_delete(Url, Headers) -> + case send_request(Url, Headers, delete, [], []) of + {ok, 200, _Header, _Body} -> + ok; + {ok, 201, _Header, _Body} -> + ok; + {ok, 202, _Header, _Body} -> + ok; + {ok, 204, _Header, _Body} -> + ok; + {ok, 404, _Header, _Body} -> + ok; + {ok, 410, _Header, _Body} -> + ok; + {ok, Code, _Header, _Body} -> + {error, {http_error, Code}}; + {error, Reason} -> + {error, Reason} + end. + +connect(Url) -> + case erlang:get({inets_pid, Url#url.host}) of + undefined -> + {ok, Pid} = inets:start({Url#url.host, Url#url.port}), + erlang:put({ibrowse_pid, Url#url.host}, Pid), + Pid; + Pid -> + case is_process_alive(Pid) of + true -> + Pid; + false -> + erlang:erase({ibrowse_pid, Url#url.host}), + connect(Url) + end + end. + + +disconnect(Url) -> + case erlang:get({ibrowse_pid, Url#url.host}) of + undefined -> + ok; + OldPid -> + catch(ibrowse_http_client:stop(OldPid)) + end, + erlang:erase({ibrowse_pid, Url#url.host}), + ok. + +maybe_disconnect(Url) -> + case erlang:get(disconnect_freq) of + infinity -> ok; + {ops, Count} -> should_disconnect_ops(Count,Url) andalso disconnect(Url); + Seconds -> should_disconnect_secs(Seconds,Url) andalso disconnect(Url) + end. + +should_disconnect_ops(Count, Url) -> + Key = {ops_since_disconnect, Url#url.host}, + case erlang:get(Key) of + undefined -> + erlang:put(Key, 1), + false; + Count -> + erlang:put(Key, 0), + true; + Incr -> + erlang:put(Key, Incr + 1), + false + end. + +should_disconnect_secs(Seconds, Url) -> + Key = {last_disconnect, Url#url.host}, + case erlang:get(Key) of + undefined -> + erlang:put(Key, os:timestamp()), + false; + Time when is_tuple(Time) andalso size(Time) == 3 -> + Diff = timer:now_diff(os:timestamp(), Time), + if + Diff >= Seconds * 1000000 -> + erlang:put(Key, os:timestamp()), + true; + true -> false + end + end. + +clear_disconnect_freq(Url) -> + case erlang:get(disconnect_freq) of + infinity -> ok; + {ops, _Count} -> erlang:put({ops_since_disconnect, Url#url.host}, 0); + _Seconds -> erlang:put({last_disconnect, Url#url.host}, os:timestamp()) + end. + +send_request(Url, Headers, Method, Body, Options) -> + send_request(Url, Headers, Method, Body, Options, 3). + +send_request(_Url, _Headers, _Method, _Body, _Options, 0) -> + {error, max_retries}; +send_request(Url, Headers, Method, Body, Options, Count) -> + {url, _, Host, Port, _, _, Path, _, _} = Url, + lager:debug("send_request ~p ~p ~p ~n", [Method, Host++Path, iolist_to_binary(Body)]), + Request = {Host++Path, Headers, "application/json", iolist_to_binary(Body)}, + case catch(httpc:request(Method, Request, [], [])) of + {ok, {{_, Status, _}, RespHeaders, RespBody}} -> + %%maybe_disconnect(Url), + {ok, Status, RespHeaders, RespBody}; + + Error -> + clear_disconnect_freq(Url), + %disconnect(Url), + case should_retry(Error) of + true -> + send_request(Url, Headers, Method, Body, Options, Count-1); + + false -> + normalize_error(Method, Error) + end + end. + + +should_retry({error, send_failed}) -> true; +should_retry({error, connection_closed}) -> true; +should_retry({'EXIT', {normal, _}}) -> true; +should_retry({'EXIT', {noproc, _}}) -> true; +should_retry(_) -> false. + +normalize_error(Method, {'EXIT', {timeout, _}}) -> {error, {Method, timeout}}; +normalize_error(Method, {'EXIT', Reason}) -> {error, {Method, 'EXIT', Reason}}; +normalize_error(Method, {error, Reason}) -> {error, {Method, Reason}}. 
diff --git a/src/pointzi.erl b/src/pointzi.erl new file mode 100644 index 000000000..aaa0c09ed --- /dev/null +++ b/src/pointzi.erl @@ -0,0 +1,19 @@ +-module(pointzi). +-export([ + generate_install/1 +]). +generate_install(_Id) -> + fun() -> + mochijson2:encode({struct, [ + {<<"id">>, list_to_binary(basho_uuid:to_string(basho_uuid:v4()))}, + {<<"app_key">>, <<"LOTSAINSTALLS2">>}, + {<<"client_version">>, <<"1.2.3">>}, + {<<"ipaddress">>, <<"192.168.0.1">>}, + {<<"model">>, <<"basho_bench">>}, + {<<"operating_system">>, <<"web">>}, + {<<"sh_version">>, <<"1.9.23">>}, + {<<"pz_version">>, <<"2.0">>}, + {<<"utc_offset">>, 166} + ]}) + end. + From 73eae998d1bff5e628e982532fcc55d21b6a813f Mon Sep 17 00:00:00 2001 From: Steven Joseph Date: Tue, 12 May 2020 23:46:37 +1000 Subject: [PATCH 06/10] feat: add gitlab config and docker files --- .gitlab-ci.yml | 38 ++++++++++++++++++++++++++++++++ Dockerfile | 7 ++++++ examples/pointzi-crdt-map.config | 26 ++++++++++++++++++++++ src/pointzi.erl | 16 ++++++++++++++ 4 files changed, 87 insertions(+) create mode 100644 .gitlab-ci.yml create mode 100644 Dockerfile create mode 100644 examples/pointzi-crdt-map.config diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 000000000..319d961fd --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,38 @@ +image: docker:stable + +services: + - docker:dind + +variables: + CONTAINER_IMAGE: registry.gitlab.com/$CI_PROJECT_PATH + DOCKER_TLS_CERTDIR: "" + DOCKER_HOST: tcp://localhost:2375 + DOCKER_DRIVER: overlay2 + +before_script: + - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN registry.gitlab.com + - apk add --update curl && rm -rf /var/cache/apk/* + +stages: + - build + - benchmark + +build: + stage: build + script: + - docker pull $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest || true + - docker build + --cache-from $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest + --tag $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA + --tag $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest + --build-arg deployment=$CI_COMMIT_REF_NAME + -f Dockerfile.base . + - docker push $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA + - docker push $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest + +benchmark: + stage: riak + script: + - set -x + - docker pull $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest || true + - docker run -it $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..25ae3a9de --- /dev/null +++ b/Dockerfile @@ -0,0 +1,7 @@ +ARG deployment +FROM registry.gitlab.com/riak/riak/base/${deployment}:latest +ARG deployment +ADD . /srv/basho_bench +WORKDIR /srv/basho_bench +RUN rebar3 do upgrade, compile, escriptize + diff --git a/examples/pointzi-crdt-map.config b/examples/pointzi-crdt-map.config new file mode 100644 index 000000000..f693f43a9 --- /dev/null +++ b/examples/pointzi-crdt-map.config @@ -0,0 +1,26 @@ +{mode,{rate,max}}. +{duration,20}. +{concurrent,200}. +{rng_seed,now}. + +%% This bucket type must be created and set to be datatype, maps. +%%{riakc_pb_bucket,{<<"TagsTest">>,<<"TagsTestBucket">>}}. +{riakc_pb_bucket,{<<"Log">>,<<"InstallLogTest">>}}. + +{key_generator, {uniform_int, 100}}. +{value_generator, {uniform_int, 1000}}. + +{operations,[{{game,completed},10}, + {{team,player,addition},3}, + {{team,player,removal},3}, + {{team,read},100}, + {{team,write},1}]}. + +{riakc_pb_ips,[ + {"localhost",8087} + %%{"13.0.6.35",8087} +]}. + +{riakc_pb_replies,default}. + +{driver,basho_bench_driver_riakc_pb}. 
diff --git a/src/pointzi.erl b/src/pointzi.erl
index aaa0c09ed..886c61897 100644
--- a/src/pointzi.erl
+++ b/src/pointzi.erl
@@ -17,3 +17,19 @@ generate_install(_Id) ->
         ]})
     end.
 
+
+generate_tag(_Id) ->
+    fun() ->
+        mochijson2:encode({struct, [
+            {<<"id">>, list_to_binary(basho_uuid:to_string(basho_uuid:v4()))},
+            {<<"installid">>, list_to_binary(basho_uuid:to_string(basho_uuid:v4()))},
+            {<<"app_key">>, <<"LOTSAINSTALLS2">>},
+            {<<"client_version">>, <<"1.2.3">>},
+            {<<"ipaddress">>, <<"192.168.0.1">>},
+            {<<"model">>, <<"basho_bench">>},
+            {<<"operating_system">>, <<"web">>},
+            {<<"sh_version">>, <<"1.9.23">>},
+            {<<"pz_version">>, <<"2.0">>},
+            {<<"utc_offset">>, 166}
+        ]})
+    end.

From 54e8e279f64219266070010e7dc8908a68a2a8f5 Mon Sep 17 00:00:00 2001
From: Steven Joseph
Date: Fri, 15 May 2020 13:27:19 +1000
Subject: [PATCH 07/10] fix: gitlab config

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 319d961fd..c87302de5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -31,7 +31,7 @@ build:
     - docker push $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest
 
 benchmark:
-  stage: riak
+  stage: benchmark
   script:
     - set -x
     - docker pull $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest || true
     - docker run -it $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest

From e72a21810e00cb2868d1e3ff0c9f35a1ed8712a4 Mon Sep 17 00:00:00 2001
From: Steven Joseph
Date: Fri, 15 May 2020 13:28:57 +1000
Subject: [PATCH 08/10] fix: build

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c87302de5..2f92fb659 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -26,7 +26,7 @@ build:
       --tag $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA
       --tag $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest
       --build-arg deployment=$CI_COMMIT_REF_NAME
-      -f Dockerfile.base .
+      .
     - docker push $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA
     - docker push $CONTAINER_IMAGE/$CI_COMMIT_REF_NAME:latest
 
 benchmark:

From 6636e91fbc0d0b63db5e2401434963f1ff0645a7 Mon Sep 17 00:00:00 2001
From: Steven Joseph
Date: Fri, 15 May 2020 13:37:43 +1000
Subject: [PATCH 09/10] fix: base image

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 25ae3a9de..135838f79 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
 ARG deployment
-FROM registry.gitlab.com/riak/riak/base/${deployment}:latest
+FROM registry.gitlab.com/riak/riak/${deployment}:latest
 ARG deployment
 ADD . /srv/basho_bench
 WORKDIR /srv/basho_bench

From dccb7f07d8b65ab071ec620b752e115c3492657a Mon Sep 17 00:00:00 2001
From: Steven Joseph
Date: Wed, 4 Jan 2023 19:20:26 +1100
Subject: [PATCH 10/10] Fix use erlang 20 docker image

---
 Dockerfile | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 135838f79..c433c0aac 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,4 @@
-ARG deployment
-FROM registry.gitlab.com/riak/riak/${deployment}:latest
+FROM erlang:20-alpine
 ARG deployment
 ADD . /srv/basho_bench
 WORKDIR /srv/basho_bench