Include keyspace range checks on flow tests (RedisLabs#208)
filipecosta90 authored Feb 14, 2023
1 parent 82dba6a commit 1620979
Showing 2 changed files with 147 additions and 3 deletions.
26 changes: 23 additions & 3 deletions tests/include.py
@@ -59,13 +59,15 @@ def debugPrintMemtierOnError(config, env):
             env.debugPrint(line.rstrip(), True)
 
 
-def get_expected_request_count(config):
+def get_expected_request_count(config, key_minimum=0, key_maximum=1000000):
     result = -1
     if 'memtier_benchmark' in config:
         mt = config['memtier_benchmark']
         if 'threads' in mt and 'clients' in mt and 'requests' in mt:
-            result = config['memtier_benchmark']['threads'] * config['memtier_benchmark']['clients'] * \
-                     config['memtier_benchmark']['requests']
+            if mt['requests'] != 'allkeys':
+                result = mt['threads'] * mt['clients'] * mt['requests']
+            else:
+                result = key_maximum - key_minimum + 1
     return result
 

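For context, a small worked example of the new logic (illustrative values, not part of the commit): with a numeric requests value the expected count stays threads * clients * requests, while requests='allkeys' turns the expected count into the size of the key range.

# Hypothetical usage sketch of get_expected_request_count(); values are illustrative.
config = {'memtier_benchmark': {'threads': 2, 'clients': 10, 'requests': 200000}}
assert get_expected_request_count(config) == 2 * 10 * 200000          # 4,000,000

config['memtier_benchmark']['requests'] = 'allkeys'
assert get_expected_request_count(config, key_minimum=1, key_maximum=500000) == 500000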
@@ -112,3 +112,21 @@ def ensure_clean_benchmark_folder(dirname):
         os.removedirs(dirname)
     os.makedirs(dirname)
 
+
+def assert_keyspace_range(env, key_max, key_min, master_nodes_connections):
+    expected_keyspace_range = key_max - key_min + 1
+    overall_keyspace_range = agg_keyspace_range(master_nodes_connections)
+    # assert we have the expected keyspace range
+    env.assertEqual(expected_keyspace_range, overall_keyspace_range)
+
+
+def agg_keyspace_range(master_nodes_connections):
+    overall_keyspace_range = 0
+    for master_connection in master_nodes_connections:
+        shard_reply = master_connection.execute_command("INFO", "KEYSPACE")
+        shard_count = 0
+        if 'db0' in shard_reply:
+            if 'keys' in shard_reply['db0']:
+                shard_count = int(shard_reply['db0']['keys'])
+        overall_keyspace_range = overall_keyspace_range + shard_count
+    return overall_keyspace_range
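
The helpers above assume an INFO reply that has already been parsed into nested dicts (as redis-py style clients do). A minimal sketch of the aggregation under that assumption, with made-up shard counts:

# Sketch only: illustrative parsed INFO KEYSPACE replies for three hypothetical master shards.
fake_replies = [
    {'db0': {'keys': 200000, 'expires': 0, 'avg_ttl': 0}},
    {'db0': {'keys': 150000, 'expires': 0, 'avg_ttl': 0}},
    {'db0': {'keys': 150000, 'expires': 0, 'avg_ttl': 0}},
]
# agg_keyspace_range() sums the per-shard 'keys' counters in the same way.
total_keys = sum(int(reply['db0']['keys']) for reply in fake_replies if 'keys' in reply.get('db0', {}))
assert total_keys == 500000  # equals key_max - key_min + 1 for a 1..500000 key range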
124 changes: 124 additions & 0 deletions tests/tests_oss_simple_flow.py
@@ -5,6 +5,130 @@
 from mbdirector.runner import RunConfig
 
 
+def test_preload_and_set_get(env):
+    key_max = 500000
+    key_min = 1
+    benchmark_specs = {"name": env.testName, "args": ['--pipeline=10','--ratio=1:0','--key-pattern=P:P','--key-minimum={}'.format(key_min),'--key-maximum={}'.format(key_max)]}
+    addTLSArgs(benchmark_specs, env)
+    config = get_default_memtier_config(threads=2, clients=10, requests='allkeys')
+    master_nodes_list = env.getMasterNodesList()
+    overall_expected_request_count = get_expected_request_count(config, key_min, key_max)
+
+    add_required_env_arguments(benchmark_specs, config, env, master_nodes_list)
+
+    # Create a temporary directory
+    test_dir = tempfile.mkdtemp()
+
+    config = RunConfig(test_dir, env.testName, config, {})
+    ensure_clean_benchmark_folder(config.results_dir)
+
+    benchmark = Benchmark.from_json(config, benchmark_specs)
+
+    # benchmark.run() returns True if the return code of memtier_benchmark was 0
+    memtier_ok = benchmark.run()
+    debugPrintMemtierOnError(config, env)
+
+    master_nodes_connections = env.getOSSMasterNodesConnectionList()
+    merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
+    assert_keyspace_range(env, key_max, key_min, master_nodes_connections)
+
+    overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count,
+                                    overall_request_count)
+    json_filename = '{0}/mb.json'.format(config.results_dir)
+
+    for master_connection in master_nodes_connections:
+        master_connection.execute_command("CONFIG", "RESETSTAT")
+
+    benchmark_specs = {"name": env.testName, "args": ['--pipeline=10','--ratio=1:1','--key-pattern=R:R','--key-minimum={}'.format(key_min),'--key-maximum={}'.format(key_max)]}
+    addTLSArgs(benchmark_specs, env)
+    config = get_default_memtier_config(threads=2, clients=10, requests=200000)
+    master_nodes_list = env.getMasterNodesList()
+    overall_expected_request_count = get_expected_request_count(config, key_min, key_max)
+
+    add_required_env_arguments(benchmark_specs, config, env, master_nodes_list)
+
+    # Create a temporary directory
+    test_dir = tempfile.mkdtemp()
+
+    config = RunConfig(test_dir, env.testName, config, {})
+    ensure_clean_benchmark_folder(config.results_dir)
+
+    benchmark = Benchmark.from_json(config, benchmark_specs)
+
+    # benchmark.run() returns True if the return code of memtier_benchmark was 0
+    memtier_ok = benchmark.run()
+    debugPrintMemtierOnError(config, env)
+
+    merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
+    assert_keyspace_range(env, key_max, key_min, master_nodes_connections)
+
+    overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count,
+                                    overall_request_count)
+
+
+def test_default_set(env):
+    key_max = 500000
+    key_min = 1
+    benchmark_specs = {"name": env.testName, "args": ['--pipeline=10','--ratio=1:0','--key-pattern=P:P','--key-minimum={}'.format(key_min),'--key-maximum={}'.format(key_max)]}
+    addTLSArgs(benchmark_specs, env)
+    config = get_default_memtier_config(threads=2, clients=10, requests='allkeys')
+    master_nodes_list = env.getMasterNodesList()
+    overall_expected_request_count = get_expected_request_count(config, key_min, key_max)
+
+    add_required_env_arguments(benchmark_specs, config, env, master_nodes_list)
+
+    # Create a temporary directory
+    test_dir = tempfile.mkdtemp()
+
+    config = RunConfig(test_dir, env.testName, config, {})
+    ensure_clean_benchmark_folder(config.results_dir)
+
+    benchmark = Benchmark.from_json(config, benchmark_specs)
+
+    # benchmark.run() returns True if the return code of memtier_benchmark was 0
+    memtier_ok = benchmark.run()
+    master_nodes_connections = env.getOSSMasterNodesConnectionList()
+
+    merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
+    assert_keyspace_range(env, key_max, key_min, master_nodes_connections)
+
+    overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count,
+                                    overall_request_count)
+
+    # ensure if we run again on a different key pattern the dataset doesn't grow
+    for master_connection in master_nodes_connections:
+        master_connection.execute_command("CONFIG", "RESETSTAT")
+
+    benchmark_specs = {"name": env.testName, "args": ['--pipeline=10','--ratio=1:0','--key-pattern=R:R','--key-minimum={}'.format(key_min),'--key-maximum={}'.format(key_max)]}
+    addTLSArgs(benchmark_specs, env)
+    config = get_default_memtier_config(threads=2, clients=10, requests=200000)
+    master_nodes_list = env.getMasterNodesList()
+    overall_expected_request_count = get_expected_request_count(config, key_min, key_max)
+
+    add_required_env_arguments(benchmark_specs, config, env, master_nodes_list)
+
+    # Create a temporary directory
+    test_dir = tempfile.mkdtemp()
+
+    config = RunConfig(test_dir, env.testName, config, {})
+    ensure_clean_benchmark_folder(config.results_dir)
+
+    benchmark = Benchmark.from_json(config, benchmark_specs)
+
+    # benchmark.run() returns True if the return code of memtier_benchmark was 0
+    memtier_ok = benchmark.run()
+
+    master_nodes_connections = env.getOSSMasterNodesConnectionList()
+    merged_command_stats = {'cmdstat_set': {'calls': 0}}
+    assert_keyspace_range(env, key_max, key_min, master_nodes_connections)
+
+    overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count,
+                                    overall_request_count)
+
 def test_default_set_get(env):
     benchmark_specs = {"name": env.testName, "args": []}
     addTLSArgs(benchmark_specs, env)

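agg_info_commandstats() and assert_minimum_memtier_outcomes(), used by the tests above, live elsewhere in tests/include.py and are not part of this diff. As a rough mental model only (an assumption, not the repository's implementation), the commandstats aggregation can be pictured like this:

# Assumption / sketch only: this is NOT the implementation from tests/include.py.
def agg_info_commandstats_sketch(master_nodes_connections, merged_command_stats):
    overall_request_count = 0
    for master_connection in master_nodes_connections:
        # Parsed INFO COMMANDSTATS reply, e.g. {'cmdstat_set': {'calls': 500000, ...}, ...}
        shard_stats = master_connection.execute_command("INFO", "COMMANDSTATS")
        for cmdstat_key in merged_command_stats:
            if cmdstat_key in shard_stats:
                calls = int(shard_stats[cmdstat_key]['calls'])
                merged_command_stats[cmdstat_key]['calls'] += calls
                overall_request_count += calls
    return overall_request_count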