Always print out stderr/out output on test failures. (RedisLabs#207)
Also do some minor cleanups around unused args.
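(Previously the dump was guarded by memtier's exit code alone, so a failed post-run assertion with a clean exit produced no diagnostics; see the sketch after the file summary below.)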

Co-authored-by: Uri Shachar <uri@redis.com>
ushachar and Uri Shachar authored Feb 8, 2023
1 parent 78b0a9a commit 82dba6a
Showing 2 changed files with 44 additions and 56 deletions.
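The core of the change: the assertion block in assert_minimum_memtier_outcomes is wrapped in try/finally, and the environment's failed-assertion counter is snapshotted before the block and compared after it, so any new failure triggers the stderr/stdout dump. Below is a minimal, self-contained sketch of that pattern; the Env class is a toy stand-in for RLTest's environment object, not the real API.

# Toy sketch of the snapshot-and-compare pattern this commit adopts.
# `Env` imitates just enough of RLTest's environment to run standalone.
class Env:
    def __init__(self):
        self.failed = 0

    def assertTrue(self, cond):
        if not cond:
            self.failed += 1

    def getNumberOfFailedAssertion(self):
        return self.failed


def run_assertions(env, checks, dump_diagnostics):
    failed_before = env.getNumberOfFailedAssertion()
    try:
        for check in checks:
            env.assertTrue(check)
    finally:
        # Fires on any newly failed assertion, not just a bad exit code.
        if env.getNumberOfFailedAssertion() > failed_before:
            dump_diagnostics()


if __name__ == '__main__':
    env = Env()
    run_assertions(env, [True, False],
                   lambda: print("### memtier stderr/stdout would be dumped here ###"))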
69 changes: 35 additions & 34 deletions tests/include.py
@@ -7,18 +7,21 @@
 TLS_CACERT = os.environ.get("TLS_CACERT", "")
 
 
-def assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
+def assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count,
                                     overall_request_count):
-    # assert correct exit code
-    env.assertTrue(memtier_ok == True)
-    # assert we have all outputs
-    env.assertTrue(os.path.isfile('{0}/mb.stdout'.format(config.results_dir)))
-    env.assertTrue(os.path.isfile('{0}/mb.stderr'.format(config.results_dir)))
-    env.assertTrue(os.path.isfile('{0}/mb.json'.format(config.results_dir)))
-
-    # assert we have the expected request count
-    env.assertEqual(overall_expected_request_count, overall_request_count)
-
+    failed_asserts = env.getNumberOfFailedAssertion()
+    try:
+        # assert correct exit code
+        env.assertTrue(memtier_ok == True)
+        # assert we have all outputs
+        env.assertTrue(os.path.isfile('{0}/mb.stdout'.format(config.results_dir)))
+        env.assertTrue(os.path.isfile('{0}/mb.stderr'.format(config.results_dir)))
+        env.assertTrue(os.path.isfile('{0}/mb.json'.format(config.results_dir)))
+        # assert we have the expected request count
+        env.assertEqual(overall_expected_request_count, overall_request_count)
+    finally:
+        if env.getNumberOfFailedAssertion() > failed_asserts:
+            debugPrintMemtierOnError(config, env)
 
 def add_required_env_arguments(benchmark_specs, config, env, master_nodes_list):
     # check if environment is cluster
@@ -33,29 +36,27 @@ def add_required_env_arguments(benchmark_specs, config, env, master_nodes_list):
     config['redis_process_port'] = master_nodes_list[0]['port']
 
 
-def debugPrintMemtierOnError(config, env, memtier_ok):
-    if not memtier_ok:
-        with open('{0}/mb.stderr'.format(config.results_dir)) as stderr:
-            env.debugPrint("### PRINTING STDERR OUTPUT OF MEMTIER ON FAILURE ###", True)
-            env.debugPrint("### mb.stderr file location: {0}".format('{0}/mb.stderr'.format(config.results_dir)), True)
-            for line in stderr:
-                env.debugPrint(line.rstrip(), True)
-
-        with open('{0}/mb.stdout'.format(config.results_dir)) as stderr:
-            env.debugPrint("### PRINTING STDERR OUTPUT OF MEMTIER ON FAILURE ###", True)
-            env.debugPrint("### mb.stderr file location: {0}".format('{0}/mb.stdout'.format(config.results_dir)), True)
-            for line in stderr:
-                env.debugPrint(line.rstrip(), True)
-
-        if not env.isCluster():
-            if env.envRunner is not None:
-                log_file = os.path.join(env.envRunner.dbDirPath, env.envRunner._getFileName('master', '.log'))
-                with open(log_file) as redislog:
-                    env.debugPrint("### REDIS LOG ###", True)
-                    env.debugPrint(
-                        "### log_file file location: {0}".format(log_file), True)
-                    for line in redislog:
-                        env.debugPrint(line.rstrip(), True)
+def debugPrintMemtierOnError(config, env):
+    with open('{0}/mb.stderr'.format(config.results_dir)) as stderr:
+        env.debugPrint("### PRINTING STDERR OUTPUT OF MEMTIER ON FAILURE ###", True)
+        env.debugPrint("### mb.stderr file location: {0}".format('{0}/mb.stderr'.format(config.results_dir)), True)
+        for line in stderr:
+            env.debugPrint(line.rstrip(), True)
+    with open('{0}/mb.stdout'.format(config.results_dir)) as stderr:
+        env.debugPrint("### PRINTING STDOUT OUTPUT OF MEMTIER ON FAILURE ###", True)
+        env.debugPrint("### mb.stdout file location: {0}".format('{0}/mb.stdout'.format(config.results_dir)), True)
+        for line in stderr:
+            env.debugPrint(line.rstrip(), True)
+
+    if not env.isCluster():
+        if env.envRunner is not None:
+            log_file = os.path.join(env.envRunner.dbDirPath, env.envRunner._getFileName('master', '.log'))
+            with open(log_file) as redislog:
+                env.debugPrint("### REDIS LOG ###", True)
+                env.debugPrint(
+                    "### log_file file location: {0}".format(log_file), True)
+                for line in redislog:
+                    env.debugPrint(line.rstrip(), True)
 
 
 def get_expected_request_count(config):
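Note the division of labor this creates: tests that assert on post-run state rely on assert_minimum_memtier_outcomes to trigger the dump, while a test with no such assertions (the pub/sub flow in the second file) keeps an explicit call guarded by the exit code:

    if not benchmark.run():
        debugPrintMemtierOnError(config, env)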
31 changes: 9 additions & 22 deletions tests/tests_oss_simple_flow.py
@@ -24,13 +24,11 @@ def test_default_set_get(env):
 
     # benchmark.run() returns True if the return code of memtier_benchmark was 0
     memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
 
     master_nodes_connections = env.getOSSMasterNodesConnectionList()
     merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
     overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
-    assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
-                                    overall_request_count)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count, overall_request_count)
 
 
 def test_default_set_get_with_print_percentiles(env):
@@ -54,13 +52,11 @@ def test_default_set_get_with_print_percentiles(env):
 
     # benchmark.run() returns True if the return code of memtier_benchmark was 0
     memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
 
     master_nodes_connections = env.getOSSMasterNodesConnectionList()
     merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
     overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
-    assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
-                                    overall_request_count)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count, overall_request_count)
     json_filename = '{0}/mb.json'.format(config.results_dir)
 
     hdr_files_sufix = ["_FULL_RUN_1","_SET_command_run_1","_GET_command_run_1"]
@@ -103,13 +99,11 @@ def test_default_set_get_1_1(env):
 
     # benchmark.run() returns True if the return code of memtier_benchmark was 0
     memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
 
     master_nodes_connections = env.getOSSMasterNodesConnectionList()
     merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
     overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
-    assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
-                                    overall_request_count)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count, overall_request_count)
 
     # assert same number of gets and sets
     env.assertEqual(merged_command_stats['cmdstat_set']['calls'], merged_command_stats['cmdstat_get']['calls'])
@@ -136,13 +130,11 @@ def test_default_set_get_3_runs(env):
 
     # benchmark.run() returns True if the return code of memtier_benchmark was 0
     memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
 
     master_nodes_connections = env.getOSSMasterNodesConnectionList()
     merged_command_stats = {'cmdstat_set': {'calls': 0}, 'cmdstat_get': {'calls': 0}}
     overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
-    assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
-                                    overall_request_count)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count, overall_request_count)
 
 
 def test_default_arbitrary_command_pubsub(env):
@@ -151,7 +143,6 @@ def test_default_arbitrary_command_pubsub(env):
     addTLSArgs(benchmark_specs, env)
     config = get_default_memtier_config()
     master_nodes_list = env.getMasterNodesList()
-    overall_expected_request_count = 0
 
     add_required_env_arguments(benchmark_specs, config, env, master_nodes_list)
 
@@ -163,9 +154,8 @@ def test_default_arbitrary_command_pubsub(env):
 
     benchmark = Benchmark.from_json(config, benchmark_specs)
 
-    # benchmark.run() returns True if the return code of memtier_benchmark was 0
-    memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
+    if not benchmark.run():
+        debugPrintMemtierOnError(config, env)
 
 
 def test_default_arbitrary_command_set(env):
@@ -188,13 +178,11 @@ def test_default_arbitrary_command_set(env):
 
     # benchmark.run() returns True if the return code of memtier_benchmark was 0
     memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
 
     master_nodes_connections = env.getOSSMasterNodesConnectionList()
     merged_command_stats = {'cmdstat_set': {'calls': 0}}
     overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
-    assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
-                                    overall_request_count)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count, overall_request_count)
 
 
 def test_default_arbitrary_command_hset(env):
@@ -217,10 +205,9 @@ def test_default_arbitrary_command_hset(env):
 
     # benchmark.run() returns True if the return code of memtier_benchmark was 0
    memtier_ok = benchmark.run()
-    debugPrintMemtierOnError(config, env, memtier_ok)
 
     master_nodes_connections = env.getOSSMasterNodesConnectionList()
     merged_command_stats = {'cmdstat_hset': {'calls': 0}}
     overall_request_count = agg_info_commandstats(master_nodes_connections, merged_command_stats)
-    assert_minimum_memtier_outcomes(config, env, memtier_ok, merged_command_stats, overall_expected_request_count,
-                                    overall_request_count)
+    assert_minimum_memtier_outcomes(config, env, memtier_ok, overall_expected_request_count, overall_request_count)
