From 662ce2be0fdfd9e0c772c75bbd2de5438254ea2e Mon Sep 17 00:00:00 2001
From: root
Date: Thu, 5 May 2022 03:57:42 +0000
Subject: [PATCH 1/9] delete the stopped container by default at tear down

---
 redis_benchmarks_specification/__runner__/runner.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/redis_benchmarks_specification/__runner__/runner.py b/redis_benchmarks_specification/__runner__/runner.py
index a03b74c..5b7dcf6 100644
--- a/redis_benchmarks_specification/__runner__/runner.py
+++ b/redis_benchmarks_specification/__runner__/runner.py
@@ -387,7 +387,7 @@ def process_self_contained_coordinator_stream(
                             "mode": "rw",
                         },
                     },
-                    auto_remove=False,
+                    auto_remove=True,
                     privileged=True,
                     working_dir=benchmark_tool_workdir,
                     command=benchmark_command_str,
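
Note on the flag changed above: in the Docker SDK for Python, auto_remove=True asks the daemon to delete the container as soon as it exits, so stopped benchmark containers no longer accumulate between runs. The minimal sketch below only illustrates that behaviour; the image, command and mount paths are placeholders rather than the runner's actual values. With detach=False the call blocks and returns the container output, so anything needed after the run must come from that return value or from host-mounted volumes, since the container itself is gone once it exits.

    import docker

    client = docker.from_env()
    # auto_remove=True: the daemon removes the container once it exits, so any
    # output has to be taken from the return value or from mounted volumes.
    output = client.containers.run(
        "redislabs/memtier_benchmark:edge",   # placeholder image
        "memtier_benchmark --version",        # placeholder command
        volumes={"/tmp/client": {"bind": "/mnt/datadir", "mode": "rw"}},
        auto_remove=True,
        detach=False,
    )
    print(output.decode())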

From 6f93554ca5b003a448664fff4a7f28137383404d Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Fri, 6 May 2022 22:53:15 +0000
Subject: [PATCH 2/9] fix test-time on 4 test-cases.

---
 ...-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml | 2 +-
 ...k-10Mkeys-load-hash-5-fields-with-10B-values-pipeline-10.yml | 2 +-
 ...-1Mkeys-load-hash-5-fields-with-1000B-values-pipeline-10.yml | 2 +-
 ...er_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values.yml | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
index 909193c..19142b1 100644
--- a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
@@ -17,7 +17,7 @@ build-variants:
 clientconfig:
   run_image: redislabs/memtier_benchmark:edge
   tool: memtier_benchmark
-  arguments: '"--pipeline" "10" "--data-size" "100" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" "--command-key-pattern" "P" --key-minimum=1 --key-maximum 10000000 -n 50000 -c 50 -t 4 --hide-histogram'
+  arguments: '"--pipeline" "10" "--data-size" "100" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" "--command-key-pattern" "P" --key-minimum=1 --key-maximum 10000000 -n 50000 -c 50 -t 4 --hide-histogram --test-time 180'
   resources:
     requests:
       cpus: "4"
diff --git a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-10B-values-pipeline-10.yml b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-10B-values-pipeline-10.yml
index f794477..383d011 100644
--- a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-10B-values-pipeline-10.yml
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-10B-values-pipeline-10.yml
@@ -17,7 +17,7 @@ build-variants:
 clientconfig:
   run_image: redislabs/memtier_benchmark:edge
   tool: memtier_benchmark
-  arguments: '"--pipeline" "10" "--data-size" "10" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" --command-key-pattern="P" --key-minimum=1 --key-maximum 10000000 -n 50000 -c 50 -t 4 --hide-histogram'
+  arguments: '"--pipeline" "10" "--data-size" "10" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" --command-key-pattern="P" --key-minimum=1 --key-maximum 10000000 -n 50000 -c 50 -t 4 --hide-histogram --test-time 180'
   resources:
     requests:
       cpus: "4"
diff --git a/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values-pipeline-10.yml b/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values-pipeline-10.yml
index 46ff4bb..b0994a0 100644
--- a/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values-pipeline-10.yml
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values-pipeline-10.yml
@@ -17,7 +17,7 @@ build-variants:
 clientconfig:
   run_image: redislabs/memtier_benchmark:edge
   tool: memtier_benchmark
-  arguments: '"--pipeline" "10" "--data-size" "1000" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" --command-key-pattern="P" --key-minimum=1 --key-maximum 1000000 -n 5000 -c 50 -t 4 --hide-histogram'
+  arguments: '"--pipeline" "10" "--data-size" "1000" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" --command-key-pattern="P" --key-minimum=1 --key-maximum 1000000 -n 5000 -c 50 -t 4 --hide-histogram --test-time 180'
   resources:
     requests:
       cpus: "4"
diff --git a/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values.yml b/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values.yml
index 549b70a..6f5bed5 100644
--- a/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values.yml
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values.yml
@@ -17,7 +17,7 @@ build-variants:
 clientconfig:
   run_image: redislabs/memtier_benchmark:edge
   tool: memtier_benchmark
-  arguments: '"--data-size" "1000" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" --command-key-pattern="P" --key-minimum=1 --key-maximum 1000000 -n 5000 -c 50 -t 4 --hide-histogram'
+  arguments: '"--data-size" "1000" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" --command-key-pattern="P" --key-minimum=1 --key-maximum 1000000 -n 5000 -c 50 -t 4 --hide-histogram --test-time 180'
   resources:
     requests:
       cpus: "4"

From ed07f7b1e57ce47be2e0c7ef5bcfc4f924d30374 Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Mon, 9 May 2022 18:52:57 +0000
Subject: [PATCH 3/9] removed the -n parameter, since it was incompatible with --test-time and was causing the benchmark to fail

---
 ...-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
index 19142b1..74ca96f 100644
--- a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
@@ -17,7 +17,7 @@ build-variants:
 clientconfig:
   run_image: redislabs/memtier_benchmark:edge
   tool: memtier_benchmark
-  arguments: '"--pipeline" "10" "--data-size" "100" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" "--command-key-pattern" "P" --key-minimum=1 --key-maximum 10000000 -n 50000 -c 50 -t 4 --hide-histogram --test-time 180'
+  arguments: '"--pipeline" "10" "--data-size" "100" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" "--command-key-pattern" "P" --key-minimum=1 --key-maximum 10000000 -c 50 -t 4 --hide-histogram --test-time 180'
   resources:
     requests:
       cpus: "4"

From 90733b30cae43b0ddc2a09ad16513f06100e78c2 Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Mon, 9 May 2022 18:58:59 +0000
Subject: [PATCH 4/9] adding --test-time=180 to testcase memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml

---
 ...-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
index 4744946..f75c29d 100644
--- a/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml
@@ -17,7 +17,7 @@ build-variants:
 clientconfig:
   run_image: redislabs/memtier_benchmark:edge
   tool: memtier_benchmark
-  arguments: '"--pipeline" "10" "--data-size" "100" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" "--command-key-pattern" "P" --key-minimum=1 --key-maximum 10000000 -c 50 -t 4 --hide-histogram'
+  arguments: '"--pipeline" "10" "--data-size" "100" --command "HSET __key__ field1 __data__ field2 __data__ field3 __data__ field4 __data__ field5 __data__" "--command-key-pattern" "P" --key-minimum=1 --key-maximum 10000000 -c 50 -t 4 --hide-histogram --test-time=180'
   resources:
     requests:
       cpus: "4"
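
Background for PATCH 3 and PATCH 4: memtier_benchmark stops either after a fixed number of requests per client (-n/--requests) or after a fixed duration (--test-time), and, as the commit message above notes, combining the two made the run fail, so the time-based form drops -n. The stand-alone command line below only illustrates that flag combination; the host, port and key-space values are examples, not values taken from the test suites.

    memtier_benchmark -s 127.0.0.1 -p 6379 \
        --pipeline 10 --data-size 100 \
        --command "HSET __key__ field1 __data__" --command-key-pattern P \
        --key-minimum=1 --key-maximum 10000000 \
        -c 50 -t 4 --hide-histogram --test-time 180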

From feff069cca054bf34804cb625b11c620497aaf08 Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Tue, 10 May 2022 20:06:25 +0000
Subject: [PATCH 5/9] Re-order --preserve_temporary_client_dirs and --client_aggregated_results_folder handling so that we can create an aggregated folder and still delete the temp files afterwards

In addition, delete the temporary JSON files created by redis-benchmark
(memtier_benchmark does not create them).
---
 .../__runner__/runner.py | 28 +++++++++++++++++-----------
 1 file changed, 17 insertions(+), 11 deletions(-)

diff --git a/redis_benchmarks_specification/__runner__/runner.py b/redis_benchmarks_specification/__runner__/runner.py
index 5b7dcf6..b9b0942 100644
--- a/redis_benchmarks_specification/__runner__/runner.py
+++ b/redis_benchmarks_specification/__runner__/runner.py
@@ -506,17 +506,6 @@ def process_self_contained_coordinator_stream(
                     )
                     pass

-                if preserve_temporary_client_dirs is True:
-                    logging.info(
-                        "Preserving temporary client dir {}".format(
-                            temporary_dir_client
-                        )
-                    )
-                else:
-                    logging.info(
-                        "Removing temporary client dir {}".format(temporary_dir_client)
-                    )
-                    shutil.rmtree(temporary_dir_client, ignore_errors=True)
                 if client_aggregated_results_folder != "":
                     os.makedirs(client_aggregated_results_folder, exist_ok=True)
                     dest_fpath = "{}/{}".format(
@@ -531,6 +520,23 @@
                     shutil.copy(full_result_path, dest_fpath)

                 overall_result &= test_result
+                if preserve_temporary_client_dirs is True:
+                    logging.info(
+                        "Preserving temporary client dir {}".format(
+                            temporary_dir_client
+                        )
+                    )
+                else:
+                    if "redis-benchmark" in benchmark_tool:
+                        os.remove(full_result_path)
+                        logging.info(
+                            "Removing temporary JSON file {}".format(full_result_path)
+                        )
+                    shutil.rmtree(temporary_dir_client, ignore_errors=True)
+                    logging.info(
+                        "Removing temporary client dir {}".format(temporary_dir_client)
+                    )
+
     table_name = "Results for entire test-suite".format(test_name)
     results_matrix_headers = [
         "Test Name",

From a5c08cb5dced61c1fbd821bddc7c637241f82e74 Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Wed, 11 May 2022 21:14:14 +0000
Subject: [PATCH 6/9] change the auto-remove to True, so that we don't accumulate unused containers

---
 .../self_contained_coordinator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
index bd96389..96f6ac4 100644
--- a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
+++ b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
@@ -616,7 +616,7 @@ def process_self_contained_coordinator_stream(
                             "mode": "rw",
                         },
                     },
-                    auto_remove=False,
+                    auto_remove=True,
                     privileged=True,
                     working_dir=benchmark_tool_workdir,
                     command=benchmark_command_str,

From 79791a96cfc35979e1060f0e6b42de68eb1a8d58 Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Tue, 12 Jul 2022 12:53:33 -0700
Subject: [PATCH 7/9] added skeleton for executing tools, such as pcm_monitor, during the benchmark

---
 .../__runner__/args.py | 7 +++++
 .../__runner__/runner.py | 24 ++++++++++++++-
 .../__setups__/tools.py | 29 +++++++++++++++++++
 .../setups/tools/pcm-memory.yml | 6 ++++
 4 files changed, 65 insertions(+), 1 deletion(-)
 create mode 100644 redis_benchmarks_specification/__setups__/tools.py
 create mode 100644 redis_benchmarks_specification/setups/tools/pcm-memory.yml

diff --git a/redis_benchmarks_specification/__runner__/args.py b/redis_benchmarks_specification/__runner__/args.py
index 02ae79b..7df0fef 100644
--- a/redis_benchmarks_specification/__runner__/args.py
+++ b/redis_benchmarks_specification/__runner__/args.py
@@ -1,6 +1,7 @@
 import argparse
 from redis_benchmarks_specification.__common__.env import (
+    SPECS_PATH_SETUPS,
     SPECS_PATH_TEST_SUITES,
     DATASINK_RTS_HOST,
     DATASINK_RTS_PORT,
@@ -40,6 +41,12 @@ def create_client_runner_args(project_name):
         default=SPECS_PATH_TEST_SUITES,
         help="Test suites folder, containing the different test variations",
     )
+    parser.add_argument(
+        "--setups-folder",
+        type=str,
+        default=SPECS_PATH_SETUPS,
+        help="Setups folder, containing the build environment variations sub-folder that we use to trigger different build artifacts",
+    )
     parser.add_argument(
         "--test",
         type=str,
diff --git a/redis_benchmarks_specification/__runner__/runner.py b/redis_benchmarks_specification/__runner__/runner.py
index 3929d7b..ca69ca5 100644
--- a/redis_benchmarks_specification/__runner__/runner.py
+++ b/redis_benchmarks_specification/__runner__/runner.py
@@ -55,6 +55,11 @@
 )
 from redis_benchmarks_specification.__runner__.args import create_client_runner_args

+from redis_benchmarks_specification.__setups__.tools import (
+    get_tools,
+    start_tools_if_required,
+)
+

 def main():
     _, _, project_version = populate_with_poetry_data()
@@ -91,6 +96,15 @@
             )
         )

+    tools_folder = os.path.abspath(args.setups_folder + "/tools")
+    logging.info("Using tools folder dir {}".format(tools_folder))
+    tools_files = get_tools(tools_folder)
+    logging.info(
+        "There are a total of {} tools in folder {}".format(
+            len(tools_files), tools_folder
+        )
+    )
+
     datasink_conn = None
     if args.datasink_push_results_redistimeseries:
         logging.info(
@@ -157,8 +171,9 @@
             testsuite_spec_files,
             {},
             running_platform,
+            tools_files,
             profilers_enabled,
-            profilers_list,
+            profilers_list,
             tls_enabled,
             tls_skip_verify,
             tls_cert,
@@ -221,6 +236,7 @@ def process_self_contained_coordinator_stream(
     testsuite_spec_files,
     topologies_map,
     running_platform,
+    tools_files,
     profilers_enabled=False,
     profilers_list=[],
     tls_enabled=False,
@@ -439,6 +455,12 @@
                         profiler_frequency,
                         profiler_call_graph_mode,
                     )
+
+                    #start the data collection tools
+                    start_tools_if_required(
+                        tools_files
+                    )
+
                     logging.info(
                         "Using docker image {} as benchmark client image (cpuset={}) with the following args: {}".format(
                             client_container_image,
diff --git a/redis_benchmarks_specification/__setups__/tools.py b/redis_benchmarks_specification/__setups__/tools.py
new file mode 100644
index 0000000..6abacfb
--- /dev/null
+++ b/redis_benchmarks_specification/__setups__/tools.py
@@ -0,0 +1,29 @@
+import logging
+import os
+import pathlib
+import yaml
+
+
+def get_tools(tools_folder):
+    files = pathlib.Path(tools_folder).glob("*.yml")
+    files = [str(x) for x in files]
+    logging.info(
+        "Running tools: {}".format(
+            " ".join([str(x) for x in files])
+        )
+    )
+    return files
+
+def start_tools_if_required(tools_files):
+    logging.info(
+        "Running tools: {}".format(
+            " ".join([str(x) for x in tools_files])
+        )
+    )
+    for tool_file in tools_files:
+        with open(tool_file) as stream:
+            tool_config = yaml.safe_load(stream)
+            command = tool_config["command"]
+            tool_stream = os.popen(command)
+            output = tool_stream.read()
+            logging.info(output)
diff --git a/redis_benchmarks_specification/setups/tools/pcm-memory.yml b/redis_benchmarks_specification/setups/tools/pcm-memory.yml
new file mode 100644
index 0000000..095a813
--- /dev/null
+++ b/redis_benchmarks_specification/setups/tools/pcm-memory.yml
@@ -0,0 +1,6 @@
+version: 0.1
+name: pcm-memory
+description: "Memory latency and bandwidth monitoring"
+command: "pcm-memory"
+env:
+  PCM_IGNORE_ARCH_PERFMON: "1"
\ No newline at end of file
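
The new setups/tools directory introduced above is meant to hold one YAML file per data-collection tool, following the same shape as pcm-memory.yml (version, name, description, command, env). Any *.yml dropped into the folder is picked up by get_tools() automatically; note that at this stage start_tools_if_required() reads only the command field, so the env mapping is descriptive only. A second, purely hypothetical tool file could look like this:

    version: 0.1
    name: turbostat
    description: "CPU frequency and C-state monitoring"
    command: "turbostat --quiet"
    env:
      LC_ALL: "C"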

From ed25c2cddad4931ab3c6eea4436eeec6aae9bab9 Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Tue, 12 Jul 2022 13:44:44 -0700
Subject: [PATCH 8/9] added data collection tools to the coordinator as well

---
 .../__runner__/runner.py | 6 +++---
 .../self_contained_coordinator.py | 22 ++++++++++++++++++++++
 .../{tools.py => data_collection_tools.py} | 0
 .../pcm-memory.yml | 0
 4 files changed, 25 insertions(+), 3 deletions(-)
 rename redis_benchmarks_specification/__setups__/{tools.py => data_collection_tools.py} (100%)
 rename redis_benchmarks_specification/setups/{tools => data-collection-tools}/pcm-memory.yml (100%)

diff --git a/redis_benchmarks_specification/__runner__/runner.py b/redis_benchmarks_specification/__runner__/runner.py
index ca69ca5..ca8938c 100644
--- a/redis_benchmarks_specification/__runner__/runner.py
+++ b/redis_benchmarks_specification/__runner__/runner.py
@@ -55,7 +55,7 @@
 )
 from redis_benchmarks_specification.__runner__.args import create_client_runner_args

-from redis_benchmarks_specification.__setups__.tools import (
+from redis_benchmarks_specification.__setups__.data_collection_tools import (
     get_tools,
     start_tools_if_required,
 )
@@ -96,7 +96,7 @@
             )
         )

-    tools_folder = os.path.abspath(args.setups_folder + "/tools")
+    tools_folder = os.path.abspath(args.setups_folder + "/data-collection-tools")
     logging.info("Using tools folder dir {}".format(tools_folder))
     tools_files = get_tools(tools_folder)
     logging.info(
@@ -456,7 +456,7 @@
                         profiler_call_graph_mode,
                     )

-                    #start the data collection tools
+                    # start data collection tools
                     start_tools_if_required(
                         tools_files
                     )
diff --git a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
index 9361a24..52a1a8a 100644
--- a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
+++ b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
@@ -66,6 +66,10 @@
     restore_build_artifacts_from_test_details,
 )
 from redis_benchmarks_specification.__setups__.topologies import get_topologies
+from redis_benchmarks_specification.__setups__.data_collection_tools import (
+    get_tools,
+    start_tools_if_required,
+)


 def main():
@@ -112,6 +116,15 @@
             )
         )

+    tools_folder = os.path.abspath(args.setups_folder + "/data-collection-tools")
+    logging.info("Using tools folder dir {}".format(tools_folder))
+    tools_files = get_tools(tools_folder)
+    logging.info(
+        "There are a total of {} tools in folder {}".format(
+            len(tools_files), tools_folder
+        )
+    )
+
     logging.info(
         "Reading event streams from: {}:{} with user {}".format(
             args.event_stream_host, args.event_stream_port, args.event_stream_user
@@ -213,6 +226,7 @@
                 testsuite_spec_files,
                 topologies_map,
                 running_platform,
+                tools_files,
                 profilers_enabled,
                 profilers_list,
                 grafana_profile_dashboard,
@@ -260,6 +274,7 @@ def self_contained_coordinator_blocking_read(
     testsuite_spec_files,
     topologies_map,
     platform_name,
+    tools_files,
     profilers_enabled,
     profilers_list,
     grafana_profile_dashboard="",
@@ -302,6 +317,7 @@
             testsuite_spec_files,
             topologies_map,
             platform_name,
+            tools_files,
             profilers_enabled,
             profilers_list,
             grafana_profile_dashboard,
@@ -369,6 +385,7 @@ def process_self_contained_coordinator_stream(
     testsuite_spec_files,
     topologies_map,
     running_platform,
+    tools_files,
     profilers_enabled=False,
     profilers_list=[],
     grafana_profile_dashboard="",
@@ -614,6 +631,11 @@
                         profiler_call_graph_mode,
                     )

+                    # start data collection tools
+                    start_tools_if_required(
+                        tools_files
+                    )
+
                     logging.info(
                         "Using docker image {} as benchmark client image (cpuset={}) with the following args: {}".format(
                             client_container_image,
diff --git a/redis_benchmarks_specification/__setups__/tools.py b/redis_benchmarks_specification/__setups__/data_collection_tools.py
similarity index 100%
rename from redis_benchmarks_specification/__setups__/tools.py
rename to redis_benchmarks_specification/__setups__/data_collection_tools.py
diff --git a/redis_benchmarks_specification/setups/tools/pcm-memory.yml b/redis_benchmarks_specification/setups/data-collection-tools/pcm-memory.yml
similarity index 100%
rename from redis_benchmarks_specification/setups/tools/pcm-memory.yml
rename to redis_benchmarks_specification/setups/data-collection-tools/pcm-memory.yml

From 6d65b3adb5b09f84d6a8199a83b17f890a668eac Mon Sep 17 00:00:00 2001
From: Martin Dimitrov
Date: Tue, 12 Jul 2022 14:25:36 -0700
Subject: [PATCH 9/9] launch the tool using subprocess in the background

---
 redis_benchmarks_specification/__runner__/runner.py | 2 +-
 .../__setups__/data_collection_tools.py | 13 +++++++++----
 2 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/redis_benchmarks_specification/__runner__/runner.py b/redis_benchmarks_specification/__runner__/runner.py
index ca8938c..7f9be6a 100644
--- a/redis_benchmarks_specification/__runner__/runner.py
+++ b/redis_benchmarks_specification/__runner__/runner.py
@@ -456,7 +456,7 @@
                         profiler_call_graph_mode,
                     )

-                    # start data collection tools
+                    # start data collection
                     start_tools_if_required(
                         tools_files
                     )
diff --git a/redis_benchmarks_specification/__setups__/data_collection_tools.py b/redis_benchmarks_specification/__setups__/data_collection_tools.py
index 6abacfb..5f9dcf3 100644
--- a/redis_benchmarks_specification/__setups__/data_collection_tools.py
+++ b/redis_benchmarks_specification/__setups__/data_collection_tools.py
@@ -1,7 +1,8 @@
 import logging
-import os
 import pathlib
 import yaml
+import subprocess
+import os


 def get_tools(tools_folder):
@@ -24,6 +25,10 @@ def start_tools_if_required(tools_files):
         with open(tool_file) as stream:
             tool_config = yaml.safe_load(stream)
             command = tool_config["command"]
-            tool_stream = os.popen(command)
-            output = tool_stream.read()
-            logging.info(output)
+
+            # Launch the tool in a background process
+            tool_output = subprocess.Popen(command)
+
+            #tool_output = os.popen(command)
+            # output = tool_output.read()
+            logging.info(tool_output)
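
A closing note on the subprocess call above: passing the command to subprocess.Popen() as a plain string works for a bare executable name such as "pcm-memory", but it does not split arguments, and the env block from the tool YAML is not applied. A possible hardening, sketched below and not part of this patch series, would split the command with shlex, merge the YAML env into the process environment, and keep the Popen handle so the collector could later be stopped (which the code above does not do yet). The helper name launch_tool is hypothetical.

    import logging
    import os
    import shlex
    import subprocess


    def launch_tool(tool_config):
        # Hypothetical helper: split the command string into argv, merge the
        # optional env mapping from the tool YAML into the current environment,
        # and return the background process handle so it can be stopped later.
        argv = shlex.split(tool_config["command"])
        env = dict(os.environ)
        env.update({k: str(v) for k, v in tool_config.get("env", {}).items()})
        proc = subprocess.Popen(
            argv, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        logging.info("Started data-collection tool %s (pid %d)", argv[0], proc.pid)
        return proc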