Skip to content

Commit 68769a2

Browse files
Running tox using self hosted runner. maxmemory check fix on client-runner (#189)
* tox using self hosted runner. maxmemory check fix * Updated tox workflow description * Updated badge
1 parent 62e9098 commit 68769a2

File tree

5 files changed

+141
-80
lines changed

5 files changed

+141
-80
lines changed

.github/workflows/tox-self-hosted.yml

Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,105 @@
1+
name: Run tox on Ubuntu 22.04
2+
3+
on:
4+
push:
5+
paths-ignore:
6+
- 'docs/**'
7+
- '**/*.rst'
8+
- '**/*.md'
9+
branches:
10+
- master
11+
- main
12+
- '[0-9].[0-9]'
13+
pull_request:
14+
branches:
15+
- master
16+
- main
17+
18+
jobs:
19+
start-runner:
20+
name: Start self-hosted EC2 runner
21+
runs-on: ubuntu-latest
22+
outputs:
23+
label: ${{ steps.start-ec2-runner.outputs.label }}
24+
ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }}
25+
steps:
26+
- name: Configure AWS credentials
27+
uses: aws-actions/configure-aws-credentials@v1
28+
with:
29+
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
30+
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
31+
aws-region: ${{ secrets.AWS_REGION }}
32+
- name: Start EC2 runner
33+
id: start-ec2-runner
34+
uses: machulav/ec2-github-runner@v2
35+
with:
36+
mode: start
37+
github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
38+
# Ubuntu 22.04 region AMI
39+
ec2-image-id: ami-050b6fdb9f05d7b32
40+
ec2-instance-type: c5.9xlarge
41+
subnet-id: ${{ secrets.AWS_EC2_SUBNET_ID }}
42+
security-group-id: ${{ secrets.AWS_EC2_SG_ID }}
43+
44+
tox:
45+
name: Run tox on the runner
46+
needs: start-runner # required to start the main job when the runner is ready
47+
runs-on: ${{ needs.start-runner.outputs.label }} # run the job on the newly created runner
48+
steps:
49+
- name: checkout
50+
uses: actions/checkout@v3
51+
52+
- name: Print runner info
53+
run: |
54+
printf "Runner lscpu:\n$(lscpu)\n"
55+
printf "Runner lsmem:\n$(lsmem)\n"
56+
printf "Runner nproc:\n$(nproc)\n"
57+
printf "Runner uname:\n$(uname -a)\n"
58+
- name: Install benchmark dependencies
59+
run: |
60+
sudo apt update -y
61+
sudo apt install python3-pip -y
62+
sudo pip3 install --upgrade pip
63+
sudo apt install docker.io -y
64+
pip3 install -r dev_requirements.txt
65+
66+
- name: Install Poetry
67+
run: |
68+
curl -sSL https://install.python-poetry.org | python3 -
69+
70+
- name: Install Dev requirements
71+
run: |
72+
pip install -U setuptools wheel
73+
pip install -r dev_requirements.txt
74+
75+
- name: Run tox
76+
run: |
77+
tox
78+
79+
- name: Upload coverage to Codecov
80+
uses: codecov/codecov-action@v2
81+
with:
82+
token: ${{secrets.CODECOV_TOKEN}}
83+
fail_ci_if_error: true
84+
85+
stop-runner:
86+
name: Stop self-hosted EC2 runner
87+
needs:
88+
- start-runner # required to get output from the start-runner job
89+
- tox # required to wait when the main job is done
90+
runs-on: ubuntu-latest
91+
if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs
92+
steps:
93+
- name: Configure AWS credentials
94+
uses: aws-actions/configure-aws-credentials@v1
95+
with:
96+
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
97+
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
98+
aws-region: ${{ secrets.AWS_REGION }}
99+
- name: Stop EC2 runner
100+
uses: machulav/ec2-github-runner@v2
101+
with:
102+
mode: stop
103+
github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
104+
label: ${{ needs.start-runner.outputs.label }}
105+
ec2-instance-id: ${{ needs.start-runner.outputs.ec2-instance-id }}

.github/workflows/tox.yml

Lines changed: 0 additions & 49 deletions
This file was deleted.

Readme.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33

44
[![codecov](https://codecov.io/gh/redis/redis-benchmarks-specification/branch/main/graph/badge.svg?token=GS64MV1H4W)](https://codecov.io/gh/redis/redis-benchmarks-specification)
5-
[![CI tests](https://github.com/redis/redis-benchmarks-specification/actions/workflows/tox.yml/badge.svg)](https://github.com/redis/redis-benchmarks-specification/actions/workflows/tox.yml)
5+
[![Run tox on Ubuntu 22.04](https://github.com/redis/redis-benchmarks-specification/actions/workflows/tox-self-hosted.yml/badge.svg)](https://github.com/redis/redis-benchmarks-specification/actions/workflows/tox-self-hosted.yml)
66
[![PyPI version](https://badge.fury.io/py/redis-benchmarks-specification.svg)](https://pypi.org/project/redis-benchmarks-specification)
77

88
<!-- toc -->

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "redis-benchmarks-specification"
3-
version = "0.1.63"
3+
version = "0.1.64"
44
description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
55
authors = ["filipecosta90 <filipecosta.90@gmail.com>","Redis Performance Group <performance@redis.com>"]
66
readme = "Readme.md"

redis_benchmarks_specification/__runner__/runner.py

Lines changed: 34 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -459,36 +459,12 @@ def process_self_contained_coordinator_stream(
459459
logging.info("Sending FLUSHALL to the DB")
460460
r.flushall()
461461

462-
benchmark_required_memory = 0
463-
maxmemory = 0
464-
if "resources" in benchmark_config["dbconfig"]:
465-
resources = benchmark_config["dbconfig"]["resources"]
466-
if "requests" in resources:
467-
resources_requests = benchmark_config["dbconfig"][
468-
"resources"
469-
]["requests"]
470-
if "memory" in resources_requests:
471-
benchmark_required_memory = resources_requests["memory"]
472-
benchmark_required_memory = parse_size(
473-
benchmark_required_memory
474-
)
475-
logging.info(
476-
"Benchmark required memory: {} Bytes".format(
477-
benchmark_required_memory
478-
)
479-
)
480-
481-
maxmemory = r.info("memory")["maxmemory"]
482-
if maxmemory == 0:
483-
total_system_memory = r.info("memory")["total_system_memory"]
484-
logging.info(
485-
" Using total system memory as max {}".format(
486-
total_system_memory
487-
)
488-
)
489-
maxmemory = total_system_memory
462+
benchmark_required_memory = get_benchmark_required_memory(
463+
benchmark_config
464+
)
465+
maxmemory = get_maxmemory(r)
490466
if benchmark_required_memory > maxmemory:
491-
logging.WARN(
467+
logging.warning(
492468
"Skipping test {} given maxmemory of server is below the benchmark required memory: {} < {}".format(
493469
test_name, maxmemory, benchmark_required_memory
494470
)
@@ -878,6 +854,35 @@ def process_self_contained_coordinator_stream(
878854
)
879855

880856

857+
def get_maxmemory(r):
858+
maxmemory = int(r.info("memory")["maxmemory"])
859+
if maxmemory == 0:
860+
total_system_memory = int(r.info("memory")["total_system_memory"])
861+
logging.info(" Using total system memory as max {}".format(total_system_memory))
862+
maxmemory = total_system_memory
863+
else:
864+
logging.info(" Detected redis maxmemory config value {}".format(maxmemory))
865+
866+
return maxmemory
867+
868+
869+
def get_benchmark_required_memory(benchmark_config):
870+
benchmark_required_memory = 0
871+
if "resources" in benchmark_config["dbconfig"]:
872+
resources = benchmark_config["dbconfig"]["resources"]
873+
if "requests" in resources:
874+
resources_requests = benchmark_config["dbconfig"]["resources"]["requests"]
875+
if "memory" in resources_requests:
876+
benchmark_required_memory = resources_requests["memory"]
877+
benchmark_required_memory = int(parse_size(benchmark_required_memory))
878+
logging.info(
879+
"Benchmark required memory: {} Bytes".format(
880+
benchmark_required_memory
881+
)
882+
)
883+
return benchmark_required_memory
884+
885+
881886
def used_memory_check(test_name, benchmark_required_memory, r, stage):
882887
used_memory = r.info("memory")["used_memory"]
883888
used_memory_gb = int(math.ceil(float(used_memory) / 1024.0 / 1024.0 / 1024.0))

0 commit comments

Comments
 (0)