Allow specifying job names for benchmarks

Joshua Boniface 2024-09-18 14:55:12 -04:00
parent 736762901c
commit 41cd34ba4d
5 changed files with 39 additions and 13 deletions

View File

@@ -5069,6 +5069,10 @@ class API_Storage_Ceph_Benchmark(Resource):
                 "required": True,
                 "helptext": "A valid pool must be specified.",
             },
+            {
+                "name": "name",
+                "required": False,
+            },
         ]
     )
     @Authenticator
@@ -5084,6 +5088,11 @@ class API_Storage_Ceph_Benchmark(Resource):
             type: string
             required: true
             description: The PVC storage pool to benchmark
+          - in: query
+            name: name
+            type: string
+            required: false
+            description: An optional override name for the job
         responses:
           200:
             description: OK
@@ -5101,7 +5110,10 @@ class API_Storage_Ceph_Benchmark(Resource):
             }, 400
 
         task = run_celery_task(
-            "storage.benchmark", pool=reqargs.get("pool", None), run_on="primary"
+            "storage.benchmark",
+            pool=reqargs.get("pool", None),
+            name=reqargs.get("name", None),
+            run_on="primary",
         )
         return (
             {

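For reference, a minimal sketch of how a client could exercise the new query parameter against this endpoint; the cluster address and the X-Api-Key-style header below are hypothetical, while the endpoint path and the pool/name arguments are the ones documented above.

import requests

# Hypothetical cluster address and API key; adjust for a real deployment.
API_BASE = "http://pvc.local:7370/api/v1"
API_KEY = "secret-token"

# "pool" is required; "name" is the new optional job-name override.
resp = requests.post(
    f"{API_BASE}/storage/ceph/benchmark",
    headers={"X-Api-Key": API_KEY},  # assumed auth scheme
    params={"pool": "vms", "name": "pre-upgrade-baseline"},
)
print(resp.status_code, resp.json())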
View File

@@ -3755,6 +3755,13 @@ def cli_storage_benchmark():
 @click.command(name="run", short_help="Run a storage benchmark.")
 @connection_req
 @click.argument("pool")
+@click.option(
+    "--name",
+    "name",
+    default=None,
+    show_default=False,
+    help="Use a custom name for the job",
+)
 @click.option(
     "--wait/--no-wait",
     "wait_flag",
@@ -3766,12 +3773,14 @@ def cli_storage_benchmark():
 @confirm_opt(
     "Storage benchmarks take approximately 10 minutes to run and generate significant load on the cluster; they should be run sparingly. Continue"
 )
-def cli_storage_benchmark_run(pool, wait_flag):
+def cli_storage_benchmark_run(pool, name, wait_flag):
     """
     Run a storage benchmark on POOL in the background.
     """
-    retcode, retmsg = pvc.lib.storage.ceph_benchmark_run(CLI_CONFIG, pool, wait_flag)
+    retcode, retmsg = pvc.lib.storage.ceph_benchmark_run(
+        CLI_CONFIG, pool, name, wait_flag
+    )
 
     if retcode and wait_flag:
         retmsg = wait_for_celery_task(CLI_CONFIG, retmsg)

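A self-contained sketch (not the pvc CLI itself) of the Click pattern added above: an optional --name that defaults to None, so omitting the flag leaves the server-side auto-generated job name in place.

import click

@click.command(name="run")
@click.argument("pool")
@click.option("--name", "name", default=None, help="Use a custom name for the job")
def benchmark_run(pool, name):
    # name is None unless --name was given, mirroring the new option above
    click.echo(f"pool={pool} name={name}")

if __name__ == "__main__":
    benchmark_run()

With the real CLI this corresponds to an invocation along the lines of: pvc storage benchmark run --name nightly-baseline POOL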
View File

@@ -1725,15 +1725,17 @@ def format_list_snapshot(config, snapshot_list):
 #
 # Benchmark functions
 #
-def ceph_benchmark_run(config, pool, wait_flag):
+def ceph_benchmark_run(config, pool, name, wait_flag):
     """
     Run a storage benchmark against {pool}
 
     API endpoint: POST /api/v1/storage/ceph/benchmark
-    API arguments: pool={pool}
+    API arguments: pool={pool}, name={name}
     API schema: {message}
     """
     params = {"pool": pool}
+    if name:
+        params["name"] = name
     response = call_api(config, "post", "/storage/ceph/benchmark", params=params)
 
     return get_wait_retdata(response, wait_flag)

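A standalone illustration of the conditional-parameter idiom used in ceph_benchmark_run: name is only added to the query string when the caller supplied one, so an omitted option never sends name= at all and the worker falls back to its default job name. The helper name below is illustrative, not part of the change.

def build_params(pool, name=None):
    # Mirror of the params-building logic above.
    params = {"pool": pool}
    if name:
        params["name"] = name
    return params

assert build_params("vms") == {"pool": "vms"}
assert build_params("vms", "nightly") == {"pool": "vms", "name": "nightly"}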
View File

@@ -410,11 +410,14 @@ def run_benchmark_job(
     return resource_data, jstdout
 
 
-def worker_run_benchmark(zkhandler, celery, config, pool):
+def worker_run_benchmark(zkhandler, celery, config, pool, name):
     # Phase 0 - connect to databases
-    cur_time = datetime.now().isoformat(timespec="seconds")
-    cur_primary = zkhandler.read("base.config.primary_node")
-    job_name = f"{cur_time}_{cur_primary}"
+    if not name:
+        cur_time = datetime.now().isoformat(timespec="seconds")
+        cur_primary = zkhandler.read("base.config.primary_node")
+        job_name = f"{cur_time}_{cur_primary}"
+    else:
+        job_name = name
 
     current_stage = 0
     total_stages = 13

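A standalone sketch of the job-name fallback introduced above: with no override the name becomes "<ISO-8601 timestamp>_<primary node>", otherwise the caller-supplied name is used verbatim. The node name below is made up for illustration.

from datetime import datetime

def resolve_job_name(name=None, primary_node="hv1"):
    # Same fallback logic as worker_run_benchmark, factored out for illustration.
    if not name:
        cur_time = datetime.now().isoformat(timespec="seconds")
        return f"{cur_time}_{primary_node}"
    return name

print(resolve_job_name())                # e.g. 2024-09-18T14:55:12_hv1
print(resolve_job_name("pre-upgrade"))   # pre-upgrade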
View File

@@ -96,12 +96,12 @@ def create_vm(
 @celery.task(name="storage.benchmark", bind=True, routing_key="run_on")
-def storage_benchmark(self, pool=None, run_on="primary"):
+def storage_benchmark(self, pool=None, name=None, run_on="primary"):
     @ZKConnection(config)
-    def run_storage_benchmark(zkhandler, self, pool):
-        return worker_run_benchmark(zkhandler, self, config, pool)
+    def run_storage_benchmark(zkhandler, self, pool, name):
+        return worker_run_benchmark(zkhandler, self, config, pool, name)
 
-    return run_storage_benchmark(self, pool)
+    return run_storage_benchmark(self, pool, name)
 
 
 @celery.task(name="cluster.autobackup", bind=True, routing_key="run_on")