From a3420e338288de4d7cefd3f93226563b2accd4cf Mon Sep 17 00:00:00 2001
From: Nitzan Mordechai
Date: Wed, 31 Jul 2024 10:38:11 +0000
Subject: [PATCH] workunit/dencoder: fix corpus test for backward and
 forward compatibility

- change the non-deterministic check: a return code of 1 is also legitimate
- drop the unneeded is_dir check when the path already exists
- limit the number of workers to prevent errors

Fixes: https://tracker.ceph.com/issues/67263
Signed-off-by: NitzanMordhai
(cherry picked from commit 30921272ddee5e7c8aaf4bdb8d69645ce92ba379)
---
 qa/workunits/dencoder/test_readable.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/qa/workunits/dencoder/test_readable.py b/qa/workunits/dencoder/test_readable.py
index f032f7a9bbe..6eba0a4eb3f 100755
--- a/qa/workunits/dencoder/test_readable.py
+++ b/qa/workunits/dencoder/test_readable.py
@@ -61,7 +61,7 @@ def process_type(file_path, type):
     cmd_determ = [CEPH_DENCODER, "type", type, "is_deterministic"]
     determ_res = subprocess.run(cmd_determ, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     # Check if the command failed
-    if determ_res.returncode != 0:
+    if determ_res.returncode != 0 and determ_res.returncode != 1:
         error_message = determ_res.stderr.decode().strip()
         debug_print(f"Error running command: {error_message}")
         return 1
@@ -222,7 +222,7 @@ def check_backward_compat():
        version_name = version.name
        _backward_compat[version_name] = {}
        type_dir = archive_dir / version_name / "forward_incompat"
-       if type_dir.exists() and type_dir.is_dir():
+       if type_dir.exists():
            for type_entry in type_dir.iterdir():
                if type_entry.is_dir():
                    type_name = type_entry.name
@@ -243,7 +243,8 @@ def check_backward_compat():
 
 def process_batch(batch):
     results = []
-    with concurrent.futures.ThreadPoolExecutor() as executor:
+    max_workers = 15
+    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
         futures = [
             executor.submit(
                 test_object_wrapper, batch_type, vdir, arversion, current_ver
@@ -259,7 +260,8 @@
 
 # Create a generator that processes batches asynchronously
 def async_process_batches(task_batches):
-    with concurrent.futures.ProcessPoolExecutor() as executor:
+    max_workers = 10
+    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
         futures = [executor.submit(process_batch, batch) for batch in task_batches]
         for future in concurrent.futures.as_completed(futures):
             yield future.result()
-- 
2.39.5
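
Editor's note: the sketch below is not part of the patch. It condenses the two ideas the commit relies on in a minimal, self-contained form: treating ceph-dencoder exit status 1 ("not deterministic") as a valid answer rather than a failure, and bounding both executor pools (15 threads per batch, 10 processes across batches). The helper names, the bare "ceph-dencoder" invocation, and the sample type names are assumptions for illustration, not the workunit's actual code.

import concurrent.futures
import subprocess

CEPH_DENCODER = "ceph-dencoder"  # assumption: the binary is on PATH


def is_type_deterministic(type_name):
    """Return True/False for exit status 0/1, or None on a real failure."""
    res = subprocess.run(
        [CEPH_DENCODER, "type", type_name, "is_deterministic"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if res.returncode in (0, 1):
        # 0 = deterministic, 1 = non-deterministic; both are legitimate results
        return res.returncode == 0
    print(f"error running ceph-dencoder: {res.stderr.decode().strip()}")
    return None


def process_batch(batch):
    # Bounded thread pool per batch (15 workers), mirroring the patch.
    with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
        futures = [executor.submit(is_type_deterministic, t) for t in batch]
        return [f.result() for f in futures]


def async_process_batches(task_batches):
    # Bounded process pool across batches (10 workers), mirroring the patch.
    # process_batch stays module-level so it can be pickled for the worker processes.
    with concurrent.futures.ProcessPoolExecutor(max_workers=10) as executor:
        futures = [executor.submit(process_batch, b) for b in task_batches]
        for future in concurrent.futures.as_completed(futures):
            yield future.result()


if __name__ == "__main__":
    # Example usage with two small batches of (hypothetical) type names.
    batches = [["entity_addr_t", "utime_t"], ["pg_t"]]
    for results in async_process_batches(batches):
        print(results)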