From c15fa36cbe6f58856f9f9b07bb0125ffb7484465 Mon Sep 17 00:00:00 2001
From: mightqxc
Date: Wed, 28 Jan 2026 09:06:37 +0100
Subject: [PATCH 1/3] htcondor: pretty

---
 pandaharvester/harvestermisc/htcondor_utils.py | 16 ----------------
 1 file changed, 16 deletions(-)

diff --git a/pandaharvester/harvestermisc/htcondor_utils.py b/pandaharvester/harvestermisc/htcondor_utils.py
index b8702861..76e9f683 100644
--- a/pandaharvester/harvestermisc/htcondor_utils.py
+++ b/pandaharvester/harvestermisc/htcondor_utils.py
@@ -39,11 +39,9 @@
 # logger
 baseLogger = core_utils.setup_logger("htcondor_utils")
 
-
 # module level lock
 moduleLock = threading.Lock()
 
-
 # List of job ads required
 CONDOR_JOB_ADS_LIST = [
     "ClusterId",
@@ -59,7 +57,6 @@
     "harvesterWorkerID",
 ]
 
-
 # harvesterID
 harvesterID = harvester_config.master.harvester_id
 
@@ -67,7 +64,6 @@
 
 # === Functions =================================================
 
-
 def synchronize(func):
     """
     synchronize decorator
@@ -80,7 +76,6 @@ def wrapper(*args, **kwargs):
 
     return wrapper
 
-
 def _runShell(cmd):
     """
     Run shell function
@@ -91,7 +86,6 @@ def _runShell(cmd):
     retCode = p.returncode
     return (retCode, stdOut, stdErr)
 
-
 def condor_job_id_from_workspec(workspec):
     """
     Generate condor job id with schedd host from workspec
@@ -102,7 +96,6 @@ def condor_job_id_from_workspec(workspec):
         batchid_str += ".0"
     return f"{workspec.submissionHost}#{batchid_str}"
 
-
 def get_host_batchid_map(workspec_list):
     """
     Get a dictionary of submissionHost: list of batchIDs from workspec_list
@@ -122,7 +115,6 @@ def get_host_batchid_map(workspec_list):
         host_batchid_map[host][batchid_str] = workspec
     return host_batchid_map
 
-
 def get_batchid_from_job(job_ads_dict):
     """
     Get batchID string from condor job dict
@@ -130,7 +122,6 @@ def get_batchid_from_job(job_ads_dict):
     batchid = f"{job_ads_dict['ClusterId']}.{job_ads_dict['ProcId']}"
     return batchid
 
-
 def get_job_id_tuple_from_batchid(batchid):
     """
     Get tuple (ClusterId, ProcId) from batchID string
@@ -142,7 +133,6 @@ def get_job_id_tuple_from_batchid(batchid):
         procid = 0
     return (clusterid, procid)
 
-
 # def jdl_to_map(jdl):
 #     """
 #     Transform jdl into dictionary
@@ -156,7 +146,6 @@ def get_job_id_tuple_from_batchid(batchid):
 #             ret_map[match(1)] = match(2)
 #     return ret_map
 
-
 def condor_submit_process(mp_queue, host, jdl_map_list, tmp_log):
     """
     Function for new process to submit condor
@@ -200,14 +189,11 @@ def condor_submit_process(mp_queue, host, jdl_map_list, tmp_log):
         errStr = f"submission failed; {e.__class__.__name__}: {e}"
         mp_queue.put((batchIDs_list, errStr))
 
-
 # ===============================================================
 
 # === Classes ===================================================
 
 # Condor queue cache fifo
-
-
 class CondorQCacheFifo(SpecialFIFOBase, metaclass=SingletonWithID):
     global_lock_id = -1
 
@@ -236,7 +222,6 @@ def unlock(self, key=None, force=False):
         else:
             return False
 
-
 # Condor client
 class CondorClient(object):
     @classmethod
@@ -978,5 +963,4 @@ def remove_with_python(self, batchIDs_list=[]):
         # Return
         return retMap
 
-
 # ===============================================================

From c996456791b09a412bdfa3dce2fc04c934c6af09 Mon Sep 17 00:00:00 2001
From: mightqxc
Date: Wed, 28 Jan 2026 10:15:39 +0100
Subject: [PATCH 2/3] test precommit

---
 pandaharvester/commit_timestamp.py             |  2 +-
 pandaharvester/harvestermisc/htcondor_utils.py | 14 +++++++++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/pandaharvester/commit_timestamp.py b/pandaharvester/commit_timestamp.py
index 90171e29..894778c9 100644
--- a/pandaharvester/commit_timestamp.py
+++ b/pandaharvester/commit_timestamp.py
@@ -1 +1 @@
-timestamp = "20-01-2026 13:00:32 on flin (by mightqxc)"
+timestamp = "28-01-2026 09:15:40 on flin (by mightqxc)"
diff --git a/pandaharvester/harvestermisc/htcondor_utils.py b/pandaharvester/harvestermisc/htcondor_utils.py
index 76e9f683..b86deeaf 100644
--- a/pandaharvester/harvestermisc/htcondor_utils.py
+++ b/pandaharvester/harvestermisc/htcondor_utils.py
@@ -64,6 +64,7 @@
 
 # === Functions =================================================
 
+
 def synchronize(func):
     """
     synchronize decorator
@@ -76,6 +77,7 @@ def wrapper(*args, **kwargs):
 
     return wrapper
 
+
 def _runShell(cmd):
     """
     Run shell function
@@ -86,6 +88,7 @@ def _runShell(cmd):
     retCode = p.returncode
     return (retCode, stdOut, stdErr)
 
+
 def condor_job_id_from_workspec(workspec):
     """
     Generate condor job id with schedd host from workspec
@@ -96,6 +99,7 @@ def condor_job_id_from_workspec(workspec):
         batchid_str += ".0"
     return f"{workspec.submissionHost}#{batchid_str}"
 
+
 def get_host_batchid_map(workspec_list):
     """
     Get a dictionary of submissionHost: list of batchIDs from workspec_list
@@ -115,6 +119,7 @@ def get_host_batchid_map(workspec_list):
         host_batchid_map[host][batchid_str] = workspec
     return host_batchid_map
 
+
 def get_batchid_from_job(job_ads_dict):
     """
     Get batchID string from condor job dict
@@ -122,6 +127,7 @@ def get_batchid_from_job(job_ads_dict):
     batchid = f"{job_ads_dict['ClusterId']}.{job_ads_dict['ProcId']}"
     return batchid
 
+
 def get_job_id_tuple_from_batchid(batchid):
     """
     Get tuple (ClusterId, ProcId) from batchID string
@@ -133,6 +139,7 @@ def get_job_id_tuple_from_batchid(batchid):
         procid = 0
     return (clusterid, procid)
 
+
 # def jdl_to_map(jdl):
 #     """
 #     Transform jdl into dictionary
@@ -146,6 +153,7 @@ def get_job_id_tuple_from_batchid(batchid):
 #             ret_map[match(1)] = match(2)
 #     return ret_map
 
+
 def condor_submit_process(mp_queue, host, jdl_map_list, tmp_log):
     """
     Function for new process to submit condor
@@ -189,10 +197,12 @@ def condor_submit_process(mp_queue, host, jdl_map_list, tmp_log):
         errStr = f"submission failed; {e.__class__.__name__}: {e}"
         mp_queue.put((batchIDs_list, errStr))
 
+
 # ===============================================================
 
 # === Classes ===================================================
 
+
 # Condor queue cache fifo
 class CondorQCacheFifo(SpecialFIFOBase, metaclass=SingletonWithID):
     global_lock_id = -1
@@ -222,6 +232,7 @@ def unlock(self, key=None, force=False):
         else:
             return False
 
+
 # Condor client
 class CondorClient(object):
     @classmethod
@@ -700,7 +711,7 @@ def submit(self, jdl_list, use_spool=False):
         if self.condor_api_type == "python":
             try:
                 # TODO: submit_with_python will meet segfault or c++ error after many times of submission; need help from condor team
-                # TODO: submit_with_python_proces has no such error but spawns some processes that will not terminate after harvester stops
+                # TODO: submit_with_python_process has no such error but spawns some processes that will not terminate after harvester stops
                 # TODO: Fall back to submit_with_command for now
                 # retVal = self.submit_with_python(jdl_list, use_spool)
                 # retVal = self.submit_with_python_proces(jdl_list, use_spool)
                 retVal = self.submit_with_command(jdl_list, use_spool)
             except Exception as e:
                 tmpLog.error(f"Exception {e.__class__.__name__}: {e}")
@@ -963,4 +974,5 @@ def remove_with_python(self, batchIDs_list=[]):
         # Return
         return retMap
 
+
 # ===============================================================

From 5f87acaa4f5ab57a68f5866e77ebe32c7da68635 Mon Sep 17 00:00:00 2001
From: mightqxc
Date: Wed, 28 Jan 2026 10:17:12 +0100
Subject: [PATCH 3/3] typo

---
 pandaharvester/commit_timestamp.py             | 2 +-
 pandaharvester/harvestermisc/htcondor_utils.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pandaharvester/commit_timestamp.py b/pandaharvester/commit_timestamp.py
index 894778c9..ae9f5a69 100644
--- a/pandaharvester/commit_timestamp.py
+++ b/pandaharvester/commit_timestamp.py
@@ -1 +1 @@
-timestamp = "28-01-2026 09:15:40 on flin (by mightqxc)"
+timestamp = "28-01-2026 09:17:13 on flin (by mightqxc)"
diff --git a/pandaharvester/harvestermisc/htcondor_utils.py b/pandaharvester/harvestermisc/htcondor_utils.py
index b86deeaf..74993122 100644
--- a/pandaharvester/harvestermisc/htcondor_utils.py
+++ b/pandaharvester/harvestermisc/htcondor_utils.py
@@ -714,7 +714,7 @@ def submit(self, jdl_list, use_spool=False):
                 # TODO: submit_with_python_process has no such error but spawns some processes that will not terminate after harvester stops
                 # TODO: Fall back to submit_with_command for now
                 # retVal = self.submit_with_python(jdl_list, use_spool)
-                # retVal = self.submit_with_python_proces(jdl_list, use_spool)
+                # retVal = self.submit_with_python_process(jdl_list, use_spool)
                 retVal = self.submit_with_command(jdl_list, use_spool)
             except Exception as e:
                 tmpLog.error(f"Exception {e.__class__.__name__}: {e}")