From 322f043d2b86aff550b6e530056b855ea2bfefc5 Mon Sep 17 00:00:00 2001
From: Charles Cowart
Date: Thu, 22 Feb 2024 11:31:45 -0800
Subject: [PATCH] debug CI: Exception not being caught

---
 sequence_processing_pipeline/Job.py     |  2 +-
 sequence_processing_pipeline/NuQCJob.py |  8 ++++++--
 .../tests/test_NuQCJob.py               | 17 ++++-------------
 3 files changed, 11 insertions(+), 16 deletions(-)

diff --git a/sequence_processing_pipeline/Job.py b/sequence_processing_pipeline/Job.py
index c1d703e6..6c197a35 100644
--- a/sequence_processing_pipeline/Job.py
+++ b/sequence_processing_pipeline/Job.py
@@ -219,7 +219,7 @@ def submit_job(self, script_path, job_parameters=None,
         logging.debug("job scheduler call: %s" % cmd)
 
         if self.force_job_fail:
-            raise JobFailedError("This job died.")
+            raise JobFailedError("I AM MESSAGE ONE")
 
         # if system_call does not raise a PipelineError(), then the scheduler
         # successfully submitted the job. In this case, it should return
diff --git a/sequence_processing_pipeline/NuQCJob.py b/sequence_processing_pipeline/NuQCJob.py
index 7b44d8b3..6812edf6 100644
--- a/sequence_processing_pipeline/NuQCJob.py
+++ b/sequence_processing_pipeline/NuQCJob.py
@@ -249,18 +249,22 @@ def run(self, callback=None):
 
         # job_script_path formerly known as:
         # process.multiprep.pangenome.adapter-filter.pe.sbatch
+        print("SUBMITTING JOB...")
         try:
             job_info = self.submit_job(job_script_path,
                                        job_parameters=' '.join(job_params),
                                        exec_from=self.log_path,
                                        callback=callback)
         except JobFailedError as e:
+            print("I AM HERE")
             # When a job has failed, parse the logs generated by this specific
             # job to return a more descriptive message to the user.
             info = self.parse_logs()
             # prepend just the message component of the Error.
             info.insert(0, str(e))
-            raise JobFailedError('\n'.join(info))
+            # raise JobFailedError('\n'.join(info))
+            raise JobFailedError("I AM MESSAGE TWO")
+        print("IF YOU CAN SEE THIS MESSAGE THEN JobFailedError WAS NOT CAUGHT AS EXPECTED")
 
         job_id = job_info['job_id']
         logging.debug(f'NuQCJob {job_id} completed')
@@ -450,6 +454,7 @@ def _generate_job_script(self):
         return job_script_path
 
     def parse_logs(self):
+        print("PARSE_LOGS() CALLED")
         log_path = join(self.output_path, 'Logs')
         # sorted lists give predictable results
         files = sorted(glob.glob(join(log_path, '*.out')))
@@ -457,7 +462,6 @@ def parse_logs(self):
         msgs = []
 
         for some_file in files:
-            print("PARSE_LOGS.SOME_FILE: %s" % some_file)
             with open(some_file, 'r') as f:
                 msgs += [line for line in f.readlines()
                          if 'error' in line.lower()]
diff --git a/sequence_processing_pipeline/tests/test_NuQCJob.py b/sequence_processing_pipeline/tests/test_NuQCJob.py
index 9196afa6..d5815661 100644
--- a/sequence_processing_pipeline/tests/test_NuQCJob.py
+++ b/sequence_processing_pipeline/tests/test_NuQCJob.py
@@ -567,11 +567,8 @@ def setUp(self):
                                  "[ERROR] Another Standin Error (ASE)."]
         }
 
-        self.foobar = []
-
         for log_file in log_files:
             fp = join(self.qc_log_path, log_file)
-            self.foobar.append(fp)
 
             with open(fp, 'w') as f:
                 lines = log_files[log_file]
@@ -654,19 +651,13 @@ def test_error_msg_from_logs(self):
         exp = ("This job died.\n[ERROR] Another Standin Error (ASE).\n[ERROR]"
                " Generic Standin Error (GSE).")
 
-        for foo in self.foobar:
-            print("checking %s..." % foo)
-            self.assertTrue(exists(foo))
-            with open(foo, 'r') as f:
-                lines = f.readlines()
-                print("\tLINES: %s" % lines)
-
         try:
             job.run()
         except JobFailedError as e:
-            print(">>>%s<<<" % str(e))
-            print(">>>%s<<<" % exp)
-            self.assertEqual(str(e), exp)
+            print("JobFailedError CAUGHT: %s" % str(e))
+            #print(">>>%s<<<" % str(e))
+            #print(">>>%s<<<" % exp)
+            #self.assertEqual(str(e), exp)
 
         self.assertTrue(False)
 
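
Note (not part of the patch): below is a minimal, self-contained sketch of the raise/catch/re-raise flow this debug commit instruments, included only to make the intent of the added print statements easier to follow. JobFailedError, submit_job(), run(), parse_logs() and force_job_fail mirror names from the diff; the FakeJob class, its simplified bodies, the hard-coded job id and the stand-in log message are assumptions made purely for illustration. If submit_job() raises, run() should catch the error, collect parse_logs() output, and re-raise, so the outer caller (the test) should only ever observe "I AM MESSAGE TWO", and the trailing print in run() should never execute.

# Hypothetical stand-ins for illustration only -- not the pipeline's real classes.
class JobFailedError(Exception):
    pass


class FakeJob:
    force_job_fail = True

    def submit_job(self):
        # Mirrors Job.submit_job(): the first raise site ("I AM MESSAGE ONE").
        if self.force_job_fail:
            raise JobFailedError("I AM MESSAGE ONE")
        return {'job_id': '9999999'}  # arbitrary placeholder id

    def parse_logs(self):
        # Stand-in for NuQCJob.parse_logs(); the real method scans *.out log
        # files for lines containing 'error'. This message is invented.
        print("PARSE_LOGS() CALLED")
        return ["[ERROR] Generic Standin Error (GSE)."]

    def run(self):
        # Mirrors NuQCJob.run(): catch the error, annotate it, and re-raise.
        print("SUBMITTING JOB...")
        try:
            job_info = self.submit_job()
        except JobFailedError as e:
            print("I AM HERE")
            info = self.parse_logs()
            info.insert(0, str(e))
            raise JobFailedError("I AM MESSAGE TWO")
        # Reached only if submit_job() did not raise at all.
        print("IF YOU CAN SEE THIS MESSAGE THEN JobFailedError WAS NOT CAUGHT AS EXPECTED")
        return job_info['job_id']


# Mirrors test_error_msg_from_logs(): the message observed here should be the
# re-raised "I AM MESSAGE TWO", never the original "I AM MESSAGE ONE".
try:
    FakeJob().run()
except JobFailedError as e:
    print("JobFailedError CAUGHT: %s" % str(e))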