Merge pull request #13180 from mvdbeek/dev
Merge 21.09 into dev
mvdbeek authored Jan 18, 2022
2 parents 210039e + 9dcea35 · commit 4147382
Showing 4 changed files with 67 additions and 23 deletions.
9 changes: 8 additions & 1 deletion lib/galaxy/datatypes/text.py
@@ -12,7 +12,7 @@
 
 import yaml
 
-from galaxy.datatypes.data import get_file_peek, Text
+from galaxy.datatypes.data import get_file_peek, Headers, Text
 from galaxy.datatypes.metadata import MetadataElement, MetadataParameter
 from galaxy.datatypes.sniff import (
     build_sniff_from_prefix,
@@ -884,6 +884,13 @@ def get_mime(self):
         """Returns the mime type of the datatype"""
         return 'application/yaml'
 
+    def _yield_user_file_content(self, trans, from_dataset, filename, headers: Headers):
+        # Override non-standard application/yaml mediatype with
+        # non-standard text/x-yaml, so preview is shown in preview iframe,
+        # instead of downloading the file.
+        headers['content-type'] = 'text/x-yaml'
+        return super()._yield_user_file_content(trans, from_dataset, filename, headers)
+
     def _looks_like_yaml(self, file_prefix: FilePrefix):
         # Pattern used by SequenceSplitLocations
         if file_prefix.file_size < 50000 and not file_prefix.truncated:
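For context, a minimal standalone sketch of the pattern this hunk introduces — a subclass adjusting the response headers it is handed before delegating to its parent. The Text/Yaml classes below are simplified stand-ins, not Galaxy's actual implementations:

from typing import Dict

Headers = Dict[str, str]  # assumption: a plain str -> str header mapping


class Text:
    def _yield_user_file_content(self, filename: str, headers: Headers) -> bytes:
        # The base class serves raw bytes; headers travel with the HTTP response.
        headers.setdefault('content-type', 'text/plain')
        with open(filename, 'rb') as fh:
            return fh.read()


class Yaml(Text):
    def _yield_user_file_content(self, filename: str, headers: Headers) -> bytes:
        # Browsers render text/* inline in the preview iframe; an unrecognized
        # application/* type falls back to a download prompt instead.
        headers['content-type'] = 'text/x-yaml'
        return super()._yield_user_file_content(filename, headers)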
28 changes: 16 additions & 12 deletions lib/galaxy/model/__init__.py
@@ -325,15 +325,20 @@ def get_display_name(self):
 
 class UsesCreateAndUpdateTime:
 
+    update_time: DateTime
+
     @property
     def seconds_since_updated(self):
-        update_time = self.update_time or galaxy.model.orm.now.now()  # In case not yet flushed
-        return (galaxy.model.orm.now.now() - update_time).total_seconds()
+        update_time = self.update_time or now()  # In case not yet flushed
+        return (now() - update_time).total_seconds()
 
     @property
     def seconds_since_created(self):
-        create_time = self.create_time or galaxy.model.orm.now.now()  # In case not yet flushed
-        return (galaxy.model.orm.now.now() - create_time).total_seconds()
+        create_time = self.create_time or now()  # In case not yet flushed
+        return (now() - create_time).total_seconds()
 
+    def update(self):
+        self.update_time = now()
+
 
 class WorkerProcess(Base, UsesCreateAndUpdateTime, _HasTable):
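A minimal, self-contained sketch of the mixin behavior above, with plain classes standing in for the SQLAlchemy models and a local now() standing in for galaxy.model.orm.now.now. Moving update() into the mixin is what lets both dataset instances and collection instances bump their update_time:

from datetime import datetime, timezone


def now() -> datetime:
    # Stand-in for galaxy.model.orm.now.now
    return datetime.now(timezone.utc)


class UsesCreateAndUpdateTime:
    create_time = None
    update_time = None

    @property
    def seconds_since_updated(self) -> float:
        update_time = self.update_time or now()  # in case not yet flushed
        return (now() - update_time).total_seconds()

    def update(self) -> None:
        # Touch update_time so timestamp-polling clients notice the change.
        self.update_time = now()


class DatasetCollectionInstance(UsesCreateAndUpdateTime):
    pass


hdca = DatasetCollectionInstance()
hdca.update()
assert hdca.seconds_since_updated < 1.0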
@@ -805,7 +810,7 @@ def __init__(self, user, token=None):
         else:
             self.token = unique_id()
         self.user = user
-        self.expiration_time = galaxy.model.orm.now.now() + timedelta(hours=24)
+        self.expiration_time = now() + timedelta(hours=24)
 
 
 class DynamicTool(Base, Dictifiable, RepresentById):
@@ -1474,7 +1479,7 @@ def set_final_state(self, final_state, supports_skip_locked):
         WHERE job_id = :job_id;
         '''
         sa_session = object_session(self)
-        update_time = galaxy.model.orm.now.now()
+        update_time = now()
         self.update_hdca_update_time_for_job(update_time=update_time, sa_session=sa_session, supports_skip_locked=supports_skip_locked)
         params = {
             'job_id': self.id,
@@ -1546,7 +1551,7 @@ def update_output_states(self, supports_skip_locked):
         );
         ''']
         sa_session = object_session(self)
-        update_time = galaxy.model.orm.now.now()
+        update_time = now()
         self.update_hdca_update_time_for_job(update_time=update_time, sa_session=sa_session, supports_skip_locked=supports_skip_locked)
         params = {
             'job_id': self.id,
@@ -3499,7 +3504,7 @@ def datatype_for_extension(extension, datatypes_registry=None) -> "Data":
     return ret
 
 
-class DatasetInstance(_HasTable):
+class DatasetInstance(UsesCreateAndUpdateTime, _HasTable):
     """A base class for all 'dataset instances', HDAs, LDAs, etc"""
     states = Dataset.states
     conversion_messages = Dataset.conversion_messages
@@ -3552,9 +3557,6 @@ def peek(self):
     def peek(self, peek):
         self._peek = unicodify(peek, strip_null=True)
 
-    def update(self):
-        self.update_time = galaxy.model.orm.now.now()
-
     @property
     def ext(self):
         return self.extension
@@ -5480,7 +5482,7 @@ def _serialize(self, id_encoder, serialization_options):
         return rval
 
 
-class DatasetCollectionInstance(HasName):
+class DatasetCollectionInstance(HasName, UsesCreateAndUpdateTime):
 
     @property
     def state(self):
@@ -5723,6 +5725,8 @@ def to_dict(self, view='collection'):
             deleted=self.deleted,
             job_source_id=self.job_source_id,
             job_source_type=self.job_source_type,
+            create_time=self.create_time.isoformat(),
+            update_time=self.update_time.isoformat(),
             **self._base_to_dict(view=view)
         )
 
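With create_time and update_time now included in to_dict(), an API client can do cheap change detection by comparing timestamps across fetches. A sketch, assuming the ISO-8601 strings that isoformat() produces — this mirrors the assertion in the new API test further below:

from dateutil.parser import isoparse


def collection_changed(before: dict, after: dict) -> bool:
    # True when the server-side update_time advanced between two fetches.
    return isoparse(after['update_time']) > isoparse(before['update_time'])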
4 changes: 3 additions & 1 deletion lib/galaxy/tools/actions/__init__.py
@@ -639,6 +639,8 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current
             assert old_job.session_id == galaxy_session.id, f'({old_job.id}/{current_job.id}): Old session id ({old_job.session_id}) does not match rerun session id ({galaxy_session.id})'
         else:
             raise Exception(f'({old_job.id}/{current_job.id}): Remapping via the API is not (yet) supported')
+        # Start by hiding current job outputs before taking over the old job's (implicit) outputs.
+        current_job.hide_outputs(flush=False)
         # Duplicate PJAs before remap.
         for pjaa in old_job.post_job_actions:
             current_job.add_post_job_action(pjaa.post_job_action)
@@ -674,9 +676,9 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current
                     for job in hdca.implicit_collection_jobs.jobs:
                         if job.job_id == old_job.id:
                             job.job_id = current_job.id
+                    hdca.update()
             for jtoidca in old_job.output_dataset_collections:
                 jtoidca.dataset_collection.replace_failed_elements(remapped_hdas)
-            current_job.hide_outputs(flush=False)
         except Exception:
             log.exception('Cannot remap rerun dependencies.')
 
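A condensed sketch of the remap ordering after this change (function and parameter names are illustrative; the real logic lives in _remap_job_on_rerun): outputs are hidden up front, before any takeover, and each remapped HDCA is explicitly touched so its update_time moves even though only rows in other tables changed:

def remap_on_rerun(old_job, current_job, remapped_hdcas):
    current_job.hide_outputs(flush=False)  # 1. hide first, before the takeover
    for pjaa in old_job.post_job_actions:  # 2. duplicate PJAs before remap
        current_job.add_post_job_action(pjaa.post_job_action)
    for hdca in remapped_hdcas:  # 3. re-point implicit-collection jobs at the rerun
        for job in hdca.implicit_collection_jobs.jobs:
            if job.job_id == old_job.id:
                job.job_id = current_job.id
        hdca.update()  # 4. bump update_time so polling clients refresh the collection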
49 changes: 40 additions & 9 deletions lib/galaxy_test/api/test_jobs.py
@@ -6,6 +6,7 @@
 from operator import itemgetter
 
 import requests
+from dateutil.parser import isoparse
 
 from galaxy_test.api.test_tools import TestsTools
 from galaxy_test.base.api_asserts import assert_status_code_is_ok
@@ -268,18 +269,21 @@ def test_unhide_on_error(self):
                                                          assert_ok=False)
             assert dataset['visible']
 
+    def _run_map_over_error(self, history_id):
+        hdca1 = self.dataset_collection_populator.create_list_in_history(history_id, contents=[("sample1-1", "1 2 3")]).json()
+        inputs = {
+            'error_bool': 'true',
+            'dataset': {
+                'batch': True,
+                'values': [{'src': 'hdca', 'id': hdca1['id']}],
+            }
+        }
+        return self._run_detect_errors(history_id=history_id, inputs=inputs)
+
     @skip_without_tool("detect_errors_aggressive")
     def test_no_unhide_on_error_if_mapped_over(self):
         with self.dataset_populator.test_history() as history_id:
-            hdca1 = self.dataset_collection_populator.create_list_in_history(history_id, contents=[("sample1-1", "1 2 3")]).json()
-            inputs = {
-                'error_bool': 'true',
-                'dataset': {
-                    'batch': True,
-                    'values': [{'src': 'hdca', 'id': hdca1['id']}],
-                }
-            }
-            run_response = self._run_detect_errors(history_id=history_id, inputs=inputs)
+            run_response = self._run_map_over_error(history_id)
             job_id = run_response['jobs'][0]["id"]
             self.dataset_populator.wait_for_job(job_id)
             job = self.dataset_populator.get_job_details(job_id).json()
@@ -289,6 +293,33 @@ def test_no_unhide_on_error_if_mapped_over(self):
                                                          assert_ok=False)
             assert not dataset['visible']
 
+    def test_no_hide_on_rerun(self):
+        with self.dataset_populator.test_history() as history_id:
+            run_response = self._run_map_over_error(history_id)
+            job_id = run_response['jobs'][0]["id"]
+            self.dataset_populator.wait_for_job(job_id)
+            failed_hdca = self.dataset_populator.get_history_collection_details(
+                history_id=history_id,
+                content_id=run_response['implicit_collections'][0]['id'],
+                assert_ok=False,
+            )
+            first_update_time = failed_hdca['update_time']
+            assert failed_hdca['visible']
+            rerun_params = self._get(f"jobs/{job_id}/build_for_rerun").json()
+            inputs = rerun_params['state_inputs']
+            inputs['rerun_remap_job_id'] = job_id
+            rerun_response = self._run_detect_errors(history_id=history_id, inputs=inputs)
+            rerun_job_id = rerun_response['jobs'][0]["id"]
+            self.dataset_populator.wait_for_job(rerun_job_id)
+            # Verify the source hdca is still visible
+            hdca = self.dataset_populator.get_history_collection_details(
+                history_id=history_id,
+                content_id=run_response['implicit_collections'][0]['id'],
+                assert_ok=False,
+            )
+            assert hdca['visible']
+            assert isoparse(hdca['update_time']) > isoparse(first_update_time)
+
     @skip_without_tool('empty_output')
     def test_common_problems(self):
         with self.dataset_populator.test_history() as history_id:
