Commit cf62a1da authored by Dannon Baker

Merge remote-tracking branch 'upstream/release_18.01' into release_18.05

parents 5a97e4fc 71b56151
......@@ -726,7 +726,16 @@ class Configuration(object):
'qualname': 'COMPLIANCE'
}
if kwargs.get("log_destination", None):
log_destination = kwargs.get("log_destination", None)
if log_destination == "stdout":
LOGGING_CONFIG_DEFAULT['handlers']['console'] = {
'class': 'logging.StreamHandler',
'formatter': 'stack',
'level': 'DEBUG',
'stream': 'ext://sys.stdout',
'filters': ['stack']
}
elif log_destination:
LOGGING_CONFIG_DEFAULT['handlers']['console'] = {
'class': 'logging.FileHandler',
'formatter': 'stack',
......
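A minimal sketch of the branch above as a standalone helper. The FileHandler dict is truncated in the hunk, so the 'filename' key below is an assumption about how it continues; this is illustrative, not the full Configuration class.

# Minimal sketch of the log_destination branch above (the FileHandler entry
# is truncated in the hunk; the 'filename' key is an assumed continuation).
def console_handler_for(log_destination):
    if log_destination == "stdout":
        return {
            'class': 'logging.StreamHandler',
            'formatter': 'stack',
            'level': 'DEBUG',
            'stream': 'ext://sys.stdout',
            'filters': ['stack'],
        }
    elif log_destination:
        return {
            'class': 'logging.FileHandler',
            'formatter': 'stack',
            'level': 'DEBUG',
            'filename': log_destination,  # assumption, not shown in the hunk
            'filters': ['stack'],
        }
    return None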
......@@ -4224,7 +4224,7 @@ class WorkflowInvocation(UsesCreateAndUpdateTime, Dictifiable):
else:
raise Exception("Unknown output type encountered")
def to_dict(self, view='collection', value_mapper=None, step_details=False):
def to_dict(self, view='collection', value_mapper=None, step_details=False, legacy_job_state=False):
rval = super(WorkflowInvocation, self).to_dict(view=view, value_mapper=value_mapper)
if view == 'element':
steps = []
......@@ -4233,7 +4233,19 @@ class WorkflowInvocation(UsesCreateAndUpdateTime, Dictifiable):
v = step.to_dict(view='element')
else:
v = step.to_dict(view='collection')
steps.append(v)
if legacy_job_state:
step_jobs = step.jobs
if step_jobs:
for step_job in step_jobs:
v_clone = v.copy()
v_clone["state"] = step_job.state
v_clone["job_id"] = step_job.id
steps.append(v_clone)
else:
v["state"] = None
steps.append(v)
else:
steps.append(v)
rval['steps'] = steps
inputs = {}
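For a step mapped over a collection, the legacy_job_state branch above emits one step entry per job, each carrying that job's state and id while keeping the mapped-over step's outputs. An illustrative sketch of the resulting shape, with hypothetical ids and states:

# Hypothetical illustration of the legacy_job_state fan-out for a step
# mapped over two jobs (ids and states are made up).
step_dict = {"id": "step1", "workflow_step_id": "ws1", "order_index": 1}
jobs = [("job1", "ok"), ("job2", "running")]  # (job_id, state) pairs

legacy_steps = [dict(step_dict, job_id=job_id, state=state) for job_id, state in jobs]

assert len(legacy_steps) == 2            # one entry per job
assert legacy_steps[0]["state"] == "ok"  # job state, not invocation step state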
......@@ -4375,6 +4387,10 @@ class WorkflowInvocationStep(Dictifiable):
# Following no longer makes sense...
# rval['state'] = self.job.state if self.job is not None else None
if view == 'element':
jobs = []
for job in self.jobs:
jobs.append(job.to_dict())
outputs = {}
for output_assoc in self.output_datasets:
name = output_assoc.output_name
......@@ -4394,6 +4410,7 @@ class WorkflowInvocationStep(Dictifiable):
rval['outputs'] = outputs
rval['output_collections'] = output_collections
rval['jobs'] = jobs
return rval
......
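With this change, an element-view invocation step dict carries a jobs list alongside its outputs. A sketch of the resulting shape; all values below are hypothetical:

# Hypothetical shape of an element-view invocation step after this change
# (ids are placeholders, the jobs entries come from job.to_dict() above).
step_dict = {
    "id": "f2db41e1fa331b3e",  # encoded step id (made up)
    "order_index": 2,
    "outputs": {"out_file1": {"src": "hda", "id": "..."}},
    "output_collections": {},
    "jobs": [{"id": "...", "state": "ok"}],
}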
......@@ -714,12 +714,30 @@ class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin, UsesAnn
:param invocation_id: the invocation id (required)
:type invocation_id: str
:param step_details: fetch details about individual invocation steps
and populate a steps attribute in the resulting
dictionary. Defaults to false.
:type step_details: bool
:param legacy_job_state: If step_details is true, and this is set to true,
populate the invocation step state with the job state
instead of the invocation step state. This will also
produce one step per job for mapped-over collections,
mimicking the older behavior. Partially scheduled steps
may provide incomplete information, and when this is set
the listed step outputs are the mapped-over step outputs
rather than the individual job outputs - at least for now.
:type legacy_job_state: bool
:raises: exceptions.MessageException, exceptions.ObjectNotFound
"""
decoded_workflow_invocation_id = self.decode_id(invocation_id)
workflow_invocation = self.workflow_manager.get_invocation(trans, decoded_workflow_invocation_id)
if workflow_invocation:
return self.__encode_invocation(trans, workflow_invocation, step_details=kwd.get('step_details', False))
step_details = util.string_as_bool(kwd.get('step_details', 'False'))
legacy_job_state = util.string_as_bool(kwd.get('legacy_job_state', 'False'))
return self.__encode_invocation(trans, workflow_invocation, step_details=step_details, legacy_job_state=legacy_job_state)
return None
@expose_api
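A hedged usage example of the updated endpoint, assuming a Galaxy server at GALAXY_URL and a valid API key; the URL shape follows the usage route exercised in the tests below, and all ids are placeholders.

# Hypothetical client call; GALAXY_URL, API_KEY, workflow_id and
# invocation_id are placeholders for a real instance.
import requests

GALAXY_URL = "http://localhost:8080"
API_KEY = "your-api-key"
workflow_id = "..."
invocation_id = "..."

resp = requests.get(
    "%s/api/workflows/%s/usage/%s" % (GALAXY_URL, workflow_id, invocation_id),
    params={"key": API_KEY, "step_details": "true", "legacy_job_state": "true"},
)
resp.raise_for_status()
for step in resp.json()["steps"]:
    print(step["order_index"], step.get("state"), step.get("job_id"))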
......@@ -809,9 +827,9 @@ class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin, UsesAnn
def __get_stored_workflow(self, trans, workflow_id):
return self.workflow_manager.get_stored_workflow(trans, workflow_id)
def __encode_invocation(self, trans, invocation, view="element", step_details=False):
def __encode_invocation(self, trans, invocation, view="element", step_details=False, legacy_job_state=False):
return self.encode_all_ids(
trans,
invocation.to_dict(view, step_details=step_details),
invocation.to_dict(view, step_details=step_details, legacy_job_state=legacy_job_state),
True
)
......@@ -45,7 +45,7 @@ mapping:
cookie_path:
type: str
default: null
default: ''
required: false
desc: |
If proxy-prefix is enabled and you're running more than one Galaxy instance
......@@ -109,7 +109,7 @@ mapping:
database_template:
type: str
default: null
default: ''
required: false
desc: |
If auto-creating a postgres database on startup - it can be based on an existing
......@@ -342,7 +342,7 @@ mapping:
watch_tools:
type: str
default: false
default: 'false'
required: false
desc: |
Set to True to enable monitoring of tools and tool directories
......@@ -369,7 +369,7 @@ mapping:
containers_resolvers_config_file:
type: str
default: null
default: ''
required: false
desc: |
Container resolvers configuration (beta). Setup a file describing container
......@@ -465,7 +465,7 @@ mapping:
watch_tool_data_dir:
type: str
default: false
default: 'false'
required: false
desc: |
Set to True to enable monitoring of the tool_data and shed_tool_data_path
......@@ -617,7 +617,7 @@ mapping:
Number of checks to execute if check_job_script_integrity is enabled.
check_job_script_integrity_sleep:
type: str
type: float
default: .25
required: false
desc: |
......@@ -684,7 +684,7 @@ mapping:
smtp_server:
type: str
default: null
default: ''
required: false
desc: |
Galaxy sends mail for various things: subscribing users to the mailing list
......@@ -695,7 +695,7 @@ mapping:
smtp_username:
type: str
default: null
default: ''
required: false
desc: |
If your SMTP server requires a username and password, you can provide them
......@@ -704,7 +704,7 @@ mapping:
smtp_password:
type: str
default: null
default: ''
required: false
desc: |
If your SMTP server requires a username and password, you can provide them
......@@ -729,7 +729,7 @@ mapping:
error_email_to:
type: str
default: null
default: ''
required: false
desc: |
Datasets in an error state include a link to report the error. Those reports
......@@ -739,7 +739,7 @@ mapping:
email_from:
type: str
default: null
default: ''
required: false
desc: |
Email address to use in the 'From' field when sending emails for
......@@ -823,7 +823,7 @@ mapping:
ga_code:
type: str
default: null
default: ''
required: false
desc: |
You can enter tracking code here to track visitor's behavior
......@@ -875,7 +875,7 @@ mapping:
message_box_content:
type: str
default: null
default: ''
required: false
desc: |
Show a message box under the masthead.
......@@ -889,7 +889,7 @@ mapping:
brand:
type: str
default: null
default: ''
required: false
desc: |
Append "/{brand}" to the "Galaxy" text in the masthead.
......@@ -977,21 +977,21 @@ mapping:
biostar_url:
type: str
default: null
default: ''
required: false
desc: |
Enable integration with a custom Biostar instance.
biostar_key_name:
type: str
default: null
default: ''
required: false
desc: |
Enable integration with a custom Biostar instance.
biostar_key:
type: str
default: null
default: ''
required: false
desc: |
Enable integration with a custom Biostar instance.
......@@ -1048,7 +1048,7 @@ mapping:
terms_url:
type: str
default: null
default: ''
required: false
desc: |
The URL linked by the "Terms and Conditions" link in the "Help" menu, as well
......@@ -1162,7 +1162,7 @@ mapping:
nginx_x_accel_redirect_base:
type: str
default: null
default: ''
required: false
desc: |
The same download handling can be done by nginx using X-Accel-Redirect. This
......@@ -1194,7 +1194,7 @@ mapping:
nginx_upload_store:
type: str
default: null
default: ''
required: false
desc: |
nginx can also handle file uploads (user-to-Galaxy) via nginx_upload_module.
......@@ -1204,7 +1204,7 @@ mapping:
nginx_upload_path:
type: str
default: null
default: ''
required: false
desc: |
This value overrides the action set on the file upload form, e.g. the web
......@@ -1213,7 +1213,7 @@ mapping:
nginx_upload_job_files_store:
type: str
default: null
default: ''
required: false
desc: |
Galaxy can also use nginx_upload_module to receive files staged out upon job
......@@ -1223,7 +1223,7 @@ mapping:
nginx_upload_job_files_path:
type: str
default: null
default: ''
required: false
desc: |
Galaxy can also use nginx_upload_module to receive files staged out upon job
......@@ -1338,7 +1338,7 @@ mapping:
dynamic_proxy_golang_api_key:
type: str
default: null
default: ''
required: false
desc: |
The golang proxy uses a RESTful HTTP API for communication with Galaxy
......@@ -1365,7 +1365,7 @@ mapping:
logging:
type: map
default: galaxy.config.LOGGING_CONFIG_DEFAULT
allowempty: True
desc: |
Controls where and how the server logs messages. If unset, the default is to log all messages to standard
output at the level defined by the `log_level` configuration option. Configuration is described in the
......@@ -1458,7 +1458,7 @@ mapping:
allowed_origin_hostnames:
type: str
default: null
default: ''
required: false
desc: |
Return a Access-Control-Allow-Origin response header that matches the Origin
......@@ -1555,7 +1555,7 @@ mapping:
sentry_dsn:
type: str
default: null
default: ''
required: false
desc: |
Log to Sentry
......@@ -1657,7 +1657,7 @@ mapping:
library_import_dir:
type: str
default: null
default: ''
required: false
desc: |
Add an option to the library upload form which allows administrators to
......@@ -1665,7 +1665,7 @@ mapping:
user_library_import_dir:
type: str
default: null
default: ''
required: false
desc: |
Add an option to the library upload form which allows authorized
......@@ -1684,7 +1684,7 @@ mapping:
user_library_import_symlink_whitelist:
type: str
default: null
default: ''
required: false
desc: |
For security reasons, users may not import any files that actually lie
......@@ -1738,7 +1738,7 @@ mapping:
tool_name_boost:
type: float
default: 9
default: 9.0
required: false
desc: |
Boosts are used to customize this instance's toolbox search.
......@@ -1748,7 +1748,7 @@ mapping:
tool_section_boost:
type: float
default: 3
default: 3.0
required: false
desc: |
Boosts are used to customize this instance's toolbox search.
......@@ -1758,7 +1758,7 @@ mapping:
tool_description_boost:
type: float
default: 2
default: 2.0
required: false
desc: |
Boosts are used to customize this instance's toolbox search.
......@@ -1768,7 +1768,7 @@ mapping:
tool_label_boost:
type: float
default: 1
default: 1.0
required: false
desc: |
Boosts are used to customize this instance's toolbox search.
......@@ -1778,7 +1778,7 @@ mapping:
tool_stub_boost:
type: float
default: 5
default: 5.0
required: false
desc: |
Boosts are used to customize this instance's toolbox search.
......@@ -1864,7 +1864,7 @@ mapping:
remote_user_maildomain:
type: str
default: null
default: ''
required: false
desc: |
If use_remote_user is enabled and your external authentication
......@@ -1896,7 +1896,7 @@ mapping:
remote_user_logout_href:
type: str
default: null
default: ''
required: false
desc: |
If use_remote_user is enabled, you can set this to a URL that will log your
......@@ -1923,7 +1923,7 @@ mapping:
admin_users:
type: str
default: null
default: ''
required: false
desc: |
Administrative users - set this to a comma-separated list of valid Galaxy
......@@ -2154,7 +2154,7 @@ mapping:
invocation to schedule indefinitely. The default corresponds to 1 month.
maximum_workflow_jobs_per_scheduling_iteration:
type: str
type: int
default: -1
required: false
desc: |
......@@ -2227,7 +2227,7 @@ mapping:
api_allow_run_as:
type: str
default: null
default: ''
required: false
desc: |
Optional list of email addresses of API users who can make calls on behalf of
......@@ -2273,7 +2273,7 @@ mapping:
ftp_upload_dir:
type: str
default: null
default: ''
required: false
desc: |
Enable Galaxy's "Upload via FTP" interface. You'll need to install and
......@@ -2284,7 +2284,7 @@ mapping:
ftp_upload_site:
type: str
default: null
default: ''
required: false
desc: |
This should be the hostname of your FTP server, which will be provided to
......@@ -2564,7 +2564,7 @@ mapping:
environment_setup_file:
type: str
default: null
default: ''
required: false
desc: |
File to source to set up the environment when running jobs. By default, the
......@@ -2719,3 +2719,12 @@ mapping:
required: false
desc: |
persistent_communication_rooms is a comma-separated list of rooms that should be always available.
use_pbkdf2:
type: bool
default: true
required: false
desc: |
Allow disabling pbkdf2 hashing of passwords for legacy situations.
This should normally be left enabled unless there is a specific
reason to disable it.
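A minimal sketch of PBKDF2 hashing with the standard library, to illustrate what use_pbkdf2 keeps enabled; the salt size and iteration count here are illustrative, not Galaxy's actual parameters.

# Illustrative PBKDF2 password hashing (parameters are examples only).
import hashlib
import os

def hash_password(password, iterations=100000):
    salt = os.urandom(16)
    digest = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, iterations)
    return salt, digest

salt, digest = hash_password("secret")
# Verification recomputes the digest with the stored salt.
assert digest == hashlib.pbkdf2_hmac('sha256', b"secret", salt, 100000)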
......@@ -199,8 +199,8 @@ class BaseWorkflowsApiTestCase(api.ApiTestCase):
workflow_inputs = workflow_show_resposne.json()["inputs"]
return workflow_inputs
def _invocation_details(self, workflow_id, invocation_id):
invocation_details_response = self._get("workflows/%s/usage/%s" % (workflow_id, invocation_id))
def _invocation_details(self, workflow_id, invocation_id, **kwds):
invocation_details_response = self._get("workflows/%s/usage/%s" % (workflow_id, invocation_id), data=kwds)
self._assert_status_code_is(invocation_details_response, 200)
invocation_details = invocation_details_response.json()
return invocation_details
......@@ -2775,15 +2775,108 @@ steps:
invocation_id = usage["id"]
usage_details = self._invocation_details(workflow_id, invocation_id)
# Assert some high-level things about the structure of data returned.
self._assert_has_keys(usage_details, "inputs", "steps")
self._assert_has_keys(usage_details, "inputs", "steps", "workflow_id")
# The stored_workflow vs. workflow distinction makes the following assertion false.
# assert usage_details["workflow_id"] == workflow_id
# Wait for the invocation to be fully scheduled, so we have details on all steps.
self._wait_for_invocation_state(workflow_id, invocation_id, 'scheduled')
usage_details = self._invocation_details(workflow_id, invocation_id)
invocation_steps = usage_details["steps"]
for step in invocation_steps:
self._assert_has_keys(step, "workflow_step_id", "order_index", "id")
an_invocation_step = invocation_steps[0]
step_id = an_invocation_step["id"]
step_response = self._get("workflows/%s/usage/%s/steps/%s" % (workflow_id, invocation_id, step_id))
self._assert_status_code_is(step_response, 200)
self._assert_has_keys(step_response.json(), "id", "order_index")
invocation_input_step, invocation_tool_step = None, None
for invocation_step in invocation_steps:
self._assert_has_keys(invocation_step, "workflow_step_id", "order_index", "id")
order_index = invocation_step["order_index"]
assert order_index in [0, 1, 2], order_index
if order_index == 0:
invocation_input_step = invocation_step
elif order_index == 2:
invocation_tool_step = invocation_step
# Tool steps have non-null job_ids (deprecated though they may be)
assert invocation_input_step.get("job_id", None) is None
job_id = invocation_tool_step.get("job_id", None)
assert job_id is not None
invocation_tool_step_id = invocation_tool_step["id"]
invocation_tool_step_response = self._get("workflows/%s/invocations/%s/steps/%s" % (workflow_id, invocation_id, invocation_tool_step_id))
self._assert_status_code_is(invocation_tool_step_response, 200)
self._assert_has_keys(invocation_tool_step_response.json(), "id", "order_index", "job_id")
assert invocation_tool_step_response.json()["job_id"] == job_id
def test_invocation_with_collection_mapping(self):
workflow_id, invocation_id = self._run_mapping_workflow()
usage_details = self._invocation_details(workflow_id, invocation_id)
# Assert some high-level things about the structure of data returned.
self._assert_has_keys(usage_details, "inputs", "steps", "workflow_id")
invocation_steps = usage_details["steps"]
invocation_input_step, invocation_tool_step = None, None
for invocation_step in invocation_steps:
self._assert_has_keys(invocation_step, "workflow_step_id", "order_index", "id")
order_index = invocation_step["order_index"]
assert order_index in [0, 1]
if invocation_step["order_index"] == 0:
assert invocation_input_step is None
invocation_input_step = invocation_step
else:
assert invocation_tool_step is None
invocation_tool_step = invocation_step
# Tool steps have non-null job_ids (deprecated though they may be)
assert invocation_input_step.get("job_id", None) is None
assert invocation_tool_step.get("job_id", None) is None
assert invocation_tool_step["state"] == "scheduled"
usage_details = self._invocation_details(workflow_id, invocation_id, legacy_job_state="true")
# Assert some high-level things about the structure of data returned.
self._assert_has_keys(usage_details, "inputs", "steps", "workflow_id")
invocation_steps = usage_details["steps"]
invocation_input_step = None
invocation_tool_steps = []
for invocation_step in invocation_steps:
self._assert_has_keys(invocation_step, "workflow_step_id", "order_index", "id")
order_index = invocation_step["order_index"]
assert order_index in [0, 1]
if invocation_step["order_index"] == 0:
assert invocation_input_step is None
invocation_input_step = invocation_step
else:
invocation_tool_steps.append(invocation_step)
assert len(invocation_tool_steps) == 2
assert invocation_tool_steps[0]["state"] == "ok"
def _run_mapping_workflow(self):
history_id = self.dataset_populator.new_history()
summary = self._run_jobs("""
class: GalaxyWorkflow
inputs:
- type: collection
label: input_c
steps:
- label: cat1
tool_id: cat1
state:
input1:
$link: input_c
test_data:
input_c:
type: list
elements:
- identifier: i1
content: "0"
- identifier: i2
content: "1"
""", history_id=history_id, wait=True, assert_ok=True)
workflow_id = summary.workflow_id
invocation_id = summary.invocation_id
return workflow_id, invocation_id
@skip_without_tool("cat1")
def test_invocations_accessible_imported_workflow(self):
......