Commit ac7efcfa authored by Celine Noirot

Upgrade jflow v3-git

parent f5cc1b8e
@@ -22,8 +22,6 @@ import os
 from jflow.config_reader import JFlowConfigReader

-# Define some Error classes
-class InvalidFormatError(Exception): pass

 jflowconf = JFlowConfigReader()
...
@@ -27,7 +27,66 @@ from weaver.options import Options
 from weaver.abstraction import Abstraction

-class MultiMap(Abstraction):
+class AbstractionWargs(Abstraction):
+    """ The base Abstraction class.
+
+    **Positional Arguments**:
+
+    - `function` -- Function to apply (Function, string, string format)
+
+    **Keyword Arguments**:
+
+    - `inputs`   -- Inputs to function
+    - `outputs`  -- Output of function
+    - `includes` -- Files to include for each task.
+    - `native`   -- Whether or not to use native abstraction if available.
+    - `group`    -- Number of tasks to inline.
+    - `collect`  -- Whether or not to mark files for garbage collection.
+    - `local`    -- Whether or not to force local execution.
+
+    `inputs` and `includes` are parsed using
+    :func:`~weaver.data.parse_input_list` and must be in a form acceptable to
+    that function. Likewise, `outputs` is parsed by
+    :func:`~weaver.data.parse_output_list` and `function` is parsed by
+    :func:`~weaver.function.parse_function`.
+    """
+    Counter = None
+
+    def __init__(self, function, inputs=None, outputs=None, includes=None,
+                 native=False, group=None, collect=False, local=False, arguments=None):
+        Abstraction.__init__(self, function, inputs, outputs, includes, native, group, collect, local)
+        self.arguments = arguments
+
+
+class Map(AbstractionWargs):
+    """ Weaver Map Abstraction.
+
+    This Abstraction enables the following pattern of execution:
+
+        Map(f, inputs, outputs)
+
+    In this case, the :class:`Function` *f* is applied to each item in
+    *inputs* to generate the corresponding *outputs*.
+    """
+    Counter = itertools.count()
+
+    @cache_generation
+    def _generate(self):
+        with self:
+            debug(D_ABSTRACTION, 'Generating Abstraction {0}'.format(self))
+
+            function = parse_function(self.function)
+            inputs   = parse_input_list(self.inputs)
+            outputs  = parse_output_list(self.outputs, inputs)
+            includes = parse_input_list(self.includes)
+
+            for i, o in zip(inputs, outputs):
+                with Options(local=self.options.local, collect=[i] if self.collect else None):
+                    yield function(i, o, self.arguments, includes)
+
+
+class MultiMap(AbstractionWargs):
     """ Weaver MultiMap Abstraction.

     This Abstraction enables the following pattern of execution:
@@ -110,5 +169,5 @@ class MultiMap(Abstraction):
             iteration_outputs = parse_output_list(self.outputs, input_pattern)

             with Options(local=self.options.local):
-                yield function(iteration_inputs, iteration_outputs, None, includes)
+                yield function(iteration_inputs, iteration_outputs, self.arguments, includes)
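For orientation, here is a minimal, self-contained sketch of the pattern the new `arguments` field enables: the abstraction-level value is passed through to every generated task alongside its own input/output pair. `FakeMap` and `fake_function` are stand-ins invented for this illustration, not part of weaver or jflow.

    # Illustration only: simplified stand-ins for the weaver objects above.
    def fake_function(inputs, outputs, arguments, includes):
        # a real Function would build a command line; we just echo the call shape
        return "cmd %s %s %s" % (inputs, outputs, arguments or "")

    class FakeMap:
        def __init__(self, function, inputs, outputs, arguments=None):
            self.function, self.inputs, self.outputs = function, inputs, outputs
            self.arguments = arguments
        def generate(self):
            # one task per input/output pair, each receiving the shared arguments
            for i, o in zip(self.inputs, self.outputs):
                yield self.function(i, o, self.arguments, None)

    tasks = list(FakeMap(fake_function, ["a.fq"], ["a.sam"], arguments="-k 19").generate())
    print(tasks)  # ['cmd a.fq a.sam -k 19']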
#
# Copyright (C) 2015 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

import argparse


class JflowArgumentParser(argparse.ArgumentParser):

    def _read_args_from_files(self, arg_strings):
        # expand arguments referencing files
        new_arg_strings = []
        for arg_string in arg_strings:
            # skip comments and empty lines
            if not arg_string.startswith("#") and arg_string:
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                # replace arguments referencing files with the file content
                else:
                    try:
                        with open(arg_string[1:]) as args_file:
                            arg_strings = []
                            # give convert_arg_line_to_args the whole list of lines instead of one line at a time
                            for arg in self.convert_arg_line_to_args(args_file.read().splitlines()):
                                arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                    except OSError as err:
                        self.error(str(err))
        # return the modified argument list
        return new_arg_strings
\ No newline at end of file
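A hedged usage sketch of the parser above. The option name, the argument-file contents and the `convert_arg_line_to_args` stand-in are assumptions for illustration; the real jflow code presumably supplies its own line converter that accepts the whole list of lines, as the override expects.

    # Minimal, assumed companion override: flatten the list of lines,
    # dropping comments and blanks (the real jflow converter is not shown here).
    class DemoParser(JflowArgumentParser):
        def convert_arg_line_to_args(self, arg_lines):
            return [l for l in arg_lines if l and not l.startswith("#")]

    import tempfile, os
    with tempfile.NamedTemporaryFile("w", suffix=".args", delete=False) as fh:
        fh.write("# this comment line is skipped\n--threads\n4\n")

    parser = DemoParser(fromfile_prefix_chars="@")
    parser.add_argument("--threads", type=int, default=1)
    print(parser.parse_args(["@" + fh.name]).threads)  # 4
    os.unlink(fh.name)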
@@ -16,6 +16,7 @@
 #

 import os
+import re
 import sys
 import inspect
 import logging
@@ -29,7 +30,7 @@ class JFlowConfigReader(object):
     """
     CONFIG_FILE_PATH = "../../application.properties"
+    USER_PATTERN = re.compile("###USER###")

     def __init__(self):
         """
         """
@@ -37,12 +38,18 @@ class JFlowConfigReader(object):
         self.reader.read(os.path.join(os.path.dirname(inspect.getfile(self.__class__)), self.CONFIG_FILE_PATH))

     def get_tmp_directory(self):
-        if not os.path.isdir(self.reader.get("storage", "tmp_directory").replace("###USER###",os.getenv("USER"))):
-            os.makedirs(self.reader.get("storage", "tmp_directory").replace("###USER###",os.getenv("USER")), 0o751)
-        return self.reader.get("storage", "tmp_directory").replace("###USER###",os.getenv("USER"))
+        tmp_dir = self.reader.get("storage", "tmp_directory")
+        if self.USER_PATTERN.search(self.reader.get("storage", "tmp_directory")) is not None:
+            tmp_dir = tmp_dir.replace("###USER###", os.getenv("USER"))
+        if not os.path.isdir(tmp_dir):
+            os.makedirs(tmp_dir, 0o751)
+        return tmp_dir

     def get_work_directory(self):
-        return self.reader.get("storage", "work_directory").replace("###USER###",os.getenv("USER"))
+        if self.USER_PATTERN.search(self.reader.get("storage", "work_directory")) is None:
+            return self.reader.get("storage", "work_directory")
+        else:
+            return self.reader.get("storage", "work_directory").replace("###USER###", os.getenv("USER"))

     def get_exec(self, software):
         try:
@@ -59,9 +66,13 @@ class JFlowConfigReader(object):
         @return: the path to the log file
         """
         try:
-            return self.reader.get('storage', 'log_file').replace("###USER###",os.getenv("USER"))
+            if self.USER_PATTERN.search(self.reader.get("storage", "log_file")) is None:
+                return self.reader.get('storage', 'log_file')
+            else:
+                return self.reader.get('storage', 'log_file').replace("###USER###", os.getenv("USER"))
         except:
             raise NoOptionError("Failed when parsing the config file, no section logging found!")

     def get_makeflow_path(self):
         try:
@@ -118,11 +129,24 @@ class JFlowConfigReader(object):
             return self.reader.get("components", component_class+".batch_options")
         except:
             return ""

+    def get_component_modules(self, component_class):
+        try:
+            return self.reader.get("components", component_class+".modules").split(",")
+        except:
+            return []

     def get_workflow_group(self, workflow_class):
         try:
             return self.reader.get("workflows", workflow_class+".group")
         except:
             return ""
+
+    def get_browse_root_dir(self):
+        return self.reader.get("storage", "browse_root_dir")
+
+    def get_debug(self):
+        try:
+            return self.reader.get("global", "debug") == "True"
+        except NoOptionError:
+            return False
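A hedged illustration of the configuration these accessors read. The section and key names are inferred from the accessor names and the values are invented, so treat this as a sketch of the `###USER###` substitution rather than documented `application.properties` content.

    # Illustrative application.properties excerpt (assumed, not from the commit):
    #   [storage]
    #   tmp_directory = /work/###USER###/tmp
    #   browse_root_dir = /data
    #   [components]
    #   BWA.modules = bioinfo/bwa,bioinfo/samtools
    #   [global]
    #   debug = True
    #
    # The ###USER### placeholder is only substituted when it is present:
    import os, re

    USER_PATTERN = re.compile("###USER###")
    raw = "/work/###USER###/tmp"
    user = os.getenv("USER", "alice")  # "alice" is a fallback for the sketch
    tmp_dir = raw.replace("###USER###", user) if USER_PATTERN.search(raw) else raw
    print(tmp_dir)  # e.g. /work/alice/tmp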
#
# Copyright (C) 2015 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class InvalidFormatError(Exception):
    pass


class RuleException(Exception):
    pass


class RuleIgnore(Exception):
    pass
@@ -15,7 +15,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #

-import sys, re
+import re

 class GFF3Record:
     """
@@ -171,7 +171,7 @@ class GFF3IO:
         for line in self._handle:
             line = line.rstrip()
             self._line += 1
-            if line.startswith('#') :
+            if line.startswith('#') or line == "":
                 continue
             try:
                 gff_record = GFF3Record.fromGff(line)
...
@@ -26,7 +26,6 @@ __status__ = 'beta'
 from collections import namedtuple
 import sys
 import struct
-import os
 import io

 str = str
 from codecs import getreader, getwriter
...
@@ -51,7 +51,7 @@ def robust_rmtree(path, logger=None, max_retries=6):
             shutil.rmtree(path)

 def display_error_message(msg):
-    sys.stderr.write("\033[91mError: "+msg+"\n\033[0m")
+    sys.stderr.write("\033[91mError: "+msg+"\033[0m\n")
     sys.exit(1)

 def display_info_message(msg, with_exit=False):
...
@@ -84,7 +84,7 @@ class WorkflowsManager(object):
         for ifunction in inspect.getmembers(obj, predicate=inspect.isfunction):
             if ifunction[0] == function:
                 # try to build the workflow
-                try:
+                #try:
                     select_workflow = True
                     inst = obj(function=function)
                     if filter_groups :
@@ -93,7 +93,7 @@ class WorkflowsManager(object):
                     if select_workflow:
                         wf_instances.append(inst)
                         wf_methodes.append(function)
-                except: pass
+                #except: pass
         return [wf_instances, wf_methodes]

     def rerun_workflow(self, workflow_id):
@@ -109,7 +109,7 @@ class WorkflowsManager(object):
         # Update the workflow in the cache
         self._dump_workflows([workflow])
         return workflow

     def run_workflow(self, workflow_class, args, function="process"):
         # Load all modules within the workflow module
         for importer, modname, ispkg in pkgutil.iter_modules(workflows.__path__, workflows.__name__ + "."):
@@ -194,6 +194,20 @@ class WorkflowsManager(object):
             if class_name == workflow_class:
                 return obj()
         return None

+    def get_workflow_by_name(self, workflow_name):
+        """
+        Get a workflow by its name, case insensitive.
+        :param workflow_name: the workflow name
+        :return: the workflow object, or None if no class matches
+        """
+        for importer, modname, ispkg in pkgutil.iter_modules(workflows.__path__, workflows.__name__ + "."):
+            __import__(modname)
+            # Search for Workflow classes
+            for class_name, obj in inspect.getmembers(sys.modules[modname], inspect.isclass):
+                if class_name.lower() == workflow_name.lower():
+                    return obj()
+        return None
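A small usage sketch of the new case-insensitive lookup; the manager construction and the workflow name are illustrative assumptions, not taken from the commit.

    # Illustration only: resolve a workflow class by name, ignoring case.
    manager = WorkflowsManager()                        # assumed default construction
    wf = manager.get_workflow_by_name("downloadworkflow")
    if wf is not None:
        print(wf.__class__.__name__)                    # e.g. DownloadWorkflow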
     def get_workflow(self, workflow_id):
         rworkflow_id = utils.get_nb_string(workflow_id)
...
@@ -40,7 +40,15 @@ class BasicNG6Workflow (Workflow):

     def __init__(self, args={}, id=None, function= "process"):
         Workflow.__init__(self, args, id, function)
         self.add_parameter("admin_login", "Who is the project administrator", required = True, type = 'ng6userlogin', display_name="Admin login")

+    def get_description(self):
+        """
+        Return the workflow description; has to be implemented by subclasses.
+        """
+        return "BasicNG6Workflow"
+
+    def define_parameters(self, function="process"):
+        pass

     def add_component(self, component_name, args=[], kwargs={}, component_prefix="default", parent=None, addto="run"):
         # first build and check if this component is OK
         if component_name in self.internal_components or component_name in self.external_components:
@@ -127,7 +135,15 @@ class DownloadWorkflow(Workflow):
         self.add_parameter_list('data_id', 'Ids of a run from which rawdata will be retrieved', type = 'existingrun')
         self.add_parameter_list('run_id', 'Ids of run from which all data will be retrieved', type = 'existingrun')
         self.add_parameter_list('analysis_id', 'Ids of analysis to retrieve', type = 'existinganalysis')

+    def get_description(self):
+        """
+        Return the workflow description; has to be implemented by subclasses.
+        """
+        return "DownloadWorkflow"
+
+    def define_parameters(self, function="process"):
+        pass

 class NG6Workflow (BasicNG6Workflow):

     def __init__(self, args={}, id=None, function= "process"):
@@ -153,7 +169,7 @@ class NG6Workflow (BasicNG6Workflow):
         self.add_parameter("run_type", "What type of data is it (1 lane, 1 region)", flag = "--type", required = True, display_name="Type", group="Run information")

     def __add_sample_parameters__(self):
-        self.add_multiple_parameter_list("input_sample", "Definition of a sample", flag="--sample", required = True, group="Sample description")
+        self.add_multiple_parameter_list("input_sample", "Definition of a sample", flag="--sample", group="Sample description")  # required = True,  # TO CHECK: casava workflow, not required when a casava dir is given
         self.add_parameter("sample_id", "The uniq identifier of the sample", type="nospacestr", add_to = "input_sample")
         self.add_parameter("sample_name", "A descriptive name for the sample", type="nospacestr", add_to = "input_sample")
         self.add_parameter("sample_description", "A brief description of the sample", add_to = "input_sample")
@@ -254,7 +270,7 @@ class NG6Workflow (BasicNG6Workflow):
         elif self.project:
             self.project.sync()

 def get_files_from_casava(casava_directory, project_name, lane_number):
     """
     Retrieve all fastq files of a specific project and lane number from a given casava directory
@@ -324,7 +340,7 @@ class CasavaNG6Workflow(NG6Workflow):

     def __add_sample_parameters__(self):
         self.add_multiple_parameter('casava', 'Provide the options to retrieve samples from a CASAVA directory', group="Sample description")
         self.add_input_directory("directory", "Path to the CASAVA directory to use", required=True, get_files_fn=get_files_from_casava, add_to="casava" )
-        self.add_parameter("lane", "The lane number to be retrieved from the casava directory", required=True, type='int', add_to="casava")
+        self.add_parameter("lane", "The lane number to be retrieved from the casava directory", type='int', add_to="casava")  # required=True,
         self.add_parameter('project', 'The name of the project to retrieve in casava directory. The default name is the name of the nG6 project',add_to="casava")
         self.add_parameter('mismatch_index', 'Set this value to true if the index sequence in the sample fastq files allows at least 1 mismatch',
                            type ='bool', add_to="casava")
@@ -333,7 +349,7 @@ class CasavaNG6Workflow(NG6Workflow):
         NG6Workflow.__add_sample_parameters__(self)

-        self.add_exclusion_rule("casava", "input_sample") #TODO exclude
+        self.add_exclusion_rule("casava", "input_sample")
         self.add_parameter("compression", "How should the data be compressed once archived", choices= [ "none", "gz", "bz2"], default = "none")
         self.add_parameter("keep_reads", "Keep or discard reads which pass the illumina filter. 'all' option will keep all reads", flag = "--keep",
@@ -536,4 +552,4 @@ class CasavaNG6Workflow(NG6Workflow):
         return fastqilluminafilter, filtered_read1_files, filtered_read2_files, saved_files
\ No newline at end of file
@@ -97,7 +97,6 @@ class Run(object):
         """
         nb_seq, full_size = 0, 0
         for file in self.raw_files:
-            print("run process_raw_files " + file)
             # Get nb_seq and full_size values
             reader = seqio.SequenceReader(file)
             for id, desc, seq, qualities in reader:
...
@@ -185,7 +185,7 @@ class ShellFunction(ScriptFunction):
     }
     SHELL_DEFAULT = 'sh'

-    def __init__(self, source, shell=None, executable=None, cmd_format=None):
+    def __init__(self, source, shell=None, executable=None, cmd_format=None, modules=[]):
         if shell is None or not os.path.isabs(shell):
             if shell not in ShellFunction.SHELL_TABLE:
                 shell = ShellFunction.SHELL_DEFAULT
@@ -193,7 +193,10 @@ class ShellFunction(ScriptFunction):
         else:
             shell_path = shell
             shell = os.path.basename(shell)
-        source = '#!%s\n' % shell_path + source
+        source = '#!%s\n' % shell_path + \
+                 ("\n".join(("module load " + module) for module in modules) if len(modules) > 0 else "") + "\n" + \
+                 source + \
+                 "\n" + ("\n".join(("module unload " + module) for module in modules) if len(modules) > 0 else "")
         ScriptFunction.__init__(self, source, executable, cmd_format)
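To make the new `modules` hook concrete, the sketch below re-evaluates the wrapping expression from the hunk above with illustrative values; the `module load`/`module unload` commands assume an Environment Modules system is available on the execution hosts, and the module names and command body are invented.

    # Illustration only: reproduce the source-wrapping expression from the diff.
    shell_path = "/bin/sh"
    modules = ["bioinfo/bwa", "bioinfo/samtools"]   # illustrative module names
    body = "bwa mem ref.fa $1 > $2"                 # illustrative command body

    source = '#!%s\n' % shell_path + \
             ("\n".join(("module load " + m) for m in modules) if len(modules) > 0 else "") + "\n" + \
             body + \
             "\n" + ("\n".join(("module unload " + m) for m in modules) if len(modules) > 0 else "")
    print(source)
    # #!/bin/sh
    # module load bioinfo/bwa
    # module load bioinfo/samtools
    # bwa mem ref.fa $1 > $2
    # module unload bioinfo/bwa
    # module unload bioinfo/samtools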
@@ -209,8 +212,9 @@ class PythonFunction(ScriptFunction):
     - `executable` -- Path or name to use for the script.
     - `cmd_format` -- String template used to generate command string.
     """
-    PYTHON_VERSION = 'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1])
-    PYTHON_TEMPLATE = '''#!/usr/bin/env {0}
+    PYTHON_VERSION = sys.executable
+    # 'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1])
+    PYTHON_TEMPLATE = '''#!{0}
 import pickle
 import {{0}}
@@ -262,7 +266,7 @@ if __name__ == '__main__':
     objh.close()
 '''.format(PYTHON_VERSION)

-    def __init__(self, function, add_path=None, executable=None, cmd_format=None):
+    def __init__(self, function, add_path=None, executable=None, cmd_format=None, modules=[]):
         # TODO: this doesn't work with Python3
         body = inspect.getsource(function)
         name = getfuncname(function)
@@ -273,15 +277,23 @@ if __name__ == '__main__':
             pass

         if add_path:
-            add_path = add_path.extend(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
+            add_path.update([os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
+                             os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')])
         else:
-            add_path = [os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
-                        os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')]
+            add_path = {os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
+                        os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')}

         path = ""
         for apath in add_path:
             path += "sys.path.insert(0, '" + apath + "')\n"

         source = self.PYTHON_TEMPLATE.format(', '.join(imports), path, body, name)
+        """
+        source_modules = '#!%s\n' % path + \
+            ("\n".join(("module load " + module) for module in modules) if len(modules) > 0 else "") + "\n" + \
+            source + \
+            "\n" + ("\n".join(("module unload " + module) for module in modules) if len(modules) > 0 else "")
+        """
         ScriptFunction.__init__(self, source, executable, cmd_format)
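One detail worth spelling out in the hunk above: the old code assigned the return value of `list.extend`, which is always `None`, so a caller-supplied `add_path` was effectively discarded (and extending a list with a string adds its individual characters); the new code mutates a set in place and de-duplicates entries. A minimal sketch with illustrative paths:

    # Old pattern: extend() returns None, so add_path is lost.
    add_path = ["/opt/jflow/workflows"]        # illustrative caller-supplied path
    add_path = add_path.extend("/opt/jflow")   # extends with characters, then returns None
    print(add_path)                            # None

    # New pattern: update the set in place; duplicates are collapsed.
    add_path = {"/opt/jflow/workflows"}
    add_path.update(["/opt/jflow", "/opt/jflow/workflows"])
    print(sorted(add_path))                    # ['/opt/jflow', '/opt/jflow/workflows']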
@@ -350,4 +362,4 @@ class Pipeline(Function):

     def __str__(self):
         return self.cmd_format

 # vim: set sts=4 sw=4 ts=8 expandtab ft=python:
\ No newline at end of file
@@ -101,23 +101,9 @@ class tx_nG6_utils {

         if ($directory != 'undefined' && $directory != '') {
             // And process the directories structure
-            $stream = ssh2_exec($connection, 'rm -rf '.$directory."/");
-            $errorStream = ssh2_fetch_stream($stream, SSH2_STREAM_STDERR);
-            // Enable blocking for both streams
-            stream_set_blocking($errorStream, true);
-            stream_set_blocking($stream, true);
-            // Whichever of the two below commands is listed first will receive its appropriate output. The second command receives nothing
-            if(stream_get_contents($errorStream)) {
-                // Close the streams
-                fclose($errorStream);
-                fclose($stream);
+            // TODO BUG: no error is returned if the directory could not be deleted
+            if (!ssh2_exec($connection, 'rm -rf '.$directory."/")) {
                 return 1;
-            }else{
-                // Close the streams
-                fclose($errorStream);
-                fclose($stream);
-                return 0;
             }
         }
         return 0;
@@ -134,7 +120,8 @@ class tx_nG6_utils {

     static function purge_directory($user_login, $user_pwd, $directory) {

         if ($directory != 'undefined' && $directory != '') {
             // And process the directories structure, remove all files except .html and .png
             $connection = ssh2_connect('127.0.0.1', 22);
             if (!$connection) {
                 return 3;
@@ -148,19 +135,18 @@ class tx_nG6_utils {
             // Enable blocking for both streams
             stream_set_blocking($errorStream, true);
             stream_set_blocking($stream, true);

             // Whichever of the two below commands is listed first will receive its appropriate output. The second command receives nothing
             if(stream_get_contents($errorStream)) {
-                // Close the streams
-                fclose($errorStream);
-                fclose($stream);
                 return 1;
             }else{
-                // Close the streams
-                fclose($errorStream);
-                fclose($stream);
                 return 0;
             }
+            // Close the streams