Commit ac7efcfa authored by Celine Noirot

Upgrade jflow v3-git

parent f5cc1b8e
......@@ -22,8 +22,6 @@ import os
from jflow.config_reader import JFlowConfigReader
# Define some Error classes
class InvalidFormatError(Exception): pass
jflowconf = JFlowConfigReader()
......
......@@ -27,7 +27,66 @@ from weaver.options import Options
from weaver.abstraction import Abstraction
class MultiMap(Abstraction):
class AbstractionWargs(Abstraction):
""" The base Abstraction class.
**Positional Arguments**:
- `function` -- Function to apply (Function, string, string format)
**Keyword Arguments**:
- `inputs` -- Inputs to function
- `outputs` -- Output of function
- `includes` -- Files to include for each task.
- `native` -- Whether or not to use native abstraction if available.
- `group` -- Number of tasks to inline.
- `collect` -- Whether or not to mark files for garbage collection.
- `local` -- Whether or not to force local execution.
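- `arguments` -- Extra arguments forwarded to the function for each generated task.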
`inputs` and `includes` are parsed using
:func:`~weaver.data.parse_input_list` and must be in a form acceptable to
that function. Likewise, `outputs` is parsed by
:func:`~weaver.data.parse_output_list` and `function` is parsed by
:func:`~weaver.function.parse_function`.
"""
Counter = None
def __init__(self, function, inputs=None, outputs=None, includes=None,
native=False, group=None, collect=False, local=False, arguments=None):
Abstraction.__init__(self, function, inputs, outputs, includes, native, group, collect, local)
self.arguments = arguments
class Map(AbstractionWargs):
""" Weaver Map Abstraction.
This Abstraction enables the following pattern of execution:
Map(f, inputs, outputs)
In this case, the :class:`Function` *f* is applied to each item in
*inputs* to generate the corresponding *outputs*.
"""
Counter = itertools.count()
@cache_generation
def _generate(self):
with self:
debug(D_ABSTRACTION, 'Generating Abstraction {0}'.format(self))
function = parse_function(self.function)
inputs = parse_input_list(self.inputs)
outputs = parse_output_list(self.outputs, inputs)
includes = parse_input_list(self.includes)
for i, o in zip(inputs, outputs):
with Options(local=self.options.local, collect=[i] if self.collect else None):
yield function(i, o, self.arguments, includes)
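As a hedged illustration (not part of this commit), the extended Map abstraction could be driven as sketched below inside a weaver workflow definition; the command, file names and `arguments` value are hypothetical, the import paths are not shown in this diff, and the call pattern is inferred from `_generate` above.
# Hypothetical sketch: Map and ShellFunction come from the bundled weaver
# package (exact module paths not shown in this diff).
fn = ShellFunction("gzip -c $1 > $2", cmd_format="{EXE} {IN} {OUT}")
Map(fn,
    inputs=["sample_1.fastq", "sample_2.fastq"],
    outputs=["sample_1.fastq.gz", "sample_2.fastq.gz"],
    arguments="--fast")   # hypothetical extra value, forwarded unchanged as
                          # the third argument of fn(input, output, arguments, includes)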
class MultiMap(AbstractionWargs):
""" Weaver MultiMap Abstraction.
This Abstraction enables the following pattern of execution:
......@@ -110,5 +169,5 @@ class MultiMap(Abstraction):
iteration_outputs = parse_output_list(self.outputs, input_pattern)
with Options(local=self.options.local):
yield function(iteration_inputs, iteration_outputs, None, includes)
yield function(iteration_inputs, iteration_outputs, self.arguments, includes)
#
# Copyright (C) 2015 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
class JflowArgumentParser (argparse.ArgumentParser):
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
new_arg_strings = []
for arg_string in arg_strings:
# if it's not a comment or an empty line
if not arg_string.startswith("#") and arg_string:
# for regular arguments, just add them back into the list
if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
else:
try:
with open(arg_string[1:]) as args_file:
arg_strings = []
# pass the whole list of lines to convert_arg_line_to_args instead of one line at a time
for arg in self.convert_arg_line_to_args(args_file.read().splitlines()):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
except OSError as err:
self.error(str(err))
# return the modified argument list
return new_arg_strings
\ No newline at end of file
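A hedged usage sketch for the parser above; the prefix character, option names, file name and the import path are hypothetical, and it assumes a `convert_arg_line_to_args` override that accepts the whole list of lines at once, as the inline comment in `_read_args_from_files` notes.
# Hypothetical sketch: JflowArgumentParser is assumed importable from its
# jflow module; the override below is invented for illustration.
class MyParser(JflowArgumentParser):
    def convert_arg_line_to_args(self, arg_lines):
        # receive all file lines at once; keep one argument per non-empty,
        # non-comment line
        return [line.strip() for line in arg_lines
                if line.strip() and not line.strip().startswith("#")]

parser = MyParser(fromfile_prefix_chars="@")
parser.add_argument("--threads", type=int, default=1)
parser.add_argument("--input-file")
# args.txt might contain "--input-file", "reads.fastq" and a "# comment" line;
# '@args.txt' expands to its content, comments and blank lines are skipped.
args = parser.parse_args(["@args.txt", "--threads", "4"])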
This diff is collapsed.
......@@ -16,6 +16,7 @@
#
import os
import re
import sys
import inspect
import logging
......@@ -29,7 +30,7 @@ class JFlowConfigReader(object):
"""
CONFIG_FILE_PATH = "../../application.properties"
USER_PATTERN = re.compile("###USER###")
def __init__(self):
"""
"""
......@@ -37,12 +38,18 @@ class JFlowConfigReader(object):
self.reader.read(os.path.join(os.path.dirname(inspect.getfile(self.__class__)), self.CONFIG_FILE_PATH))
def get_tmp_directory(self):
if not os.path.isdir(self.reader.get("storage", "tmp_directory").replace("###USER###",os.getenv("USER"))):
os.makedirs(self.reader.get("storage", "tmp_directory").replace("###USER###",os.getenv("USER")), 0o751)
return self.reader.get("storage", "tmp_directory").replace("###USER###",os.getenv("USER"))
tmp_dir=self.reader.get("storage", "tmp_directory")
if self.USER_PATTERN.search(self.reader.get("storage", "tmp_directory")) is not None :
tmp_dir=tmp_dir.replace("###USER###",os.getenv("USER"))
if not os.path.isdir(tmp_dir):
os.makedirs(tmp_dir, 0o751)
return tmp_dir
def get_work_directory(self):
return self.reader.get("storage", "work_directory").replace("###USER###",os.getenv("USER"))
if self.USER_PATTERN.search(self.reader.get("storage", "work_directory")) is None :
return self.reader.get("storage", "work_directory")
else:
return self.reader.get("storage", "work_directory").replace("###USER###",os.getenv("USER"))
def get_exec(self, software):
try:
......@@ -59,9 +66,13 @@ class JFlowConfigReader(object):
@return: the path to the log file
"""
try:
return self.reader.get('storage', 'log_file').replace("###USER###",os.getenv("USER"))
if self.USER_PATTERN.search(self.reader.get("storage", "log_file")) is None :
return self.reader.get('storage', 'log_file')
else :
return self.reader.get('storage', 'log_file').replace("###USER###",os.getenv("USER"))
except :
raise NoOptionError("Failed when parsing the config file, no section logging found!")
def get_makeflow_path(self):
try:
......@@ -118,11 +129,24 @@ class JFlowConfigReader(object):
return self.reader.get("components", component_class+".batch_options")
except:
return ""
def get_component_modules(self, component_class):
try:
return self.reader.get("components", component_class+".modules").split(",")
except:
return []
def get_workflow_group(self, workflow_class):
try:
return self.reader.get("workflows", workflow_class+".group")
except:
return ""
\ No newline at end of file
def get_browse_root_dir(self):
return self.reader.get("storage", "browse_root_dir")
def get_debug(self):
try:
return self.reader.get("global", "debug") == "True"
except NoOptionError:
return False
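A hedged sketch connecting the new `get_component_modules` getter to the `modules` keyword added to ShellFunction later in this commit; the component class name, module name, command and the ShellFunction import path are hypothetical.
# Hypothetical names throughout: shows how a configured module list could
# feed the new ShellFunction(modules=...) hook introduced in this commit.
from jflow.config_reader import JFlowConfigReader

config = JFlowConfigReader()
modules = config.get_component_modules("BWAIndex")   # e.g. ["bwa/0.7.12"], or [] if unset
fn = ShellFunction("bwa index $1", cmd_format="{EXE} {IN}", modules=modules)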
#
# Copyright (C) 2015 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class InvalidFormatError(Exception):
pass
class RuleException (Exception):
pass
class RuleIgnore (Exception):
pass
......@@ -15,7 +15,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys, re
import re
class GFF3Record:
"""
......@@ -171,7 +171,7 @@ class GFF3IO:
for line in self._handle:
line = line.rstrip()
self._line += 1
if line.startswith('#') :
if line.startswith('#') or line == "":
continue
try:
gff_record = GFF3Record.fromGff(line)
......
This diff is collapsed.
This diff is collapsed.
......@@ -26,7 +26,6 @@ __status__ = 'beta'
from collections import namedtuple
import sys
import struct
import os
import io
str = str
from codecs import getreader, getwriter
......
This diff is collapsed.
......@@ -51,7 +51,7 @@ def robust_rmtree(path, logger=None, max_retries=6):
shutil.rmtree(path)
def display_error_message(msg):
sys.stderr.write("\033[91mError: "+msg+"\n\033[0m")
sys.stderr.write("\033[91mError: "+msg+"\033[0m\n")
sys.exit(1)
def display_info_message(msg, with_exit=False):
......
This diff is collapsed.
......@@ -84,7 +84,7 @@ class WorkflowsManager(object):
for ifunction in inspect.getmembers(obj, predicate=inspect.isfunction):
if ifunction[0] == function:
# try to build the workflow
try:
#try:
select_workflow = True
inst = obj(function=function)
if filter_groups :
......@@ -93,7 +93,7 @@ class WorkflowsManager(object):
if select_workflow:
wf_instances.append(inst)
wf_methodes.append(function)
except: pass
#except: pass
return [wf_instances, wf_methodes]
def rerun_workflow(self, workflow_id):
......@@ -109,7 +109,7 @@ class WorkflowsManager(object):
# Update the workflow in the cache
self._dump_workflows([workflow])
return workflow
def run_workflow(self, workflow_class, args, function="process"):
# Load all modules within the workflow module
for importer, modname, ispkg in pkgutil.iter_modules(workflows.__path__, workflows.__name__ + "."):
......@@ -194,6 +194,20 @@ class WorkflowsManager(object):
if class_name == workflow_class:
return obj()
return None
def get_workflow_by_name(self, workflow_name):
"""
Get a workflow by its name (case-insensitive)
:param workflow_name: workflow name
:return: workflow object
"""
for importer, modname, ispkg in pkgutil.iter_modules(workflows.__path__, workflows.__name__ + "."):
__import__(modname)
# Search for Workflow classes
for class_name, obj in inspect.getmembers(sys.modules[modname], inspect.isclass):
if class_name.lower() == workflow_name.lower():
return obj()
return None
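A hedged usage sketch for the new lookup; the workflow name is hypothetical and the manager constructor is assumed here to take no arguments.
# Hypothetical sketch: resolve a workflow class regardless of how the user
# typed its name; None is returned when nothing matches.
manager = WorkflowsManager()
workflow = manager.get_workflow_by_name("radseq")   # also matches "RADseq"
if workflow is not None:
    print(workflow.__class__.__name__)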
def get_workflow(self, workflow_id):
rworkflow_id = utils.get_nb_string(workflow_id)
......
......@@ -40,7 +40,15 @@ class BasicNG6Workflow (Workflow):
def __init__(self, args={}, id=None, function= "process"):
Workflow.__init__(self, args, id, function)
self.add_parameter("admin_login", "Who is the project administrator", required = True, type = 'ng6userlogin', display_name="Admin login")
def get_description(self):
"""
Return the workflow description; has to be implemented by subclasses.
"""
return "BasicNG6Workflow"
def define_parameters(self, function="process"):
pass
def add_component(self, component_name, args=[], kwargs={}, component_prefix="default", parent=None, addto="run"):
# first build and check if this component is OK
if component_name in self.internal_components or component_name in self.external_components:
......@@ -127,7 +135,15 @@ class DownloadWorkflow(Workflow):
self.add_parameter_list('data_id', 'Ids of a run from which rawdata will be retrieved', type = 'existingrun')
self.add_parameter_list('run_id', 'Ids of run from which all data will be retrieved', type = 'existingrun')
self.add_parameter_list('analysis_id', 'Ids of analysis to retrieve', type = 'existinganalysis')
def get_description(self):
"""
Return the workflow description; has to be implemented by subclasses.
"""
return "DownloadWorkflow"
def define_parameters(self, function="process"):
pass
class NG6Workflow (BasicNG6Workflow):
def __init__(self, args={}, id=None, function= "process"):
......@@ -153,7 +169,7 @@ class NG6Workflow (BasicNG6Workflow):
self.add_parameter("run_type", "What type of data is it (1 lane, 1 region)", flag = "--type", required = True, display_name="Type", group="Run information")
def __add_sample_parameters__(self):
self.add_multiple_parameter_list("input_sample", "Definition of a sample", flag="--sample", required = True, group="Sample description")
self.add_multiple_parameter_list("input_sample", "Definition of a sample", flag="--sample", group="Sample description") # required = True, # TO CHECK casavaWorkflow required not if casava dir
self.add_parameter("sample_id", "The uniq identifier of the sample", type="nospacestr", add_to = "input_sample")
self.add_parameter("sample_name", "A descriptive name for the sample", type="nospacestr", add_to = "input_sample")
self.add_parameter("sample_description", "A brief description of the sample", add_to = "input_sample")
......@@ -254,7 +270,7 @@ class NG6Workflow (BasicNG6Workflow):
elif self.project:
self.project.sync()
def get_files_from_casava(casava_directory, project_name, lane_number):
"""
Retrieve all fastq files of a specific project and lane number from a given casava directory
......@@ -324,7 +340,7 @@ class CasavaNG6Workflow(NG6Workflow):
def __add_sample_parameters__(self):
self.add_multiple_parameter('casava', 'Provide the options to retrieve samples from a CASAVA directory', group="Sample description")
self.add_input_directory("directory", "Path to the CASAVA directory to use", required=True, get_files_fn=get_files_from_casava, add_to="casava" )
self.add_parameter("lane", "The lane number to be retrieved from the casava directory", required=True, type='int', add_to="casava")
self.add_parameter("lane", "The lane number to be retrieved from the casava directory", type='int', add_to="casava") #required=True,
self.add_parameter('project', 'The name of the project to retrieve in casava directory. The default name is the name of the nG6 project',add_to="casava")
self.add_parameter('mismatch_index', 'Set this value to true if the index sequence in the sample fastq files allows at least 1 mismatch',
type ='bool', add_to="casava")
......@@ -333,7 +349,7 @@ class CasavaNG6Workflow(NG6Workflow):
NG6Workflow.__add_sample_parameters__(self)
self.add_exclusion_rule("casava", "input_sample")
#TODO exclude self.add_exclusion_rule("casava", "input_sample")
self.add_parameter("compression", "How should the data be compressed once archived", choices= [ "none", "gz", "bz2"], default = "none")
self.add_parameter("keep_reads", "Keep or discard reads which pass the illumina filter. 'all' option will keep all reads", flag = "--keep",
......@@ -536,4 +552,4 @@ class CasavaNG6Workflow(NG6Workflow):
return fastqilluminafilter, filtered_read1_files, filtered_read2_files, saved_files
\ No newline at end of file
......@@ -97,7 +97,6 @@ class Run(object):
"""
nb_seq, full_size = 0, 0
for file in self.raw_files:
print("run process_raw_files " + file)
# Get nb_seq and full_size values
reader = seqio.SequenceReader(file)
for id, desc, seq, qualities in reader:
......
......@@ -185,7 +185,7 @@ class ShellFunction(ScriptFunction):
}
SHELL_DEFAULT = 'sh'
def __init__(self, source, shell=None, executable=None, cmd_format=None):
def __init__(self, source, shell=None, executable=None, cmd_format=None, modules=[]):
if shell is None or not os.path.isabs(shell):
if shell not in ShellFunction.SHELL_TABLE:
shell = ShellFunction.SHELL_DEFAULT
......@@ -193,7 +193,10 @@ class ShellFunction(ScriptFunction):
else:
shell_path = shell
shell = os.path.basename(shell)
source = '#!%s\n' % shell_path + source
source = '#!%s\n' % shell_path + \
("\n".join(("module load " + module) for module in modules) if len(modules) > 0 else "") + "\n" + \
source + \
"\n" + ("\n".join(("module unload " + module) for module in modules) if len(modules) > 0 else "")
ScriptFunction.__init__(self, source, executable, cmd_format)
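A hedged sketch of what the wrapping above produces; the module name, command and cmd_format are hypothetical.
# Hypothetical sketch: with modules=["samtools/1.3"] and the default 'sh'
# shell, the generated script body becomes, roughly:
#
#   #!/bin/sh
#   module load samtools/1.3
#   samtools index $1
#   module unload samtools/1.3
fn = ShellFunction("samtools index $1", cmd_format="{EXE} {IN}",
                   modules=["samtools/1.3"])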
......@@ -209,8 +212,9 @@ class PythonFunction(ScriptFunction):
- `executable` -- Path or name to use for the script.
- `cmd_format` -- String template used to generate command string.
"""
PYTHON_VERSION = 'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1])
PYTHON_TEMPLATE = '''#!/usr/bin/env {0}
PYTHON_VERSION = sys.executable
# 'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1])
PYTHON_TEMPLATE = '''#!{0}
import pickle
import {{0}}
......@@ -262,7 +266,7 @@ if __name__ == '__main__':
objh.close()
'''.format(PYTHON_VERSION)
def __init__(self, function, add_path=None, executable=None, cmd_format=None):
def __init__(self, function, add_path=None, executable=None, cmd_format=None, modules=[]):
# TODO: this doesn't work with Python3
body = inspect.getsource(function)
name = getfuncname(function)
......@@ -273,15 +277,23 @@ if __name__ == '__main__':
pass
if add_path:
add_path = add_path.extend(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
add_path.update([os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')])
else:
add_path = [os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')]
add_path = {os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')}
path = ""
for apath in add_path:
path += "sys.path.insert(0, '" + apath + "')\n"
source = self.PYTHON_TEMPLATE.format(', '.join(imports), path, body, name)
"""
source_modules = '#!%s\n' % path + \
("\n".join(("module load " + module) for module in modules) if len(modules) > 0 else "") + "\n" + \
source + \
"\n" + ("\n".join(("module unload " + module) for module in modules) if len(modules) > 0 else "")
"""
ScriptFunction.__init__(self, source, executable, cmd_format)
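The practical effect of switching PYTHON_VERSION to sys.executable is that the generated wrapper script is pinned to the interpreter running jflow rather than whatever `pythonX.Y` happens to resolve to on the execution host; a quick check of the new shebang (output is machine dependent):
import sys

# Before this commit the shebang was built as '#!/usr/bin/env pythonX.Y';
# it is now the absolute path of the current interpreter.
print("#!{0}".format(sys.executable))   # e.g. #!/usr/bin/python3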
......@@ -350,4 +362,4 @@ class Pipeline(Function):
def __str__(self):
return self.cmd_format
# vim: set sts=4 sw=4 ts=8 expandtab ft=python:
\ No newline at end of file
# vim: set sts=4 sw=4 ts=8 expandtab ft=python:
......@@ -101,23 +101,9 @@ class tx_nG6_utils {
if ($directory != 'undefined' && $directory != '') {
// And process the directories structure
$stream = ssh2_exec($connection, 'rm -rf '.$directory."/");
$errorStream = ssh2_fetch_stream($stream, SSH2_STREAM_STDERR);
// Enable blocking for both streams
stream_set_blocking($errorStream, true);
stream_set_blocking($stream, true);
// Whichever of the two below commands is listed first will receive its appropriate output. The second command receives nothing
if(stream_get_contents($errorStream)) {
// Close the streams
fclose($errorStream);
fclose($stream);
//TODO BUG: no error is returned if the directory was not actually deleted
if (!ssh2_exec($connection, 'rm -rf '.$directory."/")) {
return 1;
}else{
// Close the streams
fclose($errorStream);
fclose($stream);
return 0;
}
}
return 0;
......@@ -134,7 +120,8 @@ class tx_nG6_utils {
static function purge_directory($user_login, $user_pwd, $directory) {
if ($directory != 'undefined' && $directory != '') {
// And process the directories structure, remove all files except .html and .png
// And process the directories structure, remove all files except .html and .png
$connection = ssh2_connect('127.0.0.1', 22);
if (!$connection) {
return 3;
......@@ -148,19 +135,18 @@ class tx_nG6_utils {
// Enable blocking for both streams
stream_set_blocking($errorStream, true);
stream_set_blocking($stream, true);
// Whichever of the two below commands is listed first will receive its appropriate output. The second command receives nothing
if(stream_get_contents($errorStream)) {
// Close the streams
fclose($errorStream);
fclose($stream);
return 1;
}else{
// Close the streams
fclose($errorStream);
fclose($stream);
return 0;
}
// Close the streams
fclose($errorStream);
fclose($stream);
}
}
}
......
This diff is collapsed.
#
# Copyright (C) 2012 INRA
# Copyright (C) 2015 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
......
......@@ -35,15 +35,15 @@ class AddAnalysis (BasicNG6Workflow):
self.add_parameter("compression", "How should data be compressed once archived", choices = ['none', 'gz', 'bz2'], default = 'none')
self.add_parameter("delete", "Should the input data be deleted once the process is over", type = bool , default = False)
self.add_parameter("archive_name", "Give a name to the final archive", default = 'ng6_archive')
self.add_parameter("project_id", "The project id the analysis belongs to", type = 'int', required = True)
self.add_parameter("run_id", "The run id the run belongs to", type = 'int', required = True)
self.add_parameter("project_id", "The project id the analysis belongs to", type = 'int', rules="Exclude=run_id;RequiredIf?ALL[run_id=None]")
self.add_parameter("run_id", "The run id the run belongs to", type = 'int', rules = "Exclude=project_id;RequiredIf?ALL[project_id=None]")
self.add_parameter("analysis_name", "Give a name to your analysis", required = True)
self.add_parameter("analysis_description", "Give a description to your analysis", required = True)
self.add_parameter("analysis_software", "Which software was used for this analysis", required = True)
self.add_parameter("analysis_software_options", "Which software options were used for this analysis", required = True)
self.add_parameter("analysis_software_version", "analysis_software_version", required = True)
self.add_parameter("parent_analysis", "The id of an analysis to be used as a parent analysis", type= 'int')
self.add_exclusion_rule( "project_id", "run_id" )
def process(self):
self.runobj = None
......@@ -90,4 +90,4 @@ class AddAnalysis (BasicNG6Workflow):
if self.runobj:
self.runobj.sync()
elif self.project:
self.project.sync()
\ No newline at end of file
self.project.sync()
......@@ -32,9 +32,9 @@ class AddFiles (BasicNG6Workflow):
self.add_input_file_list("data_file", " Which data files should be added to the analysis or the run", required = True)
self.add_parameter("compression", "How should data be compressed once archived", choices = ['none', 'gz', 'bz2'], default = 'none')
self.add_parameter("delete", "Should the input data be deleted once the process is over", type = bool , default = False)
self.add_parameter("analysis_id", "The id of the analysis", type = int, required = True)
self.add_parameter("run_id", "The run id the run belongs to", type = int, required = True)
self.add_exclusion_rule( "analysis_id", "run_id" )
self.add_parameter("analysis_id", "The id of the analysis", type = int) #, required = True)
self.add_parameter("run_id", "The run id the run belongs to", type = int)#, required = True)
#TODO Exclude self.add_exclusion_rule( "analysis_id", "run_id" )
def process(self):
# check if files provided exists
......
......@@ -165,11 +165,11 @@ class AlignmentStats (Analysis):
# Alignment quality stats
if self.csv_files_r2:
self.add_shell_execution(self.get_exec_path("samtools") + " view -F0x0100 $1 | " + self.get_exec_path("python3") + " " + self.get_exec_path("cigarlineGraph.py") + " -i - -t $2 $3 " + self.cigar_options + " 2> $4",
self.add_shell_execution(self.get_exec_path("samtools") + " view -F0x0100 $1 | python " + self.get_exec_path("cigarlineGraph.py") + " -i - -t $2 $3 " + self.cigar_options + " 2> $4",
cmd_format='{EXE} {IN} {OUT}', map=True,
inputs=self.bam_files, outputs=[self.csv_files_r1, self.csv_files_r2, self.cigar_stderrs])
else:
self.add_shell_execution(self.get_exec_path("samtools") + " view -F0x0100 $1 | " + self.get_exec_path("python3") + " " + self.get_exec_path("cigarlineGraph.py") + " -i - -t $2 " + self.cigar_options + " 2> $3",
self.add_shell_execution(self.get_exec_path("samtools") + " view -F0x0100 $1 | python " + self.get_exec_path("cigarlineGraph.py") + " -i - -t $2 " + self.cigar_options + " 2> $3",
cmd_format='{EXE} {IN} {OUT}', map=True,
inputs=self.bam_files, outputs=[self.csv_files_r1, self.cigar_stderrs])
......
......@@ -17,6 +17,8 @@
import jflow
from jflow import seqio
from jflow.featureiolib.gff3 import GFF3IO
from jflow.exceptions import InvalidFormatError
import os
import sys
......@@ -33,18 +35,18 @@ def fastq(ifile):
# only check the first 10 sequences
if nb_seq == 10: break
except:
raise jflow.InvalidFormatError("The provided file '" + ifile + "' is not a fastq file!")
raise InvalidFormatError("The provided file '" + ifile + "' is not a fastq file!")
def fasta(ifile):
try:
reader = seqio.FastaReader(ifile, wholefile=True)
reader = seqio.FastaReader(ifile)
nb_seq = 0
for id, desc, seq, qualities in reader:
nb_seq += 1
# only check the first 10 sequences
if nb_seq == 10: break
# only check the first 3 sequences
if nb_seq == 3: break
except:
raise jflow.InvalidFormatError("The provided file '" + ifile + "' is not a fasta file!")
raise InvalidFormatError("The provided file '" + ifile + "' is not a fasta file!")
def sff(ifile):
try:
......@@ -55,5 +57,15 @@ def sff(ifile):
# only check the first 10 sequences
if nb_seq == 10: break
except:
raise jflow.InvalidFormatError("The provided file '" + ifile + "' is not a sff file!")
\ No newline at end of file
raise InvalidFormatError("The provided file '" + ifile + "' is not a sff file!")
def gff3(ifile):
#try:
reader = GFF3IO(ifile,"r")
nb_line = 0
for record in reader:
nb_line += 1
# only check the first 10 records
if nb_line == 10: break
#except:
# raise InvalidFormatError("The provided file '" + ifile + "' is not a gff3 file!")
\ No newline at end of file
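A hedged usage sketch for the checkers in this module; the file names are hypothetical, and note that the gff3 checker currently lets the underlying GFF3IO exception propagate because its try/except is commented out.
# Hypothetical file names: each checker returns silently when the first few
# records parse, and fastq/fasta/sff raise InvalidFormatError otherwise.
from jflow.exceptions import InvalidFormatError

try:
    fastq("reads_R1.fastq")
    fasta("reference.fa")
except InvalidFormatError as error:
    print(error)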
......@@ -70,7 +70,7 @@ class RADseq (NG6Workflow):
self.add_parameter("mismatches", "Max. number of mismatches allowed.",default = 0, type=int)
self.add_parameter("trim_barcode", "Should the barecode be trimmed",default = True, type=bool)
self.add_parameter("trim_reads2", " Shoud the read 2 be trimmed to have the same length as the read1",default = False, type=bool)
self.add_exclusion_rule('trim_reads2', 'trim_barcode')
#TODO Exclude self.add_exclusion_rule('trim_reads2', 'trim_barcode')