Commit 15af3a2a authored by Penom Nom

Fix the memory error with large Ace files.

parent 2e12997f
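
The fix swaps Biopython's Ace.read(), which builds the whole assembly in memory, for the Ace.parse() generator, which yields one contig at a time. A minimal sketch of the streaming pattern, assuming Biopython's Bio.Sequencing.Ace module; "assembly.ace" is a placeholder path:

from Bio.Sequencing import Ace

# Ace.read() returns a single ACEFileRecord holding every contig, which can
# exhaust memory on large files; Ace.parse() is a generator, so only one
# Contig object is resident at a time.
with open("assembly.ace", "r") as handle:          # "assembly.ace" is a placeholder path
    for contig in Ace.parse(handle):
        # each Contig exposes name, nbases, reads and af, as used in the diff below
        print(contig.name, contig.nbases, len(contig.reads))
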
@@ -18,7 +18,7 @@
 __author__ = 'Plateforme bioinformatique Midi Pyrenees'
 __copyright__ = 'Copyright (C) 2009 INRA'
 __license__ = 'GNU General Public License'
-__version__ = '1.0'
+__version__ = '1.1'
 __email__ = 'support.genopole@toulouse.inra.fr'
 __status__ = 'beta'
@@ -49,19 +49,19 @@ class RunAssemblyAnalyse (Analyse):
         @param ace_file : the ace file path
         @return : {"contigs": {...}, ...}
         """
-        handle = Ace.read(open(ace_file, 'r'))
+        ace_iterator = Ace.parse(open(ace_file, 'r'))
         contig_len = []
         prof = []
         #Collect values
-        for contig in handle.contigs:
+        for contig in ace_iterator:
             read_sum = 0
-            # For each contig
+            # For each read
             for i in range(len(contig.reads)):
-                # compute the read length involved in the contig
+                # Compute the read length involved in the contig
                 read_within = 0
                 read_length = len(contig.reads[i].rd.sequence)
-                # If the reas starts after the contig start or right at the start
+                # If the read starts after the contig start or right at the start
                 if contig.af[i].padded_start > 0:
                     read_within = min( read_length, (contig.nbases - contig.af[i].padded_start + 1) )
                 # Otherwise
@@ -94,8 +94,12 @@ class RunAssemblyAnalyse (Analyse):
         median_depth = round(np.median(prof),2)
         mean_depth = round(np.mean(prof),2)
         #Retrieve the number of reads and contigs
+        ace_handle = open(ace_file, 'r')
+        ace_header = re.split('\s+', ace_handle.readline())
+        ace_handle.close()
-        return {"contigs": {"nbcontigs": handle.ncontigs, "nbreads": handle.nreads, "minlength": min_length,
+        return {"contigs": {"nbcontigs": ace_header[1], "nbreads": ace_header[2], "minlength": min_length,
                             "maxlength": max_length, "medlength": median_length, "meanlength": mean_length},
                 "depth": {"mindepth": min_depth, "maxdepth": max_depth, "meddepth": median_depth,
                           "meandepth": mean_depth}}