Commit 22bf6e1e authored by Romain Therville

Merge branch 'master' into 'issue#129'

# Conflicts:
#   ui/nG6/lib/class.tx_nG6_utils.php

(Both lines looked the same to me. RT)
parents 05c8a261 6c16a1fa
......@@ -190,9 +190,6 @@ class NG6Workflow (BasicNG6Workflow):
pidx = 1
nidx = 1
for rang, sample in enumerate(self.samples) :
print(sample)
print("sample readsi")
print(sample.readsi)
if sample.name :
self.samples_names.append(sample.name)
else :
......@@ -449,6 +446,7 @@ class CasavaNG6Workflow(NG6Workflow):
if self.is_casava:
self.group_prefix = list((Utils.get_group_basenames(self.get_all_reads(), "read")).keys())
logging.getLogger("ng6").debug("CasavaNG6Workflow._preprocess enter" + str(self.group_prefix))
def _process_casava_18(self, casava_directory, project_name, lane_number, input_files):
logging.getLogger("ng6").debug("CasavaNG6Workflow._process_casava_18 enter")
......@@ -646,10 +644,14 @@ class CasavaNG6Workflow(NG6Workflow):
concatenatefastq = None
filtered_read1_files = []
filtered_read2_files = []
filtered_index_files = []
analysis_files = []
saved_files = []
logging.getLogger("ng6").debug("illumina_process entering")
if self.is_casava :
logging.getLogger("ng6").debug("illumina_process self.is_casava")
analysis_files = self.get_all_reads("read1") + self.get_all_reads("read2")
if len(self.log_files) > 0 :
add_log = self.add_component("BasicAnalysis", [self.log_files,"Log Files","Log files generated during primary analysis","-","-","-","gz", "","log.gz"])
......@@ -660,41 +662,50 @@ class CasavaNG6Workflow(NG6Workflow):
demultiplex_stats = self.add_component("Demultiplex10XStats", [self.get_all_reads("read1"), self.undetermined_reads1, self.get_files_index("read1")])
else :
demultiplex_stats = self.add_component("DemultiplexStats", [self.get_all_reads("read1"), self.undetermined_reads1])
#analysis files for fastq illumina and fastqc analysis
if self.keep_reads != "all" :
logging.getLogger("ng6").debug("illumina_process self.keep_reads != all")
logging.getLogger("ng6").debug("illumina_process BEFORE FASTQILLUMINAFILTER self.get_all_reads() = " + ",".join(self.get_all_reads()))
logging.getLogger("ng6").debug("illumina_process self.group_prefix = " + ",".join(self.group_prefix))
# fastq illumina filter
fastqilluminafilter = self.add_component("FastqIlluminaFilter", [self.runobj,self.get_all_reads(), self.keep_reads, self.group_prefix])
fastqilluminafilter = self.add_component("FastqIlluminaFilter", [self.runobj, self.get_all_reads(), self.keep_reads, self.group_prefix])
logging.getLogger("ng6").debug("illumina_process fastqilluminafilter = " + ",".join(filtered_read1_files))
# list filtered files
if self.is_paired_end() :
# split read 1 and read 2 from filtered files list
[filtered_read1_files, filtered_read2_files] = Utils.split_pair(fastqilluminafilter.fastq_files_filtered, (self.group_prefix is not None))
if self.is_10Xcasava :
[filtered_read1_files, filtered_read2_files, filtered_index_files] = Utils.split_pair_and_index(fastqilluminafilter.fastq_files_filtered, (self.group_prefix is not None))
else:
[filtered_read1_files, filtered_read2_files] = Utils.split_pair(fastqilluminafilter.fastq_files_filtered, (self.group_prefix is not None))
else:
filtered_read1_files = fastqilluminafilter.fastq_files_filtered
filtered_read2_files = []
filtered_index_files = []
filtered_read1_files = sorted(filtered_read1_files)
filtered_read2_files = sorted(filtered_read2_files)
filtered_index_files = sorted(filtered_index_files)
else:
fastqilluminafilter = None
filtered_read1_files = self.get_all_reads("read1")
filtered_read2_files = self.get_all_reads("read2")
filtered_index_files = self.get_all_reads("index")
# archive the files
#TODO : if self.group_prefix == None, then create the output of fastqilluminafilter in the run.get_work_directory()
saved_files = filtered_read1_files + filtered_read2_files + self.get_all_reads("readsi")
saved_files = filtered_read1_files + filtered_read2_files + filtered_index_files
logging.getLogger("CasavaNG6Workflow").debug("illumina_process saved_files = " + ",".join(saved_files))
reads_prefixes = None
if self.group_prefix != None :
# concatenate fastq
reads_prefixes = list((Utils.get_group_basenames(saved_files, "read")).keys())
logging.getLogger("CasavaNG6Workflow").debug("illumina_process reads_prefixes = " + ",".join(reads_prefixes))
logging.getLogger("CasavaNG6Workflow").debug("illumina_process read_predixes = " + ",".join(reads_prefixes))
logging.getLogger("CasavaNG6Workflow").debug("illumina_process saved_files = " + ",".join(saved_files))
concatenatefastq = self.add_component("ConcatenateFilesGroups", [self.runobj,saved_files, reads_prefixes])
concatenatefastq = self.add_component("ConcatenateFilesGroups", [self.runobj,saved_files,reads_prefixes])
saved_files = concatenatefastq.concat_files
logging.getLogger("CasavaNG6Workflow").debug("illumina_process after concatenatefastq, saved_files = " + ",".join(saved_files))
......@@ -703,8 +714,11 @@ class CasavaNG6Workflow(NG6Workflow):
fastqilluminafilter = None
filtered_read1_files = self.get_all_reads("read1")
filtered_read2_files = self.get_all_reads("read2")
filtered_index_files = self.get_all_reads("index")
saved_files = self.get_all_reads()
# reads prefixes
reads_prefixes = list((Utils.get_group_basenames(analysis_files, "read")).keys())
# add raw
addrawfiles = self.add_component("AddRawFiles", [self.runobj, saved_files, self.compression])
contam = []
......@@ -714,6 +728,9 @@ class CasavaNG6Workflow(NG6Workflow):
contam.append(self.get_resource("yeast_bwa"))
except : pass
logging.getLogger("CasavaNG6Workflow").debug("illumina_process files_analysis = " + ",".join(filtered_read1_files))
logging.getLogger("CasavaNG6Workflow").debug("illumina_process files_analysis = " + ",".join(filtered_read2_files))
logging.getLogger("CasavaNG6Workflow").debug("illumina_process files_analysis = " + ",".join(filtered_index_files))
# contamination_search
if contam :
if self.contamination_databank: contam.extend(self.contamination_databank)
......
......@@ -202,7 +202,7 @@ class t3MySQLdb(object):
right_level_id = qresult.rows[0][0]
sql = "INSERT INTO tx_nG6_project (pid, tstamp, crdate, cruser_id, description, name, hidden, space_id) \
VALUES ('%s','%s','%s','%s','%s','%s','%s','%s')" % (self.cfg_reader.get_pid(), crdate, crdate, admin_id,
self.esc_q(pdescription), self.esc_q(name), hidden, space_id)
self.esc_q(pdescription), self.esc_q(name), int(hidden), space_id)
qresult = self.execute(sql, commit = True)
project_id = qresult.lastrowid
self.execute("INSERT INTO fe_rights (fe_user_id, project_id, right_id) VALUES ('%s','%s','%s')" % (admin_id, project_id, right_level_id),
......@@ -320,7 +320,7 @@ class t3MySQLdb(object):
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % ( self.cfg_reader.get_pid(), crdate, crdate, cruser_id, self.esc_q(name),
calendar.timegm(date.timetuple()), directory, self.esc_q(species),
self.esc_q(data_nature), self.esc_q(type), nb_sequences,
full_seq_size, self.esc_q(description), hidden, self.esc_q(sequencer),retention_date, "stored","0","0","0")
full_seq_size, self.esc_q(description), int(hidden), self.esc_q(sequencer),retention_date, "stored","0","0","0")
qresult = self.execute(sql, commit = True)
run_id = qresult.lastrowid
......@@ -471,7 +471,7 @@ class t3MySQLdb(object):
if description:
set_values.append( "description='%s'" % self.esc_q(description))
if hidden:
set_values.append( "hidden=%s" % hidden )
set_values.append( "hidden=%s" % int(hidden) )
if sequencer:
set_values.append( "sequencer='%s'" % self.esc_q(sequencer))
if storage_size:
......@@ -673,7 +673,8 @@ class t3MySQLdb(object):
#--------------------------------------------------------------------
# Analyze functions
def add_analysis(self, aclass, name, description, cruser_login, date, directory, software, version, params, is_editable, retention_date, hidden=1, parent_uid=0):
def add_analysis(self, aclass, name, description, cruser_login, date, \
directory, software, version, params, is_editable, retention_date, hidden=1, parent_uid=0):
"""
Add an analysis
@param aclass : the class of the analysis
......@@ -703,7 +704,7 @@ class t3MySQLdb(object):
'%s', '%s', '%s', '%s', '%s', '%s' ,'%s', '%s', '%s', '%s', '%s', '%s', '%s')" % ( self.cfg_reader.get_pid(), crdate, crdate, cruser_id, aclass,
self.esc_q(name), self.esc_q(description), crdate,
directory, self.esc_q(software), self.esc_q(version),
hidden, self.esc_q(params), analysis_is_editable, parent_uid, retention_date, "stored","0","0","0")
int(hidden), self.esc_q(params), analysis_is_editable, parent_uid, retention_date, "stored","0","0","0")
qresult = self.execute(sql, commit= True)
analysis_id = qresult.lastrowid
......@@ -1065,4 +1066,4 @@ class t3MySQLdb(object):
qresult = self.execute(sql)
return [ r[0] for r in qresult.rows ]
\ No newline at end of file
......@@ -290,7 +290,44 @@ class Utils(object):
return [read_1_list, read_2_list]
@staticmethod
def split_pair_and_index ( file_list, is_casava=False ):
"""
Return the list of read 1, the list of read 2 and the list of index read from a list
@param file_list : the list of files to split
@param is_casava : file names in file_list are in CASAVA format
"""
read_1_list = []
read_2_list = []
read_index_list = []
logging.getLogger("Utils").debug("split_pair_and_index. Entering")
if is_casava:
logging.getLogger("Utils").debug("split_pair_and_index. is_casava")
for file in file_list:
logging.getLogger("Utils").debug("split_pair_and_index. file = " + file)
basename_without_ext = os.path.basename(file).split(".")[0]
file_name_fields = basename_without_ext.split(Utils.CASAVA_FILENAME_SEPARATOR)
read_tag = file_name_fields[Utils.CASAVA_FILENAME['read']-1]
if read_tag == "R1":
read_1_list.append(file)
elif read_tag == "R2":
read_2_list.append(file)
else:
read_index_list.append(file)
else:
sorted_list = sorted( file_list )
logging.getLogger("Utils").debug("split_pair_and_index. file_list = " + ", ".join(file_list))
logging.getLogger("Utils").debug("split_pair_and_index. sorted_list = " + ", ".join(sorted_list))
for i in range(0,len(sorted_list),3):
logging.getLogger("Utils").debug("split_pair_and_index. sorted_list[i] = " + sorted_list[i])
logging.getLogger("Utils").debug("split_pair_and_index. sorted_list[i+1] = " + sorted_list[i+1])
logging.getLogger("Utils").debug("split_pair_and_index. sorted_list[i+1] = " + sorted_list[i+2])
read_1_list.append(sorted_list[i])
read_2_list.append(sorted_list[i+1])
read_index_list.append(sorted_list[i+2])
return [read_1_list, read_2_list, read_index_list]
@staticmethod
def get_group_basenames( file_list, group_by ):
"""
......
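A hypothetical usage sketch for the new `Utils.split_pair_and_index` (the file names, and the assumption that the CASAVA read field carries the R1/R2/I1 tag, are illustrative):

```python
# Illustrative call; names follow the CASAVA pattern referenced above.
files = [
    "sample1_S1_L001_R1_001.fastq.gz",
    "sample1_S1_L001_R2_001.fastq.gz",
    "sample1_S1_L001_I1_001.fastq.gz",
]
read1, read2, index = Utils.split_pair_and_index(files, is_casava=True)
# read1 -> the R1 file, read2 -> the R2 file, index -> the I1 file
```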
......@@ -50,8 +50,6 @@
<div class="sub-content sc-top-bottom">
<div class="ng6-content-header-left">
<h2>###STATUS_HEADER###</h2>
<p>###STATUS_MESSAGE###</p>
<!-- ###LOGIN_FORM### -->
<form action="###ACTION_URI###" target="_top" method="post" onsubmit="###ON_SUBMIT###" class="form-horizontal">
<fieldset>
......@@ -130,8 +128,6 @@
<div class="sub-content sc-top-bottom">
<div class="ng6-content-header-left">
<h2>###STATUS_HEADER###</h2>
<p>###STATUS_MESSAGE###</p>
<!-- ###LOGIN_FORM### -->
<form action="###ACTION_URI###" target="_top" method="post" onsubmit="###ON_SUBMIT###" class="form-horizontal">
<fieldset>
......
......@@ -2174,7 +2174,7 @@ static function get_project_runs($project_id, $orderby='', $limit='') {
* Returns a run's retention status and date
*
@param string $r_id the run id
@return['run_data_state'] purged=> the data is purged, stored=> the data is stored
@return['run_data_state'] purged=> the data is purged, stored=> the data is stored, extended=> retention extended and data still stored
* @return['run_retention_date'] the unix epoch timestamp
*/
static function select_run_retention_status($r_id) {
......@@ -2182,7 +2182,8 @@ static function get_project_runs($project_id, $orderby='', $limit='') {
// First select the run from the database
$queryParts = Array(
'SELECT' => 'tx_nG6_run.data_state AS run_data_state, '.
'tx_nG6_run.retention_date AS run_retention_date ',
'tx_nG6_run.retention_date AS run_retention_date, '.
'tx_nG6_run.purged_date AS run_purged_date ',
'FROM' => 'tx_nG6_run ',
'WHERE' => 'tx_nG6_run.uid='.$r_id,
'GROUPBY' => '',
......@@ -2194,8 +2195,9 @@ static function get_project_runs($project_id, $orderby='', $limit='') {
$res = $GLOBALS['TYPO3_DB']->exec_SELECT_queryArray($queryParts);
while($row = $GLOBALS['TYPO3_DB']->sql_fetch_assoc($res)) {
$result['run_data_state'] = $row['run_data_state'];
//$result['run_retention_date'] = $row['run_data_state'];
$result['run_retention_date'] = tx_nG6_utils::convert_epoch_timestamp_to_nice_date($row['run_retention_date']);
$result['run_purge_date'] = tx_nG6_utils::convert_epoch_timestamp_to_nice_date($row['run_purged_date']);
//$result['run_purge_date'] = $row['run_purged_date'];
}
return $result;
}
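With `purged_date` now selected, the helper returns three fields. A sketch of the result shape (values invented; the nice-date strings come from `convert_epoch_timestamp_to_nice_date`):

```python
# Invented values; the keys mirror the PHP $result built above.
result = {
    "run_data_state": "purged",
    "run_retention_date": "30-06-2021",  # converted retention_date
    "run_purge_date": "30-06-2020",      # converted purged_date
}
```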
......@@ -2565,14 +2567,16 @@ static function get_project_runs($project_id, $orderby='', $limit='') {
*
* @param string $a_id the analysis id
@return['analysis_data_state'] purged=> the data is purged, stored=> the data is stored
* @return['analysis_retention_date'] the unix epoch timestamp
* @return['analysis_retention_date'] the retention unix epoch timestamp converted to a nice date
* @return['analysis_purged_date'] the purge unix epoch timestamp converted to a nice date
*/
static function select_analysis_retention_status($a_id) {
// First select the analysis from the database
$queryParts = Array(
'SELECT' => 'tx_nG6_analyze.data_state AS analysis_data_state, '.
'tx_nG6_analyze.retention_date AS analysis_retention_date ',
'tx_nG6_analyze.retention_date AS analysis_retention_date, '.
'tx_nG6_analyze.purged_date AS analysis_purged_date ',
'FROM' => 'tx_nG6_analyze ',
'WHERE' => 'tx_nG6_analyze.uid='.$a_id,
'GROUPBY' => '',
......@@ -2584,8 +2588,8 @@ static function get_project_runs($project_id, $orderby='', $limit='') {
$res = $GLOBALS['TYPO3_DB']->exec_SELECT_queryArray($queryParts);
while($row = $GLOBALS['TYPO3_DB']->sql_fetch_assoc($res)) {
$result['analysis_data_state'] = $row['analysis_data_state'];
//$result['analysis_retention_date'] = $row['analysis_retention_date'];
$result['analysis_retention_date'] = tx_nG6_utils::convert_epoch_timestamp_to_nice_date($row['analysis_retention_date']);
$result['analysis_purge_date'] = tx_nG6_utils::convert_epoch_timestamp_to_nice_date($row['analysis_purged_date']);
}
return $result;
}
......
......@@ -289,10 +289,10 @@ please have a look at our storage fees (<a href="###URL_PRICE###">###URL_PRICE##
//Return a php array containing the section relative to the given space_id
static function get_retention_policy_from_space_id($space_id = "default"){
$config_file_content = tx_nG6_utils::get_config_content("default");
//To explode the file content by line, we use PHP_EOL
$array_config_content = explode(PHP_EOL, $config_file_content);
$space_id_retention_config = array();
$pattern_to_find = "[space_".$space_id."]";
$is_space_id_found = FALSE;
......
......@@ -30,12 +30,14 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
<div class="ng6-content-header-right">
{$analyse.description}
<br />
All data related to this analysis uses <strong> {$analyse_size} </strong> on the hard drive.
All data related to this analysis uses <strong> {$analyse_size} </strong> on the hard drive.<br/>
{block name=description_update}{/block}
{if $analysis_data_state == 'stored'}
The analysis data is <strong>stored</strong>. It will be kept until {$analysis_retention_date}.<br/>
{else}
The analysis data has been <strong>purged</strong>.<br/>
The analysis data is <strong>stored</strong>. It will be kept until <strong>{$analysis_retention_date}</strong>.<br/>
{elseif $analysis_data_state == 'purged'}
The analysis data has been <strong>purged</strong> on <strong>{$analysis_purge_date}</strong>.<br/>
{elseif $analysis_data_state == 'extended'}
The analysis data retention has been <strong>extended</strong> to <strong>{$analysis_retention_date}</strong>.<br/>
{/if}
</div>
<div style="clear:both"></div>
......
......@@ -79,7 +79,7 @@ $(function () {
//Build tbody
var tbody = "" ;
var pattern_regexp = new RegExp( '<a[^>]+class="[^"]*imglink[^"]*"[^>]+href="(fileadmin\/data\/analyze\/[^"]+)' ) ;
var pattern_regexp = new RegExp( '<a[^>]+class="[^"]*imglink[^"]*"[^>]+href="(fileadmin\/data[_]*[a-z]*\/analyze\/[^"]+)' ) ;
for( var i = 0 ; i < samples.length ; i++ ) {
tbody += '<tr>' ;
for( var j = 0 ; j < columns.length ; j++ ) {
......
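A Python sketch of the widened pattern (the `data_small` directory is an invented example of the suffixed form the new character class accepts):

```python
import re

# Same character class as the new JS regex: "data" optionally followed by
# underscores and lowercase letters.
pattern = re.compile(r'href="(fileadmin/data[_]*[a-z]*/analyze/[^"]+)"')
for href in ('href="fileadmin/data/analyze/run1/plot.png"',
             'href="fileadmin/data_small/analyze/run1/plot.png"'):
    match = pattern.search(href)
    print(match.group(1) if match else "no match")
```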
......@@ -325,6 +325,7 @@ class tx_nG6_pi1 extends \TYPO3\CMS\Frontend\Plugin\AbstractPlugin {
$retention_policy = tx_nG6_db::select_run_retention_status($runs[key($runs)]['id']);
$smarty->assign('run_data_state', $retention_policy['run_data_state']);
$smarty->assign('run_retention_date', $retention_policy['run_retention_date']);
$smarty->assign('run_purge_date', $retention_policy['run_purge_date']);
$run_analysis = tx_nG6_db::get_run_analysis($runs[key($runs)]['id']);
// Add some information to the table
......@@ -404,6 +405,7 @@ class tx_nG6_pi1 extends \TYPO3\CMS\Frontend\Plugin\AbstractPlugin {
$retention_policy = tx_nG6_db::select_analysis_retention_status($this->piVars['analyze_id']);
$smarty->assign('analysis_data_state', $retention_policy['analysis_data_state']);
$smarty->assign('analysis_retention_date', $retention_policy['analysis_retention_date']);
$smarty->assign('analysis_purge_date', $retention_policy['analysis_purge_date']);
// Then select analyse results
$results = tx_nG6_db::select_analyse_results($this->piVars['analyze_id']);
......
......@@ -85,9 +85,11 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
<strong>{$run_analysis|@count}</strong> analysis has been done on this run.<br/>
{/if}
{if $run_data_state == 'stored'}
The run data is <strong>stored</strong>. It will be kept until {$run_retention_date}.<br/>
{else}
The run data has been <strong>purged</strong>.<br/>
The run data is <strong>stored</strong>. It will be kept until <strong>{$run_retention_date}</strong>.<br/>
{elseif $run_data_state == 'purged'}
The run data has been <strong>purged</strong> on <strong>{$run_purge_date}</strong>.<br/>
{elseif $run_data_state == 'extended'}
The run data retention has been <strong>extended</strong> to <strong>{$run_retention_date}</strong>.<br/>
{/if}
......
......@@ -441,7 +441,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
</tbody>
<tfoot>
<tr>
<th align="left" colspan="10">
<th align="left" colspan="11">
With selection :
<div class="btn-group">
<button id="btn_obsolete_mail" type="button" class="btn btn-sm btn-default" ><i class="glyphicon glyphicon-envelope"></i> Send mail </button>
......
......@@ -109,6 +109,8 @@ function get_casava_1_8_desc($string, $desc) {
$best_description = $description." (R1)";
} elseif (preg_match("/_R2/i", $string)) {
$best_description = $description." (R2)";
} elseif (preg_match("/_I1/i", $string)) {
$best_description = $description." (I1)";
}
}
}
......
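A hedged Python mirror of the extended description tagging (the function and file name are illustrative, not from the repository; the patch simply adds the I1 branch beside R1/R2):

```python
import re

# Illustrative re-implementation of the tag handling in get_casava_1_8_desc.
def tag_description(filename, description):
    for tag in ("R1", "R2", "I1"):
        if re.search("_" + tag, filename, re.IGNORECASE):
            return "%s (%s)" % (description, tag)
    return description

print(tag_description("sample1_S1_L001_I1_001.fastq.gz", "Raw reads"))  # Raw reads (I1)
```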
......@@ -68,20 +68,22 @@ a img { border: 0; }
}
.ng6-content-header-left {
box-shadow: -4px 0 5px -5px grey, 5px 0 5px -6px grey;
box-shadow: -4px 0 5px -5px grey, 5px 0 5px -6px grey;
margin: 0;
margin-left: 10px;
margin-right: 20px;
padding: 10px;
width: 375px;
width: 400px;
height: 275px;
text-align: justify;
float: left;
background-color: #FaFaFa;
}
.ng6-content-header-right {
box-shadow: -4px 0 5px -5px grey, 5px 0 5px -6px grey;
box-shadow: -4px 0 5px -5px grey, 5px 0 5px -6px grey;
margin: 0;
padding: 10px;
width: 625px;
width: 710px;
text-align: justify;
float: left;
background-color: #FaFaFa;
......@@ -143,7 +145,7 @@ a img { border: 0; }
padding: 0;
margin-left: auto;
margin-right: auto;
width: 1300px;
width: 1000px;
color: #BBBABA;
display: block;
line-height: 1.5em;
......
......@@ -37,4 +37,4 @@ class Illumina10XQualityCheck (CasavaNG6Workflow):
def process(self):
fastqilluminafilter, filtered_read1_files, filtered_read2_files, concat_files = self.illumina_process()
\ No newline at end of file
fastqilluminafilter, filtered_read1_files, filtered_read2_files, concat_files, concatenatefastq = self.illumina_process()
\ No newline at end of file
......@@ -29,53 +29,54 @@ def migrate_project (project_id, new_space_id, output):
from ng6.analysis import Analysis
from ng6.project import Project
from ng6.run import Run
with open( output,'w') as fh :
ng6conf = NG6ConfigReader()
base_path = ng6conf.get_save_directory()
project = Project.get_from_id(project_id)
logging.getLogger("migrate_project").debug("Migrating project '" + str(project.name) + "' (" + str(project.id) + ") to " + str(new_space_id) + "\n")
fh.write("Migrating project '" + str(project.name) + "' (" + str(project.id) + ") to " + str(new_space_id) + "\n")
def log_to_output_files (log_text, log_file):
fh = open(log_file, "a")
fh.write(log_text)
fh.close()
logging.getLogger("move_project.py").debug(log_text)
print( "os.path.exists("+log_file+")=" + str(os.path.exists(log_file)))
ng6conf = NG6ConfigReader()
base_path = ng6conf.get_save_directory()
old_space_id = project.space_id
logging.getLogger("migrate_project").debug("old_space_id = " + old_space_id + ", new_space_id = " + new_space_id + "\n")
fh.write("old_space_id = " + old_space_id + ", new_space_id = " + new_space_id + "\n")
project = Project.get_from_id(project_id)
old_space_id = project.space_id
str_log="MIGRATE PROJECT: '" + str(project.name) + "' (" + str(project.id) + ") from " + old_space_id + " to " + str(new_space_id) + "\n"
log_to_output_files(str_log, output)
runs = project.get_runs()
for run in runs :
str_log="\t - ##RUN '" + run.name + "' (" + str(run.id) + ") - " + run.directory + "\n"
log_to_output_files(str_log, output)
str_cmds = []
runs = project.get_runs()
for run in runs :
fh.write("Migrating run '" + run.name + "' (" + str(run.id) + ")" + " to " + str(new_space_id) + "\n")
logging.getLogger("migrate_project").debug("Migrating run '" + run.name + "' (" + str(run.id) + ")" + " to " + str(new_space_id) + "\n")
str_cmds.append(run.change_space(new_space_id) )
fh.write("Migrating successful! \n")
logging.getLogger("migrate_project").debug("Migrating successful! \n")
run_analyzes = run.get_analysis()
for analysis in run_analyzes :
fh.write("Migrating analysis '" + analysis.name + "' (" + str(analysis.id) + ")" + " to " + str(new_space_id) + "\n")
logging.getLogger("migrate_project").debug("Migrating analysis '" + analysis.name + "' (" + str(analysis.id) + ")" + " to " + str(new_space_id) + "\n")
str_cmds.append(analysis.change_space(new_space_id) )
fh.write("Migrating successful! \n")
logging.getLogger("migrate_project").debug("Migrating successful! \n")
(ret_code, ret_command) = run.change_space(new_space_id)
str_log = "\t RET CODE: " + str(ret_code) + " CMD : " + ret_command + "\n"
log_to_output_files(str_log, output)
run_analyzes = run.get_analysis()
for analysis in run_analyzes :
str_log = "\t\t - ###ANALYSE " + analysis.name+ "' (" + str(analysis.id) + ") - " + analysis.directory + "\n"
log_to_output_files(str_log, output)
analyzes = project.get_analysis()
for analysis in analyzes :
fh.write("Migrating analysis '" + analysis.name + "' (" + str(analysis.id) + ")" + " to " + str(new_space_id) + "\n")
logging.getLogger("migrate_project").debug("Migrating analysis '" + analysis.name + "' (" + str(analysis.id) + ")" + " to " + str(new_space_id) + "\n")
str_cmds.append(analysis.change_space(new_space_id) )
fh.write("Migrating successful! \n")
logging.getLogger("migrate_project").debug("Migrating successful! \n")
for command in str_cmds:
fh.write("Command launched : "+ str(command[1]) + "\n" )
logging.getLogger("migrate_project").debug("Command launched : "+ str(command[1]) + "\n" )
fh.write("Returned code : "+ str(command[0]) + "\n" )
logging.getLogger("migrate_project").debug("Returned code : "+ str(command[0]) + "\n" )
project.update_space_id(new_space_id)
(ret_code, ret_command) = analysis.change_space(new_space_id)
str_log = "\t\t RET CODE: " + str(ret_code) + " CMD : " + ret_command + "\n"
log_to_output_files(str_log, output)
analyzes = project.get_analysis()
for analysis in analyzes :
str_log="\t - ##ANALYSE PROJECT '" + analysis.name + "' (" + str(analysis.id) + ") - " + analysis.directory + "\n"
log_to_output_files(str_log, output)
(ret_code, ret_command) = analysis.change_space(new_space_id)
str_log = "\tRET CODE: " + str(ret_code) + " CMD : " + ret_command + "\n"
log_to_output_files(str_log, output)
project.update_space_id(new_space_id)
str_log = "project.update_space_id DONE\n"
log_to_output_files(str_log, output)
class MoveProject (Component):
......
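The rewrite funnels every migration step through the new `log_to_output_files` helper instead of a long-lived file handle. A minimal usage sketch (message and path are placeholders):

```python
# Placeholder message and path; the helper appends to the log file and
# mirrors the text to the "move_project.py" logger, as defined above.
log_to_output_files("MIGRATE PROJECT: 'demo' (42) from space_A to space_B\n",
                    "/tmp/migrate_project.log")
```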