Commit 3f190de0 authored by Romain Therville's avatar Romain Therville 🐭

Merge branch 'master' into 'issue#120'

# Conflicts:
#   ui/nG6/pi6/class.tx_nG6_pi6.php

The conflict only concerned php comments. I would have preferred to remove them, but the resulting script seemed really fishy on the git file compare display.
Anyway, we'll be able to remove those comments later.
parents a23abb97 bbf78d99
......@@ -1212,19 +1212,19 @@ INSERT INTO `tx_nG6_project` (`uid`, `pid`, `tstamp`, `crdate`, `cruser_id`, `de
--
-- Structure de la vue `tx_nG6_view_project_analyze`
--
CREATE ALGORITHM=UNDEFINED DEFINER=`typo3`@`localhost` SQL SECURITY DEFINER VIEW `tx_nG6_view_project_analyze` AS select `tx_nG6_project`.`uid` AS `project_id`,`tx_nG6_project`.`name` AS `project_name`,`tx_nG6_analyze`.`uid` AS `analyze_id`,`tx_nG6_analyze`.`storage_size` AS `storage_size`,`tx_nG6_analyze`.`purged_size` AS `purged_size`,`tx_nG6_analyze`.`data_state` AS `state`,`tx_nG6_analyze`.`retention_date` AS `retention_date`,`tx_nG6_analyze`.`purge_demand_id` AS `purge_demand_id` from ((`tx_nG6_project` join `tx_nG6_project_analyze` on((`tx_nG6_project_analyze`.`project_id` = `tx_nG6_project`.`uid`))) join `tx_nG6_analyze` on((`tx_nG6_project_analyze`.`analyze_id` = `tx_nG6_analyze`.`uid`)));
CREATE ALGORITHM=UNDEFINED DEFINER=`typo3`@`localhost` SQL SECURITY DEFINER VIEW `tx_nG6_view_project_analyze` AS select `tx_nG6_project`.`uid` AS `project_id`,`tx_nG6_project`.`name` AS `project_name`,`tx_nG6_analyze`.`uid` AS `analyze_id`,`tx_nG6_analyze`.`storage_size` AS `storage_size`,`tx_nG6_analyze`.`purged_size` AS `purged_size`,`tx_nG6_analyze`.`data_state` AS `state`,`tx_nG6_analyze`.`retention_date` AS `retention_date`,`tx_nG6_analyze`.`purge_demand_id` AS `purge_demand_id`, `tx_nG6_analyze`.`hidden` AS `hidden` from ((`tx_nG6_project` join `tx_nG6_project_analyze` on((`tx_nG6_project_analyze`.`project_id` = `tx_nG6_project`.`uid`))) join `tx_nG6_analyze` on((`tx_nG6_project_analyze`.`analyze_id` = `tx_nG6_analyze`.`uid`)));
--
-- Structure de la vue `tx_nG6_view_project_run`
--
CREATE ALGORITHM=UNDEFINED DEFINER=`typo3`@`localhost` SQL SECURITY DEFINER VIEW `tx_nG6_view_project_run` AS select `tx_nG6_project`.`uid` AS `project_id`,`tx_nG6_project`.`name` AS `project_name`,`tx_nG6_run`.`uid` AS `run_id`,`tx_nG6_run`.`storage_size` AS `storage_size`,`tx_nG6_run`.`purged_size` AS `purged_size`,`tx_nG6_run`.`data_state` AS `state`,`tx_nG6_run`.`retention_date` AS `retention_date`,`tx_nG6_run`.`purge_demand_id` AS `purge_demand_id` from ((`tx_nG6_run` join `tx_nG6_project_run` on((`tx_nG6_project_run`.`run_id` = `tx_nG6_run`.`uid`))) join `tx_nG6_project` on((`tx_nG6_project`.`uid` = `tx_nG6_project_run`.`project_id`)));
CREATE ALGORITHM=UNDEFINED DEFINER=`typo3`@`localhost` SQL SECURITY DEFINER VIEW `tx_nG6_view_project_run` AS select `tx_nG6_project`.`uid` AS `project_id`,`tx_nG6_project`.`name` AS `project_name`,`tx_nG6_run`.`uid` AS `run_id`,`tx_nG6_run`.`storage_size` AS `storage_size`,`tx_nG6_run`.`purged_size` AS `purged_size`,`tx_nG6_run`.`data_state` AS `state`,`tx_nG6_run`.`retention_date` AS `retention_date`,`tx_nG6_run`.`purge_demand_id` AS `purge_demand_id`, `tx_nG6_run`.`hidden` AS `hidden` from ((`tx_nG6_run` join `tx_nG6_project_run` on((`tx_nG6_project_run`.`run_id` = `tx_nG6_run`.`uid`))) join `tx_nG6_project` on((`tx_nG6_project`.`uid` = `tx_nG6_project_run`.`project_id`)));
--
-- Structure de la vue `tx_nG6_view_project_run_analyze`
--
CREATE ALGORITHM=UNDEFINED DEFINER=`typo3`@`localhost` SQL SECURITY DEFINER VIEW `tx_nG6_view_project_run_analyze` AS select `tx_nG6_project`.`uid` AS `project_id`,`tx_nG6_project`.`name` AS `project_name`,`tx_nG6_analyze`.`uid` AS `analyze_id`,`tx_nG6_analyze`.`storage_size` AS `storage_size`,`tx_nG6_analyze`.`purged_size` AS `purged_size`,`tx_nG6_analyze`.`data_state` AS `state`,`tx_nG6_analyze`.`retention_date` AS `retention_date`,`tx_nG6_analyze`.`purge_demand_id` AS `purge_demand_id` from ((((`tx_nG6_run` join `tx_nG6_project_run` on((`tx_nG6_project_run`.`run_id` = `tx_nG6_run`.`uid`))) join `tx_nG6_project` on((`tx_nG6_project`.`uid` = `tx_nG6_project_run`.`project_id`))) join `tx_nG6_run_analyze` on((`tx_nG6_run_analyze`.`run_id` = `tx_nG6_run`.`uid`))) join `tx_nG6_analyze` on((`tx_nG6_run_analyze`.`analyze_id` = `tx_nG6_analyze`.`uid`)));
CREATE ALGORITHM=UNDEFINED DEFINER=`typo3`@`localhost` SQL SECURITY DEFINER VIEW `tx_nG6_view_project_run_analyze` AS select `tx_nG6_project`.`uid` AS `project_id`,`tx_nG6_project`.`name` AS `project_name`,`tx_nG6_analyze`.`uid` AS `analyze_id`,`tx_nG6_analyze`.`storage_size` AS `storage_size`,`tx_nG6_analyze`.`purged_size` AS `purged_size`,`tx_nG6_analyze`.`data_state` AS `state`,`tx_nG6_analyze`.`retention_date` AS `retention_date`,`tx_nG6_analyze`.`purge_demand_id` AS `purge_demand_id`, `tx_nG6_analyze`.`hidden` AS `hidden` from ((((`tx_nG6_run` join `tx_nG6_project_run` on((`tx_nG6_project_run`.`run_id` = `tx_nG6_run`.`uid`))) join `tx_nG6_project` on((`tx_nG6_project`.`uid` = `tx_nG6_project_run`.`project_id`))) join `tx_nG6_run_analyze` on((`tx_nG6_run_analyze`.`run_id` = `tx_nG6_run`.`uid`))) join `tx_nG6_analyze` on((`tx_nG6_run_analyze`.`analyze_id` = `tx_nG6_analyze`.`uid`)));
--
-- Structure de la vue `tx_nG6_view_project_user`
......
......@@ -116,7 +116,7 @@ class NG6ConfigReader(object):
def get_retention_date(self, space_id="default"):
"""
return the directory corresponding to space_id
return the retention corresponding to space_id
@return: space_dir
"""
date = None
......@@ -131,7 +131,28 @@ class NG6ConfigReader(object):
return date
except:
raise Exception("Failed when parsing the config file !")
raise ValueError("Failed while gnerating retention date!")
raise ValueError("Failed while generating retention date!")
def compute_new_retention_date(self, space_id="default", old_retention = None):
    """
    Compute a new retention date for the given storage space.

    If the config section 'space_<space_id>' defines a ``retention_period``
    (in months) and ``old_retention`` is a valid epoch timestamp, the new
    date is ``old_retention`` shifted by that many months. Otherwise it
    falls back to the fixed ``retention_date`` (``dd/mm/yyyy``) of the space.

    @param space_id: the storage space id (config section 'space_<space_id>')
    @param old_retention: the previous retention date, as an epoch timestamp
    @return: the new retention date, as an epoch timestamp
    @raise Exception: if neither option can be read from the config file
    """
    date = None
    try :
        # preferred path: shift the old retention by the configured period
        nb_month=self.reader.getint('space_'+space_id,"retention_period")
        retention_date = time.mktime((datetime.datetime.fromtimestamp(old_retention)+ relativedelta(months=nb_month)).timetuple())
        # NOTE(review): informational trace logged at ERROR level — consider debug
        logging.getLogger("config_reader.compute_new_retention_date").error("IN retention period")
        return retention_date
    except:
        # fallback: fixed date "dd/mm/yyyy" from the config file
        try :
            (d,m,y)=self.reader.get('space_'+space_id,"retention_date").split('/')
            date = time.mktime(datetime.date(int(y),int(m),int(d)).timetuple())
            logging.getLogger("config_reader.compute_new_retention_date").error("IN retention date")
            return date
        except:
            raise Exception("Failed when parsing the config file !")
def get_log_file_path(self):
"""
......
......@@ -1031,3 +1031,38 @@ class t3MySQLdb(object):
self.update_fields('tx_nG6_purge_demand',demand_ids,
["processed_date","demand_state"],
[date,"deleted"])
def set_run_retention_from_epoch_timestamp(self, run_id, date):
    """
    Updates a run's retention date.

    @param run_id : the run id
    @param date : the new retention date, as an epoch timestamp
    """
    # NOTE(review): SQL built by string interpolation — safe only while
    # run_id and date are trusted integers; parameterize if execute() allows.
    sql = "UPDATE tx_nG6_run SET retention_date = '%s' WHERE uid= '%s' "% ( date, run_id )
    self.execute(sql, commit = True )
def set_analysis_retention_from_epoch_timestamp(self, analysis_id, date):
    """
    Updates an analysis' retention date.

    @param analysis_id : the analysis id
    @param date : the new retention date, as an epoch timestamp
    """
    # NOTE(review): SQL built by string interpolation — safe only while
    # analysis_id and date are trusted integers; parameterize if possible.
    sql = "UPDATE tx_nG6_analyze SET retention_date = '%s' WHERE uid= '%s' "% ( date, analysis_id )
    self.execute(sql, commit = True )
def get_run_analyzes_id_from_project(self, project_id):
    """
    Return the ids of all analyses attached to the runs of a project.

    @param project_id : the project id
    @return: list of analyze ids
    """
    query = "SELECT analyze_id FROM tx_nG6_view_project_run_analyze WHERE project_id='%s'" % project_id
    result = self.execute(query)
    ids = []
    for row in result.rows:
        ids.append(row[0])
    return ids
def get_run_creation_date(self, run_id):
    """
    Return the creation date(s) of a run.

    @param run_id : the run id
    @return: list of crdate values for the matching run
    """
    query = "SELECT crdate FROM tx_nG6_run WHERE uid='%s'" % run_id
    result = self.execute(query)
    dates = []
    for row in result.rows:
        dates.append(row[0])
    return dates
def get_analysis_creation_date(self, analysis_id):
    """
    Return the creation date(s) of an analysis.

    @param analysis_id : the analysis id
    @return: list of crdate values for the matching analysis
    """
    query = "SELECT crdate FROM tx_nG6_analyze WHERE uid='%s'" % analysis_id
    result = self.execute(query)
    dates = []
    for row in result.rows:
        dates.append(row[0])
    return dates
\ No newline at end of file
......@@ -377,9 +377,10 @@ class Utils(object):
if prefix == "data" or prefix == "run" :
run = t3mysql.select_run_informations(id)
source_dir = data_folder +'/'+ run['directory']
project_name = "Project_%s.%s" % ( re.sub( r"[\s\/]", "_", run['project_name']), run['project_id'] )
run_name = "Run_%s.%s" %( run['name'].replace(' ', '_').replace('/', '_'), id )
#run_name = "Run_%s.%s" %( run['name'].replace(' ', '_').replace('/', '_'), id )
run_name = "Run_%s.%s" % ( re.sub( r"[^A-Za-z0-9]", "_", run['name']), id)
raw_data_dir = os.path.join( output_folder, project_name, run_name, "RawData" )
if source_dir not in src_directories :
......@@ -392,6 +393,7 @@ class Utils(object):
analysis_dir = data_folder + '/' + analyse_values["directory"]
analysis_name = "Analyse_%s.%s"%( re.sub( r"[\s\/]", "_", analyse_values['name']), analyse_id )
dest_analysis_dir = os.path.join( output_folder, project_name, run_name, analysis_name )
if analysis_dir not in src_directories :
src_directories.append(analysis_dir)
if dest_analysis_dir not in dest_directories :
......@@ -409,6 +411,7 @@ class Utils(object):
# it's a run analysis
if analyse["run_id"]:
run_name = "Run_%s.%s" % ( re.sub( r"[\s\/]", "_", analyse["run_name"]), analyse["run_id"])
analysis_name = "Analyse_%s.%s" % ( re.sub( r"[\s\/]", "_", analyse["name"]), id)
dest_analysis_dir = os.path.join( output_folder, project_name, run_name, analysis_name )
if dest_analysis_dir not in dest_directories :
......@@ -429,7 +432,6 @@ class Utils(object):
if os.path.isdir(e) :
sources.append(e)
destinations.append(dest_directories[i])
return sources, destinations
......
......@@ -196,7 +196,7 @@ class tx_nG6_db {
return (tx_nG6_db::select_list_retention_data_info($where));
}
static function select_a_project_retention_data_info($project_id=null, $all=FALSE) {
static function select_a_project_retention_data_info($project_id=null, $all=FALSE, $count_hidden=FALSE) {
$condition=Array();
if ($all === FALSE) {
$condition[]='DATEDIFF( FROM_UNIXTIME( retention_date ) , now( ) ) <0';
......@@ -204,6 +204,9 @@ class tx_nG6_db {
if (null != $project_id) {
$condition[]="project_id = " . $project_id ;
}
if(!$count_hidden){
$condition[]="hidden = 0";
}
$where = join( ' AND ', $condition);
return(tx_nG6_db::select_list_retention_data_info($where));
}
......@@ -242,6 +245,7 @@ class tx_nG6_db {
$by_project[$current_project_id]['total_purgeable_size'] += $res_row['storage_size'];
}
}
//select obsolete analyses of a run
$res = tx_nG6_db::select_all_in_view("tx_nG6_view_project_run_analyze",$where,'10');
while($res_row = $GLOBALS['TYPO3_DB']->sql_fetch_assoc($res)) {
......@@ -251,16 +255,16 @@ class tx_nG6_db {
$by_project[$current_project_id]['project_name']= $res_row['project_name'];
}
if ($res_row['analyze_id'] != ""){
$by_project[$current_project_id]['state'][$res_row['state']]['nb_analyze'] += 1;
$key = 'storage_size';
if($res_row['state'] == "purged"){
$key = "purged_size";
}
$by_project[$current_project_id]['state'][$res_row['state']]['size_analyze'] += $res_row[$key];
$by_project[$current_project_id]['state'][$res_row['state']]['analysis_ids'][]=$res_row['analyze_id'];
$by_project[$current_project_id]['total_purgeable_size'] += $res_row['storage_size'];
$by_project[$current_project_id]['state'][$res_row['state']]['nb_analyze'] += 1;
$key = 'storage_size';
if($res_row['state'] == "purged"){
$key = "purged_size";
}
$by_project[$current_project_id]['state'][$res_row['state']]['size_analyze'] += $res_row[$key];
$by_project[$current_project_id]['state'][$res_row['state']]['analysis_ids'][]=$res_row['analyze_id'];
$by_project[$current_project_id]['total_purgeable_size'] += $res_row['storage_size'];
}
}
}
//select obsolete analyses of a project
$res = tx_nG6_db::select_all_in_view("tx_nG6_view_project_analyze", $where);
......@@ -284,21 +288,21 @@ class tx_nG6_db {
}
}
$res = tx_nG6_db::select_count_in_view("tx_nG6_view_project_run","");
$res = tx_nG6_db::select_count_in_view("tx_nG6_view_project_run",$where);
while($res_row = $GLOBALS['TYPO3_DB']->sql_fetch_assoc($res)) {
if (isset ($by_project[$res_row['project_id']])) {
$by_project[$res_row['project_id']]["nb_runs"]=$res_row['count'];
}
}
$res = tx_nG6_db::select_count_in_view("tx_nG6_view_project_run_analyze","");
$res = tx_nG6_db::select_count_in_view("tx_nG6_view_project_run_analyze",$where);
while($res_row = $GLOBALS['TYPO3_DB']->sql_fetch_assoc($res)) {
if (isset ($by_project[$res_row['project_id']])) {
$by_project[$res_row['project_id']]["nb_analyses"]=$res_row['count'];
}
}
$res = tx_nG6_db::select_count_in_view("tx_nG6_view_project_analyze","");
$res = tx_nG6_db::select_count_in_view("tx_nG6_view_project_analyze",$where);
while($res_row = $GLOBALS['TYPO3_DB']->sql_fetch_assoc($res)) {
if (isset ($by_project[$res_row['project_id']])) {
if (!isset ($by_project[$res_row['project_id']]["nb_analyses"])) {
......
......@@ -41,7 +41,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
{elseif $split_param[0] == "READ_NAME_REGEX"}
<li class="parameter">Regular expression that can be used to extract three variables : tile/region, x coordinate and y coordinate ({$split_param[0]}={$split_param[1]}). These values are used to estimate the rate of optical duplication.</li>
{elseif $split_param[0] == "OPTICAL_DUPLICATE_PIXEL_DISTANCE"}
<li class="parameter">The maximum offset between two duplicte clusters in order to consider them optical duplicates is {$split_param[1]} ({$split_param[0]}={$split_param[1]}).</li>
<li class="parameter">The maximum offset between two duplicate clusters in order to consider them optical duplicates is {$split_param[1]} ({$split_param[0]}={$split_param[1]}).</li>
{/if}
{/if}
{/foreach}
......@@ -71,7 +71,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
<th class="numeric-sort" style="vertical-align:Middle"><center>Singletons</center></th>
<th class="numeric-sort" style="vertical-align:Middle"><center>Mate mapped on a different chr</center></th>
<th class="numeric-sort" style="vertical-align:Middle"><center>Supplementary</center></th>
<th class="numeric-sort" style="vertical-align:Middle"><center>Duplicated</center></th>
<th class="numeric-sort" style="vertical-align:Middle"><center>Nb read duplicated</center></th>
<th class="numeric-sort" style="vertical-align:Middle"><center>Read pair duplicates</center></th>
<th class="numeric-sort" style="vertical-align:Middle"><center>Read pair optical duplicates</center></th>
......@@ -122,7 +122,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
<td>-</td>
<td>-</td>
{else}
<td>{($sample_results["default"].pairDuplicates*2 + $sample_results["default"].unpairDuplicates)|number_format:0:' ':' '}</td>
<td>{($sample_results["default"].pairDuplicates*2 + $sample_results["default"].unpairDuplicates)|number_format:0:' ':' '} ({$sample_results["default"].percentDuplication}*100%)</td>
<td>{$sample_results["default"].pairDuplicates|number_format:0:' ':' '}</td>
<td>{$sample_results["default"].pairOpticalDuplicates|number_format:0:' ':' '}</td>
{/if}
......
......@@ -118,10 +118,17 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
{/foreach}
{if $nb_files == 0}
<div class="alert alert-info">
Results folder not synchronized yet...
{if $analyse.data_state=="purged"}
The data have been purged. (Retention limit : {$analyse.retention_date|date_format})
{else}
Results folder not synchronized yet...
{/if}
</div>
{else}
<ul>
<div class="alert alert-info" name="retention-info">
Retention date is {$analyse.retention_date|date_format}. After this deadline, these data will no longer be available. Only metadata and quality control results will remain in NG6.
</div>
{foreach $dir|scandir as $file}
{assign var="link" value=(('fileadmin'|cat:'/'|cat:$analyse.directory)|cat:'/')|cat:$file}
{if $file != "." and $file != "" and $file != ".." and ($file|substr:-strlen(".png")) != ".png" and !is_dir($link)}
......
......@@ -213,7 +213,13 @@ class tx_nG6_pi1 extends \TYPO3\CMS\Frontend\Plugin\AbstractPlugin {
}
$smarty->assign('h_analysis', tx_nG6_utils::trace_hierarchy($project_analysis));
$smarty->assign('project_analysis', $project_analysis);
$smarty->assign('managment_purged_data', tx_nG6_db::select_a_project_retention_data_info($projects[key($projects)]['id'], TRUE));
if( $projects[$project_id]['is_admin'] == 1 ){
$smarty->assign('managment_purged_data', tx_nG6_db::select_a_project_retention_data_info($projects[key($projects)]['id'], TRUE, TRUE));
}else{
$smarty->assign('managment_purged_data', tx_nG6_db::select_a_project_retention_data_info($projects[key($projects)]['id'], TRUE, FALSE));
}
$txNG6Utils = new tx_nG6_utils;
//$smarty->register_object('tx_nG6_utils',$txNG6Utils);
$smarty->assign_by_ref('tx_nG6_utils', $txNG6Utils);
......
......@@ -122,7 +122,19 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
<div class="tab-pane fade" id="downloads">
{$dir=$data_folder|cat:$runs[key($runs)].directory}
{assign var="nb_files" value=0}
{foreach $dir|scandir as $file}
{if $file != "." and $file !="" and $file != ".." and ($file|substr:-strlen(".png")) != ".png"}
{$nb_files = $nb_files + 1}
{/if}
{/foreach}
<ul>
{if $nb_files >= 1}
<div class="alert alert-info" name="retention-info">
Retention date is {$runs[key($runs)].retention_date|date_format}. After this deadline, these data will no longer be available. Only metadata and quality control results will remain in NG6.
</div>
{/if}
{assign var="nb_files" value=0}
{foreach $dir|scandir as $file}
{if $file != "." and $file !="" and $file != ".." and ($file|substr:-strlen(".png")) != ".png"}
{$link=(('fileadmin'|cat:$runs[key($runs)].directory)|cat:'/')|cat:$file}
......@@ -133,7 +145,12 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
</ul>
{if $nb_files == 0}
<div class="alert alert-info">
Results folder not synchronized yet...
{if $runs[key($runs)].data_state=="purged"}
The data have been purged. (Retention limit : {$runs[key($runs)].retention_date|date_format})
{else}
Results folder not synchronized yet...
{/if}
</div>
{/if}
</div>
......
......@@ -107,8 +107,12 @@ class tx_nG6_pi6 extends \TYPO3\CMS\Frontend\Plugin\AbstractPlugin {
foreach(explode(",", $project_ids) as $project_id){
#retrieve project data
$p=tx_nG6_db::select_a_project_retention_data_info($project_id);
$p=tx_nG6_db::select_a_project_retention_data_info($project_id, FALSE, TRUE);
#retrieve discarded emails
$string_emails_to_discard = $GLOBALS['TSFE']->tmpl->setup["plugin."]["tx_nG6_pi6."]["email_to_discard"] ;
$array_emails_to_discard = explode(',',$string_emails_to_discard);
#build email list of managers
$users_id=array();
$users_emails = array();
......@@ -119,33 +123,44 @@ class tx_nG6_pi6 extends \TYPO3\CMS\Frontend\Plugin\AbstractPlugin {
}
$users_id[$u["right_level_label"]][] = $u["user_id"];
$users_emails[$u["right_level_label"]][] = $u["email"];
}
}
#Warn if mail is sent to several manager
//Warn if mail is sent to several manager
$email_warn="" ;
$purge_email_to="";
$purge_user_id_to=Array();
if ( count( $users_emails['manager']) >= 1 ) {
if ( count( $users_emails['manager']) > 1 ) {
$email_warn="Be aware that this project is associated to several managers (all received this email), please send only one answer per purge alert number.\n";
}
$purge_email_to=join(',', $users_emails['manager']);
$purge_user_id_to=$users_id['manager'];
} elseif ( count($users_emails['manager']) == 0) {
if( count( $users_emails['member']) >= 1){
$users_emails_to = array_merge($users_emails['administrator'],$users_emails['member']);
$purge_user_id_to=array_merge($users_id['administrator'],$users_id['member']);
}else{
$users_emails_to = $users_emails['administrator'];
$purge_user_id_to=array_merge($users_id['administrator']);
}
$purge_email_to=join(', ',$users_emails_to );
$email_warn="As this project do not have managers, administrators and members recieved this alert.\n";
$email_warn.= " - Administrator(s): ".join(', ',$users_emails['administrator']). "\n";
$email_warn.= " - Members(s): ".join(', ',$users_emails['member']). "\n";
}
if ( count( $users_emails['manager']) > 1 ) {
$email_warn.="Be aware that every user associated with this project received this email, please send only one answer per purge alert number.\n";
}
$purge_user_id_to = array();
$users_emails_to = array();
if(isset($users_emails['administrator'])){
$users_emails_to = array_merge($users_emails_to, $users_emails['administrator']);
$purge_user_id_to = array_merge($purge_user_id_to, $users_id['administrator']);
}
if(isset($users_emails['manager'])){
$users_emails_to = array_merge($users_emails_to,$users_emails['manager']);
$purge_user_id_to = array_merge($purge_user_id_to, $users_id['manager']);
}
if(isset($users_emails['member'])){
$users_emails_to = array_merge($users_emails_to, $users_emails['member']);
$purge_user_id_to = array_merge($purge_user_id_to, $users_id['member']);
}
$purge_email_to = join(', ',$users_emails_to );
$email_warn.= "Every user associated with this project received this alert.\n";
if( isset($users_emails['administrator']) && count($users_emails['administrator']) > 0 ){
$email_warn.= " - Administrator(s): ".join(', ',$users_emails['administrator']). "\n";
}
if( isset($users_emails['manager']) && count($users_emails['manager']) > 0 ){
$email_warn.= " - Manager(s): ".join(', ',$users_emails['manager']). "\n";
}
if( isset($users_emails['member']) && count($users_emails['member']) > 0 ){
$email_warn.= " - Member(s): ".join(', ',$users_emails['member']). "\n";
}
//Retrieve purgeable information for email
$run_info=Array();
$analyses_info=Array();
......@@ -199,13 +214,11 @@ class tx_nG6_pi6 extends \TYPO3\CMS\Frontend\Plugin\AbstractPlugin {
return "Mail sent";
}
static function resend_purge_demand_mail ($demands_id) {
static function resend_purge_demand_mail ($demands_id) {
$res_demands = tx_nG6_db::get_purge_demand_from_id($demands_id);
foreach($res_demands as $res_demand){
$res_project = tx_nG6_db::select_project($res_demand["project_id"]);
$total_project_size = tx_nG6_db::get_project_size($res_demand["project_id"], true) ;
//We need to build two strings, including links and names, from $res_demand["run_ids"] and $res_demand["analyze_ids"].
$run_info=Array();
$analyses_info=Array();
$search=array("###TYPE_OBJECT###","###RUN_ID###","###PROJECT_ID###");
......
......@@ -878,7 +878,7 @@ $(function () {
val_url += "&username=" + $("#username_val").val();
val_url += "&first_name=" + $("#first_name_val").val();
val_url += "&last_name=" + $("#last_name_val").val();
val_url += "&password=" + $("#user_password_pwd_val").val();
val_url += "&password=" + escape($("#user_password_pwd_val").val());
val_url += "&email=" + $("#email_val").val();
val_url += "&creator=" + $("#user_id").val();
val_url += "&title=" + $("#title_val").val();
......
......@@ -83,6 +83,7 @@ $(function () {
};
var _retrieve_data = function(workflow_id, callback, error_callback){
console.log("IN _retrieve_data, workflow_id=" + workflow_id);
$.ajax({
url : $("#server_url").val() + '/get_workflow_outputs?workflow_id=' + workflow_id,
dataType : 'jsonp',
......@@ -183,6 +184,14 @@ $(function () {
}
);
}
else if (statusdata.status == "failed") {
modal.$body.html([
'<div class="alert alert-danger">',
' <p>The workflow has failed, either the login or password is incorrect. </p> <p>You must provide the credentials you use to connect to the GenoToul bioinformatics infrastructure.</p>',
'</div>',
].join(''));
}
else {
$('#wfstatus').wfstatus('reload');
button.disabled = false;
......
......@@ -43,7 +43,7 @@ class AlignmentStats (Analysis):
self.add_parameter("assume_sorted", "assume_sorted", default=assume_sorted, type=bool)
self.add_parameter("validation_stringency", "validation_stringency", default=validation_stringency)
self.add_parameter("max_file_handles", "max_file_handles", default=max_file_handles, type=int)
self.add_parameter("sorting_collection_size_ratio", "sorting_collection_size_ratio", default=max_file_handles, type=float)
self.add_parameter("sorting_collection_size_ratio", "sorting_collection_size_ratio", default=sorting_collection_size_ratio, type=float)
self.add_parameter("archive_name", "archive_name", default=archive_name)
self.memory = '4G'
if self.get_memory() != None :
......@@ -136,6 +136,7 @@ class AlignmentStats (Analysis):
self._add_result_element(sample, "pairDuplicates", dupl_info["Unknown Library"]["READ_PAIR_DUPLICATES"])
self._add_result_element(sample, "unpairDuplicates", dupl_info["Unknown Library"]["UNPAIRED_READ_DUPLICATES"])
self._add_result_element(sample, "pairOpticalDuplicates", dupl_info["Unknown Library"]["READ_PAIR_OPTICAL_DUPLICATES"])
self._add_result_element(sample, "percentDuplication", dupl_info["Unknown Library"]["PERCENT_DUPLICATION"])
# Finaly create and add the archive to the analyse
......@@ -156,7 +157,7 @@ class AlignmentStats (Analysis):
xmx="-Xmx"+self.memory.lower()
if self.search_dupl:
self.tmp_bam = self.get_outputs('{basename_woext}_noDupl.bam', self.bam_files)
self.add_shell_execution(self.get_exec_path("javaPICARD")+ " "+ xmx +"-jar " + self.get_exec_path("Picard") + " MarkDuplicates INPUT=$1 METRICS_FILE=$2 OUTPUT=$3" + self.duplication_options + " 2> $4",
self.add_shell_execution(self.get_exec_path("javaPICARD")+ " "+ xmx +" -jar " + self.get_exec_path("Picard") + " MarkDuplicates INPUT=$1 METRICS_FILE=$2 OUTPUT=$3" + self.duplication_options + " 2> $4",
cmd_format='{EXE} {IN} {OUT}', map=True,
inputs=self.bam_files, outputs=[self.duplication_files, self.tmp_bam, self.dupl_stderrs])
......
......@@ -17,6 +17,7 @@
import os
import pickle
import logging
from jflow.component import Component
from ng6.utils import Utils
......@@ -27,6 +28,7 @@ def create_symbolik_link (ids_dump_path, output_list, user_script, username, pas
import os
import pickle
import subprocess
import logging
from ng6.config_reader import NG6ConfigReader
from ng6.utils import Utils, SSH
......@@ -44,7 +46,6 @@ def create_symbolik_link (ids_dump_path, output_list, user_script, username, pas
fh.close()
prefixed_ids = ";".join(prefixed_ids)
src_directories, dest_directories = Utils.get_directories_structure_and_content(ng6_username, data_folder, output_folder, prefixed_ids)
destinations = []
with open(user_script, "w") as us:
......
......@@ -64,7 +64,7 @@ class IlluminaQualityCheck (CasavaNG6Workflow):
bwa = self.add_component("BWA", [indexed_ref, filtered_read1_files, filtered_read2_files, sample_lane_prefixes, "mem", not self.delete_bam], parent = fastqilluminafilter)
# make some statistic on the alignement
alignmentstats = self.add_component("AlignmentStats", [bwa.bam_files, self.is_paired_end(), False], parent = bwa)
alignmentstats = self.add_component("AlignmentStats", [bwa.bam_files, self.is_paired_end(), True], parent = bwa)
if self.is_paired_end():
# process insert sizes
......
......@@ -16,11 +16,18 @@
#
import logging
import os
from glob import glob
from subprocess import Popen, PIPE
from ng6.ng6workflow import NG6Workflow
from ng6.utils import Utils
from _codecs import encode
class OntQualityCheck (NG6Workflow):
def __init__(self, args={}, id=None, function= "process"):
NG6Workflow.__init__(self, args, id, function)
self.log_files = []
def get_name(self):
return 'ont_qc'
......@@ -31,8 +38,8 @@ class OntQualityCheck (NG6Workflow):
logging.getLogger("jflow").debug("Begin OntQualityCheck.define_parameters! ont_qc")
self.add_parameter("compression", "How should the data be compressed once archived", choices= [ "none", "gz", "bz2"], default = "gz")
self.add_parameter("trimming", "use trimming with porechop or not",choices= [ "yes", "no"], default = "no")
self.add_input_file( "summary_file", "Input summary basecalling file", default=None)
self.add_parameter("barcoded", "Barcoded run or not", choices= [ "yes", "no"], default = "no")
self.add_input_file("summary_file", "Input summary basecalling file", default=None)
self.add_parameter("barcoded", "If barcoded run : correspondance file", default = None)
self.add_parameter("fast5dir", "path of the fast5 directory", default = None)
def process(self):
......@@ -43,13 +50,31 @@ class OntQualityCheck (NG6Workflow):
sample_names.append( sample.name )
infiles.append(sample.reads1[0])
# add raw
print(self.get_all_reads())
print(sample_names)
print(self.summary_file)
logging.getLogger("jflow").debug("OntQualityCheck.process! get_all_reads : "+",".join(self.get_all_reads()))
logging.getLogger("jflow").debug("OntQualityCheck.process! sample_name : "+str(sample_names))
logging.getLogger("jflow").debug("OntQualityCheck.process! summary_file : "+str(self.summary_file))
### check for log file
# get current path
cmd = [self.get_exec_path("pwd")]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
exec_path = stdout.decode("utf-8").rsplit()[0]
logging.getLogger("jflow").debug("OntQualityCheck._process.logfile pwd = " + str(exec_path))
# find .log files
for file in glob(exec_path+"/*.log"):
self.log_files.append(file)
logging.getLogger("jflow").debug("OntQualityCheck._process.logfile self.log_files = " + ",".join(self.log_files))
logging.getLogger("jflow").debug("OntQualityCheck._process.logfile exiting")
# add logs
if len(self.log_files) > 0 :
add_log = self.add_component("BasicAnalysis", [self.log_files,"Log Files","Log files generated during primary analysis","-","-","-","gz", "","log.gz"])
addrawfiles = self.add_component("AddRawFiles", [self.runobj, self.get_all_reads(), self.compression])
#nanoplot = self.add_component("Nanoplot", [sample.name,self.get_all_reads(), self.nb_threads, True, "png", self.nanoplot_color,"nanoplot.tar.gz"])
ontstat = self.add_component("Run_stats", [self.summary_file, self.barcoded, sample_names[0]])
ontstat = self.add_component("Run_stats", [self.summary_file, sample_names[0]])
if (self.barcoded != None) or (self.barcoded != "no") :
demultiplexont = self.add_component("Demultiplex_ONT", [self.get_all_reads() , self.barcoded])
if self.trimming == "yes":
trim_porechop = self.add_component("Trim_porechop", [self.get_all_reads() , "discard_middle"])
if self.fast5dir != None:
......
#
# Copyright (C) 2012 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import re, os
from subprocess import Popen, PIPE
import logging
import time
from ng6.analysis import Analysis
from ng6.utils import Utils
from jflow.utils import get_argument_pattern
class Demultiplex_ONT (Analysis):
"""
This module demultiplexes the total fastq of a barcoded ONT run and produces stats
"""
def __init__(self, args={}, id=None, function= "process"):
Analysis.__init__(self, args, id, function)
def define_parameters(self, fastq_files, barcode_file, archivename="DemultiplexONT_archive.tar"):
self.add_input_file_list( "fastq_files", "fastq_files", default=fastq_files, required=True, file_format = 'fastq')
self.add_parameter("barcode_file", "Name of the barcode file", default=barcode_file, required=True , file_format = 'str')
self.add_parameter("archive_name", "Name of the archive", default=archivename, type='str')
self.add_parameter( "run_name", "The name of the run (from total fastq file)", pattern='{basename_woext}', items=self.fastq_files, file_format = "fastq")
def define_analysis(self):
self.name = "DemultiplexONT"
self.description = "Demultiplexes the total fastq of a barcoded ONT run and produces stats"
self.software = "Qcat"
#if self.discard_middle == "discard_middle":
# self.options = "--discard_middle"
def __parse_stat_file (self, stat_file):
logging.getLogger("jflow").debug("Begin DemultiplexONT.__parse_stat_file! file =",stat_file)
"""
Parse the stat file
@param stat_file : the stdout porechop
@return : {"read_trim_start" : read_trim_start, ...}
"""
def post_process(self):
logging.getLogger("jflow").debug("Begin DemultiplexONT.post_process! ont_qc")
# Create dictionary : key = file name or prefix, value = files path
results_files = []
# add header of stats
group = "statsporechop"
self._add_result_element("metrics", "headers", ','.join(["read_trim_start", "read_total_start", "bp_removed_start", "read_trim_end", "read_total_end", "bp_removed_end"]), group)
print(os.listdir(self.output_directory))
for file in os.listdir(self.output_directory):
full_file_path = os.path.join(self.output_directory, file)
logging.getLogger("jflow").debug("Trimporechop.post_process : full_file_path "+full_file_path)
if file.endswith(".fastq"):
logging.getLogger("jflow").debug("Trimporechop.post_process match .fastq : full_file_path "+full_file_path)
results_files.append(full_file_path)
elif file.endswith(".stdout"):
logging.getLogger("jflow").debug("Trimporechop.post_process match .stdout: full_file_path "+full_file_path)
results_files.append(full_file_path)
filename = os.path.basename(file).split(".stdout")[0]
resultlist = self.__parse_stat_file(full_file_path)
read_trim_start = resultlist[0]
read_total_start = resultlist[1]
bp_removed_start = resultlist[2]
read_trim_end = resultlist[3]
read_total_end = resultlist[4]
bp_removed_end = resultlist[5]
#add stats for each fastq file
self._add_result_element("ont_sample", "read_trim_start", read_trim_start,filename)
self._add_result_element("ont_sample", "read_total_start", read_total_start,filename)
self._add_result_element("ont_sample", "bp_removed_start", bp_removed_start,filename)
self._add_result_element("ont_sample", "read_trim_end", read_trim_end,filename)
self._add_result_element("ont_sample", "read_total_end", read_total_end,filename)
self._add_result_element("ont_sample", "bp_removed_end", bp_removed_end,filename)