Skip to content
Snippets Groups Projects
Commit a5f0369e authored by Nathalie Vialaneix's avatar Nathalie Vialaneix
Browse files

fixed various bugs found during CRAN check

parent 6ef60a74
No related branches found
No related tags found
No related merge requests found
......@@ -13,7 +13,6 @@ export(ridgeSIR)
export(sfcb)
export(sparseSIR)
export(tune.ridgeSIR)
import(adjclust)
import(doParallel)
import(foreach)
import(glmnet)
......@@ -22,6 +21,8 @@ importFrom(Boruta,Boruta)
importFrom(CORElearn,attrEval)
importFrom(Matrix,forceSymmetric)
importFrom(RSpectra,eigs)
importFrom(adjclust,adjClust)
importFrom(adjclust,cutree_chac)
importFrom(dplyr,arrange)
importFrom(dplyr,filter)
importFrom(dplyr,inner_join)
......@@ -34,10 +35,14 @@ importFrom(mixOmics,pls)
importFrom(parallel,detectCores)
importFrom(purrr,map_dfc)
importFrom(ranger,ranger)
importFrom(rlang,.data)
importFrom(stats,as.hclust)
importFrom(stats,cor)
importFrom(stats,cov)
importFrom(stats,predict)
importFrom(stats,quantile)
importFrom(stats,sd)
importFrom(tidyr,gather)
importFrom(tidyr,spread)
importFrom(tidyselect,starts_with)
importFrom(utils,head)
......@@ -4,6 +4,7 @@
#' @title Dataset "Truffles"
#' @name truffles
#' @aliases rainfall beta
#'
#' @description Yearly truffles production and corresponding monthly rainfall
#' information of the Perigord black truffle in the Vaucluse (France) between
......
#' @importFrom stats cor
#' @import adjclust
#' @importFrom adjclust adjClust cutree_chac
group_adjclust <- function(dataset) {
# Description #####
......@@ -15,6 +15,7 @@ group_adjclust <- function(dataset) {
hclust_res <- adjclust::adjClust(corr_inputs, type = "similarity")
groups <- lapply(1:ncol(dataset), function(k)
adjclust::cutree_chac(hclust_res, k = k))
hclust_res <- as.hclust(hclust_res)
names(groups) <- 1:ncol(dataset)
groups <- groups[ncol(dataset):1]
......
......@@ -60,8 +60,7 @@ predict_rf <- function(summaries, target, input_groups, selection = "none",
## compute random forests
quality_criteria <- lapply(summaries, compute_rfs, target = target,
importance = importance, repeats = repeats,
seed = seed)
repeats = repeats, seed = seed)
quality_criteria <- Reduce(c, quality_criteria)
## MSE
......@@ -98,7 +97,7 @@ predict_rf <- function(summaries, target, input_groups, selection = "none",
return(out)
}
compute_rfs <- function(summaries, target, importance, repeats, seed) {
compute_rfs <- function(summaries, target, repeats, seed) {
if (!any(is.na(summaries)) && ncol(summaries) > 0) {
curdf <- data.frame(y = target, summaries)
......
#' @import magrittr
#' @importFrom dplyr mutate filter arrange select summarise ungroup inner_join
#' @importFrom tidyselect starts_with
#' @importFrom tidyr gather spread
#' @importFrom purrr map_dfc
#' @importFrom mixOmics pls
#' @importFrom stats sd as.hclust
#' @importFrom rlang .data
summary_pls <- function(dataset, target, groups) {
# Description #####
......@@ -23,7 +26,7 @@ summary_pls <- function(dataset, target, groups) {
summary_data <- lapply(groups, function(cur_clust) {
cur_data <- dataset %>% dplyr::mutate("sim" = 1:nrow(dataset)) %>%
tidyr::gather("Var", "value", -sim)
tidyr::gather("Var", "value", - .data$sim)
var_number <- as.numeric(gsub("Var", "", cur_data$Var))
cur_data$clust <- cur_clust[var_number]
......@@ -37,9 +40,9 @@ summary_pls <- function(dataset, target, groups) {
}
compute_pls_summary <- function(aclust, dataset, target) {
tmp <- dataset %>% dplyr::filter(clust == aclust) %>%
tidyr::spread(Var, value) %>% dplyr::arrange(sim) %>%
dplyr::select(starts_with("Var"))
tmp <- dataset %>% dplyr::filter(.data$clust == aclust) %>%
tidyr::spread(.data$Var, .data$value) %>% dplyr::arrange(.data$sim) %>%
dplyr::select(tidyselect::starts_with("Var"))
if (length(unique(dataset$Var[dataset$clust == aclust])) > 1) {
out <- mixOmics::pls(tmp, target, ncomp = 1)
return(unlist(out$variates$X))
......@@ -65,22 +68,22 @@ summary_basics <- function(dataset, groups) {
summary_data <- lapply(groups, function(cur_clust) {
cur_data <- dataset %>% dplyr::mutate("sim" = 1:nrow(dataset)) %>%
tidyr::gather("Var", "value", -sim)
tidyr::gather("Var", "value", - .data$sim)
var_number <- as.numeric(gsub("Var", "", cur_data$Var))
cur_data$clust <- cur_clust[var_number]
mean_summary <- cur_data %>% dplyr::group_by(sim, clust) %>%
dplyr::summarise(VarMean = mean(value)) %>%
tidyr::spread(clust, value = VarMean, sep = "mean") %>%
mean_summary <- cur_data %>% dplyr::group_by(.data$sim, .data$clust) %>%
dplyr::summarise(VarMean = mean(.data$value)) %>%
tidyr::spread(.data$clust, value = .data$VarMean, sep = "mean") %>%
dplyr::ungroup()
sd_summary <- cur_data %>% dplyr::group_by(sim, clust) %>%
dplyr::summarise(VarSD = sd(value)) %>%
tidyr::spread(clust, value = VarSD, sep = "sd") %>%
sd_summary <- cur_data %>% dplyr::group_by(.data$sim, .data$clust) %>%
dplyr::summarise(VarSD = sd(.data$value)) %>%
tidyr::spread(.data$clust, value = .data$VarSD, sep = "sd") %>%
dplyr::ungroup()
cur_data <- dplyr::inner_join(mean_summary, sd_summary, by = "sim") %>%
dplyr::select(- sim)
dplyr::select(- .data$sim)
# remove column with NAs (for standard deviations)
contains_na <- colSums(apply(cur_data, 2, is.na))
......@@ -109,7 +112,7 @@ summary_cclustofvar <- function(dataset, groups) {
summary_data <- lapply(groups, function(cur_clust) {
cur_data <- dataset %>% dplyr::mutate("sim" = 1:nrow(dataset)) %>%
tidyr::gather("Var", "value", -sim)
tidyr::gather("Var", "value", - .data$sim)
var_number <- as.numeric(gsub("Var", "", cur_data$Var))
cur_data$clust <- cur_clust[var_number]
......@@ -123,8 +126,8 @@ summary_cclustofvar <- function(dataset, groups) {
}
compute_cov_summary <- function(aclust, dataset) {
tmp <- dataset %>% dplyr::filter(clust == aclust) %>%
tidyr::spread(Var, value) %>% dplyr::arrange(sim) %>%
tmp <- dataset %>% dplyr::filter(.data$clust == aclust) %>%
tidyr::spread(.data$Var, .data$value) %>% dplyr::arrange(.data$sim) %>%
dplyr::select(starts_with("Var"))
if (length(unique(dataset$Var[dataset$clust == aclust])) > 1) {
out <- svd(tmp, 1, 1)
......
0% Loading — or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment