Skip to content
Snippets Groups Projects
Commit 2c417387 authored by Skander Hatira's avatar Skander Hatira
Browse files

fixed methylkit script in creating right vector with sample.id and generating all plots for all samples

fixed methylkit script in creating right vector with sample.id and generating all plots for all samples
parent f3f89928
No related branches found
No related tags found
No related merge requests found
......@@ -14,6 +14,6 @@ params:
# NOTE(review): this span is a rendered git diff hunk, not valid YAML on its
# own — the +/- markers were stripped, so the old values (minDiff: 0.4,
# context: ["CG"]) and the new values (minDiff: 0.5, context: ["CpG"]) both
# appear. Presumably the later pair is the committed state — TODO confirm
# against the repository.
overdispersion: NM # default none, NM for overdispersion
qValue: 0.01
minCov: 4
minDiff: 0.4
context: ["CG"]
minDiff: 0.5
context: ["CpG"]
species: "Gala"
......@@ -14,6 +14,6 @@ params:
# NOTE(review): rendered diff hunk of a second config file; old lines
# (minDiff: 0.4, context: ["CG"], species: "") and new lines (minDiff: 25,
# context: ["CpG"], species: "Malus") are interleaved without +/- markers.
# The jump from 0.4 to 25 suggests minDiff switched from a fraction to a
# percentage — TODO confirm against how the R script consumes it.
overdispersion: none # default none, NM for overdispersion
qValue: 0.01
minCov: 4
minDiff: 0.4
context: ["CG"]
species: ""
minDiff: 25
context: ["CpG"]
species: "Malus"
......@@ -86,6 +86,9 @@ rule deduplicate:
# NOTE(review): diff hunk of the Snakemake "deduplicate" rule body; the rule
# header and its input/output sections lie outside this hunk. The commit adds
# the "resources:" section (cpus/mem_mb) to the pre-existing rule. Leading
# indentation was stripped by the page scraper, so this is not directly
# parseable Snakemake as shown.
extra= lambda wildcards : config['params']['deduplicate']['extra']
benchmark:
repeat(outdir+"benchmarks/{sample}-TechRep_{techrep}-BioRep_{biorep}/{sample}-TechRep_{techrep}-BioRep_{biorep}-deduplicate.tsv",benchmark)
resources:
cpus=2,
mem_mb=15000
shell:
# Deduplicates paired-end Bismark alignments, then coordinate-sorts the
# deduplicated BAM with samtools; stderr of the first command goes to {log}.
"deduplicate_bismark -p {input} -o {params.basename} --output_dir {params.outdir} --sam 2> {log};"
"samtools sort {output.dedup} -o {output.sort_bam} ;"
......
......@@ -28,6 +28,7 @@ rule compute_methylkit:
# NOTE(review): diff hunk of the "compute_methylkit" rule's output section;
# the commit adds the cutoffMethylation text output consumed by the R script's
# diffMethPerChr block. The rule header and remaining sections are outside
# this hunk. Indentation was stripped by the scraper.
hyperMethylationBed=outdir+"methylation/{id}-{context}/{id}-{context}-HyperMethylated.bed",
hypoMethylationBed=outdir+"methylation/{id}-{context}/{id}-{context}-HypoMethylated.bed",
overAllMethylationBed=outdir+"methylation/{id}-{context}/{id}-{context}-overallMethylation.bed",
cutoffMethylation=outdir+"methylation/{id}-{context}/{id}-{context}-cutoff.txt"
log:
outdir+"methylation/{id}-{context}/{id}-{context}-log.out"
conda:
......
......@@ -20,15 +20,14 @@ minCov <- snakemake@params[["minCov"]]
# NOTE(review): diff hunk with +/- markers stripped. The hard-coded
# assignments (minDiff <- 0.25, qValue <- 0.01) are the REMOVED lines; the
# snakemake@params reads just below are their replacements. Likewise the two
# sample.id= lines are old/new versions of the same argument: the new one
# gives each sample a unique id (control1..N, treatment1..N) instead of
# repeating "control"/"treatment", which is what the commit message fixes.
overdispersion <- snakemake@params[["overdispersion"]]
testOverdispersion <- snakemake@params[["test"]]
## filters dmr's ###
minDiff <- 0.25 #snakemake@params[["minDiff"]]
qValue <- 0.01 #snakemake@params[["qValue"]]
minDiff <- snakemake@params[["minDiff"]]
qValue <- snakemake@params[["qValue"]]
### create output directory if it doesn't already exist
dir.create(file.path(outdir), showWarnings = FALSE)
### reading bismark's CX reports, by default using the tabix database for minimal memory usage
# methRead call is truncated by the hunk boundary below (more arguments follow
# in the original file); treatment vector encodes 0 = control, 1 = treatment.
readingReports=methRead(files,
sample.id=as.list(c(rep("control",length(control)),rep("treatment",length(treatment)))),
sample.id=as.list(c(rep(sprintf("control%s", c(1:length(control)) )),rep(sprintf("treatment%s", c(1:length(treatment)) )))),
assembly="bismark",
treatment=c(rep(0,length(control)),rep(1,length(treatment))),
context=context,
......@@ -50,18 +49,21 @@ for (i in 1:length(files)) {
# NOTE(review): diff hunk, old and new lines interleaved — not runnable as
# shown. Old version opened the sink/file and the pdf device INSIDE the loop
# (so only the last sample's stats/plots survived); the new version hoists
# file()/sink()/pdf() before the loop and sink()/close()/dev.off() after it,
# producing output for ALL samples — the second fix in the commit message.
getMethylationStats(readingReports[[i]],plot=TRUE,both.strands=FALSE)
dev.off()
}
### coverage stats and plot ###
fileTxt<-file(snakemake@output[["coverageStatsTxt"]])
sink(fileTxt)
for (i in 1:length(files)) {
fileTxt<-file(snakemake@output[["coverageStatsTxt"]])
sink(fileTxt)
writeLines(sprintf("methylation coverage stats for %s",readingReports[[i]]@sample.id))
getCoverageStats(readingReports[[i]],plot=FALSE,both.strands=FALSE)
sink()
close(fileTxt)
pdf(snakemake@output[["coverageStatsPdf"]])
}
sink()
close(fileTxt)
pdf(snakemake@output[["coverageStatsPdf"]])
for (i in 1:length(files)) {
getCoverageStats(readingReports[[i]],plot=TRUE,both.strands=FALSE)
dev.off()
}
dev.off()
### mergin samples ### depends on method bins/base_level
......@@ -110,6 +112,14 @@ hyper
# NOTE(review): diff hunk tail of the hyper-methylation report, followed by
# the newly ADDED per-chromosome cutoff block (diffMethPerChr), whose output
# file matches the cutoffMethylation output the commit adds to the Snakemake
# rule. qvalue.cutoff/meth.cutoff come from the snakemake params read above.
sink()
close(fileTxt)
fileTxt<-file(snakemake@output[["cutoffMethylation"]])
cutoff <- diffMethPerChr(methDiff,plot=FALSE,qvalue.cutoff=qValue, meth.cutoff=minDiff)
sink(fileTxt)
cutoff
sink()
close(fileTxt)
# Export hyper-methylated regions as a headerless, tab-separated BED-like
# table via a GRanges coercion.
my.gr=as(hyper,"GRanges")
df <- data.frame(my.gr)
write.table(df, file=snakemake@output[["hyperMethylationBed"]], quote=F, sep="\t", row.names=F, col.names=F)
......@@ -135,3 +145,4 @@ close(fileTxt)
# NOTE(review): final hunk — exports the overall-methylation regions the same
# way as the hyper/hypo tables above (GRanges -> data.frame -> headerless TSV).
my.gr=as(all,"GRanges")
df <- data.frame(my.gr)
write.table(df, file=snakemake@output[["overAllMethylationBed"]], quote=F, sep="\t", row.names=F, col.names=F)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment