diff --git a/subworkflows/local/hicpro.nf b/subworkflows/local/hicpro.nf index f73fd95082e83f096f3fd3ec12155e8826a94af8..cb6f33cd4d188138beb4bfb3abf1d1f5e5bb65aa 100644 --- a/subworkflows/local/hicpro.nf +++ b/subworkflows/local/hicpro.nf @@ -70,16 +70,16 @@ workflow HICPRO { //************************************** // MERGE AND REMOVE DUPLICATES - if (params.split_fastq){ - ch_valid_pairs = ch_valid_pairs.map{ it -> removeChunks(it)}.groupTuple() - ch_hicpro_stats = HICPRO_MAPPING.out.mapstats.map{it->removeChunks(it)}.groupTuple() - .concat(HICPRO_MAPPING.out.pairstats.map{it->removeChunks(it)}.groupTuple(), - ch_valid_stats.map{it->removeChunks(it)}.groupTuple()) - }else{ - ch_hicpro_stats = HICPRO_MAPPING.out.mapstats.groupTuple() - .concat(HICPRO_MAPPING.out.pairstats.groupTuple(), - ch_valid_stats.groupTuple()) - } + //if (params.split_fastq){ + ch_valid_pairs = ch_valid_pairs.map{ it -> removeChunks(it)}.groupTuple() + ch_hicpro_stats = HICPRO_MAPPING.out.mapstats.map{it->removeChunks(it)}.groupTuple() + .concat(HICPRO_MAPPING.out.pairstats.map{it->removeChunks(it)}.groupTuple(), + ch_valid_stats.map{it->removeChunks(it)}.groupTuple()) + //}else{ + // ch_hicpro_stats = HICPRO_MAPPING.out.mapstats.groupTuple() + // .concat(HICPRO_MAPPING.out.pairstats.groupTuple(), + // ch_valid_stats.groupTuple()) + //} MERGE_VALID_INTERACTION ( ch_valid_pairs diff --git a/subworkflows/local/input_check.nf b/subworkflows/local/input_check.nf index 999d1e3db09043d0800d4f6adbbaa00b50cb75e4..d5cf6b2f3720ee2d2d016ce3aebf0512622e25b6 100644 --- a/subworkflows/local/input_check.nf +++ b/subworkflows/local/input_check.nf @@ -30,6 +30,9 @@ workflow INPUT_CHECK { .splitCsv ( header:true, sep:',' ) .map { create_fastq_channels(it) } .map { it -> [it[0], [it[1], it[2]]]} + .groupTuple(by: [0]) + .flatMap { it -> setMetaChunk(it) } + .collate(2) .set { reads } } @@ -53,3 +56,15 @@ def create_fastq_channels(LinkedHashMap row) { array = [ meta, file(row.fastq_1), file(row.fastq_2) ] 
return array } + +// Set the meta.chunks value in case of technical replicates +def setMetaChunk(row){ + def map = [] + row[1].eachWithIndex() { file,i -> + meta = row[0].clone() + meta.chunks = i + map += [meta, file] + } + return map +} + diff --git a/workflows/hic.nf b/workflows/hic.nf index e71e3e9b5366d08f65bd4e5b7ec446f9cb7a9f22..4592e83b4da49ee8c7ababb8cae9b2cce7f93b39 100644 --- a/workflows/hic.nf +++ b/workflows/hic.nf @@ -143,16 +143,6 @@ Channel.fromPath( params.fasta ) // Info required for completion email and summary def multiqc_report = [] -def setMetaChunk(row){ - def map = [] - row[1].eachWithIndex() { file,i -> - meta = row[0].clone() - meta.chunks = i - map += [meta, file] - } - return map -} - workflow HIC { ch_versions = Channel.empty() @@ -163,19 +153,8 @@ workflow HIC { INPUT_CHECK ( ch_input ) - .reads //.map { // meta, fastq -> // meta.id = meta.id.split('_')[0..-2].join('_') // [ meta, fastq ] } - .groupTuple(by: [0]) - .flatMap { it -> setMetaChunk(it) } - .collate(2) - .set { ch_fastq } - - ch_fastq.view() - //INPUT_CHECK.out.reads.view() + INPUT_CHECK.out.reads.view() // // SUBWORKFLOW: Prepare genome annotation @@ -190,7 +169,7 @@ workflow HIC { // MODULE: Run FastQC // FASTQC ( - ch_fastq + INPUT_CHECK.out.reads ) ch_versions = ch_versions.mix(FASTQC.out.versions) @@ -198,7 +177,7 @@ workflow HIC { // SUB-WORFLOW: HiC-Pro // HICPRO ( - ch_fastq, + INPUT_CHECK.out.reads, PREPARE_GENOME.out.index, PREPARE_GENOME.out.res_frag, PREPARE_GENOME.out.chromosome_size,