diff --git a/conf/modules.config b/conf/modules.config
index 9cef5f9b0dea9b7ed06be24cb17f84e87eaf0687..763ba3f5eb8dfeb64543febbf4c679d5ea7d4fa7 100644
--- a/conf/modules.config
+++ b/conf/modules.config
@@ -7,6 +7,14 @@ process {
     saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
   ]
 
+  withName: 'CUSTOM_DUMPSOFTWAREVERSIONS' {
+    publishDir = [
+      path: { "${params.outdir}/pipeline_info" },
+      mode: 'copy',
+      pattern: '*_versions.yml'
+    ]
+  }
+
   //**********************************************
   // PREPARE_GENOME
   withName: 'BOWTIE2_BUILD' {
@@ -17,7 +25,7 @@ process {
     ]
   }
 
-  withName: 'GET_CHROMSIZE' {
+  withName: 'CUSTOM_GETCHROMSIZES' {
     publishDir = [
       path: { "${params.outdir}/genome" },
       mode: 'copy',
@@ -41,8 +49,9 @@ process {
       mode: 'copy',
       enabled: params.save_aligned_intermediates
     ]
-    ext.prefix = { "${meta.id}_${meta.mates}" }
+    ext.prefix = { params.split_fastq ? "${meta.chunk}_${meta.mates}" : "${meta.id}_${meta.mates}" }
     ext.args = params.bwt2_opts_end2end ?: ''
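+    // NB: ext.args2 is assumed to be forwarded to the samtools view step of BOWTIE2_ALIGN; '-F 4' discards unmapped reads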
+    ext.args2 = !params.dnase ? "-F 4" : ""
   }
 
   withName: 'TRIM_READS' {
@@ -59,8 +68,9 @@ process {
       mode: 'copy',
       enabled: params.save_aligned_intermediates
     ]
-    ext.prefix = { "${meta.id}_${meta.mates}_trimmed" }
+    ext.prefix = { params.split_fastq ? "${meta.chunk}_${meta.mates}_trimmed" : "${meta.id}_${meta.mates}_trimmed" }
     ext.args = params.bwt2_opts_trimmed ?: ''
+    ext.args2 = "" 
   }
 
   withName: 'MERGE_BOWTIE2' {
@@ -69,45 +79,61 @@ process {
       mode: 'copy',
       enabled: params.save_aligned_intermediates
     ]
+    ext.prefix = { params.split_fastq ? "${meta.chunk}_${meta.mates}" : "${meta.id}_${meta.mates}" }
   }
 
   withName: 'COMBINE_MATES' {
     publishDir = [
       path: { "${params.outdir}/hicpro/mapping" },
-      mode: 'copy'
+      mode: 'copy',
+      pattern: '*.bam'
     ]
     ext.args = [
       "-t",
       params.keep_multi ? "--multi" : "",
       params.min_mapq ? "-q ${params.min_mapq}" : ""
     ].join(' ').trim()
+    ext.prefix = { params.split_fastq ? "${meta.chunk}" : "${meta.id}" }
   }
 
   withName: 'GET_VALID_INTERACTION' {
     publishDir = [
       path: { "${params.outdir}/hicpro/valid_pairs" },
-      mode: 'copy'
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy',
+      enabled: params.save_pairs_intermediates
     ]
-    ext.args = [
+    ext.args = { [
       params.min_cis_dist > 0 ? " -d ${params.min_cis_dist}" : '',
       params.min_insert_size > 0 ?  " -s ${params.min_insert_size}" : '',
       params.max_insert_size > 0 ? " -l ${params.max_insert_size}" : '',
       params.min_restriction_fragment_size > 0 ? " -t ${params.min_restriction_fragment_size}" : '',
       params.max_restriction_fragment_size > 0 ? " -m ${params.max_restriction_fragment_size}" : '',
       params.save_interaction_bam ? " --sam" : ''
-    ].join(' ').trim()
+    ].join(' ').trim() }
+  }
+
+  withName: 'GET_VALID_INTERACTION_DNASE' {
+    publishDir = [
+      path: { "${params.outdir}/hicpro/valid_pairs" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy',
+      enabled: params.save_pairs_intermediates
+    ]
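+    // forward the minimum cis distance (params.min_cis_dist) to mapped_2hic_dnase.py via '-d'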
+    ext.args = { params.min_cis_dist > 0 ? " -d ${params.min_cis_dist}" : "" }
   }
 
   withName: 'MERGE_VALID_INTERACTION' {
     publishDir = [
       [
-        path: { "${params.outdir}/hicpro/stats/" },
+        path: { "${params.outdir}/hicpro/stats/${meta.id}" },
         mode: 'copy',
 	pattern: "*stat"
       ],
       [
         path: { "${params.outdir}/hicpro/valid_pairs" },
         mode: 'copy',
+	saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
 	pattern: "*Pairs"
       ]
     ]
@@ -115,14 +141,17 @@ process {
 
   withName: 'MERGE_STATS' {
     publishDir = [
-      path: { "${params.outdir}/hicpro/stats" },
-      mode: 'copy'
+      path: { "${params.outdir}/hicpro/stats/${meta.id}" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy',
+      pattern: "*stat"
     ]
   }
 
   withName: 'HICPRO2PAIRS' {
     publishDir = [
       path: { "${params.outdir}/hicpro/valid_pairs/pairix/" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
       mode: 'copy'
     ]
   }
@@ -143,6 +172,17 @@ process {
     ]
   }
 
+  //*****************************************
+  // QUALITY METRICS
+
+  withName: 'HIC_PLOT_DIST_VS_COUNTS' {
+    publishDir = [
+      path: { "${params.outdir}/distance_decay/" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy'
+    ]
+  }
+
   //*****************************************
   // COOLER
 
@@ -177,6 +217,7 @@ process {
     publishDir = [
       enabled: false
     ]
+    ext.prefix = { "${cool.baseName}" }
     ext.args = "--one-based-ids --balanced --na-rep 0"
   }
 
@@ -191,7 +232,7 @@ process {
       [
         path: { "${params.outdir}/contact_maps/txt/" },
         mode: 'copy',
-        pattern: "*_norm.txt"
+        pattern: "*_balanced.txt"
       ]
     ]
   }
@@ -209,6 +250,11 @@ process {
   // COMPARTMENTS
 
   withName: 'CALL_COMPARTMENTS' {
+    publishDir = [
+      path: { "${params.outdir}/compartments/" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy'
+    ]
     ext.args = '--bigwig'
   }
 
@@ -216,11 +262,20 @@ process {
   // TADS
 
   withName: 'INSULATION' {
+    publishDir = [
+      path: { "${params.outdir}/tads/insulation/" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy'
+    ]
     ext.args = '15 25 50 --window-pixels'
   }
 
   withName: 'HIC_FIND_TADS' {
+    publishDir = [
+      path: { "${params.outdir}/tads/hicExplorer" },
+      saveAs: { filename -> filename.equals('versions.yml') ? null : filename },
+      mode: 'copy'
+    ]
     ext.args = '--correctForMultipleTesting fdr'
   }
-
 }
\ No newline at end of file
diff --git a/modules.json b/modules.json
index b7bfabedbf2ff583704d754b3432519ab329d941..58d81058caad44803f3f87571bb8c15fbe880793 100644
--- a/modules.json
+++ b/modules.json
@@ -18,10 +18,13 @@
             "cooler/zoomify": {
                 "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
             },
-            "fastqc": {
+            "custom/dumpsoftwareversions": {
                 "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
             },
-            "multiqc": {
+            "custom/getchromsizes": {
+                "git_sha": "950700bcdc0e9a2b6883d40d2c51c6fc435cd714"
+            },
+            "fastqc": {
                 "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
             }
         }
diff --git a/modules/local/cooler/balance.nf b/modules/local/cooler/balance.nf
index ce6e3bb5951edd34aab6aa4aa743ec2ee92a837f..37e808d0e5a1909775f9b6039a42eb069df7fe07 100644
--- a/modules/local/cooler/balance.nf
+++ b/modules/local/cooler/balance.nf
@@ -8,20 +8,24 @@ process COOLER_BALANCE {
         'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }"
 
     input:
-    tuple val(meta), val(resolution), path(cool)
+    tuple val(meta), path(cool), val(resolution)
 
     output:
-    tuple val(meta), val(resolution), path("*_norm.cool"), emit: cool
-    path "versions.yml"                             , emit: versions
+    tuple val(meta), path("${prefix}.${extension}"), emit: cool
+    path "versions.yml"                            , emit: versions
 
     script:
     def args = task.ext.args ?: ''
+    prefix = task.ext.prefix ?: "${cool.baseName}_balanced"
+    suffix = resolution ? "::$resolution" : ""
+    extension = cool.getExtension()
     """
-    cp ${cool} ${cool.baseName}_norm.cool
     cooler balance \\
         $args \\
         -p ${task.cpus} \\
-        ${cool.baseName}_norm.cool
+        ${cool}${suffix}
+
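+    ## 'cooler balance' writes the weight column into the input file itself; a renamed copy is then exposed as the module output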
+    cp ${cool} ${prefix}.${extension}
 
     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
diff --git a/modules/local/cooler/makebins.nf b/modules/local/cooler/makebins.nf
index 0dc4dee4140b89c4a2eb07ee666f9ad936725a9a..49e18db719d8e9b97738d9156b2c6dc3015df6b1 100644
--- a/modules/local/cooler/makebins.nf
+++ b/modules/local/cooler/makebins.nf
@@ -11,8 +11,8 @@ process COOLER_MAKEBINS {
   tuple path(chromsizes), val(cool_bin)
 
   output:
-  path ("*.bed")     , emit: bed
-  path "versions.yml", emit: versions
+  path ("*.bed")       , emit: bed
+  path ("versions.yml"), emit: versions
 
   script:
   def args = task.ext.args ?: ''
diff --git a/modules/local/cooltools/eigs-cis.nf b/modules/local/cooltools/eigs-cis.nf
index 74cd265fa48906c116a64947939bcdd6905eea2e..a8b97b9bb18008734f7b5d50950a5ebf597fc191 100644
--- a/modules/local/cooltools/eigs-cis.nf
+++ b/modules/local/cooltools/eigs-cis.nf
@@ -6,7 +6,7 @@ process CALL_COMPARTMENTS {
   label 'process_medium'
 
   input:
-  tuple val(meta), val(res), path(cool)
+  tuple val(meta), path(cool), val(resolution)
   path(fasta) 
   path(chrsize) 
 
@@ -18,7 +18,7 @@ process CALL_COMPARTMENTS {
   def args = task.ext.args ?: ''
   def prefix = task.ext.prefix ?: "${meta.id}"
   """
-  cooltools genome binnify --all-names ${chrsize} ${res} > genome_bins.txt
+  cooltools genome binnify --all-names ${chrsize} ${resolution} > genome_bins.txt
   cooltools genome gc genome_bins.txt ${fasta} > genome_gc.txt 
   cooltools eigs-cis ${args} -o ${prefix}_compartments ${cool}
 
diff --git a/modules/local/get_restriction_fragments.nf b/modules/local/get_restriction_fragments.nf
deleted file mode 100644
index 93258b7528e120505dcdf1cb4120833fcf93aa50..0000000000000000000000000000000000000000
--- a/modules/local/get_restriction_fragments.nf
+++ /dev/null
@@ -1,16 +0,0 @@
-process GET_RESTRICTION_FRAGMENTS {
-  tag "$res_site"
-  label 'process_low'
-  
-   input:
-   path fasta 
-   val(res_site)
-
-   output:
-   path "*.bed", emit: results
-
-   script:
-   """
-   digest_genome.py -r ${res_site} -o restriction_fragments.bed ${fasta}
-   """
-}
diff --git a/modules/local/hicpro/bowtie2_merge.nf b/modules/local/hicpro/bowtie2_merge.nf
index 0dfd25c26b2d99a61b82fb9019d22dbce1a4e7e8..b020b70fc990b4b18ea44bdb14ab723bc95bc82b 100644
--- a/modules/local/hicpro/bowtie2_merge.nf
+++ b/modules/local/hicpro/bowtie2_merge.nf
@@ -8,9 +8,10 @@ process MERGE_BOWTIE2{
   output:
   tuple val(meta), path("${prefix}_bwt2merged.bam"), emit: bam
   tuple val(meta), path("${prefix}.mapstat"), emit: stats
+  path("versions.yml"), emit: versions
 
   script:
-  prefix = meta.id + "_" + meta.mates
+  prefix = task.ext.prefix ?: "${meta.id}"
   tag = meta.mates
   """
   samtools merge -@ ${task.cpus} \\
@@ -33,5 +34,10 @@ process MERGE_BOWTIE2{
   samtools view -c -F 4 ${bam1} >> ${prefix}.mapstat
   echo -n "local_${tag}\t"  >> ${prefix}.mapstat
   samtools view -c -F 4 ${bam2} >> ${prefix}.mapstat
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
+  END_VERSIONS
   """
 }
diff --git a/modules/local/hicpro/combine_mates.nf b/modules/local/hicpro/combine_mates.nf
index 2f5f2d097565664d7162ce940ead3473b67d6665..a137b78e44591fa897e516fe0351f8a324273868 100644
--- a/modules/local/hicpro/combine_mates.nf
+++ b/modules/local/hicpro/combine_mates.nf
@@ -8,11 +8,17 @@ process COMBINE_MATES {
   output:
   tuple val(meta), path("*bwt2pairs.bam"), emit:bam
   tuple val(meta), path("*.pairstat"), optional:true, emit:stats
+  path("versions.yml"), emit: versions
 
   script:
-  prefix = meta.id
+  prefix = task.ext.prefix ?: "${meta.id}"
   def args = task.ext.args ?: ''
   """
   mergeSAM.py -f ${bam[0]} -r ${bam[1]} -o ${prefix}_bwt2pairs.bam ${args}
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    python: \$(echo \$(python --version 2>&1) | sed 's/Python //')
+  END_VERSIONS
   """
 }
diff --git a/modules/local/hicpro/dnase_mapping_stats.nf b/modules/local/hicpro/dnase_mapping_stats.nf
index c11f3434e7de826ae318626441f1d095bd770150..aa78141665832af8214b574e133e3b396d235c5d 100644
--- a/modules/local/hicpro/dnase_mapping_stats.nf
+++ b/modules/local/hicpro/dnase_mapping_stats.nf
@@ -1,35 +1,25 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName } from './functions'
+process MAPPING_STATS_DNASE {
+  tag "$sample = $bam"
+  label 'process_medium'
 
-params.options = [:]
-options    = initOptions(params.options)
+  input:
+  tuple val(meta), path(bam) 
 
-process dnase_mapping_stats{
-      tag "$sample = $bam"
-      label 'process_medium'
-      publishDir "${params.outdir}/hicpro/mapping",  mode: params.publish_dir_mode, 
-   	      saveAs: { filename -> if (params.save_aligned_intermediates && filename.endsWith("stat")) "stats/$filename"
-	                else if (params.save_aligned_intermediates) filename}
+  output:
+  tuple val(meta), path(bam), emit:bam
+  tuple val(meta), path("${prefix}.mapstat"), emit:stats
 
-      input:
-      tuple val(prefix), path(bam) 
-
-      output:
-      tuple val(sample), path(bam), emit:bwt2_merged_bam
-      tuple val(oname), path("${prefix}.mapstat"), emit:all_mapstat
-
-      script:
-      sample = prefix.toString() - ~/(_R1|_R2)/
-      tag = prefix.toString() =~/_R1/ ? "R1" : "R2"
-      oname = prefix.toString() - ~/(\.[0-9]+)$/
-      """
-      echo "## ${prefix}" > ${prefix}.mapstat
-      echo -n "total_${tag}\t" >> ${prefix}.mapstat
-      samtools view -c ${bam} >> ${prefix}.mapstat
-      echo -n "mapped_${tag}\t" >> ${prefix}.mapstat
-      samtools view -c -F 4 ${bam} >> ${prefix}.mapstat
-      echo -n "global_${tag}\t" >> ${prefix}.mapstat
-      samtools view -c -F 4 ${bam} >> ${prefix}.mapstat
-      echo -n "local_${tag}\t0"  >> ${prefix}.mapstat
-      """
+  script:
+  prefix = meta.id + "_" + meta.mates
+  tag = meta.mates
+  """
+  echo "## ${prefix}" > ${prefix}.mapstat
+  echo -n "total_${tag}\t" >> ${prefix}.mapstat
+  samtools view -c ${bam} >> ${prefix}.mapstat
+  echo -n "mapped_${tag}\t" >> ${prefix}.mapstat
+  samtools view -c -F 4 ${bam} >> ${prefix}.mapstat
+  echo -n "global_${tag}\t" >> ${prefix}.mapstat
+  samtools view -c -F 4 ${bam} >> ${prefix}.mapstat
+  echo -n "local_${tag}\t0"  >> ${prefix}.mapstat
+  """
 }
diff --git a/modules/local/hicpro/get_restriction_fragments.nf b/modules/local/hicpro/get_restriction_fragments.nf
new file mode 100644
index 0000000000000000000000000000000000000000..7b304e4dfc411f9a4066b05052c851af891a6b3d
--- /dev/null
+++ b/modules/local/hicpro/get_restriction_fragments.nf
@@ -0,0 +1,22 @@
+process GET_RESTRICTION_FRAGMENTS {
+  tag "$res_site"
+  label 'process_low'
+  
+  input:
+  path fasta 
+  val(res_site)
+
+  output:
+  path "*.bed", emit: results
+  path("versions.yml"), emit: versions
+
+  script:
+  """
+  digest_genome.py -r ${res_site} -o restriction_fragments.bed ${fasta}
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    python: \$(echo \$(python --version 2>&1) | sed 's/Python //')
+  END_VERSIONS
+  """
+}
diff --git a/modules/local/hicpro/get_valid_interaction.nf b/modules/local/hicpro/get_valid_interaction.nf
index 385857675ffdcb5c20e2186ebf5f8cdb50ce248c..d8ea17f7541aceea09d677cae49e0843d550b13a 100644
--- a/modules/local/hicpro/get_valid_interaction.nf
+++ b/modules/local/hicpro/get_valid_interaction.nf
@@ -8,16 +8,14 @@ process GET_VALID_INTERACTION {
 
   output:
   tuple val(meta), path("*.validPairs"), emit:valid_pairs
-  tuple val(meta), path("*.DEPairs"), emit:de_pairs
-  tuple val(meta), path("*.SCPairs"), emit:sc_pairs
-  tuple val(meta), path("*.REPairs"), emit:re_pairs
-  tuple val(meta), path("*.FiltPairs"), emit:filt_pairs
-  tuple val(meta), path("*RSstat"), emit:stats
+  tuple val(meta), path("*.DEPairs"), optional:true, emit:de_pairs
+  tuple val(meta), path("*.SCPairs"), optional: true, emit:sc_pairs
+  tuple val(meta), path("*.REPairs"), optional: true, emit:re_pairs
+  tuple val(meta), path("*.FiltPairs"), optional: true, emit:filt_pairs
+  tuple val(meta), path("*RSstat"), optional: true, emit:stats
+  path("versions.yml"), emit: versions
 
   script:
-  if (params.split_fastq){
-     sample = sample.toString() - ~/(\.[0-9]+)$/
-  }
   def args = task.ext.args ?: ''
   """
   mapped_2hic_fragments.py \\
@@ -27,5 +25,10 @@ process GET_VALID_INTERACTION {
     ${args}
 
   sort -k2,2V -k3,3n -k5,5V -k6,6n -o ${bam.baseName}.validPairs ${bam.baseName}.validPairs
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    python: \$(echo \$(python --version 2>&1) | sed 's/Python //')
+  END_VERSIONS
   """
 }
diff --git a/modules/local/hicpro/get_valid_interaction_dnase.nf b/modules/local/hicpro/get_valid_interaction_dnase.nf
index 62c3ea4dfd8a1d86881a49d9078e0997151fb9a8..4bb40b8f1e5f081acd0044bd2bb78ef2561a01cf 100644
--- a/modules/local/hicpro/get_valid_interaction_dnase.nf
+++ b/modules/local/hicpro/get_valid_interaction_dnase.nf
@@ -1,33 +1,27 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName } from './functions'
+process GET_VALID_INTERACTION_DNASE {
+  tag "$meta.id"
+  label 'process_low'
+  
+  input:
+  tuple val(meta), path(bam) 
 
-params.options = [:]
-options    = initOptions(params.options)
+  output:
+  tuple val(meta), path("*.validPairs"), emit:valid_pairs
+  tuple val(meta), path("*RSstat"), optional: true, emit:stats
+  path("versions.yml"), emit: versions
 
-process get_valid_interaction_dnase{
-      tag "$sample"
-      label 'process_low'
-      publishDir "${params.outdir}/hicpro/valid_pairs", mode: params.publish_dir_mode,
-   	      saveAs: {filename -> if (filename.endsWith("RSstat")) "stats/$filename" 
-                                   else filename}
+  script:
+  def args = task.ext.args ?: ''
+  """
+  mapped_2hic_dnase.py \\
+    -r ${bam} \\
+    ${args}
 
-      input:
-      tuple val(sample), path(pe_bam) 
+  sort -k2,2V -k3,3n -k5,5V -k6,6n -o ${bam.baseName}.validPairs ${bam.baseName}.validPairs
 
-      output:
-      tuple val(sample), path("*.validPairs"), emit:valid_pairs
-      tuple val(sample), path("*.validPairs"), emit:valid_pairs_4cool
-      tuple val(sample), path("*RSstat"), emit:all_rsstat
-
-      script:
-      if (params.split_fastq){
-         sample = sample.toString() - ~/(\.[0-9]+)$/
-      }
-
-      opts = params.min_cis_dist > 0 ? " -d ${params.min_cis_dist}" : ''
-      prefix = pe_bam.toString() - ~/.bam/
-      """
-      mapped_2hic_dnase.py -r ${pe_bam} ${opts}
-      sort -k2,2V -k3,3n -k5,5V -k6,6n -o ${prefix}.validPairs ${prefix}.validPairs
-      """
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    python: \$(echo \$(python --version 2>&1) | sed 's/Python //')
+  END_VERSIONS
+  """
 }
diff --git a/modules/local/hicpro/hicpro2pairs.nf b/modules/local/hicpro/hicpro2pairs.nf
index 698225da80c9474ef898c8c3238c3173eccab40b..99e4a6bd18ff61b674873b5053808b7297de17b4 100644
--- a/modules/local/hicpro/hicpro2pairs.nf
+++ b/modules/local/hicpro/hicpro2pairs.nf
@@ -8,6 +8,7 @@ process HICPRO2PAIRS {
 
   output:
   tuple val(meta), path("*.pairs.gz"), path("*.pairs.gz.px2"), emit: pairs
+  path("versions.yml"), emit: versions
 
   script:
   prefix = "${meta.id}"
@@ -16,5 +17,10 @@ process HICPRO2PAIRS {
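+  ## reorder the validPairs columns (id chr1 pos1 strand1 chr2 pos2 strand2) into the .pairs layout (id chr1 pos1 chr2 pos2 strand1 strand2)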
   awk '{OFS="\t";print \$1,\$2,\$3,\$5,\$6,\$4,\$7}' $vpairs > ${prefix}_contacts.pairs
   sort -k2,2 -k4,4 -k3,3n -k5,5n ${prefix}_contacts.pairs | bgzip -c > ${prefix}_contacts.pairs.gz
   pairix -f ${prefix}_contacts.pairs.gz
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    pairix: \$(echo \$(pairix 2>&1 | grep Version | sed -e 's/Version: //'))
+  END_VERSIONS
   """
 }
diff --git a/modules/local/hicpro/merge_stats.nf b/modules/local/hicpro/merge_stats.nf
index 2ef759d1c9aa8b97652aef20e404dc3b65d492ec..8fb0eee32cca429b749d0d5f4394fbba36065c3e 100644
--- a/modules/local/hicpro/merge_stats.nf
+++ b/modules/local/hicpro/merge_stats.nf
@@ -5,16 +5,22 @@ process MERGE_STATS {
   tuple val(meta), path(fstat) 
 
   output:
-  path("stats/"), emit:mqc_mstats
-  path("*stat"), emit:all_mstats
+  path("${meta.id}/"), emit: mqc
+  path("*.{mmapstat,mpairstat,mRSstat}"), emit: stats
+  path("versions.yml"), emit:versions
 
   script:
-  if ( (fstat =~ /.mapstat/) ){ ext = "mmapstat" }
+  if ( (fstat =~ /.mapstat/) ){ ext = "${meta.mates}.mmapstat" }
   if ( (fstat =~ /.pairstat/) ){ ext = "mpairstat" }
   if ( (fstat =~ /.RSstat/) ){ ext = "mRSstat" }
   """
+  mkdir -p ${meta.id}
   merge_statfiles.py -f ${fstat} > ${meta.id}.${ext}
-  mkdir -p stats/${meta.id}
-  cp ${meta.id}.${ext} stats/${meta.id}/
+  cp *${ext} ${meta.id}/
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    python: \$(echo \$(python --version 2>&1) | sed 's/Python //')
+  END_VERSIONS
   """
 }
diff --git a/modules/local/hicpro/merge_valid_interaction.nf b/modules/local/hicpro/merge_valid_interaction.nf
index efc29ba1f65f87edb211acb8252b1f563ae15280..69163057aa2c29d3c3428275f99b80a85bc71362 100644
--- a/modules/local/hicpro/merge_valid_interaction.nf
+++ b/modules/local/hicpro/merge_valid_interaction.nf
@@ -7,14 +7,15 @@ process MERGE_VALID_INTERACTION {
 
    output:
    tuple val(meta), path("*.allValidPairs"), emit: valid_pairs
-   path("stats/"), emit:mqc
-   tuple val(meta), path("*mergestat"), emit:stats
+   path("${meta.id}/"), emit:mqc
+   path("*mergestat"), emit:stats
+   path("versions.yml"), emit: versions
 
    script:
    prefix = meta.id
    if ( ! params.keep_dups ){
    """
-   mkdir -p stats/${prefix}
+   mkdir -p ${prefix}
 
    ## Sort valid pairs and remove read pairs with same starts (i.e duplicated read pairs)
    sort -S 50% -k2,2V -k3,3n -k5,5V -k6,6n -m ${vpairs} | \\
@@ -29,8 +30,13 @@ process MERGE_VALID_INTERACTION {
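+   ## classify the merged pairs as trans, cis short-range (<=20kb) or cis long-range (>20kb)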
    awk 'BEGIN{cis=0;trans=0;sr=0;lr=0} \$2 == \$5{cis=cis+1; d=\$6>\$3?\$6-\$3:\$3-\$6; if (d<=20000){sr=sr+1}else{lr=lr+1}} \$2!=\$5{trans=trans+1}END{print "trans_interaction\\t"trans"\\ncis_interaction\\t"cis"\\ncis_shortRange\\t"sr"\\ncis_longRange\\t"lr}' ${prefix}.allValidPairs >> ${prefix}_allValidPairs.mergestat
  
    ## For MultiQC
-   mkdir -p stats/${prefix} 
-   cp ${prefix}_allValidPairs.mergestat stats/${prefix}/
+   mkdir -p ${prefix}
+   cp ${prefix}_allValidPairs.mergestat ${prefix}/
+
+   cat <<-END_VERSIONS > versions.yml
+   "${task.process}":
+     sort: \$(echo \$(sort --version 2>&1 | head -1 | awk '{print \$NF}' 2>&1))
+   END_VERSIONS
    """
    }else{
    """
@@ -44,8 +50,13 @@ process MERGE_VALID_INTERACTION {
    awk 'BEGIN{cis=0;trans=0;sr=0;lr=0} \$2 == \$5{cis=cis+1; d=\$6>\$3?\$6-\$3:\$3-\$6; if (d<=20000){sr=sr+1}else{lr=lr+1}} \$2!=\$5{trans=trans+1}END{print "trans_interaction\\t"trans"\\ncis_interaction\\t"cis"\\ncis_shortRange\\t"sr"\\ncis_longRange\\t"lr}' ${prefix}.allValidPairs >> ${prefix}_allValidPairs.mergestat
 
    ## For MultiQC
-   mkdir -p stats/${prefix}
-   cp ${prefix}_allValidPairs.mergestat stats/${prefix}/
+   mkdir -p ${prefix}
+   cp ${prefix}_allValidPairs.mergestat ${prefix}/
+
+   cat <<-END_VERSIONS > versions.yml
+   "${task.process}":
+     sort: \$(echo \$(sort --version 2>&1 | head -1 | awk '{print \$NF}' 2>&1))
+   END_VERSIONS
    """
    }
 }
diff --git a/modules/local/hicpro/run_ice.nf b/modules/local/hicpro/run_ice.nf
index 521cb3171b187fb14cefc95cd4caa5a404f79f47..7b4ee63a7cdc38059eb38a804cb17262dbf120db 100644
--- a/modules/local/hicpro/run_ice.nf
+++ b/modules/local/hicpro/run_ice.nf
@@ -8,6 +8,7 @@ process ICE_NORMALIZATION{
   output:
   tuple val(meta), val(res), path("*iced.matrix"), path(bed), emit:maps
   path ("*.biases"), emit:bias
+  path("versions.yml"), emit: versions
 
   script:
   prefix = rmaps.toString() - ~/(\.matrix)?$/
@@ -16,5 +17,11 @@ process ICE_NORMALIZATION{
       --results_filename ${prefix}_iced.matrix \
       --filter_high_counts_perc ${params.ice_filter_high_count_perc} \
       --max_iter ${params.ice_max_iter} --eps ${params.ice_eps} --remove-all-zeros-loci --output-bias 1 --verbose 1 ${rmaps}
-   """
+
+  cat <<-END_VERSIONS > versions.yml
+  "${task.process}":
+    python: \$(echo \$(python --version 2>&1) | sed 's/Python //')
+    iced: \$(python -c "import iced; print(iced.__version__)")
+  END_VERSIONS
+  """
 }
diff --git a/modules/local/hicpro/trim_reads.nf b/modules/local/hicpro/trim_reads.nf
index 5eafc43df06f9672e6734902d019642bc24dcad3..19edff14c18ccc17fe5804ced19158ab7429bf17 100644
--- a/modules/local/hicpro/trim_reads.nf
+++ b/modules/local/hicpro/trim_reads.nf
@@ -7,14 +7,18 @@ process TRIM_READS {
   val(motif)
 
   output:
-  tuple val(meta), path("*trimmed.fastq"), emit: fastq
+  tuple val(meta), path("*trimmed.fastq.gz"), emit: fastq
   path("versions.yml") , emit: versions
 
   script:
   """
-  cutsite_trimming --fastq $reads \\
-                   --cutsite ${motif} \\
-                   --out ${meta.id}_trimmed.fastq
+  zcat ${reads} > tmp.fastq
+  cutsite_trimming --fastq tmp.fastq \\
+                   --cutsite ${motif[0]} \\
+                   --out ${reads.simpleName}_trimmed.fastq
+  gzip ${reads.simpleName}_trimmed.fastq
+  /bin/rm -f tmp.fastq
+
   cat <<-END_VERSIONS > versions.yml
   "${task.process}":
       python: \$(echo \$(python --version 2>&1) | sed 's/^Python//; s/ .*\$//')
diff --git a/modules/nf-core/modules/multiqc/main.nf b/modules/local/multiqc.nf
similarity index 87%
rename from modules/nf-core/modules/multiqc/main.nf
rename to modules/local/multiqc.nf
index 1264aac1ebfc902ae6633862472b412cd929656a..8fa76a16f250b1c5e2f1394508985b4a0b3ab2ab 100644
--- a/modules/nf-core/modules/multiqc/main.nf
+++ b/modules/local/multiqc.nf
@@ -7,7 +7,11 @@ process MULTIQC {
         'quay.io/biocontainers/multiqc:1.12--pyhdfd78af_0' }"
 
     input:
-    path multiqc_files
+    path multiqc_config
+    path (mqc_custom_config)
+    path workflow_summary
+    path ('fastqc/*')
+    path ('input_*/*')
 
     output:
     path "*multiqc_report.html", emit: report
diff --git a/modules/local/split_cooler_dump.nf b/modules/local/split_cooler_dump.nf
index 3ce1aaaed39fce9a1e573f439a802baec5925a25..38e7357e2222b4216fa346fbf12fad3004b1e209 100644
--- a/modules/local/split_cooler_dump.nf
+++ b/modules/local/split_cooler_dump.nf
@@ -11,10 +11,10 @@ process SPLIT_COOLER_DUMP {
 
     script:
     def args = task.ext.args ?: ''
-    prefix = bedpe.toString() - ~/(\_norm)?.bedpe$/
+    prefix = bedpe.toString() - ~/(\_balanced)?.bedpe$/
     """
     cat ${bedpe} | awk '{OFS="\t"; print \$1,\$2,\$3}' > ${prefix}_raw.txt
-    cat ${bedpe} | awk '{OFS="\t"; print \$1,\$2,\$4}' > ${prefix}_norm.txt
+    cat ${bedpe} | awk '{OFS="\t"; print \$1,\$2,\$4}' > ${prefix}_balanced.txt
 
     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
diff --git a/modules/local/get_chromsize.nf b/modules/local/trash/get_chromsize.nf
similarity index 100%
rename from modules/local/get_chromsize.nf
rename to modules/local/trash/get_chromsize.nf
diff --git a/modules/local/get_software_versions.nf b/modules/local/trash/get_software_versions.nf
similarity index 86%
rename from modules/local/get_software_versions.nf
rename to modules/local/trash/get_software_versions.nf
index be0d2b514ba664db2a19122f6ac60bb439b9705e..730316d59f951590334fb60a7ee3f0d29b35cd54 100644
--- a/modules/local/get_software_versions.nf
+++ b/modules/local/trash/get_software_versions.nf
@@ -1,8 +1,3 @@
-// Import generic module functions
-include { initOptions; saveFiles; getSoftwareName } from './functions'
-
-params.options = [:]
-options    = initOptions(params.options)
 
 process GET_SOFTWARE_VERSIONS {
     publishDir "${params.outdir}",
diff --git a/modules/local/output_documentation.nf b/modules/local/trash/output_documentation.nf
similarity index 100%
rename from modules/local/output_documentation.nf
rename to modules/local/trash/output_documentation.nf
diff --git a/modules/nf-core/modules/cooler/cload/main.nf b/modules/nf-core/modules/cooler/cload/main.nf
index 8602fb44862bceb4d6f398404318a10c08c99ad6..2dd29b7234be38112205d3332f4126495030581d 100644
--- a/modules/nf-core/modules/cooler/cload/main.nf
+++ b/modules/nf-core/modules/cooler/cload/main.nf
@@ -12,7 +12,7 @@ process COOLER_CLOAD {
     path chromsizes
 
     output:
-    tuple val(meta), val(cool_bin), path("*.cool"), emit: cool
+    tuple val(meta), path("*.cool"), val(cool_bin), emit: cool
     path "versions.yml"                           , emit: versions
 
     when:
diff --git a/modules/nf-core/modules/cooler/cload/main.nf~ b/modules/nf-core/modules/cooler/cload/main.nf~
deleted file mode 100644
index 52964b8dd931d30606bb69a122d1529c15470279..0000000000000000000000000000000000000000
--- a/modules/nf-core/modules/cooler/cload/main.nf~
+++ /dev/null
@@ -1,40 +0,0 @@
-process COOLER_CLOAD {
-    tag "$meta.id"
-    label 'process_high'
-
-    conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null)
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' :
-        'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }"
-
-    input:
-    tuple val(meta), path(pairs), path(index)
-    val cool_bin
-    path chromsizes
-
-    output:
-    tuple val(meta), val(cool_bin), path("*.cool"), emit: cool
-    path "versions.yml"                           , emit: versions
-
-    when:
-    task.ext.when == null || task.ext.when
-
-    script:
-    def args = task.ext.args ?: ''
-    def prefix = task.ext.prefix ?: "${meta.id}"
-    def nproc  = args.contains('pairix') || args.contains('tabix')? "--nproc $task.cpus" : ''
-
-    """
-    cooler cload \\
-        $args \\
-        $nproc \\
-        ${chromsizes}:${cool_bin} \\
-        $pairs \\
-        ${prefix}.${cool_bin}.cool
-
-    cat <<-END_VERSIONS > versions.yml
-    "${task.process}":
-        cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //')
-    END_VERSIONS
-    """
-}
diff --git a/modules/nf-core/modules/cooler/dump/main.nf b/modules/nf-core/modules/cooler/dump/main.nf
index 5aa98a1723013a0cf19ef409e8ec7e47e2d3b46d..0c01c8dbeab356f7a277426d4fd1cad36d946b50 100644
--- a/modules/nf-core/modules/cooler/dump/main.nf
+++ b/modules/nf-core/modules/cooler/dump/main.nf
@@ -8,7 +8,7 @@ process COOLER_DUMP {
         'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }"
 
     input:
-    tuple val(meta), val(resolution), path(cool)
+    tuple val(meta), path(cool), val(resolution)
 
     output:
     tuple val(meta), path("*.bedpe"), emit: bedpe
@@ -19,8 +19,8 @@ process COOLER_DUMP {
 
     script:
     def args = task.ext.args ?: ''
-    def prefix = task.ext.prefix ?: "${cool.baseName}"
-    def suffix   = resolution     ? "::/resolutions/$resolution" : ""
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def suffix = resolution ? "::$resolution" : ""
     """
     cooler dump \\
         $args \\
diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf b/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf
new file mode 100644
index 0000000000000000000000000000000000000000..327d5100560d84eb0020b60acf0db2922497991b
--- /dev/null
+++ b/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf
@@ -0,0 +1,24 @@
+process CUSTOM_DUMPSOFTWAREVERSIONS {
+    label 'process_low'
+
+    // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
+    conda (params.enable_conda ? "bioconda::multiqc=1.11" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' :
+        'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }"
+
+    input:
+    path versions
+
+    output:
+    path "software_versions.yml"    , emit: yml
+    path "software_versions_mqc.yml", emit: mqc_yml
+    path "versions.yml"             , emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    template 'dumpsoftwareversions.py'
+}
diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml
new file mode 100644
index 0000000000000000000000000000000000000000..60b546a012c457b5459490e732e6ac9be6979db1
--- /dev/null
+++ b/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml
@@ -0,0 +1,34 @@
+name: custom_dumpsoftwareversions
+description: Custom module used to dump software versions within the nf-core pipeline template
+keywords:
+  - custom
+  - version
+tools:
+  - custom:
+      description: Custom module used to dump software versions within the nf-core pipeline template
+      homepage: https://github.com/nf-core/tools
+      documentation: https://github.com/nf-core/tools
+      licence: ["MIT"]
+input:
+  - versions:
+      type: file
+      description: YML file containing software versions
+      pattern: "*.yml"
+
+output:
+  - yml:
+      type: file
+      description: Standard YML file containing software versions
+      pattern: "software_versions.yml"
+  - mqc_yml:
+      type: file
+      description: MultiQC custom content YML file containing software versions
+      pattern: "software_versions_mqc.yml"
+  - versions:
+      type: file
+      description: File containing software versions
+      pattern: "versions.yml"
+
+authors:
+  - "@drpatelh"
+  - "@grst"
diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
new file mode 100644
index 0000000000000000000000000000000000000000..fda9fc3f809f496767975f3df49d0148d98611be
--- /dev/null
+++ b/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+
+import yaml
+import platform
+from textwrap import dedent
+
+
+def _make_versions_html(versions):
+    html = [
+        dedent(
+            """\\
+            <style>
+            #nf-core-versions tbody:nth-child(even) {
+                background-color: #f2f2f2;
+            }
+            </style>
+            <table class="table" style="width:100%" id="nf-core-versions">
+                <thead>
+                    <tr>
+                        <th> Process Name </th>
+                        <th> Software </th>
+                        <th> Version  </th>
+                    </tr>
+                </thead>
+            """
+        )
+    ]
+    for process, tmp_versions in sorted(versions.items()):
+        html.append("<tbody>")
+        for i, (tool, version) in enumerate(sorted(tmp_versions.items())):
+            html.append(
+                dedent(
+                    f"""\\
+                    <tr>
+                        <td><samp>{process if (i == 0) else ''}</samp></td>
+                        <td><samp>{tool}</samp></td>
+                        <td><samp>{version}</samp></td>
+                    </tr>
+                    """
+                )
+            )
+        html.append("</tbody>")
+    html.append("</table>")
+    return "\\n".join(html)
+
+
+versions_this_module = {}
+versions_this_module["${task.process}"] = {
+    "python": platform.python_version(),
+    "yaml": yaml.__version__,
+}
+
+with open("$versions") as f:
+    versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) #| versions_this_module
+
+# aggregate versions by the module name (derived from fully-qualified process name)
+versions_by_module = {}
+for process, process_versions in versions_by_process.items():
+    module = process.split(":")[-1]
+    try:
+        assert versions_by_module[module] == process_versions, (
+            "We assume that software versions are the same between all modules. "
+            "If you see this error-message it means you discovered an edge-case "
+            "and should open an issue in nf-core/tools. "
+        )
+    except KeyError:
+        versions_by_module[module] = process_versions
+
+versions_by_module["Workflow"] = {
+    "Nextflow": "$workflow.nextflow.version",
+    "$workflow.manifest.name": "$workflow.manifest.version",
+}
+
+versions_mqc = {
+    "id": "software_versions",
+    "section_name": "${workflow.manifest.name} Software Versions",
+    "section_href": "https://github.com/${workflow.manifest.name}",
+    "plot_type": "html",
+    "description": "are collected at run time from the software output.",
+    "data": _make_versions_html(versions_by_module),
+}
+
+with open("software_versions.yml", "w") as f:
+    yaml.dump(versions_by_module, f, default_flow_style=False)
+with open("software_versions_mqc.yml", "w") as f:
+    yaml.dump(versions_mqc, f, default_flow_style=False)
+
+with open("versions.yml", "w") as f:
+    yaml.dump(versions_this_module, f, default_flow_style=False)
diff --git a/modules/nf-core/modules/custom/getchromsizes/main.nf b/modules/nf-core/modules/custom/getchromsizes/main.nf
new file mode 100644
index 0000000000000000000000000000000000000000..bbcfa9becd254aa507089a34c93ccbf35f78aaa4
--- /dev/null
+++ b/modules/nf-core/modules/custom/getchromsizes/main.nf
@@ -0,0 +1,32 @@
+process CUSTOM_GETCHROMSIZES {
+    tag "$fasta"
+    label 'process_low'
+
+    conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
+        'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
+
+    input:
+    path fasta
+
+    output:
+    path '*.sizes'      , emit: sizes
+    path '*.fai'        , emit: fai
+    path  "versions.yml", emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    """
+    samtools faidx $fasta
+    cut -f 1,2 ${fasta}.fai > ${fasta}.sizes
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        custom: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
+    END_VERSIONS
+    """
+}
diff --git a/modules/nf-core/modules/custom/getchromsizes/meta.yml b/modules/nf-core/modules/custom/getchromsizes/meta.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ee6c257185a41746801ae1cccaaef1d31379ddd7
--- /dev/null
+++ b/modules/nf-core/modules/custom/getchromsizes/meta.yml
@@ -0,0 +1,38 @@
+name: custom_getchromsizes
+description: Generates a text file of chromosome sizes and a FASTA index file
+keywords:
+  - fasta
+  - chromosome
+  - indexing
+tools:
+  - samtools:
+      description: Tools for dealing with SAM, BAM and CRAM files
+      homepage: http://www.htslib.org/
+      documentation: http://www.htslib.org/doc/samtools.html
+      tool_dev_url: https://github.com/samtools/samtools
+      doi: 10.1093/bioinformatics/btp352
+      licence: ["MIT"]
+
+input:
+  - fasta:
+      type: file
+      description: FASTA file
+      pattern: "*.{fasta}"
+
+output:
+  - sizes:
+      type: file
+      description: File containing chromosome lengths
+      pattern: "*.{sizes}"
+  - fai:
+      type: file
+      description: FASTA index file
+      pattern: "*.{fai}"
+  - versions:
+      type: file
+      description: File containing software version
+      pattern: "versions.yml"
+
+authors:
+  - "@tamara-hodgetts"
+  - "@chris-cheshire"
diff --git a/modules/nf-core/modules/multiqc/meta.yml b/modules/nf-core/modules/multiqc/meta.yml
deleted file mode 100644
index 6fa891efc2c607fa6e1d081171b1bf2a710443ab..0000000000000000000000000000000000000000
--- a/modules/nf-core/modules/multiqc/meta.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: MultiQC
-description: Aggregate results from bioinformatics analyses across many samples into a single report
-keywords:
-  - QC
-  - bioinformatics tools
-  - Beautiful stand-alone HTML report
-tools:
-  - multiqc:
-      description: |
-        MultiQC searches a given directory for analysis logs and compiles a HTML report.
-        It's a general use tool, perfect for summarising the output from numerous bioinformatics tools.
-      homepage: https://multiqc.info/
-      documentation: https://multiqc.info/docs/
-      licence: ["GPL-3.0-or-later"]
-input:
-  - multiqc_files:
-      type: file
-      description: |
-        List of reports / files recognised by MultiQC, for example the html and zip output of FastQC
-output:
-  - report:
-      type: file
-      description: MultiQC report file
-      pattern: "multiqc_report.html"
-  - data:
-      type: dir
-      description: MultiQC data dir
-      pattern: "multiqc_data"
-  - plots:
-      type: file
-      description: Plots created by MultiQC
-      pattern: "*_data"
-  - versions:
-      type: file
-      description: File containing software versions
-      pattern: "versions.yml"
-authors:
-  - "@abhi18av"
-  - "@bunop"
-  - "@drpatelh"
diff --git a/nextflow.config b/nextflow.config
index 1a413b7365ae1222599a798317cd6adb4d049a01..c6ed56da47d4429d9c1005ee34acc1ce126f557c 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -59,8 +59,8 @@ params {
     max_restriction_fragment_size = 0
     min_insert_size = 0
     max_insert_size = 0
-    save_nonvalid_pairs = false
-
+    save_pairs_intermediates = false
+
     // Dnase Hi-C
     dnase = false
     min_cis_dist = 0
diff --git a/subworkflows/local/cooler.nf b/subworkflows/local/cooler.nf
index 5c61d8726ef45569d44bf13064e8959c1a0662f7..7e49aa423775be48da82fbbda4a5f93adf5013a4 100644
--- a/subworkflows/local/cooler.nf
+++ b/subworkflows/local/cooler.nf
@@ -1,6 +1,6 @@
 /*
  * COOLER MAIN WORKFLOW
- * INPUT : pair text file with the list of valid interaction
+ * INPUT : .pairs text file with the list of valid interactions
  * OUTPUT : cooler files
  */
 
@@ -12,6 +12,14 @@ include { COOLER_BALANCE } from '../../modules/local/cooler/balance'
 include { SPLIT_COOLER_DUMP } from '../../modules/local/split_cooler_dump'
 include { COOLER_MAKEBINS } from '../../modules/local/cooler/makebins'
 
+// Add resolution in meta
+def addResolution(row) {
+  def meta = [:]
+  meta.id = row[0].id
+  meta.resolution = row[2]
+  return [meta, row[1], row[2]]
+}
+
 workflow COOLER {
 
   take:
@@ -40,8 +48,13 @@ workflow COOLER {
   )
   ch_versions = ch_versions.mix(COOLER_CLOAD.out.versions)
 
+  // Add resolution in meta
+  COOLER_CLOAD.out.cool
+    .map{ it -> addResolution(it) }
+    .set{ ch_cool }
+
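+  // single-resolution .cool files need no '::<resolution>' URI suffix, hence the empty string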
   COOLER_BALANCE(
-    COOLER_CLOAD.out.cool
+    ch_cool.map{[it[0], it[1], ""]}
   )
   ch_versions = ch_versions.mix(COOLER_BALANCE.out.versions)
 
@@ -51,11 +64,11 @@ workflow COOLER {
   }else{
     ch_res_zoomify = params.res_zoomify
   }
-  COOLER_CLOAD.out.cool
+  ch_cool
     .combine(ch_res_zoomify)
-    .filter{ it [1] == it[3] }
-    .map{it->[it[0], it[2]]}
-    .set{ch_cool_zoomify}
+    .filter{ it[2] == it[3] }
+    .map{ it->[it[0], it[1]] }
+    .set{ ch_cool_zoomify }
 
   COOLER_ZOOMIFY(
     ch_cool_zoomify
@@ -67,12 +80,12 @@ workflow COOLER {
   // [meta, cool] / resolution
 
   COOLER_DUMP(
-    COOLER_BALANCE.out.cool.map{[it[0], "", it[2]]}
+    COOLER_BALANCE.out.cool.map{[it[0], it[1], ""]}
   )
   ch_versions = ch_versions.mix(COOLER_DUMP.out.versions)
 
   //COOLER_DUMP(
-  //  COOLER_ZOOMIFY.out.mcool.combine(cool_bins).map{it->[it[0], it[2], it[1]]}
+  //  COOLER_ZOOMIFY.out.mcool.combine(cool_bins).map{it->[it[0], it[1], it[2]]}
   //)
 
   SPLIT_COOLER_DUMP(
diff --git a/subworkflows/local/hicpro.nf b/subworkflows/local/hicpro.nf
index 3869d4df9de7538ad3b2fad09296e1cf4e9d9538..f73fd95082e83f096f3fd3ec12155e8826a94af8 100644
--- a/subworkflows/local/hicpro.nf
+++ b/subworkflows/local/hicpro.nf
@@ -1,31 +1,39 @@
 /*
- * HICPRO MAIN WORKFLOW
- * INPUT :  paired-end sequencing data
- * OUTPUT : .pairs file with the list of valid interaction
+ * HICPRO
+ * MAIN WORKFLOW
+ * From the raw sequencing reads to the list of valid interactions
  */
   
 include { HICPRO_MAPPING } from './hicpro_mapping'
 include { GET_VALID_INTERACTION } from '../../modules/local/hicpro/get_valid_interaction'
+include { GET_VALID_INTERACTION_DNASE } from '../../modules/local/hicpro/get_valid_interaction_dnase'
 include { MERGE_VALID_INTERACTION } from '../../modules/local/hicpro/merge_valid_interaction'
 include { MERGE_STATS } from '../../modules/local/hicpro/merge_stats'
 include { HICPRO2PAIRS } from '../../modules/local/hicpro/hicpro2pairs'
 include { BUILD_CONTACT_MAPS } from '../../modules/local/hicpro/build_contact_maps'
 include { ICE_NORMALIZATION } from '../../modules/local/hicpro/run_ice'
 
+// Remove meta.chunk
+def removeChunks(row){
+  meta = row[0].clone()
+  meta.remove('chunk')
+  return [meta, row[1]]
+}
+
 workflow HICPRO {
 
   take:
   reads // [meta, read1, read2]
-  index
-  fragments
-  chrsize
-  ligation_site
-  map_res
+  index // path
+  fragments // path
+  chrsize // path
+  ligation_site // value
+  map_res // values
 
   main:
   ch_versions = Channel.empty()
 
-  // fastq to paired-end bam
+  // Fastq to paired-end bam
   HICPRO_MAPPING(
     reads,
     index,
@@ -33,55 +41,94 @@ workflow HICPRO {
   )
   ch_versions = ch_versions.mix(HICPRO_MAPPING.out.versions)
 
-  // get valid interaction
-  GET_VALID_INTERACTION (
-    HICPRO_MAPPING.out.bam,
-    fragments    
-  )
-  //TODO ch_versions = ch_versions.mix(GET_VALID_INTERACTION.out.versions)
+  //***************************************
+  // DIGESTION PROTOCOLS
+
+  if (!params.dnase){
+    GET_VALID_INTERACTION (
+      HICPRO_MAPPING.out.bam,
+      fragments.collect()
+    )
+    ch_versions = ch_versions.mix(GET_VALID_INTERACTION.out.versions)
+    ch_valid_pairs = GET_VALID_INTERACTION.out.valid_pairs
+    ch_valid_stats = GET_VALID_INTERACTION.out.stats
+
+  }else{
+
+  //****************************************
+  // DNASE-LIKE PROTOCOLS
+
+    GET_VALID_INTERACTION_DNASE (
+      HICPRO_MAPPING.out.bam
+    )
+    ch_versions = ch_versions.mix(GET_VALID_INTERACTION_DNASE.out.versions)
+    ch_valid_pairs = GET_VALID_INTERACTION_DNASE.out.valid_pairs
+    ch_valid_stats = GET_VALID_INTERACTION_DNASE.out.stats
+  }
+
+
+  //**************************************
+  // MERGE AND REMOVE DUPLICATES
   
-  // merge valid interactions and remove duplicates
+  if (params.split_fastq){
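+    // pool the chunks of the same sample back together before duplicate removal and stats merging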
+    ch_valid_pairs = ch_valid_pairs.map{ it -> removeChunks(it) }.groupTuple()
+    ch_hicpro_stats = HICPRO_MAPPING.out.mapstats.map{ it -> removeChunks(it) }.groupTuple()
+                        .concat(HICPRO_MAPPING.out.pairstats.map{ it -> removeChunks(it) }.groupTuple(),
+                                ch_valid_stats.map{ it -> removeChunks(it) }.groupTuple())
+  }else{
+    ch_hicpro_stats = HICPRO_MAPPING.out.mapstats.groupTuple()
+                        .concat(HICPRO_MAPPING.out.pairstats.groupTuple(),
+                                ch_valid_stats.groupTuple())
+  }
+
   MERGE_VALID_INTERACTION (
-    GET_VALID_INTERACTION.out.valid_pairs
+    ch_valid_pairs
   )
-  // TODO ch_versions = ch_versions.mix(MERGE_VALID_INTERACTION.out.versions)
+  ch_versions = ch_versions.mix(MERGE_VALID_INTERACTION.out.versions)
 
-  // merge stats
-  HICPRO_MAPPING.out.mapstats.groupTuple()
-    .concat(HICPRO_MAPPING.out.pairstats.groupTuple(),
-            GET_VALID_INTERACTION.out.stats.groupTuple())
-    .view()
-    .set{ ch_hicpro_stats }
 
   MERGE_STATS(
     ch_hicpro_stats
   )
-  //TODO ch_versions = ch_versions.mix(MERGE_STATS.out.versions)
+  ch_versions = ch_versions.mix(MERGE_STATS.out.versions)
 
-  // convert to pairs
+  //***************************************
+  // CONVERT TO PAIRS
   HICPRO2PAIRS (
     MERGE_VALID_INTERACTION.out.valid_pairs,
-    chrsize
+    chrsize.collect()
   )
-  //TODO ch_versions = ch_versions.mix(HICPRO2PAIRS.out.versions)
+  ch_versions = ch_versions.mix(HICPRO2PAIRS.out.versions)
+
+  //***************************************
+  // CONTACT MAPS
+  
+  if (params.hicpro_maps){
 
-  if (params.hicpro_maps){
-    
     //build_contact_maps
     BUILD_CONTACT_MAPS(
       MERGE_VALID_INTERACTION.out.valid_pairs.combine(map_res),
       chrsize.collect()
     )
-    //TODO ch_versions = ch_versions.mix(BUILD_CONTACT_MAPS.out.versions)
-
+    ch_hicpro_raw_maps = BUILD_CONTACT_MAPS.out.maps
+
     // run_ice
     ICE_NORMALIZATION(
       BUILD_CONTACT_MAPS.out.maps
     )
-    //TODO ch_versions = ch_versions.mix(ICE_NORMALIZATION.out.versions)
+    ch_hicpro_iced_maps = ICE_NORMALIZATION.out.maps
+    ch_versions = ch_versions.mix(ICE_NORMALIZATION.out.versions)
+
+  }else{
+    ch_hicpro_raw_maps = Channel.empty()
+    ch_hicpro_iced_maps = Channel.empty()
   }
 
   emit:
   versions = ch_versions
   pairs = HICPRO2PAIRS.out.pairs
+  mqc = MERGE_VALID_INTERACTION.out.mqc.concat(MERGE_STATS.out.mqc)
+  raw_maps = ch_hicpro_raw_maps
+  iced_maps = ch_hicpro_iced_maps
 }
diff --git a/subworkflows/local/hicpro_mapping.nf b/subworkflows/local/hicpro_mapping.nf
index 0add95df88cd7fdc4bf6ce5538969cf221fcb116..3529f4ef247b5d9ad76d48a021e0023a8a948061 100644
--- a/subworkflows/local/hicpro_mapping.nf
+++ b/subworkflows/local/hicpro_mapping.nf
@@ -1,28 +1,20 @@
+/*
+ * HiC-Pro mapping
+ * From the raw sequencing reads to a paired-end bam file
+ */
+
 include { BOWTIE2_ALIGN } from '../../modules/nf-core/modules/bowtie2/align/main'
 include { TRIM_READS } from '../../modules/local/hicpro/trim_reads'
 include { BOWTIE2_ALIGN as BOWTIE2_ALIGN_TRIMMED } from '../../modules/nf-core/modules/bowtie2/align/main'
 include { MERGE_BOWTIE2 } from '../../modules/local/hicpro/bowtie2_merge'
 include { COMBINE_MATES} from '../../modules/local/hicpro/combine_mates'
-
-//include { BOWTIE2_ON_TRIMED_READS } from '../../modules/local/bowtie2_on_trimmed_reads' addParams( options: params.options )
-//include { BOWTIE2_MERGE_MAPPING_STEPS } from '../../modules/local/bowtie2_merge_mapping_steps' addParams( options: params.options )
-//include { DNASE_MAPPING_STATS } from '../../modules/local/dnase_mapping_stats' addParams( options: params.options )
-//include { COMBINE_MATES } from '../../modules/local/combine_mates' addParams( options: params.options )
-//include { GET_VALID_INTERACTION } from '../../modules/local/get_valid_interaction' addParams( options: params.options )
-//include { GET_VALID_INTERACTION_DNASE } from '../../modules/local/get_valid_interaction_dnase' addParams( options: params.options )
-//include { REMOVE_DUPLICATES } from '../../modules/local/remove_duplicates' addParams( options: params.options )
-//include { MERGE_STATS } from '../../modules/local/merge_stats' addParams( options: params.options )
-//include { BUILD_CONTACT_MAPS } from '../../modules/local/build_contact_maps' addParams( options: params.options )
-//include { RUN_ICE } from '../../modules/local/run_ice' addParams( options: params.options )
-//include { CONVERTS_TO_PAIRS } from '../../modules/local/convert_to_pairs' addParams( options: params.options )
+include { MAPPING_STATS_DNASE } from '../../modules/local/hicpro/dnase_mapping_stats'
 
 // Paired-end to Single-end 
 def pairToSingle(row, mates) {
-  def meta = [:]
-  meta.id = row[0].id
+  def meta = row[0].clone()
   meta.single_end = true
   meta.mates = mates
-  def array = []
   if (mates == "R1") {
     return [meta, [ row[1][0]] ]
   }else if (mates == "R2"){
@@ -30,26 +22,28 @@ def pairToSingle(row, mates) {
   }
 }
 
+// Single-end to Paired-end
 def singleToPair(row){
-  def meta = [:]
-  meta.id = row[0].id
+  def meta = row[0].clone()
+  meta.remove('mates')
   meta.single_end = false
   return [ meta, row[1] ]
 }
 
+
 workflow HICPRO_MAPPING {
 
   take:
   reads // [meta, read1, read2]
-  index
-  ligation_site
+  index // path
+  ligation_site // value
 
   main:
   ch_versions = Channel.empty()
  
-  // Align each mates separetly
-  ch_reads_r1 = reads.map{it -> pairToSingle(it,"R1")}
-  ch_reads_r2 = reads.map{pairToSingle(it,"R2")}
+  // Align each mate separately and add the mate information in [meta]
+  ch_reads_r1 = reads.map{ it -> pairToSingle(it,"R1") }
+  ch_reads_r2 = reads.map{ it -> pairToSingle(it,"R2") }
   ch_reads = ch_reads_r1.concat(ch_reads_r2)
 
   // bowtie2
@@ -60,47 +54,62 @@ workflow HICPRO_MAPPING {
   )
   ch_versions = ch_versions.mix(BOWTIE2_ALIGN.out.versions)
 
-  // trim reads
-  TRIM_READS(
-    BOWTIE2_ALIGN.out.fastq,
-    ligation_site.collect()
-  )
-  ch_versions = ch_versions.mix(TRIM_READS.out.versions)
+  if (!params.dnase){
+    // trim reads
+    TRIM_READS(
+      BOWTIE2_ALIGN.out.fastq,
+      ligation_site.collect()
+    )
+    ch_versions = ch_versions.mix(TRIM_READS.out.versions)
 
-  // bowtie2 on trimmed reads
-  BOWTIE2_ALIGN_TRIMMED(
-    TRIM_READS.out.fastq,
-    index.collect(),
-    Channel.value(false).collect()
-  )
-  ch_versions = ch_versions.mix(BOWTIE2_ALIGN_TRIMMED.out.versions)
+    // bowtie2 on trimmed reads
+    BOWTIE2_ALIGN_TRIMMED(
+      TRIM_READS.out.fastq,
+      index.collect(),
+      Channel.value(false).collect()
+    )
+    ch_versions = ch_versions.mix(BOWTIE2_ALIGN_TRIMMED.out.versions)
 
-  // Merge the two mapping steps
-  BOWTIE2_ALIGN.out.bam
-    .combine(BOWTIE2_ALIGN_TRIMMED.out.bam, by:[0])
-    .view()
-    .set { ch_bowtie2_align}
+    // Merge the two mapping steps
+    BOWTIE2_ALIGN.out.bam
+      .combine(BOWTIE2_ALIGN_TRIMMED.out.bam, by:[0])
+      .set { ch_bowtie2_align }
 
-  MERGE_BOWTIE2(
-    ch_bowtie2_align
-  )
-  //TODO ch_versions = ch_versions.mix(MERGE_BOWTIE2.out.versions)
+    MERGE_BOWTIE2(
+      ch_bowtie2_align
+    )
+    ch_versions = ch_versions.mix(MERGE_BOWTIE2.out.versions)
+    ch_mapping_stats = MERGE_BOWTIE2.out.stats
+    
+    // Combine mates
+    MERGE_BOWTIE2.out.bam
+      .map { singleToPair(it) }
+      .groupTuple()
+      .set { ch_bams }
+
+  }else{
+
+    MAPPING_STATS_DNASE(
+      BOWTIE2_ALIGN.out.bam
+    )
+    ch_mapping_stats = MAPPING_STATS_DNASE.out.stats
+
+    BOWTIE2_ALIGN.out.bam
+      .map { singleToPair(it) }
+      .groupTuple()
+      .set { ch_bams }
+  }
 
-  // Combine mates
-  MERGE_BOWTIE2.out.bam
-    .map { singleToPair(it) }
-    .groupTuple()
-    .view()
-    .set {ch_bams}
 
   COMBINE_MATES (
     ch_bams
   )
-  //TODO ch_versions = ch_versions.mix(COMBINE_MATES.out.versions)
+  ch_versions = ch_versions.mix(COMBINE_MATES.out.versions)
 
   emit:
   versions = ch_versions
   bam = COMBINE_MATES.out.bam
-  mapstats = MERGE_BOWTIE2.out.stats
+  mapstats = ch_mapping_stats
   pairstats = COMBINE_MATES.out.stats
 }
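Editor's note (not part of the patch): the two meta-map helpers introduced above can be exercised outside the pipeline. A minimal standalone Groovy sketch, with a hypothetical sample tuple and file names:

    // Round-trip of the mate-splitting helpers. clone() preserves extra keys
    // such as meta.chunk, and remove('mates') drops the mate tag before the
    // reads are re-paired. The meta map and file names are hypothetical.
    def pairToSingle(row, mates) {
      def meta = row[0].clone()
      meta.single_end = true
      meta.mates = mates
      return mates == "R1" ? [meta, [row[1][0]]] : [meta, [row[1][1]]]
    }
    def singleToPair(row) {
      def meta = row[0].clone()
      meta.remove('mates')
      meta.single_end = false
      return [meta, row[1]]
    }
    def row = [[id: 'sample1', single_end: false], ['s1_R1.fastq.gz', 's1_R2.fastq.gz']]
    assert pairToSingle(row, 'R2')[0].mates == 'R2'
    assert !singleToPair(pairToSingle(row, 'R1'))[0].containsKey('mates')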
diff --git a/subworkflows/local/input_check.nf b/subworkflows/local/input_check.nf
index 267ea4f68ae40eee9ecdd3a4b181fe008896af84..c8582b000832a6be271236592e589d4329e84ee8 100644
--- a/subworkflows/local/input_check.nf
+++ b/subworkflows/local/input_check.nf
@@ -9,10 +9,27 @@ workflow INPUT_CHECK {
     samplesheet // file: /path/to/samplesheet.csv
 
     main:
-    SAMPLESHEET_CHECK ( samplesheet )
+
+    if (params.split_fastq){
+
+      SAMPLESHEET_CHECK ( samplesheet )
+        .splitCsv ( header:true, sep:',' )
+        .map { create_fastq_channels(it) }
+        .splitFastq( by: params.fastq_chunks_size, pe:true, file: true, compress:true)
+        .map { it ->
+          def meta = it[0].clone()
+          meta.chunk = it[1].baseName - ~/.fastq(.gz)?/
+          return [meta, [it[1], it[2]]]
+        }
+        .set { reads }
+
+    }else{
+      SAMPLESHEET_CHECK ( samplesheet )
         .splitCsv ( header:true, sep:',' )
         .map { create_fastq_channels(it) }
+        .map { it -> [it[0], [it[1], it[2]]] }
         .set { reads }
+    }
 
     emit:
     reads // channel: [ val(meta), [ reads ] ]
@@ -21,7 +38,7 @@ workflow INPUT_CHECK {
 // Function to get list of [ meta, [ fastq_1, fastq_2 ] ]
 def create_fastq_channels(LinkedHashMap row) {
   def meta = [:]
-  meta.id         = row.sample
+  meta.id = row.sample
   meta.single_end = false
 
   def array = []
@@ -31,6 +48,6 @@ def create_fastq_channels(LinkedHashMap row) {
   if (!file(row.fastq_2).exists()) {
     exit 1, "ERROR: Please check input samplesheet -> Read 2 FastQ file does not exist!\n${row.fastq_2}"
   }
-  array = [ meta, [ file(row.fastq_1), file(row.fastq_2) ] ]
+  array = [ meta, file(row.fastq_1), file(row.fastq_2) ]
   return array
 }
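Editor's note (not part of the patch): in the split_fastq branch above, Nextflow's splitFastq(..., file: true) writes each chunk to its own file, and meta.chunk is derived by stripping the fastq extension from the chunk's baseName. A standalone Groovy sketch of that string operation, with a hypothetical chunk name:

    // baseName on a .fastq.gz path drops only the trailing .gz, so the pattern
    // below removes the remaining .fastq. String minus Pattern removes the
    // first match; the chunk name is hypothetical.
    def name = 'sample1.1.fastq'          // baseName of sample1.1.fastq.gz
    def chunk = name - ~/.fastq(.gz)?/
    assert chunk == 'sample1.1'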
diff --git a/subworkflows/local/prepare_genome.nf b/subworkflows/local/prepare_genome.nf
index 2c4a226dfb42bd3a61cbb491680f8ab549ac9ac7..d9ee28235425c9ea98d83edeaf4663acdd165539 100644
--- a/subworkflows/local/prepare_genome.nf
+++ b/subworkflows/local/prepare_genome.nf
@@ -3,8 +3,8 @@
  */
 
 include { BOWTIE2_BUILD } from '../../modules/nf-core/modules/bowtie2/build/main'
-include { GET_CHROMSIZE } from '../../modules/local/get_chromsize'
-include { GET_RESTRICTION_FRAGMENTS } from '../../modules/local/get_restriction_fragments'
+include { CUSTOM_GETCHROMSIZES } from '../../modules/nf-core/modules/custom/getchromsizes/main'
+include { GET_RESTRICTION_FRAGMENTS } from '../../modules/local/hicpro/get_restriction_fragments'
 
 workflow PREPARE_GENOME {
 
@@ -22,6 +22,7 @@ workflow PREPARE_GENOME {
       fasta
     )
     ch_index = BOWTIE2_BUILD.out.index
+    ch_versions = ch_versions.mix(BOWTIE2_BUILD.out.versions)
   }else{
     Channel.fromPath( params.bwt2_index , checkIfExists: true)
            .ifEmpty { exit 1, "Genome index: Provided index not found: ${params.bwt2_index}" }
@@ -31,10 +32,11 @@ workflow PREPARE_GENOME {
   //***************************************
   // Chromosome size
   if(!params.chromosome_size){
-    GET_CHROMSIZE(
+    CUSTOM_GETCHROMSIZES(
       fasta
     )
-    ch_chromsize = GET_CHROMSIZE.out.results
+    ch_chromsize = CUSTOM_GETCHROMSIZES.out.sizes
+    ch_versions = ch_versions.mix(CUSTOM_GETCHROMSIZES.out.versions)
   }else{
     Channel.fromPath( params.chromosome_size , checkIfExists: true)
            .into {ch_chromsize} 
@@ -48,13 +50,17 @@ workflow PREPARE_GENOME {
       restriction_site
     )
     ch_resfrag = GET_RESTRICTION_FRAGMENTS.out.results
-  }else{
+    ch_versions = ch_versions.mix(GET_RESTRICTION_FRAGMENTS.out.versions)
+  }else if (!params.dnase){
      Channel.fromPath( params.restriction_fragments, checkIfExists: true )
             .set {ch_resfrag}
+  }else{
+    ch_resfrag = Channel.empty()
   }
 
   emit:
   index = ch_index
   chromosome_size = ch_chromsize
   res_frag = ch_resfrag
+  versions = ch_versions
 }
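Editor's note (not part of the patch): the restriction-fragment logic above now branches three ways: compute the fragments from the FASTA, load them from --restriction_fragments, or stay empty in DNase mode. A condensed sketch of this optional-input idiom; the first guard is an assumption, since the hunk starts inside the if-block:

    // Hypothetical condensed form of the branch above; the first condition is assumed.
    if (params.restriction_site && !params.restriction_fragments) {
      ch_resfrag = GET_RESTRICTION_FRAGMENTS.out.results       // computed from FASTA
    } else if (!params.dnase) {
      ch_resfrag = Channel.fromPath(params.restriction_fragments, checkIfExists: true)
    } else {
      ch_resfrag = Channel.empty()                             // DNase: no fragments needed
    }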
diff --git a/subworkflows/local/tads.nf b/subworkflows/local/tads.nf
index 496e8d6ee13fbd63ae7162ca295b2689e22b84e0..5e7f4fc305e55e6707a2a320026b91292613da71 100644
--- a/subworkflows/local/tads.nf
+++ b/subworkflows/local/tads.nf
@@ -24,4 +24,5 @@ workflow TADS {
 
   emit:
   tads = ch_tads
+  versions = ch_versions
 }
\ No newline at end of file
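Editor's note (not part of the patch): the tads.nf change above completes a pattern applied throughout this patch: every module and subworkflow now emits a versions channel, which the top-level workflow mixes and hands to CUSTOM_DUMPSOFTWAREVERSIONS. A schematic sketch of the flow, using subworkflow names from this pipeline:

    // Each .out.versions channel is mixed into one stream, de-duplicated,
    // concatenated into a single YAML and published to pipeline_info.
    ch_versions = Channel.empty()
    ch_versions = ch_versions.mix(PREPARE_GENOME.out.versions)
    ch_versions = ch_versions.mix(HICPRO.out.versions)
    CUSTOM_DUMPSOFTWAREVERSIONS(
      ch_versions.unique().collectFile(name: 'collated_versions.yml')
    )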
diff --git a/workflows/hic.nf b/workflows/hic.nf
index 0204db7e0c10dc8574bf45f8827dd7f1be602b92..4f705df847bf3b6ddbb9a486e562f85858e05e23 100644
--- a/workflows/hic.nf
+++ b/workflows/hic.nf
@@ -6,30 +6,38 @@
 
 def summary_params = NfcoreSchema.paramsSummaryMap(workflow, params)
 
+//*****************************************
 // Validate input parameters
-//WorkflowHic.initialise(params, log)
+WorkflowHic.initialise(params, log)
 
-// TODO nf-core: Add all file path parameters for the pipeline to the list below
 // Check input path parameters to see if they exist
-//def checkPathParamList = [ params.input ]
-//for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } }
+def checkPathParamList = [
+    params.input, params.multiqc_config,
+    params.fasta, params.bwt2_index
+]
+for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } }
 
 // Check mandatory parameters
 if (params.input) { ch_input = file(params.input) } else { exit 1, 'Input samplesheet not specified!' }
 
+//*****************************************
 // Digestion parameters
 if (params.digestion){
   restriction_site = params.digestion ? params.digest[ params.digestion ].restriction_site ?: false : false
   ch_restriction_site = Channel.value(restriction_site)
-
   ligation_site = params.digestion ? params.digest[ params.digestion ].ligation_site ?: false : false
   ch_ligation_site = Channel.value(ligation_site)
-}else{
+}else if (params.dnase){
   ch_restriction_site = Channel.empty()
   ch_ligation_site = Channel.empty()
+}else{
+  exit 1, "Ligation motif not found. Please either use the '--digestion' parameter or specify '--restriction_site' and '--ligation_site'. For DNase Hi-C, please use the '--dnase' option."
 }
 
-// Resolutions for contact maps
+//****************************************
+// Maps resolution for downstream analysis
+
 ch_map_res = Channel.from( params.bin_size ).splitCsv().flatten()
 if (params.res_tads && !params.skip_tads){
   Channel.from( "${params.res_tads}" )
@@ -58,16 +66,12 @@ if (params.res_dist_decay && !params.skip_dist_decay){
 }
 
 if (params.res_compartments && !params.skip_compartments){
-  //Channel.fromPath( params.fasta )
-  //  .ifEmpty { exit 1, "Compartments calling: Fasta file not found: ${params.fasta}" }
-  //  .set { fasta_for_compartments }
   Channel.from( "${params.res_compartments}" )
     .splitCsv()
     .flatten()
     .set {ch_comp_res}
    ch_map_res = ch_map_res.concat(ch_comp_res)
 }else{
-  //fasta_for_compartments = Channel.empty()
   ch_comp_res = Channel.create()
   if (!params.skip_compartments){
     log.warn "[nf-core/hic] Hi-C resolution for compartment calling not specified. See --res_compartments" 
@@ -90,15 +94,12 @@ ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multi
 ========================================================================================
 */
 
-// Don't overwrite global params.modules, create a copy instead and use that within the main script.
-//def modules = params.modules.clone()
-
 //
 // MODULE: Local to the pipeline
 //
-//include { GET_SOFTWARE_VERSIONS } from '../modules/local/get_software_versions'
-//include { OUTPUT_DOCUMENTATION } from '../modules/local/output_documentation'
 include { HIC_PLOT_DIST_VS_COUNTS } from '../modules/local/hicexplorer/hicPlotDistVsCounts' 
+include { MULTIQC } from '../modules/local/multiqc'
+
 //
 // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules
 //
@@ -115,14 +116,12 @@ include { TADS } from '../subworkflows/local/tads'
 ========================================================================================
 */
 
-//def multiqc_options   = modules['multiqc']
-//multiqc_options.args += params.multiqc_title ? Utils.joinModuleArgs(["--title \"$params.multiqc_title\""]) : ''
-
 //
 // MODULE: Installed directly from nf-core/modules
 //
+include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main'
 include { FASTQC  } from '../modules/nf-core/modules/fastqc/main'
-//include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options   )
+//include { MULTIQC } from '../modules/nf-core/modules/multiqc/main'
 
 /*
 ========================================================================================
@@ -145,7 +144,7 @@ def multiqc_report = []
 
 workflow HIC {
 
-  ch_software_versions = Channel.empty()
+  ch_versions = Channel.empty()
 
   //
   // SUBWORKFLOW: Read in samplesheet, validate and stage input files
@@ -154,6 +153,8 @@ workflow HIC {
     ch_input
   )
 
   //
   // SUBWORKFLOW: Prepare genome annotation
   //
@@ -161,6 +162,7 @@ workflow HIC {
     ch_fasta,
     ch_restriction_site
   )
+  ch_versions = ch_versions.mix(PREPARE_GENOME.out.versions)
 
   //
   // MODULE: Run FastQC
@@ -168,6 +170,7 @@ workflow HIC {
   FASTQC (
     INPUT_CHECK.out.reads
   )
+  ch_versions = ch_versions.mix(FASTQC.out.versions)
 
   //
  // SUB-WORKFLOW: HiC-Pro
@@ -180,6 +183,7 @@ workflow HIC {
     ch_ligation_site,
     ch_map_res
   )
+  ch_versions = ch_versions.mix(HICPRO.out.versions)
 
   //
   // SUB-WORKFLOW: COOLER
@@ -189,6 +193,7 @@ workflow HIC {
     PREPARE_GENOME.out.chromosome_size,
     ch_map_res
   )
+  ch_versions = ch_versions.mix(COOLER.out.versions)
 
   //
   // MODULE: HICEXPLORER/HIC_PLOT_DIST_VS_COUNTS
@@ -196,13 +201,15 @@ workflow HIC {
   if (!params.skip_dist_decay){
     COOLER.out.cool
       .combine(ch_ddecay_res)
-      .filter{ it[1] == it[3] }
-      .map { it -> [it[0], it[2]]}
+      .filter{ it[0].resolution == it[2] }
+      .map { it -> [it[0], it[1]]}
       .set{ ch_distdecay }
 
     HIC_PLOT_DIST_VS_COUNTS(
       ch_distdecay
     )
+    ch_versions = ch_versions.mix(HIC_PLOT_DIST_VS_COUNTS.out.versions)
   }
 
   //
@@ -211,7 +218,7 @@ workflow HIC {
   if (!params.skip_compartments){
     COOLER.out.cool
       .combine(ch_comp_res)
-      .filter{ it[1] == it[3] }
+      .filter{ it[0].resolution == it[2] }
       .map { it -> [it[0], it[1], it[2]]}
       .set{ ch_cool_compartments }
 
@@ -220,6 +227,7 @@ workflow HIC {
       ch_fasta,
       PREPARE_GENOME.out.chromosome_size
     )
+    ch_versions = ch_versions.mix(COMPARTMENTS.out.versions)
   }
 
   //
@@ -228,15 +236,23 @@ workflow HIC {
   if (!params.skip_tads){
     COOLER.out.cool
       .combine(ch_tads_res)
-      .filter{ it[1] == it[3] }
-      .map { it -> [it[0], it[2]]}
+      .filter{ it[0].resolution == it[2] }
+      .map { it -> [it[0], it[1]]}
       .set{ ch_cool_tads }
     TADS(
       ch_cool_tads
     )
+    ch_versions = ch_versions.mix(TADS.out.versions)
   }
 
+  //
+  // SOFTWARE VERSION
+  //
+  CUSTOM_DUMPSOFTWAREVERSIONS(
+    ch_versions.unique().collectFile(name: 'collated_versions.yml')
+  )
+
   //
   // MODULE: MultiQC
   //
@@ -245,17 +261,19 @@ workflow HIC {
 
   ch_multiqc_files = Channel.empty()
   ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config))
-  ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([]))
+  ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_custom_config.collect().ifEmpty([])))
   ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'))
-  //ch_multiqc_files = ch_multiqc_files.mix(GET_SOFTWARE_VERSIONS.out.yaml.collect())
-  //ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([]))
-
-  //MULTIQC (
-  //  ch_multiqc_files.collect()
-  //)
-  //multiqc_report       = MULTIQC.out.report.toList()
-  multiqc_report = []
-  //ch_software_versions = ch_software_versions.mix(MULTIQC.out.version.ifEmpty(null))
+  ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.map{it->it[1]})
+  ch_multiqc_files = ch_multiqc_files.mix(HICPRO.out.mqc)
+
+  MULTIQC (
+    ch_multiqc_config,
+    ch_multiqc_custom_config.collect().ifEmpty([]),
+    ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'),
+    FASTQC.out.zip.map{it->it[1]},
+    HICPRO.out.mqc.collect()
+  )
+  multiqc_report = MULTIQC.out.report.toList()
 }
 
 /*
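Editor's note (not part of the patch): the same combine/filter idiom appears three times in hic.nf above, for distance decay, compartments and TADs. A minimal runnable sketch of what it does, with hypothetical tuples:

    // Pair each cooler file with every requested resolution, then keep only
    // the matches. The meta maps and file names are hypothetical.
    workflow {
      ch_cool = Channel.of( [[id:'s1', resolution:1000], 's1_1000.cool'],
                            [[id:'s1', resolution:5000], 's1_5000.cool'] )
      ch_res  = Channel.of( 1000, 250000 )
      ch_cool
        .combine(ch_res)                        // -> [meta, cool, res]
        .filter { it[0].resolution == it[2] }   // keep cools at a requested resolution
        .map    { it -> [it[0], it[1]] }        // -> [meta, cool]
        .view()                                 // [[id:s1, resolution:1000], s1_1000.cool]
    }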