diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 0fc5c61df27df619c3fc7a9e7e920622568ef2c1..f94b9a453d3e42987ffbdd9a2d2be99e9e2b70ac 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -10,14 +10,15 @@ Remember that PRs should be made against the dev branch, unless you're preparing
 
 Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)
 -->
+<!-- markdownlint-disable ul-indent -->
 
 ## PR checklist
 
 - [ ] This comment contains a description of changes (with reason).
 - [ ] If you've fixed a bug or added code that should be tested, add tests!
-  - [ ] If you've added a new tool - add to the software_versions process and a regex to `scrape_software_versions.py`
-  - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)
-  - [ ] If necessary, also make a PR on the nf-core/hic _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
+    - [ ] If you've added a new tool - add to the software_versions process and a regex to `scrape_software_versions.py`
+    - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)
+    - [ ] If necessary, also make a PR on the nf-core/hic _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
 - [ ] Make sure your code lints (`nf-core lint .`).
 - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`).
 - [ ] Usage Documentation in `docs/usage.md` is updated.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ea66e3b41d1c440b42988eafa1e1b167e7e3a052..734d985bda7b57a590be1f60487f2220a45f8c71 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -8,6 +8,9 @@ on:
   release:
     types: [published]
 
+# Uncomment if we need an edge release of Nextflow again
+# env: NXF_EDGE: 1
+
 jobs:
   test:
     name: Run workflow tests
@@ -20,7 +23,7 @@ jobs:
     strategy:
       matrix:
         # Nextflow versions: check pipeline minimum and current latest
-        nxf_ver: ['20.04.0', '21.03.0-edge']
+        nxf_ver: ['20.04.0', '']
     steps:
       - name: Check out pipeline code
         uses: actions/checkout@v2
diff --git a/Dockerfile b/Dockerfile
index 23c1d0ef0c078cadf0d415f683bfca2ef94cf76d..18af828827fec11b7c15b7278c9b4591b8bc00dc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM nfcore/base:1.13.3
+FROM nfcore/base:1.14
 LABEL authors="Nicolas Servant" \
       description="Docker image containing all software requirements for the nf-core/hic pipeline"
 
diff --git a/README.md b/README.md
index 73e2665b84470263b8d20c128cba22be7db2d6ae..cb88454ceec117d40a3f43251d85a67078085b99 100644
--- a/README.md
+++ b/README.md
@@ -51,7 +51,7 @@ results highly reproducible.
 
 ## Quick Start
 
-1. Install [`nextflow`](https://nf-co.re/usage/installation)
+1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=20.04.0`)
 
 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_
 
diff --git a/lib/NfcoreSchema.groovy b/lib/NfcoreSchema.groovy
index 54935ec818dec805f6d11f90083681589525183b..52ee730432905c5f6dc3e2c89352bbaee6ea145b 100644
--- a/lib/NfcoreSchema.groovy
+++ b/lib/NfcoreSchema.groovy
@@ -112,8 +112,14 @@ class NfcoreSchema {
             }
             // unexpected params
             def params_ignore = params.schema_ignore_params.split(',') + 'schema_ignore_params'
-            if (!expectedParams.contains(specifiedParam) && !params_ignore.contains(specifiedParam)) {
-                unexpectedParams.push(specifiedParam)
+            def expectedParamsLowerCase = expectedParams.collect{ it.replace("-", "").toLowerCase() }
+            def specifiedParamLowerCase = specifiedParam.replace("-", "").toLowerCase()
+            if (!expectedParams.contains(specifiedParam) && !params_ignore.contains(specifiedParam) && !expectedParamsLowerCase.contains(specifiedParamLowerCase)) {
+                // Temporarily remove camelCase/camel-case params #1035
+                def unexpectedParamsLowerCase = unexpectedParams.collect{ it.replace("-", "").toLowerCase()}
+                if (!unexpectedParamsLowerCase.contains(specifiedParamLowerCase)){
+                    unexpectedParams.push(specifiedParam)
+                }
             }
         }
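
A rough illustration of the hyphen-stripping comparison added above (the parameter names are hypothetical): Nextflow exposes each command-line option in both camelCase and kebab-case spellings, so normalising both to the same string keeps the duplicate spelling from being reported as an unexpected parameter.

```groovy
// Sketch of the normalisation used above; 'singleEnd' / 'single-end'
// are illustrative names, not necessarily parameters of this schema.
def normalise = { it.replace('-', '').toLowerCase() }
def expected  = ['singleEnd', 'maxMemory'].collect(normalise)
assert expected.contains(normalise('single-end'))    // duplicate spelling tolerated
assert !expected.contains(normalise('not_a_param'))  // still reported as unexpected
```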
 
@@ -191,11 +197,11 @@ class NfcoreSchema {
 
     // Remove an element from a JSONArray
     private static JSONArray removeElement(jsonArray, element){
-        def list = []  
+        def list = []
         int len = jsonArray.length()
-        for (int i=0;i<len;i++){ 
+        for (int i=0;i<len;i++){
             list.add(jsonArray.get(i).toString())
-        } 
+        }
         list.remove(element)
         JSONArray jsArray = new JSONArray(list)
         return jsArray
@@ -213,7 +219,7 @@ class NfcoreSchema {
                             // If the param was required, change this
                             if (definition[key].has("required")) {
                                 def cleaned_required = removeElement(definition[key].required, ignore_param)
-                                definition[key].put("required", cleaned_required) 
+                                definition[key].put("required", cleaned_required)
                             }
                         }
                     }
@@ -243,7 +249,7 @@ class NfcoreSchema {
             }
             // Cast Duration to String
             if (p['value'].getClass() == nextflow.util.Duration) {
-                new_params.replace(p.key, p['value'].toString())
+                new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!\S)/, "day"))
             }
             // Cast LinkedHashMap to String
             if (p['value'].getClass() == LinkedHashMap) {
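
A minimal check of the Duration rewrite above (values are illustrative): Nextflow renders a Duration such as `10.d` as `10d`, and the updated `max_time` schema pattern expects `day` rather than a bare `d`, so the trailing unit is expanded before validation.

```groovy
// A trailing 'd' (not followed by another non-space character) becomes 'day'.
assert '10d'.replaceFirst(/d(?!\S)/, 'day')   == '10day'
assert '2d 3h'.replaceFirst(/d(?!\S)/, 'day') == '2day 3h'
assert '240h'.replaceFirst(/d(?!\S)/, 'day')  == '240h'   // unchanged
```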
@@ -482,10 +488,10 @@ class NfcoreSchema {
         }
         workflow_summary['runName']      = workflow.runName
         if (workflow.containerEngine) {
-            workflow_summary['containerEngine'] = "$workflow.containerEngine"
+            workflow_summary['containerEngine'] = workflow.containerEngine
         }
         if (workflow.container) {
-            workflow_summary['container']       = "$workflow.container"
+            workflow_summary['container'] = workflow.container
         }
         workflow_summary['launchDir']    = workflow.launchDir
         workflow_summary['workDir']      = workflow.workDir
@@ -506,17 +512,7 @@ class NfcoreSchema {
                     def params_value = params.get(param)
                     def schema_value = group_params.get(param).default
                     def param_type   = group_params.get(param).type
-                    if (schema_value == null) {
-                        if (param_type == 'boolean') {
-                            schema_value = false
-                        }
-                        if (param_type == 'string') {
-                            schema_value = ''
-                        }
-                        if (param_type == 'integer') {
-                            schema_value = 0
-                        }
-                    } else {
+                    if (schema_value != null) {
                         if (param_type == 'string') {
                             if (schema_value.contains('$projectDir') || schema_value.contains('${projectDir}')) {
                                 def sub_string = schema_value.replace('\$projectDir', '')
@@ -535,8 +531,13 @@ class NfcoreSchema {
                         }
                     }
 
-                    if (params_value != schema_value) {
-                        sub_params.put("$param", params_value)
+                    // We have a default in the schema, and this isn't it
+                    if (schema_value != null && params_value != schema_value) {
+                        sub_params.put(param, params_value)
+                    }
+                    // No default in the schema, and this isn't empty
+                    else if (schema_value == null && params_value != "" && params_value != null && params_value != false) {
+                        sub_params.put(param, params_value)
                     }
                 }
             }
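
A small sketch (hypothetical values) of the display rule introduced above: a parameter goes into the summary either when the schema defines a default and the supplied value differs from it, or when the schema has no default and the value is not empty, null or false.

```groovy
// shown == true means the parameter appears in the run summary.
def shown = { schema_value, params_value ->
    (schema_value != null && params_value != schema_value) ||
    (schema_value == null && params_value != '' && params_value != null && params_value != false)
}
assert  shown('128.GB', '64.GB')        // differs from default
assert !shown('128.GB', '128.GB')       // equals default
assert  shown(null, 'data/*.fastq.gz')  // no default, value set
assert !shown(null, false)              // no default, value unset
```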
@@ -549,22 +550,23 @@ class NfcoreSchema {
      * Beautify parameters for summary and return as string
      */
     private static String params_summary_log(workflow, params, json_schema) {
+        Map colors = log_colours(params.monochrome_logs)
         String output  = ''
         def params_map = params_summary_map(workflow, params, json_schema)
         def max_chars  = params_max_chars(params_map)
         for (group in params_map.keySet()) {
             def group_params = params_map.get(group)  // This gets the parameters of that particular group
             if (group_params) {
-                output += group + '\n'
+                output += colors.bold + group + colors.reset + '\n'
                 for (param in group_params.keySet()) {
-                    output += "    \u001B[1m" +  param.padRight(max_chars) + ": \u001B[1m" + group_params.get(param) + '\n'
+                    output += "  " + colors.blue + param.padRight(max_chars) + ": " + colors.green +  group_params.get(param) + colors.reset + '\n'
                 }
                 output += '\n'
             }
         }
-        output += "[Only displaying parameters that differ from pipeline default]\n"
         output += dashed_line(params.monochrome_logs)
-        output += '\n\n' + dashed_line(params.monochrome_logs)
+        output += colors.dim + "\n Only displaying parameters that differ from defaults.\n" + colors.reset
+        output += dashed_line(params.monochrome_logs)
         return output
     }
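
A minimal sketch (illustrative names and values) of the summary line built above: each parameter name is padded to the width of the longest name in the group so the values line up in one column; the ANSI variables come from log_colours and are omitted here.

```groovy
// Two-space indent, padded parameter name, then the value.
def max_chars = 'igenomes_base'.size()
println '  ' + 'input'.padRight(max_chars) + ': ' + 'data/*.fastq.gz'
// prints '  input        : data/*.fastq.gz'
```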
 
diff --git a/main.nf b/main.nf
index f7c977b0e69a6f394d34b7408d149ca7ee58da4e..ecfce36496f25b291c8bf1894674b63542c01dfc 100644
--- a/main.nf
+++ b/main.nf
@@ -1233,11 +1233,11 @@ def checkHostname() {
         params.hostnames.each { prof, hnames ->
             hnames.each { hname ->
                 if (hostname.contains(hname) && !workflow.profile.contains(prof)) {
-                    log.error '====================================================\n' +
+                    log.error "${c_red}====================================================${c_reset}\n" +
                             "  ${c_red}WARNING!${c_reset} You are running with `-profile $workflow.profile`\n" +
                             "  but your machine hostname is ${c_white}'$hostname'${c_reset}\n" +
                             "  ${c_yellow_bold}It's highly recommended that you use `-profile $prof${c_reset}`\n" +
-                            '============================================================'
+                            "${c_red}====================================================${c_reset}\n"
                 }
             }
         }
diff --git a/nextflow.config b/nextflow.config
index ee9a87526c9357eadf99e8e3634cd97bcab67555..c9fbf5424ba1b4f0ca5d00d54e388d134e390d9e 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -93,7 +93,7 @@ params {
   plaintext_email = false
   monochrome_logs = false
   help = false
-  igenomes_base = 's3://ngi-igenomes/igenomes/'
+  igenomes_base = 's3://ngi-igenomes/igenomes'
   tracedir = "${params.outdir}/pipeline_info"
   igenomes_ignore = false
 
@@ -101,7 +101,7 @@ params {
   custom_config_version = 'master'
   custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"
   hostnames = false
-  config_profile_name = false
+  config_profile_name = null
   config_profile_description = false
   config_profile_contact = false
   config_profile_url = false
@@ -136,7 +136,7 @@ profiles {
     singularity.enabled = false
     podman.enabled = false
     shifter.enabled = false
-    charliecloud = false
+    charliecloud.enabled = false
     process.conda = "$projectDir/environment.yml"
   }
   debug { process.beforeScript = 'echo $HOSTNAME' }
@@ -165,7 +165,7 @@ profiles {
     docker.enabled = false
     podman.enabled = true
     shifter.enabled = false
-    charliecloud = false
+    charliecloud.enabled = false
   }
   shifter {
     singularity.enabled = false
@@ -200,21 +200,22 @@ env {
 // Capture exit codes from upstream processes when piping
 process.shell = ['/bin/bash', '-euo', 'pipefail']
 
+def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
 timeline {
   enabled = true
-  file = "${params.tracedir}/execution_timeline.html"
+  file = "${params.tracedir}/execution_timeline_${trace_timestamp}.html"
 }
 report {
   enabled = true
-  file = "${params.tracedir}/execution_report.html"
+  file = "${params.tracedir}/execution_report_${trace_timestamp}.html"
 }
 trace {
   enabled = true
-  file = "${params.tracedir}/execution_trace.txt"
+  file = "${params.tracedir}/execution_trace_${trace_timestamp}.txt"
 }
 dag {
   enabled = true
-  file = "${params.tracedir}/pipeline_dag.svg"
+  file = "${params.tracedir}/pipeline_dag_${trace_timestamp}.svg"
 }
 
 manifest {
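
A brief sketch of the timestamping added above (the output directory is illustrative): formatting the launch date once and interpolating it into every report path stops repeated runs from overwriting earlier execution reports.

```groovy
def trace_timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
def tracedir = 'results/pipeline_info'   // hypothetical params.tracedir
println "${tracedir}/execution_report_${trace_timestamp}.html"
// e.g. results/pipeline_info/execution_report_2021-05-04_14-30-12.html
```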
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 6b23cb3e41f9b0b54d4fb9d70c928ef5035e0096..a6bead5e83ec99ef88915b82b31d89abd7fba9ef 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -62,7 +62,7 @@
                 "igenomes_base": {
                     "type": "string",
                     "description": "Directory / URL base for iGenomes references.",
-                    "default": "s3://ngi-igenomes/igenomes/",
+                    "default": "s3://ngi-igenomes/igenomes",
                     "fa_icon": "fas fa-cloud-download-alt",
                     "hidden": true
                 },
@@ -433,7 +433,7 @@
                     "description": "Maximum amount of memory that can be requested for any single job.",
                     "default": "128.GB",
                     "fa_icon": "fas fa-memory",
-                    "pattern": "^[\\d\\.]+\\s*.(K|M|G|T)?B$",
+                    "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$",
                     "hidden": true,
                     "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`"
                 },
@@ -442,7 +442,7 @@
                     "description": "Maximum amount of time that can be requested for any single job.",
                     "default": "240.h",
                     "fa_icon": "far fa-clock",
-                    "pattern": "^[\\d\\.]+\\.*(s|m|h|d)$",
+                    "pattern": "^(\\d+\\.?\\s*(s|m|h|day)\\s*)+$",
                     "hidden": true,
                     "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`"
                 }
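
A quick sanity check (illustrative values) of the tightened patterns above, run as plain Groovy regexes: the memory pattern accepts the usual `<number>.<unit>B` strings, and the time pattern now expects `day` in place of a bare `d`, matching the Duration rewrite in NfcoreSchema.groovy.

```groovy
def mem_pattern  = ~/^\d+(\.\d+)?\.?\s*(K|M|G|T)?B$/
def time_pattern = ~/^(\d+\.?\s*(s|m|h|day)\s*)+$/
assert   '128.GB'  ==~ mem_pattern
assert   '8GB'     ==~ mem_pattern
assert !('lots'    ==~ mem_pattern)
assert   '240.h'   ==~ time_pattern
assert   '1day 6h' ==~ time_pattern
assert !('240.d'   ==~ time_pattern)   // bare 'd' no longer accepted
```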