diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 538f355cedd9809e4bd3ac8067cb00832e5385b1..62ad6c259efad78784730845c6e83ac9e7b158ab 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -59,4 +59,4 @@ For further information/help, please consult the [nf-core/hic documentation](htt
 don't hesitate to get in touch on the nf-core Slack [#hic](https://nfcore.slack.com/channels/hic) channel
 ([join our Slack here](https://nf-co.re/join/slack)).
 
-For further information/help, please consult the [nf-core/hic documentation](https://nf-co.re/hic/docs) and don't hesitate to get in touch on the nf-core Slack [#hic](https://nfcore.slack.com/channels/hic) channel ([join our Slack here](https://nf-co.re/join/slack)).
+For further information/help, please consult the [nf-core/hic documentation](https://nf-co.re/hic/usage) and don't hesitate to get in touch on the nf-core Slack [#hic](https://nfcore.slack.com/channels/hic) channel ([join our Slack here](https://nf-co.re/join/slack)).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index dea18053f8ab41016fc4e427dbbc48cf56709781..baf9343cd03e2d5e64918be1d49cdc79c6f5c72f 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -35,7 +35,7 @@ Steps to reproduce the behaviour:
 
 ## Container engine
 
-- Engine: <!-- [e.g. Conda, Docker or Singularity] -->
+- Engine: <!-- [e.g. Conda, Docker, Singularity or Podman] -->
 - version: <!-- [e.g. 1.0.0] -->
 - Image tag: <!-- [e.g. nfcore/hic:1.0.0] -->
 
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml
index 8344dc5156a7da39aaed1a21d6a27c11855c628d..d5cef6aca217882a56da77f50ff8c2f96ea535b5 100644
--- a/.github/workflows/awsfulltest.yml
+++ b/.github/workflows/awsfulltest.yml
@@ -1,10 +1,12 @@
 name: nf-core AWS full size tests
-# This workflow is triggered on push to the master branch.
+# This workflow is triggered on published releases.
+# It can be additionally triggered manually with GitHub actions workflow dispatch.
 # It runs the -profile 'test_full' on AWS batch
 
 on:
   release:
     types: [published]
+  workflow_dispatch:
 
 jobs:
   run-awstest:
diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml
index 94bca240004c28e2608c4e1a8c2a82bb054ce428..b91b9269f62a65f9a450f3b1a8af35604acdd523 100644
--- a/.github/workflows/awstest.yml
+++ b/.github/workflows/awstest.yml
@@ -1,11 +1,10 @@
 name: nf-core AWS test
 # This workflow is triggered on push to the master branch.
-# It runs the -profile 'test' on AWS batch
+# It can be additionally triggered manually with GitHub actions workflow dispatch.
+# It runs the -profile 'test' on AWS batch.
 
 on:
-  push:
-    branches:
-      - master
+  workflow_dispatch:
 
 jobs:
   run-awstest:
@@ -36,4 +35,4 @@ jobs:
           --job-name nf-core-hic \
           --job-queue $AWS_JOB_QUEUE \
           --job-definition $AWS_JOB_DEFINITION \
-          --container-overrides '{"command": ["nf-core/hic", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/hic/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/hic/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}'hic/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}'
+          --container-overrides '{"command": ["nf-core/hic", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/hic/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/hic/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}'
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a8a8ba508df126c0af56f37daa5fe7e94a976875..896e2aee13aff5506249910ead3deb2fb339b645 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -34,13 +34,13 @@ jobs:
 
       - name: Build new docker image
         if: env.GIT_DIFF
-        run: docker build --no-cache . -t nfcore/hic:1.2.2
+        run: docker build --no-cache . -t nfcore/hic:dev
 
       - name: Pull docker image
         if: ${{ !env.GIT_DIFF }}
         run: |
           docker pull nfcore/hic:dev
-          docker tag nfcore/hic:dev nfcore/hic:1.2.2
+          docker tag nfcore/hic:dev nfcore/hic:dev
 
       - name: Install Nextflow
         run: |
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 719ea08ec239f9d45b9ecfafba5fb63ff4e7426a..66b31d195e8a3c933d47b359fca83daf7cacb42b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,11 @@
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## v1.3.0dev
+
+* Template update for nf-core/tools v1.11
+* Minor fix to summary log messages in pipeline header
+
 ## v1.2.2 - 2020-09-02
 
 ### `Added`
diff --git a/Dockerfile b/Dockerfile
index 3c8d019dc4fb85111fe16279257c734d8aeafd43..20b92a846db7a82e4f361aa11bc3c82b8e7e3a0e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,4 @@
-FROM nfcore/base:1.10.2
-
+FROM nfcore/base:1.11
 LABEL authors="Nicolas Servant" \
       description="Docker image containing all software requirements for the nf-core/hic pipeline"
 
@@ -10,10 +9,10 @@ COPY environment.yml /
 RUN conda env create --quiet -f /environment.yml && conda clean -a
 
 # Add conda installation dir to PATH (instead of doing 'conda activate')
-ENV PATH /opt/conda/envs/nf-core-hic-1.2.2/bin:$PATH
+ENV PATH /opt/conda/envs/nf-core-hic-1.3.0dev/bin:$PATH
 
 # Dump the details of the installed packages to a file for posterity
-RUN conda env export --name nf-core-hic-1.2.2 > nf-core-hic-1.2.2.yml
+RUN conda env export --name nf-core-hic-1.3.0dev > nf-core-hic-1.3.0dev.yml
 
 # Instruct R processes to use these empty files instead of clashing with a local version
 RUN touch .Rprofile
diff --git a/README.md b/README.md
index 4ffcce8a79bc79ce52c5396597c289a63c886011..36b30b284d5422dce15ce8bfd4498e97b66fd37d 100644
--- a/README.md
+++ b/README.md
@@ -42,54 +42,35 @@ sites (bowtie2)
 
 ## Quick Start
 
-i. Install [`nextflow`](https://nf-co.re/usage/installation)
+1. Install [`nextflow`](https://nf-co.re/usage/installation)
 
-ii. Install either [`Docker`](https://docs.docker.com/engine/installation/)
-or [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/)
-for full pipeline reproducibility (please only use [`Conda`](https://conda.io/miniconda.html)
-as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))
+2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`Podman`](https://podman.io/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_
 
-iii. Download the pipeline and test it on a minimal dataset with a single command
+3. Download the pipeline and test it on a minimal dataset with a single command
 
-```bash
-nextflow run nf-core/hic -profile test,<docker/singularity/conda/institute>
-```
+    ```bash
+    nextflow run nf-core/hic -profile test,<docker/singularity/podman/conda/institute>
+    ```
 
-> Please check [nf-core/configs](https://github.com/nf-core/configs#documentation)
-to see if a custom config file to run nf-core pipelines already exists for your Institute.
-If so, you can simply use `-profile <institute>` in your command.
-This will enable either `docker` or `singularity` and set the appropriate execution
-settings for your local compute environment.
+    > Please check [nf-core/configs](https://github.com/nf-core/configs#documentation)
+    to see if a custom config file to run nf-core pipelines already exists for your Institute.
+    If so, you can simply use `-profile <institute>` in your command.
+    This will enable either `docker` or `singularity` and set the appropriate execution
+    settings for your local compute environment.
 
-iv. Start running your own analysis!
+4. Start running your own analysis!
 
-```bash
-nextflow run nf-core/hic -profile <docker/singularity/conda/institute> --reads '*_R{1,2}.fastq.gz' --genome GRCh37
-```
+    ```bash
+    nextflow run nf-core/hic -profile <docker/singularity/podman/conda/institute> --input '*_R{1,2}.fastq.gz' --genome GRCh37
+    ```
 
-See [usage docs](https://nf-co.re/hic/usage) for all of the available options when running
-the pipeline.
+See [usage docs](https://nf-co.re/hic/usage) for all of the available options when running the pipeline.
 
 ## Documentation
 
-The nf-core/hic pipeline comes with documentation about the pipeline,
-found in the `docs/` directory:
+The nf-core/hic pipeline comes with documentation about the pipeline: [usage](https://nf-co.re/hic/usage) and [output](https://nf-co.re/hic/output).
 
-1. [Installation](https://nf-co.re/usage/installation)
-2. Pipeline configuration
-    * [Local installation](https://nf-co.re/usage/local_installation)
-    * [Adding your own system config](https://nf-co.re/usage/adding_own_config)
-    * [Reference genomes](https://nf-co.re/usage/reference_genomes)
-3. [Running the pipeline](docs/usage.md)
-4. [Output and how to interpret the results](docs/output.md)
-5. [Troubleshooting](https://nf-co.re/usage/troubleshooting)
-
-The nf-core/hic pipeline comes with documentation about the pipeline which
-you can read at [https://nf-co.re/hic/usage](https://nf-co.re/hic/usage) or
-find in the [`docs/` directory](docs).
-
-For further information or help, don't hesitate to get in touch on
-[Slack](https://nfcore.slack.com/channels/hic).
+For further information or help, don't hesitate to get in touch on [Slack](https://nfcore.slack.com/channels/hic).
 You can join with [this invite](https://nf-co.re/join/slack).
 
 ## Credits
diff --git a/docs/output.md b/docs/output.md
index 95aca423a2f44853e03c8ed443f6eb8ac43b9019..895a4f2a16d75e7ca0dfb21c13d0cccc7ea3a322 100644
--- a/docs/output.md
+++ b/docs/output.md
@@ -1,8 +1,12 @@
 # nf-core/hic: Output
 
-This document describes the output produced by the pipeline.
-Most of the plots are taken from the MultiQC report, which
-summarises results at the end of the pipeline.
+## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/hic/output](https://nf-co.re/hic/output)
+
+> _Documentation of pipeline parameters is generated automatically from the pipeline schema and can no longer be found in markdown files._
+
+## Introduction
+
+This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline.
 
 The directories listed below will be created in the results directory
 after the pipeline has finished. All paths are relative to the top-level
@@ -191,12 +195,16 @@ reported in the MultiQC output for future traceability.
 
 **Output files:**
 
-* `Project_multiqc_report.html`
-  * MultiQC report - a standalone HTML file that can be viewed in your
-web browser
-* `Project_multiqc_data/`
-  * Directory containing parsed statistics from the different tools used
-in the pipeline
+* `multiqc/`
+  * `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser.
+  * `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline.
+  * `multiqc_plots/`: directory containing static images from the report in various formats.
+
+## Pipeline information
+
+[Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage.
+
+**Output files:**
 
 * `pipeline_info/`
   * Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`,
diff --git a/docs/usage.md b/docs/usage.md
index 11b065386fdcd684ef7d12de906b4c963b180c1a..31eabe9fb8044a14de7319c5824c18a53b06936e 100644
--- a/docs/usage.md
+++ b/docs/usage.md
@@ -1,5 +1,11 @@
 # nf-core/hic: Usage
 
+## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/hic/usage](https://nf-co.re/hic/usage)
+
+> _Documentation of pipeline parameters is generated automatically from the pipeline schema and can no longer be found in markdown files._
+
+## Introduction
+
 ## Running the pipeline
 
 The typical command for running the pipeline is as follows:
@@ -74,9 +80,7 @@ fails after three times then the pipeline is stopped.
 Use this parameter to choose a configuration profile. Profiles can give
 configuration presets for different compute environments.
 
-Several generic profiles are bundled with the pipeline which instruct
-the pipeline to use software packaged using different methods
-(Docker, Singularity, Conda) - see below.
+Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Conda) - see below.
 
 > We highly recommend the use of Docker or Singularity containers for full
 pipeline reproducibility, however when this is not possible, Conda is also supported.
@@ -104,9 +108,11 @@ installed and available on the `PATH`. This is _not_ recommended.
 * `singularity`
   * A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/)
   * Pulls software from Docker Hub: [`nfcore/hic`](https://hub.docker.com/r/nfcore/hic/)
+* `podman`
+  * A generic configuration profile to be used with [Podman](https://podman.io/)
+  * Pulls software from Docker Hub: [`nfcore/hic`](https://hub.docker.com/r/nfcore/hic/)
 * `conda`
-  * Please only use Conda as a last resort i.e. when it's not possible to run the
-  pipeline with Docker or Singularity.
+  * Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity or Podman.
   * A generic configuration profile to be used with [Conda](https://conda.io/docs/)
   * Pulls most software from [Bioconda](https://bioconda.github.io/)
 * `test`
@@ -151,15 +157,7 @@ process {
 See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html)
 for more information.
 
-If you are likely to be running `nf-core` pipelines regularly it may be a
-good idea to request that your custom config file is uploaded to the
-`nf-core/configs` git repository. Before you do this please can you test
-that the config file works with your pipeline of choice using the `-c`
-parameter (see definition below). You can then create a pull request to the
-`nf-core/configs` repository with the addition of your config file, associated
-documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)),
-and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config)
-to include your custom profile.
+If you are likely to be running `nf-core` pipelines regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter (see definition above). You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile.
 
 If you have any questions or issues please send us a message on
 [Slack](https://nf-co.re/join/slack) on the
@@ -215,7 +213,7 @@ A normal glob pattern, enclosed in quotation marks, can then be used for `--inpu
 For example:
 
 ```bash
---single_end --reads '*.fastq'
+--single_end --input '*.fastq'
 ```
 
 It is not possible to run a mixture of single-end and paired-end files in one run.
diff --git a/environment.yml b/environment.yml
index ccca9c3d12e94380287c1be0f9f34ca9813890ae..6ee111e3beddb0e3c014d31ceec9d4a64bff25e6 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,6 +1,6 @@
 # You can use this file to create a conda environment for this pipeline:
 #   conda env create -f environment.yml
-name: nf-core-hic-1.2.2
+name: nf-core-hic-1.3.0dev
 channels:
   - conda-forge
   - bioconda
diff --git a/main.nf b/main.nf
index 3528a414cc64c2c3a5c0b106e3eef771c3c52dce..1e4842027b3fb9c31405afb779a050ff4089e2d0 100644
--- a/main.nf
+++ b/main.nf
@@ -36,7 +36,7 @@ def helpMessage() {
     Alignments
       --split_fastq [bool]                      Split fastq files in reads chunks to speed up computation. Default: false
       --fastq_chunks_size [int]                 Size of read chunks if split_fastq is true. Default: 20000000
-      --save_aligned_intermediates [bool]       Save intermediates alignment files. Default: False 
+      --save_aligned_intermediates [bool]       Save intermediate alignment files. Default: false
       --bwt2_opts_end2end [str]                 Options for bowtie2 end-to-end mappinf (first mapping step). See hic.config for default.
       --bwt2_opts_trimmed [str]                 Options for bowtie2 mapping after ligation site trimming. See hic.config for default.
       --min_mapq [int]                          Minimum mapping quality values to consider. Default: 10
@@ -45,7 +45,7 @@ def helpMessage() {
       --rm_singleton [bool]                     Remove singleton reads. Default: true
       --rm_multi [bool]                         Remove multi-mapped reads. Default: true
       --rm_dup [bool]                           Remove duplicates. Default: true
- 
+
     Contacts calling
       --min_restriction_fragment_size [int]     Minimum size of restriction fragments to consider. Default: 0
       --max_restriction_fragment_size [int]     Maximum size of restriction fragments to consider. Default: 0
@@ -189,7 +189,7 @@ else if ( params.fasta ) {
    lastPath = params.fasta.lastIndexOf(File.separator)
    fasta_base = params.fasta.substring(lastPath+1)
    bwt2_base = fasta_base.toString() - ~/(\.fa)?(\.fasta)?(\.fas)?(\.fsa)?$/
- 
+
    Channel.fromPath( params.fasta )
 	.ifEmpty { exit 1, "Genome index: Fasta file not found: ${params.fasta}" }
         .set { fasta_for_index }
@@ -255,24 +255,17 @@ summary['Min Insert Size']  = params.min_insert_size
 summary['Max Insert Size']  = params.max_insert_size
 summary['Min CIS dist']     = params.min_cis_dist
 summary['Maps resolution']  = params.bin_size
-summary['Max Memory']       = params.max_memory
-summary['Max CPUs']         = params.max_cpus
-summary['Max Time']         = params.max_time
+summary['Max Resources']    = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job"
+if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container"
 summary['Output dir']       = params.outdir
+summary['Launch dir']       = workflow.launchDir
 summary['Working dir']      = workflow.workDir
-summary['Container Engine'] = workflow.containerEngine
-if(workflow.containerEngine)
-   summary['Container']     = workflow.container
-summary['Current home']     = "$HOME"
-summary['Current user']     = "$USER"
-summary['Current path']     = "$PWD"
-summary['Working dir']      = workflow.workDir
-summary['Output dir']       = params.outdir
 summary['Script dir']       = workflow.projectDir
-summary['Config Profile']   = workflow.profile
-if(workflow.profile == 'awsbatch'){
-   summary['AWS Region']    = params.awsregion
-   summary['AWS Queue']     = params.awsqueue
+summary['User']             = workflow.userName
+if (workflow.profile.contains('awsbatch')) {
+    summary['AWS Region']   = params.awsregion
+    summary['AWS Queue']    = params.awsqueue
+    summary['AWS CLI']      = params.awscli
 }
 summary['Config Profile'] = workflow.profile
 if (params.config_profile_description) summary['Config Profile Description'] = params.config_profile_description
@@ -738,7 +731,7 @@ process remove_duplicates {
    echo -n "valid_interaction_rmdup\t" >> stats/${sample}/${sample}_allValidPairs.mergestat
    cat ${sample}.allValidPairs | wc -l >> stats/${sample}/${sample}_allValidPairs.mergestat
 
-   ## Count short range (<20000) vs long range contacts 
+   ## Count short range (<20000) vs long range contacts
    awk 'BEGIN{cis=0;trans=0;sr=0;lr=0} \$2 == \$5{cis=cis+1; d=\$6>\$3?\$6-\$3:\$3-\$6; if (d<=20000){sr=sr+1}else{lr=lr+1}} \$2!=\$5{trans=trans+1}END{print "trans_interaction\\t"trans"\\ncis_interaction\\t"cis"\\ncis_shortRange\\t"sr"\\ncis_longRange\\t"lr}' ${sample}.allValidPairs >> stats/${sample}/${sample}_allValidPairs.mergestat
    """
    }
diff --git a/nextflow.config b/nextflow.config
index c765a4a7e242e7da17ae4f1efbb86a39a5fe7a72..14a4f91ac1ede1e0d1e63b33f5556b345ed53d86 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -80,7 +80,7 @@ params {
 
 // Container slug. Stable releases should specify release tag!
 // Developmental code should specify :dev
-process.container = 'nfcore/hic:1.2.2'
+process.container = 'nfcore/hic:dev'
 
 // Load base.config by default for all pipelines
 includeConfig 'conf/base.config'
@@ -108,6 +108,9 @@ profiles {
     singularity.enabled = true
     singularity.autoMounts = true
   }
+  podman {
+    podman.enabled = true
+  }
   test { includeConfig 'conf/test.config' }
 }
 
@@ -150,7 +153,7 @@ manifest {
   description = 'Analysis of Chromosome Conformation Capture data (Hi-C)'
   mainScript = 'main.nf'
   nextflowVersion = '>=19.10.0'
-  version = '1.2.2'
+  version = '1.3.0dev'
 }
 
 // Function to ensure that resource requirements don't go beyond
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 9071bd2454457a3c914a00271a047790a3dc366d..84bb558fd02002db37df4755daa2dff36d09d89f 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -1,5 +1,5 @@
 {
-    "$schema": "https://json-schema.org/draft-07/schema",
+    "$schema": "http://json-schema.org/draft-07/schema",
     "$id": "https://raw.githubusercontent.com/nf-core/hic/master/nextflow_schema.json",
     "title": "nf-core/hic pipeline parameters",
     "description": "Analysis of Chromosome Conformation Capture data (Hi-C)",
@@ -295,7 +295,7 @@
                         "link",
                         "copy",
                         "copyNoFollow",
-                        "mov"
+                        "move"
                     ]
                 },
                 "name": {