From 97c4a9e610c263411705762c1f5fd928416e0729 Mon Sep 17 00:00:00 2001
From: MaxUlysse <max.u.garcia@gmail.com>
Date: Mon, 25 Jan 2021 10:47:43 +0100
Subject: [PATCH] Template update for nf-core/tools version 1.12.1

---
 .github/.dockstore.yml                        |   5 +
 .github/CONTRIBUTING.md                       | 127 ++++-
 .github/ISSUE_TEMPLATE/bug_report.md          |  75 ++-
 .github/ISSUE_TEMPLATE/config.yml             |   8 +
 .github/ISSUE_TEMPLATE/feature_request.md     |  36 +-
 .github/PULL_REQUEST_TEMPLATE.md              |  35 +-
 .github/markdownlint.yml                      |  12 +
 .github/workflows/awsfulltest.yml             |  43 ++
 .github/workflows/awstest.yml                 |  39 ++
 .github/workflows/branch.yml                  |  37 ++
 .github/workflows/ci.yml                      |  57 +++
 .github/workflows/linting.yml                 |  77 +++
 .github/workflows/linting_comment.yml         |  29 ++
 .github/workflows/push_dockerhub_dev.yml      |  28 ++
 .github/workflows/push_dockerhub_release.yml  |  29 ++
 .gitignore                                    |   5 +-
 .travis.yml                                   |  37 --
 CHANGELOG.md                                  |  16 +-
 CODE_OF_CONDUCT.md                            |   8 +-
 Dockerfile                                    |  16 +-
 README.md                                     |  99 +++-
 Singularity                                   |  18 -
 assets/email_template.html                    |   2 +
 assets/email_template.txt                     |  29 +-
 {conf => assets}/multiqc_config.yaml          |   6 +-
 assets/nf-core-hic_logo.png                   | Bin 0 -> 14014 bytes
 assets/sendmail_template.txt                  |  48 +-
 .../scrape_software_versions.cpython-36.pyc   | Bin 1324 -> 0 bytes
 bin/markdown_to_html.py                       |  91 ++++
 bin/markdown_to_html.r                        |  51 --
 bin/scrape_software_versions.py               |  53 +-
 conf/awsbatch.config                          |  13 -
 conf/base.config                              |  46 +-
 conf/igenomes.config                          | 452 ++++++++++++++----
 conf/test.config                              |  11 +-
 conf/test_full.config                         |  22 +
 docs/README.md                                |  16 +-
 docs/configuration/adding_your_own.md         |  86 ----
 docs/configuration/local.md                   |  46 --
 docs/configuration/reference_genomes.md       |  49 --
 docs/images/nf-core-hic_logo.png              | Bin 0 -> 27797 bytes
 docs/installation.md                          | 110 -----
 docs/output.md                                |  58 ++-
 docs/troubleshooting.md                       |  30 --
 docs/usage.md                                 | 246 +++-------
 environment.yml                               |  10 +-
 main.nf                                       | 401 ++++++++++------
 nextflow.config                               |  90 ++--
 nextflow_schema.json                          | 259 ++++++++++
 49 files changed, 1967 insertions(+), 1094 deletions(-)
 create mode 100644 .github/.dockstore.yml
 create mode 100644 .github/ISSUE_TEMPLATE/config.yml
 create mode 100644 .github/markdownlint.yml
 create mode 100644 .github/workflows/awsfulltest.yml
 create mode 100644 .github/workflows/awstest.yml
 create mode 100644 .github/workflows/branch.yml
 create mode 100644 .github/workflows/ci.yml
 create mode 100644 .github/workflows/linting.yml
 create mode 100644 .github/workflows/linting_comment.yml
 create mode 100644 .github/workflows/push_dockerhub_dev.yml
 create mode 100644 .github/workflows/push_dockerhub_release.yml
 delete mode 100644 .travis.yml
 delete mode 100644 Singularity
 rename {conf => assets}/multiqc_config.yaml (79%)
 create mode 100644 assets/nf-core-hic_logo.png
 delete mode 100644 bin/__pycache__/scrape_software_versions.cpython-36.pyc
 create mode 100755 bin/markdown_to_html.py
 delete mode 100755 bin/markdown_to_html.r
 delete mode 100644 conf/awsbatch.config
 create mode 100644 conf/test_full.config
 delete mode 100644 docs/configuration/adding_your_own.md
 delete mode 100644 docs/configuration/local.md
 delete mode 100644 docs/configuration/reference_genomes.md
 create mode 100644 docs/images/nf-core-hic_logo.png
 delete mode 100644 docs/installation.md
 delete mode 100644 docs/troubleshooting.md
 create mode 100644 nextflow_schema.json

diff --git a/.github/.dockstore.yml b/.github/.dockstore.yml
new file mode 100644
index 0000000..030138a
--- /dev/null
+++ b/.github/.dockstore.yml
@@ -0,0 +1,5 @@
+# Dockstore config version, not pipeline version
+version: 1.2
+workflows:
+  - subclass: nfl
+    primaryDescriptorPath: /nextflow.config
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index fda99de..25ef2ed 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -1,45 +1,128 @@
 # nf-core/hic: Contributing Guidelines
 
-Hi there! Many thanks for taking an interest in improving nf-core/hic.
+Hi there!
+Many thanks for taking an interest in improving nf-core/hic.
 
-We try to manage the required tasks for nf-core/hic using GitHub issues, you probably came to this page when creating one. Please use the pre-filled template to save time.
+We try to manage the required tasks for nf-core/hic using GitHub issues; you probably came to this page when creating one.
+Please use the pre-filled template to save time.
 
-However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;)
+However, don't be put off by this template - other more general issues and suggestions are welcome!
+Contributions to the code are even more welcome ;)
 
-> If you need help using or modifying nf-core/hic then the best place to go is the Gitter chatroom where you can ask us questions directly: https://gitter.im/nf-core/Lobby
+> If you need help using or modifying nf-core/hic then the best place to ask is on the nf-core Slack [#hic](https://nfcore.slack.com/channels/hic) channel ([join our Slack here](https://nf-co.re/join/slack)).
 
 ## Contribution workflow
-If you'd like to write some code for nf-core/hic, the standard workflow
-is as follows:
 
-1. Check that there isn't already an issue about your idea in the
-   [nf-core/hic issues](https://github.com/nf-core/hic/issues) to avoid
-   duplicating work.
-    * If there isn't one already, please create one so that others know you're working on this
-2. Fork the [nf-core/hic repository](https://github.com/nf-core/hic) to your GitHub account
-3. Make the necessary changes / additions within your forked repository
-4. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged.
+If you'd like to write some code for nf-core/hic, the standard workflow is as follows:
 
-If you're not used to this workflow with git, you can start with some [basic docs from GitHub](https://help.github.com/articles/fork-a-repo/) or even their [excellent interactive tutorial](https://try.github.io/).
+1. Check that there isn't already an issue about your idea in the [nf-core/hic issues](https://github.com/nf-core/hic/issues) to avoid duplicating work
+    * If there isn't one already, please create one so that others know you're working on this
+2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [nf-core/hic repository](https://github.com/nf-core/hic) to your GitHub account
+3. Make the necessary changes / additions within your forked repository following [Pipeline conventions](#pipeline-contribution-conventions)
+4. Use `nf-core schema build .` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10).
+5. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged
 
+If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/).
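+
+A minimal sketch of the first steps on the command line (replace `<YOUR_USERNAME>` with your GitHub user name; the branch name is just an example):
+
+```bash
+# Clone your fork and start a feature branch from dev
+git clone https://github.com/<YOUR_USERNAME>/hic.git
+cd hic
+git checkout dev
+git checkout -b my-new-feature
+```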
 
 ## Tests
-When you create a pull request with changes, [Travis CI](https://travis-ci.org/) will run automatic tests.
+
+When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests.
 Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then.
 
 There are typically two types of tests that run:
 
-### Lint Tests
-The nf-core has a [set of guidelines](http://nf-co.re/guidelines) which all pipelines must adhere to.
+### Lint tests
+
+`nf-core` has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to.
 To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint <pipeline-directory>` command.
 
 If any failures or warnings are encountered, please follow the listed URL for more documentation.
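+
+For example, a typical local lint run (a minimal sketch; assumes Python and `pip` are available) looks like:
+
+```bash
+# Install nf-core/tools, then lint from the pipeline root directory
+pip install nf-core
+nf-core lint .
+```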
 
-### Pipeline Tests
-Each nf-core pipeline should be set up with a minimal set of test-data.
-Travis CI then runs the pipeline on this data to ensure that it exists successfully.
+### Pipeline tests
+
+Each `nf-core` pipeline should be set up with a minimal set of test-data.
+`GitHub Actions` then runs the pipeline on this data to ensure that it exits successfully.
 If there are any failures then the automated tests fail.
-These tests are run both with the latest available version of Nextflow and also the minimum required version that is stated in the pipeline code.
+These tests are run both with the latest available version of `Nextflow` and also the minimum required version that is stated in the pipeline code.
+
+## Patch
+
+:warning: Only in the unlikely and regretful event of a release happening with a bug.
+
+* On your own fork, make a new branch `patch` based on `upstream/master` (see the sketch below).
+* Fix the bug, and bump version (X.Y.Z+1).
+* A PR should be made on `master` from `patch` to directly address this particular bug.
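+
+A minimal sketch of that first step (assuming your fork has the main nf-core repository configured as the `upstream` remote):
+
+```bash
+# Branch off the latest released code
+git fetch upstream
+git checkout -b patch upstream/master
+```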
 
 ## Getting help
-For further information/help, please consult the [nf-core/hic documentation](https://github.com/nf-core/hic#documentation) and don't hesitate to get in touch on [Gitter](https://gitter.im/nf-core/Lobby)
+
+For further information/help, please consult the [nf-core/hic documentation](https://nf-co.re/hic/usage) and don't hesitate to get in touch on the nf-core Slack [#hic](https://nfcore.slack.com/channels/hic) channel ([join our Slack here](https://nf-co.re/join/slack)).
+
+## Pipeline contribution conventions
+
+To make the nf-core/hic code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written.
+
+### Adding a new step
+
+If you wish to contribute a new step, please use the following coding standards:
+
+1. Define the corresponding input channel for your new process from the expected previous process channel.
+2. Write the process block (see below).
+3. Define the output channel if needed (see below).
+4. Add any new flags/options to `nextflow.config` with a default (see below).
+5. Add any new flags/options to `nextflow_schema.json` with help text (with `nf-core schema build .`)
+6. Add any new flags/options to the help message (for integer/text parameters, print the corresponding `nextflow.config` parameter in the help text).
+7. Add sanity checks for all relevant parameters.
+8. Add any new software to the `scrape_software_versions.py` script in `bin/` and the version command to the `scrape_software_versions` process in `main.nf`.
+9. Do local tests that the new code works properly and as expected (see the example commands after this list).
+10. Add a new test command in `.github/workflow/ci.yaml`.
+11. If applicable, add a [MultiQC](https://multiqc.info/) module.
+12. Update the MultiQC config `assets/multiqc_config.yaml` so that relevant suffixes, sample-name clean-up patterns, the General Statistics Table column order, and module figures are in the right order.
+13. Optional: Add any descriptions of MultiQC report sections and output files to `docs/output.md`.
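+
+For steps 5 and 9 above, the commands already used elsewhere in this repository can be run locally, for example:
+
+```bash
+# Rebuild the parameter schema after editing nextflow.config
+nf-core schema build .
+# Run the pipeline on the minimal test dataset
+nextflow run . -profile test,docker
+```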
+
+### Default values
+
+Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope.
+
+Once there, use `nf-core schema build .` to add to `nextflow_schema.json`.
+
+### Default processes resource requirements
+
+Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generically with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. An nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/%7B%7Bcookiecutter.name_noslash%7D%7D/conf/base.config), which has the default process as a single-core process, and then different levels of multi-core configurations for increasingly large memory requirements, defined with standardised labels.
+
+The process resources can be passed on to the tool dynamically within the process with the `${task.cpus}` and `${task.memory}` variables in the `script:` block.
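+
+For illustration, a `script:` block might pass these values along as follows (`<YOUR_TOOL>` and its flags are placeholders, not part of the template):
+
+```bash
+<YOUR_TOOL> --threads ${task.cpus} --max-memory ${task.memory.toGiga()}G
+```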
+
+### Naming schemes
+
+Please use the following naming schemes, to make it easy to understand what is going where.
+
+* initial process channel: `ch_output_from_<process>`
+* intermediate and terminal channels: `ch_<previousprocess>_for_<nextprocess>`
+
+### Nextflow version bumping
+
+If you are using a new feature from core Nextflow, you may bump the minimum required version of Nextflow in the pipeline with: `nf-core bump-version --nextflow . [min-nf-version]`
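+
+For example, to set the minimum required version to `20.04.0` (the version currently used in the CI matrix), you could run:
+
+```bash
+nf-core bump-version --nextflow . 20.04.0
+```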
+
+### Software version reporting
+
+If you add a new tool to the pipeline, please ensure you add the tool's version information to the `get_software_versions` process.
+
+Add something like the following to the script block of the process:
+
+```bash
+<YOUR_TOOL> --version &> v_<YOUR_TOOL>.txt 2>&1 || true
+```
+
+or
+
+```bash
+<YOUR_TOOL> --help | head -n 1 &> v_<YOUR_TOOL>.txt 2>&1 || true
+```
+
+You then need to edit the script `bin/scrape_software_versions.py` to:
+
+1. Add a Python regex for your tool's `--version` output (as stored in the `v_<YOUR_TOOL>.txt` file), to ensure the version is reported as a `v` followed by the version number, e.g. `v2.1.1` (see the example below)
+2. Add an HTML entry to the `OrderedDict` for formatting in MultiQC.
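+
+For example, using `FastQC` (purely illustrative; the exact version string will differ), the capture pattern from above produces a file whose contents your regex needs to match:
+
+```bash
+fastqc --version &> v_fastqc.txt 2>&1 || true
+cat v_fastqc.txt   # prints something like: FastQC v0.11.9
+```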
+
+### Images and figures
+
+For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 8112c95..5ac2f7f 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,31 +1,64 @@
+---
+name: Bug report
+about: Report something that is broken or incorrect
+labels: bug
+---
+
+<!--
+# nf-core/hic bug report
+
 Hi there!
 
-Thanks for telling us about a problem with the pipeline. Please delete this text and anything that's not relevant from the template below:
+Thanks for telling us about a problem with the pipeline.
+Please delete this text and anything that's not relevant from the template below:
+-->
+
+## Check Documentation
+
+I have checked the following places for my error:
 
-#### Describe the bug
-A clear and concise description of what the bug is.
+- [ ] [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting)
+- [ ] [nf-core/hic pipeline documentation](https://nf-co.re/hic/usage)
+
+## Description of the bug
+
+<!-- A clear and concise description of what the bug is. -->
+
+## Steps to reproduce
 
-#### Steps to reproduce
 Steps to reproduce the behaviour:
-1. Command line: `nextflow run ...`
-2. See error: _Please provide your error message_
 
-#### Expected behaviour
-A clear and concise description of what you expected to happen.
+1. Command line: <!-- [e.g. `nextflow run ...`] -->
+2. See error: <!-- [Please provide your error message] -->
+
+## Expected behaviour
+
+<!-- A clear and concise description of what you expected to happen. -->
+
+## Log files
+
+Have you provided the following extra information/files:
+
+- [ ] The command used to run the pipeline
+- [ ] The `.nextflow.log` file <!-- this is a hidden file in the directory where you launched the pipeline -->
+
+## System
+
+- Hardware: <!-- [e.g. HPC, Desktop, Cloud...] -->
+- Executor: <!-- [e.g. slurm, local, awsbatch...] -->
+- OS: <!-- [e.g. CentOS Linux, macOS, Linux Mint...] -->
+- Version: <!-- [e.g. 7, 10.13.6, 18.3...] -->
+
+## Nextflow Installation
+
+- Version: <!-- [e.g. 19.10.0] -->
 
-#### System:
- - Hardware: [e.g. HPC, Desktop, Cloud...]
- - Executor: [e.g. slurm, local, awsbatch...]
- - OS: [e.g. CentOS Linux, macOS, Linux Mint...]
- - Version [e.g. 7, 10.13.6, 18.3...]
+## Container engine
 
-#### Nextflow Installation:
- - Version: [e.g. 0.31.0]
+- Engine: <!-- [e.g. Conda, Docker, Singularity or Podman] -->
+- version: <!-- [e.g. 1.0.0] -->
+- Image tag: <!-- [e.g. nfcore/hic:1.0.0] -->
 
-#### Container engine:
- - Engine: [e.g. Conda, Docker or Singularity]
- - version: [e.g. 1.0.0]
- - Image tag: [e.g. nfcore/hic:1.0.0]
+## Additional context
 
-#### Additional context
-Add any other context about the problem here.
+<!-- Add any other context about the problem here. -->
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..887f045
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,8 @@
+blank_issues_enabled: false
+contact_links:
+  - name: Join nf-core
+    url: https://nf-co.re/join
+    about: Please join the nf-core community here
+  - name: "Slack #hic channel"
+    url: https://nfcore.slack.com/channels/hic
+    about: Discussion about the nf-core/hic pipeline
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 1f025b7..2e01a5f 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,16 +1,32 @@
+---
+name: Feature request
+about: Suggest an idea for the nf-core/hic pipeline
+labels: enhancement
+---
+
+<!--
+# nf-core/hic feature request
+
 Hi there!
 
-Thanks for suggesting a new feature for the pipeline! Please delete this text and anything that's not relevant from the template below:
+Thanks for suggesting a new feature for the pipeline!
+Please delete this text and anything that's not relevant from the template below:
+-->
+
+## Is your feature request related to a problem? Please describe
+
+<!-- A clear and concise description of what the problem is. -->
+
+<!-- e.g. [I'm always frustrated when ...] -->
+
+## Describe the solution you'd like
+
+<!-- A clear and concise description of what you want to happen. -->
 
-#### Is your feature request related to a problem? Please describe.
-A clear and concise description of what the problem is.
-Ex. I'm always frustrated when [...]
+## Describe alternatives you've considered
 
-#### Describe the solution you'd like
-A clear and concise description of what you want to happen.
+<!-- A clear and concise description of any alternative solutions or features you've considered. -->
 
-#### Describe alternatives you've considered
-A clear and concise description of any alternative solutions or features you've considered.
+## Additional context
 
-#### Additional context
-Add any other context about the feature request here.
+<!-- Add any other context about the feature request here. -->
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 473c41d..1b480fa 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,15 +1,26 @@
-Many thanks to contributing to nf-core/hic!
+<!--
+# nf-core/hic pull request
 
-Please fill in the appropriate checklist below (delete whatever is not relevant). These are the most common things requested on pull requests (PRs).
+Many thanks for contributing to nf-core/hic!
+
+Please fill in the appropriate checklist below (delete whatever is not relevant).
+These are the most common things requested on pull requests (PRs).
+
+Remember that PRs should be made against the dev branch, unless you're preparing a pipeline release.
+
+Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)
+-->
 
 ## PR checklist
- - [ ] This comment contains a description of changes (with reason)
- - [ ] If you've fixed a bug or added code that should be tested, add tests!
- - [ ] If necessary, also make a PR on the [nf-core/hic branch on the nf-core/test-datasets repo]( https://github.com/nf-core/test-datasets/pull/new/nf-core/hic)
- - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`).
- - [ ] Make sure your code lints (`nf-core lint .`).
- - [ ] Documentation in `docs` is updated
- - [ ] `CHANGELOG.md` is updated
- - [ ] `README.md` is updated
-
-**Learn more about contributing:** https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md
+
+- [ ] This comment contains a description of changes (with reason).
+- [ ] If you've fixed a bug or added code that should be tested, add tests!
+ - [ ] If you've added a new tool - add to the software_versions process and a regex to `scrape_software_versions.py`
+ - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)?
+ - [ ] If necessary, also make a PR on the nf-core/hic _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
+- [ ] Make sure your code lints (`nf-core lint .`).
+- [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`).
+- [ ] Usage Documentation in `docs/usage.md` is updated.
+- [ ] Output Documentation in `docs/output.md` is updated.
+- [ ] `CHANGELOG.md` is updated.
+- [ ] `README.md` is updated (including new tool citations and authors/contributors).
diff --git a/.github/markdownlint.yml b/.github/markdownlint.yml
new file mode 100644
index 0000000..8d7eb53
--- /dev/null
+++ b/.github/markdownlint.yml
@@ -0,0 +1,12 @@
+# Markdownlint configuration file
+default: true
+line-length: false
+no-duplicate-header:
+    siblings_only: true
+no-inline-html:
+    allowed_elements:
+        - img
+        - p
+        - kbd
+        - details
+        - summary
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml
new file mode 100644
index 0000000..e63ea69
--- /dev/null
+++ b/.github/workflows/awsfulltest.yml
@@ -0,0 +1,43 @@
+name: nf-core AWS full size tests
+# This workflow is triggered on published releases.
+# It can be additionally triggered manually with GitHub actions workflow dispatch.
+# It runs the -profile 'test_full' on AWS batch
+
+on:
+  workflow_run:
+    workflows: ["nf-core Docker push (release)"]
+    types: [completed]
+  workflow_dispatch:
+
+jobs:
+  run-awstest:
+    name: Run AWS full tests
+    if: github.repository == 'nf-core/hic'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Setup Miniconda
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          auto-update-conda: true
+          python-version: 3.7
+      - name: Install awscli
+        run: conda install -c conda-forge awscli
+      - name: Start AWS batch job
+        # TODO nf-core: You can customise AWS full pipeline tests as required
+        # Add full size test data (but still relatively small datasets for a few samples)
+        # to `test_full.config`; the test runs with only one set of parameters.
+        # Then specify `-profile test_full` instead of `-profile test` on the AWS batch command
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }}
+          AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }}
+          AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }}
+          AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
+        run: |
+          aws batch submit-job \
+            --region eu-west-1 \
+            --job-name nf-core-hic \
+            --job-queue $AWS_JOB_QUEUE \
+            --job-definition $AWS_JOB_DEFINITION \
+            --container-overrides '{"command": ["nf-core/hic", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/hic/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/hic/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}'
diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml
new file mode 100644
index 0000000..92dfa0d
--- /dev/null
+++ b/.github/workflows/awstest.yml
@@ -0,0 +1,39 @@
+name: nf-core AWS test
+# This workflow can be triggered manually with the GitHub actions workflow dispatch button.
+# It runs the -profile 'test' on AWS batch.
+
+on:
+  workflow_dispatch:
+
+jobs:
+  run-awstest:
+    name: Run AWS tests
+    if: github.repository == 'nf-core/hic'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Setup Miniconda
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          auto-update-conda: true
+          python-version: 3.7
+      - name: Install awscli
+        run: conda install -c conda-forge awscli
+      - name: Start AWS batch job
+        # TODO nf-core: You can customise CI pipeline run tests as required
+        # For example: adding multiple test runs with different parameters
+        # Remember that you can parallelise this by using strategy.matrix
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }}
+          AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }}
+          AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }}
+          AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
+        run: |
+          aws batch submit-job \
+          --region eu-west-1 \
+          --job-name nf-core-hic \
+          --job-queue $AWS_JOB_QUEUE \
+          --job-definition $AWS_JOB_DEFINITION \
+          --container-overrides '{"command": ["nf-core/hic", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/hic/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/hic/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}'
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
new file mode 100644
index 0000000..92136a1
--- /dev/null
+++ b/.github/workflows/branch.yml
@@ -0,0 +1,37 @@
+name: nf-core branch protection
+# This workflow is triggered on PRs to master branch on the repository
+# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev`
+on:
+  pull_request_target:
+    branches: [master]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches
+      - name: Check PRs
+        if: github.repository == 'nf-core/hic'
+        run: |
+          { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/hic ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]
+
+
+      # If the above check failed, post a comment on the PR explaining the failure
+      # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
+      - name: Post PR comment
+        if: failure()
+        uses: mshick/add-pr-comment@v1
+        with:
+          message: |
+            Hi @${{ github.event.pull_request.user.login }},
+
+            It looks like this pull-request has been made against the ${{github.event.pull_request.head.repo.full_name}} `master` branch.
+            The `master` branch on nf-core repositories should always contain code from the latest release.
+            Because of this, PRs to `master` are only allowed if they come from the ${{github.event.pull_request.head.repo.full_name}} `dev` branch.
+
+            You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page.
+
+            Thanks again for your contribution!
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          allow-repeats: false
+
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..5553430
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,57 @@
+name: nf-core CI
+# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
+on:
+  push:
+    branches:
+      - dev
+  pull_request:
+  release:
+    types: [published]
+
+jobs:
+  test:
+    name: Run workflow tests
+    # Only run on push if this is the nf-core dev branch (merged PRs)
+    if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/hic') }}
+    runs-on: ubuntu-latest
+    env:
+      NXF_VER: ${{ matrix.nxf_ver }}
+      NXF_ANSI_LOG: false
+    strategy:
+      matrix:
+        # Nextflow versions: check pipeline minimum and current latest
+        nxf_ver: ['20.04.0', '']
+    steps:
+      - name: Check out pipeline code
+        uses: actions/checkout@v2
+
+      - name: Check if Dockerfile or Conda environment changed
+        uses: technote-space/get-diff-action@v4
+        with:
+          FILES: |
+            Dockerfile
+            environment.yml
+
+      - name: Build new docker image
+        if: env.MATCHED_FILES
+        run: docker build --no-cache . -t nfcore/hic:dev
+
+      - name: Pull docker image
+        if: ${{ !env.MATCHED_FILES }}
+        run: |
+          docker pull nfcore/hic:dev
+          docker tag nfcore/hic:dev nfcore/hic:dev
+
+      - name: Install Nextflow
+        env:
+          CAPSULE_LOG: none
+        run: |
+          wget -qO- get.nextflow.io | bash
+          sudo mv nextflow /usr/local/bin/
+
+      - name: Run pipeline with test data
+        # TODO nf-core: You can customise CI pipeline run tests as required
+        # For example: adding multiple test runs with different parameters
+        # Remember that you can parallelise this by using strategy.matrix
+        run: |
+          nextflow run ${GITHUB_WORKSPACE} -profile test,docker
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
new file mode 100644
index 0000000..bef81e6
--- /dev/null
+++ b/.github/workflows/linting.yml
@@ -0,0 +1,77 @@
+name: nf-core linting
+# This workflow is triggered on pushes and PRs to the repository.
+# It runs the `nf-core lint` and markdown lint tests to ensure that the code meets the nf-core guidelines
+on:
+  push:
+  pull_request:
+  release:
+    types: [published]
+
+jobs:
+  Markdown:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '10'
+      - name: Install markdownlint
+        run: npm install -g markdownlint-cli
+      - name: Run Markdownlint
+        run: markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml
+  YAML:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '10'
+      - name: Install yaml-lint
+        run: npm install -g yaml-lint
+      - name: Run yaml-lint
+        run: yamllint $(find ${GITHUB_WORKSPACE} -type f -name "*.yml")
+  nf-core:
+    runs-on: ubuntu-latest
+    steps:
+
+      - name: Check out pipeline code
+        uses: actions/checkout@v2
+
+      - name: Install Nextflow
+        env:
+          CAPSULE_LOG: none
+        run: |
+          wget -qO- get.nextflow.io | bash
+          sudo mv nextflow /usr/local/bin/
+
+      - uses: actions/setup-python@v1
+        with:
+          python-version: '3.6'
+          architecture: 'x64'
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install nf-core
+
+      - name: Run nf-core lint
+        env:
+          GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }}
+        run: nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE} --markdown lint_results.md
+
+      - name: Save PR number
+        if: ${{ always() }}
+        run: echo ${{ github.event.pull_request.number }} > PR_number.txt
+
+      - name: Upload linting log file artifact
+        if: ${{ always() }}
+        uses: actions/upload-artifact@v2
+        with:
+          name: linting-log-file
+          path: |
+            lint_log.txt
+            lint_results.md
+            PR_number.txt
+
diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml
new file mode 100644
index 0000000..90f03c6
--- /dev/null
+++ b/.github/workflows/linting_comment.yml
@@ -0,0 +1,29 @@
+
+name: nf-core linting comment
+# This workflow is triggered after the linting action is complete
+# It posts an automated comment to the PR, even if the PR is coming from a fork
+
+on:
+  workflow_run:
+    workflows: ["nf-core linting"]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Download lint results
+        uses: dawidd6/action-download-artifact@v2
+        with:
+          workflow: linting.yml
+
+      - name: Get PR number
+        id: pr_number
+        run: echo "::set-output name=pr_number::$(cat linting-logs/PR_number.txt)"
+
+      - name: Post PR comment
+        uses: marocchino/sticky-pull-request-comment@v2
+        with:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          number: ${{ steps.pr_number.outputs.pr_number }}
+          path: linting-logs/lint_results.md
+
diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml
new file mode 100644
index 0000000..d6fc716
--- /dev/null
+++ b/.github/workflows/push_dockerhub_dev.yml
@@ -0,0 +1,28 @@
+name: nf-core Docker push (dev)
+# This builds the docker image and pushes it to DockerHub
+# Runs on push events to the 'dev' branch (PR merges)
+on:
+  push:
+    branches:
+      - dev
+
+jobs:
+  push_dockerhub:
+    name: Push new Docker image to Docker Hub (dev)
+    runs-on: ubuntu-latest
+    # Only run for the nf-core repo, for releases and merged PRs
+    if: ${{ github.repository == 'nf-core/hic' }}
+    env:
+      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+      DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }}
+    steps:
+      - name: Check out pipeline code
+        uses: actions/checkout@v2
+
+      - name: Build new docker image
+        run: docker build --no-cache . -t nfcore/hic:dev
+
+      - name: Push Docker image to DockerHub (dev)
+        run: |
+          echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin
+          docker push nfcore/hic:dev
diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml
new file mode 100644
index 0000000..eda09cc
--- /dev/null
+++ b/.github/workflows/push_dockerhub_release.yml
@@ -0,0 +1,29 @@
+name: nf-core Docker push (release)
+# This builds the docker image and pushes it to DockerHub
+# Runs on nf-core repo releases
+on:
+  release:
+    types: [published]
+
+jobs:
+  push_dockerhub:
+    name: Push new Docker image to Docker Hub (release)
+    runs-on: ubuntu-latest
+    # Only run for the nf-core repo, for releases and merged PRs
+    if: ${{ github.repository == 'nf-core/hic' }}
+    env:
+      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+      DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }}
+    steps:
+      - name: Check out pipeline code
+        uses: actions/checkout@v2
+
+      - name: Build new docker image
+        run: docker build --no-cache . -t nfcore/hic:latest
+
+      - name: Push Docker image to DockerHub (release)
+        run: |
+          echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin
+          docker push nfcore/hic:latest
+          docker tag nfcore/hic:latest nfcore/hic:${{ github.event.release.tag_name }}
+          docker push nfcore/hic:${{ github.event.release.tag_name }}
diff --git a/.gitignore b/.gitignore
index 46f69e4..aa4bb5b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,7 @@ work/
 data/
 results/
 .DS_Store
-tests/test_data
+tests/
+testing/
+testing*
+*.pyc
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 381c4cf..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-sudo: required
-language: python
-jdk: openjdk8
-services: docker
-python: '3.6'
-cache: pip
-matrix:
-  fast_finish: true
-
-before_install:
-  # PRs to master are only ok if coming from dev branch
-  - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])'
-  # Pull the docker image first so the test doesn't wait for this
-  - docker pull nfcore/hic:dev
-  # Fake the tag locally so that the pipeline runs properly
-  - docker tag nfcore/hic:dev nfcore/hic:latest
-
-install:
-  # Install Nextflow
-  - mkdir /tmp/nextflow && cd /tmp/nextflow
-  - wget -qO- get.nextflow.io | bash
-  - sudo ln -s /tmp/nextflow/nextflow /usr/local/bin/nextflow
-  # Install nf-core/tools
-  - pip install --upgrade pip
-  - pip install nf-core
-  # Reset
-  - mkdir ${TRAVIS_BUILD_DIR}/tests && cd ${TRAVIS_BUILD_DIR}/tests
-
-env:
-  - NXF_VER='0.32.0' # Specify a minimum NF version that should be tested and work
-  - NXF_VER='' # Plus: get the latest NF version and check that it works
-
-script:
-  # Lint the pipeline code
-  - nf-core lint ${TRAVIS_BUILD_DIR}
-  # Run the pipeline with the test profile
-  - nextflow run ${TRAVIS_BUILD_DIR} -profile test,docker
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0b88e32..b84d614 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,16 @@
 # nf-core/hic: Changelog
 
-## v1.0dev - <date>
-Initial release of nf-core/hic, created with the [nf-core](http://nf-co.re/) template.
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## v1.0dev - [date]
+
+Initial release of nf-core/hic, created with the [nf-core](https://nf-co.re/) template.
+
+### `Added`
+
+### `Fixed`
+
+### `Dependencies`
+
+### `Deprecated`
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 2109619..405fb1b 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -34,13 +34,13 @@ This Code of Conduct applies both within project spaces and in public spaces whe
 
 ## Enforcement
 
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on the [Gitter channel](https://gitter.im/nf-core/Lobby). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on [Slack](https://nf-co.re/join/slack). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
 
 Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
 
 ## Attribution
 
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://www.contributor-covenant.org/version/1/4/code-of-conduct/][version]
 
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
+[homepage]: https://contributor-covenant.org
+[version]: https://www.contributor-covenant.org/version/1/4/code-of-conduct/
diff --git a/Dockerfile b/Dockerfile
index 4d28385..1a375e9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,17 @@
-FROM nfcore/base
+FROM nfcore/base:1.12.1
 LABEL authors="Nicolas Servant" \
-      description="Docker image containing all requirements for nf-core/hic pipeline"
+      description="Docker image containing all software requirements for the nf-core/hic pipeline"
 
+# Install the conda environment
 COPY environment.yml /
-RUN conda env create -f /environment.yml && conda clean -a
+RUN conda env create --quiet -f /environment.yml && conda clean -a
+
+# Add conda installation dir to PATH (instead of doing 'conda activate')
 ENV PATH /opt/conda/envs/nf-core-hic-1.0dev/bin:$PATH
+
+# Dump the details of the installed packages to a file for posterity
+RUN conda env export --name nf-core-hic-1.0dev > nf-core-hic-1.0dev.yml
+
+# Instruct R processes to use these empty files instead of clashing with a local version
+RUN touch .Rprofile
+RUN touch .Renviron
diff --git a/README.md b/README.md
index 192fa0d..30fc4b3 100644
--- a/README.md
+++ b/README.md
@@ -1,31 +1,90 @@
-# nf-core/hic
-**Analysis of Chromosome Conformation Capture data (Hi-C)**
+# ![nf-core/hic](docs/images/nf-core-hic_logo.png)
 
-[![Build Status](https://travis-ci.org/nf-core/hic.svg?branch=master)](https://travis-ci.org/nf-core/hic)
-[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.32.0-brightgreen.svg)](https://www.nextflow.io/)
+**Analysis of Chromosome Conformation Capture data (Hi-C)**.
 
-[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/)
+[![GitHub Actions CI Status](https://github.com/nf-core/hic/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/hic/actions)
+[![GitHub Actions Linting Status](https://github.com/nf-core/hic/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/hic/actions)
+[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A520.04.0-brightgreen.svg)](https://www.nextflow.io/)
+
+[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/)
 [![Docker](https://img.shields.io/docker/automated/nfcore/hic.svg)](https://hub.docker.com/r/nfcore/hic)
-![Singularity Container available](
-https://img.shields.io/badge/singularity-available-7E4C74.svg)
+[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23hic-4A154B?logo=slack)](https://nfcore.slack.com/channels/hic)
+
+## Introduction
+
+<!-- TODO nf-core: Write a 1-2 sentence summary of what data the pipeline is for and what it does -->
+**nf-core/hic** is a bioinformatics best-practice analysis pipeline for
+
+The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker containers making installation trivial and results highly reproducible.
+
+## Quick Start
+
+1. Install [`nextflow`](https://nf-co.re/usage/installation)
+
+2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`Podman`](https://podman.io/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_
+
+3. Download the pipeline and test it on a minimal dataset with a single command:
+
+    ```bash
+    nextflow run nf-core/hic -profile test,<docker/singularity/podman/conda/institute>
+    ```
+
+    > Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile <institute>` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment.
+
+4. Start running your own analysis!
+
+    <!-- TODO nf-core: Update the example "typical command" below used to run the pipeline -->
+
+    ```bash
+    nextflow run nf-core/hic -profile <docker/singularity/podman/conda/institute> --input '*_R{1,2}.fastq.gz' --genome GRCh37
+    ```
+
+See [usage docs](https://nf-co.re/hic/usage) for all of the available options when running the pipeline.
 
-### Introduction
-The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker / singularity containers making installation trivial and results highly reproducible.
+## Pipeline Summary
 
+By default, the pipeline currently performs the following:
 
-### Documentation
-The nf-core/hic pipeline comes with documentation about the pipeline, found in the `docs/` directory:
+<!-- TODO nf-core: Fill in short bullet-pointed list of default steps of pipeline -->
 
-1. [Installation](docs/installation.md)
-2. Pipeline configuration
-    * [Local installation](docs/configuration/local.md)
-    * [Adding your own system](docs/configuration/adding_your_own.md)
-    * [Reference genomes](docs/configuration/reference_genomes.md)  
-3. [Running the pipeline](docs/usage.md)
-4. [Output and how to interpret the results](docs/output.md)
-5. [Troubleshooting](docs/troubleshooting.md)
+* Sequencing quality control (`FastQC`)
+* Overall pipeline run summaries (`MultiQC`)
+
+## Documentation
+
+The nf-core/hic pipeline comes with documentation about the pipeline: [usage](https://nf-co.re/hic/usage) and [output](https://nf-co.re/hic/output).
 
 <!-- TODO nf-core: Add a brief overview of what the pipeline does and how it works -->
 
-### Credits
+## Credits
+
 nf-core/hic was originally written by Nicolas Servant.
+
+We thank the following people for their extensive assistance in the development
+of this pipeline:
+
+<!-- TODO nf-core: If applicable, make list of people who have also contributed -->
+
+## Contributions and Support
+
+If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md).
+
+For further information or help, don't hesitate to get in touch on the [Slack `#hic` channel](https://nfcore.slack.com/channels/hic) (you can join with [this invite](https://nf-co.re/join/slack)).
+
+## Citations
+
+<!-- TODO nf-core: Add citation for pipeline after first release. Uncomment lines below and update Zenodo doi. -->
+<!-- If you use  nf-core/hic for your analysis, please cite it using the following doi: [10.5281/zenodo.XXXXXX](https://doi.org/10.5281/zenodo.XXXXXX) -->
+
+You can cite the `nf-core` publication as follows:
+
+> **The nf-core framework for community-curated bioinformatics pipelines.**
+>
+> Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen.
+>
+> _Nat Biotechnol._ 2020 Feb 13. doi: [10.1038/s41587-020-0439-x](https://dx.doi.org/10.1038/s41587-020-0439-x).
+> ReadCube: [Full Access Link](https://rdcu.be/b1GjZ)
+
+In addition, references of tools and data used in this pipeline are as follows:
+
+<!-- TODO nf-core: Add bibliography of tools and data used in your pipeline -->
diff --git a/Singularity b/Singularity
deleted file mode 100644
index 927866f..0000000
--- a/Singularity
+++ /dev/null
@@ -1,18 +0,0 @@
-From:nfcore/base
-Bootstrap:docker
-
-%labels
-    MAINTAINER Nicolas Servant
-    DESCRIPTION Singularity image containing all requirements for the nf-core/hic pipeline
-    VERSION 1.0dev
-
-%environment
-    PATH=/opt/conda/envs/nf-core-hic-1.0dev/bin:$PATH
-    export PATH
-
-%files
-    environment.yml /
-
-%post
-    /opt/conda/bin/conda env create -f /environment.yml
-    /opt/conda/bin/conda clean -a
diff --git a/assets/email_template.html b/assets/email_template.html
index bf19807..177bccd 100644
--- a/assets/email_template.html
+++ b/assets/email_template.html
@@ -11,6 +11,8 @@
 <body>
 <div style="font-family: Helvetica, Arial, sans-serif; padding: 30px; max-width: 800px; margin: 0 auto;">
 
+<img src="cid:nfcorepipelinelogo">
+
 <h1>nf-core/hic v${version}</h1>
 <h2>Run Name: $runName</h2>
 
diff --git a/assets/email_template.txt b/assets/email_template.txt
index 59d7b54..a951c5e 100644
--- a/assets/email_template.txt
+++ b/assets/email_template.txt
@@ -1,6 +1,12 @@
-========================================
- nf-core/hic v${version}
-========================================
+----------------------------------------------------
+                                        ,--./,-.
+        ___     __   __   __   ___     /,-._.--~\\
+  |\\ | |__  __ /  ` /  \\ |__) |__         }  {
+  | \\| |       \\__, \\__/ |  \\ |___     \\`-._,-`-,
+                                        `._,._,'
+  nf-core/hic v${version}
+----------------------------------------------------
+
 Run Name: $runName
 
 <% if (success){
@@ -17,23 +23,6 @@ ${errorReport}
 } %>
 
 
-<% if (!success){
-    out << """####################################################
-## nf-core/hic execution completed unsuccessfully! ##
-####################################################
-The exit status of the task that caused the workflow execution to fail was: $exitStatus.
-The full error message was:
-
-${errorReport}
-"""
-} else {
-    out << "## nf-core/hic execution completed successfully! ##"
-}
-%>
-
-
-
-
 The workflow was completed at $dateComplete (duration: $duration)
 
 The command used to launch the workflow was as follows:
diff --git a/conf/multiqc_config.yaml b/assets/multiqc_config.yaml
similarity index 79%
rename from conf/multiqc_config.yaml
rename to assets/multiqc_config.yaml
index f2a738c..41468ca 100644
--- a/conf/multiqc_config.yaml
+++ b/assets/multiqc_config.yaml
@@ -3,5 +3,9 @@ report_comment: >
     analysis pipeline. For information about how to interpret these results, please see the
     <a href="https://github.com/nf-core/hic" target="_blank">documentation</a>.
 report_section_order:
-    nf-core/hic-software-versions:
+    software_versions:
         order: -1000
+    nf-core-hic-summary:
+        order: -1001
+
+export_plots: true
diff --git a/assets/nf-core-hic_logo.png b/assets/nf-core-hic_logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..37461d9a32ae1f73d9090a3a2387cf8997c9a0ed
GIT binary patch
literal 14014
zcmXwg1ymN@_w|ExH#~GIN_RKXNQ+2!r-XEubci4wA|)l=4Fb~LrF2U7cc1t7|JG7r
zjWc)d%$$49-uvwPNmW@E6O9xN0)b%4%e_{IK;YuR?`0@R;NL9fSLomes*~J%7YGEa
z2lf{(nF)&w0-=V;zn0MS$T-aW=%e}e_DO78c}Pj}S#e0(n9xgFX55}+a|vy-a&@9{
zLO1Ex&L8!MO7741^BTS+=h42j)lVZO(A32FRwOQ&fcK0(K3;}eWb*jtf#d7IZ?nP7
zfzM<fE1t`<1C~dvqn<~Nf0s1a@TC*rA*L9R4gqBAiEr{rFAi0>ypbev5F~H{spnkq
zgkZ<sq}9+5ADZI@QKJN*KsrU9sNbY<gVPJFY~X27f~bQeoru&bROtRYYA=I6($(l8
z`QI7xcN3bS(qJ@%(1}*<|Atj}(lnNC=E;T)AYadNI1!WLg7x36-{?es=fwdxp?!()
zln$H1YefWc%7;(?O~G|>f}5B~0tW$?;QtSqh(--NiM^pFP*sj5lt5iBcPhT=6b~Gf
z$VIRdK;6!eM0@ouz6&QDqH3rc)umDB)YgI_6O7APODg~B)y@nZJ_TM<2Q;cFyE9}8
zJqp~q$7#g4R;Oxnk>&csgBQ7g2Sg?~x7Qu508ixZyfb@yxc?W2DYzo2zB9k($#hiq
z09pLMM}?owa3RR}-M``gh~FP~kG4p9=iMX{TxlKE8-y<#XVU*sm;J+U=v>0&>GhQ6
zGw|L}Ff&{Zo~*I6JpUMT^Ky%qPOlFp(2X~Ge}u*$`6Ojx1{)mzREb+@t-3z^7@v_w
z`n)1ocGut?a=?G@4mnqaTp*~BDhu1!l&&wDDlR<R2TlIfFiEgNan~)j7zEPO)3dg<
z%jL9fNEx72XsZ^P(0zJ6`M>8xNR5_Ry6Z3I*HsNTO)6YFC4NK<m0bTBGjAq&nZ{G8
z{k!2YY(P$4F)Fb>H?X7<B`AVXol@em>0@J;KtIEAc~|7Df%LY+^2taO=2&l^*D_K3
z6Nh}KTBvzAY<hIX+oRcWU0sqMv`kFsPuKloW`8Rf?75+pxy3en<&D^qrIQ1L?M~JO
zt)fw|uf#sF-sXE(>2vcTA`eAn>Eo&|3#Gva4FAVdSv`J#%0qm7j;-5MwJQeoTvO^?
zu5YcjBLYA4Ue^}G5l7b+aqx99Mn=4~j?a@9Pkcei_#LvXmnbsvjm@&vZl1H^t89fq
zE4%ds{rj7JdH9ZzvejVt20<qDv{dN(8)D^U6He)gEVtI)0G0xQIUPTg@Demt>!QHq
zs91YwbqxJsPHgvB59Rgf$z2Yv=Fd|Qyp6m6{(bnN-%PaPu@;D&quS6GJuu+kOjM>t
zzEn?)(UHSc?mDlG^%$zbByrti#Iw0PxoUDz?5Jk{1DPKgYKsQv1c?reqUDEY-Lb7K
z-9S;wLnM_Tw7)`?toxzYi04($f$NTkhvzh_|517-m%8{zk+``uA|%H<j#BON#m|=_
zg#9SG%HAby1ZP*RG52XMbw1A^mcLnCpIYaU3RomOWGZGi%j#GDHThj0%<qilN~Jf1
zj}Q^?m#imhOfH0s=60P*6k8F8{`syllVY?rl9FKPg0%+OBwuTJb)GMRbGS+KMu#=c
zB^Hy6cWtTB<s?L*pv&;_UkZ_Z+=tUu{db>}1s~(aZSs#)=xY`E4DRAmnt8R~c$lrC
z>2c5GHSVl`_KLondGqV9hg{N^BqC|tp)V}Kd@qzz1N<It2l@>CZ#O~}!jH8|r%&7U
z?$Ha0GTt>%UH>f+V_k~H=h=p^>)s(j`Kuq;q?_^on~d^pilJX8Tl@Vp$KUCEDq4@X
zleb|q{ZF+#)2#afcCEb9tda=z4+FoSW~^*pf9&omyRLL1%V2{e`_cO&t&K+lCri7@
zwRpC?IT#7E>-uz)?m+DXs)Pg`mI;6K*+<eQ<8q$Si#h8f)%?0QZp7Px!GyqWMMz3-
zyD|PYCAtk&pPjZCJFI3jAn}z?53Qk2H2ivo;mmDE?<D{1m>zxY)z5$5NuUT40Sc;J
z%sP4w7=7APabP(J>{eTjnskR@p7gbNp5>^%)GLwCTW8xQM=B-hw~cc8cFC+foxGv9
ziI~BsncugBl5Z8(X%bYN3&*JOock-PD)FrG=FzCJC`}Fr9cIk3x?M#Q>u8(~0iWi^
zTJPlb&p9nL#p$~KvSk@li#|8GC~LOTv%e}rGI@)QOBY9jn{S#L+v2+Iy`9+E>HB+3
z{o2#ndEIdEGzF)0&gazAf(aSgk=DF82LE2mCB=R>s4_u}?%U#Xw@ge-WH;@@fDBL5
zCtQvWO5bZcBV=bS$A~2NtJ>PQ{-|HQVjYDi3^q36+(UXHRT&XcHY4SGBy6IR68K(v
z%JMrLWfs-&1rf&ITgH~;dJ?LPl`!Nx<<4c1KB>--B69zZG>c8Qzbn}(M&9!h?Tfpg
zIkd$t<<D(qi+h_UG0k^=yvcK>MpzH&G!nP4criFMBvSsgxcV5@8$~QTS88!-@Vmd=
zPd}LMt!0`Y>gD*aoMIdCr2)Jtj2$G!AA&i)6^c}m?<z8awaufShV!G>{F8>Ff^OCM
z(Wu`<H6ua&R;?TQf5St(dfdPL4yInxuC|Do{oTFd#z*P|8T!Y!K9c#=mk<?N$7eUv
zy7KhM6JFJd^|sqv!u&7967FEMs_dM4>qs9f$?B7n0s<B$y{qIOT1E^WCl!2^SKQyE
zETRrkf=;5RZF(mP6kE&NcfY5P<;%uJtZHPqw79xbtPHQ3+yzz<U3pjPm<5Q6e(%+K
zyhIKFYx`tl+|9#c%=NvPWf8*0>-=7X{OLbB3nw*@h<E#Wf1bn5IrwacR?^J6-kkPR
zRrx>p#qC2)Q~4YXG&$o`qOR6jlU8rrxAQA~d~T8dXl5o(d%DUJW!KcweZci9MJ0Z7
zQN%k93pK8K2)Qb%0)3I2oO(Z<+9VdK#NS_yQh(#*xrMfI4UFF?){x%P77J{%lg7Mc
zW0QUVo}{Ft#HdQU11hDbN3O1+k?`e9*!8u?%jChzYj{XZOw7xdFFT4g3RQJ<BKP)e
zpfNH|Be`0oxWvTa=jX0wW@b5-qghs~ZH;`2^pis-RXIO@;(;U5VC231eNcUK$E|S4
z$fCf>uKP2XpFVxU37O&hOd2CYKtx2##)jS7+gnsz95`SWB$+F2XU8n)wihDmdz+(@
zEgE5e+o8sN<Y-tB$5hdR_x}BR-1clnjA@VcF@|_suFAIrr!p%tKPYR(c}L+tmLZI7
zn8`}&Hbey7a*~&`_Kc@h|Egf>{CbO~$3ER@#nUXDlSzRbply!9xiR5nZlV&cNEW0v
z;H!VDrueD7OFLL<8j<y@bZ{@kNqjJBY<-Vw?4>YYENb-n!e<Gr?7W1wvT?0aLa~Q)
zd|rp82nr$hwYe(OqO!7!+ZW%|i{sPMA|D^_Hup*^`Wl=!<CBvS?U@SQVDB%D$M)o6
z)^M{U5FU0qv|Wfa2I=AMsvo2+=exst)9aIUR>%5WB{nqz=bbScA8-r|jr_sIMa{tA
zh?91nB3U6cOT^pc-=Be0p^vPbTwL&xhM*>#jf(X*x$RfFY%6POY2ErN#>B<RYH1N!
zjAVQQUrjQ99Ko%nb0;Y1na|DN6a!!HlTKuP(FfAv{fFWE;Ucd6=gY_A8-<ak{A%xl
zM3oQA#{EPa%wAj?Xs?gdzIMte5((E&mOWMci~7evIMhZXxD4?VpGa*cMREy=6oWqA
z(SfIkg;}(elE!AOn15Rte}Jd=muA-iTLp^UQ*ViUP1CF6rx<qy`>|cZnEKbP25|rj
z5G$szN*fqt&2A=1N=iQauJkoXz-4P~!L|bPS9V4Qx$x8d*<6h^KJ%9uKKfWF$Fn%G
zM}CL4o2~fI3a{0ARKy-acx-2=g@njJq+u@bm1#vSH-9vpt2Fl8<bz^pxA_Pdc>X2)
z{P{Bih2_=Ja+TA%<nJ^dCi((u|JzRtSsjXOt$)T-KuG@C-lpT_<;|P*2GC>H_lloX
z$?FcOQ^dckjHDahm%&t`J$+rNCY#uuqRugIahS4{Mbdp-1(s!9Vu<hCUe$(Woo1da
zoOqA3wM7k)6*AO)-rndGS=rvS#w0C3YxaLa+jwiApBds3`zD>&`0@S*!~;t4p>n2R
zZe=A7WAc-MwO1>pis%jf7gl}J#OUr!RQwl4e+QFyCVHPP*0Hw9!5Z{<dbp|r(1%>a
zi`#7W-%OdV^RnCQP4wMbXK*TyEs@{D#cX^JDkO<PrSI;zooeR2qphv&c(tVp2nsK~
zr{7$oM(bUn#!;*I<m9%a2R&jgPh?g6y4`C%N_NmQv)7YyjFJtJe1&^M?6z0!&O_Kw
z;U85TB%Fg=Q|%4Oy+f3;OISLy-CLvKrxCHl?r+E_{agAO`0Tk!tBg9IExAs<p&1nx
zULVdL98^%s;K!|KfArGk#D>iMG)I?-CI4jTcO(7pqEjGz-mY&jnQ6SjQ0#Z65V345
zIqG6meBpI<bv1|k!CbrrC8W}N^2PmSeY@MP4%y&-luIo5_;a>fAdNV!`d)>Wd5xED
zbzGS@UK-+*Z>trZ4=mtKy+2=xV~AYf_?qX7n*3P1h2bZfjhqnWM31%|kiRP=+%z*4
z5q{Mc%Baz*@|1>}B8Fn^6(+TR;kD#<cX&PO|G4SqkT<hIS;*iIl9u!28$RRySPJ9Q
z&aV>_>Pl(cr~(24iIe-G#0DL{SIglW8yl53e_0-3Uv#0yx~Q}iU9kljo3dxBSYv!{
zZf<XXKtC!k;_pz3cZ~PmRPhct-c#Y9ZzXfoJ1Mcex-=fzr|gk!2?_4Bk#sgS@0no0
zp?Gv?$!NAH!5u4zA+9<PxsqwFyqiVBk;VmRqpvT9teZ6Mv&mHuB6_9w)luc`y3*UX
zgbwWw?wW7k$}hC#?b^B|Y=NAS%3&HdTP!Bod$inq3Z9JXk9)oSVpw-Lwzr<;aT6Kl
z2yaHcuRa0F&f`|}x`IB&mYY(SMy3XbNTbLG;WG%)%r?n3xpYjHqVu?nlY=IP`Az_b
z?sw7w9P<6qgV)K$qJoc&sn{yFb-a2l9vmPP!{ye8<Lnpf)|Wr7#-viYD`$(w^iz8O
zuJBLj5;3Trk8VC{VzPUV{SgFs<kHeo{K}K!7v@va)t&Kt4x6c>BE2Vuy);2;xwZ-t
zLF*|Gu4=B8gLx9JB>a=`9{1E$4&41a*X|wnDqd5(!%vWfSIN+#$&EpNmfL+3oKn>e
zG`2gq2CI@N4?%g<<aPGF4`<GIUlXC6Z0*QavW^@Z;gQ`9wfj3`w+ydB`mg`sQHppG
zjEX%8)5a8@`)_@<d{!<xx$C+!T}nJLIhm{a(&_QuGeg)@{lllOk-h2C(_eIPMq@b=
zln<uA&pZm8)-za3waU6yTD_kir>CYytUf(nLAkAPOiWBv_4Q*wRyW<BDPxCX_+9a(
z4|`nBTUI%5_QmU2dU|@!wRl#~UQ>GQ(DXkgv^)y%N0N;!(z^TjFI=7Nq<t3kXnca!
zXp|Un5OmJPW3%dXLZw>N1ACVzPp0NS*j3n{JPI3&3rPfu(f65}-0`1Bt4SGQti9Ph
zK_EoyMKGJysjB+JPCDFY@dQ=1w~M%S>DGW=9tZOK(=Cxbt9I*c6)eWSWpI_=o}=GE
zz+~3z@ifK6dPls`JOR&NQE@5sYZME<@~R7UY2q(mIf)E@rfH&+Y@c?DL(7GQ2ygZx
zgEjSft#xq1cl%@K*2Gsicz<q+2}cC7d@tYcT-*j+YLrexd29&!qDg`Q{iA*L3RR&1
z4~8LFS^ILO!cR8i#5Q_9;U{v1w0d8e%$DmX`tB^$GQqB2`Y^--cunw-AgB?rXlmTf
zI=iW&wfv#Q^mvy2WYil;NLi5}E-xq7@%t;+c(DcpJp%(BD=QXbGFlLIhe=h39dCYB
z74F-&Z(}>dz{`$>OC9O|w-Lpr5POy7ebfn(eh47ZMElBF!k<7)DykE%Zw;p>q@;uc
z3L_u8h!0@g%~raBkd*833I*SahpF6zuo1sg?yfy0hAPT4!CgE2>@0hJ?4;>~h9{DM
zMb7fhCo<tlXR*>nOj)Hnw5b$-q^N<*t~@i0QFr;?Y?Vx@!0{<s1;hZc7thzSrm(CE
zar9z!RrhlC*-1+pB;@dFpjc~xe#EDJr)(!T^UIY^xESIYO4}G}VGo!?alU;}t;a3n
zXyv2h)ULVA#U_h&DtL!(-5bO8!<OMUm}h=@i1DkMnisCo@Egq=H@Ve%eoRGd{ac9-
z0qw}trVXXr9%cm?Cx^ipi&C)^LSY97b_0v0nv6^SPY)jF<FW|}3F|JpBLcBhVzHpA
z5>WY`Ck$!hsaT(m%Tmc-ZkCjmcLA36X=+L{ZogKC#i3yxDVj=*GEVp(%8J)Mi@f`6
zg<;$~LtjE?XXixUQyK6{tIP%x1E``$Fh4^ooi}9$Ocg0T*5H`m|Loz&>drK1^)jCP
z{ssyP8JN)4<d>I`u;O;wh0Z4TaRq*Qdir`T7`vss{c6P<7at$L_HHw;3QIvEP`BA~
z{fKgNoUWV>681rgaOKxx1g51s9C_-ptuQ6kwJeGQ4|)(I;i2G4Y?@1{aqXPKN1ed+
zUDo1MmlCMSq%#LZpWBA{AT2Vm3EE#B&zCGX)pQi`=IgWXR@)|GA}?^G!iM{X^<~!w
z;Gd+OA3WpWOaBHya(WS|bcXMlJ1Vh6^f<9qE?v<uDGxt#9qkJhTem(HC>avQw-1Zm
zT?t&DLw@HWx>>Zw72wwPAz>@h*}SPM7_l?7yPML*NZc@N^{V~j@Dmjk^?EDa@uZK;
zvB8XTWil?;|LM_Vuek7JzoOj)WDeHgtBiVLX<S49hgy2Ejm^y}hh;(8t@ZuuDI1;@
zk4;QiaZEOAZfYW3tg}C}VS_$vrCg$V%=Y>B>6-^9C+Eo^Q$<7c#-*7U4UNQyPcMXo
znx#=W$2M<v^5dLNH>Be(O-)TXEJrf5<{it9@Y187Vm!&8_{@mB`U<3s>X`FPm5}dk
zckF}(uCau9uX%+9PTWz9C=WTzk$dQ)D@C}|DAD0-NtZI9j=G~znVLysw~7&H(KKW&
z_7G}1)?-1-dKhOE>2B~q@pXzgJNAL0uK+s~w=Fwve1etg^Un&DWF$N5Ej{`W<in#>
zwmTPZmA+dpA+_R)3!Bk$va-H_2e(rOgh#K5{YdwX8>wuMAB>wsxYLVq-I~Vfb-}L)
zReNl5BBBC0S&<_73!hAR0=g`?q!%fL{H|!BE!ip~0-{g%E^+;PvFgP=w-@^y4oeL=
zvF~sYqRIHMW(~Z{`X3Y(6rA>^UdgvDc^|j#th6@&xlUGiPQ;=UqLwcs?`6B#<i_-P
z5-#RZL;PFU(Do?%_V$*DB#n=VS$pl@i{qh3kj2JpZQl_P65iB!-d{U|Ul2d9j4ask
zn$B|`!@Cx$WUp*UQ|paV#cku0$DOwhY+qJ$Riv1y;QJ$DOnX!7q#OR)`sUuTwI`@Y
zMHSA_{7u~2|CwNI49XyMC(#jNo5s_f(tbEyr7dd08cD{KpJ?z{laH{GQ5qu2hYneD
zdd1Cei+y+fv*K-$^P4^K<I9Yi<ft`?>*Gm7Y_`I{h3zF?wY0e!?J_N0E6&ZXE7=^j
z;-G1h(6<Dg$<G>e;EE8PY9U300WxsqL`I<~YBhqFSHw?Wxh$Ok$M}N07ASPwf<J5M
z-)g#QRDtYxM<whrw(X-fvZ$QS%d&PIj!hNY8H}tbIOBVFSsO9=P{xF#lVosrb(}JF
zrckW8@kQ4m!N*UNk+2+K!FKn<$r^<HC%#pm;LZLx?-~}Ap!|J@qHMmz3fY<?FZY=s
zS|S^LY{+F+J^3dEM1#FAv4u_F#Nh(Cj&_oV(iLbHLQ3^Y%I<p$2?%~gC`KuG-Av*_
zkxhiGm5X;uf~o0vr;Qg2AE-}v9g9k6mJgyVm`dt+QiMAoG%|>x%9x>Am=dL7l7(S8
z)eobZkuTbzI)0rUQmz@Mt8KoCD@BMB9a7jLQzhX=Vus2Y{4~~!Jw2~w*^0%1ay<c$
z5BYa?X3U!oA4^C6=g%Lxg>pbVc9t5ehjLgA(T!|n6D1uS9LAor8CD+|cpuh6InCte
z%1f<sA`db7%{QXq=r&DqDs3UrJYuedXqZ!{?+iGtaV}-fiINDLjQy_>H)NG2nHwJM
zgnZ&{(ielr?hamY_G{CWMwC#+cI&C_TN&X`Dd_g)R-2QfWqudHAAc!wH{g5ccfO-0
zpXCO4HPFD&Z1XdxWHtbI7BAC6{yS7@+)Z!)4nUH$G*VD1k;%gubdnraR#ujaOG~@;
zKQ0r-+ZWUA9*QD(k@v?@_1CZZ?Yp(M?w6C31qVy#?jrReMC938H~r=K0KwR6@G}ZJ
zN|c9Bc?^yYT^FKy^EM#%X<Y)@YW1}5a5yW)g>Jy`<EqqJ#q2e-W4MkNySjBan1ekz
zPhiuD(f6e*DGJB%s>JC|cC}~y^+<-m=7V-7RM>-^08RQ=1+pcJn`^y`zE94nxtW<<
zWVab7peyKd^75z$w<l8US5Z?gn(FEs)rA_im7|}Cik<+2yjTcr{&>8S%4r@s<hb6_
zMq7Yr{tTf2E3bIucr$`SoI<F>{K@KWb#F5q|A^Sdw)Rj~w`=~%F;~}gHA%$s@F<eN
zf4$$ZP+uq0fJk?sTiXDAu9TGE8G#NDfliFZZugf@C_2f9#)E03rXG_fAIDU@z9c7m
z3LAVBbU%QmZJy=s@9#gqNsN!jarJri>Q&-K<sIF{ZM*`QZv7`@I(cbn#2;F=;*W+6
zPH<Fy*SYV~v4u~>qfS|2lPx46VALI$KJ~C_qFS!anUchvd~5MouN?l6zBB9?chv5^
z!U|n{JW#AVJhwR>T@5(!>R4GE)El2lkLF+xoae;OB#qS8{gm&iCZ#IqHtQPK^h)J)
zhO$c_MVKY*EM>n7Zd-3+#|jeh=@aS#ai%guBY`nHRJadU=>?u$)2F<Mw6wIGOU&mN
z7Z(7mCieF^jZ}GE*i=y6+pM;USPZAR4NTn$bqj>&C|P;hQPwbiCxfgfDj<7erL^@l
zRt?GBzsTiNYIpXI;6y^Hi|!#p!&=t+cM@GMkvbx0{}Su!k1wQ)_pX9V-EnG0>uo4r
zQt@B8Ejd%LiuN>(c+;y)deJKOEq|v5e*5-Kwc&*j6rNyETYiy|hk~v6H@wrJIR7>)
z@~qBbY^(F^IOMQAxY99QZ$3(-hS}k!nku55C6*2HwE0%BW<Li}5kue0WklcS?6biU
z{7B0>$O@{4RF{dNVh7k$MjMm+7m1umBcAB~_xhvic1U|{)DPsL_@&beCY8l(PkfFm
z(f9X0adiY}q5bbaO8JUNt8fLpT5I^-!X3g6L3^7R{;*i*z8gxoC&(GM*zj(Oi)!(<
zdQ`-WfKq4d@J_yJ`>~nf?lR=F6onbJPGQnzXiryHSLNMgdFK8(=1Ng{IhLU4Hum!y
zfLE0#E|?LEXVaT4qG?1$4b;ieLc7$ap*9C!Q#GGfL4B{Nd!d}Mld8+KFzY|ZUC--g
zv~IO$_qQfNp9`hLy?m6Y@p=`F;*vLhi#)YaI9hkd=-$a;`@~4hXz_=@`L?6DyE$Z=
ze0O&@v2J*1ND37DIZD+GmjQi?!`8$g`Tfa46~zVR0(BY?vwQZ`87>trzKxThn2`&7
z=-DUNE|bn5mlKR#TGF#yZCl>=Qh`TiW8vqo=XMLQJNn%PO;daxtMuA!pg?MFQ5;(T
z+}`D1)2`MxhE2)?f!&$;i;W7FkJd@&JL6Il?f{Hm?-pj$WA~tk0Ya*{zzP&UnS8Ym
z$te^g0++|DY}SjHhl|bUmys{jN7dBSaM94wIgA31QAN5@x5<Z6*sd0`rrZ7f0qT5A
z^tKZ07NF*R^Yu^rW{gtdls)3Q7%h^nf*=v!GUFn+{k|5sy(3v0aTo>(t>+>klNN%0
z=q7d_^!D<Q$$Bx0n9b{<B$EwLiaNV_T5H`qqh70SXg6r5Jh!(6jY6-By~<3wS3oq`
z0956&>9G@;>a%pZa>IIF4P~hKwNUJ9(tf`2Zrp{(@UpTlYv-8}1d~~Z#gqr5!`pv+
z(B=;)B&NRx2NQjy9Oo+G#2&9rOE+uTj@k{KdkGVL)-yI?!`k%gHu&DT4@?&5`Cd$m
z9wI8(PB1YsDSoJ(`e{A{a7l5Io=S!btZ@Xm6QwTzD<52!le;gdX2upEN86*k4nxod
z4|5!}BR=^e9&*&VmIj;pGWegz16kD$pH#eC=1q?u!}waeiZwJeI6nN5lk2pp-B9tr
z)rcZ-Dd_7LkOkU-f!~dJ3Ty7=1&EZL<>q>NL#;BMzp1yc749vnNk)awqUjeQC(Wx5
zBewk!Gk7~Ur0hlkfA(7Y`ec@z`zeY_O8%wo%8zI8y}OcMUbW)Bz26#yDFE(=3j=$%
z*x^Y0Kfc)9Wt1971xVFgQ~g+&saew#8_>40w6A&Cv2<SHR=tCp`*Pq2ahJ+8Sf2^7
z|Ds<EU=$7LFaj9tV)pFuz-7c4pfur&DUF@QdbY=l3V(sUUzzp92ylE$PQmnIdwYBA
z-j{aV+}zh925JPSJ@_hex6h~`pxg<4&$u$H;6ZR{BS1s^{wDo}KrB!eVF<9Xv602+
zk}&&a<jYsDI>{Vc0!z!A&_H86=w)Q$)#%8`rC|0~owB}%t5vFteG$Ag-Tlrouet2f
z6-fwAnVZ@b-_(NIGgP;oh;Ofm>q38o*5OC{s$6E&awQ&cHtQchBALH?L1X&+bl`3A
z=aiJKMN>Zrgvbo$1=x%gI_e;NrxzHCPP_qxI$2QQFp%JtUf@T|(2Zn^#T_ozn~vv6
zLxtR#kuabE(osZ>N<-_r_rn<i8b7HP>mB=njC`D0HuJc3U{_yMgu-R<yQ8$CjdA|y
ze0MT;+UCKaKX4-L4#>X(V^-XWn}R;q?0#qIt42Vlx&~YrNEDcA1o{)Q1kyVfUI5Fi
z)y^7p0~s1777G9SMaNowtbi(TC9;3?XCMh29Mvk2(nXa%^Xz{ed09x};uSl`vmIbn
z9^2HOi>2>nTwBM#dHUV7avCC-ysK9euJpRe7yDALW^v~6;B>2_wEg$pqrF6bU*E>O
zWp*!kr*Y!^AO4`kk_&8%0TG;ogQHTL*~-rD^LdBc>Q*FEd57DqKEn;XLi_zcbeU*q
zAOPg+X}0AOfLa1^M%BO|7N#A52uUoIgR^rS#1^}+eD@@o{0X6ii;K&7ROHI&=HK7M
zP0p`M+Rg4PGv)f|Ksz7F6v{X4k5w(52DdBv@#D3#bJZ+wl=%H>yMOyA-pc*WneUPR
zE9<B#0&xxN9%~9yt6Y|?D3(jd{RIxJo;FU+CV?+o<Z~jj76JV>%mbH=$ll5e^WQa}
z4p}`fW{0<(3Jm!#VDS%>X(KQ@6r%ib=+tM1qetPZB^M~4L&mo_MYuY?U7%DemgoKJ
z1-(PiJ;KhOm1Rfx(CRsfPR!b4ne7`kF)<Oa36!Ik%*?JBD40_p4Ikk;5}zx!xGz(P
z`tSp7g>K$Jch<t!WidnJuks`_N%?|J(^@4B@sn;T;z;9AFYX6S3q=JB-KAO~^#h?3
zqfu$^x&Cb5=Fye=zYYQhbCYZsI>nkM7kkqH!gA5ZG>d`GO2c-RH|<e>033WJP|L$r
z+=cbA1Mwoyk~1=*K^_gU&&lDtzxjByB=EzqeP#dAWqZW!^gY(B_Z&B0w+XYe)seQa
z?YKHJE9u`&f0P;{M=0urN&mRt*ypsPg(`iVI+1yoakvHdn!xLmS%Ev0cMH-cyAuWI
zZ6GQm-h9{|O7VZXTTN{LLW~Cbl$934q&>a8^6fLvs5Q5(03*?E@o*mV3t9NsF+-<Z
zG3&j=$HOyydb2)cTm_Ay)){sZ6ItQjrhPpWT69M-_*@bm)tC804TW;H`7=`t(5&qS
zeTVSq=;)%`iE@1*GfPY4i`@lrd3pIxVnd(6KzL-C^G<S%Xb$1qoqUhof)q0j>QOV!
zn+&&)u%%>Ei+ldk3%E%fm;)YIQXAivcuYX3(-`&lXCHcKK9axH&NU#PtDW4)qew@z
z1VjX4FGM=-dqehZ^n)J62nbu(H{03Hi}v-Q+h|FtS|I1nwfPFYP{|_ar3wR73f8}v
zDpE`ILAyl4x)8X9wM9Xz9^pLjhtcWZA<*Jc1^CywJv9|7e9{3=Bj(r03JHI22pJ*a
zm(}=nZ`EPbW>q@#&vcbm=XCrlRi0nQBT?rA3-i=M?MQ#HJ0101*1?Tu@wx4&d4d1k
zx^lbR&gahrKi*d(rSm!VO}{gwNaS<rgTm$lh`^$G8jj(k>&_V5{QSIjtqmcVz>E#m
z0sZsm>%GK=J>~k%W2NudDo(v8%ARX(?*lcX2MBylVD%wP^}a*XDVyQ2Os>C{R_lR<
z?n0`d8>8UIBj%`m0X&@=&2F2fIX(U9bGws5>G~|~tb^G!veUITzEl5jQHk0{8peKx
zx6Ml1ST)SNh^33gH9M1S=-;UM1)lf%dB6EVzerlpB%QT3)E4>5-o5dOPbE{GZho~c
zhr~3M-<cNXNf7bA1TvCn2TZ&I3ySE@BbWmQKulr%GXa+u<!!fnL@^zY_qYgfATaoj
z8aA<D{*U9<D@wn|o9zIxns2Hv*<E*3mpr#KVJ#Lx*B$Ctek%QGSy`;bUc(|TXfH$!
z1zA`yk6X^L9#7-^*ADAf`#_^lRZs6=mhB_dR8CpB4<>P&4-zPS<wUf$wiex7t+vFz
zJX+o#&!R^00IfJ(U!R1Tx8J?EwPSYj-XzwvR@ZT7LAFiKyjGT+judr=li##+#$BG-
zj2d^g5&o(WL5A?_xDe$h-G%@0=*}gnhwHP`ACe?|#%ljZ)L6XZ622-1au7b`po<Qt
z`*93VZ5Y4hciupVjEsck{-gomr^3O<5B~GV%=KuA<l*+BJNcc#C!oto<f?{D#r^!r
zc`#p%42mS|Dg{MFwfFB)gF7K30ziJk!^a=5GL=0)S|*bU$La-7H{R;~v7b!C_5p<+
zKc>gcSWFBVi@XE8Y3J(`;|u{8By)4~vHc6s#sD%c5DCFck<rm11v1qb=mB9+2oiv7
z9;Q?Fg?tsLx~BsS*&f1MAmtlxk7VK?(Cp3HQ?IiT|En}w74k5@CXZIyV|r-$gfvA^
z5i+g0?SYeufcXPblUF6+e}cm)AugFBCY7t~T*VY*I{cODbfRK^%*vW_==^+)BsJMN
zlX+su^jE<ONsoC<`%us0TJ(2pciQ!+43s}aSTF2HvP7bQZ0$wy?FPu7#<%CY_N>c%
zDF@x%?JbA}IUT<#XV-H@{l!}pw$J$ax5o^x(Qea%jg$iMEfCv&lyrYbPnJ_HuB_Cx
z#L)WM+7`hHNoz2Oc$10Nhu}1#Hl`Z8jpZAyPbjNv_q4r&e{vllHppwgo4yl5|Hlb#
zs1sQLZ^RWqL=Z=pX`ZlVdHsr)ZMrZY&kNnVpkA9!e-*#*(UGH0`{imUsY2YH>@h;d
znV;K>p-tWVPBhqJvpSWnO?ZfyJavQ_`73rE51tI{&v_{Pm3ISNLvi&8*451sJGA}r
zf;MbF+k~yPPA;D8e${7>A_}KKj?SIgXdJ51vl@r_1&s@HbK#iYm7g~~w%r{C%jBaL
z5zrHpW7{J^$ZNlQwWPNr9%tSYoib81^?s*S!$(Hc5O?Is`*!*)l8jA~fW=Bpe$`Cs
zkCq*?SorYv5c^Ll(sIR!Zt*gt0W>6}SXwD~tf^_^cb&8RW7h)Au^4^xu@tkE*&kk`
za?jq))4;5U5a8axWMpJqLj~Pk&>j9YxGG*Fll${&<9y4}7P*XLX0x(trA;a-G0@ST
zY5Jf<I9#9DPGoTk?Rf_z|6vkI{eXkopg36QgS=SBZdAyTv?}l=8m<5fl=;w?PBI`u
zyZChLV?833XqYa*SGI1u^pQ^zg=)Hp{~<?;B0)+b$xw{LHu@E<nDoZWb3#GjuLZDA
zeu-lRpfR3CirwSH$YZ0z|Dp89J*MWz$pQ$5QQqWYyP`FHQyS{Ca)laS*Wsh;j0tQf
zo7X9du`=M9Ja+&L*=)U92`2cn=9nW$$4hwl*13pt25w|uz$`c1^_Tc)LoAKZiqp<7
z<qLV7*Xe;(TM4_DPs?t)GDgq(zG42PK;9lk$8lp}4{P-O{!26lATWU^6_{r+<tI{r
zZqp-@A<@%;vwn?H^m<=;{>fqu4XdBDP^+-@i^x15^;lqv42oDwZFAxHz?Sh@`b8JC
zd#45trZKukj*WN{VHKf8QAZ3B!C^c({!+3^Ce;-}sJ`$E8j?Mx%?BbG#aGj+jlJ-N
z*IRG_-#KYeHvWaV#10qSoz(j>87Ev@n<HcF;H3vCFTBpM{~`X~l#pJfXxl8`@vi*Z
z`$?CB*Dd~FQ6wW5BLv)pazvnwNhqM-<z4>Fl+h48za3)ecmL&D$XchLf5(z&Qmz@A
zb_1ahW318p6=Ht9w>!UNg%7bIjG=gRiMK{NecI8-{1^(~tu($Oi<6)LHz`E-#Rp}7
zxC-(+48!b2<S47q`kO|?7e<`{x+f_HNjk#kEO~^hpSO&ri&^;9-H>RB;UJ*5z*;o`
z{W6E2pLj>h<O!KDS%8nOoyX1;<iyzyWQ2KUeH(2?1V*?ushh7E?AV?NJ!_@eKWPQH
z1aN%o>DjD+cjztCm$|EbS5hHqY%0JVgeI~nNGm~zZu!TlrEKCqf2bAecHKYO{N4aA
zOv0i%Zd$EeiS}#OF_aSCcgqA=dZI7?=d_ehx-9Iz6^RE0j9=7kHx6I`Y5;39j$jU2
znx2Hf5W19-vNCB0hjMz^B{6OMT|Kv>rN+cIwoG8$0YL579?O4@()?Vye<JbXsP$r5
z=XLgrPC%n>Z!@>Bkm(hJ`E$Rd1B0H^2(ALf%iP!}^{W(^%`?9^d88L_`$;Mn{`u6Y
z;of5+a0|O~yY;lmL0^9MjE8xLTX^1lm;28Hyy;CH9om-rTvd|7pIT5=jP8&M<^mpi
zBcwomggvRc54*Yj@c-S;-THML@A&y&5dBh8Qo*Z-H9Gd15A-NO@#w}BpH{wrzM*D~
zKJX-4X`n#fVkdxmaqo82=Hu#PeT_I1{h&bqpRIUVedq%t{)hnZPlN(Z4G#3edrvp}
z)zsC478Z1X*uCQ_`0e}mymu7_4SOY}?8aRP7^K|5)*RCN<Yb*R5CTZx!BQiBI2J|M
z?qs2wmR49(lOVXRXxgTyzn_kWhY&z9*>=ANSn-br-V;!_)vjO$=ahP^{VQ?cLLmbV
z2epYh**HpR1qC#%3WFGI(Q610#IauDlk(a@A0MyUVN5STG8biTHinFE<7B<tuYTzF
z@1AH9u4i?Q;V!YjH?}d%w+d@d0EZ7{C1$8341}5tetwlI>MMwZMJ|VVhamBq{6(b6
z9A`X>Zk^nS_;;1;Shlw7XF#$mi#%)EFYiXC@(BR10<mT_^W(}VK!wJR-9gds|H7gh
z^slU;0U9TA358K0HG<%u>sY23pv%Wi#Ht6y^9J#xp74fmMFm(-=-okxgpKFRZo)_@
zaMwzjOp=Uvb~xT;xcFSJhXW@XrH!qv)R79XbtQ`f`!N3?`;3uv49p1$^oltEYryyX
zb%>m3k@s=~RPml6w)UkGcN+gky~ynZY7|eGfss%*U~Ou7yxpg%C))|Wt?H-j-sp|m
zg;T-lc^+<8QrXpMThSUOOXW)dSRqX<6VLF9=Qc&l?OrKO%p!g?M@bhg3hl<P57fp*
zfWh^EXO!HlDBPn}5fl@nLMLKI8QWZoSIl;T<#_`bYx@df&?uUQiwhsf<C4J90s85l
zo&s2u!Vy71K>>%pz-HBO+D9g}SMm9Q%KLyr-o>^NADGqP;E}+p!>15*g+Mgly~_tq
z9dLEh6bjYRnLI4n21JvNCnY6uIIal)PUV0X@j7?HK&zpPqy_HWVAvCYxJ<U=V_;zT
z_MG+=<_ruJ0S^cKPa*dMZQ=Wq?(5}azwm|z{<uXb2GYS|JvIzyxBH2}l<^H>;De`6
z7&ePE^t~Vii2TT{l^O-eN?qUs2fWhwZUh>Ci3dbPM3Mx(L?r6`@2gBg|2R_$W?}(Z
zycfW?hc6+(j^H%RGrI<;b7!7(RHgkQZ_C5wA|SxIPU)ZuP@7FgtN<n?|5hGc2Xiz4
z>Sx3by>~DKFtK``{AZY}|E4&QCPW|3HRORbiZ}y$y&xGP9)tt$a0n_sY;l3v+q`Cg
zeU|TJj?~XXqN6dCQrRUY>VT<9tCUB({@DxAdLSeuOsw+-ffPI_yoBn;#s&5Tym|>Y
zdwZs;((;_}&QJXSUR+D-%Qtr<@AOd9e-&D{`zuHrZBGD5KA#`A;=bNDeA=tr9j5#*
z!@NabKl=Ba(7TU%+AoDwKPZwx^U-VZ3CMVfmqUYtl3rd-Kbro)Qiv;c(;F>gxbf1u
z)0FiLK{tiO%mg~wur7)q>WHFT*K)z1_%<wveW{ACda@tHKYeZmejoWeOEWVB*A`gf
zXW?T>XJqz9rPsTZG9O)i1bSxT0mu`IKV@NI@L^h#nk@Wm{b+?B-{vmirzJP(^FyXc
zlPMKZ*hK4P&;#w3NDr~E7}**vC@&vc=96^X-o1NA9Pj`CM-L1MqDEM9z(dKcIUM?;
zRhoSMwio6vqGSU>(2y{!r=3+&DJfrM=W^uD`U9Z$3^D(eicInL8)blx`HB$mOms;|
zNci0kQVq$P-4BC+krLpwuZCcKC-jV?GwJ1OE+ONE4VGuB&YxLY0w+u20ZiZosu7xQ
zlLl<7MVJslm+=%Ol0wa4c-II(_0Jc<j*@MoTAh49-2XOXAmKN;Zn{;ZS(kl@^u;l5
zI0&D}5;<*|Z@yfx1*+5Y`N84gUJVtI=ksDJ3no?6e0+R653rpcg4!%0E<Jgg2VZ3y
z4rC_`K;C8d{AV`6OT=aneRH;*pPNgQv*~DM)lu@c6o#pob!y}$PXGIMFKd}at!-CV
zL@<(luY7e(M>W20mQw-qjNM#iXF<(IRDoj32Fw5nQ<Zcb8W91NaKDlJEaFmDUfu`X
zlJS|DF)+V7XeB`cK(i9r)d%+4DbU_5dvqJwf|Z-fZH>q4xWYVd;q1(j<#%f>{BS-g
zr=;{6xCRq4GBA=A5P=;fFOL=olb#g2tSM)5I6Z%Z!MujFhc2-jX)h!mZucup?=BCm
ztgKM@V}ZoH0g@Wru`JlzB5%f#>8(2`{%rzqWoF$v<^=~(IYTh#G3XJ3Y!@SH4#i1V
z0Dn{lHpER}E<=%DYKx!rqz9-fK!Msggd+@yGFU*0RqCMnO$$n;&`*z&+5!yvu|69K
z9GFgXf9(Gx1(teh3;6E{;h#I9=A*)_$p+e44Tp7$>=wVFum`#ZXQ6{7jb{jaqa5}N
z+<Raf&+DE9X;cI&A-C%?Pu0SL0%KsX`87J)kFO#e`1|+ooC8ovmwe8KuMet6oi=(<
zm%R4NB<@~=$6}6NL>Y>Zj_Q5ZYQQ#-0J2z0VuaWoQ4}IlFpbgT$^i)mk~T9@dmZr3
zD;J`_zXgp21d#Z|m6fqT9~D-sLCpmlLjV&6$ri{EW)>DE9RYAK?~qI!<>58GgY5<b
z3;@jRRK~)qF(z99^{)fiKnm2sANijQ1ygIn{%>C%$V&hr3#cog;TjR;t42UU5<!h(
z9F<LtlE=fYMzCuQ<|}~^t<EY<^FR2|$6a{)->xJSvq{(%p&%<=)&F)#At0LnxBp1>
z<sl~B$Qv41pa8fd20{fDB#RuUn-_wn<w>M4NCZ~C0gv0}bJ%XFPLAYH`O2lQ!vA9}
z6W@Go{%;$VgjRM9?2HAhshI!Jl{|7mF>WSo*A?1(pNTK9dwZ*r!C)&c!84=%#J_rY
z|2M{iwJhP63{nfZ|E6%y!;^;(R<ne}WaIyS{)csO3Bb7jjm8)V1BVi)PjCuE*92cl
TqUXSdG>E*k^6PR*qoDr>{mh74

literal 0
HcmV?d00001

diff --git a/assets/sendmail_template.txt b/assets/sendmail_template.txt
index fd1cd73..bdf9058 100644
--- a/assets/sendmail_template.txt
+++ b/assets/sendmail_template.txt
@@ -1,11 +1,53 @@
 To: $email
 Subject: $subject
 Mime-Version: 1.0
-Content-Type: multipart/related;boundary="nfmimeboundary"
+Content-Type: multipart/related;boundary="nfcoremimeboundary"
 
---nfmimeboundary
+--nfcoremimeboundary
 Content-Type: text/html; charset=utf-8
 
 $email_html
 
---nfmimeboundary--
+--nfcoremimeboundary
+Content-Type: image/png;name="nf-core-hic_logo.png"
+Content-Transfer-Encoding: base64
+Content-ID: <nfcorepipelinelogo>
+Content-Disposition: inline; filename="nf-core-hic_logo.png"
+
+<% out << new File("$projectDir/assets/nf-core-hic_logo.png").
+  bytes.
+  encodeBase64().
+  toString().
+  tokenize( '\n' )*.
+  toList()*.
+  collate( 76 )*.
+  collect { it.join() }.
+  flatten().
+  join( '\n' ) %>
+
+<%
+if (mqcFile){
+def mqcFileObj = new File("$mqcFile")
+if (mqcFileObj.length() < mqcMaxSize){
+out << """
+--nfcoremimeboundary
+Content-Type: text/html; name=\"multiqc_report\"
+Content-Transfer-Encoding: base64
+Content-ID: <mqcreport>
+Content-Disposition: attachment; filename=\"${mqcFileObj.getName()}\"
+
+${mqcFileObj.
+  bytes.
+  encodeBase64().
+  toString().
+  tokenize( '\n' )*.
+  toList()*.
+  collate( 76 )*.
+  collect { it.join() }.
+  flatten().
+  join( '\n' )}
+"""
+}}
+%>
+
+--nfcoremimeboundary--
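The Groovy blocks above inline the pipeline logo, and the MultiQC report when it is smaller than mqcMaxSize, as base64-encoded MIME parts wrapped at 76 characters per line. Below is a minimal Python sketch of that encode-and-wrap step, for illustration only; the template itself is evaluated as a Groovy template when the pipeline sends its completion e-mail, and the logo path is the one referenced above.

    import base64

    def mime_b64(path, width=76):
        # Base64-encode a file and wrap the output at `width` characters,
        # matching the Content-Transfer-Encoding: base64 parts in the template.
        with open(path, "rb") as fh:
            encoded = base64.b64encode(fh.read()).decode("ascii")
        return "\n".join(encoded[i:i + width] for i in range(0, len(encoded), width))

    # Illustrative call; assumes the asset exists relative to the pipeline directory.
    print(mime_b64("assets/nf-core-hic_logo.png"))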
diff --git a/bin/__pycache__/scrape_software_versions.cpython-36.pyc b/bin/__pycache__/scrape_software_versions.cpython-36.pyc
deleted file mode 100644
index 07062ad2b5afae9e2bc03e055e9776da9b619a1c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1324
zcmZ`&OONA35bjPKkK-is$Ua~%$O=fAh)FE8Xe9=31ez5$28NN?l{LunxGRY>9k-{u
zoy}y(DHm@10nSL=`6--y<+OhRaiHuZ(n{>M<*Lu+dVE#g{j}Axf9W4xt!jk)P2O4!
z#9!g7{zbtEqnymBPicWGyfUl$)e={^;Tz0gwQ+@+rQBl{;ySZYHhwew8dTTBH(3)*
z*78WH_qY0Bs}DW0xPsbrZ%tm1pT57U_?BM>8|sWaA#3{UGrs|iX%q9bz?kl%{pbmS
z*1F;!z=8aewWo)0Fg;>NYr3Xjg5{BygjFxd59AfOO<odcvj=NrzxV{qZIutVRod(!
zMq5x{R|=2Yze?{%1!mPt(tG>{CqQ~M-%n+n=wOs3k&Z=zmd1CIfduv<j`aC*e;%Z9
z3Or6=pci^sJHER7v{#rDSm+TKuS;`t9*jbzZ=>?7^$p^SGiSbWZq0*P#&x{?`*{0t
z;><xREb5eQJ!AR7{k;<jaT7z7X$^9agfnnftK2%@7R${|>U{6pBICu`vg@fdOdO@}
zI6Ut}f(v>2@fYR#vNJq)zxLe1VffzU(!fbbJSs$*DBe&M%j*1o6?zEmn4LP^ckvov
z?;wwR0|_Y)BRK9l-4mzV@Ai;!Ux|@^9ZKkLdsJOpK{?`}XzD#VG19r*Mmj%krt?<h
zM8XIoCt9cK)OE+Po@Cd9NX*=K5yWI&JLN(L`YtUhPV|g-Z3Zfmaa!y)+K?iSoO9#`
z%n7w4v&7Lcj(sG>%+V8YcC9!f(`lv$m?G5Ssq?e#;PM!EA|5LByu)~ZB4oT2i4J*Z
zXfHo19E#;JJ-lCeE-H_)4P?V*z5M4gKI}d4?Ep`q&LjkZtl%ExjV&)Es${jWR3!L&
z62`CqRRD!&K;^Yq!%XG$Ca3e<5Gf=%UF3#@koneb{JE(hl+h%w%|eaJ90`diuZ^Y1
z(!5q)gFlFTgmp^s&+qPaEaGG&TrQ%JyVqIFnR^C_7RrTWu7XG;EOe7mKN86Lv%%*s
zkFVYIPESPgbnwhoNUWcR(M>o8g?<<y!QEY_KC;GxqX9SXe}_Iugu<atRx4zYxVU3s
z3c+qCf?ZdG^e(r&nP3@*VKFpyf}%;A)G|z3Jk5$pt<6(y(|tsT)S#Ve#WHD|KBWHv
D{Ed_)

diff --git a/bin/markdown_to_html.py b/bin/markdown_to_html.py
new file mode 100755
index 0000000..a26d1ff
--- /dev/null
+++ b/bin/markdown_to_html.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import argparse
+import markdown
+import os
+import sys
+import io
+
+
+def convert_markdown(in_fn):
+    input_md = io.open(in_fn, mode="r", encoding="utf-8").read()
+    html = markdown.markdown(
+        "[TOC]\n" + input_md,
+        extensions=["pymdownx.extra", "pymdownx.b64", "pymdownx.highlight", "pymdownx.emoji", "pymdownx.tilde", "toc"],
+        extension_configs={
+            "pymdownx.b64": {"base_path": os.path.dirname(in_fn)},
+            "pymdownx.highlight": {"noclasses": True},
+            "toc": {"title": "Table of Contents"},
+        },
+    )
+    return html
+
+
+def wrap_html(contents):
+    header = """<!DOCTYPE html><html>
+    <head>
+        <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T" crossorigin="anonymous">
+        <style>
+            body {
+              font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";
+              padding: 3em;
+              margin-right: 350px;
+              max-width: 100%;
+            }
+            .toc {
+              position: fixed;
+              right: 20px;
+              width: 300px;
+              padding-top: 20px;
+              overflow: scroll;
+              height: calc(100% - 3em - 20px);
+            }
+            .toctitle {
+              font-size: 1.8em;
+              font-weight: bold;
+            }
+            .toc > ul {
+              padding: 0;
+              margin: 1rem 0;
+              list-style-type: none;
+            }
+            .toc > ul ul { padding-left: 20px; }
+            .toc > ul > li > a { display: none; }
+            img { max-width: 800px; }
+            pre {
+              padding: 0.6em 1em;
+            }
+            h2 {
+
+            }
+        </style>
+    </head>
+    <body>
+    <div class="container">
+    """
+    footer = """
+    </div>
+    </body>
+    </html>
+    """
+    return header + contents + footer
+
+
+def parse_args(args=None):
+    parser = argparse.ArgumentParser()
+    parser.add_argument("mdfile", type=argparse.FileType("r"), nargs="?", help="File to convert. Defaults to stdin.")
+    parser.add_argument(
+        "-o", "--out", type=argparse.FileType("w"), default=sys.stdout, help="Output file name. Defaults to stdout."
+    )
+    return parser.parse_args(args)
+
+
+def main(args=None):
+    args = parse_args(args)
+    converted_md = convert_markdown(args.mdfile.name)
+    html = wrap_html(converted_md)
+    args.out.write(html)
+
+
+if __name__ == "__main__":
+    sys.exit(main())
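This new helper renders a Markdown file (prefixed with a generated table of contents, with images inlined as base64 via pymdownx.b64) into a standalone HTML page. A hypothetical local check of the two functions, assuming the markdown and pymdownx packages are installed and that docs/output.md exists:

    # Hypothetical usage; the pipeline itself runs the script from the command line
    # (e.g. markdown_to_html.py <input.md> -o <output.html>, per the argparse definition above).
    from markdown_to_html import convert_markdown, wrap_html  # assumes bin/ is on PYTHONPATH

    html = wrap_html(convert_markdown("docs/output.md"))
    with open("results_description.html", "w") as fh:
        fh.write(html)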
diff --git a/bin/markdown_to_html.r b/bin/markdown_to_html.r
deleted file mode 100755
index abe1335..0000000
--- a/bin/markdown_to_html.r
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env Rscript
-
-# Command line argument processing
-args = commandArgs(trailingOnly=TRUE)
-if (length(args) < 2) {
-  stop("Usage: markdown_to_html.r <input.md> <output.html>", call.=FALSE)
-}
-markdown_fn <- args[1]
-output_fn <- args[2]
-
-# Load / install packages
-if (!require("markdown")) {
-  install.packages("markdown", dependencies=TRUE, repos='http://cloud.r-project.org/')
-  library("markdown")
-}
-
-base_css_fn <- getOption("markdown.HTML.stylesheet")
-base_css <- readChar(base_css_fn, file.info(base_css_fn)$size)
-custom_css <-  paste(base_css, "
-body {
-  padding: 3em;
-  margin-right: 350px;
-  max-width: 100%;
-}
-#toc {
-  position: fixed;
-  right: 20px;
-  width: 300px;
-  padding-top: 20px;
-  overflow: scroll;
-  height: calc(100% - 3em - 20px);
-}
-#toc_header {
-  font-size: 1.8em;
-  font-weight: bold;
-}
-#toc > ul {
-  padding-left: 0;
-  list-style-type: none;
-}
-#toc > ul ul { padding-left: 20px; }
-#toc > ul > li > a { display: none; }
-img { max-width: 800px; }
-")
-
-markdownToHTML(
-  file = markdown_fn,
-  output = output_fn,
-  stylesheet = custom_css,
-  options = c('toc', 'base64_images', 'highlight_code')
-)
diff --git a/bin/scrape_software_versions.py b/bin/scrape_software_versions.py
index 0e98e07..feb158a 100755
--- a/bin/scrape_software_versions.py
+++ b/bin/scrape_software_versions.py
@@ -5,35 +5,50 @@ import re
 
 # TODO nf-core: Add additional regexes for new tools in process get_software_versions
 regexes = {
-    'nf-core/hic': ['v_pipeline.txt', r"(\S+)"],
-    'Nextflow': ['v_nextflow.txt', r"(\S+)"],
-    'FastQC': ['v_fastqc.txt', r"FastQC v(\S+)"],
-    'MultiQC': ['v_multiqc.txt', r"multiqc, version (\S+)"],
+    "nf-core/hic": ["v_pipeline.txt", r"(\S+)"],
+    "Nextflow": ["v_nextflow.txt", r"(\S+)"],
+    "FastQC": ["v_fastqc.txt", r"FastQC v(\S+)"],
+    "MultiQC": ["v_multiqc.txt", r"multiqc, version (\S+)"],
 }
 results = OrderedDict()
-results['nf-core/hic'] = '<span style="color:#999999;\">N/A</span>'
-results['Nextflow'] = '<span style="color:#999999;\">N/A</span>'
-results['FastQC'] = '<span style="color:#999999;\">N/A</span>'
-results['MultiQC'] = '<span style="color:#999999;\">N/A</span>'
+results["nf-core/hic"] = '<span style="color:#999999;">N/A</span>'
+results["Nextflow"] = '<span style="color:#999999;">N/A</span>'
+results["FastQC"] = '<span style="color:#999999;">N/A</span>'
+results["MultiQC"] = '<span style="color:#999999;">N/A</span>'
 
 # Search each file using its regex
 for k, v in regexes.items():
-    with open(v[0]) as x:
-        versions = x.read()
-        match = re.search(v[1], versions)
-        if match:
-            results[k] = "v{}".format(match.group(1))
+    try:
+        with open(v[0]) as x:
+            versions = x.read()
+            match = re.search(v[1], versions)
+            if match:
+                results[k] = "v{}".format(match.group(1))
+    except IOError:
+        results[k] = False
+
+# Remove software set to false in results
+for k in list(results):
+    if not results[k]:
+        del results[k]
 
 # Dump to YAML
-print ('''
-id: 'nf-core/hic-software-versions'
+print(
+    """
+id: 'software_versions'
 section_name: 'nf-core/hic Software Versions'
 section_href: 'https://github.com/nf-core/hic'
 plot_type: 'html'
 description: 'are collected at run time from the software output.'
 data: |
     <dl class="dl-horizontal">
-''')
-for k,v in results.items():
-    print("        <dt>{}</dt><dd>{}</dd>".format(k,v))
-print ("    </dl>")
+"""
+)
+for k, v in results.items():
+    print("        <dt>{}</dt><dd><samp>{}</samp></dd>".format(k, v))
+print("    </dl>")
+
+# Write out the parsed software versions as a tab-separated file:
+with open("software_versions.csv", "w") as f:
+    for k, v in results.items():
+        f.write("{}\t{}\n".format(k, v))
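Each regex above is run against a small v_<tool>.txt file produced by the get_software_versions process (see the TODO note at the top of the script). A quick illustration with invented file contents:

    import re

    # Invented example: contents of v_fastqc.txt and the FastQC regex from the table above.
    sample = "FastQC v0.11.9"
    match = re.search(r"FastQC v(\S+)", sample)
    if match:
        print("v{}".format(match.group(1)))  # -> v0.11.9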
diff --git a/conf/awsbatch.config b/conf/awsbatch.config
deleted file mode 100644
index 79078c7..0000000
--- a/conf/awsbatch.config
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * -------------------------------------------------
- *  Nextflow config file for AWS Batch
- * -------------------------------------------------
- * Imported under the 'awsbatch' Nextflow profile in nextflow.config
- * Uses docker for software depedencies automagically, so not specified here.
- */
-
-aws.region = params.awsregion
-process.executor = 'awsbatch'
-process.queue = params.awsqueue
-executor.awscli = '/home/ec2-user/miniconda/bin/aws'
-params.tracedir = './'
diff --git a/conf/base.config b/conf/base.config
index 23c9e4a..0b2ea22 100644
--- a/conf/base.config
+++ b/conf/base.config
@@ -11,31 +11,41 @@
 
 process {
 
-  container = params.container
-
   // TODO nf-core: Check the defaults for all processes
   cpus = { check_max( 1 * task.attempt, 'cpus' ) }
-  memory = { check_max( 8.GB * task.attempt, 'memory' ) }
-  time = { check_max( 2.h * task.attempt, 'time' ) }
+  memory = { check_max( 7.GB * task.attempt, 'memory' ) }
+  time = { check_max( 4.h * task.attempt, 'time' ) }
 
-  errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'finish' }
+  errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' }
   maxRetries = 1
   maxErrors = '-1'
 
   // Process-specific resource requirements
-  // TODO nf-core: Customise requirements for specific processes
-  withName: fastqc {
-    errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' }
+  // NOTE - Only one of the labels below is used in the fastqc process in the main script.
+  //        If possible, it would be nice to keep the same label naming convention when
+  //        adding your own processes.
+  // TODO nf-core: Customise requirements for specific processes.
+  // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors
+  withLabel:process_low {
+    cpus = { check_max( 2 * task.attempt, 'cpus' ) }
+    memory = { check_max( 14.GB * task.attempt, 'memory' ) }
+    time = { check_max( 6.h * task.attempt, 'time' ) }
   }
-  withName: multiqc {
-    errorStrategy = { task.exitStatus in [143,137] ? 'retry' : 'ignore' }
+  withLabel:process_medium {
+    cpus = { check_max( 6 * task.attempt, 'cpus' ) }
+    memory = { check_max( 42.GB * task.attempt, 'memory' ) }
+    time = { check_max( 8.h * task.attempt, 'time' ) }
   }
-}
-
-params {
-  // Defaults only, expecting to be overwritten
-  max_memory = 128.GB
-  max_cpus = 16
-  max_time = 240.h
-  igenomes_base = 's3://ngi-igenomes/igenomes/'
+  withLabel:process_high {
+    cpus = { check_max( 12 * task.attempt, 'cpus' ) }
+    memory = { check_max( 84.GB * task.attempt, 'memory' ) }
+    time = { check_max( 10.h * task.attempt, 'time' ) }
+  }
+  withLabel:process_long {
+    time = { check_max( 20.h * task.attempt, 'time' ) }
+  }
+  withName:get_software_versions {
+    cache = false
+  }
+
 }
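The withLabel selectors above only apply to processes that declare the corresponding label in the main script (the NOTE above mentions the fastqc process), and check_max(), defined elsewhere in the pipeline configuration, caps each escalated request at the --max_memory/--max_cpus/--max_time limits (128 GB, 16 CPUs and 240 h by default in the params block removed above). A rough Python sketch of the memory escalation, purely to illustrate the arithmetic; check_max itself is Groovy:

    def check_max(requested_gb, max_gb=128):
        # Illustrative stand-in for check_max: never request more than params.max_memory.
        return min(requested_gb, max_gb)

    # process_medium memory per attempt: 42 GB, then 84 GB on retry, capped at 128 GB.
    for attempt in (1, 2):
        print("attempt", attempt, "->", check_max(42 * attempt), "GB")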
diff --git a/conf/igenomes.config b/conf/igenomes.config
index d19e61f..31b7ee6 100644
--- a/conf/igenomes.config
+++ b/conf/igenomes.config
@@ -9,139 +9,413 @@
 
 params {
   // illumina iGenomes reference file paths
-  // TODO nf-core: Add new reference types and strip out those that are not needed
   genomes {
     'GRCh37' {
-      bed12   = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/README.txt"
+      mito_name   = "MT"
+      macs_gsize  = "2.7e9"
+      blacklist   = "${projectDir}/assets/blacklists/GRCh37-blacklist.bed"
+    }
+    'GRCh38' {
+      fasta       = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.bed"
+      mito_name   = "chrM"
+      macs_gsize  = "2.7e9"
+      blacklist   = "${projectDir}/assets/blacklists/hg38-blacklist.bed"
     }
     'GRCm38' {
-      bed12   = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/README.txt"
+      mito_name   = "MT"
+      macs_gsize  = "1.87e9"
+      blacklist   = "${projectDir}/assets/blacklists/GRCm38-blacklist.bed"
     }
     'TAIR10' {
-      bed12   = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/README.txt"
+      mito_name   = "Mt"
     }
     'EB2' {
-      bed12   = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/README.txt"
     }
     'UMD3.1' {
-      bed12   = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/README.txt"
+      mito_name   = "MT"
     }
     'WBcel235' {
-      bed12   = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed"
+      mito_name   = "MtDNA"
+      macs_gsize  = "9e7"
     }
     'CanFam3.1' {
-      bed12   = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/README.txt"
+      mito_name   = "MT"
     }
     'GRCz10' {
-      bed12   = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed"
+      mito_name   = "MT"
     }
     'BDGP6' {
-      bed12   = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed"
+      mito_name   = "M"
+      macs_gsize  = "1.2e8"
     }
     'EquCab2' {
-      bed12   = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/README.txt"
+      mito_name   = "MT"
     }
     'EB1' {
-      bed12   = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/README.txt"
     }
     'Galgal4' {
-      bed12   = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed"
+      mito_name   = "MT"
     }
     'Gm01' {
-      bed12   = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/README.txt"
     }
     'Mmul_1' {
-      bed12   = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/README.txt"
+      mito_name   = "MT"
     }
     'IRGSP-1.0' {
-      bed12   = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed"
+      mito_name   = "Mt"
     }
     'CHIMP2.1.4' {
-      bed12   = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/README.txt"
+      mito_name   = "MT"
     }
     'Rnor_6.0' {
-      bed12   = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed"
+      mito_name   = "MT"
     }
     'R64-1-1' {
-      bed12   = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed"
+      mito_name   = "MT"
+      macs_gsize  = "1.2e7"
     }
     'EF2' {
-      bed12   = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/README.txt"
+      mito_name   = "MT"
+      macs_gsize  = "1.21e7"
     }
     'Sbi1' {
-      bed12   = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/README.txt"
     }
     'Sscrofa10.2' {
-      bed12   = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/README.txt"
+      mito_name   = "MT"
     }
     'AGPv3' {
-      bed12   = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed"
-      fasta   = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa"
-      gtf     = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf"
-      star    = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/"
+      fasta       = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed"
+      mito_name   = "Mt"
+    }
+    'hg38' {
+      fasta       = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.bed"
+      mito_name   = "chrM"
+      macs_gsize  = "2.7e9"
+      blacklist   = "${projectDir}/assets/blacklists/hg38-blacklist.bed"
+    }
+    'hg19' {
+      fasta       = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/README.txt"
+      mito_name   = "chrM"
+      macs_gsize  = "2.7e9"
+      blacklist   = "${projectDir}/assets/blacklists/hg19-blacklist.bed"
+    }
+    'mm10' {
+      fasta       = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/README.txt"
+      mito_name   = "chrM"
+      macs_gsize  = "1.87e9"
+      blacklist   = "${projectDir}/assets/blacklists/mm10-blacklist.bed"
+    }
+    'bosTau8' {
+      fasta       = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.bed"
+      mito_name   = "chrM"
+    }
+    'ce10' {
+      fasta       = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/README.txt"
+      mito_name   = "chrM"
+      macs_gsize  = "9e7"
+    }
+    'canFam3' {
+      fasta       = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/README.txt"
+      mito_name   = "chrM"
+    }
+    'danRer10' {
+      fasta       = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.bed"
+      mito_name   = "chrM"
+      macs_gsize  = "1.37e9"
+    }
+    'dm6' {
+      fasta       = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.bed"
+      mito_name   = "chrM"
+      macs_gsize  = "1.2e8"
+    }
+    'equCab2' {
+      fasta       = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/README.txt"
+      mito_name   = "chrM"
+    }
+    'galGal4' {
+      fasta       = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/README.txt"
+      mito_name   = "chrM"
+    }
+    'panTro4' {
+      fasta       = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/README.txt"
+      mito_name   = "chrM"
+    }
+    'rn6' {
+      fasta       = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.bed"
+      mito_name   = "chrM"
+    }
+    'sacCer3' {
+      fasta       = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BismarkIndex/"
+      readme      = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Annotation/README.txt"
+      mito_name   = "chrM"
+      macs_gsize  = "1.2e7"
+    }
+    'susScr3' {
+      fasta       = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/WholeGenomeFasta/genome.fa"
+      bwa         = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BWAIndex/genome.fa"
+      bowtie2     = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/Bowtie2Index/"
+      star        = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/STARIndex/"
+      bismark     = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BismarkIndex/"
+      gtf         = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.gtf"
+      bed12       = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.bed"
+      readme      = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/README.txt"
+      mito_name   = "chrM"
     }
   }
 }
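
The expanded iGenomes map above adds per-genome keys such as `bwa`, `bowtie2`, `bismark`, `mito_name`, `macs_gsize` and `blacklist` alongside the existing `fasta`/`gtf`/`bed12` entries. As a minimal sketch of how a genome that is not in iGenomes could be plugged into the same structure (the `GRCz11_custom` key and all local paths below are hypothetical, following the pattern shown in the deleted `reference_genomes.md` further down), a custom config passed with `-c` might look like:

```nextflow
// custom.config -- hypothetical example; run as `nextflow run nf-core/hic -c custom.config --genome 'GRCz11_custom'`
params {
  genomes {
    'GRCz11_custom' {                                      // made-up genome key, for illustration only
      fasta     = '/data/refs/GRCz11/genome.fa'            // hypothetical local path
      bwa       = '/data/refs/GRCz11/BWAIndex/genome.fa'   // hypothetical local path
      bowtie2   = '/data/refs/GRCz11/Bowtie2Index/'        // hypothetical local path
      gtf       = '/data/refs/GRCz11/genes.gtf'
      bed12     = '/data/refs/GRCz11/genes.bed'
      mito_name = 'MT'
    }
  }
  // Optionally point at a local mirror of the AWS iGenomes bucket instead of the S3 default
  igenomes_base = '/path/to/local/igenomes'
}
```
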
diff --git a/conf/test.config b/conf/test.config
index a03678b..02f48cb 100644
--- a/conf/test.config
+++ b/conf/test.config
@@ -4,19 +4,22 @@
  * -------------------------------------------------
  * Defines bundled input files and everything required
  * to run a fast and simple test. Use as follows:
- *   nextflow run nf-core/hic -profile test
+ *   nextflow run nf-core/hic -profile test,<docker/singularity>
  */
 
 params {
-  // Limit resources so that this can run on Travis
+  config_profile_name = 'Test profile'
+  config_profile_description = 'Minimal test dataset to check pipeline function'
+  // Limit resources so that this can run on GitHub Actions
   max_cpus = 2
   max_memory = 6.GB
   max_time = 48.h
+
   // Input data
   // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets
   // TODO nf-core: Give any required params for the test so that command line flags are not needed
-  singleEnd = false
-  readPaths = [
+  single_end = false
+  input_paths = [
     ['Testdata', ['https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R1.tiny.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R2.tiny.fastq.gz']],
     ['SRR389222', ['https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub1.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub2.fastq.gz']]
   ]
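
The test profile now uses the snake_case parameters `single_end` and `input_paths` in place of the old camelCase names. Purely as a hedged sketch, a user who wants to run the same kind of quick smoke test on their own small FASTQ pair could override these parameters in a personal config supplied with `-c` (the sample name and paths below are invented):

```nextflow
// my_smoke_test.config -- hypothetical example; run as `nextflow run nf-core/hic -profile docker -c my_smoke_test.config`
params {
  single_end  = false
  input_paths = [
    ['sampleA', ['/data/fastq/sampleA_R1.fastq.gz', '/data/fastq/sampleA_R2.fastq.gz']]  // hypothetical paths
  ]
}
```
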
diff --git a/conf/test_full.config b/conf/test_full.config
new file mode 100644
index 0000000..921372e
--- /dev/null
+++ b/conf/test_full.config
@@ -0,0 +1,22 @@
+/*
+ * -------------------------------------------------
+ *  Nextflow config file for running full-size tests
+ * -------------------------------------------------
+ * Defines bundled input files and everything required
+ * to run a full size pipeline test. Use as follows:
+ *   nextflow run nf-core/hic -profile test_full,<docker/singularity>
+ */
+
+params {
+  config_profile_name = 'Full test profile'
+  config_profile_description = 'Full test dataset to check pipeline function'
+
+  // Input data for full size test
+  // TODO nf-core: Specify the paths to your full test data (on nf-core/test-datasets or directly in repositories, e.g. SRA)
+  // TODO nf-core: Give any required params for the test so that command line flags are not needed
+  single_end = false
+  input_paths = [
+    ['Testdata', ['https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R1.tiny.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R2.tiny.fastq.gz']],
+    ['SRR389222', ['https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub1.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub2.fastq.gz']]
+  ]
+}
diff --git a/docs/README.md b/docs/README.md
index d7dbdac..a688954 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,12 +1,10 @@
 # nf-core/hic: Documentation
 
-The nf-core/hic documentation is split into the following files:
+The nf-core/hic documentation is split into the following pages:
 
-1. [Installation](installation.md)
-2. Pipeline configuration
-    * [Local installation](configuration/local.md)
-    * [Adding your own system](configuration/adding_your_own.md)
-    * [Reference genomes](configuration/reference_genomes.md)
-3. [Running the pipeline](usage.md)
-4. [Output and how to interpret the results](output.md)
-5. [Troubleshooting](troubleshooting.md)
+* [Usage](usage.md)
+  * An overview of how the pipeline works, how to run it and a description of all of the different command-line flags.
+* [Output](output.md)
+  * An overview of the different results produced by the pipeline and how to interpret them.
+
+You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re)
diff --git a/docs/configuration/adding_your_own.md b/docs/configuration/adding_your_own.md
deleted file mode 100644
index bf7f808..0000000
--- a/docs/configuration/adding_your_own.md
+++ /dev/null
@@ -1,86 +0,0 @@
-# nf-core/hic: Configuration for other clusters
-
-It is entirely possible to run this pipeline on other clusters, though you will need to set up your own config file so that the pipeline knows how to work with your cluster.
-
-> If you think that there are other people using the pipeline who would benefit from your configuration (e.g. other common cluster setups), please let us know. We can add a new configuration and profile which can be used by specifying `-profile <name>` when running the pipeline. The config file will then be hosted at `nf-core/configs` and will be pulled automatically before the pipeline is executed.
-
-If you are the only person to be running this pipeline, you can create your config file as `~/.nextflow/config` and it will be applied every time you run Nextflow. Alternatively, save the file anywhere and reference it when running the pipeline with `-c path/to/config` (see the [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more).
-
-A basic configuration comes with the pipeline, which loads the [`conf/base.config`](../../conf/base.config) by default. This means that you only need to configure the specifics for your system and overwrite any defaults that you want to change.
-
-## Cluster Environment
-By default, the pipeline uses the `local` Nextflow executor - in other words, all jobs are run in the login session. If you're using a simple server, this may be fine. If you're using a compute cluster, this is bad as all jobs will run on the head node.
-
-To specify your cluster environment, add the following line to your config file:
-
-```nextflow
-process.executor = 'YOUR_SYSTEM_TYPE'
-```
-
-Many different cluster types are supported by Nextflow. For more information, please see the [Nextflow documentation](https://www.nextflow.io/docs/latest/executor.html).
-
-Note that you may need to specify cluster options, such as a project or queue. To do so, use the `clusterOptions` config option:
-
-```nextflow
-process {
-  executor = 'SLURM'
-  clusterOptions = '-A myproject'
-}
-```
-
-
-## Software Requirements
-To run the pipeline, several software packages are required. How you satisfy these requirements is essentially up to you and depends on your system. If possible, we _highly_ recommend using either Docker or Singularity.
-
-Please see the [`installation documentation`](../installation.md) for how to use each of the options below for a one-off run. The instructions here describe how to set up a config file for repeated use.
-
-### Docker
-Docker is a great way to run nf-core/hic, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems.
-
-Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required - at run time, Nextflow will automatically fetch the [nfcore/hic](https://hub.docker.com/r/nfcore/hic/) image that we have created and host on dockerhub.
-
-To add docker support to your own config file, add the following:
-
-```nextflow
-docker.enabled = true
-process.container = "nfcore/hic"
-```
-
-Note that the dockerhub organisation name annoyingly can't have a hyphen, so is `nfcore` and not `nf-core`.
-
-
-### Singularity image
-Many HPC environments are not able to run Docker due to security issues.
-[Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker.
-
-To specify singularity usage in your pipeline config file, add the following:
-
-```nextflow
-singularity.enabled = true
-process.container = "shub://nf-core/hic"
-```
-
-If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you.
-Instead, you'll have to do this yourself manually first, transfer the image file and then point to that.
-
-First, pull the image file where you have an internet connection:
-
-```bash
-singularity pull --name nf-core-hic.simg shub://nf-core/hic
-```
-
-Then transfer this file and point the config file to the image:
-
-```nextflow
-singularity.enabled = true
-process.container = "/path/to/nf-core-hic.simg"
-```
-
-
-### Conda
-If you're not able to use Docker or Singularity, you can instead use conda to manage the software requirements.
-To use conda in your own config file, add the following:
-
-```nextflow
-process.conda = "$baseDir/environment.yml"
-```
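
The deleted page above walked through executor, Docker, Singularity and Conda settings one snippet at a time; under the new template this kind of site configuration is expected to live in a shared profile (for example one contributed to `nf-core/configs`). Only as a hedged illustration of how those separate snippets could be combined into a single institutional config (the profile name, queue and project/account are invented), such a file might look like:

```nextflow
// institution.config -- hypothetical combined site profile; load with `-c institution.config` or contribute to nf-core/configs
params {
  config_profile_name        = 'Example cluster'                       // invented name
  config_profile_description = 'Hypothetical SLURM + Singularity setup'
}

process {
  executor       = 'slurm'
  queue          = 'standard'        // invented queue name
  clusterOptions = '-A myproject'    // project/account flag, as in the deleted docs above
}

singularity {
  enabled    = true
  autoMounts = true
}
```
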
diff --git a/docs/configuration/local.md b/docs/configuration/local.md
deleted file mode 100644
index 657422f..0000000
--- a/docs/configuration/local.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# nf-core/hic: Local Configuration
-
-If running the pipeline in a local environment, we highly recommend using either Docker or Singularity.
-
-## Docker
-Docker is a great way to run `nf-core/hic`, as it manages all software installations and allows the pipeline to be run in an identical software environment across a range of systems.
-
-Nextflow has [excellent integration](https://www.nextflow.io/docs/latest/docker.html) with Docker, and beyond installing the two tools, not much else is required. The `nf-core/hic` pipeline comes with a configuration profile for docker, making it very easy to use. This also comes with the required presets to use the AWS iGenomes resource, meaning that if you are using common reference genomes you just specify the reference ID and it will be automatically downloaded from AWS S3.
-
-First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/)
-
-Then, simply run the analysis pipeline:
-```bash
-nextflow run nf-core/hic -profile docker --genome '<genome ID>' --design '<path to your design file>'
-```
-
-Nextflow will recognise `nf-core/hic` and download the pipeline from GitHub. The `-profile docker` configuration lists the [nf-core/hic](https://hub.docker.com/r/nfcore/hic/) image that we have created and host on dockerhub, and this image is downloaded automatically.
-
-For more information about how to work with reference genomes, see [`docs/configuration/reference_genomes.md`](reference_genomes.md).
-
-### Pipeline versions
-The public docker images are tagged with the same version numbers as the code, which you can use to ensure reproducibility. When running the pipeline, specify the pipeline version with `-r`, for example `-r 1.0`. This uses pipeline code and docker image from this tagged version.
-
-
-## Singularity image
-Many HPC environments are not able to run Docker due to security issues. [Singularity](http://singularity.lbl.gov/) is a tool designed to run on such HPC systems which is very similar to Docker. Even better, it can create images directly from dockerhub.
-
-To use the singularity image for a single run, use `-with-singularity`. This will download the docker container from dockerhub and create a singularity image for you dynamically.
-
-If you intend to run the pipeline offline, nextflow will not be able to automatically download the singularity image for you. Instead, you'll have to do this yourself manually first, transfer the image file and then point to that.
-
-First, pull the image file where you have an internet connection:
-
-> NB: The "tag" at the end of this command corresponds to the pipeline version.
-> Here, we're pulling the docker image for version 1.0 of the nf-core/hic pipeline
-> Make sure that this tag corresponds to the version of the pipeline that you're using
-
-```bash
-singularity pull --name nf-core-hic-1.0.img docker://nf-core/hic:1.0
-```
-
-Then transfer this file and run the pipeline with this path:
-
-```bash
-nextflow run /path/to/nf-core-hic -with-singularity /path/to/nf-core-hic-1.0.img
-```
diff --git a/docs/configuration/reference_genomes.md b/docs/configuration/reference_genomes.md
deleted file mode 100644
index 1fafa8f..0000000
--- a/docs/configuration/reference_genomes.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# nf-core/hic: Reference Genomes Configuration
-
-The nf-core/hic pipeline needs a reference genome for alignment and annotation.
-
-These paths can be supplied on the command line at run time (see the [usage docs](../usage.md)),
-but for convenience it's often better to save these paths in a nextflow config file.
-See below for instructions on how to do this.
-Read [Adding your own system](adding_your_own.md) to find out how to set up custom config files.
-
-## Adding paths to a config file
-Specifying long paths every time you run the pipeline is a pain.
-To make this easier, the pipeline comes configured to understand reference genome keywords which correspond to preconfigured paths, meaning that you can just specify `--genome ID` when running the pipeline.
-
-Note that this genome key can also be specified in a config file if you always use the same genome.
-
-To use this system, add paths to your config file using the following template:
-
-```nextflow
-params {
-  genomes {
-    'YOUR-ID' {
-      fasta  = '<PATH TO FASTA FILE>/genome.fa'
-    }
-    'OTHER-GENOME' {
-      // [..]
-    }
-  }
-  // Optional - default genome. Ignored if --genome 'OTHER-GENOME' specified on command line
-  genome = 'YOUR-ID'
-}
-```
-
-You can add as many genomes as you like as long as they have unique IDs.
-
-## Illumina iGenomes
-To make the use of reference genomes easier, Illumina has developed a centralised resource called [iGenomes](https://support.illumina.com/sequencing/sequencing_software/igenome.html).
-Multiple reference index types are held together with consistent structure for multiple genomes.
-
-We have put a copy of iGenomes up onto AWS S3 hosting and this pipeline is configured to use this by default.
-The hosting fees for AWS iGenomes are currently kindly funded by a grant from Amazon.
-The pipeline will automatically download the required reference files when you run the pipeline.
-For more information about the AWS iGenomes, see https://ewels.github.io/AWS-iGenomes/
-
-Downloading the files takes time and bandwidth, so we recommend making a local copy of the iGenomes resource.
-Once downloaded, you can customise the variable `params.igenomes_base` in your custom configuration file to point to the reference location.
-For example:
-```nextflow
-params.igenomes_base = '/path/to/data/igenomes/'
-```
diff --git a/docs/images/nf-core-hic_logo.png b/docs/images/nf-core-hic_logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..274eb3dc3f3db879c7f3cbc3fd8f49a705a9a3fb
GIT binary patch (base85-encoded image data for docs/images/nf-core-hic_logo.png omitted)

diff --git a/docs/installation.md b/docs/installation.md
deleted file mode 100644
index 70c4a6d..0000000
--- a/docs/installation.md
+++ /dev/null
@@ -1,110 +0,0 @@
-# nf-core/hic: Installation
-
-To start using the nf-core/hic pipeline, follow the steps below:
-
-1. [Install Nextflow](#1-install-nextflow)
-2. [Install the pipeline](#2-install-the-pipeline)
-    * [Automatic](#21-automatic)
-    * [Offline](#22-offline)
-    * [Development](#23-development)
-3. [Pipeline configuration](#3-pipeline-configuration)
-    * [Software deps: Docker and Singularity](#31-software-deps-docker-and-singularity)
-    * [Software deps: Bioconda](#32-software-deps-bioconda)
-    * [Configuration profiles](#33-configuration-profiles)
-4. [Reference genomes](#4-reference-genomes)
-
-## 1) Install NextFlow
-Nextflow runs on most POSIX systems (Linux, Mac OSX etc). It can be installed by running the following commands:
-
-```bash
-# Make sure that Java v8+ is installed:
-java -version
-
-# Install Nextflow
-curl -fsSL get.nextflow.io | bash
-
-# Add Nextflow binary to your PATH:
-mv nextflow ~/bin/
-# OR system-wide installation:
-# sudo mv nextflow /usr/local/bin
-```
-
-See [nextflow.io](https://www.nextflow.io/) for further instructions on how to install and configure Nextflow.
-
-## 2) Install the pipeline
-
-#### 2.1) Automatic
-This pipeline itself needs no installation - NextFlow will automatically fetch it from GitHub if `nf-core/hic` is specified as the pipeline name.
-
-#### 2.2) Offline
-The above method requires an internet connection so that Nextflow can download the pipeline files. If you're running on a system that has no internet connection, you'll need to download and transfer the pipeline files manually:
-
-```bash
-wget https://github.com/nf-core/hic/archive/master.zip
-mkdir -p ~/my-pipelines/nf-core/
-unzip master.zip -d ~/my-pipelines/nf-core/
-cd ~/my_data/
-nextflow run ~/my-pipelines/nf-core/hic-master
-```
-
-To stop nextflow from looking for updates online, you can tell it to run in offline mode by specifying the following environment variable in your ~/.bashrc file:
-
-```bash
-export NXF_OFFLINE='TRUE'
-```
-
-#### 2.3) Development
-
-If you would like to make changes to the pipeline, it's best to make a fork on GitHub and then clone the files. Once cloned you can run the pipeline directly as above.
-
-
-## 3) Pipeline configuration
-By default, the pipeline loads a basic server configuration [`conf/base.config`](../conf/base.config)
-This uses a number of sensible defaults for process requirements and is suitable for running
-on a simple (if powerful!) local server.
-
-Be warned of two important points about this default configuration:
-
-1. The default profile uses the `local` executor
-    * All jobs are run in the login session. If you're using a simple server, this may be fine. If you're using a compute cluster, this is bad as all jobs will run on the head node.
-    * See the [nextflow docs](https://www.nextflow.io/docs/latest/executor.html) for information about running with other hardware backends. Most job scheduler systems are natively supported.
-2. Nextflow will expect all software to be installed and available on the `PATH`
-    * It's expected to use an additional config profile for docker, singularity or conda support. See below.
-
-#### 3.1) Software deps: Docker
-First, install docker on your system: [Docker Installation Instructions](https://docs.docker.com/engine/installation/)
-
-Then, running the pipeline with the option `-profile docker` tells Nextflow to enable Docker for this run. An image containing all of the software requirements will be automatically fetched and used from dockerhub (https://hub.docker.com/r/nfcore/hic).
-
-#### 3.1) Software deps: Singularity
-If you're not able to use Docker then [Singularity](http://singularity.lbl.gov/) is a great alternative.
-The process is very similar: running the pipeline with the option `-profile singularity` tells Nextflow to enable singularity for this run. An image containing all of the software requirements will be automatically fetched and used from singularity hub.
-
-If running offline with Singularity, you'll need to download and transfer the Singularity image first:
-
-```bash
-singularity pull --name nf-core-hic.simg shub://nf-core/hic
-```
-
-Once transferred, use `-with-singularity` and specify the path to the image file:
-
-```bash
-nextflow run /path/to/nf-core-hic -with-singularity nf-core-hic.simg
-```
-
-Remember to pull updated versions of the singularity image if you update the pipeline.
-
-
-#### 3.2) Software deps: conda
-If you're not able to use Docker _or_ Singularity, you can instead use conda to manage the software requirements.
-This is slower and less reproducible than the above, but is still better than having to install all requirements yourself!
-The pipeline ships with a conda environment file and nextflow has built-in support for this.
-To use it first ensure that you have conda installed (we recommend [miniconda](https://conda.io/miniconda.html)), then follow the same pattern as above and use the flag `-profile conda`
-
-#### 3.3) Configuration profiles
-
-See [`docs/configuration/adding_your_own.md`](configuration/adding_your_own.md)
-
-## 4) Reference genomes
-
-See [`docs/configuration/reference_genomes.md`](configuration/reference_genomes.md)
diff --git a/docs/output.md b/docs/output.md
index 9de7067..4a7372e 100644
--- a/docs/output.md
+++ b/docs/output.md
@@ -1,41 +1,63 @@
 # nf-core/hic: Output
 
+## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/hic/output](https://nf-co.re/hic/output)
+
+> _Documentation of pipeline parameters is generated automatically from the pipeline schema and can no longer be found in markdown files._
+
+## Introduction
+
 This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline.
 
+The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory.
+
 <!-- TODO nf-core: Write this documentation describing your workflow's output -->
 
 ## Pipeline overview
+
 The pipeline is built using [Nextflow](https://www.nextflow.io/)
 and processes data using the following steps:
 
-* [FastQC](#fastqc) - read quality control
-* [MultiQC](#multiqc) - aggregate report, describing results of the whole pipeline
+* [FastQC](#fastqc) - Read quality control
+* [MultiQC](#multiqc) - Aggregate report describing results from the whole pipeline
+* [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution
 
 ## FastQC
-[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%T/A/G/C). You get information about adapter contamination and other overrepresented sequences.
 
-For further reading and documentation see the [FastQC help](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/).
+[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences.
 
-> **NB:** The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. To see how your reads look after trimming, look at the FastQC reports in the `trim_galore` directory.
+For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/).
 
-**Output directory: `results/fastqc`**
+**Output files:**
 
-* `sample_fastqc.html`
-  * FastQC report, containing quality metrics for your untrimmed raw fastq files
-* `zips/sample_fastqc.zip`
-  * zip file containing the FastQC report, tab-delimited data file and plot images
+* `fastqc/`
+  * `*_fastqc.html`: FastQC report containing quality metrics for your untrimmed raw fastq files.
+* `fastqc/zips/`
+  * `*_fastqc.zip`: Zip archive containing the FastQC report, tab-delimited data file and plot images.
 
+> **NB:** The FastQC plots displayed in the MultiQC report show _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality.
 
 ## MultiQC
-[MultiQC](http://multiqc.info) is a visualisation tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in within the report data directory.
 
-The pipeline has special steps which allow the software versions used to be reported in the MultiQC output for future traceability.
+[MultiQC](http://multiqc.info) is a visualisation tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory.
+
+The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability.
+
+For more information about how to use MultiQC reports, see [https://multiqc.info](https://multiqc.info).
+
+**Output files:**
+
+* `multiqc/`
+  * `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser.
+  * `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline.
+  * `multiqc_plots/`: directory containing static images from the report in various formats.
+
+## Pipeline information
 
-**Output directory: `results/multiqc`**
+[Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage.
 
-* `Project_multiqc_report.html`
-  * MultiQC report - a standalone HTML file that can be viewed in your web browser
-* `Project_multiqc_data/`
-  * Directory containing parsed statistics from the different tools used in the pipeline
+**Output files:**
 
-For more information about how to use MultiQC reports, see http://multiqc.info
+* `pipeline_info/`
+  * Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`.
+  * Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.csv`.
+  * Documentation for interpretation of results in HTML format: `results_description.html`.
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
deleted file mode 100644
index 927d1b7..0000000
--- a/docs/troubleshooting.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# nf-core/hic: Troubleshooting
-
-<!-- TODO nf-core: Change this documentation if these parameters/errors are not relevant for your workflow -->
-
-## Input files not found
-
-If only no file, only one input file , or only read one and not read two is picked up then something is wrong with your input file declaration
-
-1. The path must be enclosed in quotes (`'` or `"`)
-2. The path must have at least one `*` wildcard character. This is even if you are only running one paired end sample.
-3. When using the pipeline with paired end data, the path must use `{1,2}` or `{R1,R2}` notation to specify read pairs.
-4.  If you are running Single end data make sure to specify `--singleEnd`
-
-If the pipeline can't find your files then you will get the following error
-
-```
-ERROR ~ Cannot find any reads matching: *{1,2}.fastq.gz
-```
-
-Note that if your sample name is "messy" then you have to be very particular with your glob specification. A file name like `L1-1-D-2h_S1_L002_R1_001.fastq.gz` can be difficult enough for a human to read. Specifying `*{1,2}*.gz` wont work give you what you want Whilst `*{R1,R2}*.gz` will.
-
-
-## Data organization
-The pipeline can't take a list of multiple input files - it takes a glob expression. If your input files are scattered in different paths then we recommend that you generate a directory with symlinked files. If running in paired end mode please make sure that your files are sensibly named so that they can be properly paired. See the previous point.
-
-## Extra resources and getting help
-If you still have an issue with running the pipeline then feel free to contact us.
-Have a look at the [pipeline website](https://github.com/nf-core/hic) to find out how.
-
-If you have problems that are related to Nextflow and not our pipeline then check out the [Nextflow gitter channel](https://gitter.im/nextflow-io/nextflow) or the [google group](https://groups.google.com/forum/#!forum/nextflow).
diff --git a/docs/usage.md b/docs/usage.md
index 95d863f..76803dd 100644
--- a/docs/usage.md
+++ b/docs/usage.md
@@ -1,58 +1,19 @@
 # nf-core/hic: Usage
 
-## Table of contents
-
-* [Introduction](#general-nextflow-info)
-* [Running the pipeline](#running-the-pipeline)
-* [Updating the pipeline](#updating-the-pipeline)
-* [Reproducibility](#reproducibility)
-* [Main arguments](#main-arguments)
-    * [`-profile`](#-profile-single-dash)
-        * [`awsbatch`](#awsbatch)
-        * [`conda`](#conda)
-        * [`docker`](#docker)
-        * [`singularity`](#singularity)
-        * [`test`](#test)
-    * [`--reads`](#--reads)
-    * [`--singleEnd`](#--singleend)
-* [Reference genomes](#reference-genomes)
-    * [`--genome`](#--genome)
-    * [`--fasta`](#--fasta)
-    * [`--igenomesIgnore`](#--igenomesignore)
-* [Job resources](#job-resources)
-* [Automatic resubmission](#automatic-resubmission)
-* [Custom resource requests](#custom-resource-requests)
-* [AWS batch specific parameters](#aws-batch-specific-parameters)
-    * [`-awsbatch`](#-awsbatch)
-    * [`--awsqueue`](#--awsqueue)
-    * [`--awsregion`](#--awsregion)
-* [Other command line parameters](#other-command-line-parameters)
-    * [`--outdir`](#--outdir)
-    * [`--email`](#--email)
-    * [`-name`](#-name-single-dash)
-    * [`-resume`](#-resume-single-dash)
-    * [`-c`](#-c-single-dash)
-    * [`--custom_config_version`](#--custom_config_version)
-    * [`--max_memory`](#--max_memory)
-    * [`--max_time`](#--max_time)
-    * [`--max_cpus`](#--max_cpus)
-    * [`--plaintext_email`](#--plaintext_email)
-    * [`--multiqc_config`](#--multiqc_config)
-
-
-## General Nextflow info
-Nextflow handles job submissions on SLURM or other environments, and supervises running the jobs. Thus the Nextflow process must run until the pipeline is finished. We recommend that you put the process running in the background through `screen` / `tmux` or similar tool. Alternatively you can run nextflow within a cluster job submitted your job scheduler.
-
-It is recommended to limit the Nextflow Java virtual machines memory. We recommend adding the following line to your environment (typically in `~/.bashrc` or `~./bash_profile`):
+## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/hic/usage](https://nf-co.re/hic/usage)
+
+> _Documentation of pipeline parameters is generated automatically from the pipeline schema and can no longer be found in markdown files._
+
+## Introduction
+
+<!-- TODO nf-core: Add documentation about anything specific to running your pipeline. For general topics, please point to (and add to) the main nf-core website. -->
 
-```bash
-NXF_OPTS='-Xms1g -Xmx4g'
-```
-<!-- TODO nf-core: Document required command line parameters to run the pipeline-->
 ## Running the pipeline
+
 The typical command for running the pipeline is as follows:
+
 ```bash
-nextflow run nf-core/hic --reads '*_R{1,2}.fastq.gz' -profile docker
+nextflow run nf-core/hic --input '*_R{1,2}.fastq.gz' -profile docker
 ```
 
 This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles.
@@ -67,6 +28,7 @@ results         # Finished results (configurable, see below)
 ```
 
 ### Updating the pipeline
+
 When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline:
 
 ```bash
@@ -74,181 +36,93 @@ nextflow pull nf-core/hic
 ```
 
 ### Reproducibility
+
 It's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.
 
 First, go to the [nf-core/hic releases page](https://github.com/nf-core/hic/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`.
 
 This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future.
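+
+For example, a versioned run could look like this (a minimal sketch; the release tag and input pattern are illustrative):
+
+```bash
+nextflow run nf-core/hic -r 1.3.1 --input '*_R{1,2}.fastq.gz' -profile docker
+```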
 
+## Core Nextflow arguments
 
-## Main arguments
+> **NB:** These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen).
 
 ### `-profile`
-Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. Note that multiple profiles can be loaded, for example: `-profile docker` - the order of arguments is important!
 
-If `-profile` is not specified at all the pipeline will be run locally and expects all software to be installed and available on the `PATH`.
+Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments.
 
-* `awsbatch`
-    * A generic configuration profile to be used with AWS Batch.
-* `conda`
-    * A generic configuration profile to be used with [conda](https://conda.io/docs/)
-    * Pulls most software from [Bioconda](https://bioconda.github.io/)
-* `docker`
-    * A generic configuration profile to be used with [Docker](http://docker.com/)
-    * Pulls software from dockerhub: [`nfcore/hic`](http://hub.docker.com/r/nfcore/hic/)
-* `singularity`
-    * A generic configuration profile to be used with [Singularity](http://singularity.lbl.gov/)
-    * Pulls software from singularity-hub
-* `test`
-    * A profile with a complete configuration for automated testing
-    * Includes links to test data so needs no other parameters
+Several generic profiles are bundled with the pipeline which instruct it to use software packaged using different methods (Docker, Singularity, Podman, Conda) - see below.
 
-<!-- TODO nf-core: Document required command line parameters -->
-### `--reads`
-Use this to specify the location of your input FastQ files. For example:
+> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility; however, when this is not possible, Conda is also supported.
 
-```bash
---reads 'path/to/data/sample_*_{1,2}.fastq'
-```
-
-Please note the following requirements:
-
-1. The path must be enclosed in quotes
-2. The path must have at least one `*` wildcard character
-3. When using the pipeline with paired end data, the path must use `{1,2}` notation to specify read pairs.
-
-If left unspecified, a default pattern is used: `data/*{1,2}.fastq.gz`
+The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation).
 
-### `--singleEnd`
-By default, the pipeline expects paired-end data. If you have single-end data, you need to specify `--singleEnd` on the command line when you launch the pipeline. A normal glob pattern, enclosed in quotation marks, can then be used for `--reads`. For example:
-
-```bash
---singleEnd --reads '*.fastq'
-```
+Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important!
+They are loaded in sequence, so later profiles can overwrite earlier profiles.
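+
+For instance, combining the bundled test configuration with Docker could look like this (a minimal sketch; the `test` profile supplies its own test data, so no further parameters are needed):
+
+```bash
+nextflow run nf-core/hic -profile test,docker
+```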
 
-It is not possible to run a mixture of single-end and paired-end files in one run.
+If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended.
 
+* `docker`
+  * A generic configuration profile to be used with [Docker](https://docker.com/)
+  * Pulls software from Docker Hub: [`nfcore/hic`](https://hub.docker.com/r/nfcore/hic/)
+* `singularity`
+  * A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/)
+  * Pulls software from Docker Hub: [`nfcore/hic`](https://hub.docker.com/r/nfcore/hic/)
+* `podman`
+  * A generic configuration profile to be used with [Podman](https://podman.io/)
+  * Pulls software from Docker Hub: [`nfcore/hic`](https://hub.docker.com/r/nfcore/hic/)
+* `conda`
+  * Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity or Podman.
+  * A generic configuration profile to be used with [Conda](https://conda.io/docs/)
+  * Pulls most software from [Bioconda](https://bioconda.github.io/)
+* `test`
+  * A profile with a complete configuration for automated testing
+  * Includes links to test data so needs no other parameters
 
-## Reference genomes
+### `-resume`
 
-The pipeline config files come bundled with paths to the illumina iGenomes reference index files. If running with docker or AWS, the configuration is set up to use the [AWS-iGenomes](https://ewels.github.io/AWS-iGenomes/) resource.
+Specify this when restarting a pipeline. Nextflow will use cached results from any pipeline steps where the inputs are the same, continuing from where it got to previously.
 
-### `--genome` (using iGenomes)
-There are 31 different species supported in the iGenomes references. To run the pipeline, you must specify which to use with the `--genome` flag.
+You can also supply a run name to resume a specific run: `-resume [run-name]`. Use the `nextflow log` command to show previous run names.
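+
+As a minimal sketch (the run name below is hypothetical):
+
+```bash
+# Resume the previous run in this launch directory
+nextflow run nf-core/hic --input '*_R{1,2}.fastq.gz' -profile docker -resume
+
+# Resume a specific earlier run by name (list previous names with `nextflow log`)
+nextflow run nf-core/hic --input '*_R{1,2}.fastq.gz' -profile docker -resume tiny_leavitt
+```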
 
-You can find the keys to specify the genomes in the [iGenomes config file](../conf/igenomes.config). Common genomes that are supported are:
+### `-c`
 
-* Human
-  * `--genome GRCh37`
-* Mouse
-  * `--genome GRCm38`
-* _Drosophila_
-  * `--genome BDGP6`
-* _S. cerevisiae_
-  * `--genome 'R64-1-1'`
+Specify the path to a specific config file (this is a core Nextflow command). See the [nf-core website documentation](https://nf-co.re/usage/configuration) for more information.
 
-> There are numerous others - check the config file for more.
+#### Custom resource requests
 
-Note that you can use the same configuration setup to save sets of reference files for your own use, even if they are not part of the iGenomes resource. See the [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for instructions on where to save such a file.
+Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with an error code of `143` (exceeded requested resources) it will automatically resubmit with higher requests (2 x original, then 3 x original). If it still fails after three times then the pipeline is stopped.
 
-The syntax for this reference configuration is as follows:
+Whilst these default requirements will hopefully work for most people with most data, you may find that you want to customise the compute resources that the pipeline requests. You can do this by creating a custom config file. For example, to give the workflow process `star` 32GB of memory, you could use the following config:
 
-<!-- TODO nf-core: Update reference genome example according to what is needed -->
 ```nextflow
-params {
-  genomes {
-    'GRCh37' {
-      fasta   = '<path to the genome fasta file>' // Used if no star index given
-    }
-    // Any number of additional genomes, key is used with --genome
+process {
+  withName: star {
+    memory = 32.GB
   }
 }
 ```
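+
+Saved as, say, `custom_resources.config` (an illustrative file name), such a file could then be passed to the pipeline at run time with `-c`:
+
+```bash
+nextflow run nf-core/hic --input '*_R{1,2}.fastq.gz' -profile docker -c custom_resources.config
+```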
 
-<!-- TODO nf-core: Describe reference path flags -->
-### `--fasta`
-If you prefer, you can specify the full path to your reference genome when you run the pipeline:
-
-```bash
---fasta '[path to Fasta reference]'
-```
-
-### `--igenomesIgnore`
-Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`.
-
-## Job resources
-### Automatic resubmission
-Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with an error code of `143` (exceeded requested resources) it will automatically resubmit with higher requests (2 x original, then 3 x original). If it still fails after three times then the pipeline is stopped.
+See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more information.
 
-### Custom resource requests
-Wherever process-specific requirements are set in the pipeline, the default value can be changed by creating a custom config file. See the files hosted at [`nf-core/configs`](https://github.com/nf-core/configs/tree/master/conf) for examples.
+If you are likely to be running `nf-core` pipelines regularly, it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this, please test that the config file works with your pipeline of choice using the `-c` parameter (see definition above). You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, an associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and an amendment to [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile.
 
-If you are likely to be running `nf-core` pipelines regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter (see definition below). You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile.
+If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs).
 
-If you have any questions or issues please send us a message on [`Slack`](https://nf-core-invite.herokuapp.com/).
+### Running in the background
 
-## AWS Batch specific parameters
-Running the pipeline on AWS Batch requires a couple of specific parameters to be set according to your AWS Batch configuration. Please use the `-awsbatch` profile and then specify all of the following parameters.
-### `--awsqueue`
-The JobQueue that you intend to use on AWS Batch.
-### `--awsregion`
-The AWS region to run your job in. Default is set to `eu-west-1` but can be adjusted to your needs.
+Nextflow handles job submissions and supervises the running jobs. The Nextflow process must run until the pipeline is finished.
 
-Please make sure to also set the `-w/--work-dir` and `--outdir` parameters to a S3 storage bucket of your choice - you'll get an error message notifying you if you didn't.
+The Nextflow `-bg` flag launches Nextflow in the background, detached from your terminal so that the workflow does not stop if you log out of your session. The logs are saved to a file.
 
-## Other command line parameters
+Alternatively, you can use `screen` / `tmux` or a similar tool to create a detached session which you can log back into at a later time.
+Some HPC setups also allow you to run Nextflow within a cluster job submitted to your job scheduler (from where it submits more jobs).
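+
+For example, a detached launch could look like this (a minimal sketch reusing the command shown above):
+
+```bash
+nextflow run nf-core/hic --input '*_R{1,2}.fastq.gz' -profile docker -bg
+```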
 
-<!-- TODO nf-core: Describe any other command line flags here -->
+#### Nextflow memory requirements
 
-### `--outdir`
-The output directory where the results will be saved.
-
-### `--email`
-Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to speicfy this on the command line for every run.
-
-### `-name`
-Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic.
-
-This is used in the MultiQC report (if not default) and in the summary HTML / e-mail (always).
-
-**NB:** Single hyphen (core Nextflow option)
-
-### `-resume`
-Specify this when restarting a pipeline. Nextflow will used cached results from any pipeline steps where the inputs are the same, continuing from where it got to previously.
-
-You can also supply a run name to resume a specific run: `-resume [run-name]`. Use the `nextflow log` command to show previous run names.
-
-**NB:** Single hyphen (core Nextflow option)
-
-### `-c`
-Specify the path to a specific config file (this is a core NextFlow command).
-
-**NB:** Single hyphen (core Nextflow option)
-
-Note - you can use this to override pipeline defaults.
-
-### `--custom_config_version`
-Provide git commit id for custom Institutional configs hosted at `nf-core/configs`. This was implemented for reproducibility purposes. Default is set to `master`.
+In some cases, the Nextflow Java virtual machines can start to request a large amount of memory.
+We recommend adding the following line to your environment to limit this (typically in `~/.bashrc` or `~/.bash_profile`):
 
 ```bash
-## Download and use config file with following git commid id
---custom_config_version d52db660777c4bf36546ddb188ec530c3ada1b96
+NXF_OPTS='-Xms1g -Xmx4g'
 ```
-
-### `--max_memory`
-Use to set a top-limit for the default memory requirement for each process.
-Should be a string in the format integer-unit. eg. `--max_memory '8.GB'`
-
-### `--max_time`
-Use to set a top-limit for the default time requirement for each process.
-Should be a string in the format integer-unit. eg. `--max_time '2.h'`
-
-### `--max_cpus`
-Use to set a top-limit for the default CPU requirement for each process.
-Should be a string in the format integer-unit. eg. `--max_cpus 1`
-
-### `--plaintext_email`
-Set to receive plain-text e-mails instead of HTML formatted.
-
-### `--multiqc_config`
-Specify a path to a custom MultiQC configuration file.
diff --git a/environment.yml b/environment.yml
index 42c74b4..9d3cc67 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,9 +1,15 @@
+# You can use this file to create a conda environment for this pipeline:
+#   conda env create -f environment.yml
 name: nf-core-hic-1.0dev
 channels:
   - conda-forge
   - bioconda
   - defaults
 dependencies:
+  - conda-forge::python=3.7.3
+  - conda-forge::markdown=3.1.1
+  - conda-forge::pymdown-extensions=6.0
+  - conda-forge::pygments=2.5.2
   # TODO nf-core: Add required software dependencies here
-  - fastqc=0.11.8
-  - multiqc=1.6
+  - bioconda::fastqc=0.11.8
+  - bioconda::multiqc=1.7
diff --git a/main.nf b/main.nf
index 97a07de..c481dd5 100644
--- a/main.nf
+++ b/main.nf
@@ -9,164 +9,159 @@
 ----------------------------------------------------------------------------------------
 */
 
-
 def helpMessage() {
     // TODO nf-core: Add to this help message with new command line parameters
+    log.info nfcoreHeader()
     log.info"""
-    =======================================================
-                                              ,--./,-.
-              ___     __   __   __   ___     /,-._.--~\'
-        |\\ | |__  __ /  ` /  \\ |__) |__         }  {
-        | \\| |       \\__, \\__/ |  \\ |___     \\`-._,-`-,
-                                              `._,._,\'
-
-     nf-core/hic v${workflow.manifest.version}
-    =======================================================
 
     Usage:
 
     The typical command for running the pipeline is as follows:
 
-    nextflow run nf-core/hic --reads '*_R{1,2}.fastq.gz' -profile docker
+    nextflow run nf-core/hic --input '*_R{1,2}.fastq.gz' -profile docker
 
     Mandatory arguments:
-      --reads                       Path to input data (must be surrounded with quotes)
-      --genome                      Name of iGenomes reference
-      -profile                      Configuration profile to use. Can use multiple (comma separated)
-                                    Available: conda, docker, singularity, awsbatch, test and more.
+      --input [file]                  Path to input data (must be surrounded with quotes)
+      -profile [str]                  Configuration profile to use. Can use multiple (comma separated)
+                                      Available: conda, docker, singularity, test, awsbatch, <institute> and more
 
     Options:
-      --singleEnd                   Specifies that the input is single end reads
+      --genome [str]                  Name of iGenomes reference
+      --single_end [bool]             Specifies that the input is single-end reads
 
-    References                      If not specified in the configuration file or you wish to overwrite any of the references.
-      --fasta                       Path to Fasta reference
+    References                        If not specified in the configuration file or you wish to overwrite any of the references
+      --fasta [file]                  Path to fasta reference
 
     Other options:
-      --outdir                      The output directory where the results will be saved
-      --email                       Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits
-      -name                         Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic.
+      --outdir [file]                 The output directory where the results will be saved
+      --publish_dir_mode [str]        Mode for publishing results in the output directory. Available: symlink, rellink, link, copy, copyNoFollow, move (Default: copy)
+      --email [email]                 Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits
+      --email_on_fail [email]         Same as --email, except only send mail if the workflow is not successful
+      --max_multiqc_email_size [str]  Threshold size for MultiQC report to be attached in notification email. If file generated by pipeline exceeds the threshold, it will not be attached (Default: 25MB)
+      -name [str]                     Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic
 
     AWSBatch options:
-      --awsqueue                    The AWSBatch JobQueue that needs to be set when running on AWSBatch
-      --awsregion                   The AWS Region for your AWS Batch job to run on
+      --awsqueue [str]                The AWSBatch JobQueue that needs to be set when running on AWSBatch
+      --awsregion [str]               The AWS Region for your AWS Batch job to run on
+      --awscli [str]                  Path to the AWS CLI tool
     """.stripIndent()
 }
 
+// Show help message
+if (params.help) {
+    helpMessage()
+    exit 0
+}
+
 /*
  * SET UP CONFIGURATION VARIABLES
  */
 
-// Show help emssage
-if (params.help){
-    helpMessage()
-    exit 0
+// Check if genome exists in the config file
+if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) {
+    exit 1, "The provided genome '${params.genome}' is not available in the iGenomes file. Currently the available genomes are ${params.genomes.keySet().join(", ")}"
 }
 
 // TODO nf-core: Add any reference files that are needed
 // Configurable reference genomes
-fasta = params.genome ? params.genomes[ params.genome ].fasta ?: false : false
-if ( params.fasta ){
-    fasta = file(params.fasta)
-    if( !fasta.exists() ) exit 1, "Fasta file not found: ${params.fasta}"
-}
 //
 // NOTE - THIS IS NOT USED IN THIS PIPELINE, EXAMPLE ONLY
-// If you want to use the above in a process, define the following:
+// If you want to use the channel below in a process, define the following:
 //   input:
-//   file fasta from fasta
+//   file fasta from ch_fasta
 //
-
+params.fasta = params.genome ? params.genomes[ params.genome ].fasta ?: false : false
+if (params.fasta) { ch_fasta = file(params.fasta, checkIfExists: true) }
 
 // Has the run name been specified by the user?
-//  this has the bonus effect of catching both -name and --name
+// this has the bonus effect of catching both -name and --name
 custom_runName = params.name
-if( !(workflow.runName ==~ /[a-z]+_[a-z]+/) ){
-  custom_runName = workflow.runName
+if (!(workflow.runName ==~ /[a-z]+_[a-z]+/)) {
+    custom_runName = workflow.runName
 }
 
-
-if( workflow.profile == 'awsbatch') {
-  // AWSBatch sanity checking
-  if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!"
-  if (!workflow.workDir.startsWith('s3') || !params.outdir.startsWith('s3')) exit 1, "Specify S3 URLs for workDir and outdir parameters on AWSBatch!"
-  // Check workDir/outdir paths to be S3 buckets if running on AWSBatch
-  // related: https://github.com/nextflow-io/nextflow/issues/813
-  if (!workflow.workDir.startsWith('s3:') || !params.outdir.startsWith('s3:')) exit 1, "Workdir or Outdir not on S3 - specify S3 Buckets for each to run on AWSBatch!"
+// Check AWS batch settings
+if (workflow.profile.contains('awsbatch')) {
+    // AWSBatch sanity checking
+    if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!"
+    // Check outdir paths to be S3 buckets if running on AWSBatch
+    // related: https://github.com/nextflow-io/nextflow/issues/813
+    if (!params.outdir.startsWith('s3:')) exit 1, "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!"
+    // Prevent trace files from being stored on S3 since S3 does not support rolling files.
+    if (params.tracedir.startsWith('s3:')) exit 1, "Specify a local tracedir or run without trace! S3 cannot be used for tracefiles."
 }
 
 // Stage config files
-ch_multiqc_config = Channel.fromPath(params.multiqc_config)
-ch_output_docs = Channel.fromPath("$baseDir/docs/output.md")
+ch_multiqc_config = file("$projectDir/assets/multiqc_config.yaml", checkIfExists: true)
+ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config, checkIfExists: true) : Channel.empty()
+ch_output_docs = file("$projectDir/docs/output.md", checkIfExists: true)
+ch_output_docs_images = file("$projectDir/docs/images/", checkIfExists: true)
 
 /*
  * Create a channel for input read files
  */
- if(params.readPaths){
-     if(params.singleEnd){
-         Channel
-             .from(params.readPaths)
-             .map { row -> [ row[0], [file(row[1][0])]] }
-             .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" }
-             .into { read_files_fastqc; read_files_trimming }
-     } else {
-         Channel
-             .from(params.readPaths)
-             .map { row -> [ row[0], [file(row[1][0]), file(row[1][1])]] }
-             .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" }
-             .into { read_files_fastqc; read_files_trimming }
-     }
- } else {
-     Channel
-         .fromFilePairs( params.reads, size: params.singleEnd ? 1 : 2 )
-         .ifEmpty { exit 1, "Cannot find any reads matching: ${params.reads}\nNB: Path needs to be enclosed in quotes!\nIf this is single-end data, please specify --singleEnd on the command line." }
-         .into { read_files_fastqc; read_files_trimming }
- }
-
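+// Use the explicit params.input_paths list (e.g. supplied by test profiles) when given,
+// otherwise glob params.input into single-end files or read pairs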
+if (params.input_paths) {
+    if (params.single_end) {
+        Channel
+            .from(params.input_paths)
+            .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true) ] ] }
+            .ifEmpty { exit 1, "params.input_paths was empty - no input files supplied" }
+            .into { ch_read_files_fastqc; ch_read_files_trimming }
+    } else {
+        Channel
+            .from(params.input_paths)
+            .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true), file(row[1][1], checkIfExists: true) ] ] }
+            .ifEmpty { exit 1, "params.input_paths was empty - no input files supplied" }
+            .into { ch_read_files_fastqc; ch_read_files_trimming }
+    }
+} else {
+    Channel
+        .fromFilePairs(params.input, size: params.single_end ? 1 : 2)
+        .ifEmpty { exit 1, "Cannot find any reads matching: ${params.input}\nNB: Path needs to be enclosed in quotes!\nIf this is single-end data, please specify --single_end on the command line." }
+        .into { ch_read_files_fastqc; ch_read_files_trimming }
+}
 
 // Header log info
-log.info """=======================================================
-                                          ,--./,-.
-          ___     __   __   __   ___     /,-._.--~\'
-    |\\ | |__  __ /  ` /  \\ |__) |__         }  {
-    | \\| |       \\__, \\__/ |  \\ |___     \\`-._,-`-,
-                                          `._,._,\'
-
-nf-core/hic v${workflow.manifest.version}"
-======================================================="""
+log.info nfcoreHeader()
 def summary = [:]
-summary['Pipeline Name']  = 'nf-core/hic'
-summary['Pipeline Version'] = workflow.manifest.version
-summary['Run Name']     = custom_runName ?: workflow.runName
+if (workflow.revision) summary['Pipeline Release'] = workflow.revision
+summary['Run Name']         = custom_runName ?: workflow.runName
 // TODO nf-core: Report custom parameters here
-summary['Reads']        = params.reads
-summary['Fasta Ref']    = params.fasta
-summary['Data Type']    = params.singleEnd ? 'Single-End' : 'Paired-End'
-summary['Max Memory']   = params.max_memory
-summary['Max CPUs']     = params.max_cpus
-summary['Max Time']     = params.max_time
-summary['Output dir']   = params.outdir
-summary['Working dir']  = workflow.workDir
-summary['Container Engine'] = workflow.containerEngine
-if(workflow.containerEngine) summary['Container'] = workflow.container
-summary['Current home']   = "$HOME"
-summary['Current user']   = "$USER"
-summary['Current path']   = "$PWD"
-summary['Working dir']    = workflow.workDir
-summary['Output dir']     = params.outdir
-summary['Script dir']     = workflow.projectDir
+summary['Input']            = params.input
+summary['Fasta Ref']        = params.fasta
+summary['Data Type']        = params.single_end ? 'Single-End' : 'Paired-End'
+summary['Max Resources']    = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job"
+if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container"
+summary['Output dir']       = params.outdir
+summary['Launch dir']       = workflow.launchDir
+summary['Working dir']      = workflow.workDir
+summary['Script dir']       = workflow.projectDir
+summary['User']             = workflow.userName
+if (workflow.profile.contains('awsbatch')) {
+    summary['AWS Region']   = params.awsregion
+    summary['AWS Queue']    = params.awsqueue
+    summary['AWS CLI']      = params.awscli
+}
 summary['Config Profile'] = workflow.profile
-if(workflow.profile == 'awsbatch'){
-   summary['AWS Region'] = params.awsregion
-   summary['AWS Queue'] = params.awsqueue
+if (params.config_profile_description) summary['Config Profile Description'] = params.config_profile_description
+if (params.config_profile_contact)     summary['Config Profile Contact']     = params.config_profile_contact
+if (params.config_profile_url)         summary['Config Profile URL']         = params.config_profile_url
+summary['Config Files'] = workflow.configFiles.join(', ')
+if (params.email || params.email_on_fail) {
+    summary['E-mail Address']    = params.email
+    summary['E-mail on failure'] = params.email_on_fail
+    summary['MultiQC maxsize']   = params.max_multiqc_email_size
 }
-if(params.email) summary['E-mail Address'] = params.email
-log.info summary.collect { k,v -> "${k.padRight(15)}: $v" }.join("\n")
-log.info "========================================="
+log.info summary.collect { k,v -> "${k.padRight(18)}: $v" }.join("\n")
+log.info "-\033[2m--------------------------------------------------\033[0m-"
 
+// Check the hostnames against configured profiles
+checkHostname()
 
-def create_workflow_summary(summary) {
-    def yaml_file = workDir.resolve('workflow_summary_mqc.yaml')
-    yaml_file.text  = """
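+// Build a MultiQC "custom content" YAML block from the summary map,
+// rendering each key/value pair as an HTML definition-list entry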
+Channel.from(summary.collect{ [it.key, it.value] })
+    .map { k,v -> "<dt>$k</dt><dd><samp>${v ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>" }
+    .reduce { a, b -> return [a, b].join("\n            ") }
+    .map { x -> """
     id: 'nf-core-hic-summary'
     description: " - this information is collected when the pipeline is started."
     section_name: 'nf-core/hic Workflow Summary'
@@ -174,21 +169,24 @@ def create_workflow_summary(summary) {
     plot_type: 'html'
     data: |
         <dl class=\"dl-horizontal\">
-${summary.collect { k,v -> "            <dt>$k</dt><dd><samp>${v ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>" }.join("\n")}
+            $x
         </dl>
-    """.stripIndent()
-
-   return yaml_file
-}
-
+    """.stripIndent() }
+    .set { ch_workflow_summary }
 
 /*
  * Parse software version numbers
  */
 process get_software_versions {
+    publishDir "${params.outdir}/pipeline_info", mode: params.publish_dir_mode,
+        saveAs: { filename ->
+                      if (filename.indexOf(".csv") > 0) filename
+                      else null
+                }
 
     output:
-    file 'software_versions_mqc.yaml' into software_versions_yaml
+    file 'software_versions_mqc.yaml' into ch_software_versions_yaml
+    file "software_versions.csv"
 
     script:
     // TODO nf-core: Get all tools to print their version number here
@@ -197,82 +195,81 @@ process get_software_versions {
     echo $workflow.nextflow.version > v_nextflow.txt
     fastqc --version > v_fastqc.txt
     multiqc --version > v_multiqc.txt
-    scrape_software_versions.py > software_versions_mqc.yaml
+    scrape_software_versions.py &> software_versions_mqc.yaml
     """
 }
 
-
-
 /*
  * STEP 1 - FastQC
  */
 process fastqc {
     tag "$name"
-    publishDir "${params.outdir}/fastqc", mode: 'copy',
-        saveAs: {filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename"}
+    label 'process_medium'
+    publishDir "${params.outdir}/fastqc", mode: params.publish_dir_mode,
+        saveAs: { filename ->
+                      filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename"
+                }
 
     input:
-    set val(name), file(reads) from read_files_fastqc
+    set val(name), file(reads) from ch_read_files_fastqc
 
     output:
-    file "*_fastqc.{zip,html}" into fastqc_results
+    file "*_fastqc.{zip,html}" into ch_fastqc_results
 
     script:
     """
-    fastqc -q $reads
+    fastqc --quiet --threads $task.cpus $reads
     """
 }
 
-
-
 /*
  * STEP 2 - MultiQC
  */
 process multiqc {
-    publishDir "${params.outdir}/MultiQC", mode: 'copy'
+    publishDir "${params.outdir}/MultiQC", mode: params.publish_dir_mode
 
     input:
-    file multiqc_config from ch_multiqc_config
+    file (multiqc_config) from ch_multiqc_config
+    file (mqc_custom_config) from ch_multiqc_custom_config.collect().ifEmpty([])
     // TODO nf-core: Add in log files from your new processes for MultiQC to find!
-    file ('fastqc/*') from fastqc_results.collect().ifEmpty([])
-    file ('software_versions/*') from software_versions_yaml
-    file workflow_summary from create_workflow_summary(summary)
+    file ('fastqc/*') from ch_fastqc_results.collect().ifEmpty([])
+    file ('software_versions/*') from ch_software_versions_yaml.collect()
+    file workflow_summary from ch_workflow_summary.collectFile(name: "workflow_summary_mqc.yaml")
 
     output:
-    file "*multiqc_report.html" into multiqc_report
+    file "*multiqc_report.html" into ch_multiqc_report
     file "*_data"
+    file "multiqc_plots"
 
     script:
     rtitle = custom_runName ? "--title \"$custom_runName\"" : ''
     rfilename = custom_runName ? "--filename " + custom_runName.replaceAll('\\W','_').replaceAll('_+','_') + "_multiqc_report" : ''
+    custom_config_file = params.multiqc_config ? "--config $mqc_custom_config" : ''
     // TODO nf-core: Specify which MultiQC modules to use with -m for a faster run time
     """
-    multiqc -f $rtitle $rfilename --config $multiqc_config .
+    multiqc -f $rtitle $rfilename $custom_config_file .
     """
 }
 
-
-
 /*
  * STEP 3 - Output Description HTML
  */
 process output_documentation {
-    publishDir "${params.outdir}/Documentation", mode: 'copy'
+    publishDir "${params.outdir}/pipeline_info", mode: params.publish_dir_mode
 
     input:
     file output_docs from ch_output_docs
+    file images from ch_output_docs_images
 
     output:
     file "results_description.html"
 
     script:
     """
-    markdown_to_html.r $output_docs results_description.html
+    markdown_to_html.py $output_docs -o results_description.html
     """
 }
 
-
-
 /*
  * Completion e-mail notification
  */
@@ -280,8 +277,8 @@ workflow.onComplete {
 
     // Set up the e-mail variables
     def subject = "[nf-core/hic] Successful: $workflow.runName"
-    if(!workflow.success){
-      subject = "[nf-core/hic] FAILED: $workflow.runName"
+    if (!workflow.success) {
+        subject = "[nf-core/hic] FAILED: $workflow.runName"
     }
     def email_fields = [:]
     email_fields['version'] = workflow.manifest.version
@@ -299,54 +296,140 @@ workflow.onComplete {
     email_fields['summary']['Date Completed'] = workflow.complete
     email_fields['summary']['Pipeline script file path'] = workflow.scriptFile
     email_fields['summary']['Pipeline script hash ID'] = workflow.scriptId
-    if(workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository
-    if(workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId
-    if(workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision
+    if (workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository
+    if (workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId
+    if (workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision
     email_fields['summary']['Nextflow Version'] = workflow.nextflow.version
     email_fields['summary']['Nextflow Build'] = workflow.nextflow.build
     email_fields['summary']['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
 
+    // TODO nf-core: If not using MultiQC, strip out this code (including params.max_multiqc_email_size)
+    // On success try attach the multiqc report
+    def mqc_report = null
+    try {
+        if (workflow.success) {
+            mqc_report = ch_multiqc_report.getVal()
+            if (mqc_report.getClass() == ArrayList) {
+                log.warn "[nf-core/hic] Found multiple reports from process 'multiqc', will use only one"
+                mqc_report = mqc_report[0]
+            }
+        }
+    } catch (all) {
+        log.warn "[nf-core/hic] Could not attach MultiQC report to summary email"
+    }
+
+    // Check if we are only sending emails on failure
+    email_address = params.email
+    if (!params.email && params.email_on_fail && !workflow.success) {
+        email_address = params.email_on_fail
+    }
+
     // Render the TXT template
     def engine = new groovy.text.GStringTemplateEngine()
-    def tf = new File("$baseDir/assets/email_template.txt")
+    def tf = new File("$projectDir/assets/email_template.txt")
     def txt_template = engine.createTemplate(tf).make(email_fields)
     def email_txt = txt_template.toString()
 
     // Render the HTML template
-    def hf = new File("$baseDir/assets/email_template.html")
+    def hf = new File("$projectDir/assets/email_template.html")
     def html_template = engine.createTemplate(hf).make(email_fields)
     def email_html = html_template.toString()
 
     // Render the sendmail template
-    def smail_fields = [ email: params.email, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir" ]
-    def sf = new File("$baseDir/assets/sendmail_template.txt")
+    def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: params.max_multiqc_email_size.toBytes() ]
+    def sf = new File("$projectDir/assets/sendmail_template.txt")
     def sendmail_template = engine.createTemplate(sf).make(smail_fields)
     def sendmail_html = sendmail_template.toString()
 
     // Send the HTML e-mail
-    if (params.email) {
+    if (email_address) {
         try {
-          if( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') }
-          // Try to send HTML e-mail using sendmail
-          [ 'sendmail', '-t' ].execute() << sendmail_html
-          log.info "[nf-core/hic] Sent summary e-mail to $params.email (sendmail)"
+            if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') }
+            // Try to send HTML e-mail using sendmail
+            [ 'sendmail', '-t' ].execute() << sendmail_html
+            log.info "[nf-core/hic] Sent summary e-mail to $email_address (sendmail)"
         } catch (all) {
-          // Catch failures and try with plaintext
-          [ 'mail', '-s', subject, params.email ].execute() << email_txt
-          log.info "[nf-core/hic] Sent summary e-mail to $params.email (mail)"
+            // Catch failures and try with plaintext
+            def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
+            if ( mqc_report.size() <= params.max_multiqc_email_size.toBytes() ) {
+              mail_cmd += [ '-A', mqc_report ]
+            }
+            mail_cmd.execute() << email_html
+            log.info "[nf-core/hic] Sent summary e-mail to $email_address (mail)"
         }
     }
 
     // Write summary e-mail HTML to a file
-    def output_d = new File( "${params.outdir}/Documentation/" )
-    if( !output_d.exists() ) {
-      output_d.mkdirs()
+    def output_d = new File("${params.outdir}/pipeline_info/")
+    if (!output_d.exists()) {
+        output_d.mkdirs()
     }
-    def output_hf = new File( output_d, "pipeline_report.html" )
+    def output_hf = new File(output_d, "pipeline_report.html")
     output_hf.withWriter { w -> w << email_html }
-    def output_tf = new File( output_d, "pipeline_report.txt" )
+    def output_tf = new File(output_d, "pipeline_report.txt")
     output_tf.withWriter { w -> w << email_txt }
 
-    log.info "[nf-core/hic] Pipeline Complete"
+    c_green = params.monochrome_logs ? '' : "\033[0;32m";
+    c_purple = params.monochrome_logs ? '' : "\033[0;35m";
+    c_red = params.monochrome_logs ? '' : "\033[0;31m";
+    c_reset = params.monochrome_logs ? '' : "\033[0m";
+
+    if (workflow.stats.ignoredCount > 0 && workflow.success) {
+        log.info "-${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}-"
+        log.info "-${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${c_reset}-"
+        log.info "-${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}-"
+    }
+
+    if (workflow.success) {
+        log.info "-${c_purple}[nf-core/hic]${c_green} Pipeline completed successfully${c_reset}-"
+    } else {
+        checkHostname()
+        log.info "-${c_purple}[nf-core/hic]${c_red} Pipeline completed with errors${c_reset}-"
+    }
+
+}
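The completion handler above picks the notification address (falling back to `--email_on_fail` when no `--email` was given) and attaches the MultiQC report only when the run succeeded and the file stays under `params.max_multiqc_email_size`. A minimal, self-contained Groovy sketch of that decision logic, using placeholder values rather than the real `params` and channel objects:

```groovy
// Standalone sketch only: placeholder values stand in for params and the
// MultiQC report channel used in main.nf above.
def params = [ email: null, email_on_fail: 'admin@example.com',
               max_multiqc_email_size: 25L * 1024 * 1024 ]   // ~25 MB in bytes
def success = false
def report  = new File('multiqc_report.html')                // hypothetical report file

// Fall back to email_on_fail only when no --email was given and the run failed
def email_address = params.email
if (!params.email && params.email_on_fail && !success) {
    email_address = params.email_on_fail
}

// Attach the report only if it exists and fits under the size limit
def attach = report.exists() && report.length() <= params.max_multiqc_email_size
println "notify: ${email_address}, attach MultiQC report: ${attach}"
```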
+
 
+def nfcoreHeader() {
+    // Log colors ANSI codes
+    c_black = params.monochrome_logs ? '' : "\033[0;30m";
+    c_blue = params.monochrome_logs ? '' : "\033[0;34m";
+    c_cyan = params.monochrome_logs ? '' : "\033[0;36m";
+    c_dim = params.monochrome_logs ? '' : "\033[2m";
+    c_green = params.monochrome_logs ? '' : "\033[0;32m";
+    c_purple = params.monochrome_logs ? '' : "\033[0;35m";
+    c_reset = params.monochrome_logs ? '' : "\033[0m";
+    c_white = params.monochrome_logs ? '' : "\033[0;37m";
+    c_yellow = params.monochrome_logs ? '' : "\033[0;33m";
+
+    return """    -${c_dim}--------------------------------------------------${c_reset}-
+                                            ${c_green},--.${c_black}/${c_green},-.${c_reset}
+    ${c_blue}        ___     __   __   __   ___     ${c_green}/,-._.--~\'${c_reset}
+    ${c_blue}  |\\ | |__  __ /  ` /  \\ |__) |__         ${c_yellow}}  {${c_reset}
+    ${c_blue}  | \\| |       \\__, \\__/ |  \\ |___     ${c_green}\\`-._,-`-,${c_reset}
+                                            ${c_green}`._,._,\'${c_reset}
+    ${c_purple}  nf-core/hic v${workflow.manifest.version}${c_reset}
+    -${c_dim}--------------------------------------------------${c_reset}-
+    """.stripIndent()
+}
+
+def checkHostname() {
+    def c_reset = params.monochrome_logs ? '' : "\033[0m"
+    def c_white = params.monochrome_logs ? '' : "\033[0;37m"
+    def c_red = params.monochrome_logs ? '' : "\033[1;91m"
+    def c_yellow_bold = params.monochrome_logs ? '' : "\033[1;93m"
+    if (params.hostnames) {
+        def hostname = "hostname".execute().text.trim()
+        params.hostnames.each { prof, hnames ->
+            hnames.each { hname ->
+                if (hostname.contains(hname) && !workflow.profile.contains(prof)) {
+                    log.error "====================================================\n" +
+                            "  ${c_red}WARNING!${c_reset} You are running with `-profile $workflow.profile`\n" +
+                            "  but your machine hostname is ${c_white}'$hostname'${c_reset}\n" +
+                            "  ${c_yellow_bold}It's highly recommended that you use `-profile $prof${c_reset}`\n" +
+                            "============================================================"
+                }
+            }
+        }
+    }
 }
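`checkHostname()` iterates `params.hostnames` as a map from profile name to a list of hostname substrings, and warns when the current machine matches an entry but the corresponding `-profile` was not used. A hedged example of the shape that parameter is expected to take (the profile key and hostnames below are invented):

```groovy
// Illustrative only: the profile name and hostname substrings are invented.
// A site or pipeline config could populate params.hostnames like this.
params {
  hostnames = [
    'mycluster': ['login.mycluster.example.org', '.hpc.example.org']
  ]
}
```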
diff --git a/nextflow.config b/nextflow.config
index 5f363c6..44cd92c 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -3,83 +3,111 @@
  *  nf-core/hic Nextflow config file
  * -------------------------------------------------
  * Default config options for all environments.
- * Cluster-specific config options should be saved
- * in the conf folder and imported under a profile
- * name here.
  */
 
 // Global default params, used in configs
 params {
 
-  // Container slug. Stable releases should specify release tag!
-  //   Developmental code should specify :latest
-  container = 'nfcore/hic:latest'
-
   // Workflow flags
   // TODO nf-core: Specify your pipeline's command line flags
-  reads = "data/*{1,2}.fastq.gz"
-  singleEnd = false
+  genome = false
+  input = "data/*{1,2}.fastq.gz"
+  single_end = false
   outdir = './results'
+  publish_dir_mode = 'copy'
 
   // Boilerplate options
   name = false
-  multiqc_config = "$baseDir/conf/multiqc_config.yaml"
+  multiqc_config = false
   email = false
+  email_on_fail = false
+  max_multiqc_email_size = 25.MB
   plaintext_email = false
+  monochrome_logs = false
   help = false
-  igenomes_base = "./iGenomes"
+  igenomes_base = 's3://ngi-igenomes/igenomes/'
   tracedir = "${params.outdir}/pipeline_info"
-  clusterOptions = false
-  awsqueue = false
-  awsregion = 'eu-west-1'
-  igenomesIgnore = false
+  igenomes_ignore = false
   custom_config_version = 'master'
+  custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"
+  hostnames = false
+  config_profile_description = false
+  config_profile_contact = false
+  config_profile_url = false
+
+  // Defaults only, expecting to be overwritten
+  max_memory = 128.GB
+  max_cpus = 16
+  max_time = 240.h
+
 }
 
+// Container slug. Stable releases should specify release tag!
+// Developmental code should specify :dev
+process.container = 'nfcore/hic:dev'
+
 // Load base.config by default for all pipelines
 includeConfig 'conf/base.config'
 
 // Load nf-core custom profiles from different Institutions
-includeConfig "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}/nfcore_custom.config"
+try {
+  includeConfig "${params.custom_config_base}/nfcore_custom.config"
+} catch (Exception e) {
+  System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")
+}
 
 profiles {
-  awsbatch { includeConfig 'conf/awsbatch.config' }
-  conda { process.conda = "$baseDir/environment.yml" }
+  conda { process.conda = "$projectDir/environment.yml" }
   debug { process.beforeScript = 'echo $HOSTNAME' }
   docker {
     docker.enabled = true
-    process.container = params.container
+    // Avoid this error:
+    //   WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap.
+    // Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351
+    // once this is established and works well, Nextflow might implement this behavior as the new default.
+    docker.runOptions = '-u \$(id -u):\$(id -g)'
   }
   singularity {
     singularity.enabled = true
-    process.container = {"shub://${params.container.replace('nfcore', 'nf-core')}"}
+    singularity.autoMounts = true
+  }
+  podman {
+    podman.enabled = true
   }
   test { includeConfig 'conf/test.config' }
+  test_full { includeConfig 'conf/test_full.config' }
 }
 
 // Load igenomes.config if required
-if(!params.igenomesIgnore){
+if (!params.igenomes_ignore) {
   includeConfig 'conf/igenomes.config'
 }
 
+// Export these variables to prevent local Python/R libraries from conflicting with those in the container
+env {
+  PYTHONNOUSERSITE = 1
+  R_PROFILE_USER = "/.Rprofile"
+  R_ENVIRON_USER = "/.Renviron"
+}
+
 // Capture exit codes from upstream processes when piping
 process.shell = ['/bin/bash', '-euo', 'pipefail']
 
 timeline {
   enabled = true
-  file = "${params.tracedir}/nf-core/hic_timeline.html"
+  file = "${params.tracedir}/execution_timeline.html"
 }
 report {
   enabled = true
-  file = "${params.tracedir}/nf-core/hic_report.html"
+  file = "${params.tracedir}/execution_report.html"
 }
 trace {
   enabled = true
-  file = "${params.tracedir}/nf-core/hic_trace.txt"
+  file = "${params.tracedir}/execution_trace.txt"
 }
 dag {
   enabled = true
-  file = "${params.tracedir}/nf-core/hic_dag.svg"
+  file = "${params.tracedir}/pipeline_dag.svg"
 }
 
 manifest {
@@ -88,16 +116,16 @@ manifest {
   homePage = 'https://github.com/nf-core/hic'
   description = 'Analysis of Chromosome Conformation Capture data (Hi-C)'
   mainScript = 'main.nf'
-  nextflowVersion = '>=0.32.0'
+  nextflowVersion = '>=20.04.0'
   version = '1.0dev'
 }
 
 // Function to ensure that resource requirements don't go beyond
 // a maximum limit
 def check_max(obj, type) {
-  if(type == 'memory'){
+  if (type == 'memory') {
     try {
-      if(obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
+      if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
         return params.max_memory as nextflow.util.MemoryUnit
       else
         return obj
@@ -105,9 +133,9 @@ def check_max(obj, type) {
       println "   ### ERROR ###   Max memory '${params.max_memory}' is not valid! Using default value: $obj"
       return obj
     }
-  } else if(type == 'time'){
+  } else if (type == 'time') {
     try {
-      if(obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
+      if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
         return params.max_time as nextflow.util.Duration
       else
         return obj
@@ -115,7 +143,7 @@ def check_max(obj, type) {
       println "   ### ERROR ###   Max time '${params.max_time}' is not valid! Using default value: $obj"
       return obj
     }
-  } else if(type == 'cpus'){
+  } else if (type == 'cpus') {
     try {
       return Math.min( obj, params.max_cpus as int )
     } catch (all) {
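`check_max()` caps each requested resource at the corresponding `params.max_cpus` / `params.max_memory` / `params.max_time` value. It is meant to be called from process resource directives, typically in `conf/base.config`; a sketch of that pattern (the base values and the `process_high` label are illustrative, not taken from this patch):

```groovy
// Sketch of the usual wiring in conf/base.config; numbers and labels are illustrative.
process {
  cpus   = { check_max( 2    * task.attempt, 'cpus'   ) }
  memory = { check_max( 8.GB * task.attempt, 'memory' ) }
  time   = { check_max( 4.h  * task.attempt, 'time'   ) }

  withLabel:process_high {
    cpus   = { check_max( 8     * task.attempt, 'cpus'   ) }
    memory = { check_max( 32.GB * task.attempt, 'memory' ) }
    time   = { check_max( 16.h  * task.attempt, 'time'   ) }
  }
}
```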
diff --git a/nextflow_schema.json b/nextflow_schema.json
new file mode 100644
index 0000000..21e29d7
--- /dev/null
+++ b/nextflow_schema.json
@@ -0,0 +1,259 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "https://raw.githubusercontent.com/nf-core/hic/master/nextflow_schema.json",
+    "title": "nf-core/hic pipeline parameters",
+    "description": "Analysis of Chromosome Conformation Capture data (Hi-C)",
+    "type": "object",
+    "definitions": {
+        "input_output_options": {
+            "title": "Input/output options",
+            "type": "object",
+            "fa_icon": "fas fa-terminal",
+            "description": "Define where the pipeline should find input data and save output data.",
+            "required": [
+                "input"
+            ],
+            "properties": {
+                "input": {
+                    "type": "string",
+                    "fa_icon": "fas fa-dna",
+                    "description": "Input FastQ files.",
+                    "help_text": "Use this to specify the location of your input FastQ files. For example:\n\n```bash\n--input 'path/to/data/sample_*_{1,2}.fastq'\n```\n\nPlease note the following requirements:\n\n1. The path must be enclosed in quotes\n2. The path must have at least one `*` wildcard character\n3. When using the pipeline with paired end data, the path must use `{1,2}` notation to specify read pairs.\n\nIf left unspecified, a default pattern is used: `data/*{1,2}.fastq.gz`"
+                },
+                "single_end": {
+                    "type": "boolean",
+                    "description": "Specifies that the input is single-end reads.",
+                    "fa_icon": "fas fa-align-center",
+                    "help_text": "By default, the pipeline expects paired-end data. If you have single-end data, you need to specify `--single_end` on the command line when you launch the pipeline. A normal glob pattern, enclosed in quotation marks, can then be used for `--input`. For example:\n\n```bash\n--single_end --input '*.fastq'\n```\n\nIt is not possible to run a mixture of single-end and paired-end files in one run."
+                },
+                "outdir": {
+                    "type": "string",
+                    "description": "The output directory where the results will be saved.",
+                    "default": "./results",
+                    "fa_icon": "fas fa-folder-open"
+                },
+                "email": {
+                    "type": "string",
+                    "description": "Email address for completion summary.",
+                    "fa_icon": "fas fa-envelope",
+                    "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.",
+                    "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"
+                }
+            }
+        },
+        "reference_genome_options": {
+            "title": "Reference genome options",
+            "type": "object",
+            "fa_icon": "fas fa-dna",
+            "description": "Options for the reference genome indices used to align reads.",
+            "properties": {
+                "genome": {
+                    "type": "string",
+                    "description": "Name of iGenomes reference.",
+                    "fa_icon": "fas fa-book",
+                    "help_text": "If using a reference genome configured in the pipeline using iGenomes, use this parameter to give the ID for the reference. This is then used to build the full paths for all required reference genome files e.g. `--genome GRCh38`.\n\nSee the [nf-core website docs](https://nf-co.re/usage/reference_genomes) for more details."
+                },
+                "fasta": {
+                    "type": "string",
+                    "fa_icon": "fas fa-font",
+                    "description": "Path to FASTA genome file.",
+                    "help_text": "If you have no genome reference available, the pipeline can build one using a FASTA file. This requires additional time and resources, so it's better to use a pre-build index if possible."
+                },
+                "igenomes_base": {
+                    "type": "string",
+                    "description": "Directory / URL base for iGenomes references.",
+                    "default": "s3://ngi-igenomes/igenomes/",
+                    "fa_icon": "fas fa-cloud-download-alt",
+                    "hidden": true
+                },
+                "igenomes_ignore": {
+                    "type": "boolean",
+                    "description": "Do not load the iGenomes reference config.",
+                    "fa_icon": "fas fa-ban",
+                    "hidden": true,
+                    "help_text": "Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`."
+                }
+            }
+        },
+        "generic_options": {
+            "title": "Generic options",
+            "type": "object",
+            "fa_icon": "fas fa-file-import",
+            "description": "Less common options for the pipeline, typically set in a config file.",
+            "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.",
+            "properties": {
+                "help": {
+                    "type": "boolean",
+                    "description": "Display help text.",
+                    "hidden": true,
+                    "fa_icon": "fas fa-question-circle"
+                },
+                "publish_dir_mode": {
+                    "type": "string",
+                    "default": "copy",
+                    "hidden": true,
+                    "description": "Method used to save pipeline results to output directory.",
+                    "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
+                    "fa_icon": "fas fa-copy",
+                    "enum": [
+                        "symlink",
+                        "rellink",
+                        "link",
+                        "copy",
+                        "copyNoFollow",
+                        "move"
+                    ]
+                },
+                "name": {
+                    "type": "string",
+                    "description": "Workflow name.",
+                    "fa_icon": "fas fa-fingerprint",
+                    "hidden": true,
+                    "help_text": "A custom name for the pipeline run. Unlike the core nextflow `-name` option with one hyphen this parameter can be reused multiple times, for example if using `-resume`. Passed through to steps such as MultiQC and used for things like report filenames and titles."
+                },
+                "email_on_fail": {
+                    "type": "string",
+                    "description": "Email address for completion summary, only when pipeline fails.",
+                    "fa_icon": "fas fa-exclamation-triangle",
+                    "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$",
+                    "hidden": true,
+                    "help_text": "This works exactly as with `--email`, except emails are only sent if the workflow is not successful."
+                },
+                "plaintext_email": {
+                    "type": "boolean",
+                    "description": "Send plain-text email instead of HTML.",
+                    "fa_icon": "fas fa-remove-format",
+                    "hidden": true,
+                    "help_text": "Set to receive plain-text e-mails instead of HTML formatted."
+                },
+                "max_multiqc_email_size": {
+                    "type": "string",
+                    "description": "File size limit when attaching MultiQC reports to summary emails.",
+                    "default": "25.MB",
+                    "fa_icon": "fas fa-file-upload",
+                    "hidden": true,
+                    "help_text": "If file generated by pipeline exceeds the threshold, it will not be attached."
+                },
+                "monochrome_logs": {
+                    "type": "boolean",
+                    "description": "Do not use coloured log outputs.",
+                    "fa_icon": "fas fa-palette",
+                    "hidden": true,
+                    "help_text": "Set to disable colourful command line output and live life in monochrome."
+                },
+                "multiqc_config": {
+                    "type": "string",
+                    "description": "Custom config file to supply to MultiQC.",
+                    "fa_icon": "fas fa-cog",
+                    "hidden": true
+                },
+                "tracedir": {
+                    "type": "string",
+                    "description": "Directory to keep pipeline Nextflow logs and reports.",
+                    "default": "${params.outdir}/pipeline_info",
+                    "fa_icon": "fas fa-cogs",
+                    "hidden": true
+                }
+            }
+        },
+        "max_job_request_options": {
+            "title": "Max job request options",
+            "type": "object",
+            "fa_icon": "fab fa-acquisitions-incorporated",
+            "description": "Set the top limit for requested resources for any single job.",
+            "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.",
+            "properties": {
+                "max_cpus": {
+                    "type": "integer",
+                    "description": "Maximum number of CPUs that can be requested    for any single job.",
+                    "default": 16,
+                    "fa_icon": "fas fa-microchip",
+                    "hidden": true,
+                    "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`"
+                },
+                "max_memory": {
+                    "type": "string",
+                    "description": "Maximum amount of memory that can be requested for any single job.",
+                    "default": "128.GB",
+                    "fa_icon": "fas fa-memory",
+                    "hidden": true,
+                    "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`"
+                },
+                "max_time": {
+                    "type": "string",
+                    "description": "Maximum amount of time that can be requested for any single job.",
+                    "default": "240.h",
+                    "fa_icon": "far fa-clock",
+                    "hidden": true,
+                    "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`"
+                }
+            }
+        },
+        "institutional_config_options": {
+            "title": "Institutional config options",
+            "type": "object",
+            "fa_icon": "fas fa-university",
+            "description": "Parameters used to describe centralised config profiles. These should not be edited.",
+            "help_text": "The centralised nf-core configuration profiles use a handful of pipeline parameters to describe themselves. This information is then printed to the Nextflow log when you run a pipeline. You should not need to change these values when you run a pipeline.",
+            "properties": {
+                "custom_config_version": {
+                    "type": "string",
+                    "description": "Git commit id for Institutional configs.",
+                    "default": "master",
+                    "hidden": true,
+                    "fa_icon": "fas fa-users-cog",
+                    "help_text": "Provide git commit id for custom Institutional configs hosted at `nf-core/configs`. This was implemented for reproducibility purposes. Default: `master`.\n\n```bash\n## Download and use config file with following git commit id\n--custom_config_version d52db660777c4bf36546ddb188ec530c3ada1b96\n```"
+                },
+                "custom_config_base": {
+                    "type": "string",
+                    "description": "Base directory for Institutional configs.",
+                    "default": "https://raw.githubusercontent.com/nf-core/configs/master",
+                    "hidden": true,
+                    "help_text": "If you're running offline, nextflow will not be able to fetch the institutional config files from the internet. If you don't need them, then this is not a problem. If you do need them, you should download the files from the repo and tell nextflow where to find them with the `custom_config_base` option. For example:\n\n```bash\n## Download and unzip the config files\ncd /path/to/my/configs\nwget https://github.com/nf-core/configs/archive/master.zip\nunzip master.zip\n\n## Run the pipeline\ncd /path/to/my/data\nnextflow run /path/to/pipeline/ --custom_config_base /path/to/my/configs/configs-master/\n```\n\n> Note that the nf-core/tools helper package has a `download` command to download all required pipeline files + singularity containers + institutional configs in one go for you, to make this process easier.",
+                    "fa_icon": "fas fa-users-cog"
+                },
+                "hostnames": {
+                    "type": "string",
+                    "description": "Institutional configs hostname.",
+                    "hidden": true,
+                    "fa_icon": "fas fa-users-cog"
+                },
+                "config_profile_description": {
+                    "type": "string",
+                    "description": "Institutional config description.",
+                    "hidden": true,
+                    "fa_icon": "fas fa-users-cog"
+                },
+                "config_profile_contact": {
+                    "type": "string",
+                    "description": "Institutional config contact information.",
+                    "hidden": true,
+                    "fa_icon": "fas fa-users-cog"
+                },
+                "config_profile_url": {
+                    "type": "string",
+                    "description": "Institutional config URL link.",
+                    "hidden": true,
+                    "fa_icon": "fas fa-users-cog"
+                }
+            }
+        }
+    },
+    "allOf": [
+        {
+            "$ref": "#/definitions/input_output_options"
+        },
+        {
+            "$ref": "#/definitions/reference_genome_options"
+        },
+        {
+            "$ref": "#/definitions/generic_options"
+        },
+        {
+            "$ref": "#/definitions/max_job_request_options"
+        },
+        {
+            "$ref": "#/definitions/institutional_config_options"
+        }
+    ]
+}
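The schema above is the machine-readable description of the pipeline's parameters used by the nf-core tooling (linting, `nf-core launch`, parameter documentation). A hedged example of a small user config that sets a handful of the parameters it documents (all values are placeholders):

```groovy
// Placeholder values only; the same parameters can be passed on the command
// line (e.g. --input, --genome) or via a custom config supplied with -c.
params {
  input      = 'data/sample_*_{1,2}.fastq.gz'
  genome     = 'GRCh38'
  outdir     = './results'
  email      = 'user@example.com'
  max_cpus   = 8
  max_memory = 64.GB
  max_time   = 48.h
}
```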
-- 
GitLab