diff --git a/.gitignore b/.gitignore
index b3e110092eb44f1c8e7d8eb8f56375014a663202..e9709050f3768e04d2dea2777e6e555496f09dc0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,3 +18,8 @@ hs_err_pid*
 # Docker Specific
 *apt.list
 *conda.list
+nextflow
+.nextflow.log*
+.nextflow/
+work/
+results
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000000000000000000000000000000000..03648b4bcba3a59235e177281406520da2dc4239
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,117 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.4.0] - 2019-11-18
+### Added
+- Add new tools (star,...)
+- conda support at the psmn
+
+### Changed
+- configuration simplification
+- docker and singularity image download instead of local build
+- hidden directories in `src` for project clarity (only `nf_modules` is visible)
+
+### Removed
+- conda support at in2p3 with `-profile in2p3_conda`
+
+## [0.3.0] - 2019-05-23
+### Added
+- Add new tools (umi_tools, fastp,...)
+- singularity support at in2p3 with `-profile in2p3`
+- conda support at in2p3 with `-profile in2p3_conda`
+
+
+## [0.2.9] - 2019-03-26
+### Added
+- Add new tools (fastq, macs2, umitools, ...)
+- singularity support
+
+### Changed
+- every tool name is now in lowercase in each module section
+
+## [0.2.7] - 2018-10-23
+### Added
+- Add new tools (BWA, GATK, sambamba, ...)
+
+### Changed
+- `sge` profile is now called `psmn` profile to prepare tests in the CCIN2P3
+- every `psmn` config file has an updated configuration for mono or 16 cpus queues
+- update process naming to follow new nextflow format
+
+## [0.2.6] - 2018-08-23
+### Added
+- Added `src/training_dataset.nf` to build a small training dataset from NGS data
+
+### Changed
+- the structure of `src/nf_modules`: the `tests` folder was removed
+
+## [0.2.5] - 2018-08-22
+### Added
+- This fine changelog
+
+### Changed
+- the structure of `src/nf_modules`: the `tests` folder was removed
+
+
+## [0.2.4] - 2018-08-02
+### Changed
+- add `paired_id` variable in the output of every single-end data process to match the paired output
+
+
+## [0.2.3] - 2018-07-25
+### Added
+- List of tools available as nextflow, docker or sge module to the `README.md`
+
+
+## [0.2.2] - 2018-07-23
+### Added
+- SRA module from cigogne/nextflow-master 52b510e48daa1fb7
+
+
+## [0.2.1] - 2018-07-23
+### Added
+- List of tools available as nextflow, docker or sge module
+
+
+## [0.2.0] - 2018-06-18
+### Added
+- `doc/TP_computational_biologists.md`
+- Kallisto/0.44.0
+
+### Changed
+- add `paired_id` variable in the output of every paired data process
+- BEDtools: fixes for fasta handling
+- UrQt: fix git version in Docker
+
+
+## [0.1.2] - 2018-06-18
+### Added
+- `doc/tp_experimental_biologist.md` and Makefile to build the pdf
+- tests files for BEDtools
+
+### Changed
+- Kallisto: various fixes
+- UrQt: improve output and various fixes
+
+### Removed
+- `src/nf_test.config`: modules now have their own `.config`
+
+
+## [0.1.0] - 2018-05-06
+This is the first working version of the repository as a nextflow module repository
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..9f1ab7baf144858c03103531c66575de0f26d13e
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,92 @@
+# Contributing
+
+When contributing to this repository, please first discuss the change you wish to make via issue,
+email, or any other method with the owners of this repository before making a change. 
+
+Please note we have a code of conduct, please follow it in all your interactions with the project.
+
+## Pull Request Process
+
+1. Ensure any install or build dependencies are removed before the end of the layer when doing a 
+   build.
+2. Update the README.md with details of changes to the interface, this includes new environment 
+   variables, exposed ports, useful file locations and container parameters.
+3. Increase the version numbers in any examples files and the README.md to the new version that this
+   Pull Request would represent. The versioning scheme we use is [SemVer](http://semver.org/).
+4. You may merge the Pull Request in once you have the sign-off of two other developers, or if you 
+   do not have permission to do that, you may request the second reviewer to merge it for you.
+
+## Code of Conduct
+
+### Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+### Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+### Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+### Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+### Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at [INSERT EMAIL ADDRESS]. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+### Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/LICENSE b/LICENSE
index 89cf3c6a30a5bb52a2a3693849db5135045f72e0..b42d3eb74ceb1fdb897b777e0fe8311e57322cb1 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,3 +1,5 @@
+Riboflow License 
+
 MIT License
 
 Copyright (c) 2019 Ribosome Profiling
@@ -19,3 +21,524 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
+
+Nextflow License:
+
+  CeCILL FREE SOFTWARE LICENSE AGREEMENT
+
+Version 2.1 dated 2013-06-21
+
+
+    Notice
+
+This Agreement is a Free Software license agreement that is the result
+of discussions between its authors in order to ensure compliance with
+the two main principles guiding its drafting:
+
+  * firstly, compliance with the principles governing the distribution
+    of Free Software: access to source code, broad rights granted to users,
+  * secondly, the election of a governing law, French law, with which it
+    is conformant, both as regards the law of torts and intellectual
+    property law, and the protection that it offers to both authors and
+    holders of the economic rights over software.
+
+The authors of the CeCILL (for Ce[a] C[nrs] I[nria] L[ogiciel] L[ibre])
+license are:
+
+Commissariat à l'énergie atomique et aux énergies alternatives - CEA, a
+public scientific, technical and industrial research establishment,
+having its principal place of business at 25 rue Leblanc, immeuble Le
+Ponant D, 75015 Paris, France.
+
+Centre National de la Recherche Scientifique - CNRS, a public scientific
+and technological establishment, having its principal place of business
+at 3 rue Michel-Ange, 75794 Paris cedex 16, France.
+
+Institut National de Recherche en Informatique et en Automatique -
+Inria, a public scientific and technological establishment, having its
+principal place of business at Domaine de Voluceau, Rocquencourt, BP
+105, 78153 Le Chesnay cedex, France.
+
+
+    Preamble
+
+The purpose of this Free Software license agreement is to grant users
+the right to modify and redistribute the software governed by this
+license within the framework of an open source distribution model.
+
+The exercising of this right is conditional upon certain obligations for
+users so as to preserve this status for all subsequent redistributions.
+
+In consideration of access to the source code and the rights to copy,
+modify and redistribute granted by the license, users are provided only
+with a limited warranty and the software's author, the holder of the
+economic rights, and the successive licensors only have limited liability.
+
+In this respect, the risks associated with loading, using, modifying
+and/or developing or reproducing the software by the user are brought to
+the user's attention, given its Free Software status, which may make it
+complicated to use, with the result that its use is reserved for
+developers and experienced professionals having in-depth computer
+knowledge. Users are therefore encouraged to load and test the
+suitability of the software as regards their requirements in conditions
+enabling the security of their systems and/or data to be ensured and,
+more generally, to use and operate it in the same conditions of
+security. This Agreement may be freely reproduced and published,
+provided it is not altered, and that no provisions are either added or
+removed herefrom.
+
+This Agreement may apply to any or all software for which the holder of
+the economic rights decides to submit the use thereof to its provisions.
+
+Frequently asked questions can be found on the official website of the
+CeCILL licenses family (http://www.cecill.info/index.en.html) for any
+necessary clarification.
+
+
+    Article 1 - DEFINITIONS
+
+For the purpose of this Agreement, when the following expressions
+commence with a capital letter, they shall have the following meaning:
+
+Agreement: means this license agreement, and its possible subsequent
+versions and annexes.
+
+Software: means the software in its Object Code and/or Source Code form
+and, where applicable, its documentation, "as is" when the Licensee
+accepts the Agreement.
+
+Initial Software: means the Software in its Source Code and possibly its
+Object Code form and, where applicable, its documentation, "as is" when
+it is first distributed under the terms and conditions of the Agreement.
+
+Modified Software: means the Software modified by at least one
+Contribution.
+
+Source Code: means all the Software's instructions and program lines to
+which access is required so as to modify the Software.
+
+Object Code: means the binary files originating from the compilation of
+the Source Code.
+
+Holder: means the holder(s) of the economic rights over the Initial
+Software.
+
+Licensee: means the Software user(s) having accepted the Agreement.
+
+Contributor: means a Licensee having made at least one Contribution.
+
+Licensor: means the Holder, or any other individual or legal entity, who
+distributes the Software under the Agreement.
+
+Contribution: means any or all modifications, corrections, translations,
+adaptations and/or new functions integrated into the Software by any or
+all Contributors, as well as any or all Internal Modules.
+
+Module: means a set of sources files including their documentation that
+enables supplementary functions or services in addition to those offered
+by the Software.
+
+External Module: means any or all Modules, not derived from the
+Software, so that this Module and the Software run in separate address
+spaces, with one calling the other when they are run.
+
+Internal Module: means any or all Module, connected to the Software so
+that they both execute in the same address space.
+
+GNU GPL: means the GNU General Public License version 2 or any
+subsequent version, as published by the Free Software Foundation Inc.
+
+GNU Affero GPL: means the GNU Affero General Public License version 3 or
+any subsequent version, as published by the Free Software Foundation Inc.
+
+EUPL: means the European Union Public License version 1.1 or any
+subsequent version, as published by the European Commission.
+
+Parties: mean both the Licensee and the Licensor.
+
+These expressions may be used both in singular and plural form.
+
+
+    Article 2 - PURPOSE
+
+The purpose of the Agreement is the grant by the Licensor to the
+Licensee of a non-exclusive, transferable and worldwide license for the
+Software as set forth in Article 5 <#scope> hereinafter for the whole
+term of the protection granted by the rights over said Software.
+
+
+    Article 3 - ACCEPTANCE
+
+3.1 The Licensee shall be deemed as having accepted the terms and
+conditions of this Agreement upon the occurrence of the first of the
+following events:
+
+  * (i) loading the Software by any or all means, notably, by
+    downloading from a remote server, or by loading from a physical medium;
+  * (ii) the first time the Licensee exercises any of the rights granted
+    hereunder.
+
+3.2 One copy of the Agreement, containing a notice relating to the
+characteristics of the Software, to the limited warranty, and to the
+fact that its use is restricted to experienced users has been provided
+to the Licensee prior to its acceptance as set forth in Article 3.1
+<#accepting> hereinabove, and the Licensee hereby acknowledges that it
+has read and understood it.
+
+
+    Article 4 - EFFECTIVE DATE AND TERM
+
+
+      4.1 EFFECTIVE DATE
+
+The Agreement shall become effective on the date when it is accepted by
+the Licensee as set forth in Article 3.1 <#accepting>.
+
+
+      4.2 TERM
+
+The Agreement shall remain in force for the entire legal term of
+protection of the economic rights over the Software.
+
+
+    Article 5 - SCOPE OF RIGHTS GRANTED
+
+The Licensor hereby grants to the Licensee, who accepts, the following
+rights over the Software for any or all use, and for the term of the
+Agreement, on the basis of the terms and conditions set forth hereinafter.
+
+Besides, if the Licensor owns or comes to own one or more patents
+protecting all or part of the functions of the Software or of its
+components, the Licensor undertakes not to enforce the rights granted by
+these patents against successive Licensees using, exploiting or
+modifying the Software. If these patents are transferred, the Licensor
+undertakes to have the transferees subscribe to the obligations set
+forth in this paragraph.
+
+
+      5.1 RIGHT OF USE
+
+The Licensee is authorized to use the Software, without any limitation
+as to its fields of application, with it being hereinafter specified
+that this comprises:
+
+ 1. permanent or temporary reproduction of all or part of the Software
+    by any or all means and in any or all form.
+
+ 2. loading, displaying, running, or storing the Software on any or all
+    medium.
+
+ 3. entitlement to observe, study or test its operation so as to
+    determine the ideas and principles behind any or all constituent
+    elements of said Software. This shall apply when the Licensee
+    carries out any or all loading, displaying, running, transmission or
+    storage operation as regards the Software, that it is entitled to
+    carry out hereunder.
+
+
+      5.2 ENTITLEMENT TO MAKE CONTRIBUTIONS
+
+The right to make Contributions includes the right to translate, adapt,
+arrange, or make any or all modifications to the Software, and the right
+to reproduce the resulting software.
+
+The Licensee is authorized to make any or all Contributions to the
+Software provided that it includes an explicit notice that it is the
+author of said Contribution and indicates the date of the creation thereof.
+
+
+      5.3 RIGHT OF DISTRIBUTION
+
+In particular, the right of distribution includes the right to publish,
+transmit and communicate the Software to the general public on any or
+all medium, and by any or all means, and the right to market, either in
+consideration of a fee, or free of charge, one or more copies of the
+Software by any means.
+
+The Licensee is further authorized to distribute copies of the modified
+or unmodified Software to third parties according to the terms and
+conditions set forth hereinafter.
+
+
+        5.3.1 DISTRIBUTION OF SOFTWARE WITHOUT MODIFICATION
+
+The Licensee is authorized to distribute true copies of the Software in
+Source Code or Object Code form, provided that said distribution
+complies with all the provisions of the Agreement and is accompanied by:
+
+ 1. a copy of the Agreement,
+
+ 2. a notice relating to the limitation of both the Licensor's warranty
+    and liability as set forth in Articles 8 and 9,
+
+and that, in the event that only the Object Code of the Software is
+redistributed, the Licensee allows effective access to the full Source
+Code of the Software for a period of at least three years from the
+distribution of the Software, it being understood that the additional
+acquisition cost of the Source Code shall not exceed the cost of the
+data transfer.
+
+
+        5.3.2 DISTRIBUTION OF MODIFIED SOFTWARE
+
+When the Licensee makes a Contribution to the Software, the terms and
+conditions for the distribution of the resulting Modified Software
+become subject to all the provisions of this Agreement.
+
+The Licensee is authorized to distribute the Modified Software, in
+source code or object code form, provided that said distribution
+complies with all the provisions of the Agreement and is accompanied by:
+
+ 1. a copy of the Agreement,
+
+ 2. a notice relating to the limitation of both the Licensor's warranty
+    and liability as set forth in Articles 8 and 9,
+
+and, in the event that only the object code of the Modified Software is
+redistributed,
+
+ 3. a note stating the conditions of effective access to the full source
+    code of the Modified Software for a period of at least three years
+    from the distribution of the Modified Software, it being understood
+    that the additional acquisition cost of the source code shall not
+    exceed the cost of the data transfer.
+
+
+        5.3.3 DISTRIBUTION OF EXTERNAL MODULES
+
+When the Licensee has developed an External Module, the terms and
+conditions of this Agreement do not apply to said External Module, that
+may be distributed under a separate license agreement.
+
+
+        5.3.4 COMPATIBILITY WITH OTHER LICENSES
+
+The Licensee can include a code that is subject to the provisions of one
+of the versions of the GNU GPL, GNU Affero GPL and/or EUPL in the
+Modified or unmodified Software, and distribute that entire code under
+the terms of the same version of the GNU GPL, GNU Affero GPL and/or EUPL.
+
+The Licensee can include the Modified or unmodified Software in a code
+that is subject to the provisions of one of the versions of the GNU GPL,
+GNU Affero GPL and/or EUPL and distribute that entire code under the
+terms of the same version of the GNU GPL, GNU Affero GPL and/or EUPL.
+
+
+    Article 6 - INTELLECTUAL PROPERTY
+
+
+      6.1 OVER THE INITIAL SOFTWARE
+
+The Holder owns the economic rights over the Initial Software. Any or
+all use of the Initial Software is subject to compliance with the terms
+and conditions under which the Holder has elected to distribute its work
+and no one shall be entitled to modify the terms and conditions for the
+distribution of said Initial Software.
+
+The Holder undertakes that the Initial Software will remain ruled at
+least by this Agreement, for the duration set forth in Article 4.2 <#term>.
+
+
+      6.2 OVER THE CONTRIBUTIONS
+
+The Licensee who develops a Contribution is the owner of the
+intellectual property rights over this Contribution as defined by
+applicable law.
+
+
+      6.3 OVER THE EXTERNAL MODULES
+
+The Licensee who develops an External Module is the owner of the
+intellectual property rights over this External Module as defined by
+applicable law and is free to choose the type of agreement that shall
+govern its distribution.
+
+
+      6.4 JOINT PROVISIONS
+
+The Licensee expressly undertakes:
+
+ 1. not to remove, or modify, in any manner, the intellectual property
+    notices attached to the Software;
+
+ 2. to reproduce said notices, in an identical manner, in the copies of
+    the Software modified or not.
+
+The Licensee undertakes not to directly or indirectly infringe the
+intellectual property rights on the Software of the Holder and/or
+Contributors, and to take, where applicable, vis-à-vis its staff, any
+and all measures required to ensure respect of said intellectual
+property rights of the Holder and/or Contributors.
+
+
+    Article 7 - RELATED SERVICES
+
+7.1 Under no circumstances shall the Agreement oblige the Licensor to
+provide technical assistance or maintenance services for the Software.
+
+However, the Licensor is entitled to offer this type of services. The
+terms and conditions of such technical assistance, and/or such
+maintenance, shall be set forth in a separate instrument. Only the
+Licensor offering said maintenance and/or technical assistance services
+shall incur liability therefor.
+
+7.2 Similarly, any Licensor is entitled to offer to its licensees, under
+its sole responsibility, a warranty, that shall only be binding upon
+itself, for the redistribution of the Software and/or the Modified
+Software, under terms and conditions that it is free to decide. Said
+warranty, and the financial terms and conditions of its application,
+shall be subject of a separate instrument executed between the Licensor
+and the Licensee.
+
+
+    Article 8 - LIABILITY
+
+8.1 Subject to the provisions of Article 8.2, the Licensee shall be
+entitled to claim compensation for any direct loss it may have suffered
+from the Software as a result of a fault on the part of the relevant
+Licensor, subject to providing evidence thereof.
+
+8.2 The Licensor's liability is limited to the commitments made under
+this Agreement and shall not be incurred as a result of in particular:
+(i) loss due the Licensee's total or partial failure to fulfill its
+obligations, (ii) direct or consequential loss that is suffered by the
+Licensee due to the use or performance of the Software, and (iii) more
+generally, any consequential loss. In particular the Parties expressly
+agree that any or all pecuniary or business loss (i.e. loss of data,
+loss of profits, operating loss, loss of customers or orders,
+opportunity cost, any disturbance to business activities) or any or all
+legal proceedings instituted against the Licensee by a third party,
+shall constitute consequential loss and shall not provide entitlement to
+any or all compensation from the Licensor.
+
+
+    Article 9 - WARRANTY
+
+9.1 The Licensee acknowledges that the scientific and technical
+state-of-the-art when the Software was distributed did not enable all
+possible uses to be tested and verified, nor for the presence of
+possible defects to be detected. In this respect, the Licensee's
+attention has been drawn to the risks associated with loading, using,
+modifying and/or developing and reproducing the Software which are
+reserved for experienced users.
+
+The Licensee shall be responsible for verifying, by any or all means,
+the suitability of the product for its requirements, its good working
+order, and for ensuring that it shall not cause damage to either persons
+or properties.
+
+9.2 The Licensor hereby represents, in good faith, that it is entitled
+to grant all the rights over the Software (including in particular the
+rights set forth in Article 5 <#scope>).
+
+9.3 The Licensee acknowledges that the Software is supplied "as is" by
+the Licensor without any other express or tacit warranty, other than
+that provided for in Article 9.2 <#good-faith> and, in particular,
+without any warranty as to its commercial value, its secured, safe,
+innovative or relevant nature.
+
+Specifically, the Licensor does not warrant that the Software is free
+from any error, that it will operate without interruption, that it will
+be compatible with the Licensee's own equipment and software
+configuration, nor that it will meet the Licensee's requirements.
+
+9.4 The Licensor does not either expressly or tacitly warrant that the
+Software does not infringe any third party intellectual property right
+relating to a patent, software or any other property right. Therefore,
+the Licensor disclaims any and all liability towards the Licensee
+arising out of any or all proceedings for infringement that may be
+instituted in respect of the use, modification and redistribution of the
+Software. Nevertheless, should such proceedings be instituted against
+the Licensee, the Licensor shall provide it with technical and legal
+expertise for its defense. Such technical and legal expertise shall be
+decided on a case-by-case basis between the relevant Licensor and the
+Licensee pursuant to a memorandum of understanding. The Licensor
+disclaims any and all liability as regards the Licensee's use of the
+name of the Software. No warranty is given as regards the existence of
+prior rights over the name of the Software or as regards the existence
+of a trademark.
+
+
+    Article 10 - TERMINATION
+
+10.1 In the event of a breach by the Licensee of its obligations
+hereunder, the Licensor may automatically terminate this Agreement
+thirty (30) days after notice has been sent to the Licensee and has
+remained ineffective.
+
+10.2 A Licensee whose Agreement is terminated shall no longer be
+authorized to use, modify or distribute the Software. However, any
+licenses that it may have granted prior to termination of the Agreement
+shall remain valid subject to their having been granted in compliance
+with the terms and conditions hereof.
+
+
+    Article 11 - MISCELLANEOUS
+
+
+      11.1 EXCUSABLE EVENTS
+
+Neither Party shall be liable for any or all delay, or failure to
+perform the Agreement, that may be attributable to an event of force
+majeure, an act of God or an outside cause, such as defective
+functioning or interruptions of the electricity or telecommunications
+networks, network paralysis following a virus attack, intervention by
+government authorities, natural disasters, water damage, earthquakes,
+fire, explosions, strikes and labor unrest, war, etc.
+
+11.2 Any failure by either Party, on one or more occasions, to invoke
+one or more of the provisions hereof, shall under no circumstances be
+interpreted as being a waiver by the interested Party of its right to
+invoke said provision(s) subsequently.
+
+11.3 The Agreement cancels and replaces any or all previous agreements,
+whether written or oral, between the Parties and having the same
+purpose, and constitutes the entirety of the agreement between said
+Parties concerning said purpose. No supplement or modification to the
+terms and conditions hereof shall be effective as between the Parties
+unless it is made in writing and signed by their duly authorized
+representatives.
+
+11.4 In the event that one or more of the provisions hereof were to
+conflict with a current or future applicable act or legislative text,
+said act or legislative text shall prevail, and the Parties shall make
+the necessary amendments so as to comply with said act or legislative
+text. All other provisions shall remain effective. Similarly, invalidity
+of a provision of the Agreement, for any reason whatsoever, shall not
+cause the Agreement as a whole to be invalid.
+
+
+      11.5 LANGUAGE
+
+The Agreement is drafted in both French and English and both versions
+are deemed authentic.
+
+
+    Article 12 - NEW VERSIONS OF THE AGREEMENT
+
+12.1 Any person is authorized to duplicate and distribute copies of this
+Agreement.
+
+12.2 So as to ensure coherence, the wording of this Agreement is
+protected and may only be modified by the authors of the License, who
+reserve the right to periodically publish updates or new versions of the
+Agreement, each with a separate number. These subsequent versions may
+address new issues encountered by Free Software.
+
+12.3 Any Software distributed under a given version of the Agreement may
+only be subsequently distributed under the same version of the Agreement
+or a subsequent version, subject to the provisions of Article 5.3.4
+<#compatibility>.
+
+
+    Article 13 - GOVERNING LAW AND JURISDICTION
+
+13.1 The Agreement is governed by French law. The Parties agree to
+endeavor to seek an amicable solution to any disagreements or disputes
+that may arise during the performance of the Agreement.
+
+13.2 Failing an amicable solution within two (2) months as from their
+occurrence, and unless emergency proceedings are necessary, the
+disagreements or disputes shall be referred to the Paris Courts having
+jurisdiction, by the more diligent Party.
diff --git a/README.md b/README.md
index 89d0b74312f9c8cb0ff946362c202e6ef2d54fa9..72f5e326691cc93823d2f993e8230259117f378b 100644
--- a/README.md
+++ b/README.md
@@ -177,3 +177,46 @@ Also turn set the metadata flag to true, in the project file:
 
 Metadata will be stored in the output ribo file.
 
+# nextflow pipeline
+
+This repository is a template and a library repository to help you build nextflow pipelines.
+You can fork this repository to build your own pipeline.
+To get the latest commits from this repository into your fork, use the following commands:
+
+```sh
+git remote add upstream gitlab_lbmc:pipelines/nextflow.git
+git pull upstream master
+```
+**If you created your `.config` file before version `0.4.0` you need to run the script `src/.update_config.sh` to use the latest docker, singularity and conda configuration (don't forget to check your config files afterward for typos).**
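+
+For example, to apply the update (run from the repository root; review the script before running it):
+
+```sh
+sh src/.update_config.sh
+```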
+
+## Getting Started
+
+These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. See deployment for notes on how to deploy the project on a live system.
+
+[you can follow them here.](doc/getting_started.md)
+
+## Available tools
+
+[The list of available tools.](doc/available_tools.md)
+
+## Projects using nextflow
+
+[A list of projects using nextflow at the LBMC.](doc/nf_projects.md)
+
+## Contributing
+
+Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us.
+
+## Versioning
+
+We use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tags).
+
+## Authors
+
+* **Laurent Modolo** - *Initial work*
+
+See also the list of [contributors](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/graphs/master) who participated in this project.
+
+## License
+
+This project is licensed under the CeCILL License; see the [LICENSE](LICENSE) file for details.
diff --git a/bin/.gitignore b/bin/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..72e8ffc0db8aad71a934dd11e5968bd5109e54b4
--- /dev/null
+++ b/bin/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/data/.gitignore b/data/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..72e8ffc0db8aad71a934dd11e5968bd5109e54b4
--- /dev/null
+++ b/data/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/doc/.gitignore b/doc/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..a1363379944a5745ceb49c0e493d80eb9335c79a
--- /dev/null
+++ b/doc/.gitignore
@@ -0,0 +1 @@
+*.pdf
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..3b34e9e63f3e6f520041a2b6369af61a19c1efd2
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,13 @@
+all: TP_experimental_biologists.pdf TP_computational_biologists.pdf ../public/TP_experimental_biologists.html ../public/TP_computational_biologists.html
+
+../public/TP_experimental_biologists.html: TP_experimental_biologists.md
+	pandoc -s TP_experimental_biologists.md -o ../public/TP_experimental_biologists.html
+
+../public/TP_computational_biologists.html: TP_computational_biologists.md
+	pandoc -s TP_computational_biologists.md -o ../public/TP_computational_biologists.html
+
+TP_experimental_biologists.pdf: TP_experimental_biologists.md
+	R -e 'require(rmarkdown); rmarkdown::render("TP_experimental_biologists.md")'
+
+TP_computational_biologists.pdf: TP_computational_biologists.md
+	R -e 'require(rmarkdown); rmarkdown::render("TP_computational_biologists.md")'
diff --git a/doc/TP_computational_biologists.md b/doc/TP_computational_biologists.md
new file mode 100644
index 0000000000000000000000000000000000000000..695a16ec3855102584ce48b1c63787213bae99d9
--- /dev/null
+++ b/doc/TP_computational_biologists.md
@@ -0,0 +1,182 @@
+---
+title: "TP for computational biologists"
+author: Laurent Modolo [laurent.modolo@ens-lyon.fr](mailto:laurent.modolo@ens-lyon.fr)
+date: 20 Jun 2018
+output:
+  pdf_document:
+    toc: true
+    toc_depth: 3
+    number_sections: true
+    highlight: tango
+    latex_engine: xelatex
+---
+
+The goal of this practical is to learn how to *wrap* tools in [Docker](https://www.docker.com/what-docker) or [Environment Module](http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:modules) to make them available to nextflow on a personal computer or at the [PSMN](http://www.ens-lyon.fr/PSMN/doku.php).
+
+Here we assume that you followed the [TP for experimental biologists](./TP_experimental_biologists.md), and that you know the basics of [Docker containers](https://www.docker.com/what-container) and [Environment Module](http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:modules). We are also going to assume that you know how to build and use a nextflow pipeline from the template [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow).
+
+For the practical you can either work with the WebIDE of Gitlab, or locally as described in the [git: basis formation](https://gitlab.biologie.ens-lyon.fr/formations/git_basis).
+
+# Docker
+
+To run a tool within a [Docker container](https://www.docker.com/what-container) you need to write a `Dockerfile`.
+
+The [`Dockerfile`](./src/docker_modules/kallisto/0.44.0/Dockerfile) files are found in the [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow) project under `src/docker_modules/`. Each [`Dockerfile`](./src/docker_modules/kallisto/0.44.0/Dockerfile) is paired with a [`docker_init.sh`](./src/docker_modules/kallisto/0.44.0/docker_init.sh) file, as in the following example for `Kallisto` version `0.43.1`:
+
+```sh
+$ ls -l src/docker_modules/kallisto/0.43.1/
+total 16K
+drwxr-xr-x 2 laurent users 4.0K Jun 5 19:06 ./
+drwxr-xr-x 3 laurent users 4.0K Jun 6 09:49 ../
+-rw-r--r-- 1 laurent users  587 Jun  5 19:06 Dockerfile
+-rwxr-xr-x 1 laurent users 79 Jun 5 19:06 docker_init.sh*
+```
+
+## [`docker_init.sh`](./src/docker_modules/kallisto/0.44.0/docker_init.sh)
+The [`docker_init.sh`](./src/docker_modules/kallisto/0.44.0/docker_init.sh) file is a simple sh script with executable rights (`chmod +x`). By executing this script, the user creates a [Docker container](https://www.docker.com/what-container) with the tool installed at a specific version. You can use the [`docker_init.sh`](./src/docker_modules/kallisto/0.44.0/docker_init.sh) file of any implemented tool as a template.
+
+Remember that the name of the [container](https://www.docker.com/what-container) must be in lower case and in the format `<tool_name>:<version>`.
+For tools without a version number you can use a commit hash instead.
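+
+Such a script typically boils down to a single `docker build` call. A minimal sketch (the actual scripts in the repository may do more):
+
+```sh
+#!/bin/sh
+# build the image from the Dockerfile in this directory and
+# tag it following the <tool_name>:<version> convention
+docker build src/docker_modules/kallisto/0.44.0 -t 'kallisto:0.44.0'
+```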
+
+## [`Dockerfile`](./src/docker_modules/kallisto/0.44.0/Dockerfile)
+
+The recipe to wrap your tool in a [Docker container](https://www.docker.com/what-container) is written in a [`Dockerfile`](./src/docker_modules/kallisto/0.44.0/Dockerfile) file.
+
+For `Kallisto` version `0.44.0` the header of the `Dockerfile` is:
+
+```Docker
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV KALLISTO_VERSION=0.44.0
+```
+
+The `FROM` instruction means that the [container](https://www.docker.com/what-container) is initialized from a bare installation of Ubuntu 18.04. You can check the versions of Ubuntu available [here](https://hub.docker.com/_/ubuntu/), or other operating systems like [debian](https://hub.docker.com/_/debian/) or [worse](https://hub.docker.com/r/microsoft/windowsservercore/).
+
+Then we declare the *maintainer* of the container, before declaring an environment variable named `KALLISTO_VERSION`, which contains the version of the wrapped tool. This bash variable will be declared for the root user within the [container](https://www.docker.com/what-container).
+
+You should always declare a variable `TOOLSNAME_VERSION` that contains the version number or commit hash of the tool you wrap. In simple cases, you just have to modify this line to create a new `Dockerfile` for another version of the tool.
+
+The following lines of the [`Dockerfile`](./src/docker_modules/kallisto/0.44.0/Dockerfile) are a succession of `bash` commands executed as the **root** user within the container.
+Each `RUN` block is run sequentially by `Docker`. If there is an error or a modification in a `RUN` block, only this block and the following `RUN` blocks will be re-executed; the previous ones are taken from the cache.
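+
+For instance, the rest of the `Dockerfile` could look like the following sketch (an illustration only, assuming installation from the official pre-built release archive; the real file may differ):
+
+```Docker
+RUN apt-get update && apt-get install -y wget && \
+    # download the release matching KALLISTO_VERSION
+    wget https://github.com/pachterlab/kallisto/releases/download/v${KALLISTO_VERSION}/kallisto_linux-v${KALLISTO_VERSION}.tar.gz && \
+    tar -xzf kallisto_linux-v${KALLISTO_VERSION}.tar.gz && \
+    # put the binary on the PATH
+    mv kallisto_linux-v${KALLISTO_VERSION}/kallisto /usr/local/bin/
+```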
+
+You can learn more about the building of Docker containers [here](https://docs.docker.com/engine/reference/builder/#usage).
+
+When you write your [`Dockerfile`](./src/docker_modules/kallisto/0.44.0/Dockerfile), instead of launching the [`docker_init.sh`](./src/docker_modules/kallisto/0.44.0/docker_init.sh) script many times to test your [container](https://www.docker.com/what-container), you can connect to a base container in interactive mode and test your commands there.
+
+```sh
+docker run -it ubuntu:18.04 bash
+KALLISTO_VERSION=0.44.0
+```
+
+# SGE / [PSMN](http://www.ens-lyon.fr/PSMN/doku.php)
+
+To easily run tools on the [PSMN](http://www.ens-lyon.fr/PSMN/doku.php), you need to build your own [Environment Module](http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:modules).
+
+You can read the Contributing guide for the [PSMN/modules](https://gitlab.biologie.ens-lyon.fr/PSMN/modules) project [here](https://gitlab.biologie.ens-lyon.fr/PSMN/modules/blob/master/CONTRIBUTING.md).
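+
+Once your module is built and published, loading and using it from a shell on the PSMN looks like this (the same commands as the `beforeScript` lines used later in this document):
+
+```sh
+source /usr/share/lmod/lmod/init/bash
+module use ~/privatemodules
+module load Kallisto/0.44.0
+kallisto version
+```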
+
+# Nextflow
+
+The last step to wrap your tool is to make it available in nextflow. For this you need to create at least 4 files, like the following for Kallisto version `0.44.0`:
+
+```sh
+ls -lR src/nf_modules/kallisto
+src/nf_modules/kallisto/:
+total 12
+-rw-r--r-- 1 laurent users 551 Jun 18 17:14 index.nf
+-rw-r--r-- 1 laurent users 901 Jun 18 17:14 mapping_paired.nf
+-rw-r--r-- 1 laurent users 1037 Jun 18 17:14 mapping_single.nf
+-rwxr-xr-x 1 laurent users 627 Jun 18 17:14 tests.sh*
+```
+
+The [`.config` files](./src/nf_modules/kallisto/) contain the instructions for two profiles: `psmn` and `docker`.
+The [`.nf` files](./src/nf_modules/kallisto/) contain the nextflow processes to use `Kallisto`.
+
+The [`tests/tests.sh`](./src/nf_modules/kallisto/tests/tests.sh) script (with executable rights) contains a series of nextflow calls on the other `.nf` files of the folder. Those tests correspond to executions of the `*.nf` files present in the [`kallisto` folder](./src/nf_modules/kallisto/) on the [LBMC/tiny_dataset](https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset) dataset with the `docker` profile. You can read the *Running the tests* section of the [README.md](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md) for more details.
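+
+Such a test script is essentially a list of nextflow invocations, one per `.nf` file. A hypothetical sketch (paths and options are assumptions based on this section):
+
+```sh
+./nextflow src/nf_modules/kallisto/index.nf \
+  -c src/nf_modules/kallisto/kallisto.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/*.fasta"
+```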
+
+## [`kallisto.config`](./src/nf_modules/kallisto/)
+
+The `.config` file defines the configuration to apply to your process depending on the value of the `-profile` option. You must define a configuration for at least the `psmn` and `docker` profiles.
+
+```Groovy
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+    }
+  }
+  psmn {
+    process {
+    }
+  }
+}
+```
+
+### `docker` profile
+
+The `docker` profile starts by enabling docker for the whole pipeline. After that, you only have to define the container name for each process.
+For example, for `Kallisto` version `0.44.0`, we have:
+
+```Groovy
+process {
+  withName: index_fasta {
+    container = "kallisto:0.44.0"
+  }
+  withName: mapping_fastq {
+    container = "kallisto:0.44.0"
+  }
+}
+```
+
+### `psmn` profile
+
+The `psmn` profile defines, for each process, all the information necessary to launch it on a given queue with SGE at the [PSMN](http://www.ens-lyon.fr/PSMN/doku.php).
+For example, for `Kallisto`, we have:
+
+```Groovy
+process {
+  withName: index_fasta {
+    beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+    module = "Kallisto/0.44.0"
+    executor = "sge"
+    cpus = 16
+    memory = "30GB"
+    time = "24h"
+    queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+    penv = 'openmp16'
+  }
+  withName: mapping_fastq {
+    beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+    module = "Kallisto/0.44.0"
+    executor = "sge"
+    cpus = 16
+    memory = "30GB"
+    time = "24h"
+    queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+    penv = 'openmp16'
+  }
+}
+```
+
+The `beforeScript` directive is executed before the main script of the corresponding process.
+
+## [`kallisto.nf`](./src/nf_modules/kallisto/kallisto.nf)
+
+The [`kallisto.nf`](./src/nf_modules/kallisto/kallisto.nf) file contains examples of nextflow processes that execute Kallisto.
+
+- Each example must be usable as is, so it can be incorporated in a nextflow pipeline.
+- You need to define default values for the parameters passed to the process.
+- Input and output must be clearly defined.
+- Your process should be usable as a starting process or as a process retrieving the output of another process.
+
+For more information on processes and channels, you can check the [nextflow documentation](https://www.nextflow.io/docs/latest/index.html).
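+
+Putting these rules together, a process following them could look like the sketch below (a hypothetical illustration using the DSL1 syntax shown elsewhere in this document; the actual `kallisto.nf` may differ):
+
+```Groovy
+// default value, can be overridden with --fasta on the command line
+params.fasta = "data/fasta/*.fasta"
+
+Channel
+  .fromPath( params.fasta )
+  .set { fasta_file }
+
+process index_fasta {
+  input:
+  file fasta from fasta_file
+
+  output:
+  file "*.index" into index_file
+
+  script:
+"""
+kallisto index -i ${fasta.baseName}.index ${fasta}
+"""
+}
+```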
+
+## Making your wrapper available to the LBMC
+
+To make your module available to the LBMC, you must have a `tests.sh` script and one or more `docker_init.sh` scripts working without errors.
+All the processes in your `.nf` must be covered by the tests.
+
+After pushing your modifications to your forked repository, you can make a Merge Request to the [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow) **dev** branch, where it will be tested and integrated into the **master** branch.
+
+You can read more on this process [here](https://guides.github.com/introduction/flow/).
+
diff --git a/doc/TP_experimental_biologists.md b/doc/TP_experimental_biologists.md
new file mode 100644
index 0000000000000000000000000000000000000000..1a2f3857c24702c4fa4430a97d51658f834fdea8
--- /dev/null
+++ b/doc/TP_experimental_biologists.md
@@ -0,0 +1,435 @@
+---
+title: "TP for experimental biologists"
+author: Laurent Modolo [laurent.modolo@ens-lyon.fr](mailto:laurent.modolo@ens-lyon.fr)
+date: 6 Jun 2018
+output:
+  pdf_document:
+    toc: true
+    toc_depth: 3
+    number_sections: true
+    highlight: tango
+    latex_engine: xelatex
+---
+
+The goal of this practical is to learn how to build your own pipeline with nextflow, using the tools already *wrapped*.
+For this we are going to build a small RNASeq analysis pipeline that should run the following steps:
+
+- remove Illumina adaptors
+- trim reads by quality
+- build the index of a reference genome
+- estimate the amount of RNA fragments mapping to the transcripts of this genome
+
+# Initialize your own project
+
+You are going to build a pipeline for you or your team. So the first step is to create your own project.
+
+## Forking
+
+Instead of reinventing the wheel, you can use the [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow) as a template.
+To easily do so, go to the [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow) repository and click on the [**fork**](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/forks/new) button.
+
+![fork button](img/fork.png)
+
+In git, the [action of forking](https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project) means that you are going to make your own private copy of a repository. You can then write modifications in your project, and if they are of interest for the source repository (here [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow)) create a merge request. Merge requests are sent to the source repository to ask the maintainers to integrate modifications.
+
+![merge request button](img/merge_request.png)
+
+## Project organisation
+
+This project (and yours) follows the [guide of good practices for the LBMC](http://www.ens-lyon.fr/LBMC/intranet/services-communs/pole-bioinformatique/ressources/good_practice_LBMC).
+
+You are now on the main page of your fork of the [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow) repository. You can explore this project; all the code in it is under the CeCILL licence (see the [LICENCE](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/LICENSE) file).
+
+The [README.md](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md) file contains instructions to run your pipeline and test its installation.
+
+The [CONTRIBUTING.md](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/CONTRIBUTING.md) file contains guidelines if you want to contribute to the [pipelines/nextflow](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow) (making a merge request for example).
+
+The [data](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/data) folder will be the place where you store the raw data for your analysis.
+The [results](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/results) folder will be the place where you store the results of your analysis.
+
+> **The content of `data` and `results` folders should never be saved on git.**
+
+The [doc](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/doc) folder contains the documentation of this practical course.
+
+And most interestingly for you, the [src](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/src) folder contains the code to wrap tools. It contains three subdirectories: a `docker_modules`, a `nf_modules` and a `psmn_modules` folder.
+
+### `docker_modules`
+
+The `src/docker_modules` folder contains the code to wrap tools in [Docker](https://www.docker.com/what-docker). [Docker](https://www.docker.com/what-docker) is a framework that allows you to execute software within [containers](https://www.docker.com/what-container). The `docker_modules` folder contains directories corresponding to tools, with subdirectories corresponding to their versions.
+
+```sh
+ls -l src/docker_modules/
+drwxr-xr-x  3 laurent _lpoperator   96 May 25 15:42 bedtools/
+drwxr-xr-x  4 laurent _lpoperator  128 Jun 5 16:14 bowtie2/
+drwxr-xr-x  3 laurent _lpoperator   96 May 25 15:42 fastqc/
+drwxr-xr-x  4 laurent _lpoperator  128 Jun 5 16:14 htseq/
+```
+
+To each `tool/version` pair correspond two files:
+
+```sh
+ls -l src/docker_modules/bowtie2/2.3.4.1/
+-rw-r--r-- 1 laurent _lpoperator  283 Jun  5 15:07 Dockerfile
+-rwxr-xr-x  1 laurent _lpoperator   79 Jun 5 16:18 docker_init.sh*
+```
+
+The `Dockerfile` is the [Docker](https://www.docker.com/what-docker) recipe to create a [container](https://www.docker.com/what-container) containing `Bowtie2` in version `2.3.4.1`, and the `docker_init.sh` file is a small script to create the [container](https://www.docker.com/what-container) from this recipe.
+
+By running this script you will be able to easily install tools in different versions on your personal computer and use them in your pipeline (see the build example after the list below). Some of the advantages are:
+
+- Whatever the computer, the installation and the results will be the same
+- You can keep [containers](https://www.docker.com/what-container) for old versions of tools and run them on new systems (science = reproducibility)
+- You don’t have to bother with tedious installation procedures: somebody else already did the job and wrote a `Dockerfile`.
+- You can easily keep [containers](https://www.docker.com/what-container) for different versions of the same tool.
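+
+For example, to build the `Bowtie2` container listed above (run from the repository root):
+
+```sh
+./src/docker_modules/bowtie2/2.3.4.1/docker_init.sh
+# the new image should now appear in the local image list
+docker images | grep bowtie2
+```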
+
+### `psmn_modules`
+
+The `src/psmn_modules` folder is not really there. It’s a submodule of the project [PSMN/modules](https://gitlab.biologie.ens-lyon.fr/PSMN/modules). To populate it locally you can use the following commands:
+
+```sh
+git submodule init
+git submodule update
+```
+
+Like `src/docker_modules`, the [PSMN/modules](https://gitlab.biologie.ens-lyon.fr/PSMN/modules) project describes recipes to install tools and use them. The main difference is that you cannot use [Docker](https://www.docker.com/what-docker) on the PSMN. Instead, you have to use another framework, [Environment Module](http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:modules), which allows you to load modules for specific tools and versions.
+The [README.md](https://gitlab.biologie.ens-lyon.fr/PSMN/modules/blob/master/README.md) file of the [PSMN/modules](https://gitlab.biologie.ens-lyon.fr/PSMN/modules) repository contains all the instructions to load the modules maintained by the LBMC and present in the [PSMN/modules](https://gitlab.biologie.ens-lyon.fr/PSMN/modules) repository.
+
+### `nf_modules`
+
+The `src/nf_modules` folder contains templates of [nextflow](https://www.nextflow.io/) wrappers for the tools available in [Docker](https://www.docker.com/what-docker) and [psmn](http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:psmn). The details of the [nextflow](https://www.nextflow.io/) wrapper will be presented in the next section. Alongside the `.nf` and `.config` files, there is a `tests.sh` script to run tests on the tool.
+
+# Nextflow pipeline
+
+A pipeline is a succession of **processes**. Each process has data input(s) and optional data output(s). Data flows are modeled as **channels**.
+
+## Processes
+
+Here is an example of **process**:
+
+```Groovy
+process sample_fasta {
+  input:
+  file fasta from fasta_file
+
+  output:
+  file "sample.fasta" into fasta_sample
+
+  script:
+"""
+head ${fasta} > sample.fasta
+"""
+}
+```
+
+We have the process `sample_fasta` that takes a `fasta_file` **channel** as input and outputs into a `fasta_sample` **channel**. The process itself is defined in the `script:` block, within the `"""`.
+
+```Groovy
+  input:
+  file fasta from fasta_file
+```
+
+When we zoom in on the `input:` block, we see that we define a variable `fasta` of type `file` from the `fasta_file` **channel**. This means that nextflow is going to write a file, named after the content of the variable `fasta`, in the root of the folder where `script:` is executed.
+
+
+```Groovy
+  output:
+  file "sample.fasta" into fasta_sample
+```
+
+At the end of the script, a file named `sample.fasta` is found in the root of the folder where `script:` is executed and sent into the **channel** `fasta_sample`.
+
+Using the WebIDE of Gitlab, create a file `src/fasta_sampler.nf` with this process and commit it to your repository.
+
+![webide](img/webide.png)
+
+## Channels
+
+Why bother with channels? In the above example, the advantages of channels are not really clear. We could have just given the `fasta` file to the process. But what if we have many fasta files to process? What if we have sub processes to run on each of the sampled fasta files? Nextflow can easily deal with these problems with the help of channels.
+
+> **Channels** are streams of items that are emitted by a source and consumed by a process. A process with a channel as input will be run on every item sent through the channel.
+
+```Groovy
+Channel
+  .fromPath( "data/tiny_dataset/fasta/*.fasta" )
+  .set { fasta_file }
+```
+
+Here we defined the channel `fasta_file` that is going to send every fasta file from the folder `data/tiny_dataset/fasta/` into the process that takes it as input.
+
+Add the definition of the channel to the `src/fasta_sampler.nf` file and commit it to your repository.
+
+
+## Run your pipeline locally
+
+After writing this first pipeline, you may want to test it. To do that, first clone your repository. To easily do that, set the visibility level to *public* in the Settings/General/Permissions page of your project.
+
+You can then run the following commands to download your project on your computer:
+
+If you are on a PSMN computer:
+
+```sh
+pip install cutadapt==1.14
+PATH="/scratch/lmodolo/:$PATH"
+git config --global http.sslVerify false
+```
+
+and then :
+
+> Don't forget to replace *https://gitlab.biologie.ens-lyon.fr/* by *gitlab_lbmc* if you are using your own computer
+
+```sh
+git clone https://gitlab.biologie.ens-lyon.fr/<usr_name>/nextflow.git
+cd nextflow
+src/install_nextflow.sh
+```
+
+We also need data to run our pipeline:
+
+```sh
+cd data
+git clone https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git
+cd ..
+```
+
+We can run our pipeline with the following command:
+
+```sh
+./nextflow src/fasta_sampler.nf
+```
+
+## Getting your results
+
+Our pipeline seems to work, but we don’t know where the `sample.fasta` file is. To get results out of a process, we need to tell nextflow to write them somewhere (we may not need to keep every intermediate file in our results).
+
+To do that we need to add the following line before the `input:` section:
+
+```Groovy
+publishDir "results/sampling/", mode: 'copy'
+```
+
+Every file described in the `output:` section will be copied by nextflow to the folder `results/sampling/`.
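+
+For reference, the `sample_fasta` process with this directive added looks like:
+
+```Groovy
+process sample_fasta {
+  publishDir "results/sampling/", mode: 'copy'
+
+  input:
+  file fasta from fasta_file
+
+  output:
+  file "sample.fasta" into fasta_sample
+
+  script:
+"""
+head ${fasta} > sample.fasta
+"""
+}
+```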
+
+Add this to your `src/fasta_sampler.nf` file with the WebIDE and commit to your repository.
+Pull your modifications locally with the command:
+
+```sh
+git pull origin master
+```
+
+You can run your pipeline again and check the content of the folder `results/sampling`.
+
+## Fasta everywhere
+
+We ran our pipeline on one fasta file. How would nextflow handle 100 of them? To test that, we need to duplicate the `tiny_v2.fasta` file:
+
+```sh
+for i in {1..100}
+do
+cp data/tiny_dataset/fasta/tiny_v2.fasta data/tiny_dataset/fasta/tiny_v2_${i}.fasta
+done
+```
+
+You can run your pipeline again and check the content of the folder `results/sampling`.
+
+Every `fasta_sampler` process writes a `sample.fasta` file. We need to make the name of the output file depend on the name of the input file.
+
+```Groovy
+  output:
+  file "*_sample.fasta" into fasta_sample
+
+  script:
+"""
+head ${fasta} > ${fasta.baseName}_sample.fasta
+"""
+```
+
+Add this to your `src/fasta_sampler.nf` file with the WebIDE and commit it to your repository before pulling your modifications locally.
+You can run your pipeline again and check the content of the folder `results/sampling`.
+
+# Build your own RNASeq pipeline
+
+In this section you are going to build your own pipeline for RNASeq analysis from the code available in the `src/nf_modules` folder.
+
+## Create your Docker containers
+
+For this practical, we are going to need the following tools:
+
+- For Illumina adaptor removal: cutadapt
+- For reads trimming by quality: UrQt
+- For mapping and quantifying reads: BEDtools and Kallisto
+
+To initialize these tools, follow the **Installing** section of the [README.md](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md) file.
+
+**If you are using a CBP computer, don’t forget to clean up your docker containers at the end of the practical with the following commands:**
+
+```sh
+docker rm $(docker stop $(docker ps -aq))
+docker rmi $(docker images -qf "dangling=true")
+```
+
+## Cutadapt
+
+The first step of the pipeline is to remove any Illumina adaptors left in your read files.
+
+Open the WebIDE and create a `src/RNASeq.nf` file. Browse to [src/nf_modules/cutadapt/adaptor_removal_paired.nf](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/cutadapt/adaptor_removal_paired.nf); this file contains examples for cutadapt. We are interested in the *Illumina adaptor removal*, *for paired-end data* section of the code. Copy this code into your pipeline and commit it.
+
+Compared to before, we have a few new lines:
+
+```Groovy
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+```
+
+We declare a variable that contains the path of the fastq files to look for. The advantage of using `params.fastq` is that `--fastq` is now a parameter of your pipeline: if the option is not given on the command line, the default value above is used.
+Thus, you can call your pipeline with the `--fastq` option:
+
+```sh
+./nextflow src/RNASeq.nf --fastq "data/tiny_dataset/fastq/*_R{1,2}.fastq"
+```
+
+```Groovy
+log.info "fastq files: ${params.fastq}"
+```
+
+This line simply displays the value of the variable.
+
+```Groovy
+Channel
+  .fromFilePairs( params.fastq )
+```
+
+As we are working with paired-end RNASeq data, we tell nextflow to send pairs of fastq files into the `fastq_files` channel.
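+
+Each item sent by `fromFilePairs` is a tuple made of a pair identifier and the two matching fastq files, which is why the processes below read it with `set pair_id, file(reads)`. The channel definition ends with a `.set` call naming the channel (here `fastq_files`, the name used by the processes below):
+
+```Groovy
+Channel
+  .fromFilePairs( params.fastq )
+  .set { fastq_files }
+```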
+
+
+### cutadapt.config
+
+For the `fasta_sampler.nf` pipeline we used the command `head`, present on most base UNIX systems. Here we want to use `cutadapt`, which is not. Therefore, we have three main options:
+
+- install cutadapt locally so nextflow can use it
+- launch the process in a Docker container that has cutadapt installed
+- launch the process with psmn while loading the correct module to have cutadapt available
+
+We are not going to use the first option: it requires no configuration for nextflow but tedious tool installations. Instead, we are going to use existing *wrappers* and tell nextflow about them. This is what the [src/nf_modules/cutadapt/adaptor_removal_paired.config](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/cutadapt/adaptor_removal_paired.config) file is for.
+
+Copy the content of this config file to a `src/RNASeq.config` file. This file is structured in process blocks. Here we are only interested in configuring the `adaptor_removal` process, not the `trimming` process. So you can remove the `trimming` block and commit the file.
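+
+As a rough sketch, such a config file looks like this (the selector and container name below are illustrative placeholders; use the values from the linked file):
+
+```Groovy
+profiles {
+  docker {
+    docker.enabled = true
+    process {
+      withName: adaptor_removal {
+        // placeholder image name, see the linked config for the real one
+        container = "cutadapt:1.14"
+      }
+    }
+  }
+}
+```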
+
+You can test your pipeline with the following command:
+
+```sh
+./nextflow src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq "data/tiny_dataset/fastq/*_R{1,2}.fastq"
+```
+
+
+## UrQt
+
+The second step of the pipeline is to trim reads by quality.
+
+Browse for [src/nf_modules/urqt/trimming_paired.nf](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/urqt/trimming_paired.nf), this file contains examples for UrQt. We are interested in the *for paired-end data* section of the code. Copy the process section code in your pipeline and commit it.
+
+This code won’t work if you try to run it: the `fastq_files` channel is already consumed by the `adaptor_removal` process. In nextflow, once a channel is used by a process, it ceases to exist. Moreover, we don’t want to trim the input fastq files, we want to trim the fastq files that come from the `adaptor_removal` process.
+
+Therefore, you need to change the line:
+
+```Groovy
+set pair_id, file(reads) from fastq_files
+```
+
+In the `trimming` process to:
+
+```Groovy
+set pair_id, file(reads) from fastq_files_cut
+```
+
+The two processes are now connected by the channel `fastq_files_cut`.
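+
+For this to work, the `adaptor_removal` process must send its results into that channel, which its `output:` block does with an `into fastq_files_cut` clause (the file name pattern below is illustrative; keep the one from the module file):
+
+```Groovy
+output:
+set pair_id, file("*_cut_R{1,2}.fastq") into fastq_files_cut
+```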
+
+Add the content of the [src/nf_modules/urqt/trimming_paired.config](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/urqt/trimming_paired.config) file to your `src/RNASeq.config` file and commit it.
+
+You can test your pipeline.
+
+## BEDtools
+
+Kallisto needs the sequences of the transcripts to quantify. We are going to extract these sequences from the reference `data/tiny_dataset/fasta/tiny_v2.fasta` with the `bed` annotation `data/tiny_dataset/annot/tiny.bed`.
+
+You can copy to your `src/RNASeq.nf` file the content of [src/nf_modules/bedtools/fasta_from_bed.nf](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/bedtools/fasta_from_bed.nf) and to your `src/RNASeq.config` file the content of [src/nf_modules/bedtools/fasta_from_bed.config](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/bedtools/fasta_from_bed.config).
+
+Commit your work and test your pipeline with the following command:
+
+```sh
+./nextflow src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq "data/tiny_dataset/fastq/*_R{1,2}.fastq" --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" --bed "data/tiny_dataset/annot/tiny.bed"
+```
+
+## Kallisto
+
+Kallisto runs in two steps: indexing the reference and quantifying the reads against this index.
+
+You can copy to your `src/RNASeq.nf` file the content of the files [src/nf_modules/kallisto/indexing.nf](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/indexing.nf) and [src/nf_modules/kallisto/mapping_paired.nf](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/mapping_paired.nf). You can add to your `src/RNASeq.config` file the content of the files [src/nf_modules/kallisto/indexing.config](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/indexing.config) and [src/nf_modules/kallisto/mapping_paired.config](https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/mapping_paired.config).
+
+We are going to work with paired-end data, so only copy the relevant processes. The `index_fasta` process needs to take as input the output of your `fasta_from_bed` process. Your `mapping_fastq` process needs to take as input both the output of your `index_fasta` process and the output of the `trimming` process.
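+
+The input block of your `mapping_fastq` process should therefore look like this (a sketch: `index_files` and `fastq_files_trim` stand for the output channels of your `index_fasta` and `trimming` processes; keep the names used in your own files):
+
+```Groovy
+input:
+file index from index_files
+set pair_id, file(reads) from fastq_files_trim
+```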
+
+Commit your work and test your pipeline.
+You now have an RNASeq analysis pipeline that can run locally with Docker!
+
+
+## Additional nextflow options
+
+With nextflow you can resume the computation of a pipeline and get a trace of its execution with the following options:
+
+```sh
+ -resume -with-dag results/RNASeq_dag.pdf -with-timeline results/RNASeq_timeline
+```
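+
+For example, to rerun the previous pipeline without recomputing the cached steps and to write an execution report:
+
+```sh
+./nextflow src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq "data/tiny_dataset/fastq/*_R{1,2}.fastq" --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" --bed "data/tiny_dataset/annot/tiny.bed" -resume -with-dag results/RNASeq_dag.pdf -with-timeline results/RNASeq_timeline
+```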
+
+# Run your RNASeq pipeline on the PSMN
+
+First you need to connect to the PSMN:
+
+```sh
+ssh <login>@allo-psmn
+```
+
+Then, once connected to `allo-psmn`, you can connect to `e5-2667v4comp1`:
+
+```sh
+ssh <login>@e5-2667v4comp1
+```
+
+## Set your environment
+
+Make the LBMC modules available to you:
+
+```sh
+ln -s /Xnfs/lbmcdb/common/modules/modulefiles ~/privatemodules
+echo "module use ~/privatemodules" >> .bashrc
+```
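+
+You can check that the LBMC modules are now visible (after opening a new shell or sourcing your `.bashrc`) with:
+
+```sh
+module avail
+```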
+
+Create and go to your `scratch` folder:
+
+```sh
+mkdir -p /scratch/<login>
+cd /scratch/<login>
+```
+
+Then you need to clone your pipeline and get the data:
+
+```sh
+git config --global http.sslVerify false
+git clone https://gitlab.biologie.ens-lyon.fr/<usr_name>/nextflow.git
+cd nextflow/data
+git clone https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git
+cd ..
+```
+
+## Run nextflow
+
+As we don’t want nextflow to be killed in case of disconnection, we start by launching `tmux`. In case of disconnection, you can restore your session with the command `tmux a`.
+
+```sh
+tmux
+module load nextflow/0.28.2
+nextflow src/RNASeq.nf -c src/RNASeq.config -profile psmn --fastq "data/tiny_dataset/fastq/*_R{1,2}.fastq" --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" --bed "data/tiny_dataset/annot/tiny.bed" -w /scratch/<login>
+```
+
+To use the scratch for nextflow computations, add the option (already included in the command above):
+
+```sh
+-w /scratch/<login>
+```
+
+You just ran your pipeline on the PSMN!
diff --git a/doc/available_tools.md b/doc/available_tools.md
new file mode 100644
index 0000000000000000000000000000000000000000..297cc27e9211f15023d415a8e20b13b54c8aac18
--- /dev/null
+++ b/doc/available_tools.md
@@ -0,0 +1,42 @@
+## Available tools
+
+- **nf module**: a working example of a nextflow process is available in `src/nf_modules/<tool>/<tool>.nf` and `src/nf_modules/<tool>/<tool>.config`
+- **docker module**: you can create a docker image with the `src/docker_modules/<tool>/<version>/docker_init.sh` script
+- **psmn module**: you can use the tool on the PSMN
+- **IN2P3 module**: you can use the tool at the CCIN2P3
+
+| tool | nf module | docker module | psmn module | in2p3 module |
+|------|:---------:|:-------------:|:-----------:|:-------------:|
+BEDtools | ok | ok | ok | ok 
+BCFtools | **no** | ok | ok | ok
+bioawk |**no**  | ok | ok | ok
+Bowtie | ok | ok | **no** | ok
+Bowtie2 | ok | ok | ok | ok
+BWA | ok | ok | ok | ok
+canu | ok | ok | ok | ok
+cutadapt | ok | ok | ok | ok
+deepTools | ok | ok | ok | ok
+fastp | ok | ok | ok | ok
+FastQC | ok | ok | ok | ok
+file_handle | **no** | ok | ok | ok
+GATK | **no** | ok | ok | ok
+HISAT2 | ok | ok | ok | ok
+HTSeq | ok | ok | ok | ok
+Kallisto | ok | ok | ok | ok
+MACS2 | ok | ok | ok | ok
+MultiQC | ok | ok | ok | ok
+MUSIC | ok | ok | ok | ok
+picard | **no** | ok | ok | ok
+pigz | **no** | ok | ok | ok
+RSEM | ok | ok | ok | ok
+Salmon | **no** | ok | ok | ok
+sambamba | ok | ok | ok | ok
+samblaster | ok | ok | ok | ok
+SAMtools | ok | ok | ok | ok
+SRAtoolkit | ok | ok | ok | ok
+STAR | ok | ok | ok | ok
+subread | **no** | ok | ok | ok
+TopHat | **no** | ok | ok | ok
+Trimmomatic | **no** | ok | ok | ok
+UMItools  | **no** | ok | ok | ok
+UrQt | ok | ok | ok | ok
diff --git a/doc/getting_started.md b/doc/getting_started.md
new file mode 100644
index 0000000000000000000000000000000000000000..e8446e5b9d12a60e9330f4bb40f301f7c17e59b2
--- /dev/null
+++ b/doc/getting_started.md
@@ -0,0 +1,66 @@
+## Getting Started
+
+These instructions will get you a copy of the project up and running on your local machine for development and testing purposes.
+
+### Prerequisites
+
+To run nextflow on your computer you need to have java (>= 1.8) installed.
+
+```sh
+java -version
+```
+
+To easily test the tools already implemented for nextflow (see `src/nf_modules/` for their list), you need to have docker installed on your computer.
+
+```sh
+docker run hello-world
+```
+
+### Installing
+
+To install nextflow on your computer, simply run the following command:
+
+```sh
+src/install_nextflow.sh
+```
+
+Then, to initialize a given tool, run the following command:
+
+```sh
+src/docker_modules/<tool_name>/<tool_version>/docker_init.sh
+```
+
+For example, to initialize `file_handle` version `0.1.1`, run:
+
+```sh
+src/docker_modules/file_handle/0.1.1/docker_init.sh
+```
+
+To initialize all the tools:
+```sh
+find src/docker_modules/ -name "docker_init.sh" | awk '{system($0)}'
+```
+
+## Running the tests
+
+To run the tests, we first need to get a training dataset:
+```sh
+cd data
+git clone -c http.sslVerify=false https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git
+cp tiny_dataset/fastq/tiny_R1.fastq tiny_dataset/fastq/tiny2_R1.fastq
+cp tiny_dataset/fastq/tiny_R2.fastq tiny_dataset/fastq/tiny2_R2.fastq
+cp tiny_dataset/fastq/tiny_S.fastq tiny_dataset/fastq/tiny2_S.fastq
+cd ..
+```
+
+Then, to run the tests for a given tool, run the following command:
+
+```sh
+src/nf_modules/<tool_name>/tests.sh
+```
+
+For example, to run the tests for `Bowtie2`, run:
+
+```sh
+src/nf_modules/bowtie2/tests.sh
+```
diff --git a/doc/img/fork.png b/doc/img/fork.png
new file mode 100644
index 0000000000000000000000000000000000000000..e5a0512f67b5919c7b82ad33b1d598012280929b
Binary files /dev/null and b/doc/img/fork.png differ
diff --git a/doc/img/merge_request.png b/doc/img/merge_request.png
new file mode 100644
index 0000000000000000000000000000000000000000..9cf899d8048df7a54d059a3fa29b49aff8b2d2d4
Binary files /dev/null and b/doc/img/merge_request.png differ
diff --git a/doc/img/webide.png b/doc/img/webide.png
new file mode 100644
index 0000000000000000000000000000000000000000..bb171cff3c5ba74e03f9c9f8c0d43343e4a18495
Binary files /dev/null and b/doc/img/webide.png differ
diff --git a/doc/mail.txt b/doc/mail.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e9f30cc646878f070cdc8b7dde4120120a0ae254
--- /dev/null
+++ b/doc/mail.txt
@@ -0,0 +1,21 @@
+[Formation BioComp] nextflow for experimental biologists
+
+Dear all,
+
+You registered for the nextflow training session, which will take place this afternoon starting at 13:30 in room M7.1H04.
+
+Please check that you have an account on our gitlab server at the following URL:
+https://gitlab.biologie.ens-lyon.fr
+To connect, simply click on the CAS button.
+Your account is now linked to the gitlab server, but blocked.
+You can send an email to laurent.modolo@ens-lyon.fr to activate your account.
+
+You will also need an account on the PSMN (http://www.ens-lyon.fr/PSMN/doku.php?id=contact:forms:inscription)
+
+If you want to work on your laptop, or plan to work remotely on your desktop via ssh (http://www.ens-lyon.fr/LBMC/intranet/services-communs/pole-bioinformatique/bioinfo_club/1_ssh_and_other_tools_to_work_remotly), you need to have the following software installed:
+- ssh
+- git
+- java (>=1.8)
+- docker
+
+Best,
diff --git a/doc/nf_projects.md b/doc/nf_projects.md
new file mode 100644
index 0000000000000000000000000000000000000000..2307ef1cd0df4552620476e510d420cde3e955ec
--- /dev/null
+++ b/doc/nf_projects.md
@@ -0,0 +1,21 @@
+## Projects using nextflow
+
+### RNASeq
+
+- [https://gitlab.biologie.ens-lyon.fr/gylab/salmoninyeast](https://gitlab.biologie.ens-lyon.fr/gylab/salmoninyeast)
+- [https://github.com/LBMC/readthroughpombe](https://github.com/LBMC/readthroughpombe)
+- [https://gitlab.biologie.ens-lyon.fr/vvanoost/nextflow](https://gitlab.biologie.ens-lyon.fr/vvanoost/nextflow)
+- [https://gitlab.biologie.ens-lyon.fr/elabaron/HIV_project](https://gitlab.biologie.ens-lyon.fr/elabaron/HIV_project)
+
+### single-cell RNA-Seq
+
+- [https://gitlab.com/LBMC_UMR5239/sbdm/mars-seq](https://gitlab.com/LBMC_UMR5239/sbdm/mars-seq)
+
+### DNASeq
+
+- [https://github.com/LBMC/ChrSexebelari](https://github.com/LBMC/ChrSexebelari)
+
+### ChIP-Seq
+
+- [https://gitlab.biologie.ens-lyon.fr/Auboeuf/ChIP-seq](https://gitlab.biologie.ens-lyon.fr/Auboeuf/ChIP-seq)
+
diff --git a/public/TP_computational_biologists.html b/public/TP_computational_biologists.html
new file mode 100644
index 0000000000000000000000000000000000000000..d34ba8ddae9a9dc51181715f648b9e05109554cd
--- /dev/null
+++ b/public/TP_computational_biologists.html
@@ -0,0 +1,305 @@
+<!DOCTYPE html>
+<html xmlns="http://www.w3.org/1999/xhtml" lang="" xml:lang="">
+<head>
+  <meta charset="utf-8" />
+  <meta name="generator" content="pandoc" />
+  <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
+  <title>TP_computational_biologists</title>
+  <style>
+      code{white-space: pre-wrap;}
+      span.smallcaps{font-variant: small-caps;}
+      span.underline{text-decoration: underline;}
+      div.column{display: inline-block; vertical-align: top; width: 50%;}
+  </style>
+  <style>
+a.sourceLine { display: inline-block; line-height: 1.25; }
+a.sourceLine { pointer-events: none; color: inherit; text-decoration: inherit; }
+a.sourceLine:empty { height: 1.2em; }
+.sourceCode { overflow: visible; }
+code.sourceCode { white-space: pre; position: relative; }
+div.sourceCode { margin: 1em 0; }
+pre.sourceCode { margin: 0; }
+@media screen {
+div.sourceCode { overflow: auto; }
+}
+@media print {
+code.sourceCode { white-space: pre-wrap; }
+a.sourceLine { text-indent: -1em; padding-left: 1em; }
+}
+pre.numberSource a.sourceLine
+  { position: relative; left: -4em; }
+pre.numberSource a.sourceLine::before
+  { content: attr(title);
+    position: relative; left: -1em; text-align: right; vertical-align: baseline;
+    border: none; pointer-events: all; display: inline-block;
+    -webkit-touch-callout: none; -webkit-user-select: none;
+    -khtml-user-select: none; -moz-user-select: none;
+    -ms-user-select: none; user-select: none;
+    padding: 0 4px; width: 4em;
+    color: #aaaaaa;
+  }
+pre.numberSource { margin-left: 3em; border-left: 1px solid #aaaaaa;  padding-left: 4px; }
+div.sourceCode
+  {  }
+@media screen {
+a.sourceLine::before { text-decoration: underline; }
+}
+code span.al { color: #ff0000; font-weight: bold; } /* Alert */
+code span.an { color: #60a0b0; font-weight: bold; font-style: italic; } /* Annotation */
+code span.at { color: #7d9029; } /* Attribute */
+code span.bn { color: #40a070; } /* BaseN */
+code span.bu { } /* BuiltIn */
+code span.cf { color: #007020; font-weight: bold; } /* ControlFlow */
+code span.ch { color: #4070a0; } /* Char */
+code span.cn { color: #880000; } /* Constant */
+code span.co { color: #60a0b0; font-style: italic; } /* Comment */
+code span.cv { color: #60a0b0; font-weight: bold; font-style: italic; } /* CommentVar */
+code span.do { color: #ba2121; font-style: italic; } /* Documentation */
+code span.dt { color: #902000; } /* DataType */
+code span.dv { color: #40a070; } /* DecVal */
+code span.er { color: #ff0000; font-weight: bold; } /* Error */
+code span.ex { } /* Extension */
+code span.fl { color: #40a070; } /* Float */
+code span.fu { color: #06287e; } /* Function */
+code span.im { } /* Import */
+code span.in { color: #60a0b0; font-weight: bold; font-style: italic; } /* Information */
+code span.kw { color: #007020; font-weight: bold; } /* Keyword */
+code span.op { color: #666666; } /* Operator */
+code span.ot { color: #007020; } /* Other */
+code span.pp { color: #bc7a00; } /* Preprocessor */
+code span.sc { color: #4070a0; } /* SpecialChar */
+code span.ss { color: #bb6688; } /* SpecialString */
+code span.st { color: #4070a0; } /* String */
+code span.va { color: #19177c; } /* Variable */
+code span.vs { color: #4070a0; } /* VerbatimString */
+code span.wa { color: #60a0b0; font-weight: bold; font-style: italic; } /* Warning */
+  </style>
+  <!--[if lt IE 9]>
+    <script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv-printshiv.min.js"></script>
+  <![endif]-->
+</head>
+<body>
+<p>The goal of this practical is to learn how to build your own pipeline with nextflow, using the tools already <em>wrapped</em>. For this, we are going to build a small RNASeq analysis pipeline that should run the following steps:</p>
+<ul>
+<li>remove Illumina adaptors</li>
+<li>trim reads by quality</li>
+<li>build the index of a reference genome</li>
+<li>estimate the amount of RNA fragments mapping to the transcripts of this genome</li>
+</ul>
+<h1 id="initialize-your-own-project">Initialize your own project</h1>
+<p>You are going to build a pipeline for you or your team. So the first step is to create your own project.</p>
+<h2 id="forking">Forking</h2>
+<p>Instead of reinventing the wheel, you can use the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a> as a template. To easily do so, go to the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a> repository and click on the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/forks/new"><strong>fork</strong></a> button.</p>
+<figure>
+<img src="img/fork.png" alt="fork button" /><figcaption>fork button</figcaption>
+</figure>
+<p>In git, the <a href="https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project">action of forking</a> means that you are going to make your own private copy of a repository. You can then write modifications in your project, and if they are of interest for the source repository (here <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a>) create a merge request. Merge requests are sent to the source repository to ask the maintainers to integrate modifications.</p>
+<figure>
+<img src="img/merge_request.png" alt="merge request button" /><figcaption>merge request button</figcaption>
+</figure>
+<h2 id="project-organisation">Project organisation</h2>
+<p>This project (and yours) follows the <a href="http://www.ens-lyon.fr/LBMC/intranet/services-communs/pole-bioinformatique/ressources/good_practice_LBMC">guide of good practices for the LBMC</a></p>
+<p>You are now on the main page of your fork of the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a>. You can explore this project, all the code in it is under the CeCILL licence (in the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/LICENSE">LICENCE</a> file).</p>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md">README.md</a> file contains instructions to run your pipeline and test its installation.</p>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/CONTRIBUTING.md">CONTRIBUTING.md</a> file contains guidelines if you want to contribute to the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a> (making a merge request for example).</p>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/data">data</a> folder will be the place where you store the raw data for your analysis. The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/results">results</a> folder will be the place where you store the results of your analysis.</p>
+<blockquote>
+<p><strong>The content of <code>data</code> and <code>results</code> folders should never be saved on git.</strong></p>
+</blockquote>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/doc">doc</a> folder contains the documentation of this practical course.</p>
+<p>And most interestingly for you, the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/src">src</a> folder contains code to wrap tools. This folder contains three subdirectories: a <code>docker_modules</code>, a <code>nf_modules</code> and a <code>psmn_modules</code> folder.</p>
+<h3 id="docker_modules"><code>docker_modules</code></h3>
+<p>The <code>src/docker_modules</code> contains the code to wrap tools in <a href="https://www.docker.com/what-docker">Docker</a>. <a href="https://www.docker.com/what-docker">Docker</a> is a framework that allows you to execute software within <a href="https://www.docker.com/what-container">containers</a>. The <code>docker_modules</code> folder contains directories corresponding to tools, with subdirectories corresponding to their versions.</p>
+<div class="sourceCode" id="cb1"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb1-1" title="1"><span class="fu">ls</span> -l src/docker_modules/</a>
+<a class="sourceLine" id="cb1-2" title="2"><span class="ex">rwxr-xr-x</span>  3 laurent _lpoperator   96 May 25 15:42 bedtools/</a>
+<a class="sourceLine" id="cb1-3" title="3"><span class="ex">drwxr-xr-x</span>  4 laurent _lpoperator  128 Jun 5 16:14 bowtie2/</a>
+<a class="sourceLine" id="cb1-4" title="4"><span class="ex">drwxr-xr-x</span>  3 laurent _lpoperator   96 May 25 15:42 fastqc/</a>
+<a class="sourceLine" id="cb1-5" title="5"><span class="ex">drwxr-xr-x</span>  4 laurent _lpoperator  128 Jun 5 16:14 htseq/</a></code></pre></div>
+<p>To each <code>tool/version</code> correspond two files:</p>
+<div class="sourceCode" id="cb2"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb2-1" title="1"><span class="fu">ls</span> -l src/docker_modules/bowtie2/2.3.4.1/</a>
+<a class="sourceLine" id="cb2-2" title="2"><span class="ex">-rw-r--r--</span> 1 laurent _lpoperator  283 Jun  5 15:07 Dockerfile</a>
+<a class="sourceLine" id="cb2-3" title="3"><span class="ex">-rwxr-xr-x</span>  1 laurent _lpoperator   79 Jun 5 16:18 docker_init.sh*</a></code></pre></div>
+<p>The <code>Dockerfile</code> is the <a href="https://www.docker.com/what-docker">Docker</a> recipe to create a <a href="https://www.docker.com/what-container">container</a> containing <code>Bowtie2</code> in its <code>2.3.4.1</code> version. And the <code>docker_init.sh</code> file is a small script to create the <a href="https://www.docker.com/what-container">container</a> from this recipe.</p>
+<p>By running this script you will be able to easily install tools in different versions on your personal computer and use them in your pipeline. Some of the advantages are:</p>
+<ul>
+<li>Whatever the computer, the installation and the results will be the same</li>
+<li>You can keep <a href="https://www.docker.com/what-container">containers</a> for old versions of tools and run them on new systems (science = reproducibility)</li>
+<li>You don’t have to bother with tedious installation procedures, somebody else already did the job and wrote a <code>Dockerfile</code>.</li>
+<li>You can easily keep <a href="https://www.docker.com/what-container">containers</a> for different versions of the same tool.</li>
+</ul>
+<h3 id="psmn_modules"><code>psmn_modules</code></h3>
+<p>The <code>src/psmn_modules</code> folder is not really there. It’s a submodule of the project <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a>. To populate it locally you can use the following command:</p>
+<div class="sourceCode" id="cb3"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb3-1" title="1"><span class="fu">git</span> submodule init</a></code></pre></div>
+<p>Like the <code>src/docker_modules</code>, the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> project describes recipes to install tools and use them. The main difference is that you cannot use <a href="https://www.docker.com/what-docker">Docker</a> on the PSMN. Instead you have to use another framework, <a href="http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:modules">Environment Module</a>, which allows you to load modules for specific tools and versions. The <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules/blob/master/README.md">README.md</a> file of the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> repository contains all the instructions needed to load the modules maintained by the LBMC and present in the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> repository.</p>
+<h3 id="nf_modules"><code>nf_modules</code></h3>
+<p>The <code>src/nf_modules</code> folder contains templates of <a href="https://www.nextflow.io/">nextflow</a> wrappers for the tools available in <a href="https://www.docker.com/what-docker">Docker</a> and <a href="http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:psmn">psmn</a>. The details of the <a href="https://www.nextflow.io/">nextflow</a> wrapper will be presented in the next section. Alongside the <code>.nf</code> and <code>.config</code> files, there is a <code>tests.sh</code> script to run tests on the tool.</p>
+<h1 id="nextflow-pipeline">Nextflow pipeline</h1>
+<p>A pipeline is a succession of <strong>processes</strong>. Each process has data input(s) and optional data output(s). Data flows are modeled as <strong>channels</strong>.</p>
+<h2 id="processes">Processes</h2>
+<p>Here is an example of a <strong>process</strong>:</p>
+<pre class="groovy"><code>process sample_fasta {
+  input:
+file fasta from fasta_file
+
+  output:
+file &quot;sample.fasta&quot; into fasta_sample
+
+  script:
+&quot;&quot;&quot;
+head ${fasta} &gt; sample.fasta
+&quot;&quot;&quot;
+}</code></pre>
+<p>We have the process <code>sample_fasta</code> that takes a <code>fasta_file</code> <strong>channel</strong> as input and outputs a <code>fasta_sample</code> <strong>channel</strong>. The process itself is defined in the <code>script:</code> block and within <code>"""</code>.</p>
+<pre class="groovy"><code>input:
+file fasta from fasta_file</code></pre>
+<p>When we zoom in on the <code>input:</code> block, we see that we define a variable <code>fasta</code> of type <code>file</code> from the <code>fasta_file</code> <strong>channel</strong>. This means that groovy is going to write a file, named after the content of the variable <code>fasta</code>, in the root of the folder where <code>script:</code> is executed.</p>
+<pre class="groovy"><code>output:
+file &quot;sample.fasta&quot; into fasta_sample</code></pre>
+<p>At the end of the script, a file named <code>sample.fasta</code> is found in the root of the folder where <code>script:</code> is executed and sent into the <strong>channel</strong> <code>fasta_sample</code>.</p>
+<p>Using the WebIDE of Gitlab, create a file <code>src/fasta_sampler.nf</code> with this process and commit it to your repository.</p>
+<figure>
+<img src="img/webide.png" alt="webide" /><figcaption>webide</figcaption>
+</figure>
+<h2 id="channels">Channels</h2>
+<p>Why bother with channels? In the above example, the advantages of channels are not really clear. We could have just given the <code>fasta</code> file to the process. But what if we have many fasta files to process? What if we have subprocesses to run on each of the sampled fasta files? Nextflow can easily deal with these problems with the help of channels.</p>
+<blockquote>
+<p><strong>Channels</strong> are streams of items that are emitted by a source and consumed by a process. A process with a channel as input will be run on every item sent through the channel.</p>
+</blockquote>
+<pre class="groovy"><code>Channel
+  .fromPath( &quot;data/tiny_dataset/fasta/*.fasta&quot; )
+  .set { fasta_file }</code></pre>
+<p>Here we define the channel <code>fasta_file</code>, which is going to send every fasta file from the folder <code>data/tiny_dataset/fasta/</code> into the process that takes it as input.</p>
+<p>Add the definition of the channel to the <code>src/fasta_sampler.nf</code> file and commit it to your repository.</p>
+<h2 id="run-your-pipeline-locally">Run your pipeline locally</h2>
+<p>After writing this first pipeline, you may want to test it. To do that, first clone your repository. To make cloning easier, set the visibility level to <em>public</em> in the Settings/General/Permissions page of your project.</p>
+<p>You can then run the following commands to download your project on your computer:</p>
+<p>If you are on a PSMN computer:</p>
+<div class="sourceCode" id="cb8"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb8-1" title="1"><span class="ex">pip</span> install cutadapt=1.14</a>
+<a class="sourceLine" id="cb8-2" title="2"><span class="va">PATH=</span><span class="st">&quot;/scratch/lmodolo/:</span><span class="va">$PATH</span><span class="st">&quot;</span></a>
+<a class="sourceLine" id="cb8-3" title="3"><span class="fu">git</span> config --global http.sslVerify false</a></code></pre></div>
+<p>and then:</p>
+<blockquote>
+<p>Don’t forget to replace <em>https://gitlab.biologie.ens-lyon.fr/</em> with <em>gitlab_lbmc</em> if you are using your own computer</p>
+</blockquote>
+<div class="sourceCode" id="cb9"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb9-1" title="1"><span class="fu">git</span> clone https://gitlab.biologie.ens-lyon.fr/<span class="op">&lt;</span>usr_name<span class="op">&gt;</span>/nextflow.git</a>
+<a class="sourceLine" id="cb9-2" title="2"><span class="bu">cd</span> nextflow</a>
+<a class="sourceLine" id="cb9-3" title="3"><span class="ex">src/install_nextflow.sh</span></a></code></pre></div>
+<p>We also need data to run our pipeline:</p>
+<pre><code>cd data
+git clone https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git
+cd ..</code></pre>
+<p>We can run our pipeline with the following command:</p>
+<div class="sourceCode" id="cb11"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb11-1" title="1"><span class="ex">./nextflow</span> src/fasta_sampler.nf</a></code></pre></div>
+<h2 id="getting-your-results">Getting your results</h2>
+<p>Our pipeline seems to work, but we don’t know where the <code>sample.fasta</code> file is. To get results out of a process, we need to tell nextflow to write them somewhere (we may not need every intermediate file in our results).</p>
+<p>To do that we need to add the following line before the <code>input:</code> section:</p>
+<pre class="groovy"><code>publishDir &quot;results/sampling/&quot;, mode: &#39;copy&#39;</code></pre>
+<p>Every file described in the <code>output:</code> section will be copied from the nextflow working directory to the folder <code>results/sampling/</code>.</p>
+<p>Add this to your <code>src/fasta_sampler.nf</code> file with the WebIDE and commit to your repository. Pull your modifications locally with the command:</p>
+<div class="sourceCode" id="cb13"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb13-1" title="1"><span class="fu">git</span> pull origin master</a></code></pre></div>
+<p>You can run your pipeline again and check the content of the folder <code>results/sampling</code>.</p>
+<h2 id="fasta-everywhere">Fasta everywhere</h2>
+<p>We ran our pipeline on one fasta file. How would nextflow handle 100 of them? To test that we need to duplicate the <code>tiny_v2.fasta</code> file:</p>
+<div class="sourceCode" id="cb14"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb14-1" title="1"><span class="kw">for</span> <span class="ex">i</span> in <span class="dt">{1..100}</span></a>
+<a class="sourceLine" id="cb14-2" title="2"><span class="kw">do</span></a>
+<a class="sourceLine" id="cb14-3" title="3"><span class="fu">cp</span> data/tiny_dataset/fasta/tiny_v2.fasta data/tiny_dataset/fasta/tiny_v2_<span class="va">${i}</span>.fasta</a>
+<a class="sourceLine" id="cb14-4" title="4"><span class="kw">done</span></a></code></pre></div>
+<p>You can run your pipeline again and check the content of the folder <code>results/sampling</code>.</p>
+<p>Every <code>fasta_sampler</code> process writes a <code>sample.fasta</code> file, so the copies in <code>results/sampling</code> overwrite one another. We need to make the name of the output file depend on the name of the input file.</p>
+<pre class="groovy"><code>output:
+file &quot;*_sample.fasta&quot; into fasta_sample
+
+  script:
+&quot;&quot;&quot;
+head ${fasta} &gt; ${fasta.baseName}_sample.fasta
+&quot;&quot;&quot;</code></pre>
+<p>Add this to your <code>src/fasta_sampler.nf</code> file with the WebIDE and commit it to your repository before pulling your modifications locally. You can run your pipeline again and check the content of the folder <code>results/sampling</code>.</p>
+<h1 id="build-your-own-rnaseq-pipeline">Build your own RNASeq pipeline</h1>
+<p>In this section you are going to build your own pipeline for RNASeq analysis from the code available in the <code>src/nf_modules</code> folder.</p>
+<h2 id="create-your-docker-containers">Create your Docker containers</h2>
+<p>For this practical, we are going to need the following tools:</p>
+<ul>
+<li>For Illumina adaptor removal: cutadapt</li>
+<li>For reads trimming by quality: UrQt</li>
+<li>For mapping and quantifying reads: BEDtools and Kallisto</li>
+</ul>
+<p>To initialize these tools, follow the <strong>Installing</strong> section of the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md">README.md</a> file.</p>
+<p><strong>If you are using a CBP computer don’t forget to clean up your docker containers at the end of the practical with the following commands:</strong></p>
+<div class="sourceCode" id="cb16"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb16-1" title="1"><span class="ex">docker</span> rm <span class="va">$(</span><span class="ex">docker</span> stop <span class="va">$(</span><span class="ex">docker</span> ps -aq<span class="va">))</span></a>
+<a class="sourceLine" id="cb16-2" title="2"><span class="ex">docker</span> rmi <span class="va">$(</span><span class="ex">docker</span> images -qf <span class="st">&quot;dangling=true&quot;</span><span class="va">)</span></a></code></pre></div>
+<h2 id="cutadapt">Cutadapt</h2>
+<p>The first step of the pipeline is to remove any Illumina adaptors left in your read files.</p>
+<p>Open the WebIDE and create a <code>src/RNASeq.nf</code> file. Browse for <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/cutadapt/adaptor_removal_paired.nf">src/nf_modules/cutadapt/adaptor_removal_paired.nf</a>, this file contains examples for cutadapt. We are interested in the <em>Illumina adaptor removal</em>, <em>for paired-end data</em> section of the code. Copy this code in your pipeline and commit it.</p>
+<p>Compared to before, we have a few new lines:</p>
+<pre class="groovy"><code>params.fastq = &quot;$baseDir/data/fastq/*_{1,2}.fastq&quot;</code></pre>
+<p>We declare a variable that contains the path of the fastq files to look for. The advantage of using <code>params.fastq</code> is that the option <code>--fastq</code> is now a parameter of your pipeline. Thus, you can call your pipeline with the <code>--fastq</code> option:</p>
+<div class="sourceCode" id="cb18"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb18-1" title="1"><span class="ex">./nextflow</span> src/RNASeq.nf --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span></a></code></pre></div>
+<pre class="groovy"><code>log.info &quot;fastq files: ${params.fastq}&quot;</code></pre>
+<p>This line simply displays the value of the variable.</p>
+<pre class="groovy"><code>Channel
+  .fromFilePairs( params.fastq )</code></pre>
+<p>As we are working with paired-end RNASeq data, we tell nextflow to send pairs of fastq files into the <code>fastq_files</code> channel.</p>
+<h3 id="cutadapt.config">cutadapt.config</h3>
+<p>For the <code>fasta_sampler.nf</code> pipeline we used the command <code>head</code>, present on most base UNIX systems. Here we want to use <code>cutadapt</code>, which is not. Therefore, we have three main options:</p>
+<ul>
+<li>install cutadapt locally so nextflow can use it</li>
+<li>launch the process in a Docker container that has cutadapt installed</li>
+<li>launch the process with psmn while loading the correct module to have cutadapt available</li>
+</ul>
+<p>We are not going to use the first option: it requires no configuration for nextflow but tedious tool installations. Instead, we are going to use existing <em>wrappers</em> and tell nextflow about them. This is what the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/cutadapt/adaptor_removal_paired.config">src/nf_modules/cutadapt/adaptor_removal_paired.config</a> file is for.</p>
+<p>Copy the content of this config file to a <code>src/RNASeq.config</code> file. This file is structured in process blocks. Here we are only interested in configuring the <code>adaptor_removal</code> process, not the <code>trimming</code> process. So you can remove the <code>trimming</code> block and commit the file.</p>
+<p>You can test your pipeline with the following command:</p>
+<div class="sourceCode" id="cb21"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb21-1" title="1"><span class="ex">./nextflow</span> src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span></a></code></pre></div>
+<h2 id="urqt">UrQt</h2>
+<p>The second step of the pipeline is to trim reads by quality.</p>
+<p>Browse for <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/urqt/trimming_paired.nf">src/nf_modules/urqt/trimming_paired.nf</a>, this file contains examples for UrQt. We are interested in the <em>for paired-end data</em> section of the code. Copy the process section code in your pipeline and commit it.</p>
+<p>This code won’t work if you try to run it: the <code>fastq_files</code> channel is already consumed by the <code>adaptor_removal</code> process. In nextflow, once a channel is used by a process, it ceases to exist. Moreover, we don’t want to trim the input fastq files, we want to trim the fastq files that come from the <code>adaptor_removal</code> process.</p>
+<p>Therefore, you need to change the line:</p>
+<pre class="groovy"><code>set pair_id, file(reads) from fastq_files</code></pre>
+<p>In the <code>trimming</code> process to:</p>
+<pre class="groovy"><code>set pair_id, file(reads) from fastq_files_cut</code></pre>
+<p>The two processes are now connected by the channel <code>fastq_files_cut</code>.</p>
+<p>Add the content of the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/urqt/trimming_paired.config">src/nf_modules/urqt/trimming_paired.config</a> file to your <code>src/RNASeq.config</code> file and commit it.</p>
+<p>You can test your pipeline.</p>
+<h2 id="bedtools">BEDtools</h2>
+<p>Kallisto needs the sequences of the transcripts to quantify. We are going to extract these sequences from the reference <code>data/tiny_dataset/fasta/tiny_v2.fasta</code> with the <code>bed</code> annotation <code>data/tiny_dataset/annot/tiny.bed</code>.</p>
+<p>You can copy to your <code>src/RNASeq.nf</code> file the content of <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/bedtools/fasta_from_bed.nf">src/nf_modules/bedtools/fasta_from_bed.nf</a> and to your <code>src/RNASeq.config</code> file the content of <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/bedtools/fasta_from_bed.config">src/nf_modules/bedtools/fasta_from_bed.config</a>.</p>
+<p>Commit your work and test your pipeline with the following command:</p>
+<div class="sourceCode" id="cb24"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb24-1" title="1"><span class="ex">./nextflow</span> src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span> --fasta <span class="st">&quot;data/tiny_dataset/fasta/tiny_v2.fasta&quot;</span> --bed <span class="st">&quot;data/tiny_dataset/annot/tiny.bed&quot;</span></a></code></pre></div>
+<h2 id="kallisto">Kallisto</h2>
+<p>Kallisto runs in two steps: indexing the reference and quantifying the reads against this index.</p>
+<p>You can copy to your <code>src/RNASeq.nf</code> file the content of the files <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/indexing.nf">src/nf_modules/kallisto/indexing.nf</a> and <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/mapping_paired.nf">src/nf_modules/kallisto/mapping_paired.nf</a>. You can add to your <code>src/RNASeq.config</code> file the content of the files <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/indexing.config">src/nf_modules/kallisto/indexing.config</a> and <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/mapping_paired.config">src/nf_modules/kallisto/mapping_paired.config</a>.</p>
+<p>We are going to work with paired-end data, so only copy the relevant processes. The <code>index_fasta</code> process needs to take as input the output of your <code>fasta_from_bed</code> process. Your <code>mapping_fastq</code> process needs to take as input both the output of your <code>index_fasta</code> process and the output of the <code>trimming</code> process.</p>
+<p>Commit your work and test your pipeline. You now have an RNASeq analysis pipeline that can run locally with Docker!</p>
+<h2 id="additional-nextflow-option">Additional nextflow option</h2>
+<p>With nextflow you can resume the computation of a pipeline and get a trace of its execution with the following options:</p>
+<div class="sourceCode" id="cb25"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb25-1" title="1"> <span class="ex">-resume</span> -with-dag results/RNASeq_dag.pdf -with-timeline results/RNASeq_timeline</a></code></pre></div>
+<h1 id="run-your-rnaseq-pipeline-on-the-psmn">Run your RNASeq pipeline on the PSMN</h1>
+<p>First you need to connect to the PSMN:</p>
+<div class="sourceCode" id="cb26"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb26-1" title="1"><span class="ex">login@allo-psmn</span></a></code></pre></div>
+<p>Then once connected to <code>allo-psmn</code>, you can connect to <code>e5-2667v4comp1</code>:</p>
+<div class="sourceCode" id="cb27"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb27-1" title="1"><span class="ex">login@e5-2667v4comp1</span></a></code></pre></div>
+<h2 id="set-your-environment">Set your environment</h2>
+<p>Make the LBMC modules available to you:</p>
+<div class="sourceCode" id="cb28"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb28-1" title="1"><span class="fu">ln</span> -s /Xnfs/lbmcdb/common/modules/modulefiles ~/privatemodules</a>
+<a class="sourceLine" id="cb28-2" title="2"><span class="bu">echo</span> <span class="st">&quot;module use ~/privatemodules&quot;</span> <span class="op">&gt;&gt;</span> .bashrc</a></code></pre></div>
+<p>Create and go to your <code>scratch</code> folder:</p>
+<div class="sourceCode" id="cb29"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb29-1" title="1"><span class="fu">mkdir</span> -p /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a>
+<a class="sourceLine" id="cb29-2" title="2"><span class="bu">cd</span> /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a>
+<a class="sourceLine" id="cb29-3" title="3"><span class="bu">echo</span> <span class="st">&quot;module use ~/privatemodules&quot;</span> <span class="op">&gt;&gt;</span> .bashrc</a></code></pre></div>
+<p>Then you need to clone your pipeline and get the data:</p>
+<div class="sourceCode" id="cb30"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb30-1" title="1"><span class="fu">git</span> config --global http.sslVerify false</a>
+<a class="sourceLine" id="cb30-2" title="2"><span class="fu">git</span> clone https://gitlab.biologie.ens-lyon.fr/<span class="op">&lt;</span>usr_name<span class="op">&gt;</span>/nextflow.git</a>
+<a class="sourceLine" id="cb30-3" title="3"><span class="bu">cd</span> nextflow/data</a>
+<a class="sourceLine" id="cb30-4" title="4"><span class="fu">git</span> clone https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git</a>
+<a class="sourceLine" id="cb30-5" title="5"><span class="bu">cd</span> ..</a></code></pre></div>
+<h2 id="run-nextflow">Run nextflow</h2>
+<p>As we don’t want nextflow to be killed in case of disconnection, we start by launching <code>tmux</code>. In case of disconnection, you can restore your session with the command <code>tmux a</code>.</p>
+<div class="sourceCode" id="cb31"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb31-1" title="1"><span class="ex">tmux</span></a>
+<a class="sourceLine" id="cb31-2" title="2"><span class="ex">module</span> load nextflow/0.28.2</a>
+<a class="sourceLine" id="cb31-3" title="3"><span class="ex">nextflow</span> src/RNASeq.nf -c src/RNASeq.config -profile psmn --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span> --fasta <span class="st">&quot;data/tiny_dataset/fasta/tiny_v2.fasta&quot;</span> --bed <span class="st">&quot;data/tiny_dataset/annot/tiny.bed&quot;</span> -w /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a></code></pre></div>
+<p>To use the scratch for nextflow computations, add the option (already included in the command above):</p>
+<div class="sourceCode" id="cb32"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb32-1" title="1"><span class="ex">-w</span> /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a></code></pre></div>
+<p>You just ran your pipeline on the PSMN!</p>
+</body>
+</html>
diff --git a/public/TP_experimental_biologists.html b/public/TP_experimental_biologists.html
new file mode 100644
index 0000000000000000000000000000000000000000..d34ba8ddae9a9dc51181715f648b9e05109554cd
--- /dev/null
+++ b/public/TP_experimental_biologists.html
@@ -0,0 +1,305 @@
+<!DOCTYPE html>
+<html xmlns="http://www.w3.org/1999/xhtml" lang="" xml:lang="">
+<head>
+  <meta charset="utf-8" />
+  <meta name="generator" content="pandoc" />
+  <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
+  <title>TP_experimental_biologists</title>
+  <style>
+      code{white-space: pre-wrap;}
+      span.smallcaps{font-variant: small-caps;}
+      span.underline{text-decoration: underline;}
+      div.column{display: inline-block; vertical-align: top; width: 50%;}
+  </style>
+  <style>
+a.sourceLine { display: inline-block; line-height: 1.25; }
+a.sourceLine { pointer-events: none; color: inherit; text-decoration: inherit; }
+a.sourceLine:empty { height: 1.2em; }
+.sourceCode { overflow: visible; }
+code.sourceCode { white-space: pre; position: relative; }
+div.sourceCode { margin: 1em 0; }
+pre.sourceCode { margin: 0; }
+@media screen {
+div.sourceCode { overflow: auto; }
+}
+@media print {
+code.sourceCode { white-space: pre-wrap; }
+a.sourceLine { text-indent: -1em; padding-left: 1em; }
+}
+pre.numberSource a.sourceLine
+  { position: relative; left: -4em; }
+pre.numberSource a.sourceLine::before
+  { content: attr(title);
+    position: relative; left: -1em; text-align: right; vertical-align: baseline;
+    border: none; pointer-events: all; display: inline-block;
+    -webkit-touch-callout: none; -webkit-user-select: none;
+    -khtml-user-select: none; -moz-user-select: none;
+    -ms-user-select: none; user-select: none;
+    padding: 0 4px; width: 4em;
+    color: #aaaaaa;
+  }
+pre.numberSource { margin-left: 3em; border-left: 1px solid #aaaaaa;  padding-left: 4px; }
+div.sourceCode
+  {  }
+@media screen {
+a.sourceLine::before { text-decoration: underline; }
+}
+code span.al { color: #ff0000; font-weight: bold; } /* Alert */
+code span.an { color: #60a0b0; font-weight: bold; font-style: italic; } /* Annotation */
+code span.at { color: #7d9029; } /* Attribute */
+code span.bn { color: #40a070; } /* BaseN */
+code span.bu { } /* BuiltIn */
+code span.cf { color: #007020; font-weight: bold; } /* ControlFlow */
+code span.ch { color: #4070a0; } /* Char */
+code span.cn { color: #880000; } /* Constant */
+code span.co { color: #60a0b0; font-style: italic; } /* Comment */
+code span.cv { color: #60a0b0; font-weight: bold; font-style: italic; } /* CommentVar */
+code span.do { color: #ba2121; font-style: italic; } /* Documentation */
+code span.dt { color: #902000; } /* DataType */
+code span.dv { color: #40a070; } /* DecVal */
+code span.er { color: #ff0000; font-weight: bold; } /* Error */
+code span.ex { } /* Extension */
+code span.fl { color: #40a070; } /* Float */
+code span.fu { color: #06287e; } /* Function */
+code span.im { } /* Import */
+code span.in { color: #60a0b0; font-weight: bold; font-style: italic; } /* Information */
+code span.kw { color: #007020; font-weight: bold; } /* Keyword */
+code span.op { color: #666666; } /* Operator */
+code span.ot { color: #007020; } /* Other */
+code span.pp { color: #bc7a00; } /* Preprocessor */
+code span.sc { color: #4070a0; } /* SpecialChar */
+code span.ss { color: #bb6688; } /* SpecialString */
+code span.st { color: #4070a0; } /* String */
+code span.va { color: #19177c; } /* Variable */
+code span.vs { color: #4070a0; } /* VerbatimString */
+code span.wa { color: #60a0b0; font-weight: bold; font-style: italic; } /* Warning */
+  </style>
+  <!--[if lt IE 9]>
+    <script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv-printshiv.min.js"></script>
+  <![endif]-->
+</head>
+<body>
+<p>The goal of this practical is to learn how to build your own pipeline with nextflow, using the tools already <em>wrapped</em>. For this, we are going to build a small RNASeq analysis pipeline that should run the following steps:</p>
+<ul>
+<li>remove Illumina adaptors</li>
+<li>trim reads by quality</li>
+<li>build the index of a reference genome</li>
+<li>estimate the amount of RNA fragments mapping to the transcripts of this genome</li>
+</ul>
+<h1 id="initialize-your-own-project">Initialize your own project</h1>
+<p>You are going to build a pipeline for you or your team. So the first step is to create your own project.</p>
+<h2 id="forking">Forking</h2>
+<p>Instead of reinventing the wheel, you can use the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a> as a template. To easily do so, go to the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a> repository and click on the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/forks/new"><strong>fork</strong></a> button.</p>
+<figure>
+<img src="img/fork.png" alt="fork button" /><figcaption>fork button</figcaption>
+</figure>
+<p>In git, the <a href="https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project">action of forking</a> means that you are going to make your own private copy of a repository. You can then write modifications in your project, and if they are of interest for the source repository (here <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a>) create a merge request. Merge requests are sent to the source repository to ask the maintainers to integrate modifications.</p>
+<figure>
+<img src="img/merge_request.png" alt="merge request button" /><figcaption>merge request button</figcaption>
+</figure>
+<h2 id="project-organisation">Project organisation</h2>
+<p>This project (and yours) follows the <a href="http://www.ens-lyon.fr/LBMC/intranet/services-communs/pole-bioinformatique/ressources/good_practice_LBMC">guide of good practices for the LBMC</a></p>
+<p>You are now on the main page of your fork of the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a>. You can explore this project, all the code in it is under the CeCILL licence (in the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/LICENSE">LICENCE</a> file).</p>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md">README.md</a> file contains instructions to run your pipeline and test its installation.</p>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/CONTRIBUTING.md">CONTRIBUTING.md</a> file contains guidelines if you want to contribute to the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow">pipelines/nextflow</a> (making a merge request for example).</p>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/data">data</a> folder will be the place where you store the raw data for your analysis. The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/results">results</a> folder will be the place where you store the results of your analysis.</p>
+<blockquote>
+<p><strong>The content of <code>data</code> and <code>results</code> folders should never be saved on git.</strong></p>
+</blockquote>
+<p>The <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/doc">doc</a> folder contains the documentation of this practical course.</p>
+<p>And most interestingly for you, the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/tree/master/src">src</a> folder contains code to wrap tools. This folder contains three subdirectories: a <code>docker_modules</code>, a <code>nf_modules</code> and a <code>psmn_modules</code> folder.</p>
+<h3 id="docker_modules"><code>docker_modules</code></h3>
+<p>The <code>src/docker_modules</code> contains the code to wrap tools in <a href="https://www.docker.com/what-docker">Docker</a>. <a href="https://www.docker.com/what-docker">Docker</a> is a framework that allows you to execute software within <a href="https://www.docker.com/what-container">containers</a>. The <code>docker_modules</code> folder contains directories corresponding to tools, with subdirectories corresponding to their versions.</p>
+<div class="sourceCode" id="cb1"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb1-1" title="1"><span class="fu">ls</span> -l src/docker_modules/</a>
+<a class="sourceLine" id="cb1-2" title="2"><span class="ex">rwxr-xr-x</span>  3 laurent _lpoperator   96 May 25 15:42 bedtools/</a>
+<a class="sourceLine" id="cb1-3" title="3"><span class="ex">drwxr-xr-x</span>  4 laurent _lpoperator  128 Jun 5 16:14 bowtie2/</a>
+<a class="sourceLine" id="cb1-4" title="4"><span class="ex">drwxr-xr-x</span>  3 laurent _lpoperator   96 May 25 15:42 fastqc/</a>
+<a class="sourceLine" id="cb1-5" title="5"><span class="ex">drwxr-xr-x</span>  4 laurent _lpoperator  128 Jun 5 16:14 htseq/</a></code></pre></div>
+<p>Each <code>tool/version</code> directory contains two files:</p>
+<div class="sourceCode" id="cb2"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb2-1" title="1"><span class="fu">ls</span> -l src/docker_modules/bowtie2/2.3.4.1/</a>
+<a class="sourceLine" id="cb2-2" title="2"><span class="ex">-rw-r--r--</span> 1 laurent _lpoperator  283 Jun  5 15:07 Dockerfile</a>
+<a class="sourceLine" id="cb2-3" title="3"><span class="ex">-rwxr-xr-x</span>  1 laurent _lpoperator   79 Jun 5 16:18 docker_init.sh*</a></code></pre></div>
+<p>The <code>Dockerfile</code> is the <a href="https://www.docker.com/what-docker">Docker</a> recipe to create a <a href="https://www.docker.com/what-container">container</a> with version <code>2.3.4.1</code> of <code>Bowtie2</code>, and the <code>docker_init.sh</code> file is a small script to create the <a href="https://www.docker.com/what-container">container</a> from this recipe.</p>
+<p>By running this script you will be able to easily install different versions of tools on your personal computer and use them in your pipeline. Some of the advantages are:</p>
+<ul>
+<li>Whatever the computer, the installation and the results will be the same</li>
+<li>You can keep <a href="https://www.docker.com/what-container">containers</a> for old versions of tools and run them on new systems (science = reproducibility)</li>
+<li>You don’t have to bother with tedious installation procedures: somebody else already did the job and wrote a <code>Dockerfile</code>.</li>
+<li>You can easily keep <a href="https://www.docker.com/what-container">containers</a> for different versions of the same tool.</li>
+</ul>
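+<p>As we will see in the configuration section below, a pipeline can then simply point a process at one of these images. Here is a minimal sketch of such a configuration; the <code>mapping</code> process name is hypothetical, and the image name follows the <code>lbmc/&lt;tool&gt;:&lt;version&gt;</code> convention used by the <code>docker_init.sh</code> scripts:</p>
+<pre class="groovy"><code>// minimal sketch: run a (hypothetical) mapping process inside the
+// lbmc/bowtie2:2.3.4.1 container built by its docker_init.sh script
+docker.enabled = true
+process {
+  withName: mapping {
+    container = &quot;lbmc/bowtie2:2.3.4.1&quot;
+  }
+}</code></pre>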
+<h3 id="psmn_modules"><code>psmn_modules</code></h3>
+<p>The <code>src/psmn_modules</code> folder is not really there: it’s a git submodule pointing to the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> project. To populate it locally you can use the following commands:</p>
+<div class="sourceCode" id="cb3"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb3-1" title="1"><span class="fu">git</span> submodule init</a></code></pre></div>
+<p>Like the <code>src/docker_modules</code> folder, the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> project describes recipes to install tools and use them. The main difference is that you cannot use <a href="https://www.docker.com/what-docker">Docker</a> on the PSMN. Instead you have to use another framework, <a href="http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:modules">Environment Modules</a>, which allows you to load specific versions of tools. The <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules/blob/master/README.md">README.md</a> file of the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> repository contains all the instructions to load the modules maintained by the LBMC and available in the <a href="https://gitlab.biologie.ens-lyon.fr/PSMN/modules">PSMN/modules</a> repository.</p>
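+<p>Nextflow supports Environment Modules natively through its <code>module</code> directive, so a PSMN configuration boils down to blocks such as the following sketch (both the <code>mapping</code> process and the module name are hypothetical):</p>
+<pre class="groovy"><code>// sketch: load an Environment Module for a (hypothetical) mapping process
+process {
+  withName: mapping {
+    module = &quot;bowtie2/2.3.4.1&quot;
+  }
+}</code></pre>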
+<h3 id="nf_modules"><code>nf_modules</code></h3>
+<p>The <code>src/nf_modules</code> folder contains templates of <a href="https://www.nextflow.io/">nextflow</a> wrappers for the tools available in <a href="https://www.docker.com/what-docker">Docker</a> and on the <a href="http://www.ens-lyon.fr/PSMN/doku.php?id=documentation:tools:psmn">PSMN</a>. The details of the <a href="https://www.nextflow.io/">nextflow</a> wrappers will be presented in the next section. Alongside the <code>.nf</code> and <code>.config</code> files, there is a <code>tests.sh</code> script to run tests on the tool.</p>
+<h1 id="nextflow-pipeline">Nextflow pipeline</h1>
+<p>A pipeline is a succession of <strong>processes</strong>. Each process has data input(s) and optional data output(s). Data flows are modeled as <strong>channels</strong>.</p>
+<h2 id="processes">Processes</h2>
+<p>Here is an example of <strong>process</strong>:</p>
+<pre class="groovy"><code>process sample_fasta {
+  input:
+file fasta from fasta_file
+
+  output:
+file &quot;sample.fasta&quot; into fasta_sample
+
+  script:
+&quot;&quot;&quot;
+head ${fasta} &gt; sample.fasta
+&quot;&quot;&quot;
+}</code></pre>
+<p>We have the process <code>sample_fasta</code> that takes the <code>fasta_file</code> <strong>channel</strong> as input and sends its output into the <code>fasta_sample</code> <strong>channel</strong>. The command itself is defined in the <code>script:</code> block, within the <code>"""</code>.</p>
+<pre class="groovy"><code>input:
+file fasta from fasta_file</code></pre>
+<p>When we zoom in on the <code>input:</code> block, we see that we define a variable <code>fasta</code> of type <code>file</code> from the <code>fasta_file</code> <strong>channel</strong>. This means that nextflow is going to stage a file, named after the content of the <code>fasta</code> variable, at the root of the folder where the <code>script:</code> block is executed.</p>
+<pre class="groovy"><code>output:
+file &quot;sample.fasta&quot; into fasta_sample</code></pre>
+<p>At the end of the script, a file named <code>sample.fasta</code> is expected at the root of the folder where <code>script:</code> was executed, and it is sent into the <strong>channel</strong> <code>fasta_sample</code>.</p>
+<p>Using the WebIDE of Gitlab, create a file <code>src/fasta_sampler.nf</code> with this process and commit it to your repository.</p>
+<figure>
+<img src="img/webide.png" alt="webide" /><figcaption>webide</figcaption>
+</figure>
+<h2 id="channels">Channels</h2>
+<p>Why bother with channels? In the above example, the advantages of channels are not obvious: we could have just given the <code>fasta</code> file to the process. But what if we have many fasta files to process? What if we have sub-processes to run on each of the sampled fasta files? Nextflow deals with these problems easily with the help of channels.</p>
+<blockquote>
+<p><strong>Channels</strong> are streams of items that are emitted by a source and consumed by a process. A process with a channel as input will be run on every item sent through the channel.</p>
+</blockquote>
+<pre class="groovy"><code>Channel
+  .fromPath( &quot;data/tiny_dataset/fasta/*.fasta&quot; )
+  .set { fasta_file }</code></pre>
+<p>Here we define the channel <code>fasta_file</code>, which is going to send every fasta file from the folder <code>data/tiny_dataset/fasta/</code> to the process that takes it as input.</p>
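+<p>If you want to check what a channel emits before plugging it into a process, you can print its items (a quick debugging sketch, assuming the <code>tiny_dataset</code> described below is in place):</p>
+<pre class="groovy"><code>// debugging sketch: print every item emitted by the channel
+Channel
+  .fromPath( &quot;data/tiny_dataset/fasta/*.fasta&quot; )
+  .println()</code></pre>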
+<p>Add the definition of the channel to the <code>src/fasta_sampler.nf</code> file and commit it to your repository.</p>
+<h2 id="run-your-pipeline-locally">Run your pipeline locally</h2>
+<p>After writing this first pipeline, you may want to test it. To do that, first clone your repository. To make cloning easier, set the visibility level to <em>public</em> in the Settings/General/Permissions page of your project.</p>
+<p>You can then run the following commands to download your project on your computer:</p>
+<p>If you are on a PSMN computer:</p>
+<div class="sourceCode" id="cb8"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb8-1" title="1"><span class="ex">pip</span> install cutadapt=1.14</a>
+<a class="sourceLine" id="cb8-2" title="2"><span class="va">PATH=</span><span class="st">&quot;/scratch/lmodolo/:</span><span class="va">$PATH</span><span class="st">&quot;</span></a>
+<a class="sourceLine" id="cb8-3" title="3"><span class="fu">git</span> config --global http.sslVerify false</a></code></pre></div>
+<p>and then:</p>
+<blockquote>
+<p>Don’t forget to replace <em>https://gitlab.biologie.ens-lyon.fr/</em> with <em>gitlab_lbmc</em> if you are using your own computer</p>
+</blockquote>
+<div class="sourceCode" id="cb9"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb9-1" title="1"><span class="fu">git</span> clone https://gitlab.biologie.ens-lyon.fr/<span class="op">&lt;</span>usr_name<span class="op">&gt;</span>/nextflow.git</a>
+<a class="sourceLine" id="cb9-2" title="2"><span class="bu">cd</span> nextflow</a>
+<a class="sourceLine" id="cb9-3" title="3"><span class="ex">src/install_nextflow.sh</span></a></code></pre></div>
+<p>We also need data to run our pipeline:</p>
+<pre><code>cd data
+git clone https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git
+cd ..</code></pre>
+<p>We can run our pipeline with the following command:</p>
+<div class="sourceCode" id="cb11"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb11-1" title="1"><span class="ex">./nextflow</span> src/fasta_sampler.nf</a></code></pre></div>
+<h2 id="getting-your-results">Getting your results</h2>
+<p>Our pipeline seems to work, but we don’t know where the <code>sample.fasta</code> file is. To get results out of a process, we need to tell nextflow to write them somewhere (we may not need to keep every intermediate file in our results).</p>
+<p>To do that we need to add the following line before the <code>input:</code> section:</p>
+<pre class="groovy"><code>publishDir &quot;results/sampling/&quot;, mode: &#39;copy&#39;</code></pre>
+<p>Every file described in the <code>output:</code> section will be copied from the nextflow working directory to the folder <code>results/sampling/</code>.</p>
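+<p>For reference, after this change the <code>sample_fasta</code> process should look like this:</p>
+<pre class="groovy"><code>process sample_fasta {
+  publishDir &quot;results/sampling/&quot;, mode: &#39;copy&#39;
+
+  input:
+  file fasta from fasta_file
+
+  output:
+  file &quot;sample.fasta&quot; into fasta_sample
+
+  script:
+  &quot;&quot;&quot;
+  head ${fasta} &gt; sample.fasta
+  &quot;&quot;&quot;
+}</code></pre>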
+<p>Add this to your <code>src/fasta_sampler.nf</code> file with the WebIDE and commit to your repository. Pull your modifications locally with the command:</p>
+<div class="sourceCode" id="cb13"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb13-1" title="1"><span class="fu">git</span> pull origin master</a></code></pre></div>
+<p>You can run your pipeline again and check the content of the folder <code>results/sampling</code>.</p>
+<h2 id="fasta-everywhere">Fasta everywhere</h2>
+<p>We ran our pipeline on one fasta file. How would nextflow handle 100 of them? To test that we need to duplicate the <code>tiny_v2.fasta</code> file:</p>
+<div class="sourceCode" id="cb14"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb14-1" title="1"><span class="kw">for</span> <span class="ex">i</span> in <span class="dt">{1..100}</span></a>
+<a class="sourceLine" id="cb14-2" title="2"><span class="kw">do</span></a>
+<a class="sourceLine" id="cb14-3" title="3"><span class="fu">cp</span> data/tiny_dataset/fasta/tiny_v2.fasta data/tiny_dataset/fasta/tiny_v2_<span class="va">${i}</span>.fasta</a>
+<a class="sourceLine" id="cb14-4" title="4"><span class="kw">done</span></a></code></pre></div>
+<p>You can run your pipeline again and check the content of the folder <code>results/sampling</code>.</p>
+<p>Every <code>sample_fasta</code> process writes a <code>sample.fasta</code> file, so the outputs all share the same name. We need to make the name of the output file depend on the name of the input file.</p>
+<pre class="groovy"><code>output:
+file &quot;*_sample.fasta&quot; into fasta_sample
+
+  script:
+&quot;&quot;&quot;
+head ${fasta} &gt; ${fasta.baseName}_sample.fasta
+&quot;&quot;&quot;</code></pre>
+<p>Add this to your <code>src/fasta_sampler.nf</code> file with the WebIDE and commit it to your repository before pulling your modifications locally. You can run your pipeline again and check the content of the folder <code>results/sampling</code>.</p>
+<h1 id="build-your-own-rnaseq-pipeline">Build your own RNASeq pipeline</h1>
+<p>In this section you are going to build your own pipeline for RNASeq analysis from the code available in the <code>src/nf_modules</code> folder.</p>
+<h2 id="create-your-docker-containers">Create your Docker containers</h2>
+<p>For this practical, we are going to need the following tools:</p>
+<ul>
+<li>For Illumina adaptor removal: cutadapt</li>
+<li>For reads trimming by quality: UrQt</li>
+<li>For extracting transcript sequences and quantifying reads: BEDtools and Kallisto</li>
+</ul>
+<p>To initialize these tools, follow the <strong>Installing</strong> section of the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/README.md">README.md</a> file.</p>
+<p><strong>If you are using a CBP computer don’t forget to clean up your docker containers at the end of the practical with the following commands:</strong></p>
+<div class="sourceCode" id="cb16"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb16-1" title="1"><span class="ex">docker</span> rm <span class="va">$(</span><span class="ex">docker</span> stop <span class="va">$(</span><span class="ex">docker</span> ps -aq<span class="va">))</span></a>
+<a class="sourceLine" id="cb16-2" title="2"><span class="ex">docker</span> rmi <span class="va">$(</span><span class="ex">docker</span> images -qf <span class="st">&quot;dangling=true&quot;</span><span class="va">)</span></a></code></pre></div>
+<h2 id="cutadapt">Cutadapt</h2>
+<p>The first step of the pipeline is to remove any Illumina adaptors left in your read files.</p>
+<p>Open the WebIDE and create a <code>src/RNASeq.nf</code> file. Browse to <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/cutadapt/adaptor_removal_paired.nf">src/nf_modules/cutadapt/adaptor_removal_paired.nf</a>; this file contains examples for cutadapt. We are interested in the <em>Illumina adaptor removal</em>, <em>for paired-end data</em> section of the code. Copy this code into your pipeline and commit it.</p>
+<p>Compared to before, we have a few new lines:</p>
+<pre class="groovy"><code>params.fastq = &quot;$baseDir/data/fastq/*_{1,2}.fastq&quot;</code></pre>
+<p>We declare a variable that contains the path of the fastq files to look for. The advantage of using <code>params.fastq</code> is that <code>--fastq</code> is now a parameter of your pipeline. Thus, you can call your pipeline with the <code>--fastq</code> option:</p>
+<div class="sourceCode" id="cb18"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb18-1" title="1"><span class="ex">./nextflow</span> src/RNASeq.nf --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span></a></code></pre></div>
+<pre class="groovy"><code>log.info &quot;fastq files: ${params.fastq}&quot;</code></pre>
+<p>This line simply displays the value of the variable.</p>
+<pre class="groovy"><code>Channel
+  .fromFilePairs( params.fastq )</code></pre>
+<p>As we are working with paired-end RNASeq data, we tell nextflow to send pairs of fastq files into the <code>fastq_files</code> channel.</p>
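+<p><code>fromFilePairs</code> emits items of the form <code>[ pair_id, [ read_1, read_2 ] ]</code>. As before, you can print the channel to check this (a debugging sketch reusing the glob pattern from the command above):</p>
+<pre class="groovy"><code>// debugging sketch: show the [ pair_id, [ read_1, read_2 ] ] tuples
+Channel
+  .fromFilePairs( &quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot; )
+  .println()</code></pre>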
+<h3 id="cutadapt.config">cutadapt.config</h3>
+<p>For the <code>fasta_sampler.nf</code> pipeline we used the command <code>head</code>, present on most base UNIX systems. Here we want to use <code>cutadapt</code>, which is not installed by default. Therefore, we have three main options:</p>
+<ul>
+<li>install cutadapt locally so nextflow can use it</li>
+<li>launch the process in a Docker container that has cutadapt installed</li>
+<li>launch the process with psmn while loading the correct module to have cutadapt available</li>
+</ul>
+<p>We are not going to use the first option, which requires no configuration for nextflow but tedious tool installations. Instead, we are going to use the existing <em>wrappers</em> and tell nextflow about them. This is what the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/cutadapt/adaptor_removal_paired.config">src/nf_modules/cutadapt/adaptor_removal_paired.config</a> file is for.</p>
+<p>Copy the content of this config file to a <code>src/RNASeq.config</code> file. This file is structured in process blocks. Here we are only interested in configuring the <code>adaptor_removal</code> process, not the <code>trimming</code> process, so you can remove the <code>trimming</code> block and commit the file.</p>
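+<p>The docker part of your config should end up looking roughly like the following sketch (the exact image tag comes from the module’s config file; <code>lbmc/cutadapt:1.14</code> is assumed here):</p>
+<pre class="groovy"><code>// sketch of a docker profile block for the adaptor_removal process
+profiles {
+  docker {
+    docker.enabled = true
+    process {
+      withName: adaptor_removal {
+        container = &quot;lbmc/cutadapt:1.14&quot;
+      }
+    }
+  }
+}</code></pre>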
+<p>You can test your pipeline with the following command:</p>
+<div class="sourceCode" id="cb21"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb21-1" title="1"><span class="ex">./nextflow</span> src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span></a></code></pre></div>
+<h2 id="urqt">UrQt</h2>
+<p>The second step of the pipeline is to trim reads by quality.</p>
+<p>Browse to <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/urqt/trimming_paired.nf">src/nf_modules/urqt/trimming_paired.nf</a>; this file contains examples for UrQt. We are interested in the <em>for paired-end data</em> section of the code. Copy the process section of the code into your pipeline and commit it.</p>
+<p>This code won’t work if you try to run it: the <code>fastq_files</code> channel is already consumed by the <code>adaptor_removal</code> process. In nextflow, once a channel is consumed by a process, it ceases to exist. Moreover, we don’t want to trim the input fastq files: we want to trim the fastq files that come out of the <code>adaptor_removal</code> process.</p>
+<p>Therefore, in the <code>trimming</code> process, you need to change the line:</p>
+<pre class="groovy"><code>set pair_id, file(reads) from fastq_files</code></pre>
+<p>to:</p>
+<pre class="groovy"><code>set pair_id, file(reads) from fastq_files_cut</code></pre>
+<p>The two processes are now connected by the channel <code>fastq_files_cut</code>.</p>
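+<p>This channel renaming is how any two processes get chained. As a self-contained toy illustration of the pattern (not part of the RNASeq pipeline):</p>
+<pre class="groovy"><code>// toy sketch: step_two consumes the channel produced by step_one
+Channel.fromPath( &quot;data/tiny_dataset/fasta/*.fasta&quot; ).set { fasta_file }
+
+process step_one {
+  input:
+  file fasta from fasta_file
+
+  output:
+  file &quot;one.txt&quot; into one_files
+
+  script:
+  &quot;&quot;&quot;
+  head -n 2 ${fasta} &gt; one.txt
+  &quot;&quot;&quot;
+}
+
+process step_two {
+  input:
+  file txt from one_files
+
+  output:
+  file &quot;two.txt&quot; into two_files
+
+  script:
+  &quot;&quot;&quot;
+  head -n 1 ${txt} &gt; two.txt
+  &quot;&quot;&quot;
+}</code></pre>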
+<p>Add the content of the <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/urqt/trimming_paired.config">src/nf_modules/urqt/trimming_paired.config</a> file to your <code>src/RNASeq.config</code> file and commit it.</p>
+<p>You can test your pipeline.</p>
+<h2 id="bedtools">BEDtools</h2>
+<p>Kallisto needs the sequences of the transcripts to be quantified. We are going to extract these sequences from the reference <code>data/tiny_dataset/fasta/tiny_v2.fasta</code> with the <code>bed</code> annotation <code>data/tiny_dataset/annot/tiny.bed</code>.</p>
+<p>You can copy to your <code>src/RNASeq.nf</code> file the content of <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/bedtools/fasta_from_bed.nf">src/nf_modules/bedtools/fasta_from_bed.nf</a> and to your <code>src/RNASeq.config</code> file the content of <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/bedtools/fasta_from_bed.config">src/nf_modules/bedtools/fasta_from_bed.config</a>.</p>
+<p>Commit your work and test your pipeline with the following command:</p>
+<div class="sourceCode" id="cb24"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb24-1" title="1"><span class="ex">./nextflow</span> src/RNASeq.nf -c src/RNASeq.config -profile docker --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span> --fasta <span class="st">&quot;data/tiny_dataset/fasta/tiny_v2.fasta&quot;</span> --bed <span class="st">&quot;data/tiny_dataset/annot/tiny.bed&quot;</span></a></code></pre></div>
+<h2 id="kallisto">Kallisto</h2>
+<p>Kallisto runs in two steps: the indexing of the reference and the quantification of the reads on this index.</p>
+<p>You can copy to your <code>src/RNASeq.nf</code> file the content of the files <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/indexing.nf">src/nf_modules/kallisto/indexing.nf</a> and <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/mapping_paired.nf">src/nf_modules/kallisto/mapping_paired.nf</a>. You can add to your file <code>src/RNASeq.config</code> file the content of the files <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/indexing.config">src/nf_modules/kallisto/indexing.config</a> and <a href="https://gitlab.biologie.ens-lyon.fr/pipelines/nextflow/blob/master/src/nf_modules/kallisto/mapping_paired.config">src/nf_modules/kallisto/mapping_paired.config</a>.</p>
+<p>We are going to work with paired-end data, so only copy the relevant processes. The <code>index_fasta</code> process needs to take as input the output of your <code>fasta_from_bed</code> process. The <code>mapping_fastq</code> process needs to take as input the output of your <code>index_fasta</code> process and the output of the <code>trimming</code> process.</p>
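+<p>Concretely, the <code>input:</code> block of <code>mapping_fastq</code> has to combine both channels, along the lines of the following fragment (the channel names here are illustrative; use the ones from the modules you copied):</p>
+<pre class="groovy"><code>input:
+file index from index_files
+set pair_id, file(reads) from fastq_files_trim</code></pre>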
+<p>Commit your work and test your pipeline. You now have a RNASeq analysis pipeline that can run locally with Docker!</p>
+<h2 id="additional-nextflow-option">Additional nextflow option</h2>
+<p>With nextflow you can resume the computation of a pipeline and get a trace of the processes with the following options:</p>
+<div class="sourceCode" id="cb25"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb25-1" title="1"> <span class="ex">-resume</span> -with-dag results/RNASeq_dag.pdf -with-timeline results/RNASeq_timeline</a></code></pre></div>
+<h1 id="run-your-rnaseq-pipeline-on-the-psmn">Run your RNASeq pipeline on the PSMN</h1>
+<p>First you need to connect to the PSMN:</p>
+<div class="sourceCode" id="cb26"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb26-1" title="1"><span class="ex">login@allo-psmn</span></a></code></pre></div>
+<p>Then once connected to <code>allo-psmn</code>, you can connect to <code>e5-2667v4comp1</code>:</p>
+<div class="sourceCode" id="cb27"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb27-1" title="1"><span class="ex">login@e5-2667v4comp1</span></a></code></pre></div>
+<h2 id="set-your-environment">Set your environment</h2>
+<p>Make the LBMC modules available to you:</p>
+<div class="sourceCode" id="cb28"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb28-1" title="1"><span class="fu">ln</span> -s /Xnfs/lbmcdb/common/modules/modulefiles ~/privatemodules</a>
+<a class="sourceLine" id="cb28-2" title="2"><span class="bu">echo</span> <span class="st">&quot;module use ~/privatemodules&quot;</span> <span class="op">&gt;&gt;</span> .bashrc</a></code></pre></div>
+<p>Create and go to your <code>scratch</code> folder:</p>
+<div class="sourceCode" id="cb29"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb29-1" title="1"><span class="fu">mkdir</span> -p /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a>
+<a class="sourceLine" id="cb29-2" title="2"><span class="bu">cd</span> /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a>
+<a class="sourceLine" id="cb29-3" title="3"><span class="bu">echo</span> <span class="st">&quot;module use ~/privatemodules&quot;</span> <span class="op">&gt;&gt;</span> .bashrc</a></code></pre></div>
+<p>Then you need to clone your pipeline and get the data:</p>
+<div class="sourceCode" id="cb30"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb30-1" title="1"><span class="fu">git</span> config --global http.sslVerify false</a>
+<a class="sourceLine" id="cb30-2" title="2"><span class="fu">git</span> clone https://gitlab.biologie.ens-lyon.fr/<span class="op">&lt;</span>usr_name<span class="op">&gt;</span>/nextflow.git</a>
+<a class="sourceLine" id="cb30-3" title="3"><span class="bu">cd</span> nextflow/data</a>
+<a class="sourceLine" id="cb30-4" title="4"><span class="fu">git</span> clone https://gitlab.biologie.ens-lyon.fr/LBMC/tiny_dataset.git</a>
+<a class="sourceLine" id="cb30-5" title="5"><span class="bu">cd</span> ..</a></code></pre></div>
+<h2 id="run-nextflow">Run nextflow</h2>
+<p>As we don’t want nextflow to be killed in case of disconnection, we start by launching <code>tmux</code>. If you do get disconnected, you can restore your session with the command <code>tmux a</code>.</p>
+<div class="sourceCode" id="cb31"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb31-1" title="1"><span class="ex">tmux</span></a>
+<a class="sourceLine" id="cb31-2" title="2"><span class="ex">module</span> load nextflow/0.28.2</a>
+<a class="sourceLine" id="cb31-3" title="3"><span class="ex">nextflow</span> src/RNASeq.nf -c src/RNASeq.config -profile psmn --fastq <span class="st">&quot;data/tiny_dataset/fastq/*_R{1,2}.fastq&quot;</span> --fasta <span class="st">&quot;data/tiny_dataset/fasta/tiny_v2.fasta&quot;</span> --bed <span class="st">&quot;data/tiny_dataset/annot/tiny.bed&quot;</span> -w /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a></code></pre></div>
+<p>Note the last option of the command, which tells nextflow to run its computations on the scratch:</p>
+<div class="sourceCode" id="cb32"><pre class="sourceCode sh"><code class="sourceCode bash"><a class="sourceLine" id="cb32-1" title="1"><span class="ex">-w</span> /scratch/<span class="op">&lt;</span>login<span class="op">&gt;</span></a></code></pre></div>
+<p>You just ran your pipeline on the PSMN!</p>
+</body>
+</html>
diff --git a/results/.gitignore b/results/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..72e8ffc0db8aad71a934dd11e5968bd5109e54b4
--- /dev/null
+++ b/results/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/src/.conda_envs/.conda_envs_dir_test b/src/.conda_envs/.conda_envs_dir_test
new file mode 120000
index 0000000000000000000000000000000000000000..79b89f16062f6b1cfa27a8ed0cd7d1805593f5d8
--- /dev/null
+++ b/src/.conda_envs/.conda_envs_dir_test
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/.conda_envs_dir_test
\ No newline at end of file
diff --git a/src/.conda_envs/Python_2.7.13 b/src/.conda_envs/Python_2.7.13
new file mode 120000
index 0000000000000000000000000000000000000000..5c91793f9b75f93299586c455302e0e3e209384f
--- /dev/null
+++ b/src/.conda_envs/Python_2.7.13
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/Python_2.7.13
\ No newline at end of file
diff --git a/src/.conda_envs/Python_3.6.1 b/src/.conda_envs/Python_3.6.1
new file mode 120000
index 0000000000000000000000000000000000000000..7a6e74cff5631cc204ff1683f7966e57bd47fc80
--- /dev/null
+++ b/src/.conda_envs/Python_3.6.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/Python_3.6.1
\ No newline at end of file
diff --git a/src/.conda_envs/R_3.3.1 b/src/.conda_envs/R_3.3.1
new file mode 120000
index 0000000000000000000000000000000000000000..6544e0903216a580761735313b6504740ad9d3ef
--- /dev/null
+++ b/src/.conda_envs/R_3.3.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/R_3.3.1
\ No newline at end of file
diff --git a/src/.conda_envs/R_3.4.3 b/src/.conda_envs/R_3.4.3
new file mode 120000
index 0000000000000000000000000000000000000000..2f4558021803fab2cb8c969911b8fc9cda94d985
--- /dev/null
+++ b/src/.conda_envs/R_3.4.3
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/R_3.4.3
\ No newline at end of file
diff --git a/src/.conda_envs/axtchain_377 b/src/.conda_envs/axtchain_377
new file mode 120000
index 0000000000000000000000000000000000000000..845b1a9312cb20de7a8c177ff004354efd50250c
--- /dev/null
+++ b/src/.conda_envs/axtchain_377
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/axtchain_377
\ No newline at end of file
diff --git a/src/.conda_envs/bcftools_1.7 b/src/.conda_envs/bcftools_1.7
new file mode 120000
index 0000000000000000000000000000000000000000..c77cd622e68df50873142b109c36ef7a797a002e
--- /dev/null
+++ b/src/.conda_envs/bcftools_1.7
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/bcftools_1.7
\ No newline at end of file
diff --git a/src/.conda_envs/bedtools_2.25.0 b/src/.conda_envs/bedtools_2.25.0
new file mode 120000
index 0000000000000000000000000000000000000000..da8bc7748ceb28af6aaf93153faef36e4235d78b
--- /dev/null
+++ b/src/.conda_envs/bedtools_2.25.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/bedtools_2.25.0
\ No newline at end of file
diff --git a/src/.conda_envs/bioawk_1.0 b/src/.conda_envs/bioawk_1.0
new file mode 120000
index 0000000000000000000000000000000000000000..1907adb042aa5c7667a0f4c68676b94ada4c0217
--- /dev/null
+++ b/src/.conda_envs/bioawk_1.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/bioawk_1.0
\ No newline at end of file
diff --git a/src/.conda_envs/biopython_1.74 b/src/.conda_envs/biopython_1.74
new file mode 120000
index 0000000000000000000000000000000000000000..a32165cab5f8aa5cee3f6324e9c2ceacd86e7387
--- /dev/null
+++ b/src/.conda_envs/biopython_1.74
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/biopython_1.74
\ No newline at end of file
diff --git a/src/.conda_envs/bowtie2_2.3.2 b/src/.conda_envs/bowtie2_2.3.2
new file mode 120000
index 0000000000000000000000000000000000000000..6f58283d4e80af7e5168f906e070b7c75c8d70bf
--- /dev/null
+++ b/src/.conda_envs/bowtie2_2.3.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/bowtie2_2.3.2
\ No newline at end of file
diff --git a/src/.conda_envs/bowtie2_2.3.4.1 b/src/.conda_envs/bowtie2_2.3.4.1
new file mode 120000
index 0000000000000000000000000000000000000000..ee79b3966ee7a73c0b51ebe9a1a2808c656dd440
--- /dev/null
+++ b/src/.conda_envs/bowtie2_2.3.4.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/bowtie2_2.3.4.1
\ No newline at end of file
diff --git a/src/.conda_envs/canu_1.7 b/src/.conda_envs/canu_1.7
new file mode 120000
index 0000000000000000000000000000000000000000..3b3b783c64fc065862d45ea73a29d605cc667895
--- /dev/null
+++ b/src/.conda_envs/canu_1.7
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/canu_1.7
\ No newline at end of file
diff --git a/src/.conda_envs/cdhit_4.6.8 b/src/.conda_envs/cdhit_4.6.8
new file mode 120000
index 0000000000000000000000000000000000000000..0250dfbb10bcccaa03973e9c01413c723da4b817
--- /dev/null
+++ b/src/.conda_envs/cdhit_4.6.8
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/cdhit_4.6.8
\ No newline at end of file
diff --git a/src/.conda_envs/cutadapt_1.14 b/src/.conda_envs/cutadapt_1.14
new file mode 120000
index 0000000000000000000000000000000000000000..33552e9c4735d6cfa2475b9fe29863cf1cba350d
--- /dev/null
+++ b/src/.conda_envs/cutadapt_1.14
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/cutadapt_1.14
\ No newline at end of file
diff --git a/src/.conda_envs/cutadapt_2.4 b/src/.conda_envs/cutadapt_2.4
new file mode 120000
index 0000000000000000000000000000000000000000..c2ab68ca481d52b8ace959546e87c07ef9b197cc
--- /dev/null
+++ b/src/.conda_envs/cutadapt_2.4
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/cutadapt_2.4
\ No newline at end of file
diff --git a/src/.conda_envs/deeptools_3.0.2 b/src/.conda_envs/deeptools_3.0.2
new file mode 120000
index 0000000000000000000000000000000000000000..514f9cd80f15a903e5af0f370eaea45c3d38ab3c
--- /dev/null
+++ b/src/.conda_envs/deeptools_3.0.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/deeptools_3.0.2
\ No newline at end of file
diff --git a/src/.conda_envs/envs b/src/.conda_envs/envs
new file mode 120000
index 0000000000000000000000000000000000000000..cf88bffb3ae0500e1b2341fd4ee37bc68cbccfbd
--- /dev/null
+++ b/src/.conda_envs/envs
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/
\ No newline at end of file
diff --git a/src/.conda_envs/fastp_0.19.7 b/src/.conda_envs/fastp_0.19.7
new file mode 120000
index 0000000000000000000000000000000000000000..325a0da0ef6d62c88eba7a4477d6df84525eeee3
--- /dev/null
+++ b/src/.conda_envs/fastp_0.19.7
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/fastp_0.19.7
\ No newline at end of file
diff --git a/src/.conda_envs/fastqc_0.11.5 b/src/.conda_envs/fastqc_0.11.5
new file mode 120000
index 0000000000000000000000000000000000000000..73298f6fc3616a6de5777b317b93e8b628986b26
--- /dev/null
+++ b/src/.conda_envs/fastqc_0.11.5
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/fastqc_0.11.5
\ No newline at end of file
diff --git a/src/.conda_envs/flexi-splitter_1.0.0 b/src/.conda_envs/flexi-splitter_1.0.0
new file mode 120000
index 0000000000000000000000000000000000000000..48e582352dbbb002a4c2cbe4b12218d26eb5c671
--- /dev/null
+++ b/src/.conda_envs/flexi-splitter_1.0.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/flexi-splitter_1.0.0
\ No newline at end of file
diff --git a/src/.conda_envs/gatk_3.8 b/src/.conda_envs/gatk_3.8
new file mode 120000
index 0000000000000000000000000000000000000000..63462f4446854d83b270fb509620e081c1cdc00e
--- /dev/null
+++ b/src/.conda_envs/gatk_3.8
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/gatk_3.8
\ No newline at end of file
diff --git a/src/.conda_envs/hisat2_2.0.0 b/src/.conda_envs/hisat2_2.0.0
new file mode 120000
index 0000000000000000000000000000000000000000..31d4300201094daba0c2be4ed9b9bffbc3ff8569
--- /dev/null
+++ b/src/.conda_envs/hisat2_2.0.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/hisat2_2.0.0
\ No newline at end of file
diff --git a/src/.conda_envs/hisat2_2.1.0 b/src/.conda_envs/hisat2_2.1.0
new file mode 120000
index 0000000000000000000000000000000000000000..2a26ea5b4a994a456e3ba7228a502b30a0e4a914
--- /dev/null
+++ b/src/.conda_envs/hisat2_2.1.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/hisat2_2.1.0
\ No newline at end of file
diff --git a/src/.conda_envs/htseq_0.11.2 b/src/.conda_envs/htseq_0.11.2
new file mode 120000
index 0000000000000000000000000000000000000000..512c5ab495a12bb9b5503de7d9371773f9cd5858
--- /dev/null
+++ b/src/.conda_envs/htseq_0.11.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/htseq_0.11.2
\ No newline at end of file
diff --git a/src/.conda_envs/htseq_0.9.1 b/src/.conda_envs/htseq_0.9.1
new file mode 120000
index 0000000000000000000000000000000000000000..a0b11c0b6ed34bfe3e67105abb9361d7671b2cef
--- /dev/null
+++ b/src/.conda_envs/htseq_0.9.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/htseq_0.9.1
\ No newline at end of file
diff --git a/src/.conda_envs/kallisto_0.43.1 b/src/.conda_envs/kallisto_0.43.1
new file mode 120000
index 0000000000000000000000000000000000000000..f97de91a46e446fd1d2c7c0cdc76d94a666aec3d
--- /dev/null
+++ b/src/.conda_envs/kallisto_0.43.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/kallisto_0.43.1
\ No newline at end of file
diff --git a/src/.conda_envs/kallisto_0.44.0 b/src/.conda_envs/kallisto_0.44.0
new file mode 120000
index 0000000000000000000000000000000000000000..63b3d7d5411d998edae2703da5d7d131131a3a09
--- /dev/null
+++ b/src/.conda_envs/kallisto_0.44.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/kallisto_0.44.0
\ No newline at end of file
diff --git a/src/.conda_envs/liftover_357 b/src/.conda_envs/liftover_357
new file mode 120000
index 0000000000000000000000000000000000000000..ba72fec91f4d2f96683839a0ccea6b50f561ebc2
--- /dev/null
+++ b/src/.conda_envs/liftover_357
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/liftover_357
\ No newline at end of file
diff --git a/src/.conda_envs/macs2_2.1.2 b/src/.conda_envs/macs2_2.1.2
new file mode 120000
index 0000000000000000000000000000000000000000..d14a7fd7801656eb78fa2b73da9ec33e9806c8b7
--- /dev/null
+++ b/src/.conda_envs/macs2_2.1.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/macs2_2.1.2
\ No newline at end of file
diff --git a/src/.conda_envs/multiqc_0.9 b/src/.conda_envs/multiqc_0.9
new file mode 120000
index 0000000000000000000000000000000000000000..57372d0c5426850192bb9ec31deb8cea849f3d4f
--- /dev/null
+++ b/src/.conda_envs/multiqc_0.9
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/multiqc_0.9
\ No newline at end of file
diff --git a/src/.conda_envs/multiqc_1.0 b/src/.conda_envs/multiqc_1.0
new file mode 120000
index 0000000000000000000000000000000000000000..db90e68674b716ce181518d16577d3a7b05b7236
--- /dev/null
+++ b/src/.conda_envs/multiqc_1.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/multiqc_1.0
\ No newline at end of file
diff --git a/src/.conda_envs/multiqc_1.7 b/src/.conda_envs/multiqc_1.7
new file mode 120000
index 0000000000000000000000000000000000000000..46bbaff86bf20a3737a2473ece096b40df8ca2c6
--- /dev/null
+++ b/src/.conda_envs/multiqc_1.7
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/multiqc_1.7
\ No newline at end of file
diff --git a/src/.conda_envs/music_1.0.0 b/src/.conda_envs/music_1.0.0
new file mode 120000
index 0000000000000000000000000000000000000000..c451a47d3d114e0314fee9a8b0e0260b1fbe6ef2
--- /dev/null
+++ b/src/.conda_envs/music_1.0.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/music_1.0.0
\ No newline at end of file
diff --git a/src/.conda_envs/nextflow_0.25.1 b/src/.conda_envs/nextflow_0.25.1
new file mode 120000
index 0000000000000000000000000000000000000000..b419d0badce280a3fa26c02c7bc7d3fb8879d22d
--- /dev/null
+++ b/src/.conda_envs/nextflow_0.25.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/nextflow_0.25.1
\ No newline at end of file
diff --git a/src/.conda_envs/nextflow_0.28.2 b/src/.conda_envs/nextflow_0.28.2
new file mode 120000
index 0000000000000000000000000000000000000000..d6f5f3dcc36db582dfb4f9453e5289a1fe9b7aeb
--- /dev/null
+++ b/src/.conda_envs/nextflow_0.28.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/nextflow_0.28.2
\ No newline at end of file
diff --git a/src/.conda_envs/nextflow_0.32.0 b/src/.conda_envs/nextflow_0.32.0
new file mode 120000
index 0000000000000000000000000000000000000000..439a15215267bb1ffff044846cf6ff8b38a89499
--- /dev/null
+++ b/src/.conda_envs/nextflow_0.32.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/nextflow_0.32.0
\ No newline at end of file
diff --git a/src/.conda_envs/nextflow_19.01.0 b/src/.conda_envs/nextflow_19.01.0
new file mode 120000
index 0000000000000000000000000000000000000000..18a20221ccfbaf8e6cf869f1e009b710ac29e6ab
--- /dev/null
+++ b/src/.conda_envs/nextflow_19.01.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/nextflow_19.01.0
\ No newline at end of file
diff --git a/src/.conda_envs/picard_2.18.11 b/src/.conda_envs/picard_2.18.11
new file mode 120000
index 0000000000000000000000000000000000000000..cbf205fe4c8c13d94b55b6cf3fc0437e25577891
--- /dev/null
+++ b/src/.conda_envs/picard_2.18.11
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/picard_2.18.11
\ No newline at end of file
diff --git a/src/.conda_envs/pigz_2.3.4 b/src/.conda_envs/pigz_2.3.4
new file mode 120000
index 0000000000000000000000000000000000000000..33455f842e2168c2076dd89f26baab97deb0a9f6
--- /dev/null
+++ b/src/.conda_envs/pigz_2.3.4
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/pigz_2.3.4
\ No newline at end of file
diff --git a/src/.conda_envs/prinseq_0.20.4 b/src/.conda_envs/prinseq_0.20.4
new file mode 120000
index 0000000000000000000000000000000000000000..dca206100190bef5b2c300cda8fcc1852c15cffd
--- /dev/null
+++ b/src/.conda_envs/prinseq_0.20.4
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/prinseq_0.20.4
\ No newline at end of file
diff --git a/src/.conda_envs/ribotish_0.2.4 b/src/.conda_envs/ribotish_0.2.4
new file mode 120000
index 0000000000000000000000000000000000000000..173ef74842b0f029f772b7bbea49e102bafbbfdd
--- /dev/null
+++ b/src/.conda_envs/ribotish_0.2.4
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/ribotish_0.2.4
\ No newline at end of file
diff --git a/src/.conda_envs/rsem_1.3.0 b/src/.conda_envs/rsem_1.3.0
new file mode 120000
index 0000000000000000000000000000000000000000..cea23372fb11c6a72ca65e7a0fc66ceeedae6102
--- /dev/null
+++ b/src/.conda_envs/rsem_1.3.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/rsem_1.3.0
\ No newline at end of file
diff --git a/src/.conda_envs/rsem_1.3.1 b/src/.conda_envs/rsem_1.3.1
new file mode 120000
index 0000000000000000000000000000000000000000..325fb2388538b17cebb896f3abd97c171a4dc742
--- /dev/null
+++ b/src/.conda_envs/rsem_1.3.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/rsem_1.3.1
\ No newline at end of file
diff --git a/src/.conda_envs/salmon_0.8.2 b/src/.conda_envs/salmon_0.8.2
new file mode 120000
index 0000000000000000000000000000000000000000..65b04e4bcc71ec13f66b472b344daa659eeffe68
--- /dev/null
+++ b/src/.conda_envs/salmon_0.8.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/salmon_0.8.2
\ No newline at end of file
diff --git a/src/.conda_envs/samblaster_0.1.24 b/src/.conda_envs/samblaster_0.1.24
new file mode 120000
index 0000000000000000000000000000000000000000..d00932670059582660783647bf4c3e4377142f68
--- /dev/null
+++ b/src/.conda_envs/samblaster_0.1.24
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/samblaster_0.1.24
\ No newline at end of file
diff --git a/src/.conda_envs/samtools_1.5 b/src/.conda_envs/samtools_1.5
new file mode 120000
index 0000000000000000000000000000000000000000..cab31c466a7df1cd827de5fdd090a53b52f79910
--- /dev/null
+++ b/src/.conda_envs/samtools_1.5
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/samtools_1.5
\ No newline at end of file
diff --git a/src/.conda_envs/samtools_1.7 b/src/.conda_envs/samtools_1.7
new file mode 120000
index 0000000000000000000000000000000000000000..29a7fb1e11bd2d24212085320cdd22cf19044e9a
--- /dev/null
+++ b/src/.conda_envs/samtools_1.7
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/samtools_1.7
\ No newline at end of file
diff --git a/src/.conda_envs/seaborn_0.9.0 b/src/.conda_envs/seaborn_0.9.0
new file mode 120000
index 0000000000000000000000000000000000000000..dedf18f456745051b6ab5a3b858b013e2c54e890
--- /dev/null
+++ b/src/.conda_envs/seaborn_0.9.0
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/seaborn_0.9.0
\ No newline at end of file
diff --git a/src/.conda_envs/splicing_lore_env b/src/.conda_envs/splicing_lore_env
new file mode 120000
index 0000000000000000000000000000000000000000..f5570a069c24c769372e28cf7282cb8b4662f215
--- /dev/null
+++ b/src/.conda_envs/splicing_lore_env
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/splicing_lore_env
\ No newline at end of file
diff --git a/src/.conda_envs/sra-tools_2.8.2 b/src/.conda_envs/sra-tools_2.8.2
new file mode 120000
index 0000000000000000000000000000000000000000..a01645bc705e22aa2ebac2b9e4194208d67a8ee6
--- /dev/null
+++ b/src/.conda_envs/sra-tools_2.8.2
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/sra-tools_2.8.2
\ No newline at end of file
diff --git a/src/.conda_envs/star_2.5.3a b/src/.conda_envs/star_2.5.3a
new file mode 120000
index 0000000000000000000000000000000000000000..6030edb0ec8b6cffd59e9b4fdcd0fa7e710f5d59
--- /dev/null
+++ b/src/.conda_envs/star_2.5.3a
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/star_2.5.3a
\ No newline at end of file
diff --git a/src/.conda_envs/star_2.7.0e b/src/.conda_envs/star_2.7.0e
new file mode 120000
index 0000000000000000000000000000000000000000..b8cafd08ff5bb9dc50b0095550681cbb36e842cb
--- /dev/null
+++ b/src/.conda_envs/star_2.7.0e
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/star_2.7.0e
\ No newline at end of file
diff --git a/src/.conda_envs/star_2.7.3a b/src/.conda_envs/star_2.7.3a
new file mode 120000
index 0000000000000000000000000000000000000000..c25c3a3c25f8f3029211084d267995564e8fd04b
--- /dev/null
+++ b/src/.conda_envs/star_2.7.3a
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/star_2.7.3a
\ No newline at end of file
diff --git a/src/.conda_envs/subread_1.6.4 b/src/.conda_envs/subread_1.6.4
new file mode 120000
index 0000000000000000000000000000000000000000..e7bba79df0c763176ec87a05b9989a440e30a813
--- /dev/null
+++ b/src/.conda_envs/subread_1.6.4
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/subread_1.6.4
\ No newline at end of file
diff --git a/src/.conda_envs/tophat_2.1.1 b/src/.conda_envs/tophat_2.1.1
new file mode 120000
index 0000000000000000000000000000000000000000..6bb53f91843bb3b19046e9984c7b12a537a08033
--- /dev/null
+++ b/src/.conda_envs/tophat_2.1.1
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/tophat_2.1.1
\ No newline at end of file
diff --git a/src/.conda_envs/trimmomatic_0.36 b/src/.conda_envs/trimmomatic_0.36
new file mode 120000
index 0000000000000000000000000000000000000000..77753a1c7991af4eff3bbdec4fa2fc2035740fa8
--- /dev/null
+++ b/src/.conda_envs/trimmomatic_0.36
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/trimmomatic_0.36
\ No newline at end of file
diff --git a/src/.conda_envs/trimmomatic_0.39 b/src/.conda_envs/trimmomatic_0.39
new file mode 120000
index 0000000000000000000000000000000000000000..81502468997ba00e66763289fc80877c37c17088
--- /dev/null
+++ b/src/.conda_envs/trimmomatic_0.39
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/trimmomatic_0.39
\ No newline at end of file
diff --git a/src/.conda_envs/ucsc_375 b/src/.conda_envs/ucsc_375
new file mode 120000
index 0000000000000000000000000000000000000000..406934cb95e7bccdffb15a8f9f2ccd9b93b42de7
--- /dev/null
+++ b/src/.conda_envs/ucsc_375
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/ucsc_375
\ No newline at end of file
diff --git a/src/.conda_envs/umitools_0.3.4 b/src/.conda_envs/umitools_0.3.4
new file mode 120000
index 0000000000000000000000000000000000000000..1fbf3fc65e7e2b16d4d5a8b6b6a4f86d862d487f
--- /dev/null
+++ b/src/.conda_envs/umitools_0.3.4
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/umitools_0.3.4
\ No newline at end of file
diff --git a/src/.conda_envs/urqt_d62c1f8 b/src/.conda_envs/urqt_d62c1f8
new file mode 120000
index 0000000000000000000000000000000000000000..bb665c60c65c4633ea01ecb6aac9e687af8e9571
--- /dev/null
+++ b/src/.conda_envs/urqt_d62c1f8
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/envs/urqt_d62c1f8
\ No newline at end of file
diff --git a/src/.conda_packages.sh b/src/.conda_packages.sh
new file mode 100644
index 0000000000000000000000000000000000000000..bbf88057d7bff8c04a22c339d78f85fa24c6e4f8
--- /dev/null
+++ b/src/.conda_packages.sh
@@ -0,0 +1,158 @@
+source src/.conda_psmn.sh
+CONDA_ENVS=src/.conda_envs/
+if [ ! -d ${CONDA_ENVS}pigz_2.3.4 ]; then
+  conda create --yes --name pigz_2.3.4 pigz=2.3.4
+fi
+if [ ! -d ${CONDA_ENVS}tophat_2.1.1 ]; then
+  conda create --yes --name tophat_2.1.1 tophat=2.1.1
+fi
+if [ ! -d ${CONDA_ENVS}hisat2_2.0.0 ]; then
+  conda create --yes --name hisat2_2.0.0 hisat2=2.0.0 samtools=1.7
+fi
+if [ ! -d ${CONDA_ENVS}hisat2_2.1.0 ]; then
+  conda create --yes --name hisat2_2.1.0 hisat2=2.1.0 samtools=1.7
+fi
+if [ ! -d ${CONDA_ENVS}rsem_1.3.1 ]; then
+  conda create --yes --name rsem_1.3.1 rsem=1.3.1 samtools=1.3
+fi
+if [ ! -d ${CONDA_ENVS}rsem_1.3.0 ]; then
+  conda create --yes --name rsem_1.3.0 rsem=1.3.0 samtools=1.3
+fi
+if [ ! -d ${CONDA_ENVS}samblaster_0.1.24 ]; then
+  conda create --yes --name samblaster_0.1.24 samblaster=0.1.24
+fi
+if [ ! -d ${CONDA_ENVS}nextflow_0.25.1 ]; then
+  conda create --yes --name nextflow_0.25.1 nextflow=0.25.1
+fi
+if [ ! -d ${CONDA_ENVS}nextflow_19.01.0 ]; then
+  conda create --yes --name nextflow_19.01.0 nextflow=19.01.0
+fi
+if [ ! -d ${CONDA_ENVS}nextflow_0.32.0 ]; then
+  conda create --yes --name nextflow_0.32.0 nextflow=0.32.0
+fi
+if [ ! -d ${CONDA_ENVS}nextflow_0.28.2 ]; then
+  conda create --yes --name nextflow_0.28.2 nextflow=0.28.2
+fi
+if [ ! -d ${CONDA_ENVS}samtools_1.7 ]; then
+  conda create --yes --name samtools_1.7 samtools=1.7
+fi
+if [ ! -d ${CONDA_ENVS}samtools_1.5 ]; then
+  conda create --yes --name samtools_1.5 samtools=1.5
+fi
+if [ ! -d ${CONDA_ENVS}bowtie2_2.3.2 ]; then
+  conda create --yes --name bowtie2_2.3.2 bowtie2=2.3.2 samtools=1.7
+fi
+if [ ! -d ${CONDA_ENVS}bowtie2_2.3.4.1 ]; then
+  conda create --yes --name bowtie2_2.3.4.1 bowtie2=2.3.4.1 samtools=1.7
+fi
+if [ ! -d ${CONDA_ENVS}sra-tools_2.8.2 ]; then
+  conda create --yes --name sra-tools_2.8.2 sra-tools=2.8.2
+fi
+if [ ! -d ${CONDA_ENVS}trimmomatic_0.36 ]; then
+  conda create --yes --name trimmomatic_0.36 trimmomatic=0.36
+fi
+if [ ! -d ${CONDA_ENVS}trimmomatic_0.39 ]; then
+  conda create --yes --name trimmomatic_0.39 trimmomatic=0.39
+fi
+if [ ! -d ${CONDA_ENVS}Python_3.6.1 ]; then
+  conda create --yes --name Python_3.6.1 Python=3.6.1
+fi
+if [ ! -d ${CONDA_ENVS}Python_2.7.13 ]; then
+  conda create --yes --name Python_2.7.13 Python=2.7.13
+fi
+if [ ! -d ${CONDA_ENVS}kallisto_0.44.0 ]; then
+  conda create --yes --name kallisto_0.44.0 kallisto=0.44.0
+fi
+if [ ! -d ${CONDA_ENVS}kallisto_0.43.1 ]; then
+  conda create --yes --name kallisto_0.43.1 kallisto=0.43.1
+fi
+if [ ! -d ${CONDA_ENVS}music_1.0.0 ]; then
+  conda create --yes --name music_1.0.0 music=1.0.0
+fi
+if [ ! -d ${CONDA_ENVS}umitools_0.3.4 ]; then
+  conda create --yes --name umitools_0.3.4 umitools=0.3.4
+fi
+if [ ! -d ${CONDA_ENVS}fastp_0.19.7 ]; then
+  conda create --yes --name fastp_0.19.7 fastp=0.19.7
+fi
+if [ ! -d ${CONDA_ENVS}gatk_3.8 ]; then
+  conda create --yes --name gatk_3.8 gatk=3.8
+fi
+if [ ! -d ${CONDA_ENVS}cutadapt_1.14 ]; then
+  conda create --yes --name cutadapt_1.14 cutadapt=1.14
+fi
+if [ ! -d ${CONDA_ENVS}bioawk_1.0 ]; then
+  conda create --yes --name bioawk_1.0 bioawk=1.0
+fi
+if [ ! -d ${CONDA_ENVS}canu_1.7 ]; then
+  conda create --yes --name canu_1.7 canu=1.7
+fi
+if [ ! -d ${CONDA_ENVS}fastqc_0.11.5 ]; then
+  conda create --yes --name fastqc_0.11.5 fastqc=0.11.5
+fi
+if [ ! -d ${CONDA_ENVS}bedtools_2.25.0 ]; then
+  conda create --yes --name bedtools_2.25.0 bedtools=2.25.0
+fi
+if [ ! -d ${CONDA_ENVS}macs2_2.1.2 ]; then
+  conda create --yes --name macs2_2.1.2 macs2=2.1.2
+fi
+if [ ! -d ${CONDA_ENVS}bcftools_1.7 ]; then
+  conda create --yes --name bcftools_1.7 bcftools=1.7
+fi
+if [ ! -d ${CONDA_ENVS}salmon_0.8.2 ]; then
+  conda create --yes --name salmon_0.8.2 salmon=0.8.2
+fi
+if [ ! -d ${CONDA_ENVS}urqt_d62c1f8 ]; then
+  conda create --yes --name urqt_d62c1f8 urqt=d62c1f8
+fi
+if [ ! -d ${CONDA_ENVS}multiqc_0.9 ]; then
+  conda create --yes --name multiqc_0.9 multiqc=0.9
+fi
+if [ ! -d ${CONDA_ENVS}multiqc_1.7 ]; then
+  conda create --yes --name multiqc_1.7 multiqc=1.7
+fi
+if [ ! -d ${CONDA_ENVS}multiqc_1.0 ]; then
+  conda create --yes --name multiqc_1.0 multiqc=1.0
+fi
+if [ ! -d ${CONDA_ENVS}cdhit_4.6.8 ]; then
+  conda create --yes --name cdhit_4.6.8 cdhit=4.6.8
+fi
+if [ ! -d ${CONDA_ENVS}deeptools_3.0.2 ]; then
+  conda create --yes --name deeptools_3.0.2 deeptools=3.0.2
+fi
+if [ ! -d ${CONDA_ENVS}htseq_0.9.1 ]; then
+  conda create --yes --name htseq_0.9.1 htseq=0.9.1
+fi
+if [ ! -d ${CONDA_ENVS}htseq_0.11.2 ]; then
+  conda create --yes --name htseq_0.11.2 htseq=0.11.2
+fi
+if [ ! -d ${CONDA_ENVS}R_3.4.3 ]; then
+  conda create --yes --name R_3.4.3 R=3.4.3
+fi
+if [ ! -d ${CONDA_ENVS}R_3.3.1 ]; then
+  conda create --yes --name R_3.3.1 R=3.3.1
+fi
+if [ ! -d ${CONDA_ENVS}file_handle_0.1.1 ]; then
+  conda create --yes --name file_handle_0.1.1 file_handle=0.1.1
+fi
+if [ ! -d ${CONDA_ENVS}ncdu_1.13 ]; then
+  conda create --yes --name ncdu_1.13 ncdu=1.13
+fi
+if [ ! -d ${CONDA_ENVS}picard_2.18.11 ]; then
+  conda create --yes --name picard_2.18.11 picard=2.18.11
+fi
+if [ ! -d ${CONDA_ENVS}sambamba_0.6.7 ]; then
+  conda create --yes --name sambamba_0.6.7 sambamba=0.6.7
+fi
+if [ ! -d ${CONDA_ENVS}star_2.7.3a ]; then
+  conda create --yes --name star_2.7.3a star=2.7.3a
+fi
+if [ ! -d ${CONDA_ENVS}liftover_357 ]; then
+  conda create --yes --name liftover_357 ucsc-liftover==357
+fi
+if [ ! -d ${CONDA_ENVS}axtchain_377 ]; then
+  conda create --yes --name axtchain_377 ucsc-axtchain==377
+fi
+if [ ! -d ${CONDA_ENVS}ribotish_0.2.4 ]; then
+  conda create --yes --name ribotish_0.2.4 ribotish=0.2.4
+fi
diff --git a/src/.conda_psmn.sh b/src/.conda_psmn.sh
new file mode 120000
index 0000000000000000000000000000000000000000..cbb3d9b56bffb23950376e4f50dea1a7a88c80c8
--- /dev/null
+++ b/src/.conda_psmn.sh
@@ -0,0 +1 @@
+/Xnfs/lbmcdb/common/conda/init.sh
\ No newline at end of file
diff --git a/src/.docker_modules/bcftools/1.7/Dockerfile b/src/.docker_modules/bcftools/1.7/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..b602f187b41a8d54429b06ca45822482292ed68a
--- /dev/null
+++ b/src/.docker_modules/bcftools/1.7/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV BCFTOOLS_VERSION=1.7
+ENV PACKAGES bcftools=${BCFTOOLS_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/bcftools/1.7/docker_init.sh b/src/.docker_modules/bcftools/1.7/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c2bf925159aeb2708d742ff891ff96b5d40bf05a
--- /dev/null
+++ b/src/.docker_modules/bcftools/1.7/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/bcftools:1.7
+docker build src/.docker_modules/bcftools/1.7 -t 'lbmc/bcftools:1.7'
+docker push lbmc/bcftools:1.7
diff --git a/src/.docker_modules/bedtools/2.25.0/Dockerfile b/src/.docker_modules/bedtools/2.25.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..73be9c631be18e86b98ba3098275a269b4171650
--- /dev/null
+++ b/src/.docker_modules/bedtools/2.25.0/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:16.04
+MAINTAINER Laurent Modolo
+
+ENV BEDTOOLS_VERSION=2.25.0
+ENV PACKAGES bedtools=${BEDTOOLS_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/bedtools/2.25.0/docker_init.sh b/src/.docker_modules/bedtools/2.25.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e35c4d6aa13c4fd78c4797d68150471af81ef94a
--- /dev/null
+++ b/src/.docker_modules/bedtools/2.25.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/bedtools:2.25.0
+docker build src/.docker_modules/bedtools/2.25.0 -t 'lbmc/bedtools:2.25.0'
+docker push lbmc/bedtools:2.25.0
diff --git a/src/.docker_modules/bioawk/1.0/Dockerfile b/src/.docker_modules/bioawk/1.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..f7ca9803e60926ed90bce0abfe4cf7af90d72672
--- /dev/null
+++ b/src/.docker_modules/bioawk/1.0/Dockerfile
@@ -0,0 +1,21 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV BIOAWK_VERSION=1.0
+ENV PACKAGES git=1:2.17* \
+   build-essential=12.4* \
+   ca-certificates=20180409 \
+   zlib1g-dev=1:1.2.11* \
+   byacc
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN git clone https://github.com/lh3/bioawk.git && \
+  cd bioawk && \
+  git checkout tags/v${BIOAWK_VERSION} && \
+  make && \
+  cd .. && \
+  mv bioawk/bioawk /usr/bin/ && \
+  rm -Rf bioawk
diff --git a/src/.docker_modules/bioawk/1.0/docker_init.sh b/src/.docker_modules/bioawk/1.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8e6d7444062e7368f074f0b80386060e0f0b1a07
--- /dev/null
+++ b/src/.docker_modules/bioawk/1.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/bioawk:1.0
+docker build src/.docker_modules/bioawk/1.0 -t 'lbmc/bioawk:1.0'
+docker push lbmc/bioawk:1.0
diff --git a/src/.docker_modules/bowtie/1.2.2/Dockerfile b/src/.docker_modules/bowtie/1.2.2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..128e68acde94e20c03130a0a3551978231f7a9cd
--- /dev/null
+++ b/src/.docker_modules/bowtie/1.2.2/Dockerfile
@@ -0,0 +1,12 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV BOWTIE_VERSION=1.2.2
+ENV SAMTOOLS_VERSION=1.7
+ENV PACKAGES bowtie=${BOWTIE_VERSION}* \
+    samtools=${SAMTOOLS_VERSION}*
+
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/bowtie/1.2.2/docker_init.sh b/src/.docker_modules/bowtie/1.2.2/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..814a311d967eaf943fd1fa864c8c137ce724c812
--- /dev/null
+++ b/src/.docker_modules/bowtie/1.2.2/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/bowtie:1.2.2
+docker build src/.docker_modules/bowtie/1.2.2 -t 'lbmc/bowtie:1.2.2'
+docker push lbmc/bowtie:1.2.2
diff --git a/src/.docker_modules/bowtie2/2.3.4.1/Dockerfile b/src/.docker_modules/bowtie2/2.3.4.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..0f4ac75e48b5390714a22765c93939b72650b47e
--- /dev/null
+++ b/src/.docker_modules/bowtie2/2.3.4.1/Dockerfile
@@ -0,0 +1,12 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV BOWTIE2_VERSION=2.3.4.1
+ENV SAMTOOLS_VERSION=1.7
+ENV PACKAGES bowtie2=${BOWTIE2_VERSION}* \
+    samtools=${SAMTOOLS_VERSION}* \
+    perl=5.26.1*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/bowtie2/2.3.4.1/docker_init.sh b/src/.docker_modules/bowtie2/2.3.4.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..bdb93e1663ee77e81a65020a0f25a8df182f9245
--- /dev/null
+++ b/src/.docker_modules/bowtie2/2.3.4.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/bowtie2:2.3.4.1
+docker build src/.docker_modules/bowtie2/2.3.4.1 -t 'lbmc/bowtie2:2.3.4.1'
+docker push lbmc/bowtie2:2.3.4.1
diff --git a/src/.docker_modules/bwa/0.7.17/Dockerfile b/src/.docker_modules/bwa/0.7.17/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..654cf85e6c005f5be0fbae74fb5624848a367981
--- /dev/null
+++ b/src/.docker_modules/bwa/0.7.17/Dockerfile
@@ -0,0 +1,30 @@
+FROM lbmc/sambamba:0.6.9
+MAINTAINER Laurent Modolo
+
+ENV BWA_VERSION=0.7.17
+ENV SAMBLASTER_VERSION=0.1.24
+
+ENV PACKAGES curl=7.58* \
+    ca-certificates=20180409 \
+    build-essential=12.4* \
+    zlib1g-dev=1:1.2.11*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L https://github.com/lh3/bwa/releases/download/v${BWA_VERSION}/bwa-${BWA_VERSION}.tar.bz2 -o bwa-v${BWA_VERSION}.tar.bz2 && \
+tar xjf bwa-v${BWA_VERSION}.tar.bz2 && \
+cd bwa-${BWA_VERSION}/ && \
+make && \
+cp bwa /usr/bin && \
+cd .. && \
+rm -R bwa-${BWA_VERSION}/
+
+RUN curl -k -L https://github.com/GregoryFaust/samblaster/releases/download/v.${SAMBLASTER_VERSION}/samblaster-v.${SAMBLASTER_VERSION}.tar.gz -o samblaster-v.${SAMBLASTER_VERSION}.tar.gz && \
+tar xvf samblaster-v.${SAMBLASTER_VERSION}.tar.gz && \
+cd samblaster-v.${SAMBLASTER_VERSION}/ && \
+make && \
+cp samblaster /usr/bin && \
+cd .. && \
+rm -R samblaster-v.${SAMBLASTER_VERSION}/
diff --git a/src/.docker_modules/bwa/0.7.17/docker_init.sh b/src/.docker_modules/bwa/0.7.17/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3cabcbd9adcfdc35d6746a3f56534170a2d1d63a
--- /dev/null
+++ b/src/.docker_modules/bwa/0.7.17/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/bwa:0.7.17
+docker build src/.docker_modules/bwa/0.7.17 -t 'lbmc/bwa:0.7.17'
+docker push lbmc/bwa:0.7.17
diff --git a/src/.docker_modules/canu/1.6/Dockerfile b/src/.docker_modules/canu/1.6/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..66a55ef7ef5a6cc4ed048ee1a2494afd9877aa43
--- /dev/null
+++ b/src/.docker_modules/canu/1.6/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV CANU_VERSION=1.6
+ENV PACKAGES canu=${CANU_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/canu/1.6/docker_init.sh b/src/.docker_modules/canu/1.6/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..b1afabb6dedba67dc9a9537ea570a9c5c62da28f
--- /dev/null
+++ b/src/.docker_modules/canu/1.6/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/canu:1.6
+docker build src/.docker_modules/canu/1.6 -t 'lbmc/canu:1.6'
+docker push lbmc/canu:1.6
diff --git a/src/.docker_modules/cutadapt/1.14/Dockerfile b/src/.docker_modules/cutadapt/1.14/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..35d23b7b235d3ad1e611af9287c92d9147f8e440
--- /dev/null
+++ b/src/.docker_modules/cutadapt/1.14/Dockerfile
@@ -0,0 +1,14 @@
+FROM alpine:3.8
+MAINTAINER Laurent Modolo
+
+ENV CUTADAPT_VERSION=1.14
+ENV PACKAGES bash \
+             python3 \
+             python3-dev \
+             musl-dev \
+             gcc
+
+RUN apk update && \
+    apk add ${PACKAGES}
+
+RUN pip3 install cutadapt==${CUTADAPT_VERSION}
diff --git a/src/.docker_modules/cutadapt/1.14/docker_init.sh b/src/.docker_modules/cutadapt/1.14/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1ba18cb47af7cf8a8c9d4d0fee001f8e2d5747b1
--- /dev/null
+++ b/src/.docker_modules/cutadapt/1.14/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/cutadapt:1.14
+docker build src/.docker_modules/cutadapt/1.14 -t 'lbmc/cutadapt:1.14'
+docker push lbmc/cutadapt:1.14
diff --git a/src/.docker_modules/cutadapt/1.15/Dockerfile b/src/.docker_modules/cutadapt/1.15/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..96bbd20ded423656458885b2522348711a838e97
--- /dev/null
+++ b/src/.docker_modules/cutadapt/1.15/Dockerfile
@@ -0,0 +1,14 @@
+FROM alpine:3.8
+MAINTAINER Laurent Modolo
+
+ENV CUTADAPT_VERSION=1.15
+ENV PACKAGES bash \
+             python3 \
+             python3-dev \
+             musl-dev \
+             gcc
+
+RUN apk update && \
+    apk add ${PACKAGES}
+
+RUN pip3 install cutadapt==${CUTADAPT_VERSION}
diff --git a/src/.docker_modules/cutadapt/1.15/docker_init.sh b/src/.docker_modules/cutadapt/1.15/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..49303006414d8a1ab61bda8da49b850824dde551
--- /dev/null
+++ b/src/.docker_modules/cutadapt/1.15/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/cutadapt:1.15
+docker build src/.docker_modules/cutadapt/1.15 -t 'lbmc/cutadapt:1.15'
+docker push lbmc/cutadapt:1.15
diff --git a/src/.docker_modules/cutadapt/2.1/Dockerfile b/src/.docker_modules/cutadapt/2.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..cb394b2f6b4c6f1256b71e2d62520e714b411b99
--- /dev/null
+++ b/src/.docker_modules/cutadapt/2.1/Dockerfile
@@ -0,0 +1,17 @@
+FROM alpine:3.8
+MAINTAINER Laurent Modolo
+
+ENV CUTADAPT_VERSION=2.1
+ENV PACKAGES bash \
+             python3 \
+             python3-dev \
+             musl-dev \
+             gcc
+
+RUN apk update && \
+    apk add ${PACKAGES}
+
+RUN pip3 install --upgrade pip && \
+  pip3 install cutadapt==${CUTADAPT_VERSION}
+
+
diff --git a/src/.docker_modules/cutadapt/2.1/docker_init.sh b/src/.docker_modules/cutadapt/2.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..cda255f0f22841d3f9cdf61480a053d47c948071
--- /dev/null
+++ b/src/.docker_modules/cutadapt/2.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/cutadapt:2.1
+docker build src/.docker_modules/cutadapt/2.1 -t 'lbmc/cutadapt:2.1'
+docker push lbmc/cutadapt:2.1
diff --git a/src/.docker_modules/deeptools/3.0.2/Dockerfile b/src/.docker_modules/deeptools/3.0.2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..1d42cc9362faca77200117770ccb13dba10a1fe0
--- /dev/null
+++ b/src/.docker_modules/deeptools/3.0.2/Dockerfile
@@ -0,0 +1,18 @@
+FROM debian:stretch
+MAINTAINER Laurent Modolo
+
+ENV DEEPTOOLS_VERSION=3.0.2
+ENV PACKAGES build-essential \
+             python3-pip \
+             python3-setuptools \
+             python3-dev \
+             python3-wheel \
+             zlib1g-dev \
+             procps \
+             libcurl4-gnutls-dev
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN pip3 install deeptools==${DEEPTOOLS_VERSION}
diff --git a/src/.docker_modules/deeptools/3.0.2/docker_init.sh b/src/.docker_modules/deeptools/3.0.2/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..33959edcd7627e94d34d890d875d6cbe0fced74f
--- /dev/null
+++ b/src/.docker_modules/deeptools/3.0.2/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/deeptools:3.0.2
+docker build src/.docker_modules/deeptools/3.0.2 -t 'lbmc/deeptools:3.0.2'
+docker push lbmc/deeptools:3.0.2
diff --git a/src/.docker_modules/deeptools/3.1.1/Dockerfile b/src/.docker_modules/deeptools/3.1.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..2e84a709cd0078765070d284e5119e236b251283
--- /dev/null
+++ b/src/.docker_modules/deeptools/3.1.1/Dockerfile
@@ -0,0 +1,19 @@
+FROM debian:stretch
+MAINTAINER Laurent Modolo
+
+ENV DEEPTOOLS_VERSION=3.1.1
+ENV PACKAGES build-essential \
+             python3-pip \
+             python3-setuptools \
+             python3-dev \
+             python3-wheel \
+             zlib1g-dev \
+             libcurl4-gnutls-dev \
+             procps
+
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN pip3 install deeptools==${DEEPTOOLS_VERSION}
diff --git a/src/.docker_modules/deeptools/3.1.1/docker_init.sh b/src/.docker_modules/deeptools/3.1.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..06e63a90199385965a012175fe3f448f75539ba4
--- /dev/null
+++ b/src/.docker_modules/deeptools/3.1.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/deeptools:3.1.1
+docker build src/.docker_modules/deeptools/3.1.1 -t 'lbmc/deeptools:3.1.1'
+docker push lbmc/deeptools:3.1.1
diff --git a/src/.docker_modules/docker_push.sh b/src/.docker_modules/docker_push.sh
new file mode 100755
index 0000000000000000000000000000000000000000..32e0abc1fac52ceb410b03200b80eea96e9b181a
--- /dev/null
+++ b/src/.docker_modules/docker_push.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
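+# find every Dockerfile under src/.docker_modules, rewrite its path into a
+# <tool>:<version> tag, and push the corresponding lbmc/<tool>:<version> image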
+fd "Dockerfile" src/.docker_modules | perl -pe 's|.*docker_modules/(.*)/(.*)/Dockerfile|\1:\2|g' | awk '{system("docker push lbmc/"$0)}'
diff --git a/src/.docker_modules/docker_update.sh b/src/.docker_modules/docker_update.sh
new file mode 100755
index 0000000000000000000000000000000000000000..39a729bf3b000647df396663343ac7ef64c30f63
--- /dev/null
+++ b/src/.docker_modules/docker_update.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
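+# tag every locally built <tool>:<version> image as lbmc/<tool>:<version>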
+fd "Dockerfile" src/.docke_modules | perl -pe 's|.*docker_modules/(.*)/(.*)/Dockerfile|\1:\2|g' | awk '{system("docker tag "$0" lbmc/" $0)}'
diff --git a/src/.docker_modules/fastp/0.19.7/Dockerfile b/src/.docker_modules/fastp/0.19.7/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..234995a7b43807c45e673f6fc64677123cdfd67f
--- /dev/null
+++ b/src/.docker_modules/fastp/0.19.7/Dockerfile
@@ -0,0 +1,2 @@
+FROM quay.io/biocontainers/fastp:0.19.7--hdbcaa40_0
+MAINTAINER Laurent Modolo
diff --git a/src/.docker_modules/fastp/0.19.7/docker_init.sh b/src/.docker_modules/fastp/0.19.7/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1085915c2cfd5caf2599275d8ad50a909704d728
--- /dev/null
+++ b/src/.docker_modules/fastp/0.19.7/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/fastp:0.19.7
+docker build src/.docker_modules/fastp/0.19.7 -t 'lbmc/fastp:0.19.7'
+docker push lbmc/fastp:0.19.7
diff --git a/src/.docker_modules/fastqc/0.11.5/Dockerfile b/src/.docker_modules/fastqc/0.11.5/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..999edf6a3bb5ad548289a0de1740899802ed335d
--- /dev/null
+++ b/src/.docker_modules/fastqc/0.11.5/Dockerfile
@@ -0,0 +1,10 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV FASTQC_VERSION=0.11.5
+ENV PACKAGES fastqc=${FASTQC_VERSION}* \
+		perl=5.26*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/fastqc/0.11.5/docker_init.sh b/src/.docker_modules/fastqc/0.11.5/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..6b82ff40580dc34b3594278ef2f9c46d36f73560
--- /dev/null
+++ b/src/.docker_modules/fastqc/0.11.5/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/fastqc:0.11.5
+docker build src/.docker_modules/fastqc/0.11.5 -t 'lbmc/fastqc:0.11.5'
+docker push lbmc/fastqc:0.11.5
diff --git a/src/.docker_modules/file_handle/0.1.1/Dockerfile b/src/.docker_modules/file_handle/0.1.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..0557983d5d16c686c577b5eee0ee1308086760fb
--- /dev/null
+++ b/src/.docker_modules/file_handle/0.1.1/Dockerfile
@@ -0,0 +1,17 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV FILE_HANDLE_VERSION 0.1.1
+ENV PACKAGES git=1:2.17.0* \
+    ca-certificates=20180409
+
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN git clone https://github.com/LBMC/file_handle.git && \
+    cd file_handle && \
+    git checkout tags/v${FILE_HANDLE_VERSION} && \
+    cp src/file_handle.py /usr/bin/file_handle.py && \
+    chmod +x /usr/bin/file_handle.py
diff --git a/src/.docker_modules/file_handle/0.1.1/docker_init.sh b/src/.docker_modules/file_handle/0.1.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0f1cf512532dc8d72490b9ebb174d90418f4c640
--- /dev/null
+++ b/src/.docker_modules/file_handle/0.1.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/file_handle:0.1.1
+docker build src/.docker_modules/file_handle/0.1.1 -t 'lbmc/file_handle:0.1.1'
+docker push lbmc/file_handle:0.1.1
diff --git a/src/.docker_modules/gatk/4.0.8.1/Dockerfile b/src/.docker_modules/gatk/4.0.8.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..aceded338897254a31dca826413b63cffd933532
--- /dev/null
+++ b/src/.docker_modules/gatk/4.0.8.1/Dockerfile
@@ -0,0 +1,6 @@
+FROM broadinstitute/gatk:4.0.8.1
+MAINTAINER Laurent Modolo
+
+ENV GATK_VERSION=4.0.8.1
+
+RUN cp gatk /usr/bin/
diff --git a/src/.docker_modules/gatk/4.0.8.1/docker_init.sh b/src/.docker_modules/gatk/4.0.8.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ddfd8ee0205fa9e9af20878ec561821fc4173057
--- /dev/null
+++ b/src/.docker_modules/gatk/4.0.8.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/gatk:4.0.8.1
+docker build src/.docker_modules/gatk/4.0.8.1 -t 'lbmc/gatk:4.0.8.1'
+docker push lbmc/gatk:4.0.8.1
diff --git a/src/.docker_modules/hisat2/2.0.0/Dockerfile b/src/.docker_modules/hisat2/2.0.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..6da58508779ea3adfa22c26f672040e8f588b85c
--- /dev/null
+++ b/src/.docker_modules/hisat2/2.0.0/Dockerfile
@@ -0,0 +1,20 @@
+FROM lbmc/samtools:1.7
+MAINTAINER Nicolas Fontrodona
+
+ENV HISAT2_VERSION=2.0.0
+ENV PACKAGES curl \
+             zip \
+             g++ \
+             perl \
+             python
+
+RUN apk update && \
+    apk add ${PACKAGES}
+
+RUN curl -k -L http://ccb.jhu.edu/software/hisat2/downloads/hisat2-${HISAT2_VERSION}-beta-source.zip -o hisat2_linux-v${HISAT2_VERSION}.zip && \
+unzip hisat2_linux-v${HISAT2_VERSION}.zip && \
+cd hisat2-${HISAT2_VERSION}-beta && \
+make && \
+cp hisat2 /usr/bin && \
+cp hisat2-* /usr/bin && \
+cd .. && rm -Rf hisat2-${HISAT2_VERSION}-beta hisat2_linux-v${HISAT2_VERSION}.zip
diff --git a/src/.docker_modules/hisat2/2.0.0/docker_init.sh b/src/.docker_modules/hisat2/2.0.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8bfb16363342039e3fff7057259a8e835c2a8c6d
--- /dev/null
+++ b/src/.docker_modules/hisat2/2.0.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/hisat2:2.0.0
+docker build src/.docker_modules/hisat2/2.0.0 -t 'lbmc/hisat2:2.0.0'
+docker push lbmc/hisat2:2.0.0
diff --git a/src/.docker_modules/hisat2/2.1.0/Dockerfile b/src/.docker_modules/hisat2/2.1.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..0fc7dacd5984006ac3d0dfebe00dfc42ed050cbd
--- /dev/null
+++ b/src/.docker_modules/hisat2/2.1.0/Dockerfile
@@ -0,0 +1,20 @@
+FROM lbmc/samtools:1.7
+MAINTAINER Nicolas Fontrodona
+
+ENV HISAT2_VERSION=2.1.0
+ENV PACKAGES curl \
+             zip \
+             g++ \
+             perl \
+             python
+
+RUN apk update && \
+    apk add ${PACKAGES}
+
+RUN curl -k -L http://ccb.jhu.edu/software/hisat2/dl/hisat2-${HISAT2_VERSION}-source.zip -o hisat2_linux-v${HISAT2_VERSION}.zip && \
+unzip hisat2_linux-v${HISAT2_VERSION}.zip && \
+cd hisat2-${HISAT2_VERSION} && \
+make && \
+cp hisat2 /usr/bin && \
+cp hisat2-* /usr/bin && \
+cd .. && rm -Rf hisat2-${HISAT2_VERSION} hisat2_linux-v${HISAT2_VERSION}.zip
diff --git a/src/.docker_modules/hisat2/2.1.0/docker_init.sh b/src/.docker_modules/hisat2/2.1.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..55fb191ab23cbe7615f70ba5488a227b0b69580a
--- /dev/null
+++ b/src/.docker_modules/hisat2/2.1.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/hisat2:2.1.0
+docker build src/.docker_modules/hisat2/2.1.0 -t 'lbmc/hisat2:2.1.0'
+docker push lbmc/hisat2:2.1.0
diff --git a/src/.docker_modules/htseq/0.11.2/Dockerfile b/src/.docker_modules/htseq/0.11.2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..d9db9b1531282ad6bf91057c76a686c9cc707610
--- /dev/null
+++ b/src/.docker_modules/htseq/0.11.2/Dockerfile
@@ -0,0 +1,17 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV HTSEQ_VERSION=0.11.2
+ENV PACKAGES build-essential \
+             python3-pip \
+             python3-setuptools \
+             python3-dev \
+             python3-wheel
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN pip3 install numpy==1.14.3
+RUN pip3 install pysam==0.15.0
+RUN pip3 install HTSeq==${HTSEQ_VERSION}
diff --git a/src/.docker_modules/htseq/0.11.2/docker_init.sh b/src/.docker_modules/htseq/0.11.2/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9bd50893380dc1fb19736559551389934802eade
--- /dev/null
+++ b/src/.docker_modules/htseq/0.11.2/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/htseq:0.11.2
+docker build src/.docker_modules/htseq/0.11.2 -t 'lbmc/htseq:0.11.2'
+docker push lbmc/htseq:0.11.2
diff --git a/src/.docker_modules/htseq/0.8.0/Dockerfile b/src/.docker_modules/htseq/0.8.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..492d91a697b945a3acd3aa3bd0ebb09590fb3de5
--- /dev/null
+++ b/src/.docker_modules/htseq/0.8.0/Dockerfile
@@ -0,0 +1,17 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV HTSEQ_VERSION=0.8.0
+ENV PACKAGES build-essential \
+             python3-pip \
+             python3-setuptools \
+             python3-dev \
+             python3-wheel
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN pip3 install numpy==1.14.3
+RUN pip3 install pysam==0.15.0
+RUN pip3 install HTSeq==${HTSEQ_VERSION}
diff --git a/src/.docker_modules/htseq/0.8.0/docker_init.sh b/src/.docker_modules/htseq/0.8.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e322517cf457f8a8a9041da975a7851caf2ab4ef
--- /dev/null
+++ b/src/.docker_modules/htseq/0.8.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/htseq:0.8.0
+docker build src/.docker_modules/htseq/0.8.0 -t 'lbmc/htseq:0.8.0'
+docker push lbmc/htseq:0.8.0
diff --git a/src/.docker_modules/kallisto/0.43.1/Dockerfile b/src/.docker_modules/kallisto/0.43.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..68a731a7fad98ed9c423909797d259baf2ea66eb
--- /dev/null
+++ b/src/.docker_modules/kallisto/0.43.1/Dockerfile
@@ -0,0 +1,15 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV KALLISTO_VERSION=0.43.1
+ENV PACKAGES curl=7.58.0* \
+   ca-certificates=20180409
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L  https://github.com/pachterlab/kallisto/releases/download/v${KALLISTO_VERSION}/kallisto_linux-v${KALLISTO_VERSION}.tar.gz -o kallisto_linux-v${KALLISTO_VERSION}.tar.gz && \
+tar xzf kallisto_linux-v${KALLISTO_VERSION}.tar.gz && \
+cp kallisto_linux-v${KALLISTO_VERSION}/kallisto /usr/bin && \
+rm -Rf kallisto_linux-v${KALLISTO_VERSION}*
diff --git a/src/.docker_modules/kallisto/0.43.1/docker_init.sh b/src/.docker_modules/kallisto/0.43.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..b93c004d24d291b3c92bab0ca7d9ae7c7131cf7a
--- /dev/null
+++ b/src/.docker_modules/kallisto/0.43.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/kallisto:0.43.1
+docker build src/.docker_modules/kallisto/0.43.1 -t 'lbmc/kallisto:0.43.1'
+docker push lbmc/kallisto:0.43.1
diff --git a/src/.docker_modules/kallisto/0.44.0/Dockerfile b/src/.docker_modules/kallisto/0.44.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9c4dd013e34bf76c4dcd408622eebccbe3e52396
--- /dev/null
+++ b/src/.docker_modules/kallisto/0.44.0/Dockerfile
@@ -0,0 +1,15 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV KALLISTO_VERSION=0.44.0
+ENV PACKAGES curl=7.58.0* \
+   ca-certificates=20180409
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L  https://github.com/pachterlab/kallisto/releases/download/v${KALLISTO_VERSION}/kallisto_linux-v${KALLISTO_VERSION}.tar.gz -o kallisto_linux-v${KALLISTO_VERSION}.tar.gz && \
+tar xzf kallisto_linux-v${KALLISTO_VERSION}.tar.gz && \
+cp kallisto_linux-v${KALLISTO_VERSION}/kallisto /usr/bin && \
+rm -Rf kallisto_linux-v${KALLISTO_VERSION}*
diff --git a/src/.docker_modules/kallisto/0.44.0/docker_init.sh b/src/.docker_modules/kallisto/0.44.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4fa79008a07f4e9a4afe6c8bb20fb8ca60b98858
--- /dev/null
+++ b/src/.docker_modules/kallisto/0.44.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/kallisto:0.44.0
+docker build src/.docker_modules/kallisto/0.44.0 -t 'lbmc/kallisto:0.44.0'
+docker push lbmc/kallisto:0.44.0
diff --git a/src/.docker_modules/liftover/357/Dockerfile b/src/.docker_modules/liftover/357/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..973a1dc03c9cc8c73209f13f1fecf84906b8be7b
--- /dev/null
+++ b/src/.docker_modules/liftover/357/Dockerfile
@@ -0,0 +1,2 @@
+FROM quay.io/biocontainers/ucsc-liftover:357--1
+MAINTAINER Laurent Modolo
diff --git a/src/.docker_modules/liftover/357/docker_init.sh b/src/.docker_modules/liftover/357/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..68bd90585292fb30242e3d2cdd94e3538d277f6f
--- /dev/null
+++ b/src/.docker_modules/liftover/357/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/liftover:357
+docker build src/.docker_modules/liftover/357/ -t 'lbmc/liftover:357'
+docker push lbmc/liftover:357
diff --git a/src/.docker_modules/macs2/2.1.2/Dockerfile b/src/.docker_modules/macs2/2.1.2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..71979a50044cd8ce9c52f1d413db269088d352dd
--- /dev/null
+++ b/src/.docker_modules/macs2/2.1.2/Dockerfile
@@ -0,0 +1,2 @@
+FROM quay.io/biocontainers/macs2:2.1.2--py27r351h14c3975_1
+MAINTAINER Laurent Modolo
diff --git a/src/.docker_modules/macs2/2.1.2/docker_init.sh b/src/.docker_modules/macs2/2.1.2/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8dc7b2483a1aa91f1f637e26812469f861b68f0e
--- /dev/null
+++ b/src/.docker_modules/macs2/2.1.2/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/macs2:2.1.2
+docker build src/.docker_modules/macs2/2.1.2 -t 'lbmc/macs2:2.1.2'
+docker push lbmc/macs2:2.1.2
diff --git a/src/.docker_modules/multiqc/1.0/Dockerfile b/src/.docker_modules/multiqc/1.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7acf600c99d9f3500bf015d4b79e3675ac5d7789
--- /dev/null
+++ b/src/.docker_modules/multiqc/1.0/Dockerfile
@@ -0,0 +1,23 @@
+FROM debian:stretch
+MAINTAINER Laurent Modolo
+
+ENV MULTIQC_VERSION=1.0
+ENV PACKAGES build-essential \
+             python3-pip \
+             python3-setuptools \
+             python3-dev \
+             python3-wheel \
+             procps \
+             locales
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN locale-gen en_US.UTF-8
+ENV LC_ALL=C.UTF-8
+ENV LANG=C.UTF-8
+
+
+RUN pip3 install multiqc==${MULTIQC_VERSION}
+
diff --git a/src/.docker_modules/multiqc/1.0/docker_init.sh b/src/.docker_modules/multiqc/1.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1b45ce3e7d6a58c98cf34f7614603dec9ad525fc
--- /dev/null
+++ b/src/.docker_modules/multiqc/1.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/multiqc:1.0
+docker build src/.docker_modules/multiqc/1.0 -t 'lbmc/multiqc:1.0'
+docker push lbmc/multiqc:1.0
diff --git a/src/.docker_modules/multiqc/1.7/Dockerfile b/src/.docker_modules/multiqc/1.7/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..ea71b1ad8227b9456863b7b00fc5d818db090288
--- /dev/null
+++ b/src/.docker_modules/multiqc/1.7/Dockerfile
@@ -0,0 +1,23 @@
+FROM debian:stretch
+MAINTAINER Laurent Modolo
+
+ENV MULTIQC_VERSION=1.7
+ENV PACKAGES build-essential \
+             python3-pip \
+             python3-setuptools \
+             python3-dev \
+             python3-wheel \
+             procps \
+             locales
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN locale-gen en_US.UTF-8
+ENV LC_ALL=C.UTF-8
+ENV LANG=C.UTF-8
+
+
+RUN pip3 install multiqc==${MULTIQC_VERSION}
+
diff --git a/src/.docker_modules/multiqc/1.7/docker_init.sh b/src/.docker_modules/multiqc/1.7/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e091f04a2752d2fcf2901f580ff8f001c8589df6
--- /dev/null
+++ b/src/.docker_modules/multiqc/1.7/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/multiqc:1.7
+docker build src/.docker_modules/multiqc/1.7 -t 'lbmc/multiqc:1.7'
+docker push lbmc/multiqc:1.7
diff --git a/src/.docker_modules/music/6613c53/Dockerfile b/src/.docker_modules/music/6613c53/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..aa47eb572ca48736457bb0f60b0857bbe6c5d9df
--- /dev/null
+++ b/src/.docker_modules/music/6613c53/Dockerfile
@@ -0,0 +1,40 @@
+FROM alpine:3.8
+MAINTAINER Laurent Modolo
+
+ENV MUSIC_VERSION=6613c53
+ENV SAMTOOLS_VERSION=1.7
+ENV PACKAGES g++ \
+bash \
+pcre-dev \
+openssl-dev \
+build-base \
+bzip2-dev \
+xz-dev \
+git \
+curl
+
+RUN apk update && \
+    apk add ${PACKAGES}
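+# build samtools ${SAMTOOLS_VERSION} from source; --without-curses skips the ncurses dependency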
+RUN curl -L -o samtools-${SAMTOOLS_VERSION}.tar.bz2 \
+    http://jaist.dl.sourceforge.net/project/samtools/samtools/${SAMTOOLS_VERSION}/samtools-${SAMTOOLS_VERSION}.tar.bz2 \
+ && tar jxvf samtools-${SAMTOOLS_VERSION}.tar.bz2  \
+ && cd samtools-${SAMTOOLS_VERSION}/ \
+ && ./configure --without-curses \
+ && make \
+ && make install
+
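+# build MUSIC at the pinned commit and install its binaries and helper scripts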
+RUN git clone https://github.com/gersteinlab/MUSIC.git && \
+  cd MUSIC && \
+  git checkout ${MUSIC_VERSION} && \
+  make clean && \
+  make  && \
+  cd .. && \
+  mv MUSIC/bin/MUSIC /usr/bin/ && \
+  mv MUSIC/bin/generate_multimappability_signal.csh /usr/bin/ && \
+  mv MUSIC/bin/run_MUSIC.csh /usr/bin/ && \
+  rm -Rf MUSIC
+
+RUN chmod +x /usr/bin/*
+
diff --git a/src/.docker_modules/music/6613c53/docker_init.sh b/src/.docker_modules/music/6613c53/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..20e327a97a09ced0c55b16d6a780f35a09e1c881
--- /dev/null
+++ b/src/.docker_modules/music/6613c53/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/music:6613c53
+docker build src/.docker_modules/music/6613c53 -t 'lbmc/music:6613c53'
+docker push lbmc/music:6613c53
diff --git a/src/.docker_modules/picard/2.18.11/Dockerfile b/src/.docker_modules/picard/2.18.11/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..6a358e55bf36a4be4842335f47491ec8ac3a4ced
--- /dev/null
+++ b/src/.docker_modules/picard/2.18.11/Dockerfile
@@ -0,0 +1,19 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV PICARD_VERSION=2.18.11
+ENV PACKAGES default-jre=2:1.1* \
+    curl=7.58.0* \
+    ca-certificates=20180409
+
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L https://github.com/broadinstitute/picard/releases/download/${PICARD_VERSION}/picard.jar -o picard.jar && \
+  mkdir -p /usr/share/java/  && \
+  mv picard.jar /usr/share/java/
+
+COPY PicardCommandLine /usr/bin/
+RUN chmod +x /usr/bin/PicardCommandLine
diff --git a/src/.docker_modules/picard/2.18.11/PicardCommandLine b/src/.docker_modules/picard/2.18.11/PicardCommandLine
new file mode 100644
index 0000000000000000000000000000000000000000..ce067365785f2f03c722668eff8d80a94b9b34d3
--- /dev/null
+++ b/src/.docker_modules/picard/2.18.11/PicardCommandLine
@@ -0,0 +1,18 @@
+#!/bin/sh
+set -eu
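+# dispatch on the name used to invoke this wrapper: the picard-tools alias is
+# deprecated; any other invocation name is passed to picard.jar as its command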
+PRG="$(basename -- "$0")"
+case "$PRG" in
+picard-tools)
+        echo 1>&2 'Warning: picard-tools is deprecated and should be replaced by PicardCommandLine'
+        ;;
+PicardCommandLine)
+        ;;
+*)
+        set -- "$PRG" "$@"
+        ;;
+esac
+
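+# ${JAVA_OPTIONS-} expands to nothing when JAVA_OPTIONS is unset, keeping set -u happy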
+exec java ${JAVA_OPTIONS-} -jar /usr/share/java/picard.jar "$@"
diff --git a/src/.docker_modules/picard/2.18.11/docker_init.sh b/src/.docker_modules/picard/2.18.11/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..82c4cf7d3bdf581587fa7a3345cff9cb465158ae
--- /dev/null
+++ b/src/.docker_modules/picard/2.18.11/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/picard:2.18.11
+docker build src/.docker_modules/picard/2.18.11 -t 'lbmc/picard:2.18.11'
+docker push lbmc/picard:2.18.11
diff --git a/src/.docker_modules/pigz/2.4/Dockerfile b/src/.docker_modules/pigz/2.4/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..75f9e2fcfd9b5394b45fbfa4541861e6e4ab43e1
--- /dev/null
+++ b/src/.docker_modules/pigz/2.4/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV PIGZ_VERSION=2.4
+ENV PACKAGES pigz=${PIGZ_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/pigz/2.4/docker_init.sh b/src/.docker_modules/pigz/2.4/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..38d7347d72e9345ca69f54c9d8ea2ac3ec0ebbb8
--- /dev/null
+++ b/src/.docker_modules/pigz/2.4/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/pigz:2.4
+docker build src/.docker_modules/pigz/2.4 -t 'lbmc/pigz:2.4'
+docker push lbmc/pigz:2.4
diff --git a/src/.docker_modules/python/3.8/Dockerfile b/src/.docker_modules/python/3.8/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..20b4edd3aaa75374695afb4ca34c7be889e01933
--- /dev/null
+++ b/src/.docker_modules/python/3.8/Dockerfile
@@ -0,0 +1,2 @@
+FROM python:3.8.0a3-stretch
+MAINTAINER Remi Seraphin
diff --git a/src/.docker_modules/python/3.8/docker_init.sh b/src/.docker_modules/python/3.8/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9a1c9b8b04f56586c6fadda16add7af4f66c3454
--- /dev/null
+++ b/src/.docker_modules/python/3.8/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/python:3.8
+docker build src/.docker_modules/python/3.8 -t 'lbmc/python:3.8'
+docker push lbmc/python:3.8
diff --git a/src/.docker_modules/r/3.5.3/Dockerfile b/src/.docker_modules/r/3.5.3/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7c937de7a166d84c0bf75fe4a17fb4b162d61391
--- /dev/null
+++ b/src/.docker_modules/r/3.5.3/Dockerfile
@@ -0,0 +1,7 @@
+FROM rocker/r-ver:3.5.3
+MAINTAINER Remi Seraphin
+
+RUN apt-get update && \
+    apt-get install -y libssl-dev libcurl4-openssl-dev libxml2-dev zlib1g-dev git procps
+RUN R -e "install.packages('tidyverse'); \
+          install.packages('ggplot2')"
diff --git a/src/.docker_modules/r/3.5.3/docker_init.sh b/src/.docker_modules/r/3.5.3/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ce78559716b93f92c3c602cf242dd1d42a1e6220
--- /dev/null
+++ b/src/.docker_modules/r/3.5.3/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/r:3.5.3
+docker build src/.docker_modules/r/3.5.3 -t 'lbmc/r:3.5.3'
+docker push lbmc/r:3.5.3
diff --git a/src/.docker_modules/rsem/1.3.0/Dockerfile b/src/.docker_modules/rsem/1.3.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..337521c0496db33c93c20c8fd4d756efcba603a8
--- /dev/null
+++ b/src/.docker_modules/rsem/1.3.0/Dockerfile
@@ -0,0 +1,24 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV RSEM_VERSION=1.3.0
+ENV BOWTIE2_VERSION=2.3.4.1
+ENV SAMTOOLS_VERSION=1.7
+ENV PACKAGES git=1:2.17* \
+   build-essential=12.4* \
+   ca-certificates=20180409 \
+   zlib1g-dev=1:1.2.11* \
+   bowtie2=${BOWTIE2_VERSION}* \
+   samtools=${SAMTOOLS_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN git clone https://github.com/deweylab/RSEM.git RSEM_${RSEM_VERSION} && \
+  cd RSEM_${RSEM_VERSION} && \
+  git checkout tags/v${RSEM_VERSION} && \
+  make && \
+  cd .. && \
+  mv RSEM_${RSEM_VERSION}/rsem-* RSEM_${RSEM_VERSION}/rsem_* RSEM_${RSEM_VERSION}/convert-* RSEM_${RSEM_VERSION}/extract-* /usr/bin/ && \
+  rm -Rf RSEM_${RSEM_VERSION}
diff --git a/src/.docker_modules/rsem/1.3.0/docker_init.sh b/src/.docker_modules/rsem/1.3.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..aadcb4d8ce01353c3510a6a649d640121865bf8d
--- /dev/null
+++ b/src/.docker_modules/rsem/1.3.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/rsem:1.3.0
+docker build src/.docker_modules/rsem/1.3.0 -t 'lbmc/rsem:1.3.0'
+docker push lbmc/rsem:1.3.0
diff --git a/src/.docker_modules/salmon/0.8.2/Dockerfile b/src/.docker_modules/salmon/0.8.2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..31bb28ecce7ec5e5e70baf591ec418a1258bd2dd
--- /dev/null
+++ b/src/.docker_modules/salmon/0.8.2/Dockerfile
@@ -0,0 +1,15 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV SALMON_VERSION=0.8.2
+ENV PACKAGES curl=7.58.0* \
+   ca-certificates=20180409
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L  https://github.com/COMBINE-lab/salmon/releases/download/v${SALMON_VERSION}/Salmon-${SALMON_VERSION}_linux_x86_64.tar.gz -o Salmon-${SALMON_VERSION}_linux_x86_64.tar.gz && \
+tar xzf Salmon-${SALMON_VERSION}_linux_x86_64.tar.gz && \
+mv Salmon-${SALMON_VERSION}_linux_x86_64/bin/* /usr/bin/ && \
+rm -Rf Salmon-${SALMON_VERSION}*
diff --git a/src/.docker_modules/salmon/0.8.2/docker_init.sh b/src/.docker_modules/salmon/0.8.2/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f44850b49c43ae852f1ef93b88a09f301169f780
--- /dev/null
+++ b/src/.docker_modules/salmon/0.8.2/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/salmon:0.8.2
+docker build src/.docker_modules/salmon/0.8.2 -t 'lbmc/salmon:0.8.2'
+docker push lbmc/salmon:0.8.2
diff --git a/src/.docker_modules/sambamba/0.6.7/Dockerfile b/src/.docker_modules/sambamba/0.6.7/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..5858a176917fc301e165ba18a6d69c34a1bf786a
--- /dev/null
+++ b/src/.docker_modules/sambamba/0.6.7/Dockerfile
@@ -0,0 +1,17 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV SAMBAMBA_VERSION=0.6.7
+ENV PACKAGES curl=7.58.0* \
+    ca-certificates=20180409 \
+    build-essential=12.4* \
+    zlib1g-dev=1:1.2.11*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L https://github.com/biod/sambamba/releases/download/v${SAMBAMBA_VERSION}/sambamba_v${SAMBAMBA_VERSION}_linux.tar.bz2 -o sambamba_v${SAMBAMBA_VERSION}_linux.tar.bz2 && \
+tar xvjf sambamba_v${SAMBAMBA_VERSION}_linux.tar.bz2 && \
+mv sambamba /usr/bin/ && \
+rm -R sambamba_v${SAMBAMBA_VERSION}_linux*
diff --git a/src/.docker_modules/sambamba/0.6.7/docker_init.sh b/src/.docker_modules/sambamba/0.6.7/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ccedf316633c21653bde1312e1ccd5376b95fafe
--- /dev/null
+++ b/src/.docker_modules/sambamba/0.6.7/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/sambamba:0.6.7
+docker build src/.docker_modules/sambamba/0.6.7 -t 'lbmc/sambamba:0.6.7'
+docker push lbmc/sambamba:0.6.7
diff --git a/src/.docker_modules/sambamba/0.6.9/Dockerfile b/src/.docker_modules/sambamba/0.6.9/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..f45a65412dba77df471e7cd8ee5c79bd2439f1d9
--- /dev/null
+++ b/src/.docker_modules/sambamba/0.6.9/Dockerfile
@@ -0,0 +1,16 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV SAMBAMBA_VERSION=0.6.9
+ENV PACKAGES curl=7.58.0* \
+    ca-certificates=20180409 \
+    build-essential=12.4* \
+    zlib1g-dev=1:1.2.11*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L https://github.com/biod/sambamba/releases/download/v${SAMBAMBA_VERSION}/sambamba-${SAMBAMBA_VERSION}-linux-static.gz -o sambamba_v${SAMBAMBA_VERSION}_linux.gz && \
+gunzip sambamba_v${SAMBAMBA_VERSION}_linux.gz && \
+mv sambamba_v${SAMBAMBA_VERSION}_linux /usr/bin/sambamba
diff --git a/src/.docker_modules/sambamba/0.6.9/docker_init.sh b/src/.docker_modules/sambamba/0.6.9/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9525b17e688d739198a1421f641c2281b45ade9a
--- /dev/null
+++ b/src/.docker_modules/sambamba/0.6.9/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/sambamba:0.6.9
+docker build src/.docker_modules/sambamba/0.6.9 -t 'lbmc/sambamba:0.6.9'
+docker push lbmc/sambamba:0.6.9
diff --git a/src/.docker_modules/samblaster/0.1.24/Dockerfile b/src/.docker_modules/samblaster/0.1.24/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..dcba024b879e8ef7720e35873d9e8101116891c6
--- /dev/null
+++ b/src/.docker_modules/samblaster/0.1.24/Dockerfile
@@ -0,0 +1,22 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV SAMBLASTER_VERSION=0.1.24
+ENV SAMTOOLS_VERSION=1.7
+ENV PACKAGES curl=7.58.0* \
+    samtools=${SAMTOOLS_VERSION}* \
+    ca-certificates=20180409 \
+    build-essential=12.4* \
+    zlib1g-dev=1:1.2.11*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN curl -k -L https://github.com/GregoryFaust/samblaster/releases/download/v.${SAMBLASTER_VERSION}/samblaster-v.${SAMBLASTER_VERSION}.tar.gz -o samblaster-v.${SAMBLASTER_VERSION}.tar.gz && \
+tar xvf samblaster-v.${SAMBLASTER_VERSION}.tar.gz && \
+cd samblaster-v.${SAMBLASTER_VERSION}/ && \
+make && \
+cp samblaster /usr/bin && \
+cd .. && \
+rm -R samblaster-v.${SAMBLASTER_VERSION}/
diff --git a/src/.docker_modules/samblaster/0.1.24/docker_init.sh b/src/.docker_modules/samblaster/0.1.24/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0fec5a0782d348935647212a430f9c1efe7d4367
--- /dev/null
+++ b/src/.docker_modules/samblaster/0.1.24/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/samblaster:0.1.24
+docker build src/.docker_modules/samblaster/0.1.24 -t 'lbmc/samblaster:0.1.24'
+docker push lbmc/samblaster:0.1.24
diff --git a/src/.docker_modules/samtools/1.7/Dockerfile b/src/.docker_modules/samtools/1.7/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..212101a98a9e01783e95d2db93c43d359d5f4d3a
--- /dev/null
+++ b/src/.docker_modules/samtools/1.7/Dockerfile
@@ -0,0 +1,27 @@
+FROM alpine:3.8
+MAINTAINER Laurent Modolo
+
+ENV SAMTOOLS_VERSION=1.7
+ENV PACKAGES git \
+             make \
+             gcc \
+             musl-dev \
+             zlib-dev \
+             ncurses-dev \
+             bzip2-dev \
+             xz-dev \
+             bash
+
+RUN apk update && \
+    apk add ${PACKAGES}
+
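+# the samtools build expects the matching htslib checkout next to it in ../htslib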
+RUN git clone https://github.com/samtools/htslib.git && \
+cd htslib && \
+git checkout ${SAMTOOLS_VERSION}  && \
+cd .. && \
+git clone https://github.com/samtools/samtools.git && \
+cd samtools && \
+git checkout ${SAMTOOLS_VERSION} && \
+make && \
+cp samtools /usr/bin/
diff --git a/src/.docker_modules/samtools/1.7/docker_init.sh b/src/.docker_modules/samtools/1.7/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..83c510a9e6fe22e1c28eac9bed5e44d1c707da15
--- /dev/null
+++ b/src/.docker_modules/samtools/1.7/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/samtools:1.7
+docker build src/.docker_modules/samtools/1.7 -t 'lbmc/samtools:1.7'
+docker push lbmc/samtools:1.7
diff --git a/src/.docker_modules/sratoolkit/2.8.2/Dockerfile b/src/.docker_modules/sratoolkit/2.8.2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..55622ef79ad427763d9f8e6e1523d405906835c9
--- /dev/null
+++ b/src/.docker_modules/sratoolkit/2.8.2/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV SRATOOLKIT_VERSION=2.8.2
+ENV PACKAGES sra-toolkit=${SRATOOLKIT_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/sratoolkit/2.8.2/docker_init.sh b/src/.docker_modules/sratoolkit/2.8.2/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ce040fcc1b3ed4f7041d01421e7a2031d983ef6f
--- /dev/null
+++ b/src/.docker_modules/sratoolkit/2.8.2/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/sratoolkit:2.8.2
+docker build src/.docker_modules/sratoolkit/2.8.2 -t 'lbmc/sratoolkit:2.8.2'
+docker push lbmc/sratoolkit:2.8.2
diff --git a/src/.docker_modules/star/2.7.3a/Dockerfile b/src/.docker_modules/star/2.7.3a/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..a045a2352dd59dc8de04e07b233a0399b891cde8
--- /dev/null
+++ b/src/.docker_modules/star/2.7.3a/Dockerfile
@@ -0,0 +1,2 @@
+FROM quay.io/biocontainers/star:2.7.3a--0
+MAINTAINER Laurent Modolo
diff --git a/src/.docker_modules/star/2.7.3a/docker_init.sh b/src/.docker_modules/star/2.7.3a/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..50beecfcc7fcb7a9b1943a418651cafb55851495
--- /dev/null
+++ b/src/.docker_modules/star/2.7.3a/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/star:2.7.3a
+docker build src/.docker_modules/star/2.7.3a/ -t 'lbmc/star:2.7.3a'
+docker push lbmc/star:2.7.3a
diff --git a/src/.docker_modules/subread/1.6.4/Dockerfile b/src/.docker_modules/subread/1.6.4/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..97d5870ad4d7a59ec0e8f38300d8e86651cc2e8b
--- /dev/null
+++ b/src/.docker_modules/subread/1.6.4/Dockerfile
@@ -0,0 +1,2 @@
+FROM quay.io/biocontainers/subread:1.6.4--h84994c4_1
+MAINTAINER Laurent Modolo
diff --git a/src/.docker_modules/subread/1.6.4/docker_init.sh b/src/.docker_modules/subread/1.6.4/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0dd51ca0dbc45ab1b2c237c1a43c670f14dd184a
--- /dev/null
+++ b/src/.docker_modules/subread/1.6.4/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/subread:1.6.4
+docker build src/.docker_modules/subread/1.6.4 -t 'lbmc/subread:1.6.4'
+docker push lbmc/subread:1.6.4
diff --git a/src/.docker_modules/tophat/2.1.1/Dockerfile b/src/.docker_modules/tophat/2.1.1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..82d3c5d634b59ede9281f56b00f7cc6be1f316e6
--- /dev/null
+++ b/src/.docker_modules/tophat/2.1.1/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV TOPHAT_VERSION=2.1.1
+ENV PACKAGES tophat=${TOPHAT_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/tophat/2.1.1/docker_init.sh b/src/.docker_modules/tophat/2.1.1/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..67151131596b2c2dda5e5cc7beadc69dcd64aa6c
--- /dev/null
+++ b/src/.docker_modules/tophat/2.1.1/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/tophat:2.1.1
+docker build src/.docker_modules/tophat/2.1.1 -t 'lbmc/tophat:2.1.1'
+docker push lbmc/tophat:2.1.1
diff --git a/src/.docker_modules/trimmomatic/0.36/Dockerfile b/src/.docker_modules/trimmomatic/0.36/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..8e04269a05b905fa41c17fe86338bce8f75b33c0
--- /dev/null
+++ b/src/.docker_modules/trimmomatic/0.36/Dockerfile
@@ -0,0 +1,9 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV TRIMMOMATIC_VERSION=0.36
+ENV PACKAGES trimmomatic=${TRIMMOMATIC_VERSION}*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
diff --git a/src/.docker_modules/trimmomatic/0.36/docker_init.sh b/src/.docker_modules/trimmomatic/0.36/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f054581bde67aff212a04284c5f463f8a6e4ab75
--- /dev/null
+++ b/src/.docker_modules/trimmomatic/0.36/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/trimmomatic:0.36
+docker build src/.docker_modules/trimmomatic/0.36 -t 'lbmc/trimmomatic:0.36'
+docker push lbmc/trimmomatic:0.36
diff --git a/src/.docker_modules/ucsc/375/Dockerfile b/src/.docker_modules/ucsc/375/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9365a974bdc86f80897ad97ca8d13436b3bb364d
--- /dev/null
+++ b/src/.docker_modules/ucsc/375/Dockerfile
@@ -0,0 +1,27 @@
+FROM debian:jessie
+MAINTAINER Laurent Modolo
+
+ENV PACKAGES apt-utils \
+             curl \
+             build-essential \
+             libssl-dev \
+             libpng-dev \
+             uuid-dev \
+             libmysqlclient-dev \
+             procps \
+             rsync
+
+
+RUN apt-get update && \
+    apt-get install -y ${PACKAGES}
+
+ENV UCSC_VERSION=375
+
+RUN curl -k -L http://hgdownload.soe.ucsc.edu/admin/exe/userApps.v${UCSC_VERSION}.src.tgz -o userApps.v${UCSC_VERSION}.src.tgz &&\
+tar xvf userApps.v${UCSC_VERSION}.src.tgz &&\
+cd userApps/ && \
+make &&\
+cd .. &&\
+mv userApps/bin/* /usr/bin/ &&\
+rm -R userApps.v${UCSC_VERSION}.src.tgz &&\
+rm -R userApps
diff --git a/src/.docker_modules/ucsc/375/docker_init.sh b/src/.docker_modules/ucsc/375/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f0cc90565cc1f5583eb0c4303976300f695500e0
--- /dev/null
+++ b/src/.docker_modules/ucsc/375/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/ucsc:375
+docker build src/.docker_modules/ucsc/375/ -t 'lbmc/ucsc:375'
+docker push lbmc/ucsc:375
diff --git a/src/.docker_modules/umi_tools/0.5.4/Dockerfile b/src/.docker_modules/umi_tools/0.5.4/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..81c4cc8c1529187fd8c34f5106f05e2ba6b55be6
--- /dev/null
+++ b/src/.docker_modules/umi_tools/0.5.4/Dockerfile
@@ -0,0 +1,2 @@
+FROM quay.io/biocontainers/umi_tools:0.5.4--py27hdd9f355_1
+MAINTAINER Rémi SERAPHIN
diff --git a/src/.docker_modules/umi_tools/0.5.4/docker_init.sh b/src/.docker_modules/umi_tools/0.5.4/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..200e9c066fe98de8262a48eea0f615b064ff90a4
--- /dev/null
+++ b/src/.docker_modules/umi_tools/0.5.4/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/umi_tools:0.5.4
+docker build src/.docker_modules/umi_tools/0.5.4/ -t 'lbmc/umi_tools:0.5.4'
+docker push lbmc/umi_tools:0.5.4
diff --git a/src/.docker_modules/umi_tools/1.0.0/Dockerfile b/src/.docker_modules/umi_tools/1.0.0/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..fee8aa46f3f27d30c0ea1bdcaee1a2abe974ac6e
--- /dev/null
+++ b/src/.docker_modules/umi_tools/1.0.0/Dockerfile
@@ -0,0 +1,18 @@
+FROM debian:stretch
+MAINTAINER Rémi SERAPHIN
+
+ENV UMI_TOOLS_VERSION=1.0.0
+ENV PACKAGES="bash \
+             python3 \
+             python3-dev \
+             python3-pip \
+             procps \
+             gcc"
+
+RUN apt-get update && \
+apt-get install -y --no-install-recommends ${PACKAGES} && \
+apt-get clean
+
+RUN pip3 install setuptools
+RUN pip3 install umi_tools==${UMI_TOOLS_VERSION}
+
diff --git a/src/.docker_modules/umi_tools/1.0.0/docker_init.sh b/src/.docker_modules/umi_tools/1.0.0/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..200e9c066fe98de8262a48eea0f615b064ff90a4
--- /dev/null
+++ b/src/.docker_modules/umi_tools/1.0.0/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/umi_tools:1.0.0
+docker build src/.docker_modules/umi_tools/1.0.0/ -t 'lbmc/umi_tools:1.0.0'
+docker push lbmc/umi_tools:1.0.0
diff --git a/src/.docker_modules/urqt/d62c1f8/Dockerfile b/src/.docker_modules/urqt/d62c1f8/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9b54a7eb91ad3767532c8703fe25d6cad29e27cb
--- /dev/null
+++ b/src/.docker_modules/urqt/d62c1f8/Dockerfile
@@ -0,0 +1,21 @@
+FROM ubuntu:18.04
+MAINTAINER Laurent Modolo
+
+ENV URQT_VERSION=d62c1f8
+ENV PACKAGES git=1:2.17* \
+   build-essential=12.4* \
+   ca-certificates=20180409 \
+   procps \
+   zlib1g-dev=1:1.2.11*
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ${PACKAGES} && \
+    apt-get clean
+
+RUN git clone https://github.com/l-modolo/UrQt.git && \
+  cd UrQt && \
+  git checkout ${URQT_VERSION} && \
+  make && \
+  cd .. && \
+  mv UrQt/UrQt /usr/bin/ && \
+  rm -Rf UrQt
diff --git a/src/.docker_modules/urqt/d62c1f8/docker_init.sh b/src/.docker_modules/urqt/d62c1f8/docker_init.sh
new file mode 100755
index 0000000000000000000000000000000000000000..bb3fb4f882ec4f93e4cec643e035fb7d2d7a4963
--- /dev/null
+++ b/src/.docker_modules/urqt/d62c1f8/docker_init.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+docker pull lbmc/urqt:d62c1f8
+docker build src/.docker_modules/urqt/d62c1f8 -t 'lbmc/urqt:d62c1f8'
+docker push lbmc/urqt:d62c1f8
diff --git a/src/.singularity_in2p3/lbmc-bcftools-1.7.img b/src/.singularity_in2p3/lbmc-bcftools-1.7.img
new file mode 120000
index 0000000000000000000000000000000000000000..3f3b72a6df7f1dd3d5fc18d4b62c804d0acfc2e4
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-bcftools-1.7.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-bcftools-1.7.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-bedtools-2.25.0.img b/src/.singularity_in2p3/lbmc-bedtools-2.25.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..2003d1149ce5a96b075e382e8130c9ff2b1cc20a
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-bedtools-2.25.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-bedtools-2.25.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-bioawk-1.0.img b/src/.singularity_in2p3/lbmc-bioawk-1.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..56d68b2822fc0a378460cacda068ef7a3a9ebbcd
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-bioawk-1.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-bioawk-1.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-bowtie-1.2.2.img b/src/.singularity_in2p3/lbmc-bowtie-1.2.2.img
new file mode 120000
index 0000000000000000000000000000000000000000..0977cfa43f0457f910bffef7e80fecf7c2cd3b57
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-bowtie-1.2.2.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-bowtie-1.2.2.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-bowtie2-2.3.4.1.img b/src/.singularity_in2p3/lbmc-bowtie2-2.3.4.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..da3b13a066c217995d5bf467fcd4f4be9b01ea2f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-bowtie2-2.3.4.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-bowtie2-2.3.4.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-bwa-0.7.17.img b/src/.singularity_in2p3/lbmc-bwa-0.7.17.img
new file mode 120000
index 0000000000000000000000000000000000000000..d6c0b9d74aa087e13a6977eba593f6260a7be96e
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-bwa-0.7.17.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-bwa-0.7.17.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-canu-1.6.img b/src/.singularity_in2p3/lbmc-canu-1.6.img
new file mode 120000
index 0000000000000000000000000000000000000000..202c4a121b3feae32fac6291a6f1557b9c60cf4a
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-canu-1.6.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-canu-1.6.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-cutadapt-1.14.img b/src/.singularity_in2p3/lbmc-cutadapt-1.14.img
new file mode 120000
index 0000000000000000000000000000000000000000..5a40d3d01cfdcd1d788863ae2cc5a6d6d8dddd3b
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-cutadapt-1.14.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-cutadapt-1.14.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-cutadapt-1.15.img b/src/.singularity_in2p3/lbmc-cutadapt-1.15.img
new file mode 120000
index 0000000000000000000000000000000000000000..b7fe369985b74675b03f4cd593655e333b2c2e65
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-cutadapt-1.15.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-cutadapt-1.15.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-cutadapt-2.1.img b/src/.singularity_in2p3/lbmc-cutadapt-2.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..bfe8944319c632a29654f2e1177d6e6f4b31c1d6
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-cutadapt-2.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-cutadapt-2.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-deeptools-3.0.2.img b/src/.singularity_in2p3/lbmc-deeptools-3.0.2.img
new file mode 120000
index 0000000000000000000000000000000000000000..20fac04f7d79a4e30dcae9320901e3afca92fe3f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-deeptools-3.0.2.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-deeptools-3.0.2.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-deeptools-3.1.1.img b/src/.singularity_in2p3/lbmc-deeptools-3.1.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..cd9b1e8d5b711bb2c8166888b689c99595845bc9
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-deeptools-3.1.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-deeptools-3.1.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-fastp-0.19.7.img b/src/.singularity_in2p3/lbmc-fastp-0.19.7.img
new file mode 120000
index 0000000000000000000000000000000000000000..aec2258a1404d0e0bb35d93335386e45a9f65a9b
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-fastp-0.19.7.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-fastp-0.19.7.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-fastqc-0.11.5.img b/src/.singularity_in2p3/lbmc-fastqc-0.11.5.img
new file mode 120000
index 0000000000000000000000000000000000000000..939ac3cf2c1ff31e1d7900b5035c13f41e24c39a
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-fastqc-0.11.5.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-fastqc-0.11.5.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-file_handle-0.1.1.img b/src/.singularity_in2p3/lbmc-file_handle-0.1.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..50396d25d4c38864c7df09053aa34bc75c1c2167
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-file_handle-0.1.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-file_handle-0.1.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-gatk-4.0.8.1.img b/src/.singularity_in2p3/lbmc-gatk-4.0.8.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..f398e91fe0edfc88f10a30aaa8d3de8d0be7621c
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-gatk-4.0.8.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-gatk-4.0.8.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-hisat2-2.0.0.img b/src/.singularity_in2p3/lbmc-hisat2-2.0.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..24c7eb7300fd7533b288c68743885d68ccad068c
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-hisat2-2.0.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-hisat2-2.0.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-hisat2-2.1.0.img b/src/.singularity_in2p3/lbmc-hisat2-2.1.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..b210e78b748a940e0e6682be033fb114c38b3862
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-hisat2-2.1.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-hisat2-2.1.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-htseq-0.11.2.img b/src/.singularity_in2p3/lbmc-htseq-0.11.2.img
new file mode 120000
index 0000000000000000000000000000000000000000..c2e222e828a3646efa9c2997316943e42a39fb60
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-htseq-0.11.2.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-htseq-0.11.2.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-htseq-0.8.0.img b/src/.singularity_in2p3/lbmc-htseq-0.8.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..d9761928187a7538046256904be6f7db47b3e2b5
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-htseq-0.8.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-htseq-0.8.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-kallisto-0.43.1.img b/src/.singularity_in2p3/lbmc-kallisto-0.43.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..f0bc3cb4c224cebf21809e1d7b79f9e844c16a2b
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-kallisto-0.43.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-kallisto-0.43.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-kallisto-0.44.0.img b/src/.singularity_in2p3/lbmc-kallisto-0.44.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..f5a1df9011779058335fdb0377901338434d2808
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-kallisto-0.44.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-kallisto-0.44.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-macs2-2.1.2.img b/src/.singularity_in2p3/lbmc-macs2-2.1.2.img
new file mode 120000
index 0000000000000000000000000000000000000000..3507efe473fdc01252b874f810b618fc0fd18660
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-macs2-2.1.2.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-macs2-2.1.2.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-multiqc-1.0.img b/src/.singularity_in2p3/lbmc-multiqc-1.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..2c95d54baf3c3c0354fa66f1f3a85c36b4636bb0
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-multiqc-1.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-multiqc-1.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-multiqc-1.7.img b/src/.singularity_in2p3/lbmc-multiqc-1.7.img
new file mode 120000
index 0000000000000000000000000000000000000000..df7f68b57cbbf6646c97f84557183a58d08c3959
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-multiqc-1.7.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-multiqc-1.7.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-music-6613c53.img b/src/.singularity_in2p3/lbmc-music-6613c53.img
new file mode 120000
index 0000000000000000000000000000000000000000..a0f66ea144a85e37d52d56c6f0ce6d4b5eb29088
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-music-6613c53.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-music-6613c53.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-picard-2.18.11.img b/src/.singularity_in2p3/lbmc-picard-2.18.11.img
new file mode 120000
index 0000000000000000000000000000000000000000..d2373a62d5848c8bada4da7f08b1ad31c983b48f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-picard-2.18.11.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-picard-2.18.11.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-pigz-2.4.img b/src/.singularity_in2p3/lbmc-pigz-2.4.img
new file mode 120000
index 0000000000000000000000000000000000000000..25f5929082f08108919410d1e62c675ffb193f2f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-pigz-2.4.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-pigz-2.4.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-r-3.5.3.img b/src/.singularity_in2p3/lbmc-r-3.5.3.img
new file mode 120000
index 0000000000000000000000000000000000000000..447fc14610fccde46ff6bfdc866728b0a4008cc7
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-r-3.5.3.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-r-3.5.3.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-rsem-1.3.0.img b/src/.singularity_in2p3/lbmc-rsem-1.3.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..8230e003bb1ef02394ae5a954423d41311260aab
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-rsem-1.3.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-rsem-1.3.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-salmon-0.8.2.img b/src/.singularity_in2p3/lbmc-salmon-0.8.2.img
new file mode 120000
index 0000000000000000000000000000000000000000..ebe4177379aa6fb25ee467868b0972f08f91dc66
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-salmon-0.8.2.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-salmon-0.8.2.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-sambamba-0.6.7.img b/src/.singularity_in2p3/lbmc-sambamba-0.6.7.img
new file mode 120000
index 0000000000000000000000000000000000000000..e001f82d9f8b71094f14e0e0ec3a49026f39981c
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-sambamba-0.6.7.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-sambamba-0.6.7.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-samblaster-0.1.24.img b/src/.singularity_in2p3/lbmc-samblaster-0.1.24.img
new file mode 120000
index 0000000000000000000000000000000000000000..242ae80e152d626bd9dcba519a7bcc77fcf0fbc4
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-samblaster-0.1.24.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-samblaster-0.1.24.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-samtools-1.7.img b/src/.singularity_in2p3/lbmc-samtools-1.7.img
new file mode 120000
index 0000000000000000000000000000000000000000..8f513f7638cbf6f95c6c4e69bc382295b5653cc7
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-samtools-1.7.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-samtools-1.7.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-sratoolkit-2.8.2.img b/src/.singularity_in2p3/lbmc-sratoolkit-2.8.2.img
new file mode 120000
index 0000000000000000000000000000000000000000..fc196a885058ffd2f016f8fd91db4019d703486f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-sratoolkit-2.8.2.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-sratoolkit-2.8.2.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-star-2.7.3a.img b/src/.singularity_in2p3/lbmc-star-2.7.3a.img
new file mode 120000
index 0000000000000000000000000000000000000000..ea55bf4bb7206764b8efaa48bc9c5ef888606cde
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-star-2.7.3a.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-star-2.7.3a.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-subread-1.6.4.img b/src/.singularity_in2p3/lbmc-subread-1.6.4.img
new file mode 120000
index 0000000000000000000000000000000000000000..8a782172465289b9cacc105619396a30bc9ef761
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-subread-1.6.4.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-subread-1.6.4.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-tophat-2.1.1.img b/src/.singularity_in2p3/lbmc-tophat-2.1.1.img
new file mode 120000
index 0000000000000000000000000000000000000000..5cbb7f17d4f08fc19f72762411c2417eb1c77b3f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-tophat-2.1.1.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-tophat-2.1.1.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-trimmomatic-0.36.img b/src/.singularity_in2p3/lbmc-trimmomatic-0.36.img
new file mode 120000
index 0000000000000000000000000000000000000000..8baf3c581996bed38e11de7072f12e3c9d074567
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-trimmomatic-0.36.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-trimmomatic-0.36.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-ucsc-375.img b/src/.singularity_in2p3/lbmc-ucsc-375.img
new file mode 120000
index 0000000000000000000000000000000000000000..54ba6659f0ad9a0ac6483d435ca2a6c9f87a4bf8
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-ucsc-375.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-ucsc-375.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-umi_tools-1.0.0.img b/src/.singularity_in2p3/lbmc-umi_tools-1.0.0.img
new file mode 120000
index 0000000000000000000000000000000000000000..240e59a0438697ce4e1f41fad639d4bfb6599e4f
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-umi_tools-1.0.0.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-umi_tools-1.0.0.img
\ No newline at end of file
diff --git a/src/.singularity_in2p3/lbmc-urqt-d62c1f8.img b/src/.singularity_in2p3/lbmc-urqt-d62c1f8.img
new file mode 120000
index 0000000000000000000000000000000000000000..a7f364f55d65c79dd11c49ecf459b6ca9163a9d9
--- /dev/null
+++ b/src/.singularity_in2p3/lbmc-urqt-d62c1f8.img
@@ -0,0 +1 @@
+/sps/lbmc/common/singularity/lbmc-urqt-d62c1f8.img
\ No newline at end of file
diff --git a/src/.update_config.sh b/src/.update_config.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3e7c6a1e8f6a7b9c01b5d27bb8fc05cb14a4a633
--- /dev/null
+++ b/src/.update_config.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+# batch-edit every module .config file under src/ with fd + perl one-liners
+
+# update docker url
+fd ".*config" -E "nf_modules" src/ -x perl -0777pe 's|container = "|container = "lbmc/|g' -i {}
+
+# update singularity url
+fd ".*config" -E "nf_modules" src/ -x perl -pe 's|container = "lbmc/file://bin/(.*).img"|container = "lbmc/\1"|g' -i {}
+
+# update singularity config
+fd ".*config" -E "nf_modules" src/ -x perl -0777pe 's|\n\s*singularity {\n\s*singularity.enabled = true|\n  singularity {\n    singularity.enabled = true\n    singularity.cacheDir = "./bin/"|mg' -i {}
+
+# update in2p3 config
+fd ".*config" -E "nf_modules" src/ -x perl -0777pe 's|\n\s*ccin2p3 {\n\s*singularity.enabled = true|\n  ccin2p3 {\n    singularity.enabled = true\n    singularity.cacheDir = "/sps/lbmc/common/singularity/"|mg' -i {}
+fd ".*config" src/ -x perl -pe 's|container = "lbmc//sps/lbmc/common/singularity/(.*).img"|container = "lbmc/\1"|g' -i {}
+fd ".*config" -E "nf_modules" src/ -x perl -0777pe 's|singularity.cacheDir = "/sps/lbmc/common/singularity/"|singularity.cacheDir = "\$baseDir/.singularity_in2p3/"|mg' -i {}
+
+# we remove the ccin2p3_conda section
+fd ".*config" -E "nf_modules" src/ -x perl -0777pe "s|\s*ccin2p3_conda {.*ccin2p3 {\n|\n  ccin2p3 {\n|msg" -i {}
+
+# we update the psmn module to conda
+fd ".*config" -E "nf_modules" src/ -x perl -0777pe 's|beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"\n\s*module = "(.*)/(.*)"|beforeScript = "source \$baseDir/.conda_psmn.sh"\n        conda = "\$baseDir/.conda_envs/\L\1_\2"|mg' -i {}
diff --git a/src/.update_tools.sh b/src/.update_tools.sh
new file mode 100755
index 0000000000000000000000000000000000000000..57af5db8b119176f94fe0ac4b28043b3cd94dd91
--- /dev/null
+++ b/src/.update_tools.sh
@@ -0,0 +1,92 @@
+#!/bin/sh
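+# copy the docker / singularity / nf / psmn module files of a tool from a
+# previous version (-p) to a new one (-v), substituting the version strings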
+
+# A POSIX variable
+OPTIND=1
+
+# Initialize our own variables:
+tool=""
+version=""
+
+while getopts "h?v:t:p:" opt; do
+  case "${opt}" in
+  h|\?)
+    echo "update_tools.sh -t toolname -v tool_version -p tool_previous_version"
+    exit 0
+    ;;
+  v)
+    version=${OPTARG}
+    ;;
+  p)
+    prev_version=${OPTARG}
+    ;;
+  t)
+    tool=${OPTARG}
+    ;;
+  esac
+done
+
+echo "tool=${tool}, version='${version}', previous version='${version}'"
+
+docker_tool_dir="src/docker_modules/"${tool}"/"
+echo ${docker_tool_dir}
+if [ -d ${docker_tool_dir} ]; then
+  echo "docker module found for ${tool}."
+  if [ -d ${docker_tool_dir}${version} ]; then
+    echo "version already existing, skipping."
+  else
+    cp -R ${docker_tool_dir}${prev_version} ${docker_tool_dir}${version}
+    sed -i "s|${prev_version}|${version}|g" "${docker_tool_dir}${version}/Dockerfile"
+    sed -i "s|${prev_version}|${version}|g" "${docker_tool_dir}${version}/docker_init.sh"
+    echo "docker_module for ${tool}:${version}, done."
+  fi
+else
+  echo "docker module not found for '${tool}', skipping."
+fi
+
+singularity_tool_dir="src/singularity_modules/"${tool}"/"
+echo ${singularity_tool_dir}
+if [ -d ${singularity_tool_dir} ]; then
+  echo "singularity module found for $tool."
+  if [ -d ${singularity_tool_dir}${version} ]; then
+    echo "version already existing, skipping."
+  else
+    cp -R ${singularity_tool_dir}${prev_version} ${singularity_tool_dir}${version}
+    sed -i "s|${prev_version}|${version}|g" "${singularity_tool_dir}${version}/${tool}.def"
+    sed -i "s|${prev_version}|${version}|g" "${singularity_tool_dir}${version}/build.sh"
+    echo "singularity_module for ${tool}:${version}, done."
+  fi
+else
+  echo "singularity module not found for '${tool}', skipping."
+fi
+
+nf_tool_dir="src/nf_modules/"$tool"/"
+echo $nf_tool_dir
+if [ -d ${nf_tool_dir} ]; then
+  echo "nf module found for ${tool}."
+  find ${nf_tool_dir} -maxdepth 1 -mindepth 1 -type f -name "*.config" |
+    awk "{system(\"sed -i \\\"s|${prev_version}|${version}|g\\\" \"\$0)}"
+  echo "nf_module for ${tool}:${version}, done."
+else
+  echo "nf module not found for '${tool}', skipping."
+fi
+
+psmn_modules_dir="src/psmn_modules/.git/"
+if [ ! -d ${psmn_modules_dir} ]; then
+  git submodule init && \
+  git submodule update
+fi
+psmn_tool_app_dir="src/psmn_modules/apps/"${tool}"/"
+psmn_tool_module_dir="src/psmn_modules/modulefiles/"${tool}"/"
+echo ${psmn_tool_app_dir}
+if [ -d ${psmn_tool_app_dir} ]; then
+  echo "psmn module found for ${tool}."
+  cp ${psmn_tool_app_dir}/install_${prev_version}.sh \
+    ${psmn_tool_app_dir}/install_${version}.sh
+  sed -i "s|$prev_version|$version|g" ${psmn_tool_app_dir}/install_${version}.sh
+  cp ${psmn_tool_module_dir}/${prev_version}.lua \
+    ${psmn_tool_module_dir}/${version}.lua
+  sed -i "s|${prev_version}|${version}|g" ${psmn_tool_module_dir}/${version}.lua
+  echo "psmn_module for ${tool}:${version}, done."
+else
+  echo "psmn module not found for '${tool}', skipping."
+fi
diff --git a/src/in2p3.pbs b/src/in2p3.pbs
new file mode 100644
index 0000000000000000000000000000000000000000..7f929a7efa4aca6eb47b4114a97eed0912d2bdbb
--- /dev/null
+++ b/src/in2p3.pbs
@@ -0,0 +1,31 @@
+#! /usr/local/bin/bash -l
+#####################################
+# job script example with GE options
+#####################################
+#$ -q demon
+#$ -l demon=1
+#$ -P P_lbmc
+#$ -N nf_pipe
+#$ -o /sps/lbmc/lmodolo/logs/ # change to your username!
+#$ -e /sps/lbmc/lmodolo/logs/ # change to your username!
+#$ -r n       # relaunch y/n
+#$ -M laurent.modolo@ens-lyon.fr # change to your e-mail address!
+#$ -m be      ## send an email when the job starts and ends
+#$ -l os=cl7  ## choose OS
+#$ -l sps=1   ## access the /sps directory
+#####################################
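+# submit this script to the batch system with, e.g.: qsub src/in2p3.pbs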
+
+NF_VERSION=19.10
+NF=/pbs/throng/lbmc/cl7/nextflow/${NF_VERSION}/nextflow
+# change to your username
+SCRATCH=/sps/lbmc/lmodolo/
+# change to your project / pipeline !
+PIPELINE=${SCRATCH}/nextflow/src/training_dataset.nf
+# change to your project / pipeline !
+CONFIG=${SCRATCH}/nextflow/src/training_dataset.config
+
+${NF} ${PIPELINE} -c ${CONFIG} -profile ccin2p3 \
+--fasta "${SCRATCH}/nextflow/data/tiny_dataset/fasta/tiny_v2.fasta" \
+--fastq_single "${SCRATCH}/nextflow/data/tiny_dataset/fastq/tiny2_S.fastq.gz" \
+--chromosome "X" --start 5305683 --stop 5333928 -resume \
+-w "${SCRATCH}"
diff --git a/src/install_nextflow.sh b/src/install_nextflow.sh
new file mode 100755
index 0000000000000000000000000000000000000000..272c24929f856970e80f8cd8739fea8754ae9f6c
--- /dev/null
+++ b/src/install_nextflow.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
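+# nextflow needs a java runtime: print the detected version before installing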
+java -version
+curl -s https://get.nextflow.io | bash
diff --git a/src/nf_modules/bedtools/fasta_from_bed.config b/src/nf_modules/bedtools/fasta_from_bed.config
new file mode 100644
index 0000000000000000000000000000000000000000..93e49c41c53cfa0b01ca92b67a2646cd19597cff
--- /dev/null
+++ b/src/nf_modules/bedtools/fasta_from_bed.config
@@ -0,0 +1,53 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fasta_from_bed {
+        container = "lbmc/bedtools:2.25.0"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fasta_from_bed {
+        container = "lbmc/bedtools:2.25.0"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fasta_from_bed {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bedtools_2.25.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fasta_from_bed {
+        container = "lbmc/bedtools:2.25.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bedtools/fasta_from_bed.nf b/src/nf_modules/bedtools/fasta_from_bed.nf
new file mode 100644
index 0000000000000000000000000000000000000000..5e7504dcd35f29e44964e629ef8fba894c633224
--- /dev/null
+++ b/src/nf_modules/bedtools/fasta_from_bed.nf
@@ -0,0 +1,40 @@
+/*
+* bedtools :
+* Inputs : fasta files
+* Inputs : bed files
+* Output : fasta files
+*/
+/*                      fasta extraction                                     */
+
+params.fasta = "$baseDir/data/fasta/*.fasta"
+params.bed = "$baseDir/data/annot/*.bed"
+
+log.info "fasta file : ${params.fasta}"
+log.info "bed file : ${params.bed}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_files }
+Channel
+  .fromPath( params.bed )
+  .ifEmpty { error "Cannot find any bed files matching: ${params.bed}" }
+  .set { bed_files }
+
+process fasta_from_bed {
+  tag "${bed.baseName}"
+  publishDir "results/fasta/", mode: 'copy'
+
+  input:
+  file fasta from fasta_files
+  file bed from bed_files
+
+  output:
+  file "*_extracted.fasta" into fasta_files_extracted
+
+  script:
+"""
+bedtools getfasta -name \
+-fi ${fasta} -bed ${bed} -fo ${bed.baseName}_extracted.fasta
+"""
+}
diff --git a/src/nf_modules/bedtools/tests.sh b/src/nf_modules/bedtools/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..61b3cc9c44bad7f171e87f180f2a2a156009d48f
--- /dev/null
+++ b/src/nf_modules/bedtools/tests.sh
@@ -0,0 +1,15 @@
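+# run the module on the tiny test dataset with the docker profile, then
+# again with singularity when it is available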
+./nextflow src/nf_modules/bedtools/fasta_from_bed.nf \
+  -c src/nf_modules/bedtools/fasta_from_bed.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --bed "data/tiny_dataset/annot/tiny.bed" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/bedtools/fasta_from_bed.nf \
+  -c src/nf_modules/bedtools/fasta_from_bed.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --bed "data/tiny_dataset/annot/tiny.bed" \
+  -resume
+fi
diff --git a/src/nf_modules/bowtie/indexing.config b/src/nf_modules/bowtie/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..c67cef25d2ad1a3d9c01a93ce37b6c6e1ce6f74f
--- /dev/null
+++ b/src/nf_modules/bowtie/indexing.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        cpus = 4
+        container = "lbmc/bowtie:1.2.2"
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        cpus = 4
+        container = "lbmc/bowtie:1.2.2"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie_1.2.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        memory = "20GB"
+        cpus = 16
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/bowtie:1.2.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bowtie/indexing.nf b/src/nf_modules/bowtie/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..cf6bfaee07c7f81abb3b5b8a56fa526c24c67215
--- /dev/null
+++ b/src/nf_modules/bowtie/indexing.nf
@@ -0,0 +1,32 @@
+/*                      fasta indexing                                     */
+
+params.fasta = "$baseDir/data/fasta/*.fasta"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_file }
+
+process index_fasta {
+  tag "$fasta.baseName"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_file
+
+  output:
+    file "*.index*" into index_files
+    file "*_report.txt" into indexing_report
+
+  script:
+"""
+bowtie-build --threads ${task.cpus} -f ${fasta} ${fasta.baseName}.index &> ${fasta.baseName}_bowtie_report.txt
+
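+# bowtie-build does not always exit with a non-zero status on failure,
+# so scan its report for errors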
+if grep -q "Error" ${fasta.baseName}_bowtie_report.txt; then
+  exit 1
+fi
+"""
+}
+
diff --git a/src/nf_modules/bowtie/mapping_paired.config b/src/nf_modules/bowtie/mapping_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..ac4c35e46a11ddab8fe6c6dc33f67dd09e99eeae
--- /dev/null
+++ b/src/nf_modules/bowtie/mapping_paired.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bowtie:1.2.2"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        cpus = 4
+        container = "lbmc/bowtie:1.2.2"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie_1.2.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/bowtie:1.2.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bowtie/mapping_paired.nf b/src/nf_modules/bowtie/mapping_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..90b47ba41994003c42653cc40f1736513100d76f
--- /dev/null
+++ b/src/nf_modules/bowtie/mapping_paired.nf
@@ -0,0 +1,54 @@
+/*
+* mapping paired fastq
+*/
+
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/bams/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  file "*.bam" into bam_files
+  file "*_report.txt" into mapping_report
+
+  script:
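+  // recover the index basename from the *.1.ebwt file (ignoring *.rev.1.ebwt)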
+  index_id = index[0]
+  for (index_file in index) {
+  if (index_file =~ /.*\.1\.ebwt/ && !(index_file =~ /.*\.rev\.1\.ebwt/)) {
+        index_id = ( index_file =~ /(.*)\.1\.ebwt/)[0][1]
+    }
+  }
+"""
+# -v sets the maximum number of mismatches, -k the number of alignments
+# reported per read
+bowtie --best -v 3 -k 1 --sam -p ${task.cpus} ${index_id} \
+-1 ${reads[0]} -2 ${reads[1]} 2> \
+${pair_id}_bowtie_report_tmp.txt | \
+samtools view -Sb - > ${pair_id}.bam
+
+if grep -q "Error" ${pair_id}_bowtie_report_tmp.txt; then
+  exit 1
+fi
+tail -n 19 ${pair_id}_bowtie_report_tmp.txt > ${pair_id}_bowtie_report.txt
+"""
+}
+
+
diff --git a/src/nf_modules/bowtie/mapping_single.config b/src/nf_modules/bowtie/mapping_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..ac4c35e46a11ddab8fe6c6dc33f67dd09e99eeae
--- /dev/null
+++ b/src/nf_modules/bowtie/mapping_single.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bowtie:1.2.2"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        cpus = 4
+        container = "lbmc/bowtie:1.2.2"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie_1.2.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/bowtie:1.2.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bowtie/mapping_single.nf b/src/nf_modules/bowtie/mapping_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..268bdfa85071a766c027aa518e38bcfc4778b303
--- /dev/null
+++ b/src/nf_modules/bowtie/mapping_single.nf
@@ -0,0 +1,50 @@
+/*
+* mapping single end fastq
+*/
+
+params.fastq = "$baseDir/data/fastq/*.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$file_id"
+  publishDir "results/mapping/bams/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  set file_id, "*.bam" into bam_files
+  file "*_report.txt" into mapping_report
+
+  script:
+index_id = index[0]
+for (index_file in index) {
+  if (index_file =~ /.*\.1\.ebwt/ && !(index_file =~ /.*\.rev\.1\.ebwt/)) {
+      index_id = ( index_file =~ /(.*)\.1\.ebwt/)[0][1]
+  }
+}
+"""
+bowtie --best -v 3 -k 1 --sam -p ${task.cpus} ${index_id} \
+-q ${reads} 2> \
+${file_id}_bowtie_report_tmp.txt | \
+samtools view -Sb - > ${file_id}.bam
+
+if grep -q "Error" ${file_id}_bowtie_report_tmp.txt; then
+  exit 1
+fi
+tail -n 19 ${file_id}_bowtie_report_tmp.txt > ${file_id}_bowtie_report.txt
+"""
+}
diff --git a/src/nf_modules/bowtie/tests.sh b/src/nf_modules/bowtie/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..a30529182addf06c272ca86f9d438d0647b0f49e
--- /dev/null
+++ b/src/nf_modules/bowtie/tests.sh
@@ -0,0 +1,41 @@
+./nextflow src/nf_modules/bowtie/indexing.nf \
+  -c src/nf_modules/bowtie/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/bowtie/mapping_single.nf \
+  -c src/nf_modules/bowtie/mapping_single.config \
+  -profile docker \
+  --index "results/mapping/index/*.ebwt" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/bowtie/mapping_paired.nf \
+  -c src/nf_modules/bowtie/mapping_paired.config \
+  -profile docker \
+  --index "results/mapping/index/*.ebwt" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/bowtie/indexing.nf \
+  -c src/nf_modules/bowtie/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/bowtie/mapping_single.nf \
+  -c src/nf_modules/bowtie/mapping_single.config \
+  -profile singularity \
+  --index "results/mapping/index/*.ebwt" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/bowtie/mapping_paired.nf \
+  -c src/nf_modules/bowtie/mapping_paired.config \
+  -profile singularity \
+  --index "results/mapping/index/*.ebwt" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/bowtie2/indexing.config b/src/nf_modules/bowtie2/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..dd1e5e0648c2c38159cce2841d2201bd05d79e29
--- /dev/null
+++ b/src/nf_modules/bowtie2/indexing.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie2_2.3.4.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "20GB"
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/bowtie2:2.3.4.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bowtie2/indexing.nf b/src/nf_modules/bowtie2/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..45318049b076f6dcad4d7ef2335185796a5e7234
--- /dev/null
+++ b/src/nf_modules/bowtie2/indexing.nf
@@ -0,0 +1,31 @@
+params.fasta = "$baseDir/data/fasta/*.fasta"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_file }
+
+process index_fasta {
+  tag "$fasta.baseName"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_file
+
+  output:
+    file "*.index*" into index_files
+    file "*_report.txt" into indexing_report
+
+  script:
+"""
+bowtie2-build --threads ${task.cpus} ${fasta} ${fasta.baseName}.index &> ${fasta.baseName}_bowtie2_report.txt
+
+if grep -q "Error" ${fasta.baseName}_bowtie2_report.txt; then
+  exit 1
+fi
+"""
+}
+
+
diff --git a/src/nf_modules/bowtie2/mapping_paired.config b/src/nf_modules/bowtie2/mapping_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..c4b0fdd102fd74fa4151e7543e44f42ecfcb3cdf
--- /dev/null
+++ b/src/nf_modules/bowtie2/mapping_paired.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie2_2.3.4.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/bowtie2:2.3.4.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bowtie2/mapping_paired.nf b/src/nf_modules/bowtie2/mapping_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..225508d88449f0141775902092b1fb4e4e57ffc2
--- /dev/null
+++ b/src/nf_modules/bowtie2/mapping_paired.nf
@@ -0,0 +1,47 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/bams/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  set pair_id, "*.bam" into bam_files
+  file "*_report.txt" into mapping_report
+
+  script:
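+  // recover the index basename from the *.1.bt2 file (ignoring *.rev.1.bt2)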
+  index_id = index[0]
+  for (index_file in index) {
+    if (index_file =~ /.*\.1\.bt2/ && !(index_file =~ /.*\.rev\.1\.bt2/)) {
+        index_id = ( index_file =~ /(.*)\.1\.bt2/)[0][1]
+    }
+  }
+"""
+bowtie2 --very-sensitive -p ${task.cpus} -x ${index_id} \
+-1 ${reads[0]} -2 ${reads[1]} 2> \
+${pair_id}_bowtie2_report_tmp.txt | \
+samtools view -Sb - > ${pair_id}.bam
+
+if grep -q "Error" ${pair_id}_bowtie2_report_tmp.txt; then
+  exit 1
+fi
+tail -n 19 ${pair_id}_bowtie2_report_tmp.txt > ${pair_id}_bowtie2_report.txt
+"""
+}
+
diff --git a/src/nf_modules/bowtie2/mapping_single.config b/src/nf_modules/bowtie2/mapping_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..c4b0fdd102fd74fa4151e7543e44f42ecfcb3cdf
--- /dev/null
+++ b/src/nf_modules/bowtie2/mapping_single.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie2_2.3.4.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/bowtie2:2.3.4.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bowtie2/mapping_single.nf b/src/nf_modules/bowtie2/mapping_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..b5bfff2b11cdb837f216b0b1a245587c486c84d2
--- /dev/null
+++ b/src/nf_modules/bowtie2/mapping_single.nf
@@ -0,0 +1,46 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$file_id"
+  publishDir "results/mapping/bams/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  set file_id, "*.bam" into bam_files
+  file "*_report.txt" into mapping_report
+
+  script:
+  index_id = index[0]
+  for (index_file in index) {
+    if (index_file =~ /.*\.1\.bt2/ && !(index_file =~ /.*\.rev\.1\.bt2/)) {
+        index_id = ( index_file =~ /(.*)\.1\.bt2/)[0][1]
+    }
+  }
+"""
+bowtie2 --very-sensitive -p ${task.cpus} -x ${index_id} \
+-U ${reads} 2> \
+${file_id}_bowtie2_report_tmp.txt | \
+samtools view -Sb - > ${file_id}.bam
+
+if grep -q "Error" ${file_id}_bowtie2_report_tmp.txt; then
+  exit 1
+fi
+tail -n 19 ${file_id}_bowtie2_report_tmp.txt > ${file_id}_bowtie2_report.txt
+"""
+}
diff --git a/src/nf_modules/bowtie2/tests.sh b/src/nf_modules/bowtie2/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..bfec0d533a6430fb6cac928fe7350f3fa97a01ae
--- /dev/null
+++ b/src/nf_modules/bowtie2/tests.sh
@@ -0,0 +1,41 @@
+./nextflow src/nf_modules/bowtie2/indexing.nf \
+  -c src/nf_modules/bowtie2/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/bowtie2/mapping_single.nf \
+  -c src/nf_modules/bowtie2/mapping_single.config \
+  -profile docker \
+  --index "data/tiny_dataset/fasta/*.bt2" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/bowtie2/mapping_paired.nf \
+  -c src/nf_modules/bowtie2/mapping_paired.config \
+  -profile docker \
+  --index "data/tiny_dataset/fasta/*.bt2" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/bowtie2/indexing.nf \
+  -c src/nf_modules/bowtie2/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/bowtie2/mapping_single.nf \
+  -c src/nf_modules/bowtie2/mapping_single.config \
+  -profile singularity \
+  --index "data/tiny_dataset/fasta/*.bt2" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/bowtie2/mapping_paired.nf \
+  -c src/nf_modules/bowtie2/mapping_paired.config \
+  -profile singularity \
+  --index "data/tiny_dataset/fasta/*.bt2" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/bwa/indexing.config b/src/nf_modules/bwa/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..689b5fa2de7db12f2691efd89221fbaf66d7a950
--- /dev/null
+++ b/src/nf_modules/bwa/indexing.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        container = "lbmc/bwa:0.7.17"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        container = "lbmc/bwa:0.7.17"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bwa_0.7.17"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/bwa:0.7.17"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bwa/indexing.nf b/src/nf_modules/bwa/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..09096eeaa0b7e9b77d7078c4edd68734a7d68dbf
--- /dev/null
+++ b/src/nf_modules/bwa/indexing.nf
@@ -0,0 +1,28 @@
+params.fasta = "$baseDir/data/fasta/*.fasta"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fasta_file }
+
+process index_fasta {
+  tag "$fasta_id"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    set fasta_id, file(fasta) from fasta_file
+
+  output:
+    set fasta_id, "${fasta.baseName}.*" into index_files
+    file "*_bwa_report.txt" into index_files_report
+
+  script:
+"""
+bwa index -p ${fasta.baseName} ${fasta} \
+&> ${fasta.baseName}_bwa_report.txt
+"""
+}
+
diff --git a/src/nf_modules/bwa/mapping_paired.config b/src/nf_modules/bwa/mapping_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..b2e198a0629c28595aaf4fedc7f83e7e5dd9232c
--- /dev/null
+++ b/src/nf_modules/bwa/mapping_paired.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bwa:0.7.17"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/bwa:0.7.17"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bwa_0.7.17"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/bwa:0.7.17"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/bwa/mapping_paired.nf b/src/nf_modules/bwa/mapping_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..7309c6052fb2e670d29ef51b7b547c59ce092c4a
--- /dev/null
+++ b/src/nf_modules/bwa/mapping_paired.nf
@@ -0,0 +1,37 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .groupTuple()
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/sam/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  set index_id, file(index) from index_files.collect()
+
+  output:
+  file "${pair_id}.sam" into sam_files
+  file "${pair_id}_bwa_report.txt" into mapping_repport_files
+
+  script:
+"""
+bwa mem -t ${task.cpus} \
+${index_id} ${reads[0]} ${reads[1]} \
+-o ${pair_id}.sam &> ${pair_id}_bwa_report.txt
+"""
+}
+
diff --git a/src/nf_modules/bwa/tests.sh b/src/nf_modules/bwa/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e601d200cd9b642ad7d96ff05be666938e2c6178
--- /dev/null
+++ b/src/nf_modules/bwa/tests.sh
@@ -0,0 +1,42 @@
+./nextflow src/nf_modules/bwa/indexing.nf \
+  -c src/nf_modules/bwa/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+# ./nextflow src/nf_modules/bwa/mapping_single.nf \
+#   -c src/nf_modules/bwa/mapping_single.config \
+#   -profile docker \
+#   --index "results/mapping/index/tiny_v2.index" \
+#   --fastq "data/tiny_dataset/fastq/tiny*_S.fastq"
+
+./nextflow src/nf_modules/bwa/mapping_paired.nf \
+  -c src/nf_modules/bwa/mapping_paired.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/bwa/indexing.nf \
+  -c src/nf_modules/bwa/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+
+# ./nextflow src/nf_modules/bwa/mapping_single.nf \
+#   -c src/nf_modules/bwa/mapping_single.config \
+#   -profile singularity \
+#   --index "results/mapping/index/tiny_v2.index" \
+#   --fastq "data/tiny_dataset/fastq/tiny*_S.fastq"
+
+./nextflow src/nf_modules/bwa/mapping_paired.nf \
+  -c src/nf_modules/bwa/mapping_paired.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+fi
diff --git a/src/nf_modules/cutadapt/adaptor_removal_paired.config b/src/nf_modules/cutadapt/adaptor_removal_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..13dfac50e21eb2b7a6bfbbd2cbad8bbceb6655cc
--- /dev/null
+++ b/src/nf_modules/cutadapt/adaptor_removal_paired.config
@@ -0,0 +1,53 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: adaptor_removal {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: adaptor_removal {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: adaptor_removal {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/cutadapt_2.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: adaptor_removal {
+        container = "lbmc/cutadapt:2.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/cutadapt/adaptor_removal_paired.nf b/src/nf_modules/cutadapt/adaptor_removal_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..f78d5c2dab0c8a83fd105c5eaddbbc1fd8c4e13e
--- /dev/null
+++ b/src/nf_modules/cutadapt/adaptor_removal_paired.nf
@@ -0,0 +1,24 @@
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+
+process adaptor_removal {
+  tag "$pair_id"
+  publishDir "results/fastq/adaptor_removal/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+
+  output:
+  set pair_id, "*_cut_R{1,2}.fastq.gz" into fastq_files_cut
+
+  script:
+  """
+  cutadapt -a AGATCGGAAGAG -g CTCTTCCGATCT -A AGATCGGAAGAG -G CTCTTCCGATCT \
+  -o ${pair_id}_cut_R1.fastq.gz -p ${pair_id}_cut_R2.fastq.gz \
+  ${reads[0]} ${reads[1]} > ${pair_id}_report.txt
+  """
+}
diff --git a/src/nf_modules/cutadapt/adaptor_removal_single.config b/src/nf_modules/cutadapt/adaptor_removal_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..13dfac50e21eb2b7a6bfbbd2cbad8bbceb6655cc
--- /dev/null
+++ b/src/nf_modules/cutadapt/adaptor_removal_single.config
@@ -0,0 +1,53 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: adaptor_removal {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: adaptor_removal {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: adaptor_removal {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/cutadapt_2.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: adaptor_removal {
+        container = "lbmc/cutadapt:2.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/cutadapt/adaptor_removal_single.nf b/src/nf_modules/cutadapt/adaptor_removal_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..26f3cd7602bf242f718a027bad253ba2fe1e8d8c
--- /dev/null
+++ b/src/nf_modules/cutadapt/adaptor_removal_single.nf
@@ -0,0 +1,25 @@
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+
+process adaptor_removal {
+  tag "$file_id"
+
+  input:
+  set file_id, file(reads) from fastq_files
+
+  output:
+  set file_id, "*_cut.fastq.gz" into fastq_files_cut
+
+  script:
+  """
+  cutadapt -a AGATCGGAAGAG -g CTCTTCCGATCT \
+  -o ${file_id}_cut.fastq.gz \
+  ${reads} > ${file_id}_report.txt
+  """
+}
+
diff --git a/src/nf_modules/cutadapt/tests.sh b/src/nf_modules/cutadapt/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..77712b2083e0ba335a3f0c1944c2e8045b66f4c7
--- /dev/null
+++ b/src/nf_modules/cutadapt/tests.sh
@@ -0,0 +1,49 @@
+./nextflow src/nf_modules/cutadapt/adaptor_removal_paired.nf \
+  -c src/nf_modules/cutadapt/adaptor_removal_paired.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/cutadapt/adaptor_removal_single.nf \
+  -c src/nf_modules/cutadapt/adaptor_removal_single.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/cutadapt/trimming_paired.nf \
+  -c src/nf_modules/cutadapt/trimming_paired.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/cutadapt/trimming_single.nf \
+  -c src/nf_modules/cutadapt/trimming_single.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/cutadapt/adaptor_removal_paired.nf \
+  -c src/nf_modules/cutadapt/adaptor_removal_paired.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/cutadapt/adaptor_removal_single.nf \
+  -c src/nf_modules/cutadapt/adaptor_removal_single.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/cutadapt/trimming_paired.nf \
+  -c src/nf_modules/cutadapt/trimming_paired.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/cutadapt/trimming_single.nf \
+  -c src/nf_modules/cutadapt/trimming_single.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/cutadapt/trimming_paired.config b/src/nf_modules/cutadapt/trimming_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..222663bc4cf84889c3ee183930271cd4687f0ab2
--- /dev/null
+++ b/src/nf_modules/cutadapt/trimming_paired.config
@@ -0,0 +1,53 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: trimming {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: trimming {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: trimming {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/cutadapt_2.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: trimming {
+        container = "lbmc/cutadapt:2.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/cutadapt/trimming_paired.nf b/src/nf_modules/cutadapt/trimming_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..704240f46818b79a540e71273fc1512216d109e6
--- /dev/null
+++ b/src/nf_modules/cutadapt/trimming_paired.nf
@@ -0,0 +1,24 @@
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+
+process trimming {
+  tag "$pair_id"
+  publishDir "results/fastq/trimming/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+
+  output:
+  set pair_id, "*_trim_R{1,2}.fastq.gz" into fastq_files_trim
+
+  script:
+  """
+  cutadapt -q 20,20 \
+  -o ${pair_id}_trim_R1.fastq.gz -p ${pair_id}_trim_R2.fastq.gz \
+  ${reads[0]} ${reads[1]} > ${pair_id}_report.txt
+  """
+}
diff --git a/src/nf_modules/cutadapt/trimming_single.config b/src/nf_modules/cutadapt/trimming_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..222663bc4cf84889c3ee183930271cd4687f0ab2
--- /dev/null
+++ b/src/nf_modules/cutadapt/trimming_single.config
@@ -0,0 +1,53 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: trimming {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: trimming {
+        container = "lbmc/cutadapt:2.1"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: trimming {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/cutadapt_2.1"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: trimming {
+        container = "lbmc/cutadapt:2.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/cutadapt/trimming_single.nf b/src/nf_modules/cutadapt/trimming_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..9b3764dbed4dd51bfb361b56b4f5fc737b18c270
--- /dev/null
+++ b/src/nf_modules/cutadapt/trimming_single.nf
@@ -0,0 +1,25 @@
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
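+
+// file_id is the basename truncated at the first ".", e.g.
+// "tiny_S.fastq" gives "tiny_S"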
+
+process trimming {
+  tag "$file_id"
+
+  input:
+  set file_id, file(reads) from fastq_files
+
+  output:
+  set file_id, "*_trim.fastq.gz" into fastq_files_cut
+
+  script:
+  """
+  cutadapt -q 20,20 \
+  -o ${file_id}_trim.fastq.gz \
+  ${reads} > ${file_id}_report.txt
+  """
+}
+
diff --git a/src/nf_modules/deeptools/bam_to_bigwig.config b/src/nf_modules/deeptools/bam_to_bigwig.config
new file mode 100644
index 0000000000000000000000000000000000000000..2cbf9d68a467a99ed2bdd59db667c7b5533c42be
--- /dev/null
+++ b/src/nf_modules/deeptools/bam_to_bigwig.config
@@ -0,0 +1,85 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_bam {
+        container = "lbmc/sambamba:0.6.7"
+        cpus = 4
+      }
+      withName: bam_to_bigwig {
+        container = "lbmc/deeptools:3.0.2"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_bam {
+        container = "lbmc/sambamba:0.6.7"
+        cpus = 4
+      }
+      withName: bam_to_bigwig {
+        container = "lbmc/deeptools:3.0.2"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/sambamba_0.6.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: bam_to_bigwig {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "deeptools/3.0.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_bam {
+        container = "lbmc/sambamba:0.6.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: bam_to_bigwig {
+        container = "lbmc/deeptools:3.0.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/deeptools/bam_to_bigwig.nf b/src/nf_modules/deeptools/bam_to_bigwig.nf
new file mode 100644
index 0000000000000000000000000000000000000000..4c30ee0eed193fba70ea3b236a8bfde800993697
--- /dev/null
+++ b/src/nf_modules/deeptools/bam_to_bigwig.nf
@@ -0,0 +1,49 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+bam_files.into{
+  bam_files_index;
+  bam_files_bigwig
+  }
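+
+// .into duplicates the channel: one copy feeds the indexing step, the
+// other is joined back with the resulting index for bamCoverage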
+
+process index_bam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files_index
+
+  output:
+    set file_id, "*.bam*" into indexed_bam_file
+
+  script:
+"""
+sambamba index -t ${task.cpus} ${bam}
+"""
+}
+
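+// join(by: 0) re-associates each bam with its freshly built index,
+// keyed on file_id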
+bam_files_indexed = bam_files_bigwig.join(indexed_bam_file, by: 0)
+
+process bam_to_bigwig {
+  tag "$file_id"
+
+  publishDir "results/mapping/bigwig/", mode: 'copy'
+
+  input:
+    set file_id, file(bam), file(idx) from bam_files_indexed
+
+  output:
+    set file_id, "*.bw" into bw_files
+
+  script:
+"""
+bamCoverage -p ${task.cpus} --ignoreDuplicates -b ${bam} -o ${file_id}.bw
+"""
+}
+
diff --git a/src/nf_modules/deeptools/compute_matrix.config b/src/nf_modules/deeptools/compute_matrix.config
new file mode 100644
index 0000000000000000000000000000000000000000..d1c05456c37ccf67b9a21ff48da719b45060aa50
--- /dev/null
+++ b/src/nf_modules/deeptools/compute_matrix.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: compute_matrix {
+        container = "lbmc/deeptools:3.0.2"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: compute_matrix {
+        container = "lbmc/deeptools:3.0.2"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: compute_matrix {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/deeptools_3.0.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: compute_matrix {
+        container = "lbmc/deeptools:3.0.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/deeptools/compute_matrix.nf b/src/nf_modules/deeptools/compute_matrix.nf
new file mode 100644
index 0000000000000000000000000000000000000000..2b6e0e915b00fd1f475818352e11e4ce97225ee0
--- /dev/null
+++ b/src/nf_modules/deeptools/compute_matrix.nf
@@ -0,0 +1,38 @@
+params.bw = "$baseDir/data/bigwig/*.bw"
+params.bed = "$baseDir/data/annot/*.bed"
+
+log.info "bigwig files : ${params.bw}"
+log.info "bed files : ${params.bed}"
+
+Channel
+  .fromPath( params.bw )
+  .ifEmpty { error "Cannot find any bigwig files matching: ${params.bw}" }
+  .set { bw_files }
+
+Channel
+  .fromPath( params.bed )
+  .ifEmpty { error "Cannot find any bed files matching: ${params.bed}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bed_files }
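+
+// collect() gathers all bigwig tracks into a single emission so that
+// computeMatrix scores every track against the bed regions in one call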
+
+process compute_matrix {
+  tag "$bed_file_id"
+  publishDir "results/mapping/region_matrix/", mode: 'copy'
+
+  input:
+    file bw from bw_files.collect()
+    set bed_file_id, file(bed) from bed_files.collect()
+
+  output:
+    set bed_file_id, "*.mat.gz" into region_matrix
+
+  script:
+"""
+computeMatrix scale-regions -S ${bw} \
+  -p ${task.cpus} \
+  -R ${bed} \
+  --beforeRegionStartLength 100 \
+  --afterRegionStartLength 100 \
+  -o ${bed_file_id}.mat.gz
+"""
+}
diff --git a/src/nf_modules/deeptools/plot_profile.config b/src/nf_modules/deeptools/plot_profile.config
new file mode 100644
index 0000000000000000000000000000000000000000..c10e0edc66a119775a2600b2890eede8824a9468
--- /dev/null
+++ b/src/nf_modules/deeptools/plot_profile.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: plot_profile {
+        container = "lbmc/deeptools:3.0.2"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: plot_profile {
+        container = "lbmc/deeptools:3.0.2"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: plot_profile {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/deeptools_3.0.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: plot_profile {
+        container = "lbmc/deeptools:3.0.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/deeptools/plot_profile.nf b/src/nf_modules/deeptools/plot_profile.nf
new file mode 100644
index 0000000000000000000000000000000000000000..dfce4e5504bdd4cede56fa156ffa4fa268a7fede
--- /dev/null
+++ b/src/nf_modules/deeptools/plot_profile.nf
@@ -0,0 +1,36 @@
+params.matrix = "$baseDir/data/region_matrix/*.mat.gz"
+params.title = "plot title"
+
+log.info "matrix files : ${params.matrix}"
+log.info "plot title : ${params.title}"
+
+Channel
+  .fromPath( params.matrix )
+  .ifEmpty { error "Cannot find any matrix files matching: ${params.matrix}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { matrix_files }
+
+process plot_profile {
+  tag "$file_id"
+  publishDir "results/mapping/region_matrix/", mode: 'copy'
+
+  input:
+    set file_id, file(matrix) from matrix_files
+
+  output:
+    set file_id, "*.pdf" into region_matrix
+
+  script:
+/*
+see more option at
+https://deeptools.readthedocs.io/en/develop/content/tools/plotProfile.html
+*/
+"""
+plotProfile -m ${matrix} \
+  --plotFileFormat=pdf \
+  -out ${file_id}.pdf \
+  --plotType=fill \
+  --perGroup \
+  --plotTitle "${params.title}"
+"""
+}
diff --git a/src/nf_modules/deeptools/tests.sh b/src/nf_modules/deeptools/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4253689a7c94a62feec6be1536f11d394fec909b
--- /dev/null
+++ b/src/nf_modules/deeptools/tests.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+
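+# duplicate the tiny bam so the modules are tested on two "samples"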
+cp data/tiny_dataset/map/tiny_v2.sort.bam \
+  data/tiny_dataset/map/tiny_v2_bis.sort.bam
+
+./nextflow src/nf_modules/deeptools/bam_to_bigwig.nf \
+  -c src/nf_modules/deeptools/bam_to_bigwig.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2*.sort.bam" \
+  -resume
+
+./nextflow src/nf_modules/deeptools/compute_matrix.nf \
+  -c src/nf_modules/deeptools/compute_matrix.config \
+  -profile docker \
+  --bw "results/mapping/bigwig/*.bw" \
+  --bed "data/tiny_dataset/annot/tiny.bed" \
+  -resume
+
+./nextflow src/nf_modules/deeptools/plot_profile.nf \
+  -c src/nf_modules/deeptools/plot_profile.config \
+  -profile docker \
+  --matrix "results/mapping/region_matrix/*.mat.gz" \
+  --title "plot title" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/deeptools/bam_to_bigwig.nf \
+  -c src/nf_modules/deeptools/bam_to_bigwig.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2*.sort.bam" \
+  -resume
+
+./nextflow src/nf_modules/deeptools/compute_matrix.nf \
+  -c src/nf_modules/deeptools/compute_matrix.config \
+  -profile singularity \
+  --bw "results/mapping/bigwig/*.bw" \
+  --bed "data/tiny_dataset/annot/tiny.bed" \
+  -resume
+
+./nextflow src/nf_modules/deeptools/plot_profile.nf \
+  -c src/nf_modules/deeptools/plot_profile.config \
+  -profile singularity \
+  --matrix "results/mapping/region_matrix/*.mat.gz" \
+  --title "plot title" \
+  -resume
+fi
diff --git a/src/nf_modules/fastp/fastp_paired.config b/src/nf_modules/fastp/fastp_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..d2ef420f462fcc3b43dfd29b3c24baeb0afc86db
--- /dev/null
+++ b/src/nf_modules/fastp/fastp_paired.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastp_fastq {
+        container = "lbmc/fastp:0.19.7"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastp_fastq {
+        cpus = 1
+        container = "lbmc/fastp:0.19.7"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastp_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/fastp_0.19.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastp_fastq {
+        container = "lbmc/fastp:0.19.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/fastp/fastp_paired.nf b/src/nf_modules/fastp/fastp_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..88d6710bc93061804681bcceea1bce7ce10cd669
--- /dev/null
+++ b/src/nf_modules/fastp/fastp_paired.nf
@@ -0,0 +1,34 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
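+
+// fastp filters reads on per-base quality (Q20), keeps every read
+// length and auto-detects paired-end adapters; the html file doubles
+// as the QC report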
+
+process fastp_fastq {
+  tag "$pair_id"
+  publishDir "results/fastq/fastp/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+
+  output:
+    file "*.{zip,html}" into fastp_report
+    set pair_id, file("*.fastq.gz") into fastq_trim_files
+
+  script:
+"""
+fastp --thread ${task.cpus} \
+--qualified_quality_phred 20 \
+--disable_length_filtering \
+--detect_adapter_for_pe \
+--in1 ${reads[0]} \
+--in2 ${reads[1]} \
+--out1 ${pair_id}_R1_trim.fastq.gz \
+--out2 ${pair_id}_R2_trim.fastq.gz \
+--html ${pair_id}.html \
+--report_title ${pair_id}
+"""
+}
diff --git a/src/nf_modules/fastp/fastp_single.config b/src/nf_modules/fastp/fastp_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..d2ef420f462fcc3b43dfd29b3c24baeb0afc86db
--- /dev/null
+++ b/src/nf_modules/fastp/fastp_single.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastp_fastq {
+        container = "lbmc/fastp:0.19.7"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastp_fastq {
+        cpus = 1
+        container = "lbmc/fastp:0.19.7"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastp_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/fastp_0.19.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastp_fastq {
+        container = "lbmc/fastp:0.19.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/fastp/fastp_single.nf b/src/nf_modules/fastp/fastp_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..31262172f8e45376b09293b98bdd4fa5403c9b0c
--- /dev/null
+++ b/src/nf_modules/fastp/fastp_single.nf
@@ -0,0 +1,32 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+
+process fastp_fastq {
+  tag "$file_id"
+  publishDir "results/fastq/fastp/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+
+  output:
+    file "*.{zip,html}" into fastp_report
+    set file_id, file("*.fastq.gz") into fastq_trim_files
+
+  script:
+"""
+fastp --thread ${task.cpus} \
+--qualified_quality_phred 20 \
+--disable_length_filtering \
+--in1 ${reads} \
+--out1 ${file_id}_R1_trim.fastq.gz \
+--html ${file_id}.html \
+--report_title ${file_id}
+"""
+}
diff --git a/src/nf_modules/fastp/tests.sh b/src/nf_modules/fastp/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..fae4f08a92c56752b4d467b5c31aca629eb2aac3
--- /dev/null
+++ b/src/nf_modules/fastp/tests.sh
@@ -0,0 +1,25 @@
+./nextflow src/nf_modules/fastp/fastp_paired.nf \
+  -c src/nf_modules/fastp/fastp_paired.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/fastp/fastp_single.nf \
+  -c src/nf_modules/fastp/fastp_single.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_S.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/fastp/fastp_paired.nf \
+  -c src/nf_modules/fastp/fastp_paired.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/fastp/fastp_single.nf \
+  -c src/nf_modules/fastp/fastp_single.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_S.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/fastqc/fastqc_paired.config b/src/nf_modules/fastqc/fastqc_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..743c85ec616152e94ee9da40e254c8a6a826416a
--- /dev/null
+++ b/src/nf_modules/fastqc/fastqc_paired.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastqc_fastq {
+        cpus = 1
+        container = "lbmc/fastqc:0.11.5"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastqc_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/fastqc_0.11.5"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/fastqc/fastqc_paired.nf b/src/nf_modules/fastqc/fastqc_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..6755edec7dca244b1c1581dc6459cd2b8afcc996
--- /dev/null
+++ b/src/nf_modules/fastqc/fastqc_paired.nf
@@ -0,0 +1,26 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+
+process fastqc_fastq {
+  tag "$pair_id"
+  publishDir "results/fastq/fastqc/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+
+  output:
+    file "*.{zip,html}" into fastqc_report
+
+  script:
+"""
+fastqc --quiet --threads ${task.cpus} --format fastq --outdir ./ \
+${reads[0]} ${reads[1]}
+"""
+}
+
diff --git a/src/nf_modules/fastqc/fastqc_single.config b/src/nf_modules/fastqc/fastqc_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..743c85ec616152e94ee9da40e254c8a6a826416a
--- /dev/null
+++ b/src/nf_modules/fastqc/fastqc_single.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastqc_fastq {
+        cpus = 1
+        container = "lbmc/fastqc:0.11.5"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastqc_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/fastqc_0.11.5"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/fastqc/fastqc_single.nf b/src/nf_modules/fastqc/fastqc_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..ab7e22aade6df50ad6a87f574c9d1eb57cd35c02
--- /dev/null
+++ b/src/nf_modules/fastqc/fastqc_single.nf
@@ -0,0 +1,26 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+
+process fastqc_fastq {
+  tag "$file_id"
+  publishDir "results/fastq/fastqc/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+
+  output:
+    file "*.{zip,html}" into fastqc_report
+
+  script:
+"""
+fastqc --quiet --threads ${task.cpus} --format fastq --outdir ./ ${reads}
+"""
+}
+
diff --git a/src/nf_modules/fastqc/tests.sh b/src/nf_modules/fastqc/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..7002e71d6396d407f5ba9f37864ccdbf661597f3
--- /dev/null
+++ b/src/nf_modules/fastqc/tests.sh
@@ -0,0 +1,25 @@
+./nextflow src/nf_modules/fastqc/fastqc_paired.nf \
+  -c src/nf_modules/fastqc/fastqc_paired.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/fastqc/fastqc_single.nf \
+  -c src/nf_modules/fastqc/fastqc_single.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_S.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/fastqc/fastqc_paired.nf \
+  -c src/nf_modules/fastqc/fastqc_paired.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/fastqc/fastqc_single.nf \
+  -c src/nf_modules/fastqc/fastqc_single.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_S.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/hisat2/indexing.config b/src/nf_modules/hisat2/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..ab38148c140ddf8691df3b1ded842c328ad9f027
--- /dev/null
+++ b/src/nf_modules/hisat2/indexing.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        container = "lbmc/hisat2:2.1.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        cpus = 4
+        container = "lbmc/hisat2:2.1.0"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/hisat2_2.1.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        memory = "20GB"
+        cpus = 16
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/hisat2:2.1.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/hisat2/indexing.nf b/src/nf_modules/hisat2/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..1b11b3ef7ec09a21e84e2dfd33cf6bea129bfa52
--- /dev/null
+++ b/src/nf_modules/hisat2/indexing.nf
@@ -0,0 +1,32 @@
+/*
+* Hisat2 :
+* Input  : fasta files
+* Output : hisat2 index files
+*/
+
+/*                      fasta indexing                                     */
+params.fasta = "$baseDir/data/fasta/*.fasta"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_file }
+
+process index_fasta {
+  tag "$fasta.baseName"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_file
+
+  output:
+    file "*.index*" into index_files
+
+  script:
+"""
+hisat2-build -p ${task.cpus} ${fasta} ${fasta.baseName}.index
+"""
+}
diff --git a/src/nf_modules/hisat2/mapping_paired.config b/src/nf_modules/hisat2/mapping_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..b3e4df5797cd3b98c7c28632f44944f50e5188fc
--- /dev/null
+++ b/src/nf_modules/hisat2/mapping_paired.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        cpus = 4
+        container = "lbmc/hisat2:2.1.0"
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        cpus = 4
+        container = "lbmc/hisat2:2.1.0"
+      }
+    }
+  }
+  psmn {
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "hisat2/2.1.0:samtools/1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        memory = "20GB"
+        cpus = 16
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/hisat2:2.1.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/hisat2/mapping_paired.nf b/src/nf_modules/hisat2/mapping_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..28b37e005db84e248fa72209ec1dfa2a290bd264
--- /dev/null
+++ b/src/nf_modules/hisat2/mapping_paired.nf
@@ -0,0 +1,48 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  file "*" into counts_files
+  set pair_id, "*.bam" into bam_files
+  file "*_report.txt" into mapping_report
+
+  script:
+  index_id = index[0]
+  for (index_file in index) {
+    if (index_file =~ /.*\.1\.ht2/ && !(index_file =~ /.*\.rev\.1\.ht2/)) {
+        index_id = ( index_file =~ /(.*)\.1\.ht2/)[0][1]
+    }
+  }
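+  // index_id now holds the basename hisat2 expects: the name of the
+  // staged "*.1.ht2" file minus its suffix ("*.rev.1.ht2" is skipped)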
+"""
+hisat2 -p ${task.cpus} \
+  -x ${index_id} \
+  -1 ${reads[0]} \
+  -2 ${reads[1]} 2> \
+${pair_id}_hisat2_report.txt | \
+samtools view -Sb - > ${pair_id}.bam
+
+if grep -q "Error" ${pair_id}_hisat2_report.txt; then
+  exit 1
+fi
+"""
+}
diff --git a/src/nf_modules/hisat2/mapping_single.config b/src/nf_modules/hisat2/mapping_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..b3e4df5797cd3b98c7c28632f44944f50e5188fc
--- /dev/null
+++ b/src/nf_modules/hisat2/mapping_single.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        cpus = 4
+        container = "lbmc/hisat2:2.1.0"
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        cpus = 4
+        container = "lbmc/hisat2:2.1.0"
+      }
+    }
+  }
+  psmn {
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "hisat2/2.1.0:samtools/1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        memory = "20GB"
+        cpus = 16
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/hisat2:2.1.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/hisat2/mapping_single.nf b/src/nf_modules/hisat2/mapping_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..0fdb729e90f9bdc097209846e20af08a6a0ce41c
--- /dev/null
+++ b/src/nf_modules/hisat2/mapping_single.nf
@@ -0,0 +1,53 @@
+/*
+* for single-end data
+*/
+
+params.fastq = "$baseDir/data/fastq/*.fastq"
+params.index = "$baseDir/data/index/*.index*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$file_id"
+  publishDir "results/mapping/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  file "*" into count_files
+  set file_id, "*.bam" into bam_files
+  file "*_report.txt" into mapping_report
+
+  script:
+  index_id = index[0]
+  for (index_file in index) {
+    if (index_file =~ /.*\.1\.ht2/ && !(index_file =~ /.*\.rev\.1\.ht2/)) {
+        index_id = ( index_file =~ /(.*)\.1\.ht2/)[0][1]
+    }
+  }
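+  // index_id is the staged "*.1.ht2" file minus its suffix, the
+  // basename hisat2 expects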
+"""
+hisat2 -p ${task.cpus} \
+ -x ${index_id} \
+ -U ${reads} 2> \
+${file_id}_hisat2_report.txt | \
+samtools view -Sb - > ${file_id}.bam
+
+if grep -q "Error" ${file_id}_hisat2_report.txt; then
+  exit 1
+fi
+
+"""
+}
diff --git a/src/nf_modules/hisat2/tests.sh b/src/nf_modules/hisat2/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..50e4396652e867a3bc114db2ba79b4af4dc7de9a
--- /dev/null
+++ b/src/nf_modules/hisat2/tests.sh
@@ -0,0 +1,39 @@
+./nextflow src/nf_modules/hisat2/indexing.nf \
+  -c src/nf_modules/hisat2/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/hisat2/mapping_paired.nf \
+  -c src/nf_modules/hisat2/mapping_paired.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/hisat2/mapping_single.nf \
+  -c src/nf_modules/hisat2/mapping_single.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/hisat2/indexing.nf \
+  -c src/nf_modules/hisat2/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/hisat2/mapping_paired.nf \
+  -c src/nf_modules/hisat2/mapping_paired.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq"
+
+./nextflow src/nf_modules/hisat2/mapping_single.nf \
+  -c src/nf_modules/hisat2/mapping_single.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq"
+fi
diff --git a/src/nf_modules/htseq/htseq.config b/src/nf_modules/htseq/htseq.config
new file mode 100644
index 0000000000000000000000000000000000000000..2eebc16c3af08c2bcf5924b42a6ed346a421a4f4
--- /dev/null
+++ b/src/nf_modules/htseq/htseq.config
@@ -0,0 +1,83 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 1
+      }
+      withName: counting {
+        container = "lbmc/htseq:0.11.2"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 1
+      }
+      withName: counting {
+        container = "lbmc/htseq:0.11.2"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: sort_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: counting {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "htseq/0.11.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: counting {
+        container = "lbmc/htseq:0.11.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/htseq/htseq.nf b/src/nf_modules/htseq/htseq.nf
new file mode 100644
index 0000000000000000000000000000000000000000..7cade9a55b17ced135f32a36ffd90dc5354b72af
--- /dev/null
+++ b/src/nf_modules/htseq/htseq.nf
@@ -0,0 +1,52 @@
+params.bam = "$baseDir/data/bam/*.bam"
+params.gtf = "$baseDir/data/annotation/*.gtf"
+
+log.info "bam files : ${params.bam}"
+log.info "gtf files : ${params.gtf}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+Channel
+  .fromPath( params.gtf )
+  .ifEmpty { error "Cannot find any gtf file matching: ${params.gtf}" }
+  .set { gtf_file }
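+
+// htseq-count is run with "-r pos" below, so sort_bam must produce a
+// position-sorted alignment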
+
+process sort_bam {
+  tag "$file_id"
+  cpus 4
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_sorted.sam" into sorted_bam_files
+
+  script:
+"""
+# sort bam by position, the order expected by htseq-count -r pos
+samtools sort -@ ${task.cpus} -O SAM -o ${file_id}_sorted.sam ${bam}
+"""
+}
+
+process counting {
+  tag "$file_id"
+  publishDir "results/quantification/", mode: 'copy'
+
+  input:
+  set file_id, file(bam) from sorted_bam_files
+  file gtf from gtf_file
+
+  output:
+  file "*.count" into count_files
+
+  script:
+"""
+htseq-count ${bam} ${gtf} \
+-r pos --mode=intersection-nonempty -a 10 -s no -t exon -i gene_id \
+> ${file_id}.count
+"""
+}
+
diff --git a/src/nf_modules/htseq/tests.sh b/src/nf_modules/htseq/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..eada26b6d280b80f6ebb7db3d3dc5bf652013b27
--- /dev/null
+++ b/src/nf_modules/htseq/tests.sh
@@ -0,0 +1,15 @@
+./nextflow src/nf_modules/htseq/htseq.nf \
+  -c src/nf_modules/htseq/htseq.config \
+  -profile docker \
+  --gtf "data/tiny_dataset/annot/tiny.gff" \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/htseq/htseq.nf \
+  -c src/nf_modules/htseq/htseq.config \
+  -profile singularity \
+  --gtf "data/tiny_dataset/annot/tiny.gff" \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+fi
diff --git a/src/nf_modules/kallisto/indexing.config b/src/nf_modules/kallisto/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..5dfd26a2e2125034582ab419119092be396502bb
--- /dev/null
+++ b/src/nf_modules/kallisto/indexing.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        container = "lbmc/kallisto:0.44.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        container = "lbmc/kallisto:0.44.0"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/kallisto_0.44.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/kallisto:0.44.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/kallisto/indexing.nf b/src/nf_modules/kallisto/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..ea153569693b5dd057727a072d14dadd0ce77bf5
--- /dev/null
+++ b/src/nf_modules/kallisto/indexing.nf
@@ -0,0 +1,27 @@
+params.fasta = "$baseDir/data/fasta/*.fasta"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_file }
+
+process index_fasta {
+  tag "$fasta.baseName"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_file
+
+  output:
+    file "*.index*" into index_files
+    file "*_kallisto_report.txt" into index_files_report
+
+  script:
+"""
+kallisto index -k 31 --make-unique -i ${fasta.baseName}.index ${fasta} \
+2> ${fasta.baseName}_kallisto_report.txt
+"""
+}
+
diff --git a/src/nf_modules/kallisto/mapping_paired.config b/src/nf_modules/kallisto/mapping_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..04b61fea005056198f09c2a6b8418608dd1c94b4
--- /dev/null
+++ b/src/nf_modules/kallisto/mapping_paired.config
@@ -0,0 +1,57 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/kallisto:0.44.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/kallisto:0.44.0"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/kallisto_0.44.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/kallisto:0.44.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
+
+
diff --git a/src/nf_modules/kallisto/mapping_paired.nf b/src/nf_modules/kallisto/mapping_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..455e70cd4030a28e9fe3fcaccadc92e915c75859
--- /dev/null
+++ b/src/nf_modules/kallisto/mapping_paired.nf
@@ -0,0 +1,35 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/quantification/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  file "*" into counts_files
+
+  script:
+"""
+mkdir ${pair_id}
+kallisto quant -i ${index} -t ${task.cpus} \
+--bias --bootstrap-samples 100 -o ${pair_id} \
+${reads[0]} ${reads[1]} &> ${pair_id}/kallisto_report.txt
+"""
+}
+
diff --git a/src/nf_modules/kallisto/mapping_single.config b/src/nf_modules/kallisto/mapping_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..04b61fea005056198f09c2a6b8418608dd1c94b4
--- /dev/null
+++ b/src/nf_modules/kallisto/mapping_single.config
@@ -0,0 +1,57 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/kallisto:0.44.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/kallisto:0.44.0"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/kallisto_0.44.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/kallisto:0.44.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
+
+
diff --git a/src/nf_modules/kallisto/mapping_single.nf b/src/nf_modules/kallisto/mapping_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..95bed86418491de0f8038a23c14c6b75eb973d55
--- /dev/null
+++ b/src/nf_modules/kallisto/mapping_single.nf
@@ -0,0 +1,41 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+params.index = "$baseDir/data/index/*.index*"
+params.mean = 200
+params.sd = 100
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+log.info "mean read size: ${params.mean}"
+log.info "sd read size: ${params.sd}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
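+
+// kallisto in --single mode cannot infer the fragment length
+// distribution, hence the explicit -l (mean) and -s (sd) parameters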
+
+process mapping_fastq {
+  tag "$file_id"
+  publishDir "results/mapping/quantification/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  file "*" into count_files
+
+  script:
+"""
+mkdir ${file_id}
+kallisto quant -i ${index} -t ${task.cpus} --single \
+--bias --bootstrap-samples 100 -o ${file_id} \
+-l ${params.mean} -s ${params.sd} \
+${reads} &> ${file_id}/kallisto_report.txt
+"""
+}
+
diff --git a/src/nf_modules/kallisto/tests.sh b/src/nf_modules/kallisto/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..412a29754d7948ba562b31e7b54038db80f60241
--- /dev/null
+++ b/src/nf_modules/kallisto/tests.sh
@@ -0,0 +1,41 @@
+./nextflow src/nf_modules/kallisto/indexing.nf \
+  -c src/nf_modules/kallisto/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/kallisto/mapping_single.nf \
+  -c src/nf_modules/kallisto/mapping_single.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.index" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/kallisto/mapping_paired.nf \
+  -c src/nf_modules/kallisto/mapping_paired.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.index" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/kallisto/indexing.nf \
+  -c src/nf_modules/kallisto/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  -resume
+
+./nextflow src/nf_modules/kallisto/mapping_single.nf \
+  -c src/nf_modules/kallisto/mapping_single.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.index" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/kallisto/mapping_paired.nf \
+  -c src/nf_modules/kallisto/mapping_paired.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.index" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/macs2/peak_calling.config b/src/nf_modules/macs2/peak_calling.config
new file mode 100644
index 0000000000000000000000000000000000000000..35186c52c4701124f937a5daa625e72e8cb0a85e
--- /dev/null
+++ b/src/nf_modules/macs2/peak_calling.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: peak_calling {
+        container = "lbmc/macs2:2.1.2"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: peak_calling {
+        container = "lbmc/macs2:2.1.2"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: peak_calling {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/macs2_2.1.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: peak_calling {
+        container = "lbmc/macs2:2.1.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/macs2/peak_calling.nf b/src/nf_modules/macs2/peak_calling.nf
new file mode 100644
index 0000000000000000000000000000000000000000..12398f2e92e48f8df7936efe62d4778c8112cdfd
--- /dev/null
+++ b/src/nf_modules/macs2/peak_calling.nf
@@ -0,0 +1,52 @@
+params.genome_size = "hs"
+params.control_tag = "control"
+log.info "bam files : ${params.bam}"
+log.info "genome size : ${params.genome_size}"
+log.info "control tag : ${params.control_tag}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+/* split bam Channel into control and ip if "control" keyword is detected*/
+bam_files_control = Channel.create()
+bam_files_ip = Channel.create()
+bam_files.choice(
+  bam_files_control,
+  bam_files_ip ) { a -> a[0] =~ /.*${params.control_tag}.*/ ? 0 : 1 }
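+
+// the closure routes ids matching control_tag to bam_files_control
+// (choice 0) and every other bam to bam_files_ip (choice 1)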
+
+process peak_calling {
+  tag "${file_id}"
+  publishDir "results/peak_calling/${file_id}", mode: 'copy'
+
+  input:
+    set file_id, file(file_ip) from bam_files_ip
+    set file_id_control, file(file_control) from bam_files_control
+      .ifEmpty {
+        error "Cannot find any bam files matching: ${params.control_tag}"
+      }
+      .collect()
+
+  output:
+    file "*" into peak_output
+    file "*_report.txt" into peak_calling_report
+
+  script:
+/* remove --nomodel option for real dataset */
+"""
+macs2 callpeak \
+  --nomodel \
+  --treatment ${file_ip} \
+  --control ${file_control} \
+  --name ${file_id} \
+  --gsize ${params.genome_size} 2> \
+${file_ip}_macs2_report.txt
+
+if grep -q "ERROR" ${file_ip}_macs2_report.txt; then
+  echo "MACS2 error"
+  exit 1
+fi
+"""
+}
diff --git a/src/nf_modules/macs2/tests.sh b/src/nf_modules/macs2/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..fc73ce29a9e9aed5ee83ba0e171e49059185bc5c
--- /dev/null
+++ b/src/nf_modules/macs2/tests.sh
@@ -0,0 +1,18 @@
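+# duplicate the tiny bam to serve as the control sample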
+cp data/tiny_dataset/map/tiny_v2.bam data/tiny_dataset/map/tiny_v2_control.bam
+./nextflow src/nf_modules/macs2/peak_calling.nf \
+  -c src/nf_modules/macs2/peak_calling.config \
+  -profile docker \
+  -resume \
+  --bam "data/tiny_dataset/map/tiny_v2*.bam" \
+  --genome_size 129984 \
+  --control_tag "control"
+
+if [ -x "$(command -v singularity)" ]; then
+  ./nextflow src/nf_modules/macs2/peak_calling.nf \
+    -c src/nf_modules/macs2/peak_calling.config \
+    -profile singularity \
+    -resume \
+    --bam "data/tiny_dataset/map/tiny_v2*.bam" \
+    --genome_size 129984 \
+    --control_tag "control"
+fi
diff --git a/src/nf_modules/multiqc/multiqc_paired.config b/src/nf_modules/multiqc/multiqc_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..1cb17b7cd1cdb912c835877e043d1af80f34002d
--- /dev/null
+++ b/src/nf_modules/multiqc/multiqc_paired.config
@@ -0,0 +1,89 @@
+
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        cpus = 1
+      }
+      withName: multiqc {
+        container = "lbmc/multiqc:1.7"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        cpus = 1
+      }
+      withName: multiqc {
+        container = "lbmc/multiqc:1.7"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastqc_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/fastqc_0.11.5"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "5GB"
+        time = "6h"
+        queueSize = 170
+        pollInterval = '60sec'
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: multiqc {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "multiqc/1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "5GB"
+        time = "6h"
+        queueSize = 170
+        pollInterval = '60sec'
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: multiqc {
+        container = "lbmc/multiqc:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
+
diff --git a/src/nf_modules/multiqc/multiqc_paired.nf b/src/nf_modules/multiqc/multiqc_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..b459a9bbc8ddd4c89cd51f164d3ef3a14c814841
--- /dev/null
+++ b/src/nf_modules/multiqc/multiqc_paired.nf
@@ -0,0 +1,43 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+
+process fastqc_fastq {
+  tag "$pair_id"
+  publishDir "results/fastq/fastqc/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+
+  output:
+    file "*.{zip,html}" into fastqc_report
+
+  script:
+"""
+fastqc --quiet --threads ${task.cpus} --format fastq --outdir ./ \
+${reads[0]} ${reads[1]}
+"""
+}
+
+process multiqc {
+  tag "$report[0].baseName"
+  publishDir "results/fastq/multiqc/", mode: 'copy'
+  cpus 1
+
+  input:
+    file report from fastqc_report.collect()
+
+  output:
+    file "*multiqc_*" into multiqc_report
+
+  script:
+"""
+multiqc -f .
+"""
+}
+
diff --git a/src/nf_modules/multiqc/multiqc_single.config b/src/nf_modules/multiqc/multiqc_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..ba1bbe7c996ec4f7ae9ce1e2c4cefbcb883087b1
--- /dev/null
+++ b/src/nf_modules/multiqc/multiqc_single.config
@@ -0,0 +1,87 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        cpus = 1
+      }
+      withName: multiqc {
+        container = "lbmc/multiqc:1.7"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        cpus = 1
+      }
+      withName: multiqc {
+        container = "lbmc/multiqc:1.7"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastqc_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/fastqc_0.11.5"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "5GB"
+        time = "6h"
+        queueSize = 170
+        pollInterval = '60sec'
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: multiqc {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "multiqc/1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "5GB"
+        time = "6h"
+        queueSize = 170
+        pollInterval = '60sec'
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastqc_fastq {
+        container = "lbmc/fastqc:0.11.5"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: multiqc {
+        container = "lbmc/multiqc:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/multiqc/multiqc_single.nf b/src/nf_modules/multiqc/multiqc_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..ea1115b546f0776a4970e4a56fefcce5e3b90de9
--- /dev/null
+++ b/src/nf_modules/multiqc/multiqc_single.nf
@@ -0,0 +1,44 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+
+process fastqc_fastq {
+  tag "$file_id"
+  publishDir "results/fastq/fastqc/", mode: 'copy'
+  cpus 1
+
+  input:
+    set file_id, file(reads) from fastq_files
+
+  output:
+    file "*.{zip,html}" into fastqc_report
+
+  script:
+"""
+fastqc --quiet --threads ${task.cpus} --format fastq --outdir ./ ${reads}
+"""
+}
+
+process multiqc {
+  tag "$report[0].baseName"
+  publishDir "results/fastq/multiqc/", mode: 'copy'
+  cpus 1
+
+  input:
+    file report from fastqc_report.collect()
+
+  output:
+    file "*multiqc_*" into multiqc_report
+
+  script:
+"""
+multiqc -f .
+"""
+}
+
diff --git a/src/nf_modules/multiqc/tests.sh b/src/nf_modules/multiqc/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ff23852be9e17167a860752f869a5cde154e4d87
--- /dev/null
+++ b/src/nf_modules/multiqc/tests.sh
@@ -0,0 +1,25 @@
+./nextflow src/nf_modules/multiqc/multiqc_paired.nf \
+  -c src/nf_modules/multiqc/multiqc_paired.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/multiqc/multiqc_single.nf \
+  -c src/nf_modules/multiqc/multiqc_single.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_S.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/multiqc/multiqc_paired.nf \
+  -c src/nf_modules/multiqc/multiqc_paired.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/multiqc/multiqc_single.nf \
+  -c src/nf_modules/multiqc/multiqc_single.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_S.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/music/peak_calling_single.config b/src/nf_modules/music/peak_calling_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..4a500bb9c52cecf86bca242df15b8024f65fe632
--- /dev/null
+++ b/src/nf_modules/music/peak_calling_single.config
@@ -0,0 +1,112 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: compute_mappability {
+        container = "lbmc/music:6613c53"
+        cpus = 1
+      }
+      withName: music_preprocessing {
+        container = "lbmc/music:6613c53"
+        cpus = 1
+      }
+      withName: music_computation{
+        container = "lbmc/music:6613c53"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: compute_mappability {
+        container = "lbmc/music:6613c53"
+        cpus = 1
+      }
+      withName: music_preprocessing {
+        container = "lbmc/music:6613c53"
+        cpus = 1
+      }
+      withName: music_computation{
+        container = "lbmc/music:6613c53"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: compute_mappability {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/music_6613c53"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: music_preprocessing {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "music/6613c53"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: music_computation{
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "music/6613c53"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: compute_mappability {
+        container = "lbmc/music:6613c53"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: music_preprocessing {
+        container = "lbmc/music:6613c53"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: music_computation{
+        container = "lbmc/music:6613c53"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/music/peak_calling_single.nf b/src/nf_modules/music/peak_calling_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..be280394b80e6ddd9644da85f62e8d2be5d843ed
--- /dev/null
+++ b/src/nf_modules/music/peak_calling_single.nf
@@ -0,0 +1,104 @@
+params.bam = "$baseDir/data/bam/*.bam"
+params.index = "$baseDir/data/bam/*.bam.bai"
+params.fasta = "$baseDir/data/fasta/*.fasta"
+params.read_size = 100
+params.frag_size = 200
+params.step_l = 50
+params.min_l = 200
+params.max_l = 5000
+log.info "bam files : ${params.bam}"
+log.info "index files : ${params.index}"
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.fasta}" }
+  .set { fasta_files }
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process compute_mappability {
+  tag "${fasta.baseName}"
+
+  input:
+    file index from index_files.collect()
+    file fasta from fasta_files
+
+  output:
+    file "*.bin" into mappability
+    file "temp/chr_ids.txt" into chr_ids
+
+  script:
+
+"""
+generate_multimappability_signal.csh ${fasta} ${params.read_size} ./
+bash temp_map_reads.csh
+bash temp_process_mapping.csh
+"""
+}
+
+process music_preprocessing {
+  tag "${file_id}"
+
+  input:
+    set file_id, file(bam) from bam_files
+    file chr_ids from chr_ids.collect()
+
+  output:
+    set file_id, "preprocessed/*.tar" into preprocessed_bam_files
+
+  script:
+
+"""
+mkdir preprocessed
+samtools view ${bam} | \
+MUSIC -preprocess SAM stdin preprocessed/
+mkdir preprocessed/sorted
+MUSIC -sort_reads preprocessed/ preprocessed/sorted/
+mkdir preprocessed/dedup
+MUSIC -remove_duplicates ./preprocessed/sorted 2 preprocessed/dedup/
+cd preprocessed
+tar -c -f ${file_id}.tar *
+"""
+}
+
+preprocessed_bam_files_control = Channel.create()
+preprocessed_bam_files_chip = Channel.create()
+preprocessed_bam_files.choice(
+  preprocessed_bam_files_control,
+  preprocessed_bam_files_chip ) { a -> a[0] =~ /.*control.*/ ? 0 : 1 }
+
+process music_computation {
+  tag "${file_id}"
+  publishDir "results/peak_calling/${file_id}", mode: 'copy'
+
+  input:
+    set file_id, file(chip) from preprocessed_bam_files_chip
+    set file_id_control, file(control) from preprocessed_bam_files_control.collect()
+    file mapp from mappability.collect()
+
+  output:
+    file "*" into music_output_forward
+    file "*.bed" into peaks_forward
+
+  script:
+
+"""
+mkdir mappability control chip
+mv ${mapp} mappability/
+tar -xf ${control} -C control/
+tar -xf ${chip} -C chip/
+
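+# compute per-window p-values vs fold-change, then call punctate enriched
+# regions at multiple scales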
+MUSIC -get_per_win_p_vals_vs_FC -chip chip/ -control control/ \
+  -l_win_step ${params.step_l} \
+  -l_win_min ${params.min_l} -l_win_max ${params.max_l}
+MUSIC -get_multiscale_punctate_ERs \
+  -chip chip/ -control control/ -mapp mappability/ \
+  -l_mapp ${params.read_size} -l_frag ${params.frag_size} -q_val 1 -l_p 0
+ls -l
+"""
+}
diff --git a/src/nf_modules/music/tests.sh b/src/nf_modules/music/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4169c449d5706e01a28c443247bf04c2a39fe40d
--- /dev/null
+++ b/src/nf_modules/music/tests.sh
@@ -0,0 +1,20 @@
+cp data/tiny_dataset/map/tiny_v2.sort.bam data/tiny_dataset/map/tiny_v2_control.sort.bam
+./nextflow src/nf_modules/music/peak_calling_single.nf \
+  -c src/nf_modules/music/peak_calling_single.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --bam "data/tiny_dataset/map/*.sort.bam" \
+  --index "data/tiny_dataset/map/*.sort.bam.bai*" \
+  --read_size 50 --frag_size 300 \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/music/peak_calling_single.nf \
+  -c src/nf_modules/music/peak_calling_single.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --bam "data/tiny_dataset/map/*.sort.bam" \
+  --index "data/tiny_dataset/map/*.sort.bam.bai*" \
+  --read_size 50 --frag_size 300 \
+  -resume
+fi
diff --git a/src/nf_modules/rsem/indexing.config b/src/nf_modules/rsem/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..d5d3c7814b253b5869e799f7268f6985972d3d2c
--- /dev/null
+++ b/src/nf_modules/rsem/indexing.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        container = "lbmc/rsem:1.3.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        container = "lbmc/rsem:1.3.0"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/rsem_1.3.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/rsem:1.3.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/rsem/indexing.nf b/src/nf_modules/rsem/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..4b95622aaa31f5eece4390f3f09418750e44570c
--- /dev/null
+++ b/src/nf_modules/rsem/indexing.nf
@@ -0,0 +1,39 @@
+params.fasta = "$baseDir/data/bam/*.fasta"
+params.annotation = "$baseDir/data/bam/*.gff3"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_file }
+Channel
+  .fromPath( params.annotation )
+  .ifEmpty { error "Cannot find any annotation files matching: ${params.annotation}" }
+  .set { annotation_file }
+
+process index_fasta {
+  tag "$fasta.baseName"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_file
+    file annotation from annotation_file
+
+  output:
+    file "*.index*" into index_files
+
+  script:
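+  // pick the rsem annotation flag from the file extension: --gff3 by default, --gtf for .gtf files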
+  def cmd_annotation = "--gff3 ${annotation}"
+  if(annotation ==~ /.*\.gtf$/){
+    cmd_annotation = "--gtf ${annotation}"
+  }
+"""
+rsem-prepare-reference -p ${task.cpus} --bowtie2 \
+--bowtie2-path \$(which bowtie2 | sed 's/bowtie2\$//g') \
+${cmd_annotation} ${fasta} ${fasta.baseName}.index > \
+${fasta.baseName}_rsem_bowtie2_report.txt
+"""
+}
+
+
diff --git a/src/nf_modules/rsem/quantification_paired.config b/src/nf_modules/rsem/quantification_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..bde03fc98ab828ebc58ee268a2dd9b1fe342f77b
--- /dev/null
+++ b/src/nf_modules/rsem/quantification_paired.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/rsem:1.3.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/rsem:1.3.0"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/rsem_1.3.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/rsem:1.3.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/rsem/quantification_paired.nf b/src/nf_modules/rsem/quantification_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..c9e23ac8d19e510eee63823069dcb5bc86a38c9b
--- /dev/null
+++ b/src/nf_modules/rsem/quantification_paired.nf
@@ -0,0 +1,48 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/quantification/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  file index from index_files.toList()
+
+  output:
+  file "*" into counts_files
+
+  script:
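+  // recover the bowtie2 index prefix from the '*.1.bt2' file, skipping the reverse index '*.rev.1.bt2'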
+  index_id = index[0]
+  for (index_file in index) {
+    if (index_file =~ /.*\.1\.bt2/ && !(index_file =~ /.*\.rev\.1\.bt2/)) {
+        index_id = ( index_file =~ /(.*)\.1\.bt2/)[0][1]
+    }
+  }
+"""
+rsem-calculate-expression --bowtie2 \
+--bowtie2-path \$(which bowtie2 | sed 's/bowtie2\$//g') \
+--bowtie2-sensitivity-level "very_sensitive" \
+--output-genome-bam -p ${task.cpus} \
+--paired-end ${reads[0]} ${reads[1]} ${index_id} ${pair_id} \
+2> ${pair_id}_rsem_bowtie2_report.txt
+
+if grep -q "Error" ${pair_id}_rsem_bowtie2_report.txt; then
+  exit 1
+fi
+"""
+}
+
+
diff --git a/src/nf_modules/rsem/quantification_single.config b/src/nf_modules/rsem/quantification_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..bde03fc98ab828ebc58ee268a2dd9b1fe342f77b
--- /dev/null
+++ b/src/nf_modules/rsem/quantification_single.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/rsem:1.3.0"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/rsem:1.3.0"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/rsem_1.3.0"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/rsem:1.3.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/rsem/quantification_single.nf b/src/nf_modules/rsem/quantification_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..bacba6b33ab974f4390cdcff141ad4222be5b1db
--- /dev/null
+++ b/src/nf_modules/rsem/quantification_single.nf
@@ -0,0 +1,53 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+params.index = "$baseDir/data/index/*.index*"
+params.mean = 200
+params.sd = 100
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+log.info "mean read size: ${params.mean}"
+log.info "sd read size: ${params.sd}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$file_id"
+  publishDir "results/mapping/quantification/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  file "*" into count_files
+
+  script:
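+  // recover the bowtie2 index prefix from the '*.1.bt2' file, skipping the reverse index '*.rev.1.bt2'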
+  index_id = index[0]
+  for (index_file in index) {
+    if (index_file =~ /.*\.1\.bt2/ && !(index_file =~ /.*\.rev\.1\.bt2/)) {
+        index_id = ( index_file =~ /(.*)\.1\.bt2/)[0][1]
+    }
+  }
+"""
+rsem-calculate-expression --bowtie2 \
+--bowtie2-path \$(which bowtie2 | sed 's/bowtie2\$//g') \
+--bowtie2-sensitivity-level "very_sensitive" \
+--fragment-length-mean ${params.mean} --fragment-length-sd ${params.sd} \
+--output-genome-bam -p ${task.cpus} \
+${reads} ${index_id} ${file_id} \
+2> ${file_id}_rsem_bowtie2_report.txt
+
+if grep -q "Error" ${file_id}_rsem_bowtie2_report.txt; then
+  exit 1
+fi
+"""
+}
+
diff --git a/src/nf_modules/rsem/tests.sh b/src/nf_modules/rsem/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e4cd27267371e378d4c47454f9344298cdde4566
--- /dev/null
+++ b/src/nf_modules/rsem/tests.sh
@@ -0,0 +1,44 @@
+./nextflow src/nf_modules/rsem/indexing.nf \
+  -c src/nf_modules/rsem/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --annotation "data/tiny_dataset/annot/tiny.gff" \
+  -resume
+
+./nextflow src/nf_modules/rsem/quantification_single.nf \
+  -c src/nf_modules/rsem/quantification_single.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R1.fastq" \
+  -resume
+
+./nextflow src/nf_modules/rsem/quantification_paired.nf \
+  -c src/nf_modules/rsem/quantification_paired.config \
+  -profile docker \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/rsem/indexing.nf \
+  -c src/nf_modules/rsem/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --annotation "data/tiny_dataset/annot/tiny.gff" \
+  -resume
+
+./nextflow src/nf_modules/rsem/quantification_single.nf \
+  -c src/nf_modules/rsem/quantification_single.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R1.fastq" \
+  -resume
+
+./nextflow src/nf_modules/rsem/quantification_paired.nf \
+  -c src/nf_modules/rsem/quantification_paired.config \
+  -profile singularity \
+  --index "results/mapping/index/tiny_v2.index*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/sambamba/index_bams.config b/src/nf_modules/sambamba/index_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..3a20b7e120fceb5532a82555829a84e3fe5a435a
--- /dev/null
+++ b/src/nf_modules/sambamba/index_bams.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_bam {
+        container = "lbmc/sambamba:0.6.9"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_bam {
+        container = "lbmc/sambamba:0.6.9"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/sambamba_0.6.9"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_bam {
+        container = "lbmc/sambamba:0.6.9"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/sambamba/index_bams.nf b/src/nf_modules/sambamba/index_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..3ea36df4a3e512fba7b577b4b2c6916a4e0bb940
--- /dev/null
+++ b/src/nf_modules/sambamba/index_bams.nf
@@ -0,0 +1,25 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process index_bam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*.bam*" into indexed_bam_file
+
+  script:
+"""
+sambamba index -t ${task.cpus} ${bam}
+"""
+}
+
diff --git a/src/nf_modules/sambamba/sort_bams.config b/src/nf_modules/sambamba/sort_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..8612de4aacf04c0ee6eb41e318b118cf3e72b4a6
--- /dev/null
+++ b/src/nf_modules/sambamba/sort_bams.config
@@ -0,0 +1,56 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: sort_bam {
+        container = "lbmc/sambamba:0.6.9"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: sort_bam {
+        container = "lbmc/sambamba:0.6.9"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: sort_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/sambamba_0.6.9"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 4
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: sort_bam {
+        container = "lbmc/sambamba:0.6.9"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+
+    }
+  }
+}
diff --git a/src/nf_modules/sambamba/sort_bams.nf b/src/nf_modules/sambamba/sort_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..ac610cdca146693df69d8b765928d406b36652b6
--- /dev/null
+++ b/src/nf_modules/sambamba/sort_bams.nf
@@ -0,0 +1,26 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process sort_bam {
+  tag "$file_id"
+  cpus 4
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_sorted.bam" into sorted_bam_files
+
+  script:
+"""
+sambamba sort -t ${task.cpus} -o ${file_id}_sorted.bam ${bam}
+"""
+}
+
diff --git a/src/nf_modules/sambamba/split_bams.config b/src/nf_modules/sambamba/split_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..4ce0bcb06a61c5941223381c16f1996c48ee67b1
--- /dev/null
+++ b/src/nf_modules/sambamba/split_bams.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: split_bam {
+        container = "lbmc/sambamba:0.6.9"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: split_bam {
+        container = "lbmc/sambamba:0.6.9"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: split_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/sambamba_0.6.9"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: split_bam {
+        container = "lbmc/sambamba:0.6.9"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/sambamba/split_bams.nf b/src/nf_modules/sambamba/split_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..ba64d2e2b77eb3a9b24ec1355c2b3a68b95c7a4d
--- /dev/null
+++ b/src/nf_modules/sambamba/split_bams.nf
@@ -0,0 +1,27 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process split_bam {
+  tag "$file_id"
+  cpus 4
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_forward.bam*" into forward_bam_files
+    set file_id, "*_reverse.bam*" into reverse_bam_files
+  script:
+"""
+sambamba view -t ${task.cpus} -h -F "strand == '+'" ${bam} > ${file_id}_forward.bam
+sambamba view -t ${task.cpus} -h -F "strand == '-'" ${bam} > ${file_id}_reverse.bam
+"""
+}
+
diff --git a/src/nf_modules/sambamba/tests.sh b/src/nf_modules/sambamba/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..256df65f9ed23efac640aea6548f15397479ca60
--- /dev/null
+++ b/src/nf_modules/sambamba/tests.sh
@@ -0,0 +1,37 @@
+./nextflow src/nf_modules/sambamba/sort_bams.nf \
+  -c src/nf_modules/sambamba/sort_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+./nextflow src/nf_modules/sambamba/index_bams.nf \
+  -c src/nf_modules/sambamba/index_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.sort.bam" \
+  -resume
+
+./nextflow src/nf_modules/sambamba/split_bams.nf \
+  -c src/nf_modules/sambamba/split_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/sambamba/sort_bams.nf \
+  -c src/nf_modules/sambamba/sort_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+./nextflow src/nf_modules/sambamba/index_bams.nf \
+  -c src/nf_modules/sambamba/index_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.sort.bam" \
+  -resume
+
+./nextflow src/nf_modules/sambamba/split_bams.nf \
+  -c src/nf_modules/sambamba/split_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+fi
diff --git a/src/nf_modules/samblaster/dedup_sams.config b/src/nf_modules/samblaster/dedup_sams.config
new file mode 100644
index 0000000000000000000000000000000000000000..66f3051148ad1bcfddf4ca73c85b2b9040a3c963
--- /dev/null
+++ b/src/nf_modules/samblaster/dedup_sams.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: dedup_sam {
+        container = "lbmc/samblaster:0.1.24"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: dedup_sam {
+        container = "lbmc/samblaster:0.1.24"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: dedup_sam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samblaster_0.1.24"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: dedup_sam {
+        container = "lbmc/sambamba:0.6.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/samblaster/dedup_sams.nf b/src/nf_modules/samblaster/dedup_sams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..bcfd829c6bba1c119ea71e34ef5d148c693e4fcf
--- /dev/null
+++ b/src/nf_modules/samblaster/dedup_sams.nf
@@ -0,0 +1,27 @@
+params.sam = "$baseDir/data/sam/*.bam"
+
+log.info "bam files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process dedup_sam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_dedup.bam*" into dedup_bam_files
+  script:
+"""
+samtools view -h ${bam} | \
+samblaster --addMateTags 2> /dev/null | \
+samtools view -Sb - > ${file_id}_dedup.bam
+"""
+}
+
+
diff --git a/src/nf_modules/samblaster/tests.sh b/src/nf_modules/samblaster/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..10ad93d7841ba4b708664803c0ca1299cc34ee19
--- /dev/null
+++ b/src/nf_modules/samblaster/tests.sh
@@ -0,0 +1,13 @@
+./nextflow src/nf_modules/samblaster/dedup_sams.nf \
+  -c src/nf_modules/samblaster/dedup_sams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/samblaster/dedup_sams.nf \
+  -c src/nf_modules/samblaster/dedup_sams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+fi
diff --git a/src/nf_modules/samtools/filter_bams.config b/src/nf_modules/samtools/filter_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..b2cdd8e5202f179833e877e8346430e01826fc17
--- /dev/null
+++ b/src/nf_modules/samtools/filter_bams.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: filter_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: filter_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: filter_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: filter_bam {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/samtools/filter_bams.nf b/src/nf_modules/samtools/filter_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..6e81b758dede1f317d981409f7c85571ede55b06
--- /dev/null
+++ b/src/nf_modules/samtools/filter_bams.nf
@@ -0,0 +1,32 @@
+params.bam = "$baseDir/data/bam/*.bam"
+params.bed = "$baseDir/data/bam/*.bed"
+
+log.info "bams files : ${params.bam}"
+log.info "bed file : ${params.bed}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+Channel
+  .fromPath( params.bed )
+  .ifEmpty { error "Cannot find any bed file matching: ${params.bed}" }
+  .set { bed_files }
+
+process filter_bam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files
+    file bed from bed_files
+
+  output:
+    set file_id, "*_filtered.bam*" into filtered_bam_files
+  script:
+"""
+samtools view -@ ${task.cpus} -hb ${bam} -L ${bed} > ${file_id}_filtered.bam
+"""
+}
+
+
diff --git a/src/nf_modules/samtools/index_bams.config b/src/nf_modules/samtools/index_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..3e2156e16ee72fddb0b317c0dd22e3342fbe1cd8
--- /dev/null
+++ b/src/nf_modules/samtools/index_bams.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_bam {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/samtools/index_bams.nf b/src/nf_modules/samtools/index_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..489b0f4f71f39d1fdc5b7870547e9fd18a29f9af
--- /dev/null
+++ b/src/nf_modules/samtools/index_bams.nf
@@ -0,0 +1,25 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process index_bam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*.bam*" into indexed_bam_file
+
+  script:
+"""
+samtools index ${bam}
+"""
+}
+
diff --git a/src/nf_modules/samtools/sort_bams.config b/src/nf_modules/samtools/sort_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..dd7cd38b2fe475ad409444ad2406e08c6a21b6cb
--- /dev/null
+++ b/src/nf_modules/samtools/sort_bams.config
@@ -0,0 +1,55 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: sort_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/samtools/sort_bams.nf b/src/nf_modules/samtools/sort_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..db1b30d1d6d59ae74535267815f6f887e2b267dd
--- /dev/null
+++ b/src/nf_modules/samtools/sort_bams.nf
@@ -0,0 +1,25 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process sort_bam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_sorted.bam" into sorted_bam_files
+
+  script:
+"""
+samtools sort -@ ${task.cpus} -O BAM -o ${file_id}_sorted.bam ${bam}
+"""
+}
+
diff --git a/src/nf_modules/samtools/split_bams.config b/src/nf_modules/samtools/split_bams.config
new file mode 100644
index 0000000000000000000000000000000000000000..6c8ca3e963019002889a8f1ee3c3bb1166df7737
--- /dev/null
+++ b/src/nf_modules/samtools/split_bams.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: split_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 2
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: split_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 2
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: split_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: split_bam {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/samtools/split_bams.nf b/src/nf_modules/samtools/split_bams.nf
new file mode 100644
index 0000000000000000000000000000000000000000..0d02a5d170893a88625a0885ffed16e1adbef35c
--- /dev/null
+++ b/src/nf_modules/samtools/split_bams.nf
@@ -0,0 +1,26 @@
+params.bam = "$baseDir/data/bam/*.bam"
+
+log.info "bams files : ${params.bam}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+
+process split_bam {
+  tag "$file_id"
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_forward.bam*" into forward_bam_files
+    set file_id, "*_reverse.bam*" into reverse_bam_files
+  script:
+"""
+# 0x10 is the SAM flag "read reverse strand": -F excludes those reads, -f keeps only them
+samtools view -hb -F 0x10 ${bam} > ${file_id}_forward.bam &
+samtools view -hb -f 0x10 ${bam} > ${file_id}_reverse.bam
+wait # do not let the task end before the backgrounded forward-strand extraction finishes
+"""
+}
+
diff --git a/src/nf_modules/samtools/tests.sh b/src/nf_modules/samtools/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..256fa058beeaf3464391f4e974a2b4a2ef499b0c
--- /dev/null
+++ b/src/nf_modules/samtools/tests.sh
@@ -0,0 +1,51 @@
+./nextflow src/nf_modules/samtools/sort_bams.nf \
+  -c src/nf_modules/samtools/sort_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+./nextflow src/nf_modules/samtools/index_bams.nf \
+  -c src/nf_modules/samtools/index_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.sort.bam" \
+  -resume
+
+./nextflow src/nf_modules/samtools/split_bams.nf \
+  -c src/nf_modules/samtools/split_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+./nextflow src/nf_modules/samtools/filter_bams.nf \
+  -c src/nf_modules/samtools/filter_bams.config \
+  -profile docker \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  --bed "data/tiny_dataset/OLD/2genes.bed" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/samtools/sort_bams.nf \
+  -c src/nf_modules/samtools/sort_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+./nextflow src/nf_modules/samtools/index_bams.nf \
+  -c src/nf_modules/samtools/index_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.sort.bam" \
+  -resume
+
+./nextflow src/nf_modules/samtools/split_bams.nf \
+  -c src/nf_modules/samtools/split_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+./nextflow src/nf_modules/samtools/filter_bams.nf \
+  -c src/nf_modules/samtools/filter_bams.config \
+  -profile singularity \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  --bed "data/tiny_dataset/OLD/2genes.bed" \
+  -resume
+fi
diff --git a/src/nf_modules/sratoolkit/fastqdump.config b/src/nf_modules/sratoolkit/fastqdump.config
new file mode 100644
index 0000000000000000000000000000000000000000..e6e80c5dffe098e1e893af480600a6ce40661d03
--- /dev/null
+++ b/src/nf_modules/sratoolkit/fastqdump.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: fastq_dump {
+        container = "lbmc/sratoolkit:2.8.2"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: fastq_dump {
+        container = "lbmc/sratoolkit:2.8.2"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: fastq_dump {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/sratoolkit_2.8.2"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: fastq_dump {
+        container = "lbmc/sratoolkit:2.8.2"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n\
+        "
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/sratoolkit/fastqdump.nf b/src/nf_modules/sratoolkit/fastqdump.nf
new file mode 100644
index 0000000000000000000000000000000000000000..6ba77271405a0f00eef0283d85f7813d3e3a6c9d
--- /dev/null
+++ b/src/nf_modules/sratoolkit/fastqdump.nf
@@ -0,0 +1,48 @@
+/*
+* sra-tools : fastq-dump
+* Inputs : a text file listing SRR run accessions (one per line)
+* Outputs : fastq files
+*/
+
+params.list_srr = "$baseDir/data/SRR/*.txt"
+
+log.info "downloading list srr : ${params.list_srr}"
+
+Channel
+  .fromPath( params.list_srr )
+  .ifEmpty { error "Cannot find any bam files matching: ${params.list_srr}" }
+  .splitCsv()
+  .map { it -> it[0]}
+  .set { SRR }
+
+// each line of the list file is one SRR/ERR run accession
+
+process fastq_dump {
+  tag "$file_id"
+  publishDir "results/download/fastq/${file_id}/", mode: 'copy'
+
+  input:
+    val file_id from SRR
+
+  output:
+    set file_id, "*.fastq" into fastq
+
+  script:
+"""
+HOME=\$(pwd) # make sure fastq-dump writes its temporary files inside the work directory
+# for testing, only reads 10000 to 20000 are downloaded (-N 10000 -X 20000); remove for real datasets
+fastq-dump --split-files --defline-seq '@\$ac_\$si/\$ri' --defline-qual "+" -N 10000 -X 20000 ${file_id}
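+# rename fastq-dump's _1/_2 suffixes to the _R1/_R2 convention used by the other modules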
+if [ -f ${file_id}_1.fastq ]
+then
+  mv ${file_id}_1.fastq ${file_id}_R1.fastq
+fi
+if [ -f ${file_id}_2.fastq ]
+then
+  mv ${file_id}_2.fastq ${file_id}_R2.fastq
+fi
+"""
+}
diff --git a/src/nf_modules/sratoolkit/list-srr.txt b/src/nf_modules/sratoolkit/list-srr.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a58fc103ffe37a56f511aee117b26383b1e3f516
--- /dev/null
+++ b/src/nf_modules/sratoolkit/list-srr.txt
@@ -0,0 +1,6 @@
+ERR572281
+ERR572146
+ERR572201
+ERR638114
+ERR638115
+ERR638116
diff --git a/src/nf_modules/sratoolkit/tests.sh b/src/nf_modules/sratoolkit/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1747137d454630bffa8538efc3009469f271b50c
--- /dev/null
+++ b/src/nf_modules/sratoolkit/tests.sh
@@ -0,0 +1,13 @@
+./nextflow src/nf_modules/sratoolkit/fastqdump.nf \
+  -c src/nf_modules/sratoolkit/fastqdump.config \
+  -profile docker \
+  --list_srr "src/nf_modules/sratoolkit/list-srr.txt" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/sratoolkit/fastqdump.nf \
+  -c src/nf_modules/sratoolkit/fastqdump.config \
+  -profile singularity \
+  --list_srr "src/nf_modules/sratoolkit/list-srr.txt" \
+  -resume
+fi
diff --git a/src/nf_modules/star/indexing.config b/src/nf_modules/star/indexing.config
new file mode 100644
index 0000000000000000000000000000000000000000..34683657e3856d700dfc357591f46477598de70b
--- /dev/null
+++ b/src/nf_modules/star/indexing.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: index_fasta {
+        container = "lbmc/star:2.7.3a"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: index_fasta {
+        container = "lbmc/star:2.7.3a"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/star_2.7.3a"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "20GB"
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: index_fasta {
+        container = "lbmc/star:2.7.3a"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/star/indexing.nf b/src/nf_modules/star/indexing.nf
new file mode 100644
index 0000000000000000000000000000000000000000..0f340b2d3d11ff5fd79d6b1e5e3f8c56d35f2154
--- /dev/null
+++ b/src/nf_modules/star/indexing.nf
@@ -0,0 +1,36 @@
+params.fasta = "$baseDir/data/bam/*.fasta"
+params.annotation = "$baseDir/data/bam/*.gtf"
+
+log.info "fasta files : ${params.fasta}"
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any fasta files matching: ${params.fasta}" }
+  .set { fasta_file }
+Channel
+  .fromPath( params.annotation )
+  .ifEmpty { error "Cannot find any annotation files matching: ${params.annotation}" }
+  .set { annotation_file }
+
+process index_fasta {
+  tag "$fasta.baseName"
+  publishDir "results/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_file
+    file annotation from annotation_file
+
+  output:
+    file "*" into index_files
+
+  script:
+"""
+STAR --runThreadN ${task.cpus} --runMode genomeGenerate \
+--genomeDir ./ \
+--genomeFastaFiles ${fasta} \
+--sjdbGTFfile ${annotation} \
+--genomeSAindexNbases 3 # min(14, log2(GenomeLength)/2 - 1)
+"""
+}
+
+
diff --git a/src/nf_modules/star/mapping_paired.config b/src/nf_modules/star/mapping_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..615404c4928c2281722ad15b50055b37eddfb7aa
--- /dev/null
+++ b/src/nf_modules/star/mapping_paired.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/star:2.7.3a"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/star:2.7.3a"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/star_2.7.3a"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/star:2.7.3a"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/star/mapping_paired.nf b/src/nf_modules/star/mapping_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..9ea901751da50944d23327f47bc3813ff3a49859
--- /dev/null
+++ b/src/nf_modules/star/mapping_paired.nf
@@ -0,0 +1,40 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$pair_id"
+  publishDir "results/mapping/bams/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  set pair_id, "*.bam" into bam_files
+  file "*.out" into mapping_report
+
+  script:
+"""
+mkdir -p index
+mv ${index} index/
+STAR --runThreadN ${task.cpus} \
+--genomeDir index/ \
+--readFilesIn ${reads[0]} ${reads[1]} \
+--outFileNamePrefix ${pair_id} \
+--outSAMmapqUnique 0 \
+--outSAMtype BAM SortedByCoordinate
+"""
+}
+
diff --git a/src/nf_modules/star/mapping_single.config b/src/nf_modules/star/mapping_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..615404c4928c2281722ad15b50055b37eddfb7aa
--- /dev/null
+++ b/src/nf_modules/star/mapping_single.config
@@ -0,0 +1,54 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/star:2.7.3a"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: mapping_fastq {
+        container = "lbmc/star:2.7.3a"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: mapping_fastq {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/star_2.7.3a"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: mapping_fastq {
+        container = "lbmc/star:2.7.3a"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/star/mapping_single.nf b/src/nf_modules/star/mapping_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..9d3d51b38da6cc7cf7dff985e5c0052b2924168a
--- /dev/null
+++ b/src/nf_modules/star/mapping_single.nf
@@ -0,0 +1,39 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+params.index = "$baseDir/data/index/*.index.*"
+
+log.info "fastq files : ${params.fastq}"
+log.info "index files : ${params.index}"
+
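+// derive the sample id from the file name (everything before the first dot)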
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+Channel
+  .fromPath( params.index )
+  .ifEmpty { error "Cannot find any index files matching: ${params.index}" }
+  .set { index_files }
+
+process mapping_fastq {
+  tag "$file_id"
+  publishDir "results/mapping/bams/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+  file index from index_files.collect()
+
+  output:
+  set file_id, "*.bam" into bam_files
+  file "*.out" into mapping_report
+
+  script:
+"""
+mkdir -p index
+mv ${index} index/
+STAR --runThreadN ${task.cpus} \
+--genomeDir index/ \
+--readFilesIn ${reads} \
+--outFileNamePrefix ${file_id} \
+--outSAMmapqUnique 0 \
+--outSAMtype BAM SortedByCoordinate
+"""
+}
diff --git a/src/nf_modules/star/tests.sh b/src/nf_modules/star/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..046ffe4f32bc78467a61440774553335f888b288
--- /dev/null
+++ b/src/nf_modules/star/tests.sh
@@ -0,0 +1,43 @@
+./nextflow src/nf_modules/star/indexing.nf \
+  -c src/nf_modules/star/indexing.config \
+  -profile docker \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --annotation "data/tiny_dataset/annot/tiny.gtf" \
+  -resume
+
+./nextflow src/nf_modules/star/mapping_single.nf \
+  -c src/nf_modules/star/mapping_single.config \
+  -profile docker \
+  --index "results/mapping/index/*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/star/mapping_paired.nf \
+  -c src/nf_modules/star/mapping_paired.config \
+  -profile docker \
+  --index "results/mapping/index/*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/star/indexing.nf \
+  -c src/nf_modules/star/indexing.config \
+  -profile singularity \
+  --fasta "data/tiny_dataset/fasta/tiny_v2.fasta" \
+  --annotation "data/tiny_dataset/annot/tiny.gtf" \
+  -resume
+
+./nextflow src/nf_modules/star/mapping_single.nf \
+  -c src/nf_modules/star/mapping_single.config \
+  -profile singularity \
+  --index "results/mapping/index/*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_S.fastq" \
+  -resume
+
+./nextflow src/nf_modules/star/mapping_paired.nf \
+  -c src/nf_modules/star/mapping_paired.config \
+  -profile singularity \
+  --index "results/mapping/index/*" \
+  --fastq "data/tiny_dataset/fastq/tiny*_R{1,2}.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/subread/subread.config b/src/nf_modules/subread/subread.config
new file mode 100644
index 0000000000000000000000000000000000000000..68147001c4c8d630210e9c617b193d977212721c
--- /dev/null
+++ b/src/nf_modules/subread/subread.config
@@ -0,0 +1,83 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 1
+      }
+      withName: counting {
+        container = "lbmc/subread:1.6.4"
+        cpus = 1
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        cpus = 1
+      }
+      withName: counting {
+        container = "lbmc/subread:1.6.4"
+        cpus = 1
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: sort_bam {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: counting {
+        beforeScript = "source /usr/share/lmod/lmod/init/bash; module use ~/privatemodules"
+        module = "subread/1.6.4"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: sort_bam {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: counting {
+        container = "lbmc/subread:1.6.4"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/nf_modules/subread/subread.nf b/src/nf_modules/subread/subread.nf
new file mode 100644
index 0000000000000000000000000000000000000000..f0b53e396a27b871f9091ecd5cff4858550fa98f
--- /dev/null
+++ b/src/nf_modules/subread/subread.nf
@@ -0,0 +1,52 @@
+params.bam = "$baseDir/data/bam/*.bam"
+params.gtf = "$baseDir/data/annotation/*.gtf"
+
+log.info "bam files : ${params.bam}"
+log.info "gtf files : ${params.gtf}"
+
+Channel
+  .fromPath( params.bam )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.bam}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { bam_files }
+Channel
+  .fromPath( params.gtf )
+  .ifEmpty { error "Cannot find any gtf file matching: ${params.gtf}" }
+  .set { gtf_file }
+
+process sort_bam {
+  tag "$file_id"
+  cpus 4
+
+  input:
+    set file_id, file(bam) from bam_files
+
+  output:
+    set file_id, "*_sorted.sam" into sorted_bam_files
+
+  script:
+"""
+# sort the bam by read name (-n), as featureCounts expects for paired-end data
+samtools sort -@ ${task.cpus} -n -O SAM -o ${file_id}_sorted.sam ${bam}
+"""
+}
+
+process counting {
+  tag "$file_id"
+  publishDir "results/quantification/", mode: 'copy'
+
+  input:
+  set file_id, file(bam) from sorted_bam_files
+  file gtf from gtf_file
+
+  output:
+  file "*.count" into count_files
+
+  script:
+"""
+featureCounts ${bam} -a ${gtf} -p \
+  -o ${file_id}.count \
+  -R BAM
+"""
+}
+
diff --git a/src/nf_modules/subread/tests.sh b/src/nf_modules/subread/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c50b20e3601efeaa8a0d3721ad6ab20739c3db48
--- /dev/null
+++ b/src/nf_modules/subread/tests.sh
@@ -0,0 +1,15 @@
+./nextflow src/nf_modules/subread/subread.nf \
+  -c src/nf_modules/subread/subread.config \
+  -profile docker \
+  --gtf "data/tiny_dataset/annot/tiny.gff" \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/subread/subread.nf \
+  -c src/nf_modules/subread/subread.config \
+  -profile singularity \
+  --gtf "data/tiny_dataset/annot/tiny.gff" \
+  --bam "data/tiny_dataset/map/tiny_v2.bam" \
+  -resume
+fi
diff --git a/src/nf_modules/urqt/tests.sh b/src/nf_modules/urqt/tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9992c67490ce65df42471a73d5c36bb0b708763e
--- /dev/null
+++ b/src/nf_modules/urqt/tests.sh
@@ -0,0 +1,25 @@
+./nextflow src/nf_modules/urqt/trimming_paired.nf \
+  -c src/nf_modules/urqt/trimming_paired.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/urqt/trimming_single.nf \
+  -c src/nf_modules/urqt/trimming_single.config \
+  -profile docker \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+if [ -x "$(command -v singularity)" ]; then
+./nextflow src/nf_modules/urqt/trimming_paired.nf \
+  -c src/nf_modules/urqt/trimming_paired.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+
+./nextflow src/nf_modules/urqt/trimming_single.nf \
+  -c src/nf_modules/urqt/trimming_single.config \
+  -profile singularity \
+  --fastq "data/tiny_dataset/fastq/tiny_R{1,2}.fastq" \
+  -resume
+fi
diff --git a/src/nf_modules/urqt/trimming_paired.config b/src/nf_modules/urqt/trimming_paired.config
new file mode 100644
index 0000000000000000000000000000000000000000..a9f28632bc1552a9d497f9ddc8b7f73e26b9f6fe
--- /dev/null
+++ b/src/nf_modules/urqt/trimming_paired.config
@@ -0,0 +1,56 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: trimming {
+        cpus = 4
+        container = "lbmc/urqt:d62c1f8"
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: trimming {
+        cpus = 4
+        container = "lbmc/urqt:d62c1f8"
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: trimming {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/urqt_d62c1f8"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        memory = "5GB"
+        cpus = 16
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: trimming {
+        container = "lbmc/urqt:d62c1f8"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
+
diff --git a/src/nf_modules/urqt/trimming_paired.nf b/src/nf_modules/urqt/trimming_paired.nf
new file mode 100644
index 0000000000000000000000000000000000000000..f15efac0c306cd09280b791035fccfc3efc6f039
--- /dev/null
+++ b/src/nf_modules/urqt/trimming_paired.nf
@@ -0,0 +1,26 @@
+params.fastq = "$baseDir/data/fastq/*_{1,2}.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromFilePairs( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .set { fastq_files }
+
+process trimming {
+  tag "${reads}"
+  publishDir "results/fastq/trimming/", mode: 'copy'
+
+  input:
+  set pair_id, file(reads) from fastq_files
+
+  output:
+  set pair_id, "*_trim_R{1,2}.fastq.gz" into fastq_files_trim
+
+  script:
+"""
+UrQt --t 20 --m ${task.cpus} --gz \
+--in ${reads[0]} --inpair ${reads[1]} \
+--out ${pair_id}_trim_R1.fastq.gz --outpair ${pair_id}_trim_R2.fastq.gz \
+> ${pair_id}_trimming_report.txt
+"""
+}
+
diff --git a/src/nf_modules/urqt/trimming_single.config b/src/nf_modules/urqt/trimming_single.config
new file mode 100644
index 0000000000000000000000000000000000000000..012b3f3afb518677d0aae318979701d32861cca5
--- /dev/null
+++ b/src/nf_modules/urqt/trimming_single.config
@@ -0,0 +1,56 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: trimming {
+        container = "lbmc/urqt:d62c1f8"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: trimming {
+        container = "lbmc/urqt:d62c1f8"
+        cpus = 4
+      }
+    }
+  }
+  psmn{
+    process{
+      withName: trimming {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/urqt_d62c1f8"
+        executor = "sge"
+        clusterOptions = "-cwd -V"
+        cpus = 16
+        memory = "5GB"
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: trimming {
+        container = "lbmc/urqt:d62c1f8"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
+
diff --git a/src/nf_modules/urqt/trimming_single.nf b/src/nf_modules/urqt/trimming_single.nf
new file mode 100644
index 0000000000000000000000000000000000000000..4116a69a7af5164ea59aa918fd314238f5068b77
--- /dev/null
+++ b/src/nf_modules/urqt/trimming_single.nf
@@ -0,0 +1,29 @@
+params.fastq = "$baseDir/data/fastq/*.fastq"
+
+log.info "fastq files : ${params.fastq}"
+
+Channel
+  .fromPath( params.fastq )
+  .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq}" }
+  .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+  .set { fastq_files }
+
+process trimming {
+  tag "$file_id"
+  publishDir "results/fastq/trimming/", mode: 'copy'
+
+  input:
+  set file_id, file(reads) from fastq_files
+
+  output:
+  set file_id, "*_trim.fastq.gz" into fastq_files_trim
+
+  script:
+  """
+  UrQt --t 20 --m ${task.cpus} --gz \
+  --in ${reads} \
+  --out ${file_id}_trim.fastq.gz \
+  > ${file_id}_trimming_report.txt
+  """
+}
+
diff --git a/src/training_dataset.config b/src/training_dataset.config
new file mode 100644
index 0000000000000000000000000000000000000000..226acd9fd45046d7e24d829de16447d81873c8d4
--- /dev/null
+++ b/src/training_dataset.config
@@ -0,0 +1,370 @@
+profiles {
+  docker {
+    docker.temp = 'auto'
+    docker.enabled = true
+    process {
+      withName: build_synthetic_bed {
+        container = "lbmc/bedtools:2.25.0"
+        cpus = 1
+      }
+      withName: fasta_from_bed {
+        container = "lbmc/bedtools:2.25.0"
+        cpus = 1
+      }
+      withName: index_fasta {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+      withName: mapping_fastq_paired {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+      withName: bam_2_fastq_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: filter_bam_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: sort_bam_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: index_bam_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: mapping_fastq_single {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+      withName: bam_2_fastq_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: filter_bam_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: sort_bam_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: index_bam_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+    }
+  }
+  singularity {
+    singularity.enabled = true
+    singularity.cacheDir = "./bin/"
+    process {
+      withName: build_synthetic_bed {
+        container = "lbmc/bedtools:2.25.0"
+        cpus = 1
+      }
+      withName: fasta_from_bed {
+        container = "lbmc/bedtools:2.25.0"
+        cpus = 1
+      }
+      withName: index_fasta {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+      withName: mapping_fastq_single {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+      withName: mapping_fastq_paired {
+        container = "lbmc/bowtie2:2.3.4.1"
+        cpus = 4
+      }
+      withName: bam_2_fastq_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: filter_bam_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: sort_bam_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: index_bam_paired {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: bam_2_fastq_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: filter_bam_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: sort_bam_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+      withName: index_bam_single {
+        container = "lbmc/samtools:1.7"
+        cpus = 4
+      }
+    }
+  }
+  psmn {
+    process{
+      withName: build_synthetic_bed {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bedtools_2.25.0"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: fasta_from_bed {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bedtools_2.25.0"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 1
+        memory = "20GB"
+        time = "12h"
+        queue = 'monointeldeb128,monointeldeb48,h48-E5-2670deb128,h6-E5-2667v4deb128'
+      }
+      withName: index_fasta {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie2_2.3.4.1"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "20GB"
+        time = "12h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: mapping_fastq_paired {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie2_2.3.4.1"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: bam_2_fastq_paired {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: sort_bam_paired {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: index_bam_paired {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: mapping_fastq_single {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/bowtie2_2.3.4.1"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: bam_2_fastq_single {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: sort_bam_single {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+      withName: index_bam_single {
+        beforeScript = "source $baseDir/.conda_psmn.sh"
+        conda = "$baseDir/.conda_envs/samtools_1.7"
+        executor = "sge"
+        clusterOptions = "-m e -cwd -V"
+        cpus = 16
+        memory = "30GB"
+        time = "24h"
+        queue = 'E5-2670deb128A,E5-2670deb128B,E5-2670deb128C,E5-2670deb128D,E5-2670deb128E,E5-2670deb128F'
+        penv = 'openmp16'
+      }
+    }
+  }
+  ccin2p3 {
+    singularity.enabled = true
+    singularity.cacheDir = "$baseDir/.singularity_in2p3/"
+    singularity.runOptions = "--bind /pbs,/sps,/scratch"
+    process{
+      withName: build_synthetic_bed {
+        container = "lbmc/bedtools:2.25.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: fasta_from_bed {
+        container = "lbmc/bedtools:2.25.0"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: index_fasta {
+        container = "lbmc/bowtie2:2.3.4.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: mapping_fastq_paired {
+        container = "lbmc/bowtie2:2.3.4.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: bam_2_fastq_paired {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: sort_bam_paired {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: index_bam_paired {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: mapping_fastq_single {
+        container = "lbmc/bowtie2:2.3.4.1"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: bam_2_fastq_single {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: sort_bam_single {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+      withName: index_bam_single {
+        container = "lbmc/samtools:1.7"
+        scratch = true
+        stageInMode = "copy"
+        stageOutMode = "rsync"
+        executor = "sge"
+        clusterOptions = "-P P_lbmc -l os=cl7 -l sps=1 -r n"
+        cpus = 1
+        queue = 'huge'
+      }
+    }
+  }
+}
diff --git a/src/training_dataset.nf b/src/training_dataset.nf
new file mode 100644
index 0000000000000000000000000000000000000000..c6f48e882a82eaa7437a409c7500c7045224893e
--- /dev/null
+++ b/src/training_dataset.nf
@@ -0,0 +1,308 @@
+/*
+small pipeline to build a training dataset from whole genome data
+
+input:
+- fasta
+- fastq
+- chromosome
+- start position
+- stop position
+
+output:
+- the fasta of the selected region
+- the fastq of the reads mapping to it
+
+example for paired-end data:
+./nextflow src/training_dataset.nf -c src/training_dataset.config --fasta "data/genome.fa" --fastq_paired "data/*_R{1,2}.fastq.gz" --chromosome "X" --start 5305683 --stop 5333928 -resume
+
+example for single-end data:
+./nextflow src/training_dataset.nf -c src/training_dataset.config --fasta "data/genome.fa" --fastq_single "data/*_R1.fastq.gz" --chromosome "X" --start 5305683 --stop 5333928 -resume
+
+*/
+
+params.fastq_paired = ""
+params.fastq_single = ""
+
+log.info "fasta files : ${params.fasta}"
+log.info "fastq paired files : ${params.fastq_paired}"
+log.info "fastq single files : ${params.fastq_single}"
+log.info "chromosome : ${params.chromosome}"
+log.info "start position : ${params.start}"
+log.info "stop position : ${params.stop}"
+
+
+Channel
+  .fromPath( params.fasta )
+  .ifEmpty { error "Cannot find any index files matching: ${params.fasta}" }
+  .set { fasta_file }
+
+
+process build_synthetic_bed {
+  tag "${chromosome}:${start}-${stop}"
+  cpus 4
+
+  input:
+  val chromosome from params.chromosome
+  val start from params.start
+  val stop from params.stop
+
+  output:
+  file "*.bed" into bed_files
+
+  script:
+"""
+echo "${chromosome}\t${start}\t${stop}" > synthetic.bed
+"""
+}
+
+process fasta_from_bed {
+  tag "${fasta.baseName}"
+  cpus 4
+  publishDir "results/training/fasta/", mode: 'copy'
+
+  input:
+  file fasta from fasta_file
+  file bed from bed_files
+  val chromosome from params.chromosome
+
+  output:
+  file "*.fasta" into fasta_files_extracted
+
+  script:
+"""
+bedtools getfasta \
+-fi ${fasta} -bed ${bed} -fo s${fasta.baseName}.fasta
+"""
+}
+
+process index_fasta {
+  tag "$fasta.baseName"
+  cpus 4
+  publishDir "results/training/mapping/index/", mode: 'copy'
+
+  input:
+    file fasta from fasta_files_extracted
+
+  output:
+    file "*.index*" into index_files
+    file "*_report.txt" into indexing_report
+
+  script:
+"""
+bowtie2-build --threads ${task.cpus} ${fasta} ${fasta.baseName}.index &> ${fasta.baseName}_bowtie2_report.txt
+
+if grep -q "Error" ${fasta.baseName}_bowtie2_report.txt; then
+  exit 1
+fi
+"""
+}
+
+if ( params.fastq_paired != "" ) {
+  Channel
+    .fromFilePairs( params.fastq_paired )
+    .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq_paired}" }
+    .set { fastq_files_paired }
+
+  process mapping_fastq_paired {
+    tag "$pair_id"
+    cpus 4
+
+    input:
+    set pair_id, file(reads) from fastq_files_paired
+    file index from index_files.collect()
+
+    output:
+    set pair_id, "*.bam" into bam_files_paired
+    file "*_report.txt" into mapping_report
+
+    script:
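+    // recover the bowtie2 index prefix from the *.1.bt2 file name,
+    // ignoring the reverse index files (*.rev.1.bt2)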
+    index_id = index[0]
+    for (index_file in index) {
+      if (index_file =~ /.*\.1\.bt2/ && !(index_file =~ /.*\.rev\.1\.bt2/)) {
+          index_id = ( index_file =~ /(.*)\.1\.bt2/)[0][1]
+      }
+    }
+  """
+  bowtie2 --very-sensitive -p ${task.cpus} -x ${index_id} \
+  -1 ${reads[0]} -2 ${reads[1]} 2> \
+  ${pair_id}_bowtie2_report.txt | \
+  samtools view -Sb - > ${pair_id}.bam
+
+  if grep -q "Error" ${pair_id}_bowtie2_report.txt; then
+    exit 1
+  fi
+  """
+  }
+
+  bam_files_paired.into{ bam_files_paired_fa; bam_files_paired_ba}
+
+  process bam_2_fastq_paired {
+    tag "$file_id"
+    publishDir "results/training/fastq/", mode: 'copy'
+
+    input:
+      set file_id, file(bam) from bam_files_paired_fa
+
+    output:
+      set file_id, "*.fastq" into fastq_files_extracted
+    script:
+  """
+  samtools fastq -1 s${file_id}_R1.fastq -2 s${file_id}_R2.fastq -F 0x4 ${bam}
+  """
+  }
+
+  process filter_bam_paired {
+    tag "$file_id"
+    cpus 4
+
+    input:
+      set file_id, file(bam) from bam_files_paired_ba
+
+    output:
+      set file_id, "*.bam" into filtered_bam_files_paired
+    script:
+  """
+  samtools view -@ ${task.cpus} -hb ${bam} -F 0x4 > f${file_id}.bam
+  """
+  }
+
+  process sort_bam_paired {
+    tag "$file_id"
+    publishDir "results/training/bams/", mode: 'copy'
+    cpus 4
+
+    input:
+      set file_id, file(bam) from filtered_bam_files_paired
+
+    output:
+      set file_id, "*.bam" into sorted_bam_files_paired
+
+    script:
+  """
+  samtools sort -@ ${task.cpus} -O BAM -o s${file_id}.bam ${bam}
+  """
+  }
+
+  process index_bam_paired {
+    tag "$file_id"
+    publishDir "results/training/bams/", mode: 'copy'
+
+    input:
+      set file_id, file(bam) from sorted_bam_files_paired
+
+    output:
+      set file_id, "*.bam*" into indexed_bam_file_paired
+
+    script:
+  """
+  samtools index ${bam}
+  """
+  }
+}
+
+
+if ( params.fastq_single != "" ) {
+  Channel
+    .fromPath( params.fastq_single )
+    .ifEmpty { error "Cannot find any fastq files matching: ${params.fastq_single}" }
+    .map { it -> [(it.baseName =~ /([^\.]*)/)[0][1], it]}
+    .set { fastq_files_single }
+
+  process mapping_fastq_single {
+    tag "$file_id"
+    cpus 4
+
+    input:
+    set file_id, file(reads) from fastq_files_single
+    file index from index_files.collect()
+
+    output:
+    set file_id, "*.bam" into bam_files_single
+    file "*_report.txt" into mapping_report
+
+    script:
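+    // same index prefix recovery as in the paired mapping above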
+    index_id = index[0]
+    for (index_file in index) {
+      if (index_file =~ /.*\.1\.bt2/ && !(index_file =~ /.*\.rev\.1\.bt2/)) {
+          index_id = ( index_file =~ /(.*)\.1\.bt2/)[0][1]
+      }
+    }
+  """
+  bowtie2 --very-sensitive -p ${task.cpus} -x ${index_id} \
+  -U ${reads} 2> \
+  ${file_id}_bowtie2_report.txt | \
+  samtools view -Sb - > ${file_id}.bam
+
+  if grep -q "Error" ${file_id}_bowtie2_report.txt; then
+    exit 1
+  fi
+  """
+  }
+
+  bam_files_single.into{ bam_files_single_fa; bam_files_single_ba}
+
+  process bam_2_fastq_single {
+    tag "$file_id"
+
+    input:
+      set file_id, file(bam) from bam_files_single_fa
+
+    output:
+      set file_id, "*.fastq" into fastq_files_extracted
+    script:
+  """
+  samtools fastq -0 s${file_id}.fastq -F 0x4 ${bam}
+  """
+  }
+
+  process filter_bam_single {
+    tag "$file_id"
+    cpus 4
+
+    input:
+      set file_id, file(bam) from bam_files_single_ba
+
+    output:
+      set file_id, "*.bam" into filtered_bam_files_single
+    script:
+  """
+  samtools view -@ ${task.cpus} -hb ${bam} -F 0x4 > f${file_id}.bam
+  """
+  }
+
+  process sort_bam_single {
+    tag "$file_id"
+    publishDir "results/training/bams/", mode: 'copy'
+    cpus 4
+
+    input:
+      set file_id, file(bam) from filtered_bam_files_single
+
+    output:
+      set file_id, "*.bam" into sorted_bam_files_single
+
+    script:
+  """
+  samtools sort -@ ${task.cpus} -O BAM -o s${file_id}.bam ${bam}
+  """
+  }
+
+  process index_bam_single {
+    tag "$file_id"
+    publishDir "results/training/bams/", mode: 'copy'
+
+    input:
+      set file_id, file(bam) from sorted_bam_files_single
+
+    output:
+      set file_id, "*.bam*" into indexed_bam_file_single
+
+    script:
+  """
+  samtools index ${bam}
+  """
+  }
+}
+