From 1619519c2b1279218de9b310bbf1496f2306a80b Mon Sep 17 00:00:00 2001 From: Christopher Tomkins-Tinch Date: Thu, 16 Feb 2017 22:47:30 -0500 Subject: [PATCH 1/8] WIP conda recipe deplo dependencies restructured so conda recipe can source dependencies from same lists used for testing. added conda recipe template as jinja2 recipe, with script to render the recipe with package lists, along with release tag as version number, and md5 hash from tagged release package. conda push is untested, as is branch/tag interaction (since this is the initial commit to GitHub). WIP --- .travis.yml | 10 + packaging/conda-recipe/render-recipe.py | 294 ++++++++++++++++++ .../conda-recipe/viral-ngs-template/build.sh | 22 ++ .../conda-recipe/viral-ngs-template/meta.yaml | 93 ++++++ .../viral-ngs-template/post-link.sh | 8 + packaging/conda-recipe/viral-ngs/.gitignore | 6 + requirements-conda.txt | 15 +- requirements-docs.txt | 5 - requirements-pipes.txt | 3 - requirements-py-docs.txt | 4 + ...nts-tests.txt => requirements-py-tests.txt | 2 + requirements-py2.txt | 1 + requirements-py3.txt | 2 + requirements.txt | 4 - travis/deploy.sh | 47 +++ travis/install-conda.sh | 6 +- travis/install-pip.sh | 9 +- travis/tests-long.sh | 1 + 18 files changed, 504 insertions(+), 28 deletions(-) create mode 100755 packaging/conda-recipe/render-recipe.py create mode 100644 packaging/conda-recipe/viral-ngs-template/build.sh create mode 100644 packaging/conda-recipe/viral-ngs-template/meta.yaml create mode 100644 packaging/conda-recipe/viral-ngs-template/post-link.sh create mode 100644 packaging/conda-recipe/viral-ngs/.gitignore delete mode 100644 requirements-docs.txt delete mode 100644 requirements-pipes.txt create mode 100644 requirements-py-docs.txt rename requirements-tests.txt => requirements-py-tests.txt (84%) create mode 100644 requirements-py2.txt create mode 100644 requirements-py3.txt delete mode 100644 requirements.txt create mode 100755 travis/deploy.sh diff --git a/.travis.yml b/.travis.yml index c64b63699..51d20a8c8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -30,7 +30,10 @@ env: - PIP_DIR="$HOME/virtualenv" - GATK_PATH="$CACHE_DIR/GenomeAnalysisTK-3.6" - PYTHONIOENCODING=UTF8 + # $BUNDLE_SECRET for decrypting tarball of third-party tools - secure: KX7DwKRD85S7NgspxevgbulTtV+jHQIiM6NBus2/Ur/P0RMdpt0EQQ2wDq79qGN70bvvkw901N7EjSYd+GWCAM7StXtaxnLRrrZ3XI1gX7KMk8E3QzPf0zualLDs7cuQmL6l6WiElUAEqumLc7WGpLZZLdSPzNqFSg+CBKCmTI8= + # $ANACONDA_TOKEN for uploading builds to anaconda.org ("broad-viral" channel) + - secure: "O+yKZxHthroiSi8KqMEF9qWDNv43iDXpk4rbhaZDlUKBiR5+AeXLR7OodWpX9LvhihpWgDoS5W42K0joPRP/rUJ2Jux9GH84Jhg+uDKN+XDi2sNT1/DsI4BTF0xxO0TeQ6IokbzV7idfW6gbhNoWMky7DnXtK6ruCJkkx4tWlno=" git: depth: 3 @@ -49,3 +52,10 @@ script: after_success: - coveralls + +deploy: + provider: script + script: travis/deploy.sh $TRAVIS_TAG + on: + tags: true + #all_branches: true diff --git a/packaging/conda-recipe/render-recipe.py b/packaging/conda-recipe/render-recipe.py new file mode 100755 index 000000000..51e47c944 --- /dev/null +++ b/packaging/conda-recipe/render-recipe.py @@ -0,0 +1,294 @@ +#!/usr/bin/python + +# stdlib +import os, sys, re +import glob +import jinja2 +import json +import pprint +import argparse +import hashlib +import time +# since py3 split up urllib +try: + from urllib.request import urlopen +except ImportError: + from urllib2 import urlopen + +""" +Renders Jinja2 templates using variables from dependency files + +The behavior is not (yet) recursive. 
+""" + +input_directory = "viral-ngs-template" +output_directory = "viral-ngs" +source_url = "" + +dir_path = os.path.dirname(os.path.realpath(__file__)) + +class VersionString(object): + """ + Class to validate and parse PEP440 version strings (also used by conda) + Shortened and derived from: https://github.com/pypa/packaging/blob/16.7/packaging/version.py + """ + + VERSION_PATTERN = r""" + (?Pv?) + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
                                          # pre-release
+            [-_\.]?
+            (?P(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P[0-9]+)?
+        )?
+        (?P                                         # post release
+            (?:-(?P[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?Ppost|rev|r)
+                [-_\.]?
+                (?P[0-9]+)?
+            )
+        )?
+        (?P                                          # dev release
+            [-_\.]?
+            (?Pdev)
+            [-_\.]?
+            (?P[0-9]+)?
+        )?
+    )
+    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+    """
+    version_re = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,)
+
+    def __init__(self, v):
+        self.v = v
+
+    def __str__(self):
+        parts = []
+
+        try:
+            # 'v' prefix
+            if self.version_re.match(self.v).group("prefix") is not None:
+                parts.append("{0}".format(self.version_re.match(self.v).group("prefix")))
+
+            # Epoch
+            if ( int(self.version_re.match(self.v).group("epoch")) if self.version_re.match(self.v).group("epoch") else 0) != 0:
+                parts.append("{0}!".format(self.version_re.match(self.v).group("epoch")))
+
+            # Release segment
+            parts.append(".".join(str(x) for x in self.version_re.match(self.v).group("release").split(".")))
+
+            # Pre-release
+            if self.version_re.match(self.v).group("pre") is not None:
+                parts.append("".join(str(x) for x in self.version_re.match(self.v).group("pre")))
+
+            # Post-release
+            if self.version_re.match(self.v).group("post") is not None:
+                parts.append(".post{0}".format(self.version_re.match(self.v).group("post")))
+
+            # Development release
+            if self.version_re.match(self.v).group("dev") is not None:
+                parts.append(".dev{0}".format(self.version_re.match(self.v).group("dev")))
+
+            # Local version segment
+            if self.version_re.match(self.v).group("local") is not None:
+                parts.append(
+                    "+{0}".format(".".join(str(x) for x in self.version_re.match(self.v).group("local")))
+                )
+        except:
+            raise argparse.ArgumentTypeError("String '%s' does not match required PEP440 format"%(self.v,))
+
+        return "".join(parts)
+
+
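# Illustrative only (not part of the patch): a reduced form of the pattern above,
# showing how a tag such as "v1.14.0" splits into a "prefix" and a "release" segment.
# The script keeps the full tag as PKG_VERSION and the un-prefixed form as PKG_VERSION_CONDA.
import re
simple_version_re = re.compile(r"^\s*(?P<prefix>v?)(?P<release>[0-9]+(?:\.[0-9]+)*)\s*$")
m = simple_version_re.match("v1.14.0")
print(m.group("prefix"))   # v
print(m.group("release"))  # 1.14.0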
+def reformat_package_line(line):
+    """
+    This function is meant to take a package spec in conda or pip format
+    and return one in conda recipe format: https://conda.io/docs/spec.html
+    """
+    # regex to match comment-only line
+    comment_re = re.compile(r"^(?:\s*\#.*)$")
+
+    # regex to match package spec line, with support for comments and selectors.
+    # This will also capture hash-indicated selectors and comments (ex. "# [osx]")
+    # which may, or may not, be useful in their original context.
+    package_re = re.compile(r"^(?P<package>[a-zA-Z0-9\-\_]+)(?:\s*)(?:(?P<comparator>[\>\<=]?=?)(?:\s*)(?P<version>[^\s\#=]+)(?:=(?P<build>[0-9]*))?(?:\s*))?(?P<selector>\s*\#\s*\[.*\])?(?P<comment>\s*\#.*)?$")
+
+    # when we need to specify a different comparator for the recipe
+    comparator_replacements = {
+        "=": "==",
+    }
+
+    # the line should not have a newline
+    line = line.replace("\n","").replace("\r","")
+
+    # if the line is empty or a comment, simply return it
+    if len(line)==0 or comment_re.match(line):
+        return line
+    # otherwise, build a package spec string suitable for a conda recipe
+    else: 
+        m = package_re.match(line)
+        recipe_package_string = "- {package} {comparator}{version}{build}{selector}{comment}".format(
+            package    = m.group("package").lower(), # conda packages must have lowercase names
+            comparator = "" if not m.group("comparator") else comparator_replacements.get(m.group("comparator"), m.group("comparator")),
+            version    = "" if not m.group("version") else m.group("version"),
+            build      = "" if not m.group("build") else " "+m.group("build")+"*", # TODO: verify build separator character for recipe format
+            selector   = "" if not m.group("selector") else " "+m.group("selector"),
+            comment    = "" if not m.group("comment") else " "+m.group("comment")
+        )
+        return recipe_package_string
+
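# Illustrative only (not part of the patch): expected output of reformat_package_line()
# for a few specs taken from requirements-conda.txt.
for spec in ("pysam=0.9.1", "future>=0.15.2", "diamond=0.8.22=2"):
    print(reformat_package_line(spec))
# - pysam ==0.9.1
# - future >=0.15.2
# - diamond ==0.8.22 2*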
+def url_md5(url):
+    hash_md5 = hashlib.md5()
+    CHUNK_SIZE = 16 * 1024
+
+    # try up to three times to download the file. If an attempt fails, wait two seconds and retry.
+    try_count = 1
+    while True:
+        try:
+            print("Downloading source package for hash calculation...")
+            response = urlopen(url)
+            for chunk in iter(lambda: response.read(CHUNK_SIZE), b""):
+                hash_md5.update(chunk)
+            break
+        except:
+            print("Download attempt {} failed, sleeping then retrying...".format(try_count))
+            try_count += 1
+            if try_count > 3:
+                raise
+            time.sleep(2)
+            continue
+
+    return hash_md5.hexdigest()
+
+if __name__ == "__main__":
+
+    parser = argparse.ArgumentParser(description='Render the conda recipe.')
+    parser.add_argument('version',
+                        type=VersionString,
+                        help='the version number of the package')
+    parser.add_argument('--build-reqs', nargs='*', dest='build_requirements',
+                        type=argparse.FileType('r'),
+                        help='build-time requirements file')
+    parser.add_argument('--run-reqs', nargs='*', dest='run_requirements',
+                        type=argparse.FileType('r'),
+                        help='run-time requirements file')
+    parser.add_argument('--py2-run-reqs', nargs='*', dest='py2_run_requirements',
+                        type=argparse.FileType('r'),
+                        help='python2-only run-time requirements file')
+    parser.add_argument('--py3-run-reqs', nargs='*', dest='py3_run_requirements',
+                        type=argparse.FileType('r'),
+                        help='python3-only run-time requirements file')
+    parser.add_argument('--linux-run-reqs', nargs='*', dest='linux_run_requirements',
+                        type=argparse.FileType('r'),
+                        help='linux-only run-time requirements file')
+    parser.add_argument('--osx-run-reqs', nargs='*', dest='osx_run_requirements',
+                        type=argparse.FileType('r'),
+                        help='osx-only run-time requirements file')
+    parser.add_argument('--test-reqs', nargs='*', dest='test_requirements',
+                        type=argparse.FileType('r'),
+                        help='test-time requirements file')
+
+    try:
+        args = parser.parse_args()
+        if not any(vars(args).values()):
+            parser.print_help()
+            sys.exit(0)
+    except:
+        sys.exit(0)
+
+    args_dict = vars(args)
+
+    recipe_variables = {}
+
+    # store two separate version strings, one to use for the conda package and one
+    # that should match github tagged releases
+    recipe_variables["PKG_VERSION"] = str(args_dict.pop("version"))
+
+    # strip "v" prefix from versions that look like v1.14.0
+    if recipe_variables["PKG_VERSION"].startswith("v"):
+        recipe_variables["PKG_VERSION_CONDA"] = recipe_variables["PKG_VERSION"][1:]
+    else:
+        recipe_variables["PKG_VERSION_CONDA"] = recipe_variables["PKG_VERSION"]
+
+    # after we pop the positional argument(s), the optional ones remaining are all files
+    for var_name, req_files in args_dict.items():
+        if req_files:
+            for reqs_file in req_files:
+                if reqs_file:
+                    recipe_variables[var_name] = []
+                    for line in reqs_file:
+
+                        #print("Before: "+line.replace("\n",""))
+                        #print("After:  "+reformat_package_line(line)+"\n")
+                        conda_style_package_line = reformat_package_line(line)
+                        if len(conda_style_package_line):
+                            recipe_variables[var_name].append(conda_style_package_line)
+    pprint.pprint(recipe_variables)
+
+    j_env = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.join(dir_path, input_directory)))
+
+    if not os.path.exists(output_directory):
+        os.makedirs(output_directory)
+
+    template_files = os.listdir(os.path.join(dir_path,input_directory))
+
+    for template_file in template_files:
+        print("Rendering "+ template_file)
+        # jinja expects the filename to be just that, not a path
+        # it should be relative to the FileSystemLoader() path set above
+        template = j_env.get_template(template_file)
+        output_from_parsed_template = template.render(recipe_variables)
+
+        # save the rendered output
+        with open(os.path.join(dir_path, output_directory, template_file), "wb") as f:
+            f.write(output_from_parsed_template)
+
+        # populate md5 hashes for any source urls present
+        if(template_file.endswith(".yaml")):
+            # calculate and add md5 hashes to recipe
+            with open(os.path.join(dir_path, output_directory, template_file), "rb") as inf:
+                with open(os.path.join(dir_path, output_directory, template_file+".checksumed"), "wb") as outf:
+                    for line in inf:
+                        # if this is an md5 line, don't write it out
+                        if line.strip().startswith("md5"):
+                            continue
+                        # if this is not a url line, write it verbatim
+                        else:
+                            outf.writelines([line])
+
+                            # if this is a url line
+                            if line.strip().startswith("url"):
+                                # parse out the url
+                                #print(line)
+                                url_re = re.compile(r"^(?:(?P<leadingspace>\s*)url:\s*)(?P<url>[\S]*)(?P<extra>.*)$")
+                                matches = url_re.match(line)
+                                #print(matches)
+                                if matches:
+                                    if matches.group("url"):
+                                        # download file and calculate md5
+                                        src_hash = url_md5(matches.group("url"))
+                                        hash_line = "{leadingspace}md5: {src_hash}{extra}".format(
+                                            leadingspace="" if not matches.group("leadingspace") else matches.group("leadingspace"),
+                                            src_hash=src_hash,
+                                            extra="" if not matches.group("extra") else matches.group("extra")
+                                        )
+                                        outf.writelines([hash_line+"\n"])
+
+                                    else:
+                                        raise Exception("The yaml file url line does not appear to contain a url")
+
+
+            # move the file with checksums
+            os.rename(os.path.join(dir_path, output_directory, template_file+".checksumed"), os.path.join(dir_path, output_directory, template_file))
+
+
+
+
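
To make the md5 post-processing step above concrete, here is a minimal sketch (not part
of the patch) of the rewrite it performs on a rendered meta.yaml "url:" line, assuming
the restored group names "leadingspace", "url", and "extra", and an example tag of v1.14.0:

    import re
    url_re = re.compile(r"^(?:(?P<leadingspace>\s*)url:\s*)(?P<url>[\S]*)(?P<extra>.*)$")
    line = "  url: https://github.com/broadinstitute/viral-ngs/archive/v1.14.0.zip"
    m = url_re.match(line)
    # the original url line is written out, followed by a freshly computed hash
    # (url_md5() is the download-and-hash helper defined in the script above)
    print(line)
    print("{0}md5: {1}".format(m.group("leadingspace"), url_md5(m.group("url"))))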
diff --git a/packaging/conda-recipe/viral-ngs-template/build.sh b/packaging/conda-recipe/viral-ngs-template/build.sh
new file mode 100644
index 000000000..22991b872
--- /dev/null
+++ b/packaging/conda-recipe/viral-ngs-template/build.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+BINARY_HOME=$PREFIX/bin
+PACKAGE_HOME=$PREFIX/opt/$PKG_NAME-$PKG_VERSION
+
+cd $SRC_DIR
+
+# remove files duplicated by conda packages
+rm tools/binaries/V-Phaser-2.0/MacOSX/libgomp.1.dylib
+#chmod +x tools/scripts/*
+
+find tools/scripts/ -name "*.py" -exec chmod +x {} \;
+find tools/scripts/ -name "*.sh" -exec chmod +x {} \;
+
+# copy source to bin
+mkdir -p $PREFIX/bin
+mkdir -p $PACKAGE_HOME
+cp -R $SRC_DIR/* $PACKAGE_HOME/
+cd $PACKAGE_HOME && chmod a+x *.py
+
+cd $PACKAGE_HOME
+find *.py -type f -exec ln -s $PACKAGE_HOME/{} $BINARY_HOME/{} \;
diff --git a/packaging/conda-recipe/viral-ngs-template/meta.yaml b/packaging/conda-recipe/viral-ngs-template/meta.yaml
new file mode 100644
index 000000000..640bfd012
--- /dev/null
+++ b/packaging/conda-recipe/viral-ngs-template/meta.yaml
@@ -0,0 +1,93 @@
+package:
+  name: viral-ngs
+  version: {{PKG_VERSION_CONDA}}
+
+about:
+  home: https://github.com/broadinstitute/viral-ngs
+  license: https://raw.githubusercontent.com/broadinstitute/viral-ngs/master/LICENSE
+  summary: A set of scripts and tools for the analysis of viral NGS data
+
+source:
+  fn: viral-ngs-{{PKG_VERSION}}.zip
+  url: https://github.com/broadinstitute/viral-ngs/archive/{{PKG_VERSION}}.zip
+
+build:
+    number: 0
+    rpaths:
+        - lib/
+        - lib//
+
+requirements:
+  build:
+    - python
+    - java-jdk >=7
+    - perl
+    {% for item in build_requirements %}
+    {{ item }}
+    {%- endfor %}
+    # C lib or compilation-related
+    - gcc   # [not osx]
+    - llvm  # [osx]
+    - boost 1.61* # needed for diamond, among other packages
+
+  run:
+    - python
+    - java-jdk >=7
+    - perl
+    {% for item in run_requirements %}
+    {{ item }}
+    {%- endfor %}
+    {% for item in py2_run_requirements %}
+    {{ item }} # [py2k]
+    {%- endfor %}
+    {% for item in py3_run_requirements %}
+    {{ item }} # [py3k]
+    {%- endfor %}
+    {% for item in linux_run_requirements %}
+    {{ item }} # [linux]
+    {%- endfor %}
+    {% for item in osx_run_requirements %}
+    {{ item }} # [osx]
+    {%- endfor %}
+    # C lib-related
+    - libgcc # [not osx]
+    - boost 1.61* # needed for diamond, among other packages
+
+test:
+  requires:
+    # testing-related
+    {% for item in test_requirements %}
+    {{ item }}
+    {%- endfor %}
+  commands:
+    # test command-line tools
+    - "assembly.py -h &> /dev/null"
+    - "broad_utils.py -h &> /dev/null"
+    - "illumina.py -h &> /dev/null"
+    - "interhost.py -h &> /dev/null"
+    - "intrahost.py -h &> /dev/null"
+    - "metagenomics.py -h &> /dev/null"
+    - "ncbi.py -h &> /dev/null"
+    - "read_utils.py -h &> /dev/null"
+    - "reports.py -h &> /dev/null"
+    - "taxon_filter.py -h &> /dev/null"
+
+    # test tool install
+    #- "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_tools.py"
+
+    # various unit tests
+    #cd 
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_tools_vphaser2.py"
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_tools_picard.py"
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_tools_samtools.py"
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_taxon_filter.py"
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_util_misc.py"
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_util_vcf.py"    
+    - "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_snake.py" # [py3k]
+    #- "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_read_utils.py"
+    #- "cd $(dirname $(which illumina.py))/../opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_tools_novoalign.py"
+
+    #- "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_interhost.py"
+    #- "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_intrahost.py"
+    #- "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_assembly.py"
+    #- "cd $PREFIX/opt/$PKG_NAME-$PKG_VERSION && py.test -v test/unit/test_illumina.py"
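
As a small sketch of how the Jinja2 loops above are filled in (the package values here are
hypothetical; the real rendering is driven by render-recipe.py with the requirements files):

    import jinja2
    template = jinja2.Template("run:\n{% for item in run_requirements %}    {{ item }}\n{% endfor %}")
    print(template.render(run_requirements=["- pysam ==0.9.1", "- bwa ==0.7.15"]))
    # run:
    #     - pysam ==0.9.1
    #     - bwa ==0.7.15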
diff --git a/packaging/conda-recipe/viral-ngs-template/post-link.sh b/packaging/conda-recipe/viral-ngs-template/post-link.sh
new file mode 100644
index 000000000..3c9bbf0d5
--- /dev/null
+++ b/packaging/conda-recipe/viral-ngs-template/post-link.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+echo "viral-ngs depends on GATK"
+# call gatk-register which will print its usage statement
+$PREFIX/bin/gatk-register
+
+# exit 0 so the install is a success
+exit 0 
diff --git a/packaging/conda-recipe/viral-ngs/.gitignore b/packaging/conda-recipe/viral-ngs/.gitignore
new file mode 100644
index 000000000..db1714674
--- /dev/null
+++ b/packaging/conda-recipe/viral-ngs/.gitignore
@@ -0,0 +1,6 @@
+# this directory is a placeholder to contain jinja2-rendered
+# conda recipes, so the files in it should be ignored
+
+*
+*/
+!.gitignore
\ No newline at end of file
diff --git a/requirements-conda.txt b/requirements-conda.txt
index f73c29a35..53d945a5d 100644
--- a/requirements-conda.txt
+++ b/requirements-conda.txt
@@ -2,14 +2,14 @@ blast=2.2.31
 bmtagger=3.101
 bwa=0.7.15
 diamond=0.8.22=2
-gatk=3.7
-kraken-all=0.10.6_eaf8fb68
+gatk=3.6
+kraken-all=0.10.6_eaf8fb68 # [linux]
 krona=2.7
 last=719
 fastqc=0.11.5
 mafft=7.221
-mummer=3.23
-muscle=3.8.1551
+mummer=3.23 
+muscle=3.8.1551 
 mvicuna=1.0
 novoalign=3.06.05
 picard=2.5.0
@@ -23,13 +23,6 @@ vphaser2=2.0
 biopython=1.68
 matplotlib=1.5.3
 future>=0.15.2
-PyYAML=3.11
 pysam=0.9.1
-pycodestyle
-mock==2.0.0
-six<2
-pytest=3.0.5
-pytest-cov=2.4.0
-pytest-xdist=1.15.0
 bedtools=2.26.0
 pybedtools=0.7.8
\ No newline at end of file
diff --git a/requirements-docs.txt b/requirements-docs.txt
deleted file mode 100644
index b88d7b246..000000000
--- a/requirements-docs.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Sphinx==1.4.4
-sphinx-argparse==0.1.14
-boltons==15.0.0
-sphinx-rtd-theme==0.1.9
-matplotlib==1.5.3
diff --git a/requirements-pipes.txt b/requirements-pipes.txt
deleted file mode 100644
index fd45218aa..000000000
--- a/requirements-pipes.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-boto==2.38.0
-snakemake==3.9.0
-PyYAML==3.11
diff --git a/requirements-py-docs.txt b/requirements-py-docs.txt
new file mode 100644
index 000000000..cb711cce1
--- /dev/null
+++ b/requirements-py-docs.txt
@@ -0,0 +1,4 @@
+Sphinx==1.4.4
+sphinx-argparse==0.1.15
+sphinx_rtd_theme==0.1.9
+matplotlib==1.5.3
diff --git a/requirements-tests.txt b/requirements-py-tests.txt
similarity index 84%
rename from requirements-tests.txt
rename to requirements-py-tests.txt
index dd0153b0d..7c604b13a 100644
--- a/requirements-tests.txt
+++ b/requirements-py-tests.txt
@@ -1,3 +1,4 @@
+PyYAML==3.11
 pytest==3.0.5
 coveralls==1.1
 pycodestyle
@@ -6,3 +7,4 @@ six<2
 pytest-cov==2.4.0
 pytest-logging==2015.11.4
 pytest-xdist==1.15.0
+flake8<=3
\ No newline at end of file
diff --git a/requirements-py2.txt b/requirements-py2.txt
new file mode 100644
index 000000000..4b9d1a68e
--- /dev/null
+++ b/requirements-py2.txt
@@ -0,0 +1 @@
+futures==3.0.3
\ No newline at end of file
diff --git a/requirements-py3.txt b/requirements-py3.txt
new file mode 100644
index 000000000..7df4593fa
--- /dev/null
+++ b/requirements-py3.txt
@@ -0,0 +1,2 @@
+boto==2.38.0
+snakemake==3.9.0
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 920749abb..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# Package source, listed here for clarity:
---index-url https://pypi.python.org/simple/
-
-futures==3.0.3; python_version == '2.6' or python_version=='2.7'
diff --git a/travis/deploy.sh b/travis/deploy.sh
new file mode 100755
index 000000000..86450f603
--- /dev/null
+++ b/travis/deploy.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+# This script performs various packaging and deployment operations.
+# It assumes it will be called as a deploy hook of Travis CI; e.g.:
+#
+# deploy:
+#   provider: script
+#   script: travis/deploy.sh $TRAVIS_TAG
+#   on:
+#     tags: true
+#     all_branches: true
+
+
+# way to get the absolute path to this script that should
+# work regardless of whether or not this script has been sourced
+# Find original directory of bash script, resolving symlinks
+# http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128
+SOURCE="${BASH_SOURCE[0]}"
+while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
+    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+    if [[ "$OSTYPE" == "darwin"* ]]; then
+        SOURCE="$(readlink "$SOURCE")"
+    else
+        SOURCE="$(readlink -f "$SOURCE")"
+    fi
+    [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+done
+SCRIPT=$SOURCE
+SCRIPT_DIRNAME="$(dirname "$SOURCE")"
+SCRIPTPATH="$(cd -P "$(echo $SCRIPT_DIRNAME)" &> /dev/null && pwd)"
+SCRIPT="$SCRIPTPATH/$(basename "$SCRIPT")"
+
+PKG_VERSION=$1
+
+# === Build conda package and upload
+
+# if the ANACONDA_TOKEN is defined (not on an external branch)
+if [ ! -z "$ANACONDA_TOKEN" ]; then
+    echo "Running $SCRIPTPATH/package-conda.sh"
+    # Render recipe from template and dependency files, setting the tag as the current version
+    packaging/conda-recipe/render-recipe.py $PKG_VERSION --build-reqs requirements-conda.txt --run-reqs requirements-conda.txt --py3-run-reqs requirements-py3.txt --py2-run-reqs requirements-py2.txt --test-reqs requirements-py-tests.txt
+    conda build --python $TRAVIS_PYTHON_VERSION --token $ANACONDA_TOKEN packaging/conda-recipe/viral-ngs
+    # TODO: this is where we check the exit code of conda build, and if successful,
+    # trigger the viral-ngs-deploy repository to build the docker container
+else
+    echo "ANACONDA_TOKEN is not defined. Conda package upload is only supported for branches on the original repository."
+fi
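
The deploy hook above boils down to "render the recipe from the requirements lists, then
conda build it" (the build also uploads, since install-conda.sh sets anaconda_upload and
deploy.sh passes --token). A rough Python equivalent, for illustration only — the deploy()
name and the use of subprocess are not from the patch:

    import subprocess

    def deploy(tag):
        # render the recipe, using the git tag as the version
        subprocess.check_call([
            "packaging/conda-recipe/render-recipe.py", tag,
            "--build-reqs", "requirements-conda.txt",
            "--run-reqs", "requirements-conda.txt",
            "--py2-run-reqs", "requirements-py2.txt",
            "--py3-run-reqs", "requirements-py3.txt",
            "--test-reqs", "requirements-py-tests.txt",
        ])
        # build (and upload) the rendered recipe
        subprocess.check_call(["conda", "build", "packaging/conda-recipe/viral-ngs"])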
diff --git a/travis/install-conda.sh b/travis/install-conda.sh
index 57fd63bbe..786ef7ca7 100755
--- a/travis/install-conda.sh
+++ b/travis/install-conda.sh
@@ -40,11 +40,13 @@ else # if it does not exist, we need to install miniconda
     fi
     hash -r
     conda config --set always_yes yes --set changeps1 no
+    conda config --set anaconda_upload yes # for uploading packages after successful build
     conda config --add channels bioconda
     conda config --add channels r
     conda config --add channels conda-forge
-    conda update -y -q conda
-    conda install java-jdk==8.0.92
+    conda install -y -q conda=4.2 # pin to 4.2.* until this is fixed: https://github.com/conda/conda-build/issues/1666
+    conda config --set auto_update_conda false
+    conda install -y java-jdk==8.0.92
 fi
 
 conda info -a # for debugging
diff --git a/travis/install-pip.sh b/travis/install-pip.sh
index 082f6da7c..2885ddad6 100755
--- a/travis/install-pip.sh
+++ b/travis/install-pip.sh
@@ -2,16 +2,19 @@
 set -e
 
 echo "pip installing required python packages"
-pip install -r requirements.txt
+#pip install -r requirements.txt
 
 #PYVER=`python -V 2>&1 | cut -c 8`
 PYVER=`echo $TRAVIS_PYTHON_VERSION | cut -c 1`
 if [ "$PYVER" = "3" ]; then
     echo "pip installing snakemake packages (py3 only)"
-    pip install -r requirements-pipes.txt
+    pip install -r requirements-py3.txt
+elif [ "$PYVER" = "2" ]; then
+    echo "pip install py2 packages"
+    pip install -r requirements-py2.txt
 fi
 
 python --version
 
 echo "pip installing test-related packages (coveralls, etc.)"
-pip install -r requirements-tests.txt
+pip install -r requirements-py-tests.txt
diff --git a/travis/tests-long.sh b/travis/tests-long.sh
index 355a593fb..6b1bc6d71 100755
--- a/travis/tests-long.sh
+++ b/travis/tests-long.sh
@@ -3,6 +3,7 @@ set -e
 
 echo "TRAVIS_BRANCH: $TRAVIS_BRANCH"
 echo "TRAVIS_PULL_REQUEST: $TRAVIS_PULL_REQUEST"
+echo "TRAVIS_TAG: $TRAVIS_TAG"
 
 if [ $TRAVIS_PULL_REQUEST != "false" -o $TRAVIS_BRANCH = "master" -o -n "$TRAVIS_TAG" ]; then
     echo "This is on master or is a pull request: executing long running tests..."

From 109b1ab5faf43f5eeee44384dc60d889e5a141cd Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Fri, 17 Feb 2017 10:13:41 -0500
Subject: [PATCH 2/8] add jinja2 as test requirement

jinja templates are used to prepare and render the conda recipe
---
 requirements-py-tests.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/requirements-py-tests.txt b/requirements-py-tests.txt
index 7c604b13a..3d40cc804 100644
--- a/requirements-py-tests.txt
+++ b/requirements-py-tests.txt
@@ -7,4 +7,5 @@ six<2
 pytest-cov==2.4.0
 pytest-logging==2015.11.4
 pytest-xdist==1.15.0
-flake8<=3
\ No newline at end of file
+flake8<=3
+Jinja2==2.8

From 917b4be73cc17170f1bb469d042ccc6e63553e54 Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Fri, 17 Feb 2017 14:54:47 -0500
Subject: [PATCH 3/8] pytest-logging -> pytest-catchlog

the current pytest-logging is deprecated, with a new package called
pytest-catchlog superseding it, though the latter will apparently be
renamed pytest-logging in time. Changing the name here until
pytest-logging is updated with the new code.
---
 requirements-py-tests.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements-py-tests.txt b/requirements-py-tests.txt
index 3d40cc804..5cf6033a3 100644
--- a/requirements-py-tests.txt
+++ b/requirements-py-tests.txt
@@ -5,7 +5,7 @@ pycodestyle
 mock==2.0.0
 six<2
 pytest-cov==2.4.0
-pytest-logging==2015.11.4
+pytest-catchlog==1.2.2
 pytest-xdist==1.15.0
 flake8<=3
 Jinja2==2.8

From fdeb6c5c2990b6dfc98639b78442bff5f3d572a7 Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Fri, 17 Feb 2017 16:26:17 -0500
Subject: [PATCH 4/8] tidy some comments

---
 packaging/conda-recipe/render-recipe.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/packaging/conda-recipe/render-recipe.py b/packaging/conda-recipe/render-recipe.py
index 51e47c944..f801c279a 100755
--- a/packaging/conda-recipe/render-recipe.py
+++ b/packaging/conda-recipe/render-recipe.py
@@ -225,9 +225,6 @@ def url_md5(url):
                 if reqs_file:
                     recipe_variables[var_name] = []
                     for line in reqs_file:
-
-                        #print("Before: "+line.replace("\n",""))
-                        #print("After:  "+reformat_package_line(line)+"\n")
                         conda_style_package_line = reformat_package_line(line)
                         if len(conda_style_package_line):
                             recipe_variables[var_name].append(conda_style_package_line)
@@ -260,17 +257,15 @@ def url_md5(url):
                         # if this is an md5 line, don't write it out
                         if line.strip().startswith("md5"):
                             continue
-                        # if this is not a url line, write it verbatim
+                        # if this is not an md5 line, write it verbatim
                         else:
                             outf.writelines([line])
 
                             # if this is a url line
                             if line.strip().startswith("url"):
                                 # parse out the url
-                                #print(line)
                                 url_re = re.compile(r"^(?:(?P<leadingspace>\s*)url:\s*)(?P<url>[\S]*)(?P<extra>.*)$")
                                 matches = url_re.match(line)
-                                #print(matches)
                                 if matches:
                                     if matches.group("url"):
                                         # download file and calculate md5

From 776fbd1e99d0e44d84ad95e8000bb66bbd1389d2 Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Tue, 21 Feb 2017 10:29:49 -0500
Subject: [PATCH 5/8] move test-related packages back to conda recipe

---
 requirements-conda.txt    | 10 ++++++++++
 requirements-py-tests.txt | 12 +-----------
 travis/deploy.sh          |  2 +-
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/requirements-conda.txt b/requirements-conda.txt
index b305512f2..146c79067 100644
--- a/requirements-conda.txt
+++ b/requirements-conda.txt
@@ -26,3 +26,13 @@ future>=0.15.2
 pysam=0.9.1
 bedtools=2.26.0
 pybedtools=0.7.8
+PyYAML==3.11
+pytest==3.0.5
+pycodestyle
+mock==2.0.0
+six<2
+pytest-cov==2.4.0
+pytest-catchlog==1.2.2
+pytest-xdist==1.15.0
+flake8<=3
+Jinja2==2.8
\ No newline at end of file
diff --git a/requirements-py-tests.txt b/requirements-py-tests.txt
index 5cf6033a3..530edf62a 100644
--- a/requirements-py-tests.txt
+++ b/requirements-py-tests.txt
@@ -1,11 +1 @@
-PyYAML==3.11
-pytest==3.0.5
-coveralls==1.1
-pycodestyle
-mock==2.0.0
-six<2
-pytest-cov==2.4.0
-pytest-catchlog==1.2.2
-pytest-xdist==1.15.0
-flake8<=3
-Jinja2==2.8
+coveralls==1.1
\ No newline at end of file
diff --git a/travis/deploy.sh b/travis/deploy.sh
index 86450f603..b115245de 100755
--- a/travis/deploy.sh
+++ b/travis/deploy.sh
@@ -38,7 +38,7 @@ PKG_VERSION=$1
 if [ ! -z "$ANACONDA_TOKEN" ]; then
     echo "Running $SCRIPTPATH/package-conda.sh"
     # Render recipe from template and dependency files, setting the tag as the current version
-    packaging/conda-recipe/render-recipe.py $PKG_VERSION --build-reqs requirements-conda.txt --run-reqs requirements-conda.txt --py3-run-reqs requirements-py3.txt --py2-run-reqs requirements-py2.txt --test-reqs requirements-py-tests.txt
+    packaging/conda-recipe/render-recipe.py $PKG_VERSION --build-reqs requirements-conda.txt --run-reqs requirements-conda.txt --py3-run-reqs requirements-py3.txt --py2-run-reqs requirements-py2.txt
     conda build --python $TRAVIS_PYTHON_VERSION --token $ANACONDA_TOKEN packaging/conda-recipe/viral-ngs
     # TODO: this is where we check the exit code of conda build, and if successful,
     # trigger the viral-ngs-deploy repository to build the docker container

From 819187c8f6577162910303555ab90a4526eeb0a9 Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Tue, 21 Feb 2017 11:08:32 -0500
Subject: [PATCH 6/8] started adding files to trigger build of viral-ngs-deploy
 repository

---
 .travis.yml                           |   4 +-
 travis/deploy.sh                      |   8 +-
 travis/trigger-tests-in-other-repo.sh |  16 +++
 travis/trigger-travis.sh              | 181 ++++++++++++++++++++++++++
 4 files changed, 205 insertions(+), 4 deletions(-)
 create mode 100644 travis/trigger-tests-in-other-repo.sh
 create mode 100644 travis/trigger-travis.sh

diff --git a/.travis.yml b/.travis.yml
index 51d20a8c8..6393dcea2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,7 +33,9 @@ env:
   # $BUNDLE_SECRET for decrypting tarball of third-party tools
   - secure: KX7DwKRD85S7NgspxevgbulTtV+jHQIiM6NBus2/Ur/P0RMdpt0EQQ2wDq79qGN70bvvkw901N7EjSYd+GWCAM7StXtaxnLRrrZ3XI1gX7KMk8E3QzPf0zualLDs7cuQmL6l6WiElUAEqumLc7WGpLZZLdSPzNqFSg+CBKCmTI8=
   # $ANACONDA_TOKEN for uploading builds to anaconda.org ("broad-viral" channel) 
-  - secure: "O+yKZxHthroiSi8KqMEF9qWDNv43iDXpk4rbhaZDlUKBiR5+AeXLR7OodWpX9LvhihpWgDoS5W42K0joPRP/rUJ2Jux9GH84Jhg+uDKN+XDi2sNT1/DsI4BTF0xxO0TeQ6IokbzV7idfW6gbhNoWMky7DnXtK6ruCJkkx4tWlno="
+  - secure: O+yKZxHthroiSi8KqMEF9qWDNv43iDXpk4rbhaZDlUKBiR5+AeXLR7OodWpX9LvhihpWgDoS5W42K0joPRP/rUJ2Jux9GH84Jhg+uDKN+XDi2sNT1/DsI4BTF0xxO0TeQ6IokbzV7idfW6gbhNoWMky7DnXtK6ruCJkkx4tWlno=
+  # $TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO
+  - secure: ChB0K3gPr5HknxYA41xCrpgChHDmLkqc79p1NABB/tbqOEnrPzDPqE+FU4/QlmeV96jMYn4uyLVauJpzVXyBIVoOa8guqoF5VdiKlAhaUwh9UQJ75i3SKQtGBrqaTXSDVI1vJARMiGabduCrcNJxVsxV9Bm+YzTq6tuhWyqR4fs=
 
 git:
   depth: 3
diff --git a/travis/deploy.sh b/travis/deploy.sh
index b115245de..3462974b8 100755
--- a/travis/deploy.sh
+++ b/travis/deploy.sh
@@ -39,9 +39,11 @@ if [ ! -z "$ANACONDA_TOKEN" ]; then
     echo "Running $SCRIPTPATH/package-conda.sh"
     # Render recipe from template and dependency files, setting the tag as the current version
     packaging/conda-recipe/render-recipe.py $PKG_VERSION --build-reqs requirements-conda.txt --run-reqs requirements-conda.txt --py3-run-reqs requirements-py3.txt --py2-run-reqs requirements-py2.txt
-    conda build --python $TRAVIS_PYTHON_VERSION --token $ANACONDA_TOKEN packaging/conda-recipe/viral-ngs
-    # TODO: this is where we check the exit code of conda build, and if successful,
-    # trigger the viral-ngs-deploy repository to build the docker container
+    if conda build --python $TRAVIS_PYTHON_VERSION --token $ANACONDA_TOKEN packaging/conda-recipe/viral-ngs; then
+        # check the exit code of conda build, and if successful,
+        # trigger the viral-ngs-deploy repository to test/build the docker container
+        ./travis/trigger-tests-in-other-repo.sh
+    fi
 else
     echo "ANACONDA_TOKEN is not defined. Conda package upload is only supported for branches on the original repository."
 fi
diff --git a/travis/trigger-tests-in-other-repo.sh b/travis/trigger-tests-in-other-repo.sh
new file mode 100644
index 000000000..cbe511298
--- /dev/null
+++ b/travis/trigger-tests-in-other-repo.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+echo "Script called to trigger tests in external repository..."
+
+# only initiate tests in other repo if the travis token string has a value
+if [ ! -z "$TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO" ]; then
+    echo "TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO is defined"
+    # if this is a tagged release, add that information to the dependent build request
+    if [ -n "$TRAVIS_TAG" ]; then
+        ./travis/trigger-travis.sh --script "env UPSTREAM_BRANCH=$TRAVIS_BRANCH UPSTREAM_TAG=$TRAVIS_TAG bash ./travis/tests-long.sh" broadinstitute viral-ngs-deploy $TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO "UPSTREAM_BRANCH=$TRAVIS_BRANCH UPSTREAM_TAG=$TRAVIS_TAG"
+    else
+        ./travis/trigger-travis.sh --script "env UPSTREAM_BRANCH=$TRAVIS_BRANCH ./travis/tests-long.sh" broadinstitute viral-ngs-deploy $TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO "UPSTREAM_BRANCH=$TRAVIS_BRANCH"
+    fi
+else
+    echo "TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO is undefined. Check the secure variable."
+fi
\ No newline at end of file
diff --git a/travis/trigger-travis.sh b/travis/trigger-travis.sh
new file mode 100644
index 000000000..a33802856
--- /dev/null
+++ b/travis/trigger-travis.sh
@@ -0,0 +1,181 @@
+#!/bin/sh -f
+
+# Derived from: https://github.com/mernst/plume-lib/blob/master/bin/trigger-travis.sh
+# Which is originally under an MIT license
+
+# Trigger a new Travis-CI job.
+# Ordinarily, a new Travis job is triggered when a commit is pushed to a
+# GitHub repository.  The trigger-travis.sh script provides a programmatic
+# way to trigger a new Travis job.
+
+# Usage:
+#   trigger-travis.sh [--pro] [--branch BRANCH] GITHUBID GITHUBPROJECT TRAVIS_ACCESS_TOKEN [MESSAGE]
+# For example:
+#   trigger-travis.sh typetools checker-framework `cat ~/private/.travis-access-token` "Trigger for testing"
+#
+# where --pro means to use travis-ci.com instead of travis-ci.org, and
+# where TRAVIS_ACCESS_TOKEN is, or ~/private/.travis-access-token contains,
+# the Travis access token.
+#
+# Your Travis access token is the text after "Your access token is " in
+# the output of these commands:
+#   travis login && travis token
+# (If the travis program isn't installed, do so with one of these two commands:
+#    gem install travis
+#    sudo apt-get install ruby-dev && sudo gem install travis
+# Don't do "sudo apt-get install travis" which installs a trajectory analyzer.)
+# Note that the Travis access token output by `travis token` differs from the
+# Travis token available at https://travis-ci.org/profile .
+# If you store it in a file, make sure the file is not readable by others,
+# for example by running:  chmod og-rwx ~/private
+
+# To use this script to trigger a dependent build in Travis, do two things:
+#
+# 1. Set an environment variable TRAVIS_ACCESS_TOKEN by navigating to
+#   https://travis-ci.org/MYGITHUBID/MYGITHUBPROJECT/settings
+# The TRAVIS_ACCESS_TOKEN environment variable will be set when Travis runs
+# the job, but won't be visible to anyone browsing https://travis-ci.org/.
+#
+# 2. Add the following before_install and after_script block to your
+# .travis.yml file, where you replace OTHERGITHUB* by a specific downstream
+# project, but you leave $TRAVIS_ACCESS_TOKEN as literal text:
+#
+# before_install:
+#   - npm install --save-dev travis-after-all
+#
+# after_script:
+#   - |
+#       declare exitCode;
+#       $(npm bin)/travis-after-all
+#       exitCode=$?
+#
+#       if [ "$exitCode" -eq 0 ]; then
+#         if [[ ($TRAVIS_BRANCH == master) &&
+#               ($TRAVIS_PULL_REQUEST == false) ]] ; then
+#           curl -LO https://raw.github.com/mernst/plume-lib/master/bin/trigger-travis.sh
+#           sh trigger-travis.sh OTHERGITHUBID OTHERGITHUBPROJECT $TRAVIS_ACCESS_TOKEN
+#         fi
+#       fi
+#
+# Your .travis.yml file must not use `language: generic` because then
+# npm won't be installed.
+#
+# Note that Travis does not fail a job if an after_success command fails.
+# If you misspell a GitHub ID or project name, then this script will fail,
+# but Travis won't inform you of the mistake.  So, check the end of the
+# Travis build log the first time that a build succeeds.
+
+# Here is an explanation of the conditional in the after_success block:
+#
+# 1. Downstream projects are triggered only for builds of the mainline, not
+# branches or pull requests.  The reason is that typically a downstream
+# project clones and uses the mainline.  You could enhance this script to
+# accept pass an environment variable for the upstream project; the
+# downstream project's build script would need to read and use that
+# environment variable.  If you make this enhancement, feel free to submit
+# a pull request so that others can benefit from it.
+#
+# 2. Downstream projects are triggered only if the Travis job number
+# contains no "." or ends with ".1".  In other words, if your .travis.yml
+# defines a build matrix
+# (https://docs.travis-ci.com/user/customizing-the-build/#Build-Matrix)
+# that runs the same job using different configurations, then the
+# "after_success:" block is run only for the first configuration.
+# By default an after_success: block is run for every build in the matrix,
+# but you really want it to run once if all the builds in the matrix
+# succeed.  Running if the first job succeeds is simple and it is usually
+# adequate, even though the downstream job is triggered even if some job
+# other than the first one fails.
+
+# TODO: enable the script to clone a particular branch rather than master.
+# This would require a way to know the relationships among branches in
+# different GitHub projects.  It's easier to run all your tests within a
+# single Travis job, if they fit within Travis's 50-minute time limit.
+
+# An alternative to this script would be to install the Travis command-line
+# client and then run:
+#   travis restart -r OTHERGITHUBID/OTHERGITHUBPROJECT
+# That is undesirable because it restarts an old job, destroying its history,
+# rather than starting a new job which is our goal.
+
+# Parts of this script were originally taken from
+# http://docs.travis-ci.com/user/triggering-builds/
+
+# TODO: take a --branch command-line argument.
+
+if [ "$#" -lt 3 ] || [ "$#" -ge 7 ]; then
+  echo "Wrong number of arguments $# to trigger-travis.sh; run like:"
+  echo " trigger-travis.sh [--pro] [--script 'script value...'] [--branch BRANCH] GITHUBID GITHUBPROJECT TRAVIS_ACCESS_TOKEN [MESSAGE]" >&2
+  exit 1
+fi
+
+if [ "$1" = "--pro" ] ; then
+  TRAVIS_URL=travis-ci.com
+  shift
+else
+  TRAVIS_URL=travis-ci.org
+fi
+
+if [ "$1" = "--branch" ] ; then
+  shift
+  BRANCH="$1"
+  shift
+else
+  BRANCH=master
+fi
+
+if [ "$1" = "--script" ] ; then
+  shift
+  SCRIPT_VAL="$1"
+  shift
+else
+  SCRIPT_VAL=""
+fi
+
+USER=$1
+REPO=$2
+TOKEN=$3
+if [ $# -eq 4 ] ; then
+    MESSAGE=",\"message\": \"$4\""
+elif [ -n "$TRAVIS_REPO_SLUG" ] ; then
+    MESSAGE=",\"message\": \"Triggered by upstream build of $TRAVIS_REPO_SLUG commit "`git rev-parse --short HEAD`"\""
+else
+    MESSAGE=""
+fi
+
+if [ -n "$SCRIPT_VAL" ]; then
+  SCRIPT_BODY="\"script\": \"$SCRIPT_VAL\""
+else
+  SCRIPT_BODY=""
+fi
+
+body="{
+\"request\": {
+  \"branch\":\"$BRANCH\"$MESSAGE,
+
+  \"config\":{
+  $SCRIPT_BODY
+}}}"
+
+echo "Travis API request body:\n$body"
+
+#exit 0
+
+echo "Making request to start tests in other repository..."
+
+# It does not work to put / in place of %2F in the URL below. 
+curl -s -X POST \
+  -H "Content-Type: application/json" \
+  -H "Accept: application/json" \
+  -H "Travis-API-Version: 3" \
+  -H "Authorization: token ${TOKEN}" \
+  -d "$body" \
+  https://api.${TRAVIS_URL}/repo/${USER}%2F${REPO}/requests \
+ | tee /tmp/travis-request-output.$$.txt
+
+if grep -q '"@type": "error"' /tmp/travis-request-output.$$.txt; then
+    exit 1
+fi
+if grep -q 'access denied' /tmp/travis-request-output.$$.txt; then
+    exit 1
+fi
\ No newline at end of file
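
For reference, the Travis API v3 request that trigger-travis.sh assembles with curl can be
reproduced in a few lines of Python (a sketch only; the repository, branch, message, script,
and token values below are placeholders):

    import json
    try:
        from urllib.request import Request, urlopen
    except ImportError:
        from urllib2 import Request, urlopen

    token = "..."  # the Travis access token, e.g. $TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO
    body = {"request": {"branch": "master",
                        "message": "Triggered by upstream build",
                        "config": {"script": "./travis/tests-long.sh"}}}
    req = Request("https://api.travis-ci.org/repo/broadinstitute%2Fviral-ngs-deploy/requests",
                  data=json.dumps(body).encode("utf-8"),
                  headers={"Content-Type": "application/json",
                           "Accept": "application/json",
                           "Travis-API-Version": "3",
                           "Authorization": "token " + token})
    print(urlopen(req).read())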

From baf946c111e8d5a49edc42ac7c7ffb3b3f1ce010 Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Tue, 21 Feb 2017 11:08:54 -0500
Subject: [PATCH 7/8] comment revision

---
 .travis.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index 6393dcea2..e6cc75ce1 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -34,7 +34,7 @@ env:
   - secure: KX7DwKRD85S7NgspxevgbulTtV+jHQIiM6NBus2/Ur/P0RMdpt0EQQ2wDq79qGN70bvvkw901N7EjSYd+GWCAM7StXtaxnLRrrZ3XI1gX7KMk8E3QzPf0zualLDs7cuQmL6l6WiElUAEqumLc7WGpLZZLdSPzNqFSg+CBKCmTI8=
   # $ANACONDA_TOKEN for uploading builds to anaconda.org ("broad-viral" channel) 
   - secure: O+yKZxHthroiSi8KqMEF9qWDNv43iDXpk4rbhaZDlUKBiR5+AeXLR7OodWpX9LvhihpWgDoS5W42K0joPRP/rUJ2Jux9GH84Jhg+uDKN+XDi2sNT1/DsI4BTF0xxO0TeQ6IokbzV7idfW6gbhNoWMky7DnXtK6ruCJkkx4tWlno=
-  # $TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO
+  # $TRAVIS_ACCESS_TOKEN_FOR_OTHER_REPO (viral-ngs-deploy)
   - secure: ChB0K3gPr5HknxYA41xCrpgChHDmLkqc79p1NABB/tbqOEnrPzDPqE+FU4/QlmeV96jMYn4uyLVauJpzVXyBIVoOa8guqoF5VdiKlAhaUwh9UQJ75i3SKQtGBrqaTXSDVI1vJARMiGabduCrcNJxVsxV9Bm+YzTq6tuhWyqR4fs=
 
 git:

From fff652faf408714a4850df69caae7420141288cd Mon Sep 17 00:00:00 2001
From: Christopher Tomkins-Tinch 
Date: Tue, 21 Feb 2017 11:11:06 -0500
Subject: [PATCH 8/8] moved sudo: false out of travis matrix (was failing lint),
 added fast_finish: true

---
 .travis.yml | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index e6cc75ce1..1fb883b44 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,15 +1,14 @@
 language: python
+sudo: false
 
 matrix:
+    fast_finish: true
     include:
         - os: linux
-          sudo: false
           python: 2.7
         - os: linux
-          sudo: false
           python: 3.4
         - os: linux
-          sudo: false
           python: 3.5
 #        - os: osx
 #          language: generic