From 98dda4e4cc6aadece1b3fcf4f521e0b501fc5057 Mon Sep 17 00:00:00 2001
From: Paul Sud <41386393+paul-sud@users.noreply.github.com>
Date: Thu, 29 Apr 2021 15:54:48 -0700
Subject: [PATCH] PIP-1540-input-generation-script (#98)

---
 .isort.cfg                              |   2 +-
 docs/reference.md                       |   2 +
 requirements-scripts.txt                |   1 +
 scripts/make_input_json_from_portal.py  | 140 ++++++++++++++++++
 .../test_make_input_json_from_portal.py | 129 ++++++++++++++++
 tox.ini                                 |   1 +
 6 files changed, 274 insertions(+), 1 deletion(-)
 create mode 100644 requirements-scripts.txt
 create mode 100644 scripts/make_input_json_from_portal.py
 create mode 100644 tests/python/test_make_input_json_from_portal.py

diff --git a/.isort.cfg b/.isort.cfg
index 14a05489..b42de475 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -4,4 +4,4 @@ include_trailing_comma = True
 force_grid_wrap = 0
 use_parentheses = True
 line_length = 88
-known_third_party =pysam,pytest
+known_third_party =pysam,pytest,requests
diff --git a/docs/reference.md b/docs/reference.md
index f0a20035..12783ebe 100644
--- a/docs/reference.md
+++ b/docs/reference.md
@@ -18,6 +18,8 @@ This pipeline has several different supported modes of operation. As such there
 are various entrypoints into the pipeline, each with their own set of relevant inputs. The various entrypoints are described in detail [here](#entrypoints), and the individual parameters are described [here](#input-descriptions). We recommend first determining which entrypoint you need then cross-referencing the relevant input descriptions.
 
+You can create an input JSON for running the pipeline end-to-end, i.e. from fastqs to loop and domain calls, on a Hi-C experiment from the ENCODE portal using the provided [input JSON generation script](../scripts/make_input_json_from_portal.py). Before running it, install the requirements with `pip install -r requirements-scripts.txt`. To invoke it, you must at a minimum provide the accession of the experiment on the portal and the restriction enzyme name (this can be `none` for nonspecific enzymes like DNase). See the script's help text for documentation of usage and options (`python scripts/make_input_json_from_portal.py --help`).
+
 ### Entrypoints
 
 Under each individual entrypoint the inputs for that entrypoint are listed. To run the pipeline using that particular entrypoint you need only specify the required inputs.
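For orientation, the helpers added in `scripts/make_input_json_from_portal.py` below can also be composed directly from Python rather than through the CLI. A minimal sketch, assuming a released experiment — the accession `ENCSR000AAA` is a placeholder, not a real dataset:

```python
# Sketch only: drive the patch's helpers without going through argparse.
# "ENCSR000AAA" is a placeholder accession, not a real experiment.
from scripts.make_input_json_from_portal import (
    get_experiment,
    get_fastqs_from_experiment,
    get_input_json,
    write_json_to_file,
)

# auth=None suffices for released experiments; pass the (key, secret) tuple
# returned by read_auth_from_file() to access "in progress" data.
experiment = get_experiment("ENCSR000AAA", auth=None)
fastqs = get_fastqs_from_experiment(experiment)
input_json = get_input_json(fastqs=fastqs, assembly_name="GRCh38", enzyme="MboI")
write_json_to_file(input_json, "ENCSR000AAA.json")
```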
diff --git a/requirements-scripts.txt b/requirements-scripts.txt
new file mode 100644
index 00000000..9d84d358
--- /dev/null
+++ b/requirements-scripts.txt
@@ -0,0 +1 @@
+requests==2.25.1
diff --git a/scripts/make_input_json_from_portal.py b/scripts/make_input_json_from_portal.py
new file mode 100644
index 00000000..d0fc6a97
--- /dev/null
+++ b/scripts/make_input_json_from_portal.py
@@ -0,0 +1,140 @@
+import argparse
+import json
+from pathlib import Path
+from urllib.parse import urljoin
+
+import requests
+
+PORTAL_URL = "https://www.encodeproject.org"
+
+REFERENCE_FILES = {
+    "GRCh38": {
+        "restriction_sites": {
+            "HindIII": urljoin(
+                PORTAL_URL, "/files/ENCFF984SUZ/@@download/ENCFF984SUZ.txt.gz"
+            ),
+            "DpnII": urljoin(
+                PORTAL_URL, "/files/ENCFF132WAM/@@download/ENCFF132WAM.txt.gz"
+            ),
+            "MboI": urljoin(
+                PORTAL_URL, "/files/ENCFF132WAM/@@download/ENCFF132WAM.txt.gz"
+            ),
+        },
+        "bwa_index": urljoin(
+            PORTAL_URL, "/files/ENCFF643CGH/@@download/ENCFF643CGH.tar.gz"
+        ),
+        "chrom_sizes": urljoin(
+            PORTAL_URL,
+            "/files/GRCh38_EBV.chrom.sizes/@@download/GRCh38_EBV.chrom.sizes.tsv",
+        ),
+    }
+}
+
+ALLOWED_STATUSES = ("released", "in progress")
+
+
+def main():
+    parser = get_parser()
+    args = parser.parse_args()
+    auth = read_auth_from_file(args.keypair_file)
+    experiment = get_experiment(args.accession, auth=auth)
+    fastqs = get_fastqs_from_experiment(experiment)
+    input_json = get_input_json(
+        fastqs=fastqs, assembly_name=args.assembly_name, enzyme=args.enzyme
+    )
+    outfile = args.outfile or "{}.json".format(args.accession)
+    write_json_to_file(input_json, outfile)
+
+
+def get_experiment(accession, auth=None):
+    response = requests.get(
+        urljoin(PORTAL_URL, accession),
+        auth=auth,
+        headers={"Accept": "application/json"},
+    )
+    response.raise_for_status()
+    return response.json()
+
+
+def get_fastqs_from_experiment(experiment):
+    fastq_pairs_by_replicate = {}
+    for file in experiment["files"]:
+        if file["file_format"] == "fastq" and file["status"] in ALLOWED_STATUSES:
+            biological_replicate = file["biological_replicates"][0]
+            paired_with_id = file["paired_with"]
+            paired_with_file = [
+                f for f in experiment["files"] if f["@id"] == paired_with_id
+            ][0]
+            replicate_fastqs = fastq_pairs_by_replicate.get(biological_replicate)
+            if replicate_fastqs is None:
+                fastq_pairs_by_replicate[biological_replicate] = []
+            if file["paired_end"] == "2":
+                file, paired_with_file = paired_with_file, file
+            fastq_pair = {
+                "read_1": urljoin(PORTAL_URL, file["href"]),
+                "read_2": urljoin(PORTAL_URL, paired_with_file["href"]),
+            }
+            if fastq_pair not in fastq_pairs_by_replicate[biological_replicate]:
+                fastq_pairs_by_replicate[biological_replicate].append(fastq_pair)
+    output = [replicate for replicate in fastq_pairs_by_replicate.values()]
+    return output
+
+
+def get_input_json(fastqs, assembly_name, enzyme):
+    input_json = {
+        "hic.fastq": fastqs,
+        "hic.assembly_name": assembly_name,
+        "hic.chrsz": REFERENCE_FILES[assembly_name]["chrom_sizes"],
+        "hic.reference_index": REFERENCE_FILES[assembly_name]["bwa_index"],
+        "hic.restriction_enzymes": [enzyme],
+    }
+    if enzyme != "none":
+        input_json["hic.restriction_sites"] = REFERENCE_FILES[assembly_name][
+            "restriction_sites"
+        ][enzyme]
+    return input_json
+
+
+def write_json_to_file(data, outfile):
+    Path(outfile).write_text(json.dumps(data, indent=2, sort_keys=True))
+
+
+def read_auth_from_file(keypair_file):
+    keypair_path = Path(keypair_file).expanduser()
+    if keypair_path.exists():
+        data = json.loads(keypair_path.read_text())
+        return (data["submit"]["key"], data["submit"]["secret"])
+    else:
+        return None
+
+
+def get_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-a",
+        "--accession",
+        required=True,
+        help="Accession of portal experiment to generate input for",
+    )
+    parser.add_argument(
+        "-e",
+        "--enzyme",
+        choices=("HindIII", "DpnII", "MboI", "none"),
+        required=True,
+        help="Name of restriction enzyme",
+    )
+    parser.add_argument("--outfile")
+    parser.add_argument(
+        "--keypair-file", help="Path to keypairs.json", default="~/keypairs.json"
+    )
+    parser.add_argument(
+        "--assembly-name",
+        choices=("GRCh38",),
+        default="GRCh38",
+        help="Name of assembly; mm10 is not yet supported",
+    )
+    return parser
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tests/python/test_make_input_json_from_portal.py b/tests/python/test_make_input_json_from_portal.py
new file mode 100644
index 00000000..3f242bd8
--- /dev/null
+++ b/tests/python/test_make_input_json_from_portal.py
@@ -0,0 +1,129 @@
+from scripts.make_input_json_from_portal import (
+    get_fastqs_from_experiment,
+    get_input_json,
+)
+
+
+def test_get_input_json():
+    result = get_input_json(
+        fastqs=["foo", "bar"], assembly_name="GRCh38", enzyme="MboI"
+    )
+    assert result == {
+        "hic.assembly_name": "GRCh38",
+        "hic.chrsz": "https://www.encodeproject.org/files/GRCh38_EBV.chrom.sizes/@@download/GRCh38_EBV.chrom.sizes.tsv",
+        "hic.fastq": ["foo", "bar"],
+        "hic.reference_index": "https://www.encodeproject.org/files/ENCFF643CGH/@@download/ENCFF643CGH.tar.gz",
+        "hic.restriction_enzymes": ["MboI"],
+        "hic.restriction_sites": "https://www.encodeproject.org/files/ENCFF132WAM/@@download/ENCFF132WAM.txt.gz",
+    }
+
+
+def test_get_input_json_none_enzyme_has_no_restriction_sites():
+    result = get_input_json(
+        fastqs=["foo", "bar"], assembly_name="GRCh38", enzyme="none"
+    )
+    assert result == {
+        "hic.assembly_name": "GRCh38",
+        "hic.chrsz": "https://www.encodeproject.org/files/GRCh38_EBV.chrom.sizes/@@download/GRCh38_EBV.chrom.sizes.tsv",
+        "hic.fastq": ["foo", "bar"],
+        "hic.reference_index": "https://www.encodeproject.org/files/ENCFF643CGH/@@download/ENCFF643CGH.tar.gz",
+        "hic.restriction_enzymes": ["none"],
+    }
+
+
+def test_get_fastqs_from_experiment():
+    experiment = {
+        "files": [
+            {
+                "@id": "foo",
+                "biological_replicates": ["1"],
+                "paired_end": "1",
+                "paired_with": "bar",
+                "status": "released",
+                "file_format": "fastq",
+                "href": "download1",
+            },
+            {
+                "@id": "bar",
+                "biological_replicates": ["1"],
+                "paired_end": "2",
+                "paired_with": "foo",
+                "status": "released",
+                "file_format": "fastq",
+                "href": "download2",
+            },
+            {
+                "@id": "baz",
+                "biological_replicates": ["2"],
+                "paired_end": "1",
+                "paired_with": "qux",
+                "status": "in progress",
+                "file_format": "fastq",
+                "href": "download3",
+            },
+            {
+                "@id": "qux",
+                "biological_replicates": ["2"],
+                "paired_end": "2",
+                "paired_with": "baz",
+                "status": "in progress",
+                "file_format": "fastq",
+                "href": "download4",
+            },
+            {
+                "@id": "quux",
+                "biological_replicates": ["2"],
+                "paired_end": "1",
+                "paired_with": "corge",
+                "status": "released",
+                "file_format": "fastq",
+                "href": "download5",
+            },
+            {
+                "@id": "corge",
+                "biological_replicates": ["2"],
+                "paired_end": "2",
+                "paired_with": "quux",
+                "status": "released",
+                "file_format": "fastq",
+                "href": "download6",
+            },
+            {
+                "@id": "grault",
+                "biological_replicates": ["2"],
+                "paired_end": "1",
+                "paired_with": "garply",
+                "status": "replaced",
+                "file_format": "fastq",
+                "href": "download7",
+            },
+            {
+                "@id": "garply",
"biological_replicates": ["2"], + "paired_end": "2", + "paired_with": "grault", + "status": "replaced", + "file_format": "fastq", + "href": "download8", + }, + ] + } + result = get_fastqs_from_experiment(experiment) + assert result == [ + [ + { + "read_1": "https://www.encodeproject.org/download1", + "read_2": "https://www.encodeproject.org/download2", + } + ], + [ + { + "read_1": "https://www.encodeproject.org/download3", + "read_2": "https://www.encodeproject.org/download4", + }, + { + "read_1": "https://www.encodeproject.org/download5", + "read_2": "https://www.encodeproject.org/download6", + }, + ], + ] diff --git a/tox.ini b/tox.ini index 09d40b24..652fd3b1 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,7 @@ skipsdist = True commands = python -m pytest --ignore=tests/functional/ --ignore=tests/integration --ignore=tests/unit --noconftest {posargs} deps = pytest + -rrequirements-scripts.txt [testenv:wdl] basepython = python3.7