Skip to content

Commit

Permalink
PIP-1540-input-generation-script (#98)
Browse files Browse the repository at this point in the history
  • Loading branch information
paul-sud authored Apr 29, 2021
1 parent 4940e88 commit 98dda4e
Show file tree
Hide file tree
Showing 6 changed files with 274 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .isort.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@ include_trailing_comma = True
force_grid_wrap = 0
use_parentheses = True
line_length = 88
known_third_party =pysam,pytest
known_third_party =pysam,pytest,requests
2 changes: 2 additions & 0 deletions docs/reference.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@

This pipeline has several different supported modes of operation. As such there are various entrypoints into the pipeline, each with their own set of relevant inputs. The various entrypoints are described in detail [here](#entrypoints), and the individual parameters are described [here](#input-descriptions). We recommend first determining which entrypoint you need then cross-referencing the relevant input descriptions.

You can create an input JSON for running the pipeline end-to-end, i.e. from fastqs to loop and domain calls, on a Hi-C experiment from the ENCODE portal using the provided [input JSON generation script](../scripts/make_input_json_from_portal.py). Before running it, install the requirements with `pip install -r requirements-scripts.txt`. To invoke it, you must at minimum provide the accession of the experiment on the portal and the restriction enzyme name (this can be `none` for nonspecific enzymes like DNase). See the script's help text for documentation of usage and options (`python scripts/make_input_json_from_portal.py --help`).

### Entrypoints

Under each individual entrypoint the inputs for that entrypoint are listed. To run the pipeline using that particular entrypoint you need only specify the required inputs.
Expand Down
1 change: 1 addition & 0 deletions requirements-scripts.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
requests==2.25.1
140 changes: 140 additions & 0 deletions scripts/make_input_json_from_portal.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
import argparse
import json
from pathlib import Path
from urllib.parse import urljoin

import requests

# Base URL of the ENCODE portal; all file hrefs below are resolved against it.
PORTAL_URL = "https://www.encodeproject.org"

# Per-assembly reference files needed by the pipeline, keyed by assembly name.
# Values are fully-qualified portal download URLs.
REFERENCE_FILES = {
    "GRCh38": {
        "restriction_sites": {
            "HindIII": urljoin(
                PORTAL_URL, "/files/ENCFF984SUZ/@@download/ENCFF984SUZ.txt.gz"
            ),
            # NOTE: DpnII and MboI deliberately point at the same sites file
            # (presumably because they share a recognition site — confirm
            # against the portal references if this ever changes).
            "DpnII": urljoin(
                PORTAL_URL, "/files/ENCFF132WAM/@@download/ENCFF132WAM.txt.gz"
            ),
            "MboI": urljoin(
                PORTAL_URL, "/files/ENCFF132WAM/@@download/ENCFF132WAM.txt.gz"
            ),
        },
        "bwa_index": urljoin(
            PORTAL_URL, "/files/ENCFF643CGH/@@download/ENCFF643CGH.tar.gz"
        ),
        "chrom_sizes": urljoin(
            PORTAL_URL,
            "/files/GRCh38_EBV.chrom.sizes/@@download/GRCh38_EBV.chrom.sizes.tsv",
        ),
    }
}

# Only fastqs in one of these portal statuses are included in the input JSON.
ALLOWED_STATUSES = ("released", "in progress")


def main():
    """Build a pipeline input JSON for a portal experiment and write it to disk.

    Reads CLI arguments, optionally authenticates against the portal using a
    keypairs file, fetches the experiment metadata, and assembles the input
    JSON. The output filename defaults to ``<accession>.json``.
    """
    args = get_parser().parse_args()
    auth = read_auth_from_file(args.keypair_file)
    experiment = get_experiment(args.accession, auth=auth)
    fastqs = get_fastqs_from_experiment(experiment)
    input_json = get_input_json(
        fastqs=fastqs, assembly_name=args.assembly_name, enzyme=args.enzyme
    )
    destination = args.outfile if args.outfile else "{}.json".format(args.accession)
    write_json_to_file(input_json, destination)


def get_experiment(accession, auth=None):
    """Fetch an experiment's JSON representation from the ENCODE portal.

    `auth` is an optional (key, secret) tuple for HTTP basic auth. Raises
    ``requests.HTTPError`` on a non-2xx response.
    """
    url = urljoin(PORTAL_URL, accession)
    headers = {"Accept": "application/json"}
    response = requests.get(url, auth=auth, headers=headers)
    response.raise_for_status()
    return response.json()


def get_fastqs_from_experiment(experiment):
    """Group the experiment's usable fastqs into read pairs per biological replicate.

    Returns a list with one entry per biological replicate (in order of first
    appearance); each entry is a list of ``{"read_1": url, "read_2": url}``
    dicts with read 1 always first. Files whose status is not in
    ``ALLOWED_STATUSES`` or whose format is not fastq are skipped.

    Raises ``KeyError`` if a fastq's ``paired_with`` partner is not present in
    the experiment's file list (the original implementation raised IndexError
    in that case; no caller catches either).
    """
    # Index files by @id once so partner lookup is O(1) instead of a full
    # scan of experiment["files"] for every fastq (was O(n^2)).
    files_by_id = {file["@id"]: file for file in experiment["files"]}
    fastq_pairs_by_replicate = {}
    for file in experiment["files"]:
        # Guard clause: only paired fastqs in an allowed status contribute.
        if file["file_format"] != "fastq" or file["status"] not in ALLOWED_STATUSES:
            continue
        # Grouping uses the first biological replicate listed on the file.
        biological_replicate = file["biological_replicates"][0]
        paired_with_file = files_by_id[file["paired_with"]]
        pairs = fastq_pairs_by_replicate.setdefault(biological_replicate, [])
        # Normalize orientation so the R1 fastq is always read_1.
        if file["paired_end"] == "2":
            file, paired_with_file = paired_with_file, file
        fastq_pair = {
            "read_1": urljoin(PORTAL_URL, file["href"]),
            "read_2": urljoin(PORTAL_URL, paired_with_file["href"]),
        }
        # Each pair is encountered twice (once per mate); keep one copy.
        if fastq_pair not in pairs:
            pairs.append(fastq_pair)
    return list(fastq_pairs_by_replicate.values())


def get_input_json(fastqs, assembly_name, enzyme):
    """Assemble the pipeline input JSON as a dict.

    `fastqs` is the per-replicate read-pair structure, `assembly_name` selects
    the reference set from REFERENCE_FILES, and `enzyme` names the restriction
    enzyme. When the enzyme is "none" (nonspecific digestion), no restriction
    sites file is included.
    """
    references = REFERENCE_FILES[assembly_name]
    input_json = {
        "hic.fastq": fastqs,
        "hic.assembly_name": assembly_name,
        "hic.chrsz": references["chrom_sizes"],
        "hic.reference_index": references["bwa_index"],
        "hic.restriction_enzymes": [enzyme],
    }
    if enzyme != "none":
        input_json["hic.restriction_sites"] = references["restriction_sites"][enzyme]
    return input_json


def write_json_to_file(data, outfile):
    """Serialize `data` as pretty-printed, key-sorted JSON at path `outfile`."""
    serialized = json.dumps(data, indent=2, sort_keys=True)
    Path(outfile).write_text(serialized)


def read_auth_from_file(keypair_file):
    """Return a (key, secret) auth tuple from a keypairs JSON file.

    `~` in the path is expanded. Returns None when the file does not exist,
    in which case portal requests proceed unauthenticated.
    """
    keypair_path = Path(keypair_file).expanduser()
    if not keypair_path.exists():
        return None
    keypairs = json.loads(keypair_path.read_text())
    submit = keypairs["submit"]
    return (submit["key"], submit["secret"])


def get_parser():
    """Build the CLI argument parser for this script.

    Required: experiment accession and enzyme name. Optional: output filename
    (defaults to ``<accession>.json`` in main), keypairs file path, and
    assembly name (only GRCh38 is currently supported).
    """
    cli = argparse.ArgumentParser()
    cli.add_argument(
        "-a",
        "--accession",
        required=True,
        help="Accession of portal experiment to generate input for",
    )
    cli.add_argument(
        "-e",
        "--enzyme",
        choices=("HindIII", "DpnII", "MboI", "none"),
        required=True,
        help="Name of restriction enzyme",
    )
    cli.add_argument("--outfile")
    cli.add_argument(
        "--keypair-file", help="Path to keypairs.json", default="~/keypairs.json"
    )
    cli.add_argument(
        "--assembly-name",
        choices=("GRCh38",),
        default="GRCh38",
        help="Name of assembly, mm10 is not yet supported",
    )
    return cli


# Script entry point: build the input JSON and write it to disk.
if __name__ == "__main__":
    main()
129 changes: 129 additions & 0 deletions tests/python/test_make_input_json_from_portal.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
from scripts.make_input_json_from_portal import (
get_fastqs_from_experiment,
get_input_json,
)


def test_get_input_json():
    """MboI on GRCh38 yields all references including a restriction sites file."""
    expected = {
        "hic.assembly_name": "GRCh38",
        "hic.chrsz": "https://www.encodeproject.org/files/GRCh38_EBV.chrom.sizes/@@download/GRCh38_EBV.chrom.sizes.tsv",
        "hic.fastq": ["foo", "bar"],
        "hic.reference_index": "https://www.encodeproject.org/files/ENCFF643CGH/@@download/ENCFF643CGH.tar.gz",
        "hic.restriction_enzymes": ["MboI"],
        "hic.restriction_sites": "https://www.encodeproject.org/files/ENCFF132WAM/@@download/ENCFF132WAM.txt.gz",
    }
    result = get_input_json(
        fastqs=["foo", "bar"], assembly_name="GRCh38", enzyme="MboI"
    )
    assert result == expected


def test_get_input_json_none_enzyme_has_no_restriction_sites():
    """A "none" enzyme (nonspecific digestion) must omit hic.restriction_sites."""
    expected = {
        "hic.assembly_name": "GRCh38",
        "hic.chrsz": "https://www.encodeproject.org/files/GRCh38_EBV.chrom.sizes/@@download/GRCh38_EBV.chrom.sizes.tsv",
        "hic.fastq": ["foo", "bar"],
        "hic.reference_index": "https://www.encodeproject.org/files/ENCFF643CGH/@@download/ENCFF643CGH.tar.gz",
        "hic.restriction_enzymes": ["none"],
    }
    result = get_input_json(
        fastqs=["foo", "bar"], assembly_name="GRCh38", enzyme="none"
    )
    assert result == expected


def test_get_fastqs_from_experiment():
    """Pairs are grouped per replicate; non-allowed statuses are skipped."""

    def fastq(id_, replicate, paired_end, paired_with, status, href):
        # Local factory to keep the fixture compact; all files are fastqs.
        return {
            "@id": id_,
            "biological_replicates": [replicate],
            "paired_end": paired_end,
            "paired_with": paired_with,
            "status": status,
            "file_format": "fastq",
            "href": href,
        }

    experiment = {
        "files": [
            fastq("foo", "1", "1", "bar", "released", "download1"),
            fastq("bar", "1", "2", "foo", "released", "download2"),
            fastq("baz", "2", "1", "qux", "in progress", "download3"),
            fastq("qux", "2", "2", "baz", "in progress", "download4"),
            fastq("quux", "2", "1", "corge", "released", "download5"),
            fastq("corge", "2", "2", "quux", "released", "download6"),
            # The "replaced" pair must be filtered out of the result.
            fastq("grault", "2", "1", "garply", "replaced", "download7"),
            fastq("garply", "2", "2", "grault", "replaced", "download8"),
        ]
    }
    result = get_fastqs_from_experiment(experiment)
    assert result == [
        [
            {
                "read_1": "https://www.encodeproject.org/download1",
                "read_2": "https://www.encodeproject.org/download2",
            }
        ],
        [
            {
                "read_1": "https://www.encodeproject.org/download3",
                "read_2": "https://www.encodeproject.org/download4",
            },
            {
                "read_1": "https://www.encodeproject.org/download5",
                "read_2": "https://www.encodeproject.org/download6",
            },
        ],
    ]
1 change: 1 addition & 0 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ skipsdist = True
commands = python -m pytest --ignore=tests/functional/ --ignore=tests/integration --ignore=tests/unit --noconftest {posargs}
deps =
pytest
-rrequirements-scripts.txt

[testenv:wdl]
basepython = python3.7
Expand Down

0 comments on commit 98dda4e

Please sign in to comment.