Skip to content

Commit

Permalink
Merge pull request #111 from TIBHannover/development
Browse files Browse the repository at this point in the history
Release v0.9.1
  • Loading branch information
katrinleinweber authored Aug 7, 2019
2 parents a1be776 + 42367fd commit 39fb81d
Show file tree
Hide file tree
Showing 19 changed files with 110 additions and 64 deletions.
1 change: 1 addition & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# R for travis: see documentation at https://docs.travis-ci.com/user/languages/r

language: R
r: devel
sudo: false
cache: packages

Expand Down
1 change: 1 addition & 0 deletions BacDiveR.Rproj
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,4 @@ StripTrailingWhitespace: Yes
BuildType: Package
PackageUseDevtools: Yes
PackageInstallArgs: --no-multiarch --with-keep.source
PackageRoxygenize: rd,collate,namespace
27 changes: 14 additions & 13 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
Package: BacDiveR
Title: Programmatic Interface For The DSMZ' Bacterial Diversity Metadatabase
Version: 0.9.0
Date: 2019-04-30
Title: Programmatic Interface For The Bacterial Diversity
Metadatabase by DSMZ
Version: 0.9.1
Authors@R:
person(given = "Katrin",
family = "Leinweber",
Expand All @@ -12,23 +12,24 @@ Description: 'BacDive' focuses on providing organism-linked
information covering the multifarious aspects of bacterial
biodiversity: taxonomic classification, morphology, physiology,
cultivation, origin natural habitat and more, both for bacterial and
archaeal strains (see Söhngen et al. (2014) <doi:10.1093/nar/gkt1058>).
BacDiveR helps you investigate this data offline, and semi-automatically in R.
archaeal strains (see Söhngen et al. (2014)
<doi:10.1093/nar/gkt1058>). BacDiveR helps you investigate this data
offline, and semi-automatically in R.
License: MIT + file LICENSE
URL: https://github.com/TIBHannover/BacDiveR
BugReports: https://github.com/TIBHannover/BacDiveR/issues
Imports:
httr (>= 1.3.0),
jsonlite (>= 1.5),
httr (>= 1.4.0),
jsonlite (>= 1.6),
utils (>= 3.5.0)
Suggests:
ggplot2 (>= 3.0.0),
knitr (>= 1.20),
ggplot2 (>= 3.2.0),
knitr (>= 1.23),
magrittr (>= 1.5),
purrr (>= 0.2.5),
rmarkdown (>= 1.10),
spelling (>= 1.2),
testthat (>= 2.0.0)
purrr (>= 0.3.2),
rmarkdown (>= 1.13),
spelling (>= 2.1),
testthat (>= 2.1.1)
VignetteBuilder:
knitr
ByteCompile: true
Expand Down
2 changes: 2 additions & 0 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,5 @@ export(bd_retrieve_by_search)
export(bd_retrieve_by_sequence)
export(bd_retrieve_taxon)
export(prepare_Renviron)
import(httr)
importFrom(jsonlite,fromJSON)
13 changes: 13 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,19 @@
### Fixed
### Security


## BacDiveR 0.9.1

### Fixed

- Expose a previously silent download error (#110; Thanks to @jfy133!)

### Changed

- Repair outdated links in documentation
- Improve some code sections in minor ways


## BacDiveR 0.9.0

All notable changes to this project will be documented in this file.
Expand Down
4 changes: 2 additions & 2 deletions R/bd_retrieve.R
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@
#'
#' @export
#' @examples
#' dataset_717 <- bd_retrieve(id = 717)
#' dataset_717 <- bd_retrieve(id = "717")
#' dataset_717 <- bd_retrieve(id = 717)
#' dataset_717 <- bd_retrieve(id = "717")
#' @rdname bd_retrieve
bd_retrieve <- function(id) {
bd_retrieve_data(searchTerm = as.character(id), searchType = "bacdive_id")
Expand Down
2 changes: 1 addition & 1 deletion R/bd_retrieve_by_culture.R
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#' number by which the associated dataset(s) will be retrieved.
#' @export
#' @examples
#' dataset_DSM_319 <- bd_retrieve_by_culture(collection_no = "DSM 319")
#' dataset_DSM_319 <- bd_retrieve_by_culture(collection_no = "DSM 319")
# Retrieve the BacDive dataset(s) associated with a culture collection
# number by delegating to the shared retrieval helper with the
# "culturecollectionno" search type.
bd_retrieve_by_culture <- function(collection_no) {
  bd_retrieve_data(
    searchType = "culturecollectionno",
    searchTerm = collection_no
  )
}
36 changes: 21 additions & 15 deletions R/bd_retrieve_by_search.R
Original file line number Diff line number Diff line change
Expand Up @@ -12,23 +12,27 @@
#' @export
#'
#' @examples
#' plant_animal_pathogens <-
#' bd_retrieve_by_search(
#' queryURL = paste(
#' "https://bacdive.dsmz.de/advsearch?site=advsearchsearch",
#' "params%5B5%5D%5Bsearchterm%5D=1",
#' "searchparams%5B157%5D%5Bsearchterm%5D=1",
#' "searchparams%5B158%5D%5Bsearchterm%5D=1",
#' "advsearch=search", sep = "&")
#' )
#' plant_animal_pathogens <-
#' bd_retrieve_by_search(
#' queryURL = paste(
#' "https://bacdive.dsmz.de/advsearch?site=advsearchsearch",
#' "params%5B5%5D%5Bsearchterm%5D=1",
#' "searchparams%5B157%5D%5Bsearchterm%5D=1",
#' "searchparams%5B158%5D%5Bsearchterm%5D=1",
#' "advsearch=search",
#' sep = "&"
#' )
#' )
bd_retrieve_by_search <- function(queryURL) {
# guard against other URLs
if (!grepl(pattern = "^https:\\/\\/bacdive\\.dsmz\\.de\\/advsearch", queryURL) |
!grepl("[?&]site=advsearch", queryURL) |
!grepl("[?&]advsearch=search", queryURL) |
!grepl("\\&searchparams", queryURL)) {
!grepl("[?&]site=advsearch", queryURL) |
!grepl("[?&]advsearch=search", queryURL) |
!grepl("\\&searchparams", queryURL)) {
stop(
"I'm sorry, but this doesn't seem like an advanced search URL from https://BacDive.DSMZ.de/advsearch! Aborting...\nPlease read https://TIBHannover.GitHub.io/BacDiveR/#how-to-use"
"I'm sorry, but this doesn't seem like an advanced search URL from
https://BacDive.DSMZ.de/advsearch! Aborting...\nPlease read
https://TIBHannover.GitHub.io/BacDiveR/#how-to-use"
)
}

Expand All @@ -46,8 +50,10 @@ bd_retrieve_by_search <- function(queryURL) {
httr::content(response, as = "text", encoding = "UTF-8")

if (grepl("^[[:digit:]]", payload)) {
aggregate_datasets(payload = strsplit(x = payload, split = "\\n")[[1]],
from_IDs = TRUE)
aggregate_datasets(
payload = strsplit(x = payload, split = "\\n")[[1]],
from_IDs = TRUE
)
} else if (grepl("^<!DOCTYPE", payload)) {
warning("No datasets found. Please check your advanced search and copy-paste the URL again.")
list()
Expand Down
2 changes: 1 addition & 1 deletion R/bd_retrieve_by_sequence.R
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#' which the associated dataset(s) will be retrieved.
#' @export
#' @examples
#' dataset_AJ000733 <- bd_retrieve_by_sequence(accession = "AJ000733")
#' dataset_AJ000733 <- bd_retrieve_by_sequence(accession = "AJ000733")
# Retrieve the BacDive dataset(s) associated with a sequence accession
# number by delegating to the shared retrieval helper with the
# "sequence" search type.
bd_retrieve_by_sequence <- function(accession) {
  bd_retrieve_data(
    searchType = "sequence",
    searchTerm = accession
  )
}
9 changes: 4 additions & 5 deletions R/bd_retrieve_data.R
Original file line number Diff line number Diff line change
Expand Up @@ -93,13 +93,12 @@ sanitise_type <- function(searchType) {
construct_url <- function(searchTerm,
searchType = "bacdive_id") {
utils::URLencode(
paste0(
"https://bacdive.dsmz.de/api/bacdive/",
paste(
"https://bacdive.dsmz.de/api/bacdive",
searchType,
"/",
searchTerm,
"/",
"?format=json"
"?format=json",
sep = "/"
)
)
}
18 changes: 13 additions & 5 deletions R/util-download.R
Original file line number Diff line number Diff line change
@@ -1,16 +1,24 @@
#' Download Something from BacDive
#'
#' Both `user` and `password` are read from the .Renviron file by default.
#'
#' @param URL A correctly encoded character string, either from construct_url()
#' or a JSON list
#'
#' @return The R object deserialised from the downloaded JSON
#' @keywords internal
download <- function(URL) {
#' @import httr
#' @importFrom jsonlite fromJSON
download <- function(URL, user = get_credentials()[1], password = get_credentials()[2]) {
message(URLs_to_IDs(URL), " ", appendLF = FALSE)

cred <- get_credentials()
response <- GET(URL, authenticate(user, password))
payload <- content(response, as = "text", encoding = "UTF-8")
data <- fromJSON(payload)

response <- httr::GET(URL, httr::authenticate(cred[1], cred[2]))
payload <- httr::content(response, as = "text", encoding = "UTF-8")
jsonlite::fromJSON(payload)
if (response$status_code == 403) {
stop(paste(data, "\nCheck your .Renviron file, and try copy-pasting your login credentials into https://bacdive.dsmz.de/api/bacdive/ to test them. Correct as necessary and try your data download again."))
} else {
return(data)
}
}
3 changes: 2 additions & 1 deletion R/util-get_credentials.R
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#' Read BacDive Login Credentials from .Renviron
#'
#' @return A vector or the two character strings `BacDive_email` and `BacDive_password` representing those two environment variables.
#' @return A vector or the two character strings `BacDive_email`
#' and `BacDive_password` representing those two environment variables.
#' @keywords internal
get_credentials <- function() {
readRenviron(construct_Renviron_path())
Expand Down
12 changes: 6 additions & 6 deletions man/bd_retrieve.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

20 changes: 11 additions & 9 deletions man/bd_retrieve_by_search.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions man/download.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion man/get_credentials.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 7 additions & 2 deletions tests/testthat/test-bd_retrieve_by_search.R
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,12 @@ test_that("Inconsistent datasets get corrected", {
})

test_that("Fuzzing of queryURL parameter produces error", {
fuzzy_URL <-
paste0(sample(strsplit(queryURL, "")[[1]], size = nchar(queryURL)), collapse = "")
fuzzy_URL <- paste0(
sample(strsplit(queryURL, "")[[1]],
size = nchar(queryURL)
),
collapse = ""
)

expect_error(bd_retrieve_by_search(fuzzy_URL))
})
4 changes: 4 additions & 0 deletions tests/testthat/test-download.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
test_that("Downloading without proper credentials raises an error", {
  # Build a single random 8-character string rather than a length-8
  # character vector: httr::authenticate() expects scalar user/password
  # values, so a vector here would test the wrong failure mode.
  random <- paste0(sample(letters, 8), collapse = "")
  expect_error(download(construct_url(717), user = random, password = random))
})
3 changes: 2 additions & 1 deletion tests/testthat/test-utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ test_that("preparing .Renviron file works", {
prepare_Renviron()
r_env_lines <- readLines(construct_Renviron_path())

for (type in c("email", "password"))
for (type in c("email", "password")) {
expect_true(any(grepl(paste0("^BacDive_", type, "="), r_env_lines)))
}
})

0 comments on commit 39fb81d

Please sign in to comment.