diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml index b801ff72..1cd763e9 100644 --- a/.github/workflows/check.yaml +++ b/.github/workflows/check.yaml @@ -10,6 +10,7 @@ on: - ready_for_review branches: - main + - training push: branches: - main diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e14c31ee..e3a5b979 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,15 +9,17 @@ repos: args: [--style_pkg=styler, --style_fun=tidyverse_style] - id: roxygenize additional_dependencies: - - r-lib/cli@*release - - r-lib/desc@*release - - r-lib/gh@*release - - r-lib/pkgcache@*release - - r-lib/pkgdepends@*release - - r-lib/rcmdcheck@*release - - r-lib/remotes@*release + - cli + - desc + - gh + - knitr + - pkgcache + - pkgdepends + - rcmdcheck + - remotes + - stats - utils - - r-lib/zip@*release + - withr # codemeta must be above use-tidy-description when both are used # - id: codemeta-description-updated - id: use-tidy-description @@ -25,6 +27,7 @@ repos: exclude: > (?x)^( data/.*| + inst/.*| (.*/|)\.Rprofile| (.*/|)\.Renviron| (.*/|)\.gitignore| diff --git a/DESCRIPTION b/DESCRIPTION index cddecce3..f88a0765 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -3,8 +3,11 @@ Package: verdepcheck Title: Check Package using Various Versions of Dependencies Version: 0.0.0.9000 Date: 2023-02-14 -Authors@R: - person("Pawel Rucki", , , "pawel.rucki@roche.com", role = c("aut", "cre")) +Authors@R: c( + person("Pawel Rucki", , , "pawel.rucki@roche.com", role = c("aut", "cre")), + person("Andre Verissimo", , , "andre.verissimo@roche.com", role = "aut"), + person("F. Hoffmann-La Roche AG", role = c("cph", "fnd")) + ) Description: Derive package dependencies from DESCRIPTION file using various strategies and run "R CMD CHECK" to validate package compatibility. 
License: Apache License 2.0 | file LICENSE URL: https://github.com/insightsengineering/verdepcheck/ @@ -20,12 +23,12 @@ Imports: rcmdcheck, remotes (>= 2.2.0), stats, - utils + utils, + withr (>= 2.4.3) Suggests: knitr (>= 1.42), pingr, - testthat (>= 3.0.4), - withr (>= 2.4.3) + testthat (>= 3.0.4) Config/Needs/verdepcheck: r-lib/desc, r-lib/gh, @@ -33,10 +36,10 @@ Config/Needs/verdepcheck: r-lib/pkgdepends, r-lib/rcmdcheck, r-lib/remotes, + r-lib/withr, yihui/knitr, r-lib/pingr, - r-lib/testthat, - r-lib/withr + r-lib/testthat VignetteBuilder: knitr Encoding: UTF-8 diff --git a/LICENSE b/LICENSE index 261eeb9e..63aaf2de 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,13 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ +Copyright 2022 F. Hoffmann-La Roche AG - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at - 1. Definitions. + http://www.apache.org/licenses/LICENSE-2.0 - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/NAMESPACE b/NAMESPACE index 8593332a..ea456ab2 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -6,26 +6,61 @@ S3method(get_ref_min,remote_ref_github) S3method(get_ref_min,remote_ref_standard) S3method(get_ref_min_incl_cran,remote_ref) S3method(get_ref_min_incl_cran,remote_ref_github) +S3method(get_release_date,remote_ref) +S3method(get_release_date,remote_ref_cran) +S3method(get_release_date,remote_ref_github) +S3method(get_release_date,remote_ref_standard) +S3method(solve_ip,deps_installation_proposal) +S3method(solve_ip,min_isolated_deps_installation_proposal) +export(check_ip) +export(download_ip) +export(execute_ip) export(get_ref_max) +export(get_ref_min) +export(get_ref_min_incl_cran) export(get_ref_release) +export(get_release_date) +export(install_ip) export(max_deps_check) -export(min_deps_check) +export(min_cohort_deps_check) +export(min_isolated_deps_check) export(new_max_deps_installation_proposal) -export(new_min_deps_installation_proposal) +export(new_min_cohort_deps_installation_proposal) +export(new_min_isolated_deps_installation_proposal) 
export(new_release_deps_installation_proposal) export(release_deps_check) +export(solve_ip) +importFrom(cli,cli_alert_danger) importFrom(cli,cli_progress_bar) importFrom(cli,cli_progress_update) +importFrom(cli,col_blue) +importFrom(cli,col_green) +importFrom(cli,col_yellow) +importFrom(cli,pb_current) +importFrom(cli,pb_elapsed) +importFrom(cli,pb_eta) +importFrom(cli,pb_extra) +importFrom(cli,pb_spin) +importFrom(cli,pb_total) +importFrom(cli,style_bold) +importFrom(cli,symbol) importFrom(desc,desc) importFrom(gh,gh) importFrom(gh,gh_gql) importFrom(pkgcache,cran_archive_list) importFrom(pkgcache,meta_cache_list) +importFrom(pkgcache,ppm_repo_url) +importFrom(pkgcache,ppm_snapshots) +importFrom(pkgdepends,as_pkg_dependencies) importFrom(pkgdepends,new_pkg_deps) importFrom(pkgdepends,new_pkg_installation_proposal) importFrom(pkgdepends,parse_pkg_ref) +importFrom(pkgdepends,parse_pkg_refs) importFrom(pkgdepends,pkg_dep_types) importFrom(rcmdcheck,rcmdcheck) importFrom(remotes,github_remote) +importFrom(stats,na.omit) importFrom(stats,setNames) +importFrom(utils,head) importFrom(utils,installed.packages) +importFrom(withr,defer) diff --git a/R/check.R b/R/check.R index c0b8d893..68be3bec 100644 --- a/R/check.R +++ b/R/check.R @@ -1,101 +1,146 @@ -#' Executes installation plan. +#' Execute `R CMD CHECK` on a local package with all dependencies pre-installed using various strategies. 
#' -#' This function would executes the following: -#' * solves package dependencies -#' * downloads all package dependencies -#' * installs system requirements -#' * installs all package dependencies +#' @inheritSection new_max_deps_installation_proposal strategies +#' @inherit new_max_deps_installation_proposal note #' -#' @param ip (`pkg_installation_plan`) object to execute +#' @inheritParams new_max_deps_installation_proposal +#' @inheritParams execute_ip #' -#' @returns `pkg_installation_plan` object invisibly +#' @inherit execute_ip return #' -#' @keywords internal -install_ip <- function(ip) { - try({ - solve_ip(ip) - ip$stop_for_solution_error() - - ip$download() - ip$stop_for_download_error() +#' @seealso [deps_installation_proposal] +#' +#' @rdname deps_check +#' +#' @export +#' +#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != "" +#' x <- max_deps_check(".") +#' x$ip +#' x$check +max_deps_check <- function(path, + config = list(), + build_args = character(), + check_args = character(), + ...) { + ip <- new_max_deps_installation_proposal(path, config) + execute_ip(ip, path, check_args, build_args, ...) +} - ip$install_sysreqs() - ip$install() - }) +#' @rdname deps_check +#' @export +#' +#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != "" +#' x <- release_deps_check(".") +#' x$ip +#' x$check +release_deps_check <- function(path, + config = list(), + build_args = character(), + check_args = character(), + ...) { + ip <- new_release_deps_installation_proposal(path, config) + execute_ip(ip, path, check_args, build_args, ...) +} - return(invisible(ip)) +#' @rdname deps_check +#' @export +#' +#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != "" +#' x <- min_cohort_deps_check(".") +#' x$ip +#' x$check +min_cohort_deps_check <- function(path, + config = list(), + build_args = character(), + check_args = character(), + ...) { + ip <- new_min_cohort_deps_installation_proposal(path, config) + execute_ip(ip, path, check_args, build_args, ...) 
} -#' Try to solve using standard method. If error - use [solve_ignore_remotes_release]. -#' @keywords internal -solve_ip <- function(ip) { - ip$solve() - tryCatch( - ip$stop_for_solution_error(), - error = function(e) { - if (!grepl("*.dependency conflict$", e$message)) stop(e) - cat("Solve using alternative method ignoring `@*release` for conflicting refs.\n") - solve_ignore_remotes_release(ip) - ip$stop_for_solution_error() - } - ) +#' @rdname deps_check +#' @export +#' +#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != "" +#' x <- min_isolated_deps_check(".") +#' x$ip +#' x$check +min_isolated_deps_check <- function(path, + config = list(), + build_args = character(), + check_args = character(), + ...) { + ip <- new_min_isolated_deps_installation_proposal(path, config) + execute_ip(ip, path, check_args, build_args, ...) } -#' Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts. -#' @keywords internal -solve_ignore_remotes_release <- function(ip) { - # ugly hack! - overwrite resolution result before calling solve - # replace "@*release" GH refs to the "@" for all direct dependent pkgs to avoid conflicts - # use case: - # foo -imports-> bar (>= 1.2.3) & baz (>= 1.2.3) (and has bar@*release and baz@*release in its Remotes) - # bar -imports-> baz (and has baz@*release in its Remotes) - # when doing min_deps we identify min version of baz to be 1.2.3 - # there is a conflict between baz@1.2.3 and baz@*release +#' Executes installation plan and [`rcmdcheck::rcmdcheck()`] in "try mode" to always return. +#' +#' @param ip (`pkg_installation_plan`) object to execute +#' @inheritParams check_ip +#' +#' @return a named `list` with two elements: +#' * `"ip"` - installation plan object +#' * `"check"` - returned value from [`rcmdcheck::rcmdcheck()`] +#' +#' @export +execute_ip <- function(ip, path, build_args, check_args, ...) 
{ + check_res <- NULL + try({ + ip <- solve_ip(ip) + ip <- download_ip(ip) + ip <- install_ip(ip) + check_res <- check_ip(ip, path, build_args, check_args, ...) + }) - ip$resolve() + return(invisible(list(ip = ip, check = check_res))) +} - conflicting_pkgs <- resolution <- ip$get_resolution() +#' Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts. +#' +#' @inheritParams check_ip +#' +#' @inherit solve_ip return +#' +#' @export +download_ip <- function(ip) { + ip$download() + ip$stop_for_download_error() - conflicting_pkgs <- split(resolution, as.factor(conflicting_pkgs$package)) - conflicting_pkgs <- Filter(function(x) any(grepl("\\@\\*release", x$ref)), conflicting_pkgs) - conflicting_pkgs <- Filter(function(x) length(unique(x$ref)) > 1, conflicting_pkgs) + return(invisible(ip)) +} - conflicting_pkgs_refs <- lapply( - conflicting_pkgs, - function(x) { - c( - package = x$package[1], - old_ref = grep("\\@\\*release", x$ref, value = TRUE)[1], - new_ref = grep("\\@\\*release", x$ref, value = TRUE, invert = TRUE)[1] - ) +#' Executes installation plan. 
+#' +#' This function would executes the following: +#' * solves package dependencies +#' * downloads all package dependencies +#' * installs system requirements +#' * installs all package dependencies +#' +#' @inheritParams execute_ip +#' +#' @returns `pkg_installation_plan` object invisibly +#' +#' @export +install_ip <- function(ip) { + ip$install_sysreqs() + tryCatch( + ip$install(), + error = function(err) { + # Print compilation error when installation fails to help debug + print(err) + stop(err) } ) - conflicting_pkgs_refs <- data.frame(do.call(rbind, conflicting_pkgs_refs), row.names = NULL) - - replace_using_df <- function(x, df) { - for (i in seq_len(nrow(df))) { - x <- replace(x, x == df[i, 1], df[i, 2]) - } - x - } - for (i in seq_len(nrow(resolution))) { - i_deps <- resolution[i, "deps"][[1]] - if (any(i_deps$package %in% conflicting_pkgs_refs$package)) { - i_deps$ref <- replace_using_df(i_deps$ref, conflicting_pkgs_refs[, c("old_ref", "new_ref")]) - } - resolution[i, "deps"] <- list(list(i_deps)) - } - - ip$.__enclos_env__$private$plan$.__enclos_env__$private$resolution$result <- resolution - - ip$solve() return(invisible(ip)) } #' Executes [`rcmdcheck::rcmdcheck()`] on a local package using `libpath` from the installation plan. #' -#' @param ip (`pkg_installation_plan`) object to extract `libpath` from +#' @inheritParams execute_ip #' @param path (`string`) path to the package sources #' @param build_args (`string`) value passed as `build_args` argument into [`rcmdcheck::rcmdcheck()`] #' @param check_args (`string`) value passed as `args` argument into [`rcmdcheck::rcmdcheck()`] @@ -107,7 +152,7 @@ solve_ignore_remotes_release <- function(ip) { #' #' @importFrom rcmdcheck rcmdcheck #' -#' @keywords internal +#' @export check_ip <- function(ip, path, build_args = character(), @@ -115,91 +160,12 @@ check_ip <- function(ip, ...) 
{ libpath <- ip$get_config()$get("library") - res <- tryCatch( - rcmdcheck::rcmdcheck( - path, - libpath = libpath, - args = check_args, - build_args = build_args, - error_on = "never", - ... - ), - error = function(e) { - NULL - } + rcmdcheck::rcmdcheck( + path, + libpath = libpath, + args = check_args, + build_args = build_args, + error_on = "never", + ... ) - - return(res) -} - -#' Executes installation plan and [`rcmdcheck::rcmdcheck()`] -#' -#' @inheritParams install_ip -#' @inheritParams check_ip -#' -#' @return a named `list` with two elements: -#' * `"ip"` - installation plan object -#' * `"check"` - returned value from [`rcmdcheck::rcmdcheck()`] -#' -#' @keywords internal -deps_check_internal <- function(ip, path, build_args, check_args, ...) { - ip <- install_ip(ip) - check_res <- check_ip(ip, path, build_args, check_args, ...) - - return(invisible(list(ip = ip, check = check_res))) -} - -#' Execute `R CMD CHECK` on a local package with all dependencies pre-installed using various strategies. -#' -#' @inheritSection new_max_deps_installation_proposal strategies -#' @inherit new_max_deps_installation_proposal note -#' -#' @inheritParams new_max_deps_installation_proposal -#' @inheritParams check_ip -#' -#' @inherit deps_check_internal return -#' -#' @seealso [deps_installation_proposal] -#' -#' @rdname deps_check -#' -#' @export -max_deps_check <- function(path, - config = list( - dependencies = .desc_field, - library = tempfile() - ), - build_args = character(), - check_args = character(), - ...) { - ip <- new_max_deps_installation_proposal(path, config) - deps_check_internal(ip, path, check_args, build_args, ...) -} - -#' @rdname deps_check -#' @export -release_deps_check <- function(path, - config = list( - dependencies = .desc_field, - library = tempfile() - ), - build_args = character(), - check_args = character(), - ...) { - ip <- new_release_deps_installation_proposal(path, config) - deps_check_internal(ip, path, check_args, build_args, ...) 
-} - -#' @rdname deps_check -#' @export -min_deps_check <- function(path, - config = list( - dependencies = .desc_field, - library = tempfile() - ), - build_args = character(), - check_args = character(), - ...) { - ip <- new_min_deps_installation_proposal(path, config) - deps_check_internal(ip, path, check_args, build_args, ...) } diff --git a/R/deps_installation_proposal.R b/R/deps_installation_proposal.R index 021df472..8947d038 100644 --- a/R/deps_installation_proposal.R +++ b/R/deps_installation_proposal.R @@ -9,28 +9,38 @@ #' #' @section strategies: #' Currently implemented strategies: -#' * `max` - use the greatest version of dependent packages -#' * `release` - use released version of dependent packages - use CRAN if possible else if GitHub release -#' is available then use it else fail. -#' * `min` - use the lowest version of dependent packages incorporating minimal version specification in -#' `"Imports"` and `"Suggests"`. If no version is specified then the minimal available -#' version is assumed. See [get_ref_min] for details how the minimal version is determined. +#' * `max` - use the greatest version of dependent packages. Please note that using development version is not +#' guaranteed to be stable. +#' See [get_ref_max] for details. +#' * `release` - use the released version of dependent packages. It will try use CRAN if possible else if +#' GitHub release is available then use it else fail. +#' See [get_ref_release] for details. +#' * `min_cohort` - find maximum date of directly dependent packages release dates and use that as PPM snapshot date +#' for dependency resolve. +#' * `min_isolated` - for each direct dependency: find its release date and use it as PPM snapshot for resolving itself. +#' Next, combine all the individual resolutions and resolve it altogether again. #' -#' Any modification is done for _direct_ dependencies. Indirect ones are installed as usual. 
+#' Both "min" strategies relies on PPM snapshot in order to limit the versions of indirect dependencies so that +#' dependency resolution ends with a package released no earlier than any of its dependency. +#' However, that's not always true for `min_isolated` strategy - done on purpose. +#' +#' Please note that only `min_cohort` and `min_isolated` strategies are "stable". The rest are basing on dynamic +#' references therefore it results might be different without changes in tested package. +#' The most straightforward example is `max` strategy in which the environment will be different after any push of +#' any of the dependencies. #' #' @section configuration: #' `verdepcheck` will look into `"Config/Needs/verdepcheck"` field of the `DESCRIPTION` file for dependent packages -#' references. See [`pkgdepends::pkg_refs`] for details. -#' Some functions are supported only for package references from GitHub. -#' If you specify additional details (i.e. tag, commit, PR or `@*release`) then it wouldn't be changed. Therefore, -#' in order to make full use of various strategies, it is recommended to specify general reference in form of +#' references. See [`pkgdepends::pkg_refs`] for details and this package `DESCRIPTION` file for an example. +#' Please note that some features are enabled only for package references from GitHub. +#' If you specify additional details (i.e. tag, commit, PR or `@*release`) in the reference then it wouldn't be changed. +#' Therefore, in order to make full use of various strategies, it is recommended to specify general reference in form of #' `[=][github::]/[/]` - i.e. without `[]` part. +#' Please see also [`pkgdepends::pkg_config`] and [`pak::pak-config`] for other configuration possibilities. #' #' @param path (`string`) path to the package sources #' @param config (`list`) configuration options. See [`pkgdepends::pkg_config`] for details. 
-#' If it does not include `library` then temporary directory is used which simulates clean environment -#' without using any pre-installed packages. -#' If it does not include `dependencies` then `TRUE` value is used which means all hard dependencies plus `Suggests`. +#' `"dependencies"` and `"library"` elements are overwritten by package level defaults. #' #' @returns `pkg_installation_plan` object #' @@ -43,13 +53,10 @@ #' x <- new_max_deps_installation_proposal(".") #' x$solve() #' x$get_solution() -new_max_deps_installation_proposal <- function( # nolint - path, - config = list( - dependencies = .desc_field, - library = tempfile() - )) { +new_max_deps_installation_proposal <- function(path, # nolint + config = list()) { path <- normalizePath(path) + config <- append_config(default_config(), config) d <- desc::desc(path) @@ -61,7 +68,7 @@ new_max_deps_installation_proposal <- function( # nolint cli_pb_update(refs[[i]]$package) new_refs <- c(new_refs, list(get_ref_max(refs[[i]]))) } - new_refs_str <- vapply(new_refs, `[[`, character(1), "ref") + new_refs_str <- map_key_character(new_refs, "ref") d <- desc_cond_set_refs(d, new_refs_str) @@ -77,13 +84,10 @@ new_max_deps_installation_proposal <- function( # nolint #' x <- new_release_deps_installation_proposal(".") #' x$solve() #' x$get_solution() -new_release_deps_installation_proposal <- function( # nolint - path, - config = list( - dependencies = .desc_field, - library = tempfile() - )) { +new_release_deps_installation_proposal <- function(path, # nolint + config = list()) { path <- normalizePath(path) + config <- append_config(default_config(), config) d <- desc::desc(path) @@ -95,9 +99,10 @@ new_release_deps_installation_proposal <- function( # nolint cli_pb_update(refs[[i]]$package) new_refs <- c(new_refs, list(get_ref_release(refs[[i]]))) } - new_refs_str <- vapply(new_refs, `[[`, character(1), "ref") + new_refs_str <- map_key_character(new_refs, "ref") d <- desc_cond_set_refs(d, new_refs_str) + d <- 
#' @rdname deps_installation_proposal
#' @export
#' @importFrom desc desc
#' @importFrom pkgdepends as_pkg_dependencies parse_pkg_ref
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' x <- new_min_cohort_deps_installation_proposal(".")
#' solve_ip(x)
#' x$get_solution()
new_min_cohort_deps_installation_proposal <- function(path, # nolint
                                                      config = list()) {
  path <- normalizePath(path)
  config <- append_config(default_config(), config)

  d <- desc::desc(path)

  refs <- get_refs_from_desc(d)

  # convert github refs to standard (CRAN) refs when the required version is
  # available on CRAN and no commitish is pinned
  new_refs <- lapply(
    refs,
    function(x) {
      version <- version_from_desc(d, x$package)
      if (inherits(x, "remote_ref_github") &&
        check_if_on_cran(x, version$op, version$op_ver) &&
        x$commitish == "") {
        pkgdepends::parse_pkg_ref(x$package)
      } else {
        x
      }
    }
  )
  # for github type - find ref for min version and add it to the GH ref
  new_refs <- lapply(
    new_refs,
    function(x) {
      if (inherits(x, "remote_ref_github")) {
        version <- version_from_desc(d, x$package)
        get_ref_min(x, version$op, version$op_ver)
      } else {
        x
      }
    }
  )
  new_refs_str <- map_key_character(new_refs, "ref")
  d <- desc_cond_set_refs(d, new_refs_str)
  d <- desc_remotes_cleanup(d)

  # find the PPM snapshot covering the release dates of all minimal dependencies
  refs <- get_refs_from_desc(d)
  refs_pkg <- map_key_character(refs, "package")
  deps <- d$get_deps()

  dependencies_config_cache <- tolower(
    pkgdepends::as_pkg_dependencies(config$dependencies)$direct
  )

  deps_release_dates <- lapply(
    seq_len(nrow(deps)),
    function(i) {
      i_pkg <- deps[i, "package"]

      # skip dependency types not requested in the config
      if (tolower(deps[i, "type"]) %nin% dependencies_config_cache) {
        return(NA)
      }
      # base packages have no release to resolve
      if (i_pkg %in% base_pkgs()) {
        return(NA)
      }
      if (i_pkg %nin% refs_pkg) {
        return(NA)
      }

      i_ref <- refs[[which(refs_pkg == i_pkg)]]

      version <- version_from_desc(d, i_ref$package)
      i_ref_ver <- get_ref_min(i_ref, version$op, version$op_ver)

      get_release_date(i_ref_ver)
    }
  )

  # Obtain the maximum release date of all the dependencies
  max_release_date <- as.Date(
    max(
      as.Date(-Inf), # Suppress warning when running max() with all NA and `na.rm = TRUE`
      unlist(
        lapply(deps_release_dates, as.Date, origin = "1970-01-01")
      ),
      na.rm = TRUE
    ),
    origin = "1970-01-01"
  )

  ppm_repo <- get_ppm_snapshot_by_date(max_release_date)

  config <- append_config(config, list("cran_mirror" = ppm_repo))

  res <- desc_to_ip(d, config)
  class(res) <- c("min_cohort_deps_installation_proposal", "deps_installation_proposal", class(res))
  res
}
#' @rdname deps_installation_proposal
#' @export
#' @importFrom desc desc
#' @importFrom pkgdepends parse_pkg_ref
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' x <- new_min_isolated_deps_installation_proposal(".")
#' solve_ip(x)
#' x$get_solution()
new_min_isolated_deps_installation_proposal <- function(path, # nolint
                                                        config = list()) {
  path <- normalizePath(path)
  config <- append_config(default_config(), config)

  d <- desc::desc(path)

  refs <- get_refs_from_desc(d)

  # Convert GitHub refs to standard (CRAN) refs whenever the package is on CRAN
  # in a version satisfying the DESCRIPTION requirement and no commitish is pinned.
  new_refs <- lapply(refs, function(ref) {
    version <- version_from_desc(d, ref$package)
    if (
      inherits(ref, "remote_ref_github") &&
        check_if_on_cran(ref, version$op, version$op_ver) &&
        ref$commitish == ""
    ) {
      pkgdepends::parse_pkg_ref(ref$package)
    } else {
      ref
    }
  })

  # For refs that stay on GitHub - pin the tag matching the minimal required version.
  new_refs <- lapply(new_refs, function(ref) {
    if (inherits(ref, "remote_ref_github")) {
      version <- version_from_desc(d, ref$package)
      get_ref_min(ref, version$op, version$op_ver)
    } else {
      ref
    }
  })

  new_refs_str <- map_key_character(new_refs, "ref")

  d <- desc_cond_set_refs(d, new_refs_str)
  d <- desc_remotes_cleanup(d)

  res <- desc_to_ip(d, config)
  class(res) <- c("min_isolated_deps_installation_proposal", "deps_installation_proposal", class(res))
  res
}
#' Read DESCRIPTION file and return list of references.
#'
#' Returned list is a union between references specified in the
#' `"Config/Needs/verdepcheck"` field and standard references for all other
#' dependencies not covered there.
#'
#' @param d (`desc`) DESCRIPTION object from [desc::description]
#' @return list of `remote_ref` objects, ordered as the dependencies appear in
#'   the DESCRIPTION file
#'
#' @importFrom pkgdepends pkg_dep_types parse_pkg_ref
#' @keywords internal
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' d <- desc::desc("!new")
#' d$set_dep("foo", "Import")
#' d$set_dep("bar", "Suggest")
#' d$set_list("Config/Needs/verdepcheck", "foo/bar")
#' d$set_list("Config/Needs/verdepcheck", "foo/baz") # not in package deps - will be skipped
#' get_refs_from_desc(d)
get_refs_from_desc <- function(d) {
  # get_desc_field_pkgs() returns character(0) when the field is absent,
  # so no separate presence check is needed here
  refs <- lapply(get_desc_field_pkgs(d), pkgdepends::parse_pkg_ref)
  all_deps <- d$get_deps()
  all_deps <- all_deps$package[all_deps$type %in% pkgdepends::pkg_dep_types()]
  # dependencies not mentioned in the custom field get a plain standard ref
  missing_refs <- setdiff(
    setdiff(all_deps, base_pkgs()),
    map_key_character(refs, "package")
  )
  res <- c(
    refs,
    lapply(missing_refs, pkgdepends::parse_pkg_ref)
  )
  # keep only refs that are actual dependencies, in DESCRIPTION order
  res_idx <- match(all_deps, map_key_character(res, "package"))
  res_idx <- res_idx[!is.na(res_idx)]
  res[res_idx]
}

#' Get the packages from the custom configuration field
#' @param d (`desc`) DESCRIPTION object from [desc::description]
#' @return character vector of refs (possibly empty) listed in the
#'   `"Config/Needs/verdepcheck"` field
#' @keywords internal
get_desc_field_pkgs <- function(d) {
  if (!d$has_fields(.desc_field)) {
    return(character(0))
  }
  trimws(strsplit(d$get_field(.desc_field), ",")[[1]])
}

#' Replace Remotes in the `desc` that have been resolved to a GitHub tag or are
#' in CRAN.
#'
#' Replaces any existing Remotes entry with the resolved GitHub tag from
#' `Config/Needs/verdepcheck`.
#'
#' @param d (`desc`) DESCRIPTION object
#' @return (`desc`) a clone of `d` with the Remotes section rewritten
#'
#' @importFrom pkgdepends parse_pkg_refs
#' @keywords internal
#' @examples
#' # Example that should replace dplyr & tibble on Remotes but not pkgdepends
#'
#' d <- desc::desc(
#'   file = verdepcheck:::local_description(
#'     list(
#'       dplyr = "Import",
#'       tibble = "Import",
#'       pkgdepends = "Import"
#'     ),
#'     remotes = c(
#'       "tidyverse/dplyr@*release",
#'       "tidyverse/tibble@*release",
#'       "r-lib/pkgdepends@*release"
#'     ),
#'     need_verdepcheck = c(
#'       "dplyr",
#'       "tibble=tidyverse/tibble@v3.2.1"
#'     )
#'   )
#' )
#' verdepcheck:::desc_remotes_cleanup(d)
desc_remotes_cleanup <- function(d) {
  if (length(get_desc_field_pkgs(d)) == 0) {
    return(d)
  }
  # Parse the `Config/Needs/verdepcheck` field to retrieve references and extract package names
  desc_field_refs <- pkgdepends::parse_pkg_refs(get_desc_field_pkgs(d))
  desc_field_names <- map_key_character(desc_field_refs, "package")

  # Parse the remotes to retrieve the package names
  remotes_refs <- pkgdepends::parse_pkg_refs(d$get_remotes())
  remotes_names <- map_key_character(remotes_refs, "package")

  # Keep `Config/Needs/verdepcheck` entries that resolve to a remote_ref_github
  desc_field_include_ix <- vapply(desc_field_refs, inherits, logical(1), "remote_ref_github")

  # Only keep previous remotes that are not redefined in `Config/Needs/verdepcheck`
  remotes_include_ix <- remotes_names %in% setdiff(remotes_names, desc_field_names)

  # Create new list of references that will be used as "Remotes"
  new_remotes <- c(
    map_key_character(desc_field_refs[desc_field_include_ix], "ref"),
    map_key_character(remotes_refs[remotes_include_ix], "ref")
  )

  new_d <- d$clone()
  # Remove all remotes and override it
  new_d$clear_remotes()

  # Return without a Remotes section if none should be kept
  if (is.null(new_remotes) || length(new_remotes) == 0) {
    return(new_d)
  }
  new_d$set_remotes(new_remotes)
  new_d
}
#' Set `"Config/Needs/verdepcheck"` section into the `desc` object if not empty else clear this section.
#' @param d (`desc`) DESCRIPTION object
#' @param refs (`character`) references to store in the custom field
#' @keywords internal
desc_cond_set_refs <- function(d, refs) {
  if (length(refs)) {
    d$set_list(.desc_field, refs)
  } else {
    # an empty list would leave a dangling field - drop it entirely
    d$del(.desc_field)
  }
  return(invisible(d))
}

#' Create `installation_plan` object from `desc` object
#' @param d (`desc`) DESCRIPTION object
#' @param config (`list`) configuration forwarded to `pkgdepends`
#' @importFrom pkgdepends new_pkg_installation_proposal
#' @keywords internal
desc_to_ip <- function(d, config) {
  # write the (possibly modified) DESCRIPTION to a temp file and let
  # pkgdepends derive the dependency refs from it
  temp_desc <- tempfile()
  d$write(temp_desc)

  pkgdepends::new_pkg_installation_proposal(
    refs = paste0("deps::", temp_desc),
    config = config
  )
}

#' Get package version from description
#'
#' Splits a DESCRIPTION version requirement such as `">= 1.5"` into its
#' operator and version parts; both are `""` when unconstrained.
#'
#' @param d (`desc`) DESCRIPTION object from [desc::description]
#' @param pkg_name (`character`) Package name
#' @return list with `package`, `version_str`, `op` and `op_ver` elements
#' @keywords internal
#'
#' @examples
#' d <- desc::desc(cmd = "!new")
#'
#' d$set_dep("magrittr", type = "Imports", version = "*")
#' verdepcheck:::version_from_desc(d, "magrittr")
#'
#' d$set_dep("magrittr", type = "Imports", version = ">= 1.5")
#' verdepcheck:::version_from_desc(d, "magrittr")
version_from_desc <- function(d, pkg_name) {
  deps_tbl <- d$get_deps()
  version <- (deps_tbl$version[deps_tbl$package == pkg_name])[[1]]

  result <- list(
    package = pkg_name,
    version_str = version,
    op = "",
    op_ver = ""
  )
  # "*" or blank means "any version" - keep empty op / op_ver
  if (version == "*" || trimws(version) == "") {
    return(result)
  }
  parts <- strsplit(version, " ")[[1]]
  result$op <- parts[1]
  result$op_ver <- parts[2]
  result
}

#' Filter for package versions that comply with an operator and version
#'
#' @param x `vector` of valid package versions.
#' @param op `character(1)` relational operator (`>=`, `==`, ...)
#' @param op_ver `character(1)` or `package_version(1)` with version to compare
#'   with using a relational operator.
#'
#' @keywords internal
#'
#' @examples
#' versions <- paste(1:10, 0, sep = ".")
#' verdepcheck:::filter_valid_version(versions, ">=", "3.1")
filter_valid_version <- function(x, op, op_ver) {
  # drop unparsable entries, then keep only versions satisfying the constraint
  parsed <- Filter(Negate(is.na), numeric_version(x, strict = FALSE))
  if (op == "" || op_ver == "") {
    return(parsed)
  }
  Filter(function(v) check_valid_version(v, op, op_ver), parsed)
}

#' Check for package versions that comply with an operator and version
#'
#' @param x `vector` of valid package versions.
#' @param op `character(1)` relational operator (`>=`, `==`, ...)
#' @param op_ver `character(1)` or `package_version(1)` with version to compare
#'   with using a relational operator.
#'
#' @keywords internal
#'
#' @examples
#' versions <- paste(1:10, 0, sep = ".")
#' verdepcheck:::check_valid_version(versions, ">=", "3.1")
check_valid_version <- function(x, op, op_ver) {
  parsed <- numeric_version(x, strict = FALSE)
  parsed <- Filter(Negate(is.na), parsed)
  # no constraint given - everything qualifies
  if (op == "" || op_ver == "") {
    return(rep(TRUE, NROW(parsed)))
  }

  do.call(op, list(parsed, numeric_version(op_ver)))
}
#' Check if package is available on CRAN.
#'
#' When `op` and `op_ver` are provided, additionally checks that at least one
#' CRAN release satisfies that version requirement.
#'
#' @importFrom pkgcache meta_cache_list
#' @keywords internal
#'
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' verdepcheck:::check_if_on_cran(list(package = "dplyr"))
#' verdepcheck:::check_if_on_cran(list(package = "dplyr"), op = ">=", op_ver = "1.1.0")
#' verdepcheck:::check_if_on_cran(list(package = "dplyr"), op = ">=", op_ver = "9999.9.99")
#' verdepcheck:::check_if_on_cran(list(package = "dplyr"), op = "<", op_ver = "0.0.0")
check_if_on_cran <- function(remote_ref, op = "", op_ver = "") {
  cran_listings <- pkgcache::meta_cache_list(remote_ref$package)
  # without a version constraint any listing at all counts
  if (op == "" || op_ver == "") {
    return(NROW(cran_listings) > 0)
  }
  # Check if minimum version exists on CRAN
  NROW(filter_valid_version(cran_listings$version, op, op_ver)) > 0
}
#' #' @inherit get_ref_min_incl_cran -#' @keywords internal +#' @export #' #' @seealso [get_ref_min_incl_cran()] get_ref_min <- function(remote_ref, op = "", op_ver = "") { @@ -78,6 +89,7 @@ get_ref_min.remote_ref <- function(remote_ref, op = "", op_ver = "") { #' #' @rdname get_ref_min #' @exportS3Method get_ref_min remote_ref_cran +#' @importFrom cli cli_alert_danger #' @importFrom pkgcache cran_archive_list meta_cache_list #' @importFrom pkgdepends parse_pkg_ref #' @importFrom stats setNames @@ -96,7 +108,20 @@ get_ref_min.remote_ref_cran <- function(remote_ref, op = "", op_ver = "") { min_ver <- Filter(function(x) x == min(pv), pv) new_ref <- sprintf("%s@%s", remote_ref$ref, names(min_ver)) # @TODO deparse, add ver, parse again - pkgdepends::parse_pkg_ref(new_ref) + tryCatch( + pkgdepends::parse_pkg_ref(new_ref), + error = function(err) { + cli::cli_alert_danger( + paste( + sep = " ", + "Problem with finding CRAN release meeting following criteria:", + "`{remote_ref$package} ({op} {op_ver})`.", + "The package name or version might be invalid." + ) + ) + stop(err) + } + ) } #' @rdname get_ref_min @@ -107,7 +132,7 @@ get_ref_min.remote_ref_standard <- function(remote_ref, op = "", op_ver = "") { get_ref_min.remote_ref_cran(remote_ref, op, op_ver) } -#' * for GitHub type of remote - this would use [`gh::gh_gql()`] to get list of all tags +#' * for GitHub type of remote - this would use [`gh::gh_gql()`] to get list of all releases or tags #' and then [`gh::gh()`] to download `DESCRIPTION` file and then read package version. 
#' #' @rdname get_ref_min @@ -121,49 +146,92 @@ get_ref_min.remote_ref_github <- function(remote_ref, op = "", op_ver = "") { return(remote_ref) } - tags <- get_gh_tags(remote_ref$username, remote_ref$repo) + refs <- get_gh_refs(remote_ref$username, remote_ref$repo) - if (length(tags) == 0) { + if (length(refs) == 0) { return(remote_ref) } ref_suffix <- "" if (op == "") { - # loop through the tags starting from the earliest until the first valid description file - for (tag in tags) { - tag_desc <- get_desc_from_gh(remote_ref$username, remote_ref$repo, tag) - if ((length(tag_desc) == 1 && is.na(tag_desc)) || tag_desc$get_field("Package") != remote_ref$package) next - ref_suffix <- sprintf("@%s", tag) + # loop through the refs starting from the earliest until the first valid description file + for (ref in refs) { + ref_desc <- get_desc_from_gh(remote_ref$username, remote_ref$repo, ref) + if ((length(ref_desc) == 1 && is.na(ref_desc)) || ref_desc$get_field("Package") != remote_ref$package) next + ref_suffix <- sprintf("@%s", ref) break } } else { - # loop through the tags starting from the earliest until the first version condition met - for (tag in tags) { - tag_desc <- get_desc_from_gh(remote_ref$username, remote_ref$repo, tag) - if ((length(tag_desc) == 1 && is.na(tag_desc)) || tag_desc$get_field("Package") != remote_ref$package) next - tag_ver <- tag_desc$get_version() - op_res <- do.call(op, list(tag_ver, package_version(op_ver))) + # loop through the refs starting from the earliest until the first version condition met + for (ref in refs) { + ref_desc <- get_desc_from_gh(remote_ref$username, remote_ref$repo, ref) + if ((length(ref_desc) == 1 && is.na(ref_desc)) || ref_desc$get_field("Package") != remote_ref$package) next + ref_ver <- ref_desc$get_version() + op_res <- check_valid_version(ref_ver, op, op_ver) if (op_res) { - ref_suffix <- sprintf("@%s", tag) + ref_suffix <- sprintf("@%s", ref) break } } } - new_ref <- sprintf("%s=%s/%s%s", remote_ref$package, 
#' Get list of GitHub releases if not empty, else fall back to the list of tags.
#' @param org (`character(1)`) GitHub organisation / user
#' @param repo (`character(1)`) repository name
#' @return character vector of tag names (possibly empty)
#' @keywords internal
get_gh_refs <- function(org, repo) {
  res <- get_gh_releases(org, repo)
  if (length(res) > 0) {
    return(res)
  }
  get_gh_tags(org, repo)
}

#' List non-prerelease GitHub release tags created within `(min_date, max_date)`,
#' oldest first. Returns `character(0)` when the GraphQL call fails.
#' @importFrom gh gh_gql
#' @keywords internal
get_gh_releases <- function(org, repo, max_date = Sys.Date() + 1, min_date = as.Date("1900-01-01")) {
  gql_query <- sprintf("{
    repository(owner: \"%s\", name: \"%s\") {
      releases(last: 100, orderBy: { field: CREATED_AT, direction: ASC}) {
        nodes {
          tagName
          isPrerelease
          createdAt
        }
      }
    }
  }", org, repo)
  resp <- try(gh::gh_gql(gql_query), silent = TRUE)
  if (inherits(resp, "try-error")) {
    return(character(0))
  }
  res <- Filter(
    # scalar per-node predicate - use short-circuiting `&&` (was `&`)
    function(x) isFALSE(x$isPrerelease) && x$createdAt > min_date && x$createdAt < max_date,
    resp$data$repository$releases$nodes
  )
  map_key_character(res, "tagName")
}

#' List GitHub tags whose commit date falls within `(min_date, max_date)`,
#' oldest first. Returns `character(0)` when the GraphQL call fails.
#' @importFrom gh gh_gql
#' @keywords internal
get_gh_tags <- function(org, repo, max_date = Sys.Date() + 1, min_date = as.Date("1900-01-01")) {
  gql_query <- sprintf("{
    repository(owner: \"%s\", name: \"%s\") {
      refs(refPrefix: \"refs/tags/\", last: 100, orderBy: {field: TAG_COMMIT_DATE, direction: ASC}) {
        nodes {
          name
          target {
            ... on Commit {
              committedDate
            }
          }
        }
      }
    }
  }", org, repo)
  resp <- try(gh::gh_gql(gql_query), silent = TRUE)
  if (inherits(resp, "try-error")) {
    return(character(0))
  }
  res <- Filter(
    function(x) {
      # hoist the conversion; scalar predicate uses `&&` (was `&`)
      commit_date <- as.Date(x$target$committedDate)
      commit_date > min_date && commit_date < max_date
    },
    resp$data$repository$refs$nodes
  )
  map_key_character(res, "name")
}
#' Get release date.
#'
#' @inheritParams get_ref_min
#' @return (`Date`) release date of the referenced package version, or `NA`
#'   when it cannot be determined
#'
#' @export
get_release_date <- function(remote_ref) {
  UseMethod("get_release_date", remote_ref)
}

#' Get release date from GitHub references
#'
#' @rdname get_release_date
#' @exportS3Method get_release_date remote_ref_github
#' @importFrom gh gh_gql
#'
#' @examplesIf gh::gh_token() != ""
#' remote_ref <- pkgdepends::parse_pkg_ref("tidyverse/dplyr@v1.1.0")
#' get_release_date(remote_ref)
get_release_date.remote_ref_github <- function(remote_ref) {
  gql_query <- sprintf("{
    repository(owner: \"%s\", name: \"%s\") {
      refs(refPrefix: \"refs/tags/\", query: \"%s\", first: 100) {
        edges {
          node {
            name
            target {
              ... on Commit {
                committedDate
              }
            }
          }
        }
      }
    }
  }", remote_ref$username, remote_ref$repo, remote_ref$commitish)

  resp <- try(gh::gh_gql(gql_query), silent = TRUE)
  if (inherits(resp, "try-error") || is.null(resp$data$repository$refs$edges)) {
    # as.Date(NA) is safe on all R versions (as.Date(NA_real_) needs `origin` pre-4.3)
    return(as.Date(NA))
  }

  # committed dates (as numeric days) for tags whose name matches the commitish exactly
  result <- vapply(
    resp$data$repository$refs$edges,
    function(x) {
      if (x$node$name != remote_ref$commitish) {
        return(as.Date(NA))
      }
      as.Date(x$node$target$committedDate)
    },
    double(1)
  )

  result <- Filter(function(el) !is.na(el) && !is.null(el), result)

  if (length(result) == 0) {
    return(as.Date(NA))
  }

  # vapply stripped the Date class - restore it with an explicit origin
  max(as.Date(result, origin = "1970-01-01"))
}

#' Get release date from CRAN references
#'
#' @rdname get_release_date
#' @exportS3Method get_release_date remote_ref_cran
#'
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' remote_ref <- pkgdepends::parse_pkg_ref("dplyr@1.1.0")
#' get_release_date.remote_ref_cran(remote_ref)
get_release_date.remote_ref_cran <- function(remote_ref) {
  result <- subset(
    get_cran_data(remote_ref$package),
    package_version(version, strict = FALSE) == package_version(remote_ref$version, strict = FALSE),
    select = "mtime"
  )[[1]][1]
  as.Date(result)
}

#' @export
get_release_date.remote_ref_standard <- function(remote_ref) {
  get_release_date.remote_ref_cran(remote_ref)
}

#' @export
get_release_date.remote_ref <- function(remote_ref) {
  # fallback for unknown ref types - no date information available
  as.Date(NA)
}

#' Get CRAN/Bioconductor metadata information on packages
#'
#' Combines archived and current listings into one data frame with
#' `package`, `version` and `mtime` columns.
#'
#' @importFrom pkgcache cran_archive_list meta_cache_list
#' @importFrom stats setNames
#' @keywords internal
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' verdepcheck:::get_cran_data("dplyr")
#' verdepcheck:::get_cran_data("SummarizedExperiment")
get_cran_data <- function(package) {
  cran_archive <- pkgcache::cran_archive_list(packages = package)[, c(
    "package", "version", "mtime"
  )]
  cran_current <- pkgcache::meta_cache_list(packages = package)[, c(
    "type", "package", "version", "published"
  )]

  # Bioc custom logic as packages in Bioconductor do not return a published date
  # this will be immediately obsolete if {pkgcache} starts to return a non-NA value
  # note: a date is required for the `min_cohort` strategy
  bioc_na_mtime_ix <- is.na(cran_current$published) & cran_current$type == "bioc"
  if (NROW(cran_current[bioc_na_mtime_ix, ]) > 0) {
    cran_current[bioc_na_mtime_ix, "published"] <- Sys.Date()
  }

  # Remove extra columns
  cran_current <- cran_current[, setdiff(names(cran_current), c("type"))]

  cran_current <- setNames(cran_current, names(cran_archive))
  rbind(cran_archive, cran_current)
}

#' Try to solve using standard method. If error - use [resolve_ignoring_release_remote].
#'
#' @inheritParams check_ip
#'
#' @returns `pkg_installation_plan` object invisibly
#'
#' @export
solve_ip <- function(ip) {
  UseMethod("solve_ip", ip)
}

#' @exportS3Method solve_ip deps_installation_proposal
solve_ip.deps_installation_proposal <- function(ip) {
  ip$solve()
  resolve_ignoring_release_remote(ip)
}
#' Try to solve using min_isolated method. If Error - use [resolve_ignoring_release_remote]
#'
#' For each direct dependency, resolve that package using PPM snapshot as of release date + 1.
#' Finally, combine resolutions and run solve.
#'
#' @keywords internal
#'
#' @importFrom stats na.omit
#'
#' @exportS3Method solve_ip min_isolated_deps_installation_proposal
solve_ip.min_isolated_deps_installation_proposal <- function(ip) { # nolint
  ip$resolve()
  res <- ip$get_resolution()

  # deps clean-up
  deps <- res[1, "deps"][[1]]
  ## copy op and version to Config/Needs/verdepcheck rows
  deps <- split(deps, as.factor(deps$package))
  deps <- lapply(deps, function(x) {
    x$op <- x$op[1]
    x$version <- x$version[1]
    x
  })
  deps <- do.call(rbind, deps)
  ## remove non-needed rows (has to be done after copy the data because oftentimes these are the source)
  deps <- deps[tolower(deps$type) %in% tolower(res[1, "dep_types"][[1]]), ]
  ## remove multiple rows for the same package
  deps <- split(deps, as.factor(deps$package))
  deps <- lapply(deps, function(x) {
    # pick one according to the dependency order in config - assuming that custom field is the first choice
    stats::na.omit(x[match(tolower(res[1, "dep_types"][[1]]), tolower(x$type)), ])[1, ]
  })
  deps <- do.call(rbind, deps)

  # Avoid repeating calls to resolve_ppm_snapshot
  deps <- deps[!duplicated(deps[, c("ref", "op", "version")]), ]

  cli_pb_init("min_isolated", total = nrow(deps))

  deps_res <- lapply(seq_len(nrow(deps)), function(i) {
    i_pkg <- deps[i, "package"]

    cli_pb_update(package = i_pkg, n = 4L)

    if (i_pkg %in% base_pkgs()) {
      return(NULL)
    }

    # best-effort: a dependency that fails to resolve is simply skipped
    tryCatch(
      resolve_ppm_snapshot(deps[i, "ref"], deps[i, "op"], deps[i, "version"]),
      error = function(err) NULL
    )
  })

  new_res <- do.call(rbind, deps_res)

  # Keep only top versions in calculated resolution (new_res).
  # Very large resolution tables can become problematic and take a long time to
  # converge to a solution.
  new_res <- new_res[order(new_res$ref, package_version(new_res$version, strict = FALSE), decreasing = TRUE), ]
  new_res <- new_res[!duplicated(new_res[, c("ref")]), ]

  # Keep res at top
  new_res <- rbind(res[1, ], new_res)
  new_res[1, "deps"][[1]] <- list(deps)
  # inject the hand-built resolution into the pkgdepends R6 internals before solving
  ip$.__enclos_env__$private$plan$.__enclos_env__$private$resolution$result <- new_res
  ip$solve()

  resolve_ignoring_release_remote(ip)

  return(invisible(ip))
}

#' If the solution finishes with a "dependency conflict" error then
#' re-try again ignoring "@*release" remote refs for detected conflicts.
#'
#' @inheritParams check_ip
#'
#' @inherit solve_ip return
#'
#' @keywords internal
resolve_ignoring_release_remote <- function(ip) { # nolint
  tryCatch(
    ip$stop_for_solution_error(),
    error = function(e) {
      # NOTE(review): original pattern was "*.dependency conflict$" - the leading
      # "*." is a malformed regex (intended ".*", which is redundant with grepl)
      if (!grepl("dependency conflict$", e$message)) stop(e)
      cat("Solve using alternative method ignoring `@*release` for conflicting refs.\n")
      solve_ip_ignore_remotes_release(ip)
      ip$stop_for_solution_error()
    }
  )
  return(invisible(ip))
}
#' Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts.
#'
#' @inheritParams check_ip
#'
#' @inherit solve_ip return
#'
#' @keywords internal
solve_ip_ignore_remotes_release <- function(ip) { # nolint
  # replace "@*release" GH refs to the "@" for all direct dependent pkgs to avoid conflicts
  # use case:
  # foo -imports-> bar (>= 1.2.3) & baz (>= 1.2.3) (and has bar@*release and baz@*release in its Remotes)
  # bar -imports-> baz (and has baz@*release in its Remotes)
  # when doing min_deps we identify min version of baz to be 1.2.3
  # there is a conflict between baz@1.2.3 and baz@*release

  # make sure a resolution exists before inspecting it
  if (is.null(ip$.__enclos_env__$private$plan$.__enclos_env__$private$resolution$result)) ip$resolve()

  conflicting_pkgs <- resolution <- ip$get_resolution()

  # a package conflicts when it appears both with and without an "@*release" ref
  conflicting_pkgs <- split(resolution, as.factor(conflicting_pkgs$package))
  conflicting_pkgs <- Filter(function(x) any(grepl("\\@\\*release", x$ref)), conflicting_pkgs)
  conflicting_pkgs <- Filter(function(x) length(unique(x$ref)) > 1, conflicting_pkgs)

  # build an (old_ref -> new_ref) mapping per conflicting package
  conflicting_pkgs_refs <- lapply(
    conflicting_pkgs,
    function(x) {
      c(
        package = x$package[1],
        old_ref = grep("\\@\\*release", x$ref, value = TRUE)[1],
        new_ref = grep("\\@\\*release", x$ref, value = TRUE, invert = TRUE)[1]
      )
    }
  )
  conflicting_pkgs_refs <- data.frame(do.call(rbind, conflicting_pkgs_refs), row.names = NULL)

  # substitute refs according to a two-column (old, new) mapping
  replace_using_df <- function(x, df) {
    for (i in seq_len(nrow(df))) {
      x <- replace(x, x == df[i, 1], df[i, 2])
    }
    x
  }
  for (i in seq_len(nrow(resolution))) {
    i_deps <- resolution[i, "deps"][[1]]
    if (any(i_deps$package %in% conflicting_pkgs_refs$package)) {
      i_deps$ref <- replace_using_df(i_deps$ref, conflicting_pkgs_refs[, c("old_ref", "new_ref")])
    }
    resolution[i, "deps"] <- list(list(i_deps))
  }

  # write the patched resolution back into the pkgdepends R6 internals
  ip$.__enclos_env__$private$plan$.__enclos_env__$private$resolution$result <- resolution

  ip$solve()

  return(invisible(ip))
}
`%nin%` <- Negate(`%in%`)

# NULL- / empty-coalescing operator: fall back to `y` when `x` is NULL or empty
`%||%` <- function(x, y) if (is.null(x) || length(x) == 0) y else x

# name of the custom DESCRIPTION field holding verdepcheck-specific refs
.desc_field <- "Config/Needs/verdepcheck"

#' Default `pkg_installation_proposal` config: resolve the custom field plus all
#' direct dependencies into a throwaway library.
#' @importFrom pkgdepends as_pkg_dependencies
#' @keywords internal
default_config <- function() {
  list(
    dependencies = c(.desc_field, pkgdepends::as_pkg_dependencies(TRUE)$direct),
    library = tempfile()
  )
}

#' Merge two configs; on duplicated keys the value from `x1` (the first list) wins.
#' @keywords internal
append_config <- function(x1, x2) {
  append(x1, x2)[unique(c(names(x1), names(x2)))]
}

#' Names of base packages (plus `"R"` itself) that never need resolving.
#' @importFrom utils installed.packages
#' @keywords internal
base_pkgs <- function() {
  c("R", rownames(utils::installed.packages(priority = "base")))
}

#' Return the URL of the earliest PPM snapshot strictly after `date`,
#' falling back to the "latest" repo when no snapshot qualifies.
#' @importFrom pkgcache ppm_repo_url ppm_snapshots
#' @importFrom utils head
#' @keywords internal
#'
#' @examplesIf Sys.getenv("R_USER_CACHE_DIR", "") != ""
#' get_ppm_snapshot_by_date(NA)
#' get_ppm_snapshot_by_date("2023-08-01")
#' get_ppm_snapshot_by_date(Sys.Date() + 10)
get_ppm_snapshot_by_date <- function(date) {
  fallback_repo <- file.path(pkgcache::ppm_repo_url(), "latest")
  # NOTE(review): original called `is.infinite.POSIXlt(date)` - an S3 method of
  # the wrong class invoked directly; coerce via as.Date()/as.numeric() instead,
  # which also handles character input such as "2023-08-01".
  if (is.na(date) || is.infinite(as.numeric(as.Date(date)))) {
    return(fallback_repo)
  }

  snaps <- pkgcache::ppm_snapshots()
  res <- as.character(as.Date(utils::head(
    snaps[as.Date(snaps$date) > as.Date(date), "date"],
    1
  )))
  if (length(res) == 0) {
    warning(sprintf(
      paste0(
        "Cannot find PPM snapshot for date after %s.",
        " Will use latest ppm snapshot instead."
      ),
      as.character(date)
    ))
    return(fallback_repo)
  }
  parse_ppm_url(res)
}

#' Build a PPM repo URL from a snapshot identifier.
#' @importFrom pkgcache ppm_repo_url
#' @keywords internal
parse_ppm_url <- function(snapshot) {
  file.path(pkgcache::ppm_repo_url(), snapshot)
}

#' Resolve the dependencies of package based on the release date + 1
#'
#' @importFrom pkgdepends new_pkg_deps parse_pkg_ref
#' @keywords internal
resolve_ppm_snapshot <- function(pkg_ref_str, operator, pkg_version) {
  i_ref <- pkgdepends::parse_pkg_ref(pkg_ref_str)

  i_ref_minver <- get_ref_min_incl_cran(i_ref, operator, pkg_version)

  i_release_date <- get_release_date(i_ref_minver)

  ppm_repo <- get_ppm_snapshot_by_date(i_release_date)

  i_pkg_deps <- pkgdepends::new_pkg_deps(
    # scalar condition: plain if/else instead of ifelse()
    if (inherits(i_ref_minver, "remote_ref_github")) i_ref_minver$ref else i_ref$ref,
    config = list(dependencies = "hard", cran_mirror = ppm_repo, library = tempfile())
  )
  suppressMessages(i_pkg_deps$resolve())

  i_res <- i_pkg_deps$get_resolution()
  # mark everything as transitive so these rows can be merged into a parent resolution
  i_res$direct <- i_res$directpkg <- FALSE
  i_res
}

#' Create `cli` progress bar for resolving versions.
#' @importFrom cli col_blue col_yellow cli_progress_bar col_green pb_current pb_elapsed pb_eta pb_extra
#'   pb_spin pb_total style_bold symbol
#' @keywords internal
cli_pb_init <- function(type, total, ...) {
  cli::cli_progress_bar(
    format = paste(
      "{cli::pb_spin} Resolving",
      "{cli::style_bold(cli::col_yellow(cli::pb_extra$type))}",
      "version of {cli::col_blue(cli::pb_extra$package)}",
      "[{cli::pb_current}/{cli::pb_total}] ETA:{cli::pb_eta}"
    ),
    format_done = paste0(
      "{cli::col_green(cli::symbol$tick)} Resolved {cli::pb_total} packages in {cli::pb_elapsed}."
    ),
    extra = list(type = type, package = character(0)),
    total = total,
    .envir = parent.frame(2L),
    ...
  )
}

#' Advance the progress bar; `n` selects the caller frame holding the bar.
#' @importFrom cli cli_progress_update
#' @keywords internal
cli_pb_update <- function(package, n = 2L, ...) {
  cli::cli_progress_update(extra = list(package = package), .envir = parent.frame(n), ...)
}
+}
+
+#' Temporarily create a valid DESCRIPTION file to a location that will be deleted
+#'
+#' The file is deleted after the parent environment where this function was called
+#' has exited, when the R session ends or on demand via [withr::deferred_run()]
+#'
+#' @param pkg_list (`vector`) named character vector or list with
+#' paired name and type of dependency. It supports versions by using quotes on
+#' the key
+#' @param remotes (`vector`) string vector that contains remotes to add to
+#' the DESCRIPTION file
+#' @param need_verdepcheck (`vector`) string vector that contains
+#' `Config/Needs/verdepcheck` elements to add to the DESCRIPTION file
+#' @param .local_envir (`environment`) The environment to use for scoping.
+#'
+#' @importFrom desc desc
+#' @importFrom withr defer
+#'
+#' @keywords internal
+#' @examples
+#' verdepcheck:::local_description(
+#'   list(dplyr = "Import"),
+#'   remotes = "tidyverse/dplyr",
+#'   need_verdepcheck = "dplyr=tidyverse/dplyr@v1.1.0"
+#' )
+local_description <- function(pkg_list = c(pkgdepends = "Import"),
+                              remotes = c(),
+                              need_verdepcheck = c(),
+                              .local_envir = parent.frame()) {
+  d_std <- desc::desc("!new")
+
+  for (pkg in names(pkg_list)) {
+    d_std$set_dep(pkg, pkg_list[[pkg]])
+  }
+
+  for (remote in remotes) {
+    d_std$add_remotes(remote)
+  }
+
+  if (!is.null(need_verdepcheck) && length(need_verdepcheck) > 0) {
+    d_std$set(.desc_field, paste(need_verdepcheck, collapse = ", "))
+  }
+
+  path <- tempfile(pattern = "DESCRIPTION-")
+  d_std$write(path)
+  withr::defer(unlink(path), envir = .local_envir)
+
+  path
+}
+
+#' Parse through vector of `remote_ref` and retrieve one of the keys of each
+#' element
+#'
+#' Support function to reduce repetitive code
+#'
+#' @param x (`list`) list of lists where each internal list contains the same key
+#' @param key (`character(1)`) key of field to retrieve
+#'
+#' @keywords internal
+#'
+#' @examples
+#' verdepcheck:::map_key_character(
+#'   list(list(a = "1", b = "2"), list(a = "3", b = 
"4"), list(a = "5", b = "6")), +#' "a" +#' ) +map_key_character <- function(x, key) { + vapply(x, `[[`, character(1), key) +} diff --git a/README.md b/README.md index fe7b1bcb..4b7c75d1 100644 --- a/README.md +++ b/README.md @@ -25,11 +25,10 @@ Typical workflow includes the following: Supported strategies are: -- `max` - use the greatest (development) versions of dependencies -- `release` - use the latest release - using `@*release` reference of the `remotes` package -- `min` - use the minimal version of dependencies - -Please note that this is applicable only for direct dependencies of your package. Indirect dependencies are being installed using default installation process. +- `max` - use the greatest version of dependent packages. Please note that using development version is not guaranteed to be stable. +- `release` - use the released version of dependent packages. It will try use CRAN if possible else if GitHub release is available then use it else fail. +- `min_cohort` - find maximum date of directly dependent packages release dates and use that as PPM snapshot date for dependency resolve. +- `min_isolated` - for each direct dependency: find its release date and use it as PPM snapshot for resolving itself. Next, combine all the individual resolutions and resolve it altogether again. 
The main functions are: diff --git a/_pkgdown.yml b/_pkgdown.yml index 367852e9..98a8849f 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -25,7 +25,13 @@ reference: contents: - ends_with("_deps_installation_proposal") + - title: Installation Plan methods + desc: Methods on the installation plan objects + contents: + - ends_with("_ip") + - title: Find package reference desc: Determine package reference for appropriate version contents: - starts_with("get_ref_") + - get_release_date diff --git a/inst/training_202307/.gitignore b/inst/training_202307/.gitignore new file mode 100644 index 00000000..e0f49bcf --- /dev/null +++ b/inst/training_202307/.gitignore @@ -0,0 +1 @@ +*_files/* diff --git a/inst/training_202307/202307.pdf b/inst/training_202307/202307.pdf new file mode 100644 index 00000000..635118d7 Binary files /dev/null and b/inst/training_202307/202307.pdf differ diff --git a/inst/training_202307/202307.qmd b/inst/training_202307/202307.qmd new file mode 100644 index 00000000..ef2489b6 --- /dev/null +++ b/inst/training_202307/202307.qmd @@ -0,0 +1,307 @@ +--- +title: "`verdepcheck`" +author: "Pawel Rucki" +date: 2023-07-20 +date-format: long +format: revealjs +--- + +## Agenda + +1. Motivation +1. Solution +1. How to use? +1. How it works? +1. Discussion +1. Future work + +# Motivation + +```{.yaml code-line-numbers="7"} +Type: Package +Package: foo.package +Version: 1.2.3.9000 +Depends: + R (>= 3.6) +Imports: + dplyr, + (...) +``` + +The above specification indicates compatibility with **all** versions of `dplyr` which is very unlikely to be true. + +# Motivation + +```.default +Error: object ‘foo’ is not exported by 'namespace:bar' +``` + +
+ +```.default +`foo()` was deprecated in bar 1.0.0. +i Please use `baz()` instead. +``` + +
+ +The new versions of dependencies might introduce a breaking changes. + +# Motivation + +Many interdependent packages actively developed in parallel - when to increase dependency version requirement? + +Blindly increase all requirements might be incorrect as one package might be fully compatible with the older version of the other one. + +Keeping all unchanged is incorrect as there might be some _breaking_ changes. + +# Motivation + +R environments for clinical trials are usually not updated very frequently. + +Users might use `renv` for environment management and packages used might not get updates for some time. + +As a result, your package might be used with _older_ version of dependent packages. + +# Solution + +`verdepcheck` (**ver**sion of **dep**endencies **check**) - a tool for package developers to check your package against different set of dependencies. It is meant to be used primarily in CI. + +Available at: + +- [`insightsengineering/verdepcheck`](https://github.com/insightsengineering/verdepcheck) +- [`insightsengineering/r-verdepcheck-action`](https://github.com/insightsengineering/r-verdepcheck-action) + +:::{.callout-important} +Do not confuse `verdepcheck` with `revdepcheck` for reverse dependency checks. Those are totally different tools! +::: + +# + +What it is not about: + +- This does not perform matrix testing of _all_ versions of dependent packages. +- This does not perform search for optimal set of dependencies. It is more a _validator_ of dependencies specification. +- This only looks into package versions - in particular: it does not check R versions or system architectures. + +# + +Let's call a "different set of dependencies" a _strategy_. Currently there are four _strategies_ implemented: + +- **max** - use _development_ version of dependencies +- **release** - use _released_ version of dependencies (e.g. from CRAN) +- **min_cohort** and **min_isolated** - use _minimal_ version of dependencies as per `DESCRIPTION` file. 
+ +This tool is looking into a **direct** dependencies only. + +# What are the benefits? + +* It forces you to specify minimal version of package dependencies to prevent incorrect package usage. +* Incorporate information about upcoming breaking changes. + +# What's needed? + +For **max** strategy the algorithm will look for dependent packages references in a new section `Config/Needs/verdepcheck` of the `DESCRIPTION` file. + +For **release** and **min** it's not needed (unless non-CRAN packages). + +:::{.callout-note} +This might indicate some duplication with the `Remotes` section but it cannot be used because of side effects, i.e. configuration of `verdepcheck` should have no impact package installation. +::: + + +# How to use? + +`_deps_check(path)` + +```{r} +#| eval: false +#| echo: true +#| code-line-numbers: false + +library(verdepcheck) + +max_deps_check("/path/to/package") +release_deps_check("/path/to/package") +min_deps_check("/path/to/package") +``` + +# How to use? + +Multiple steps: + +```{r} +#| eval: false +#| echo: true +#| code-line-numbers: false + +library(verdepcheck) + +x <- new_max_deps_installation_proposal("/path/to/package") + +# resolve dependencies +solve_ip(x) + +# download dependencies +download_ip(x) + +# install +install_ip(x) + +# run R CMD CHECK +check_ip(x) +``` + +## How to use the output? 
+ +The main `_deps_check` function returns a list of two elements: + +- `"ip"` - installation proposal object from `pkgdepends` - see [docs](https://r-lib.github.io/pkgdepends/reference/pkg_installation_proposal.html#public-methods) +- `"check"` - returned value from `rcmdcheck::rcmdcheck()` - see [docs](https://rcmdcheck.r-lib.org/reference/rcmdcheck.html) + +```{r} +#| eval: false +#| echo: true +#| code-line-numbers: false + +x <- max_deps_check("/path/to/package") + +# show dependency resolution +x$ip$show_solution() +x$ip$draw() + +# create artifact +x$ip$create_lockfile("/path/to/pkg.lock") + +# print R CMD CHECK results +x$check$session_info +x$check$status +``` + +# How it works? + +This package is heavily based on [`pkgdepends`](https://r-lib.github.io/pkgdepends) for dependency detection and resolution and also [`rcmdcheck`](https://rcmdcheck.r-lib.org) for executing `"R CMD CHECK"`. + +It also uses other packages like `pkgcache`, `pkgbuild`, `desc`, `cli`, `gh`. + +
+ +Please also see: [r-lib/rcmdcheck/issues/195](https://github.com/r-lib/rcmdcheck/issues/195) and [r-lib/pkgdepends/issues/305](https://github.com/r-lib/pkgdepends/issues/305). Workarounds have been implemented. + +## The algorithm + +1. Read package dependencies (alongside minimal versions) from `DESCRIPTION` file using Depends, Imports, Suggests sections. +1. Derive package reference according to the strategy used. +1. Resolve dependency tree. +1. Download package sources. +1. Build and install into temporary directory. +1. Execute `"R CMD CHECK"` using directory from the previous step as a library path. + +## Package reference format + +We are using `pkgdepends` (which is then calling `pak`) for package installation. It uses specific format for [package references](https://pak.r-lib.org/reference/pak_package_sources.html). + +Few examples: + +``` + +dplyr +dplyr@1.0.0 +cran::dplyr + +foo/bar +foo/bar@12ab3456 +github::foo/bar + +bioc::S4Vectors + +/absolute/path/to/package/dir +local::. + +deps::. +``` + +## Find package references - the algorithm {.smaller} + +- **max** (`get_ref_max()`) + + Use reference provided in `Config/Needs/verdepcheck` as is. + +- **release** (`get_ref_release()`) + + For CRAN pkg: use standard reference (i.e. package name only). + + For GitHub pkg: find the latest release reference. + +- **min_cohort** (`get_ref_min()`, `get_release_date()`) + + For each directly dependent package: + + - Find the lowest possible version for which version condition is met. Take the earliest possible if no condition. Use CRAN archive and GH releases or tags if no releases. + + - Derive release (commit) date. + + Calculate maximal release date and use it as PPM snapshot when resolving dependency tree. + +- **min_isolated** (`get_ref_min()`, `get_release_date()`) + + Similarly to the above but resolve each of the directly dependent packages separately using its own release date as PPM snapshot date. 
Next, combine all resolutions and resolve once again (aggregating by max version).
+
+## Discussion on the **max** strategy
+
+By definition, it is using a non-stable, development package version.
+
+The result is not _stable_ - it utilizes a dynamic type of reference which might result in different results each time you run this function.
+
+For packages that do not practice a version increase on each commit, the package version number is not a valid package state identifier. It might happen that multiple package states will be `v1.2.3` each. Use an md5 sum or a package source reference instead.
+
+This shouldn't be an obligatory type of check. Its failure should be incorporated as a notification about upcoming dependent breaking changes.
+
+## Discussion on the **release** strategy
+
+By definition, it is using a stable package version.
+
+Similarly to the previous one, the result is not _stable_ - it utilizes a dynamic type of reference which might result in different results each time you run this function.
+
+## Discussion on the **min** strategy
+
+It is quite a complex topic how to set up an environment with packages that respect minimal version requirements and also are:
+
+- **installable**
+
+- **resolvable** - no conflicts when resolving the joint dependency tree
+
+- **coherent** - each package fully compatible with its dependencies. In practice it means that a package cannot be released earlier than any of its dependencies.
+
+This package does not aim to find an optimal minimal environment but is rather a validator of an existing dependency specification.
+
+## Discussion on the **min** strategy
+
+**min_cohort** fulfills all of the above criteria but it doesn't necessarily mean it's the optimum (i.e. minimal possible).
+
+The coherence attribute is debatable as many user errors we have encountered stem from a non-coherent environment. Yet another question is to what extent of incoherency we should allow the target environment to be. 
**min_isolated** strategy propose one option by joining individual, self-coherent trees which after joining doesn't have to be coherent anymore. + +## Real-life observations: + +- Some very old packages are not compilable on modern machines and you have to increase versions to make it installable. + +- The package resolution is just the first step. The next one is to use them in `R CMD CHECK` and validate its compatibility. + +- In order to enter GHA debug lines, `verdepcheck` will continue on error entering `R CMD CHECK` step when e.g. dependency resolve failed. If you encounter `vignette builder 'knitr' not found` error - most likely you failed on resolve or install. + +## When to run it? + +As discussed earlier, most of the tests are not _stable_. Therefore, this test should be triggered periodically and not on code change (unless changes includes dependencies and its usage). Triggering it on change might give a false message that a failure stems from code changes which doesn't have to be true. + +# Future work: + +- Idea: add `branch` strategy with recursive resolve for possible replacement of `staged.dependencies`. +- Idea: add custom strategies. +- Idea: add recursive version lookups. +- Idea: new functionality (probably a separate package) that gets you optimal minimal dependency specification / environment. +- Enhance `r-verdepcheck-action` output readability. +- Remove workarounds implemented once referenced issues are closed. + +# THANK YOU! + +# diff --git a/man/check_if_on_cran.Rd b/man/check_if_on_cran.Rd index 78b1364a..4fe8146b 100644 --- a/man/check_if_on_cran.Rd +++ b/man/check_if_on_cran.Rd @@ -4,9 +4,17 @@ \alias{check_if_on_cran} \title{Check if package is available on CRAN.} \usage{ -check_if_on_cran(remote_ref) +check_if_on_cran(remote_ref, op = "", op_ver = "") } \description{ Check if package is available on CRAN. 
} +\examples{ +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +verdepcheck:::check_if_on_cran(list(package = "dplyr")) +verdepcheck:::check_if_on_cran(list(package = "dplyr"), op = ">=", op_ver = "1.1.0") +verdepcheck:::check_if_on_cran(list(package = "dplyr"), op = ">=", op_ver = "9999.9.99") +verdepcheck:::check_if_on_cran(list(package = "dplyr"), op = "<", op_ver = "0.0.0") +\dontshow{\}) # examplesIf} +} \keyword{internal} diff --git a/man/check_ip.Rd b/man/check_ip.Rd index ef39b2eb..060115dd 100644 --- a/man/check_ip.Rd +++ b/man/check_ip.Rd @@ -7,7 +7,7 @@ check_ip(ip, path, build_args = character(), check_args = character(), ...) } \arguments{ -\item{ip}{(\code{pkg_installation_plan}) object to extract \code{libpath} from} +\item{ip}{(\code{pkg_installation_plan}) object to execute} \item{path}{(\code{string}) path to the package sources} @@ -28,4 +28,3 @@ Executes \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}} on a local pa \seealso{ \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}} for other configuration possibilities } -\keyword{internal} diff --git a/man/check_valid_version.Rd b/man/check_valid_version.Rd new file mode 100644 index 00000000..5cccaca8 --- /dev/null +++ b/man/check_valid_version.Rd @@ -0,0 +1,24 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/desc_utils.R +\name{check_valid_version} +\alias{check_valid_version} +\title{Check for package versions that comply with an operator and version} +\usage{ +check_valid_version(x, op, op_ver) +} +\arguments{ +\item{x}{\code{vector} of valid package versions.} + +\item{op}{\code{character(1)} relational operator (\code{>=}, \code{==}, ...)} + +\item{op_ver}{\code{character(1)} or \code{package_version(1)} with version to compare +with using a relational operator.} +} +\description{ +Check for package versions that comply with an operator and version +} +\examples{ 
+versions <- paste(1:10, 0, sep = ".") +verdepcheck:::check_valid_version(versions, ">=", "3.1") +} +\keyword{internal} diff --git a/man/cli_pb_init.Rd b/man/cli_pb_init.Rd new file mode 100644 index 00000000..62a5daa9 --- /dev/null +++ b/man/cli_pb_init.Rd @@ -0,0 +1,12 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{cli_pb_init} +\alias{cli_pb_init} +\title{Create \code{cli} progress bar for resolving versions.} +\usage{ +cli_pb_init(type, total, ...) +} +\description{ +Create \code{cli} progress bar for resolving versions. +} +\keyword{internal} diff --git a/man/deps_check.Rd b/man/deps_check.Rd index 307ac395..2d450648 100644 --- a/man/deps_check.Rd +++ b/man/deps_check.Rd @@ -3,12 +3,13 @@ \name{max_deps_check} \alias{max_deps_check} \alias{release_deps_check} -\alias{min_deps_check} +\alias{min_cohort_deps_check} +\alias{min_isolated_deps_check} \title{Execute \verb{R CMD CHECK} on a local package with all dependencies pre-installed using various strategies.} \usage{ max_deps_check( path, - config = list(dependencies = .desc_field, library = tempfile()), + config = list(), build_args = character(), check_args = character(), ... @@ -16,15 +17,23 @@ max_deps_check( release_deps_check( path, - config = list(dependencies = .desc_field, library = tempfile()), + config = list(), build_args = character(), check_args = character(), ... ) -min_deps_check( +min_cohort_deps_check( path, - config = list(dependencies = .desc_field, library = tempfile()), + config = list(), + build_args = character(), + check_args = character(), + ... +) + +min_isolated_deps_check( + path, + config = list(), build_args = character(), check_args = character(), ... @@ -34,9 +43,7 @@ min_deps_check( \item{path}{(\code{string}) path to the package sources} \item{config}{(\code{list}) configuration options. See \code{\link[pkgdepends:pkg_config]{pkgdepends::pkg_config}} for details. 
-If it does not include \code{library} then temporary directory is used which simulates clean environment -without using any pre-installed packages. -If it does not include \code{dependencies} then \code{TRUE} value is used which means all hard dependencies plus \code{Suggests}.} +\code{"dependencies"} and \code{"library"} elements are overwritten by package level defaults.} \item{build_args}{(\code{string}) value passed as \code{build_args} argument into \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}}} @@ -58,17 +65,50 @@ Execute \verb{R CMD CHECK} on a local package with all dependencies pre-installe Currently implemented strategies: \itemize{ -\item \code{max} - use the greatest version of dependent packages -\item \code{release} - use released version of dependent packages - use CRAN if possible else if GitHub release -is available then use it else fail. -\item \code{min} - use the lowest version of dependent packages incorporating minimal version specification in -\code{"Imports"} and \code{"Suggests"}. If no version is specified then the minimal available -version is assumed. See \link{get_ref_min} for details how the minimal version is determined. +\item \code{max} - use the greatest version of dependent packages. Please note that using development version is not +guaranteed to be stable. +See \link{get_ref_max} for details. +\item \code{release} - use the released version of dependent packages. It will try use CRAN if possible else if +GitHub release is available then use it else fail. +See \link{get_ref_release} for details. +\item \code{min_cohort} - find maximum date of directly dependent packages release dates and use that as PPM snapshot date +for dependency resolve. +\item \code{min_isolated} - for each direct dependency: find its release date and use it as PPM snapshot for resolving itself. +Next, combine all the individual resolutions and resolve it altogether again. } -Any modification is done for \emph{direct} dependencies. 
Indirect ones are installed as usual. +Both "min" strategies relies on PPM snapshot in order to limit the versions of indirect dependencies so that +dependency resolution ends with a package released no earlier than any of its dependency. +However, that's not always true for \code{min_isolated} strategy - done on purpose. + +Please note that only \code{min_cohort} and \code{min_isolated} strategies are "stable". The rest are basing on dynamic +references therefore it results might be different without changes in tested package. +The most straightforward example is \code{max} strategy in which the environment will be different after any push of +any of the dependencies. } +\examples{ +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +x <- max_deps_check(".") +x$ip +x$check +\dontshow{\}) # examplesIf} +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +x <- release_deps_check(".") +x$ip +x$check +\dontshow{\}) # examplesIf} +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +x <- min_cohort_deps_check(".") +x$ip +x$check +\dontshow{\}) # examplesIf} +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +x <- min_isolated_deps_check(".") +x$ip +x$check +\dontshow{\}) # examplesIf} +} \seealso{ \link{deps_installation_proposal} } diff --git a/man/deps_installation_proposal.Rd b/man/deps_installation_proposal.Rd index 7532c199..b468b30f 100644 --- a/man/deps_installation_proposal.Rd +++ b/man/deps_installation_proposal.Rd @@ -4,31 +4,23 @@ \alias{new_max_deps_installation_proposal} \alias{deps_installation_proposal} \alias{new_release_deps_installation_proposal} -\alias{new_min_deps_installation_proposal} +\alias{new_min_cohort_deps_installation_proposal} 
+\alias{new_min_isolated_deps_installation_proposal} \title{Create installation proposal using various dependency strategies} \usage{ -new_max_deps_installation_proposal( - path, - config = list(dependencies = .desc_field, library = tempfile()) -) +new_max_deps_installation_proposal(path, config = list()) -new_release_deps_installation_proposal( - path, - config = list(dependencies = .desc_field, library = tempfile()) -) +new_release_deps_installation_proposal(path, config = list()) -new_min_deps_installation_proposal( - path, - config = list(dependencies = .desc_field, library = tempfile()) -) +new_min_cohort_deps_installation_proposal(path, config = list()) + +new_min_isolated_deps_installation_proposal(path, config = list()) } \arguments{ \item{path}{(\code{string}) path to the package sources} \item{config}{(\code{list}) configuration options. See \code{\link[pkgdepends:pkg_config]{pkgdepends::pkg_config}} for details. -If it does not include \code{library} then temporary directory is used which simulates clean environment -without using any pre-installed packages. -If it does not include \code{dependencies} then \code{TRUE} value is used which means all hard dependencies plus \code{Suggests}.} +\code{"dependencies"} and \code{"library"} elements are overwritten by package level defaults.} } \value{ \code{pkg_installation_plan} object @@ -42,25 +34,37 @@ as described below. Currently implemented strategies: \itemize{ -\item \code{max} - use the greatest version of dependent packages -\item \code{release} - use released version of dependent packages - use CRAN if possible else if GitHub release -is available then use it else fail. -\item \code{min} - use the lowest version of dependent packages incorporating minimal version specification in -\code{"Imports"} and \code{"Suggests"}. If no version is specified then the minimal available -version is assumed. See \link{get_ref_min} for details how the minimal version is determined. 
+\item \code{max} - use the greatest version of dependent packages. Please note that using development version is not +guaranteed to be stable. +See \link{get_ref_max} for details. +\item \code{release} - use the released version of dependent packages. It will try use CRAN if possible else if +GitHub release is available then use it else fail. +See \link{get_ref_release} for details. +\item \code{min_cohort} - find maximum date of directly dependent packages release dates and use that as PPM snapshot date +for dependency resolve. +\item \code{min_isolated} - for each direct dependency: find its release date and use it as PPM snapshot for resolving itself. +Next, combine all the individual resolutions and resolve it altogether again. } -Any modification is done for \emph{direct} dependencies. Indirect ones are installed as usual. +Both "min" strategies relies on PPM snapshot in order to limit the versions of indirect dependencies so that +dependency resolution ends with a package released no earlier than any of its dependency. +However, that's not always true for \code{min_isolated} strategy - done on purpose. + +Please note that only \code{min_cohort} and \code{min_isolated} strategies are "stable". The rest are basing on dynamic +references therefore it results might be different without changes in tested package. +The most straightforward example is \code{max} strategy in which the environment will be different after any push of +any of the dependencies. } \section{configuration}{ \code{verdepcheck} will look into \code{"Config/Needs/verdepcheck"} field of the \code{DESCRIPTION} file for dependent packages -references. See \code{\link[pkgdepends:pkg_refs]{pkgdepends::pkg_refs}} for details. -Some functions are supported only for package references from GitHub. -If you specify additional details (i.e. tag, commit, PR or \verb{@*release}) then it wouldn't be changed. 
Therefore, -in order to make full use of various strategies, it is recommended to specify general reference in form of +references. See \code{\link[pkgdepends:pkg_refs]{pkgdepends::pkg_refs}} for details and this package \code{DESCRIPTION} file for an example. +Please note that some features are enabled only for package references from GitHub. +If you specify additional details (i.e. tag, commit, PR or \verb{@*release}) in the reference then it wouldn't be changed. +Therefore, in order to make full use of various strategies, it is recommended to specify general reference in form of \verb{[=][github::]/[/]} - i.e. without \verb{[]} part. +Please see also \code{\link[pkgdepends:pkg_config]{pkgdepends::pkg_config}} and \code{\link[pak:pak-config]{pak::pak-config}} for other configuration possibilities. } \examples{ @@ -75,8 +79,13 @@ x$solve() x$get_solution() \dontshow{\}) # examplesIf} \dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} -x <- new_min_deps_installation_proposal(".") -x$solve() +x <- new_min_cohort_deps_installation_proposal(".") +solve_ip(x) +x$get_solution() +\dontshow{\}) # examplesIf} +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +x <- new_min_isolated_deps_installation_proposal(".") +solve_ip(x) x$get_solution() \dontshow{\}) # examplesIf} } diff --git a/man/desc_cond_set_refs.Rd b/man/desc_cond_set_refs.Rd index 961619f7..cdaea6a0 100644 --- a/man/desc_cond_set_refs.Rd +++ b/man/desc_cond_set_refs.Rd @@ -1,5 +1,5 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/deps_installation_proposal.R +% Please edit documentation in R/desc_utils.R \name{desc_cond_set_refs} \alias{desc_cond_set_refs} \title{Set \code{"Config/Needs/verdepcheck"} section into the \code{desc} object if not empty else clear this section.} diff --git a/man/desc_remotes_cleanup.Rd 
b/man/desc_remotes_cleanup.Rd new file mode 100644 index 00000000..d575c6ed --- /dev/null +++ b/man/desc_remotes_cleanup.Rd @@ -0,0 +1,40 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/desc_utils.R +\name{desc_remotes_cleanup} +\alias{desc_remotes_cleanup} +\title{Replace Remotes in the \code{desc} that have been resolved to a GitHub tag or are +in CRAN.} +\usage{ +desc_remotes_cleanup(d) +} +\arguments{ +\item{d}{(\code{desc}) DESCRIPTION object} +} +\description{ +Replaces any existing Remotes entry with the resolved GitHub tag from +\code{Config/Needs/verdepcheck}. +} +\examples{ +# Example that should replace dplyr & tibble on Remotes but not pkgdepends + +d <- desc::desc( + file = verdepcheck:::local_description( + list( + dplyr = "Import", + tibble = "Import", + pkgdepends = "Import" + ), + remotes = c( + "tidyverse/dplyr@*release", + "tidyverse/tibble@*release", + "r-lib/pkgdepends@*release" + ), + need_verdepcheck = c( + "dplyr", + "tibble=tidyverse/tibble@v3.2.1" + ) + ) +) +verdepcheck:::desc_remotes_cleanup(d) +} +\keyword{internal} diff --git a/man/desc_to_ip.Rd b/man/desc_to_ip.Rd index abdab7fd..3bc72b2a 100644 --- a/man/desc_to_ip.Rd +++ b/man/desc_to_ip.Rd @@ -1,5 +1,5 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/deps_installation_proposal.R +% Please edit documentation in R/desc_utils.R \name{desc_to_ip} \alias{desc_to_ip} \title{Create \code{installation_plan} object from \code{desc} object} diff --git a/man/solve_ignore_remotes_release.Rd b/man/download_ip.Rd similarity index 62% rename from man/solve_ignore_remotes_release.Rd rename to man/download_ip.Rd index 511289c2..c66056a9 100644 --- a/man/solve_ignore_remotes_release.Rd +++ b/man/download_ip.Rd @@ -1,12 +1,17 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/check.R -\name{solve_ignore_remotes_release} -\alias{solve_ignore_remotes_release} +\name{download_ip} +\alias{download_ip} 
\title{Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts.} \usage{ -solve_ignore_remotes_release(ip) +download_ip(ip) +} +\arguments{ +\item{ip}{(\code{pkg_installation_plan}) object to execute} +} +\value{ +\code{pkg_installation_plan} object invisibly } \description{ Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts. } -\keyword{internal} diff --git a/man/deps_check_internal.Rd b/man/execute_ip.Rd similarity index 82% rename from man/deps_check_internal.Rd rename to man/execute_ip.Rd index bc3c23a3..8ba67d50 100644 --- a/man/deps_check_internal.Rd +++ b/man/execute_ip.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/check.R -\name{deps_check_internal} -\alias{deps_check_internal} -\title{Executes installation plan and \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}}} +\name{execute_ip} +\alias{execute_ip} +\title{Executes installation plan and \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}} in "try mode" to always return.} \usage{ -deps_check_internal(ip, path, build_args, check_args, ...) +execute_ip(ip, path, build_args, check_args, ...) } \arguments{ \item{ip}{(\code{pkg_installation_plan}) object to execute} @@ -25,6 +25,5 @@ a named \code{list} with two elements: } } \description{ -Executes installation plan and \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}} +Executes installation plan and \code{\link[rcmdcheck:rcmdcheck]{rcmdcheck::rcmdcheck()}} in "try mode" to always return. 
} -\keyword{internal} diff --git a/man/filter_valid_version.Rd b/man/filter_valid_version.Rd new file mode 100644 index 00000000..a036904a --- /dev/null +++ b/man/filter_valid_version.Rd @@ -0,0 +1,24 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/desc_utils.R +\name{filter_valid_version} +\alias{filter_valid_version} +\title{Filter for package versions that comply with an operator and version} +\usage{ +filter_valid_version(x, op, op_ver) +} +\arguments{ +\item{x}{\code{vector} of valid package versions.} + +\item{op}{\code{character(1)} relational operator (\code{>=}, \code{==}, ...)} + +\item{op_ver}{\code{character(1)} or \code{package_version(1)} with version to compare +with using a relational operator.} +} +\description{ +Filter for package versions that comply with an operator and version +} +\examples{ +versions <- paste(1:10, 0, sep = ".") +verdepcheck:::filter_valid_version(versions, ">=", "3.1") +} +\keyword{internal} diff --git a/man/get_cran_data.Rd b/man/get_cran_data.Rd new file mode 100644 index 00000000..34e6f37d --- /dev/null +++ b/man/get_cran_data.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_ref.R +\name{get_cran_data} +\alias{get_cran_data} +\title{Get CRAN/Bioconductor metadata information on packages} +\usage{ +get_cran_data(package) +} +\description{ +Get CRAN/Bioconductor metadata information on packages +} +\examples{ +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +verdepcheck:::get_cran_data("dplyr") +verdepcheck:::get_cran_data("SummarizedExperiment") +\dontshow{\}) # examplesIf} +} +\keyword{internal} diff --git a/man/get_desc_field_pkgs.Rd b/man/get_desc_field_pkgs.Rd new file mode 100644 index 00000000..774ebf11 --- /dev/null +++ b/man/get_desc_field_pkgs.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in 
R/desc_utils.R +\name{get_desc_field_pkgs} +\alias{get_desc_field_pkgs} +\title{Get the packages from the custom configuration field} +\usage{ +get_desc_field_pkgs(d) +} +\arguments{ +\item{d}{(\code{desc}) DESCRIPTION object from \link[desc:description]{desc::description}} +} +\value{ +character string +} +\description{ +Get the packages from the custom configuration field +} +\keyword{internal} diff --git a/man/get_desc_from_gh.Rd b/man/get_desc_from_gh.Rd new file mode 100644 index 00000000..2bf134a7 --- /dev/null +++ b/man/get_desc_from_gh.Rd @@ -0,0 +1,16 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_ref.R +\name{get_desc_from_gh} +\alias{get_desc_from_gh} +\title{Get DESCRIPTION from GitHub Repository} +\usage{ +get_desc_from_gh(org, repo, ref = "") +} +\description{ +Get DESCRIPTION from GitHub Repository +} +\examples{ +verdepcheck:::get_desc_from_gh("tidyverse", "dplyr") +verdepcheck:::get_desc_from_gh("tidyverse", "dplyr", "v1.1.0") +} +\keyword{internal} diff --git a/man/get_ref_min.Rd b/man/get_ref_min.Rd index 2a9d7fdb..e81310bb 100644 --- a/man/get_ref_min.Rd +++ b/man/get_ref_min.Rd @@ -37,7 +37,7 @@ to obtain historical data. } \itemize{ -\item for GitHub type of remote - this would use \code{\link[gh:gh_gql]{gh::gh_gql()}} to get list of all tags +\item for GitHub type of remote - this would use \code{\link[gh:gh_gql]{gh::gh_gql()}} to get list of all releases or tags and then \code{\link[gh:gh]{gh::gh()}} to download \code{DESCRIPTION} file and then read package version. 
} } @@ -56,4 +56,3 @@ verdepcheck:::get_ref_min(pkgdepends::parse_pkg_ref("cran/dplyr")) \seealso{ \code{\link[=get_ref_min_incl_cran]{get_ref_min_incl_cran()}} } -\keyword{internal} diff --git a/man/get_ref_min_incl_cran.Rd b/man/get_ref_min_incl_cran.Rd index 32dddaa9..d0d12517 100644 --- a/man/get_ref_min_incl_cran.Rd +++ b/man/get_ref_min_incl_cran.Rd @@ -37,4 +37,3 @@ verdepcheck:::get_ref_min_incl_cran(pkgdepends::parse_pkg_ref("cran/dplyr")) \seealso{ \code{\link[=get_ref_min_incl_cran]{get_ref_min_incl_cran()}} } -\keyword{internal} diff --git a/man/get_refs_from_desc.Rd b/man/get_refs_from_desc.Rd index bc60cd87..52e55fe4 100644 --- a/man/get_refs_from_desc.Rd +++ b/man/get_refs_from_desc.Rd @@ -1,15 +1,12 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/deps_installation_proposal.R +% Please edit documentation in R/desc_utils.R \name{get_refs_from_desc} \alias{get_refs_from_desc} -\title{Read DESCRIPTION file and return list of references. -Returned list is an union between references specified in \code{"Config/Needs/verdepcheck"} field and -standard references for all other not covered dependencies.} +\title{Read DESCRIPTION file and return list of references.} \usage{ get_refs_from_desc(d) } \description{ -Read DESCRIPTION file and return list of references. Returned list is an union between references specified in \code{"Config/Needs/verdepcheck"} field and standard references for all other not covered dependencies. 
} @@ -19,7 +16,7 @@ d <- desc::desc("!new") d$set_dep("foo", "Import") d$set_dep("bar", "Suggest") d$set_list("Config/Needs/verdepcheck", "foo/bar") -d$set_list("Config/Needs/verdepcheck", "foo/baz") # not in pacakge deps - will be skipped +d$set_list("Config/Needs/verdepcheck", "foo/baz") # not in package deps - will be skipped get_refs_from_desc(d) \dontshow{\}) # examplesIf} } diff --git a/man/get_release_date.Rd b/man/get_release_date.Rd new file mode 100644 index 00000000..d36fe28f --- /dev/null +++ b/man/get_release_date.Rd @@ -0,0 +1,37 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_ref.R +\name{get_release_date} +\alias{get_release_date} +\alias{get_release_date.remote_ref_github} +\alias{get_release_date.remote_ref_cran} +\title{Get release date.} +\usage{ +get_release_date(remote_ref) + +\method{get_release_date}{remote_ref_github}(remote_ref) + +\method{get_release_date}{remote_ref_cran}(remote_ref) +} +\arguments{ +\item{remote_ref}{(\code{remote_ref}) object created with \code{\link[pkgdepends:parse_pkg_refs]{pkgdepends::parse_pkg_ref()}}} +} +\value{ +(\code{remote_ref}) object with the package reference +} +\description{ +Get release date. 
+ +Get release date from GitHub references + +Get release date from GitHub references +} +\examples{ +\dontshow{if (gh::gh_token() != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +remote_ref <- pkgdepends::parse_pkg_ref("tidyverse/dplyr@v1.1.0") +get_release_date(remote_ref) +\dontshow{\}) # examplesIf} +\dontshow{if (Sys.getenv("R_USER_CACHE_DIR", "") != "") (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} +remote_ref <- pkgdepends::parse_pkg_ref("dplyr@1.1.0") +get_release_date.remote_ref_cran(remote_ref) +\dontshow{\}) # examplesIf} +} diff --git a/man/install_ip.Rd b/man/install_ip.Rd index 31254c9d..89cf447f 100644 --- a/man/install_ip.Rd +++ b/man/install_ip.Rd @@ -21,4 +21,3 @@ This function would executes the following: \item installs all package dependencies } } -\keyword{internal} diff --git a/man/local_description.Rd b/man/local_description.Rd new file mode 100644 index 00000000..29c914d7 --- /dev/null +++ b/man/local_description.Rd @@ -0,0 +1,38 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{local_description} +\alias{local_description} +\title{Temporarily create a valid DESCRIPTION file to a location that will be deleted} +\usage{ +local_description( + pkg_list = c(pkgdepends = "Import"), + remotes = c(), + need_verdepcheck = c(), + .local_envir = parent.frame() +) +} +\arguments{ +\item{pkg_list}{(\code{vector}) named character vector or list with +paired name and type of dependency. 
It supports versions by using quotes on +the key} + +\item{remotes}{(\code{vector}) string vector that contains remotes to add to +the DESCRIPTION file} + +\item{need_verdepcheck}{(\code{vector}) string vector that contains +\code{Config/Need/verdepcheck} elements to add to the DESCRIPTION file} + +\item{.local_envir}{(\code{environment}) The environment to use for scoping.} +} +\description{ +The file is deleted after the parent environment where this function was called +has exited, when the R session ends or on demand via \code{\link[withr:defer]{withr::deferred_run()}} +} +\examples{ +verdepcheck:::local_description( + list(dplyr = "Import"), + remotes = "tidyverse/dplyr", + need_verdepcheck = "dplyr=tidyverse/dplyr@v1.1.0" +) +} +\keyword{internal} diff --git a/man/map_key_character.Rd b/man/map_key_character.Rd new file mode 100644 index 00000000..05e6c552 --- /dev/null +++ b/man/map_key_character.Rd @@ -0,0 +1,24 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{map_key_character} +\alias{map_key_character} +\title{Parse through vector of \code{remote_ref} and retrieve one of the keys of each +element} +\usage{ +map_key_character(x, key) +} +\arguments{ +\item{x}{(\code{list}) list of lists where each internal list contains the same key} + +\item{key}{(\code{character(1)}) key of field to retrieve} +} +\description{ +Support function to reduce repetitive code +} +\examples{ +verdepcheck:::map_key_character( + list(list(a = "1", b = "2"), list(a = "3", b = "4"), list(a = "5", b = "6")), + "a" +) +} +\keyword{internal} diff --git a/man/resolve_ignoring_release_remote.Rd b/man/resolve_ignoring_release_remote.Rd new file mode 100644 index 00000000..d9eba900 --- /dev/null +++ b/man/resolve_ignoring_release_remote.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/solve.R +\name{resolve_ignoring_release_remote} +\alias{resolve_ignoring_release_remote} +\title{If
solution errors finishes with "dependency conflict" error then +re-try again ignoring "@*release" remote refs for detected conflicts.} +\usage{ +resolve_ignoring_release_remote(ip) +} +\arguments{ +\item{ip}{(\code{pkg_installation_plan}) object to execute} +} +\value{ +\code{pkg_installation_plan} object invisibly +} +\description{ +If solution errors finishes with "dependency conflict" error then +re-try again ignoring "@*release" remote refs for detected conflicts. +} +\keyword{internal} diff --git a/man/resolve_ppm_snapshot.Rd b/man/resolve_ppm_snapshot.Rd new file mode 100644 index 00000000..11b8613c --- /dev/null +++ b/man/resolve_ppm_snapshot.Rd @@ -0,0 +1,12 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{resolve_ppm_snapshot} +\alias{resolve_ppm_snapshot} +\title{Resolve the dependencies of package based on the release date + 1} +\usage{ +resolve_ppm_snapshot(pkg_ref_str, operator, pkg_version) +} +\description{ +Resolve the dependencies of package based on the release date + 1 +} +\keyword{internal} diff --git a/man/solve_ip.Rd b/man/solve_ip.Rd index a6fdce45..5796790c 100644 --- a/man/solve_ip.Rd +++ b/man/solve_ip.Rd @@ -1,12 +1,17 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/check.R +% Please edit documentation in R/solve.R \name{solve_ip} \alias{solve_ip} -\title{Try to solve using standard method. If error - use \link{solve_ignore_remotes_release}.} +\title{Try to solve using standard method. If error - use \link{resolve_ignoring_release_remote}.} \usage{ solve_ip(ip) } +\arguments{ +\item{ip}{(\code{pkg_installation_plan}) object to execute} +} +\value{ +\code{pkg_installation_plan} object invisibly +} \description{ -Try to solve using standard method. If error - use \link{solve_ignore_remotes_release}. +Try to solve using standard method. If error - use \link{resolve_ignoring_release_remote}. 
} -\keyword{internal} diff --git a/man/solve_ip.min_isolated_deps_installation_proposal.Rd b/man/solve_ip.min_isolated_deps_installation_proposal.Rd new file mode 100644 index 00000000..ee71f151 --- /dev/null +++ b/man/solve_ip.min_isolated_deps_installation_proposal.Rd @@ -0,0 +1,13 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/solve.R +\name{solve_ip.min_isolated_deps_installation_proposal} +\alias{solve_ip.min_isolated_deps_installation_proposal} +\title{Try to solve using min_isolated method. If Error - use \link{resolve_ignoring_release_remote}} +\usage{ +\method{solve_ip}{min_isolated_deps_installation_proposal}(ip) +} +\description{ +For each direct dependency, resolve that package using PPM snapshot as of release date + 1. +Finally, combine resolutions and run solve. +} +\keyword{internal} diff --git a/man/solve_ip_ignore_remotes_release.Rd b/man/solve_ip_ignore_remotes_release.Rd new file mode 100644 index 00000000..2816963d --- /dev/null +++ b/man/solve_ip_ignore_remotes_release.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/solve.R +\name{solve_ip_ignore_remotes_release} +\alias{solve_ip_ignore_remotes_release} +\title{Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts.} +\usage{ +solve_ip_ignore_remotes_release(ip) +} +\arguments{ +\item{ip}{(\code{pkg_installation_plan}) object to execute} +} +\value{ +\code{pkg_installation_plan} object invisibly +} +\description{ +Solve installation plan ignoring entries with "@*release" remote refs for detected conflicts. 
+} +\keyword{internal} diff --git a/man/version_from_desc.Rd b/man/version_from_desc.Rd new file mode 100644 index 00000000..76f9f27b --- /dev/null +++ b/man/version_from_desc.Rd @@ -0,0 +1,26 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/desc_utils.R +\name{version_from_desc} +\alias{version_from_desc} +\title{Get package version from description} +\usage{ +version_from_desc(d, pkg_name) +} +\arguments{ +\item{d}{(\code{desc}) DESCRIPTION object from \link[desc:description]{desc::description}} + +\item{pkg_name}{(\code{character}) Package name} +} +\description{ +Get package version from description +} +\examples{ +d <- desc::desc(cmd = "!new") + +d$set_dep("magrittr", type = "Imports", version = "*") +verdepcheck:::version_from_desc(d, "magrittr") + +d$set_dep("magrittr", type = "Imports", version = ">= 1.5") +verdepcheck:::version_from_desc(d, "magrittr") +} +\keyword{internal} diff --git a/tests/testthat/helper.R b/tests/testthat/helper.R index 0f013d24..30d14488 100644 --- a/tests/testthat/helper.R +++ b/tests/testthat/helper.R @@ -1,6 +1,15 @@ +expect_latest_ppm <- function(uri) { + expect_no_match(uri, "/[0-9]{4}-[0-9]{2}-[0-9]{2}$") +} + skip_if_offline <- function() { + # Using this conditional clause instead of `skip_if_not_installed` to provide + # a better message + if (!requireNamespace("pingr", quietly = TRUE)) { + skip("`pingr` cannot be loaded, can't test internet connection.") + } res <- tryCatch( - pingr::ping("https://api.github.com", count = 1L), + pingr::ping("api.github.com", count = 1L), error = function(e) NA ) @@ -15,3 +24,95 @@ skip_if_empty_gh_token <- function() { if (isFALSE(res)) skip("Not run with empty GH token") } + +#' Aggregator of tests to generally perform on proposals +#' +#' @param x (`pkg_installation_proposal` object) Valid proposal created by one +#' of the available methods. +#' @param pkg_name (`string`) Name of package that is being tested for version. 
+#' @param platform (optional `string`) Name of the platform, should be 'source' in +#' most cases. +#' @param pkg_ver_target (optional `string`) version that is expected to be in the +#' proposal. A `NULL` value indicates to use the latest version on CRAN or a +#' GitHub repository reference +#' @param pkg_gh_str (optional `string`) GitHub repository reference to retrieve +#' the version that is on the main branch. When both this parameter and +#' `pkg_ver_target` are `NULL`, then it will compare the version in the proposal +#' with the latest version in CRAN. +#' @param solve_ip_flag (optional `logical`) indicates if the installation +#' proposal should be solved +#' +#' @keywords internal +test_proposal_common <- function(x, + pkg_name = "pkgdepends", + platform = "source", + pkg_ver_target = NULL, + pkg_gh_str = NULL, + solve_ip_flag = TRUE) { + expect_s3_class(x, "pkg_installation_proposal") + + # Allows to re-use x across packages without having to solve it again + if (solve_ip_flag) solve_ip(x) + + expect_equal(x$get_solution()$status, "OK") + + x_solution <- x$get_resolution() + + x_solution_pkg <- subset( + x_solution, + package == pkg_name & platform == "source" + ) + + expect_true(nrow(x_solution_pkg) >= 1) + expect_true(any(x_solution_pkg$status == "OK")) + + pkg_ver_act <- max(package_version(x_solution_pkg$version)) + + # If there is no specific version to check, then compare against latest from + # CRAN + if (is.null(pkg_gh_str) && is.null(pkg_ver_target)) { + pkg_ver_target <- package_version( + available.packages( + repos = pkgcache::default_cran_mirror(), + filters = list( + add = TRUE, function(x) x[x[, "Package"] == pkg_name, ] + ) + )[["Version"]] + ) + } else if (!is.null(pkg_gh_str) && is.null(pkg_ver_target)) { + gh_str_split <- strsplit(pkg_gh_str, "/")[[1]] + pkg_ver_target <- package_version(as.character( + get_desc_from_gh(gh_str_split[1], gsub("@.*$", "", gh_str_split[2]))$get_version() + )) + } + + expect_identical(pkg_ver_act, 
package_version(pkg_ver_target)) + + invisible(x) +} + +#' @inheritParams test_proposal_common +#' @keywords internal +test_proposal_common_bioc <- function(x, + pkg_name = "pkgdepends", + platform = "source", + solve_ip_flag = TRUE) { + expect_s3_class(x, "pkg_installation_proposal") + + # Allows to re-use x across packages without having to solve it again + if (solve_ip_flag) solve_ip(x) + + expect_equal(x$get_solution()$status, "OK") + + x_solution <- x$get_resolution() + + x_solution_pkg <- subset( + x_solution, + package == pkg_name & platform == "source" + ) + + expect_true(nrow(x_solution_pkg) >= 1) + expect_true(any(x_solution_pkg$status == "OK")) + + invisible(x) +} diff --git a/tests/testthat/test-deps_installation_proposal.R b/tests/testthat/test-deps_installation_proposal.R index 2340fdad..aa21d985 100644 --- a/tests/testthat/test-deps_installation_proposal.R +++ b/tests/testthat/test-deps_installation_proposal.R @@ -1,225 +1,342 @@ -d_std <- desc::desc("!new") -d_std$set_dep("pkgdepends", "Import") -ref_std_path <- tempfile() -d_std$write(ref_std_path) -on.exit(unlink(ref_std_path), add = TRUE, after = FALSE) - test_that("new_max_deps_installation_proposal correctly handles standard reference", { skip_if_offline() skip_if_empty_gh_token() - x <- new_max_deps_installation_proposal(ref_std_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) + d_std_path <- local_description(list(pkgdepends = "Import")) + x <- new_max_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - expect_s3_class(x, "pkg_installation_proposal") + test_proposal_common(x, "pkgdepends", "source", NULL, NULL) +}) - x$solve() +test_that("new_release_deps_installation_proposal correctly handles standard reference", { + skip_if_offline() + skip_if_empty_gh_token() - expect_equal(x$get_solution()$status, "OK") + d_std_path <- local_description(list(pkgdepends = "Import")) + x <- new_release_deps_installation_proposal(d_std_path) + 
withr::defer(unlink(x$get_config()$library)) - x_solution <- x$get_resolution() + test_proposal_common(x, "pkgdepends", "source", NULL, NULL) +}) - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) +test_that("new_min_isolated_installation_proposal correctly handles standard reference", { + skip_if_offline() + skip_if_empty_gh_token() - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version( - available.packages( - repos = pkgcache::default_cran_mirror(), - filters = list(add = TRUE, function(x) x[x[, "Package"] == "pkgdepends", ]) - )[["Version"]] - ) - expect_identical(pkg_ver_act, pkg_ver_target) + d_std_path <- local_description(list(pkgdepends = "Import")) + x <- new_min_isolated_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) + + test_proposal_common(x, "pkgdepends", "source", "0.1.0", NULL) }) -test_that("new_release_deps_installation_proposal correctly handles standard reference", { +test_that("new_min_cohort_deps_installation_proposal correctly handles standard reference", { skip_if_offline() skip_if_empty_gh_token() - x <- new_release_deps_installation_proposal(ref_std_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) + d_std_path <- local_description(list(pkgdepends = "Import")) + x <- new_min_cohort_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - expect_s3_class(x, "pkg_installation_proposal") + test_proposal_common(x, "pkgdepends", "source", "0.1.0", NULL) +}) - x$solve() +# ################################################################# +# +# _ _ _ _ +# (_) | | | | | +# __ ___| |_| |__ _ __ ___ _ __ ___ ___ | |_ ___ ___ +# \ \ /\ / / | __| '_ \ | '__/ _ \ '_ ` _ \ / _ \| __/ _ \/ __| +# \ V V /| | |_| | | | | | | __/ | | | | | (_) | || __/\__ \ +# \_/\_/ |_|\__|_| |_| |_| \___|_| |_| |_|\___/ \__\___||___/ +# +# +# +# with remotes +# 
################################################################ - expect_equal(x$get_solution()$status, "OK") +test_that("new_max_deps_installation_proposal correctly handles / reference", { + skip_if_offline() + skip_if_empty_gh_token() - x_solution <- x$get_resolution() + remote_str <- "r-lib/pkgdepends" + desc_str <- "r-lib/pkgdepends" + d_std_path <- local_description( + list(pkgdepends = "Import"), + remotes = c(remote_str), need_verdepcheck = desc_str + ) + x <- new_max_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) + test_proposal_common(x, "pkgdepends", "source", NULL, remote_str) +}) - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version( - available.packages( - repos = pkgcache::default_cran_mirror(), - filters = list(add = TRUE, function(x) x[x[, "Package"] == "pkgdepends", ]) - )[["Version"]] +test_that("new_max_deps_installation_proposal correctly handles /@*release reference", { + skip_if_offline() + skip_if_empty_gh_token() + + remote_str <- "r-lib/pkgdepends" + desc_str <- "r-lib/pkgdepends@*release" + d_std_path <- local_description( + list(pkgdepends = "Import"), + remotes = c(remote_str), need_verdepcheck = desc_str ) - expect_identical(pkg_ver_act, pkg_ver_target) + x <- new_max_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) + + test_proposal_common(x, "pkgdepends", "source", NULL, remote_str) }) -test_that("new_min_deps_installation_proposal correctly handles standard reference", { +test_that("new_max_deps_installation_proposal correctly handles /@ ref. 
(particular remote tag)", { skip_if_offline() skip_if_empty_gh_token() - x <- new_min_deps_installation_proposal(ref_std_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) + remote_str <- "r-lib/pkgdepends@v0.3.2" + desc_str <- "r-lib/pkgdepends@v0.3.2" + d_std_path <- local_description( + list(pkgdepends = "Import"), + remotes = c(remote_str), need_verdepcheck = desc_str + ) + x <- new_max_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - expect_s3_class(x, "pkg_installation_proposal") + test_proposal_common(x, "pkgdepends", "source", "0.3.2", remote_str) +}) - x$solve() +test_that("new_max_deps_installation_proposal correctly handles / ref. (without Config/Need/verdpcheck)", { + skip_if_offline() + skip_if_empty_gh_token() - expect_equal(x$get_solution()$status, "OK") + remote_str <- "r-lib/pkgdepends" + d_std_path <- local_description(list(pkgdepends = "Import"), remotes = c(remote_str)) + x <- new_max_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - x_solution <- x$get_resolution() + test_proposal_common(x, "pkgdepends", "source", NULL, remote_str) +}) - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) +test_that("new_release_deps_installation_proposal correctly handles / reference", { + skip_if_offline() + skip_if_empty_gh_token() - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version("0.1.0") + remote_str <- "r-lib/pkgdepends" + d_std_path <- local_description(list(pkgdepends = "Import"), remotes = c(remote_str)) + x <- new_release_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - expect_identical(pkg_ver_act, pkg_ver_target) + test_proposal_common(x, "pkgdepends", "source", NULL, NULL) }) -#### +test_that("new_min_cohort_deps_installation_proposal correctly handles / reference", { + skip_if_offline() + 
skip_if_empty_gh_token() + + remote_str <- "r-lib/pkgdepends" + d_std_path <- local_description(list(pkgdepends = "Import"), remotes = c(remote_str)) + x <- new_min_cohort_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) -d_gh <- desc::desc("!new") -d_gh$set_dep("pkgdepends", "Import") -d_gh$add_remotes("r-lib/pkgdepends") -ref_gh_path <- tempfile() -d_gh$write(ref_gh_path) -on.exit(unlink(ref_gh_path), add = TRUE, after = FALSE) + test_proposal_common(x, "pkgdepends", "source", "0.1.0", NULL) +}) -test_that("new_max_deps_installation_proposal correctly handles / reference", { +test_that("new_min_deps_installation_proposal correctly handles / reference", { skip_if_offline() skip_if_empty_gh_token() - x <- new_max_deps_installation_proposal(ref_gh_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) - - expect_s3_class(x, "pkg_installation_proposal") + remote_str <- "r-lib/pkgdepends" + desc_str <- "r-lib/pkgdepends" + d_std_path <- local_description( + list(pkgdepends = "Import"), + remotes = c(remote_str), + need_verdepcheck = desc_str + ) + x <- new_min_isolated_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - x$solve() - expect_equal(x$get_solution()$status, "OK") + test_proposal_common(x, "pkgdepends", "source", "0.1.0", NULL) +}) - x_solution <- x$get_resolution() +# ################################################################ +# +# _ _ _ ____ _ __ +# (_) | | | / /\ \ ______ | | \ \ +# __ ___| |_| |__ | | \ \______| __ _ | |__ ___| | +# \ \ /\ / / | __| '_ \ | | > >_____ / _` | | '_ \ / __| | +# \ V V /| | |_| | | | | | / /______| | (_| |_| |_) | (__| | +# \_/\_/ |_|\__|_| |_| | | /_/ \__,_(_)_.__(_)___| | +# \_\ /_/ +# +# +# with (>= a.b.c) +# ############################################################### + +test_that("new_min_isolated_deps_installation_proposal correctly handles \">=\" dependency for / ref.", { + skip_if_offline() + skip_if_empty_gh_token() 
- x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) + d_std_path <- local_description(list(`pkgdepends (>= 0.2.0)` = "Import"), "r-lib/pkgdepends") + x <- new_min_isolated_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version(as.character(get_desc_from_gh("r-lib", "pkgdepends")$get_version())) - expect_equal(pkg_ver_act, pkg_ver_target) + test_proposal_common(x, "pkgdepends", "source", "0.2.0", NULL) }) -test_that("new_release_deps_installation_proposal correctly handles / reference", { +test_that("new_min_isolated_deps_installation_proposal correctly handles \">=\" dependency for standard reference", { skip_if_offline() skip_if_empty_gh_token() - x <- new_release_deps_installation_proposal(ref_gh_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) - - expect_s3_class(x, "pkg_installation_proposal") + d_std_path <- local_description(list(`pkgdepends (>= 0.2.0)` = "Import")) + x <- new_min_isolated_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - x$solve() - expect_equal(x$get_solution()$status, "OK") + test_proposal_common(x, "pkgdepends", "source", "0.2.0", NULL) +}) - x_solution <- x$get_resolution() +test_that("new_min_cohort_deps_installation_proposal correctly handles \">=\" dependency for / reference", { + skip_if_offline() + skip_if_empty_gh_token() - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) + d_std_path <- local_description(list(`pkgdepends (>= 0.2.0)` = "Import"), "r-lib/pkgdepends") + x <- new_min_cohort_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version( - available.packages( - repos = 
pkgcache::default_cran_mirror(), - filters = list(add = TRUE, function(x) x[x[, "Package"] == "pkgdepends", ]) - )[["Version"]] - ) - expect_equal(pkg_ver_act, pkg_ver_target) + test_proposal_common(x, "pkgdepends", "source", "0.2.0", NULL) }) -test_that("new_min_deps_installation_proposal correctly handles / reference", { +test_that("new_min_cohort_deps_installation_proposal correctly handles \">=\" dependency for standard reference", { skip_if_offline() skip_if_empty_gh_token() - x <- new_min_deps_installation_proposal(ref_gh_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) + d_std_path <- local_description(list(`pkgdepends (>= 0.2.0)` = "Import")) + x <- new_min_cohort_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) + + test_proposal_common(x, "pkgdepends", "source", "0.2.0", NULL) +}) - expect_s3_class(x, "pkg_installation_proposal") +test_that("new_min_isolated_deps_installation_proposal correctly handles tern and rtables", { + skip_if_offline() + skip_if_empty_gh_token() - x$solve() - expect_equal(x$get_solution()$status, "OK") + d_std_path <- local_description( + list( + "tern (>= 0.8.3)" = "Import", + "rtables (>= 0.6.1)" = "Import", + "formatters (>= 0.5.0)" = "Import" + ), + need_verdepcheck = list( + "insightsengineering/tern", + "insightsengineering/rtables", + "insightsengineering/formatters" + ) + ) + x <- new_min_isolated_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - x_solution <- x$get_resolution() + x <- test_proposal_common(x, "tern", "source", "0.8.3", NULL) + x <- test_proposal_common(x, "rtables", "source", "0.6.1", NULL, solve_ip = FALSE) + test_proposal_common(x, "formatters", "source", "0.5.0", NULL, solve_ip = FALSE) +}) - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) +# Test for encapsulation isssue where another dependency (primary or in the tree) +# 
requires a version that is more recent than the primary version +# +# Note that the calls to `test_proposal_common` have different versions from the +# local description specification (in `rtables` and `formatters` packages) +test_that("new_min_isolated_deps_installation_proposal correctly resolves a different version from specifications", { + skip_if_offline() + skip_if_empty_gh_token() - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version("0.1.0") + d_std_path <- local_description( + list( + "tern (>= 0.8.3)" = "Import", + "rtables (>= 0.6.0)" = "Import", + "formatters (>= 0.4.1)" = "Import" + ) + ) + x <- new_min_isolated_deps_installation_proposal(d_std_path) + withr::defer(unlink(x$get_config()$library)) - expect_identical(pkg_ver_act, pkg_ver_target) + x <- test_proposal_common(x, "tern", "source", "0.8.3", NULL) + x <- test_proposal_common(x, "rtables", "source", "0.6.1", NULL, solve_ip = FALSE) + test_proposal_common(x, "formatters", "source", "0.5.0", NULL, solve_ip = FALSE) }) -#### - -test_that("new_min_deps_installation_proposal correctly handles \">=\" dependency for / reference", { +# ################################################################# +# +# ____ _ _ _ +# | _ \(_) | | | | +# | |_) |_ ___ ___ ___ _ __ __| |_ _ ___| |_ ___ _ __ +# | _ <| |/ _ \ / __/ _ \| '_ \ / _` | | | |/ __| __/ _ \| '__| +# | |_) | | (_) | (_| (_) | | | | (_| | |_| | (__| || (_) | | +# |____/|_|\___/ \___\___/|_| |_|\__,_|\__,_|\___|\__\___/|_| +# +# +# +# Install Bioconductor single packages +# +# note: until pkgcache or another mechanism can retrieve the published date +# of Bioconductor packages some warnings are expected as it uses the current +# date instead. Bioconductor packges have their versions bumped every 6 months +# with a new Bioc release. 
+# ################################################################ + + +test_that("new_min_cohort_deps_installation_proposal correctly handles Bioc package", { skip_if_offline() skip_if_empty_gh_token() - temp_path <- tempfile() - d <- desc::desc("!new") - d$set_dep("pkgdepends", "Import", ">= 0.2.0") - d$add_remotes("r-lib/pkgdepends") - d$write(temp_path) - on.exit(unlink(temp_path), add = TRUE, after = FALSE) + d_std_path <- local_description(list(SummarizedExperiment = "Import")) - x <- new_min_deps_installation_proposal(temp_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) + expect_warning( + x <- new_min_cohort_deps_installation_proposal(d_std_path), + "Cannot find PPM snapshot" + ) - expect_s3_class(x, "pkg_installation_proposal") + withr::defer(unlink(x$get_config()$library)) - x$solve() - expect_equal(x$get_solution()$status, "OK") + test_proposal_common_bioc(x, "SummarizedExperiment") +}) - x_solution <- x$get_resolution() +test_that("new_min_isolated_deps_installation_proposal correctly handles Bioc package", { + skip_if_offline() + skip_if_empty_gh_token() - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) + d_std_path <- local_description(list(SummarizedExperiment = "Import")) - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version("0.2.0") - expect_identical(pkg_ver_act, pkg_ver_target) + x <- new_min_isolated_deps_installation_proposal(d_std_path) + + withr::defer(unlink(x$get_config()$library)) + + expect_warning( + test_proposal_common_bioc(x, "SummarizedExperiment"), + "Cannot find PPM snapshot" + ) }) -test_that("new_min_deps_installation_proposal correctly handles \">=\" dependency for standard reference", { +test_that("new_release_deps_installation_proposal correctly handles Bioc package", { skip_if_offline() skip_if_empty_gh_token() - temp_path <- tempfile() - d <- desc::desc("!new") - 
d$set_dep("pkgdepends", "Import", ">= 0.2.0") - d$write(temp_path) - on.exit(unlink(temp_path), add = TRUE, after = FALSE) + d_std_path <- local_description(list(SummarizedExperiment = "Import")) + + x <- new_release_deps_installation_proposal(d_std_path) - x <- new_min_deps_installation_proposal(temp_path) - on.exit(unlink(x$get_config()$library), add = TRUE, after = FALSE) + withr::defer(unlink(x$get_config()$library)) - expect_s3_class(x, "pkg_installation_proposal") + test_proposal_common_bioc(x, "SummarizedExperiment") +}) + +test_that("new_max_deps_installation_proposal correctly handles Bioc package", { + skip_if_offline() + skip_if_empty_gh_token() - x$solve() - expect_equal(x$get_solution()$status, "OK") + d_std_path <- local_description(list(SummarizedExperiment = "Import")) - x_solution <- x$get_resolution() + x <- new_max_deps_installation_proposal(d_std_path) - x_solution_pkg <- subset(x_solution, package == "pkgdepends" & platform == "source") - expect_equal(nrow(x_solution_pkg), 1) + withr::defer(unlink(x$get_config()$library)) - pkg_ver_act <- package_version(x_solution_pkg$version) - pkg_ver_target <- package_version("0.2.0") - expect_identical(pkg_ver_act, pkg_ver_target) + test_proposal_common_bioc(x, "SummarizedExperiment") }) diff --git a/tests/testthat/test-desc_utils.R b/tests/testthat/test-desc_utils.R new file mode 100644 index 00000000..f7388d73 --- /dev/null +++ b/tests/testthat/test-desc_utils.R @@ -0,0 +1,170 @@ +test_that("filter_valid_version will filter for valid versions", { + versions <- paste(1:10, rep("0", 10), sep = ".") + + expect_length(filter_valid_version(versions, ">=", "3.1"), 7) + + expect_length(filter_valid_version(versions, ">", "3.1"), 7) + expect_length(filter_valid_version(versions, ">", "3.0"), 7) + + expect_length(filter_valid_version(versions, "<=", "2.0"), 2) + + expect_length(filter_valid_version(versions, "<", "2.1"), 2) + expect_length(filter_valid_version(versions, "<", "2.0.0"), 1) + + 
expect_length(filter_valid_version(versions, "==", "3.1"), 0) + expect_length(filter_valid_version(versions, "==", "1.0.0.0.0.0.0"), 1) + + expect_length(filter_valid_version(versions, "!=", "1.0"), 9) + expect_length(filter_valid_version(versions, "!=", "1.0.01"), 10) +}) + +test_that("check_valid_version will return vector of logicals", { + versions <- paste(1:10, rep("0", 10), sep = ".") + + expect_true(all(check_valid_version(versions, "", ""))) + expect_true(all(check_valid_version(versions, ">=", ""))) + expect_true(all(check_valid_version(versions, ">=", "0.0.0"))) + + expect_error( + check_valid_version(versions, "0.3", ">="), + "invalid version specification" + ) + + expect_error( + check_valid_version(versions, ">="), + "argument \"op_ver\" is missing, with no default" + ) + + expect_error( + check_valid_version(versions), + "argument \"op\" is missing, with no default" + ) +}) + +test_that("desc_remotes_cleanup will replace remotes with tag", { + d <- desc::desc( + file = verdepcheck:::local_description( + list( + dplyr = "Import", + "tibble" = "Import", + pkgdepends = "Import" + ), + remotes = c( + "tidyverse/dplyr@*release", + "tidyverse/tibble@*release", + "r-lib/pkgdepends@*release" + ), + need_verdepcheck = c( + "dplyr", + "tibble=tidyverse/tibble@v3.2.1" + ) + ) + ) + + clean_d <- desc_remotes_cleanup(d) + + expect_contains(clean_d$get_remotes(), "r-lib/pkgdepends@*release") + expect_contains(clean_d$get_remotes(), "tibble=tidyverse/tibble@v3.2.1") + expect_failure(expect_contains(clean_d$get_remotes(), "tidyverse/dplyr@*release")) +}) + +test_that("desc_remotes_cleanup will remove Config/Needs/verdepcheck's CRAN reference from remotes", { + d <- desc::desc( + file = verdepcheck:::local_description( + list( + dplyr = "Import", + "tibble" = "Import", + pkgdepends = "Import" + ), + remotes = c( + "tidyverse/dplyr@*release", + "r-lib/pkgdepends@*release" + ), + need_verdepcheck = c( + "dplyr", + "tibble=tidyverse/tibble@v3.2.1" + ) + ) + ) + + clean_d 
<- desc_remotes_cleanup(d) + + expect_length(clean_d$get_remotes(), 2) + expect_contains(clean_d$get_remotes(), "r-lib/pkgdepends@*release") + expect_contains(clean_d$get_remotes(), "tibble=tidyverse/tibble@v3.2.1") + expect_failure(expect_contains(clean_d$get_remotes(), "tidyverse/dplyr@*release")) + expect_failure(expect_contains(clean_d$get_remotes(), "tidyverse/dplyr")) + expect_failure(expect_contains(clean_d$get_remotes(), "dplyr")) +}) + +test_that("desc_remotes_cleanup will add all Config/Needs/verdepcheck GH entries to remotes", { + d <- desc::desc( + file = verdepcheck:::local_description( + list( + dplyr = "Import", + tibble = "Import", + pkgdepends = "Import" + ), + remotes = c( + "tidyverse/dplyr@*release", + "r-lib/pkgdepends@*release" + ), + need_verdepcheck = c( + "tidyverse/dplyr@v1.0.0", + "tibble=tidyverse/tibble@v3.2.1" + ) + ) + ) + + clean_d <- desc_remotes_cleanup(d) + + expect_length(clean_d$get_remotes(), 3) + expect_contains(clean_d$get_remotes(), "r-lib/pkgdepends@*release") + expect_contains(clean_d$get_remotes(), "tibble=tidyverse/tibble@v3.2.1") + expect_contains(clean_d$get_remotes(), "tidyverse/dplyr@v1.0.0") +}) + +test_that("desc_remotes_cleanup accepts no Config/Need/verdepcheck", { + d <- desc::desc( + file = verdepcheck:::local_description( + list( + dplyr = "Import", + "tibble" = "Import", + pkgdepends = "Import" + ), + remotes = c( + "tidyverse/dplyr@*release", + "tidyverse/tibble@*release", + "r-lib/pkgdepends@*release" + ) + ) + ) + + clean_d <- desc_remotes_cleanup(d) + + expect_length(get_desc_field_pkgs(clean_d), 0) + + expect_length(clean_d$get_remotes(), 3) + expect_contains(clean_d$get_remotes(), "r-lib/pkgdepends@*release") + expect_contains(clean_d$get_remotes(), "tidyverse/tibble@*release") + expect_contains(clean_d$get_remotes(), "tidyverse/dplyr@*release") +}) + +test_that("get_desc_field_pkgs allows for no Config/Needs/verdepcheck", { + d <- desc::desc( + file = verdepcheck:::local_description( + list( + dplyr = 
"Import", + "tibble" = "Import", + pkgdepends = "Import" + ), + remotes = c( + "tidyverse/dplyr@*release", + "tidyverse/tibble@*release", + "r-lib/pkgdepends@*release" + ) + ) + ) + + expect_length(get_desc_field_pkgs(d), 0) +}) diff --git a/tests/testthat/test-get_ref.R b/tests/testthat/test-get_ref.R new file mode 100644 index 00000000..6c5ac605 --- /dev/null +++ b/tests/testthat/test-get_ref.R @@ -0,0 +1,109 @@ +test_that("get_release_date.remote_ref_github will only retrieve 1 date for teal@v0.10.0", { + skip_if_offline() + skip_if_empty_gh_token() + + # Teal v0.10.0 has 2 tags (release candidate and release) + remote_ref <- pkgdepends::parse_pkg_ref("insightsengineering/teal@v0.10.0") + result <- get_release_date.remote_ref_github(remote_ref) + + expect_length(result, 1) + expect_s3_class(result, "Date") + expect_identical(as.Date(result), as.Date("2021-10-08T15:10:35Z")) +}) + +test_that("get_release_date.remote_ref_github will only retrieve 1 date for rlang@1.0.0", { + skip_if_offline() + skip_if_empty_gh_token() + + # Teal v0.10.0 has 2 tags (release candidate and release) + remote_ref <- pkgdepends::parse_pkg_ref("r-lib/rlang@v1.0.0") + result <- get_release_date.remote_ref_github(remote_ref) + + expect_length(result, 1) + expect_s3_class(result, "Date") + expect_identical(as.Date(result), as.Date("2022-01-20T16:47:02Z")) +}) + +test_that("get_release_date.remote_ref_github will retrieve missing date (NA) for rlang@0.0.0", { + skip_if_offline() + skip_if_empty_gh_token() + + # Teal v0.10.0 has 2 tags (release candidate and release) + remote_ref <- pkgdepends::parse_pkg_ref("r-lib/rlang@v0.0.0") + result <- get_release_date.remote_ref_github(remote_ref) + + expect_length(result, 1) + expect_true(is.na(result)) + expect_s3_class(result, "Date") +}) + +test_that("get_release_date.remote_ref_cran will retrieve missing date (NA) for rlang@0.0.0", { + skip_if_offline() + skip_if_empty_gh_token() + + remote_ref <- 
pkgdepends::parse_pkg_ref("package.does.not.exist@1.1.0") + result <- get_release_date.remote_ref_cran(remote_ref) + + expect_length(result, 1) + expect_true(is.na(result)) + expect_s3_class(result, "Date") +}) + +test_that("get_release_date with any class other than remote_ref.{github,cran,standard} returns missing", { + remote_ref <- pkgdepends::parse_pkg_ref("dplyr@1.1.0") + class(remote_ref) <- Filter( + function(el) !grepl("remote_ref_(cran|github|standard)", el), + class(remote_ref) + ) + result <- get_release_date.remote_ref(remote_ref) + + expect_length(result, 1) + expect_true(is.na(result)) + expect_s3_class(result, "Date") +}) + +test_that("get_cran_data returns date for Bioconductor", { + skip_if_offline() + + expect_false(any(is.na(get_cran_data("SummarizedExperiment")$mtime))) +}) + +test_that("get_ref_release returns a CRAN remote_reference if package exists", { + skip_if_offline() + skip_if_empty_gh_token() + + test_refs <- c( + "dplyr", + "dplyr@1.1.0", + "tidyverse/dplyr", + "tidyverse/dplyr@v1.1.0", + "tidyverse/dplyr@c48230c13" + ) + + for (el_ref in test_refs) { + remote_ref <- pkgdepends::parse_pkg_ref(el_ref) + expect_s3_class(get_ref_release(remote_ref), "remote_ref_standard") + } +}) + +test_that("get_ref_release returns a CRAN remote_reference if package exists", { + skip_if_offline() + skip_if_empty_gh_token() + + testthat::local_mocked_bindings( + check_if_on_cran = function(remote_ref, op = "", op_ver = "") FALSE + ) + + test_refs <- c( + "tidyverse/dplyr", # github format + "tidyverse/dplyr@c48230c13", # commit format + "tidyverse/dplyr#681", # pull request format + "tidyverse/dplyr@v1.1.0", # tag format + "tidyverse/dplyr@*release" # release format + ) + + for (el_ref in test_refs) { + remote_ref <- pkgdepends::parse_pkg_ref(el_ref) + expect_s3_class(get_ref_release(remote_ref), "remote_ref_github") + } +}) diff --git a/tests/testthat/test-utils.R b/tests/testthat/test-utils.R new file mode 100644 index 00000000..1e30911d --- 
/dev/null +++ b/tests/testthat/test-utils.R @@ -0,0 +1,117 @@ +test_that("local_description will create a valid DESCRIPTION file", { + file_path <- local_description( + pkg_list = list(pkgdepends = "Imports", `dplyr (>= 1.0)` = "Imports") + ) + expect_true(file.exists(file_path)) + d <- expect_silent( + desc::desc(file = file_path) + ) + + deps <- d$get_deps() + + expect_true("pkgdepends" %in% deps$package) + expect_true("dplyr" %in% deps$package) + + expect_equal(deps$version[deps$package == "dplyr"], ">= 1.0") +}) + +test_that("local_description will create a valid DESCRIPTION file with Remotes", { + file_path <- local_description( + pkg_list = list( + pkgdepends = "Imports", `dplyr (>= 1.0)` = "Imports" + ), + remotes = c("r-lib/pkgdepends@*release") + ) + expect_true(file.exists(file_path)) + + d <- expect_silent( + desc::desc(file = file_path) + ) + + deps <- d$get_deps() + + expect_true("pkgdepends" %in% deps$package) + expect_true("dplyr" %in% deps$package) + + expect_equal(deps$version[deps$package == "dplyr"], ">= 1.0") + + expect_equal(NROW(d$get_remotes()), 1) + + ref <- pkgdepends::parse_pkg_ref(d$get_remotes()) + + expect_equal(ref$package, "pkgdepends") + expect_equal(ref$ref, "r-lib/pkgdepends@*release") + expect_s3_class(ref, "remote_ref_github") +}) + +test_that("local_description will create a valid DESCRIPTION file with Config/Need/verdepcheck", { + file_path <- local_description( + pkg_list = list( + pkgdepends = "Imports", `dplyr (>= 1.0)` = "Imports" + ), + need_verdepcheck = c("r-lib/pkgdepends") + ) + expect_true(file.exists(file_path)) + + d <- expect_silent( + desc::desc(file = file_path) + ) + + deps <- d$get_deps() + + expect_true("pkgdepends" %in% deps$package) + expect_true("dplyr" %in% deps$package) + + expect_equal(deps$version[deps$package == "dplyr"], ">= 1.0") + + expect_equal(NROW(d$get_remotes()), 0) + + ref <- pkgdepends::parse_pkg_ref(get_desc_field_pkgs(d)) + + expect_equal(ref$package, "pkgdepends") + expect_equal(ref$ref, 
"r-lib/pkgdepends") + expect_s3_class(ref, "remote_ref_github") +}) + + +test_that("local_description will create a temporary file", { + inner_fun <- function() { + file_path <- local_description( + pkg_list = list(pkgdepends = "Imports") + ) + + expect_true(file.exists(file_path)) + + d <- desc::desc(file = file_path) + + return(list(d = d, file_path = file_path)) + } + + result <- inner_fun() + + expect_true("pkgdepends" %in% result$d$get_deps()$package) + + expect_false(file.exists(result$file_path)) +}) + +test_that("get_ppm_snapshot_by_date will accept NA", { + expect_latest_ppm(get_ppm_snapshot_by_date(NA)) +}) + +test_that("get_ppm_snapshot_by_date will accept dates in the future", { + skip_if_offline() + auxiliary_fun <- function(days = 0) { + expect_warning( + expect_latest_ppm(get_ppm_snapshot_by_date(Sys.Date())), + "Cannot find PPM snapshot for date" + ) + } + + auxiliary_fun(0) + auxiliary_fun(10) + auxiliary_fun(1) + + expect_failure( + expect_latest_ppm(get_ppm_snapshot_by_date(Sys.Date() - 365)), + ) +})