Commit
[skip vbump] Closes #2462 fix_derive_extreme_event: re-enable check_type = "none" (#2463)
bundfussr authored Jun 17, 2024
1 parent 503909a commit 6d0d4ec
Showing 4 changed files with 45 additions and 28 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
 Type: Package
 Package: admiral
 Title: ADaM in R Asset Library
-Version: 1.1.0.9001
+Version: 1.1.1
 Authors@R: c(
     person("Ben", "Straub", , "[email protected]", role = c("aut", "cre")),
     person("Stefan", "Bundfuss", role = "aut"),
5 changes: 4 additions & 1 deletion NEWS.md
@@ -1,4 +1,7 @@
-# admiral (development version)
+# admiral 1.1.1
+
+- `derive_extreme_event()` was fixed such that `check_type = "none"` is accepted
+  again. (#2462)
 
 # admiral 1.1.0
 
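For context, here is a minimal sketch of the kind of call this release note refers to. The data, parameter codes, and event conditions below are invented for illustration; only `check_type = "none"` and the exported `derive_extreme_event()`/`event()` interface come from this changeset. With the fix, duplicate records with respect to `by_vars` and `order` are resolved silently instead of the "none" option itself being rejected:

library(admiral)
library(tibble)

# Invented example data: subject "1" has two records with the same ADY,
# i.e. duplicates with respect to by_vars + order.
adqs <- tribble(
  ~USUBJID, ~PARAMCD,  ~AVALC, ~ADY,
  "1",      "SLEEP01", "Y",    1,
  "1",      "SLEEP02", "Y",    1,
  "2",      "SLEEP01", "N",    2
)

# Before this patch, check_type = "none" errored; now the duplicate check
# is skipped and the first record per subject is selected silently.
derive_extreme_event(
  adqs,
  by_vars = exprs(USUBJID),
  events = list(
    event(condition = AVALC == "Y", set_values_to = exprs(AVAL = 1)),
    event(condition = TRUE, set_values_to = exprs(AVAL = 0))
  ),
  order = exprs(ADY),
  mode = "first",
  check_type = "none",
  set_values_to = exprs(PARAMCD = "WSP")
)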
64 changes: 39 additions & 25 deletions R/derive_extreme_event.R
@@ -440,14 +440,19 @@ derive_extreme_event <- function(dataset = NULL,
           filter_if(event$condition) %>%
           ungroup()
         if (!is.null(event$mode)) {
-          # Check for duplicates
-          signal_duplicate_records(
-            dataset = data_events,
-            by_vars = append(by_vars, event_order),
-            msg = paste("Check duplicates: ", event$dataset_name, " dataset contains duplicate
-            records with respect to {.var {replace_values_by_names(by_vars)}}"),
-            cnd_type = check_type
-          )
+          if (check_type != "none") {
+            # Check for duplicates
+            signal_duplicate_records(
+              dataset = data_events,
+              by_vars = append(by_vars, event_order),
+              msg = paste(
+                "Check duplicates: ", event$dataset_name,
+                "dataset contains duplicate records with respect to",
+                "{.var {replace_values_by_names(by_vars)}}"
+              ),
+              cnd_type = check_type
+            )
+          }
 
           data_events <- filter_extreme(
             data_events,
@@ -458,14 +463,19 @@
           )
         }
       } else {
-        # Check for duplicates
-        signal_duplicate_records(
-          dataset = data_source,
-          by_vars = append(by_vars, event_order),
-          msg = paste("Check duplicates: ", event$dataset_name, " dataset contains duplicate records
-          with respect to {.var {replace_values_by_names(by_vars)}}"),
-          cnd_type = check_type
-        )
+        if (check_type != "none") {
+          # Check for duplicates
+          signal_duplicate_records(
+            dataset = data_source,
+            by_vars = append(by_vars, event_order),
+            msg = paste(
+              "Check duplicates: ", event$dataset_name,
+              "dataset contains duplicate records with respect to",
+              "{.var {replace_values_by_names(by_vars)}}"
+            ),
+            cnd_type = check_type
+          )
+        }
 
         data_events <- filter_joined(
           data_source,
@@ -495,15 +505,19 @@
   )
   selected_records <- bind_rows(selected_records_ls)
 
-  # Check for duplicates
-  signal_duplicate_records(
-    dataset = selected_records,
-    by_vars = append(by_vars, order),
-    msg = paste("Check duplicates: the dataset which consists of all records selected
-    for any of the events defined by {.arg events} contains duplicate records
-    with respect to {.var {replace_values_by_names(by_vars)}}"),
-    cnd_type = check_type
-  )
+  if (check_type != "none") {
+    # Check for duplicates
+    signal_duplicate_records(
+      dataset = selected_records,
+      by_vars = append(by_vars, order),
+      msg = paste(
+        "Check duplicates: the dataset which consists of all records selected",
+        "for any of the events defined by {.arg events} contains duplicate records",
+        "with respect to {.var {replace_values_by_names(by_vars)}}"
+      ),
+      cnd_type = check_type
+    )
+  }
 
   ## filter_extreme
   new_obs <- selected_records %>%
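The shape of the fix is the same in all three places: skip the duplicate check entirely when the caller opted out, because the signaling helper only knows how to emit a message, warning, or error. A standalone sketch of that guard pattern in plain R (the `check_duplicates()` helper below is illustrative, not an admiral internal):

check_duplicates <- function(dataset, by_vars, check_type = "warning") {
  # Guard first: "none" means no check at all, so the signaling code
  # below never has to handle an unsupported condition type.
  if (check_type != "none") {
    is_dup <- duplicated(dataset[by_vars]) |
      duplicated(dataset[by_vars], fromLast = TRUE)
    if (any(is_dup)) {
      msg <- paste(
        "dataset contains duplicate records with respect to",
        toString(by_vars)
      )
      switch(check_type,
        message = message(msg),
        warning = warning(msg, call. = FALSE),
        error = stop(msg, call. = FALSE)
      )
    }
  }
  invisible(dataset)
}

# check_type = "none" now falls through silently despite the duplicate rows:
check_duplicates(data.frame(id = c(1, 1), day = c(1, 1)), c("id", "day"), "none")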
2 changes: 1 addition & 1 deletion tests/testthat/test-derive_extreme_event.R
@@ -56,7 +56,7 @@ test_that("derive_extreme_event Test 1: `mode` = first", {
     set_values_to = exprs(
       PARAMCD = "WSP"
     ),
-    check_type = "warning"
+    check_type = "none"
   )
 
   expect_dfs_equal(
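Beyond switching Test 1 to the previously broken option, a dedicated regression test could pin the behavior down. A hypothetical sketch, not part of this commit; the data and the no-error expectation are assumptions:

library(testthat)
library(tibble)
library(admiral)

test_that("check_type = 'none' is accepted again (#2462)", {
  adqs <- tribble(
    ~USUBJID, ~AVALC, ~ADY,
    "1",      "Y",    1,
    "1",      "Y",    1 # duplicate with respect to USUBJID + ADY
  )
  # Before the fix this call failed even though the user disabled the check.
  expect_no_error(
    derive_extreme_event(
      adqs,
      by_vars = exprs(USUBJID),
      events = list(
        event(condition = AVALC == "Y", set_values_to = exprs(AVAL = 1))
      ),
      order = exprs(ADY),
      mode = "first",
      check_type = "none",
      set_values_to = exprs(PARAMCD = "WSP")
    )
  )
})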
