Use testthat 3 + remove deprecated features #577

Merged · 9 commits · Nov 27, 2024
5 changes: 4 additions & 1 deletion DESCRIPTION
@@ -43,7 +43,7 @@ Imports:
rlang (>= 1.0.3),
magrittr,
scales (>= 1.2.1),
testthat (>= 3.1.6),
testthat (>= 3.2.0),
tibble (>= 3.1.8),
tidyr (>= 1.3.0),
tidyselect (>= 1.2.0),
@@ -67,3 +67,6 @@ Suggests:
dittodb,
odbc
Roxygen: list(markdown = TRUE)
Config/testthat/edition: 3
Config/testthat/parallel: true
Config/testthat/start-first: expectation_fns, scan_data,tidyselect_fails_safely_batch,test_fns
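
Note on the DESCRIPTION changes: bumping testthat to >= 3.2.0 and setting `Config/testthat/edition: 3` opts the suite into the third edition (snapshot tests, stricter `expect_equal()`), `Config/testthat/parallel: true` runs test files concurrently, and `Config/testthat/start-first` schedules the slowest files first. A minimal sketch of the snapshot-style test this enables (the test name, the `al` threshold, and the column choice are illustrative only, not taken from the suite):

    library(testthat)
    library(pointblank)

    al <- action_levels(warn_at = 1)

    test_that("validation warnings are stable", {
      # expect_snapshot() records the warning text under tests/testthat/_snaps/,
      # which is how the new _snaps/interrogate_with_agent.md file below is produced
      expect_snapshot(
        obj <- small_table %>% col_is_numeric(columns = vars(a), actions = al)
      )
    })
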
5 changes: 5 additions & 0 deletions R/col_vals_between.R
@@ -428,6 +428,11 @@ col_vals_between <- function(

agent <- x

# Avoid rlang (>= 0.3.0) soft deprecation warning
# Quosure lists can't be concatenated with objects other than quosures.
left <- as.list(left)
right <- as.list(right)

if (is.null(brief)) {

brief <-
6 changes: 6 additions & 0 deletions R/col_vals_not_between.R
@@ -431,6 +431,12 @@ col_vals_not_between <- function(

agent <- x

# Avoid rlang (>= 0.3.0) soft deprecation warning
# Quosure lists can't be concatenated with objects other than quosures

left <- as.list(left)
right <- as.list(right)

if (is.null(brief)) {

brief <-
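
Note on the `as.list()` coercions in `col_vals_between.R` and `col_vals_not_between.R`: since rlang 0.3.0, concatenating a quosures list with plain values is soft-deprecated (the warning quoted in the comment), while concatenating ordinary lists is not. A minimal sketch of the behavior being avoided (values illustrative):

    library(rlang)

    left <- quos(0)            # a quosures list, as captured from `left`
    # c(left, 5000)            # would hit the rlang >= 0.3.0 soft-deprecation warning
    c(as.list(left), 5000)     # coerced to a plain list, c() stays quiet
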
4 changes: 2 additions & 2 deletions R/get_agent_report.R
@@ -1682,8 +1682,8 @@ get_agent_report <- function(
"eval_sym", "W", "S", "N", "extract"
)
) %>%
gt_missing(columns = c("columns", "values", "units", "extract")) %>%
gt_missing(columns = "status_color", missing_text = "") %>%
gt::sub_missing(columns = c("columns", "values", "units", "extract")) %>%
gt::sub_missing(columns = "status_color", missing_text = "") %>%
gt::cols_hide(columns = c("W_val", "S_val", "N_val", "active", "eval")) %>%
gt::text_transform(
locations = gt::cells_body(columns = "units"),
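
Note on the `gt_missing()` -> `gt::sub_missing()` swaps here and in the other report/scan files: gt renamed `fmt_missing()` to `sub_missing()` in gt 0.6.0, so the internal `gt_missing()` compatibility shim (removed from `R/utils.R` further down) is no longer needed and the namespaced function is called directly. A small usage sketch:

    library(gt)

    tbl <- gt(data.frame(value = c("a", NA)))
    # substitute the rendered text for missing cells in the chosen column
    sub_missing(tbl, columns = "value", missing_text = "missing")
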
4 changes: 2 additions & 2 deletions R/get_multiagent_report.R
@@ -809,7 +809,7 @@ get_multiagent_report <- function(
) %>%
gt::fmt_markdown(columns = 2:n_columns) %>%
gt::fmt_markdown(columns = "sha1") %>%
gt_missing(
gt::sub_missing(
columns = columns_used_tbl,
missing_text = gt::html(
as.character(
@@ -824,7 +824,7 @@
)
)
) %>%
gt_missing(
gt::sub_missing(
columns = columns_not_used,
missing_text = ""
) %>%
6 changes: 3 additions & 3 deletions R/get_sundered_data.R
@@ -382,13 +382,13 @@ get_sundered_data <- function(
tbl_check_join <-
tbl_check_join %>%
dplyr::select(
dplyr::one_of(by_cols), dplyr::starts_with("pb_is_good_")
dplyr::all_of(by_cols), dplyr::starts_with("pb_is_good_")
) %>%
dplyr::left_join(
tbl_check_join_r %>%
dplyr::rename(!!new_col_ii := pb_is_good_) %>%
dplyr::select(
dplyr::one_of(by_cols), dplyr::starts_with("pb_is_good_")
dplyr::all_of(by_cols), dplyr::starts_with("pb_is_good_")
),
by = by_cols
) %>%
@@ -425,7 +425,7 @@
tbl_check_join <-
tbl_check_join %>%
dplyr::mutate(pb_is_good_ = !!rlang::parse_expr(columns_str_add)) %>%
dplyr::select(-dplyr::one_of(columns_str_vec)) %>%
dplyr::select(-dplyr::all_of(columns_str_vec)) %>%
dplyr::mutate(pb_is_good_ = dplyr::case_when(
pb_is_good_ == validation_n ~ TRUE,
TRUE ~ FALSE
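
Note on the `dplyr::one_of()` -> `dplyr::all_of()` replacements in this file and in `interrogate.R`, `scan_data.R`, and `utils-profiling.R`: `one_of()` has been superseded by tidyselect in favor of `all_of()`/`any_of()`, and `all_of()` keeps the strict behavior of erroring when a requested column is absent. A short sketch (column names illustrative):

    library(dplyr)

    df <- data.frame(a = 1, b = 2, c = 3)
    by_cols <- c("a", "b")

    select(df, all_of(by_cols))              # errors if any name in by_cols is missing
    select(df, any_of(c(by_cols, "zzz")))    # silently drops names that are missing
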
7 changes: 6 additions & 1 deletion R/incorporate.R
@@ -166,7 +166,12 @@ incorporate <- function(informant) {

# Get the target table for this informant object
# TODO: extend the materialize table function to use an agent or informant
tbl <- informant$tbl
if (rlang::has_name(informant, "tbl")) {
# Avoid partial matching
tbl <- informant$tbl
} else {
tbl <- informant$tbl_name
}
tbl_name <- informant$tbl_name
read_fn <- informant$read_fn

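
Note on the `incorporate()` change: `$` on a list partially matches element names, so `informant$tbl` could silently return the `tbl_name` element when no `tbl` element exists. `rlang::has_name()` checks for the exact name first, and the `else` branch keeps the value the old partial match would have produced. Sketch (list contents illustrative):

    x <- list(tbl_name = "small_table")

    x$tbl                        # "small_table" -- `$` partially matches tbl_name
    rlang::has_name(x, "tbl")    # FALSE -- exact-name check used above
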
14 changes: 7 additions & 7 deletions R/interrogate.R
@@ -426,7 +426,7 @@ interrogate <- function(

tbl_checked %>%
dplyr::mutate(pb_is_good_ = !!rlang::parse_expr(columns_str_add)) %>%
dplyr::select(-dplyr::one_of(columns_str_vec)) %>%
dplyr::select(-dplyr::all_of(columns_str_vec)) %>%
dplyr::mutate(pb_is_good_ = dplyr::case_when(
pb_is_good_ == validation_n ~ TRUE,
TRUE ~ FALSE
@@ -3331,9 +3331,9 @@ add_table_extract <- function(
) {

problem_rows <-
dplyr::sample_n(
tbl = problem_rows,
size = sample_n,
dplyr::slice_sample(
problem_rows,
n = sample_n,
replace = FALSE) %>%
dplyr::as_tibble()

@@ -3345,9 +3345,9 @@
) {

problem_rows <-
dplyr::sample_frac(
tbl = problem_rows,
size = sample_frac,
dplyr::slice_sample(
problem_rows,
prop = sample_frac,
replace = FALSE) %>%
dplyr::as_tibble() %>%
utils::head(sample_limit)
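
Note on the sampling changes in `add_table_extract()`: `dplyr::sample_n()` and `dplyr::sample_frac()` have been superseded since dplyr 1.0.0 by `dplyr::slice_sample()`, with `n =` and `prop =` taking over from `size =`. Equivalent calls:

    library(dplyr)

    df <- data.frame(x = 1:10)

    slice_sample(df, n = 3, replace = FALSE)      # was: sample_n(df, size = 3)
    slice_sample(df, prop = 0.5, replace = FALSE) # was: sample_frac(df, size = 0.5)
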
12 changes: 6 additions & 6 deletions R/scan_data.R
@@ -809,7 +809,7 @@ get_common_values_gt <- function(
n = get_lsv("table_scan/tbl_lab_count")[[lang]],
frequency = get_lsv("table_scan/tbl_lab_frequency")[[lang]],
) %>%
gt_missing(columns = "value", missing_text = "**NA**") %>%
gt::sub_missing(columns = "value", missing_text = "**NA**") %>%
gt::text_transform(
locations = gt::cells_body(columns = "value"),
fn = function(x) {
@@ -847,7 +847,7 @@ get_common_values_gt <- function(
n = get_lsv("table_scan/tbl_lab_count")[[lang]],
frequency = get_lsv("table_scan/tbl_lab_frequency")[[lang]],
) %>%
gt_missing(columns = "value", missing_text = "**NA**") %>%
gt::sub_missing(columns = "value", missing_text = "**NA**") %>%
gt::text_transform(
locations = gt::cells_body(columns = "value"),
fn = function(x) ifelse(x == "**NA**", "<code>NA</code>", x)
@@ -1301,7 +1301,7 @@ probe_interactions <- function(data) {
columns_char, FUN.VALUE = integer(1), USE.NAMES = FALSE,
FUN = function(x) {
data %>%
dplyr::select(dplyr::one_of(x)) %>%
dplyr::select(dplyr::all_of(x)) %>%
dplyr::distinct() %>%
dplyr::count() %>%
dplyr::pull(n)
@@ -1317,7 +1317,7 @@ probe_interactions <- function(data) {
# Create a ggplot2 plot matrix with the data
plot_matrix <-
data %>%
dplyr::select(dplyr::one_of(col_names)) %>%
dplyr::select(dplyr::all_of(col_names)) %>%
ggplot2::ggplot(ggplot2::aes(x = .panel_x, y = .panel_y)) +
ggplot2::geom_point(alpha = 0.50, shape = 16, size = 1) +
ggforce::geom_autodensity() +
@@ -1388,7 +1388,7 @@ probe_correlations <- function(data) {
)
}

data_corr <- dplyr::select(data, dplyr::one_of(columns_numeric))
data_corr <- dplyr::select(data, dplyr::all_of(columns_numeric))

corr_pearson <-
stats::cor(data_corr, method = "pearson", use = "pairwise.complete.obs")
@@ -1539,7 +1539,7 @@ probe_sample <- function(data) {
probe_sample <-
data %>%
gt::gt_preview(top_n = 5, bottom_n = 5) %>%
gt_missing(columns = gt::everything(), missing_text = "**NA**") %>%
gt::sub_missing(columns = gt::everything(), missing_text = "**NA**") %>%
gt::text_transform(
locations = gt::cells_body(columns = gt::everything()),
fn = function(x) ifelse(x == "**NA**", "<code>NA</code>", x)
8 changes: 4 additions & 4 deletions R/utils-profiling.R
@@ -384,7 +384,7 @@ get_tbl_dbi_missing_tbl <- function(data) {

missing_n_span <-
data %>%
dplyr::select(1, dplyr::one_of(x__))
dplyr::select(1, dplyr::all_of(x__))

if (ncol(missing_n_span) == 1) {

@@ -460,7 +460,7 @@ get_tbl_df_missing_tbl <- function(data) {
col_names,
FUN = function(x__) {

data <- dplyr::select(data, dplyr::one_of(x__))
data <- dplyr::select(data, dplyr::all_of(x__))
data <- tibble::rowid_to_column(data)
data <-
dplyr::mutate(data, `::cut_group::` = dplyr::case_when(
@@ -524,7 +524,7 @@ get_missing_by_column_tbl <- function(data) {
col_names,
FUN = function(x__) {

data <- dplyr::select(data, dplyr::one_of(x__))
data <- dplyr::select(data, dplyr::all_of(x__))
data <- dplyr::group_by(data)
data <-
dplyr::summarize_all(
@@ -617,7 +617,7 @@ get_table_slice_gt <- function(data_column,
data_column %>%
gt::gt() %>%
gt::fmt_percent(columns = 3, locale = locale) %>%
gt_missing(columns = 1, missing_text = "**NA**") %>%
gt::sub_missing(columns = 1, missing_text = "**NA**") %>%
gt::text_transform(
locations = gt::cells_body(columns = 1),
fn = function(x) ifelse(x == "**NA**", "<code>NA</code>", x)
11 changes: 4 additions & 7 deletions R/utils.R
@@ -1643,13 +1643,6 @@ print_time <- function(time_diff_s) {
)
}

gt_missing <-
if (packageVersion("gt") >= "0.6.0") {
gt::sub_missing
} else {
gt::fmt_missing
}

pb_get_image_tag <- function(file, dir = "images") {

repo_url <- "https://raw.githubusercontent.com/rstudio/pointblank/main"
@@ -1691,6 +1684,10 @@ pb_get_image_tag <- function(file, dir = "images") {
deparse_expr <- function(expr, collapse = " ", ...) {
if (rlang::is_scalar_atomic(expr)) {
as.character(expr)
} else if (rlang::is_quosure(expr)) {
expr <- rlang::quo_get_expr(expr)
deparsed <- paste(deparse(expr, ...), collapse = collapse)
paste("<expr>", deparsed)
} else {
deparsed <- paste(deparse(expr, ...), collapse = collapse)
paste("<expr>", deparsed)
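
Note on the `R/utils.R` changes: the `gt_missing()` shim that picked between `gt::fmt_missing()` and `gt::sub_missing()` by version is dropped now that `gt::sub_missing()` is called everywhere, and `deparse_expr()` gains a quosure branch that unwraps the expression with `rlang::quo_get_expr()` before deparsing, so reported expressions read as plain code. Sketch of what the new branch does:

    library(rlang)

    q <- quo(a + 1)
    expr_only <- quo_get_expr(q)       # strip the quosure wrapper before deparsing
    paste("<expr>", paste(deparse(expr_only), collapse = " "))
    #> "<expr> a + 1"
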
74 changes: 74 additions & 0 deletions tests/testthat/_snaps/interrogate_with_agent.md
@@ -0,0 +1,74 @@
# Select validation steps can be `active` or not

Code
obj <- small_table %>% col_is_character(columns = vars(b), actions = al) %>%
col_is_numeric(columns = vars(a), actions = al) %>% col_is_posix(columns = vars(
date_time), actions = al) %>% col_is_date(columns = vars(date), actions = al) %>%
col_is_integer(columns = vars(a), actions = al) %>% col_is_logical(columns = vars(
e), actions = al) %>% col_vals_between(columns = vars(d), left = 0, right = 5000,
actions = al) %>% col_vals_equal(columns = vars(d), value = 283.94, actions = al) %>%
col_vals_gt(columns = vars(date_time), value = vars(date), actions = al) %>%
col_vals_gte(columns = vars(date_time), value = vars(date), actions = al) %>%
col_vals_lt(columns = vars(date_time), value = vars(date), actions = al) %>%
col_vals_lte(columns = vars(date_time), value = vars(date), actions = al) %>%
col_vals_in_set(columns = vars(f), set = c("low", "mid", "high"), actions = al) %>%
col_vals_not_between(columns = vars(d), left = 500, right = 1000, actions = al) %>%
col_vals_not_equal(columns = vars(d), value = 283.94, actions = al) %>%
col_vals_not_in_set(columns = vars(f), set = c("lower", "middle", "higher"),
actions = al) %>% col_vals_not_null(columns = vars(c), actions = al) %>%
col_vals_null(columns = vars(b), actions = al) %>% col_vals_regex(columns = vars(
f), regex = "[a-z]{3}", actions = al) %>% rows_distinct(actions = al) %>%
conjointly(~ col_vals_gt(., columns = vars(a), value = 1), ~ col_vals_lt(.,
columns = vars(c), value = 10, na_pass = TRUE), ~ col_vals_not_null(.,
columns = vars(d)), actions = al) %>% serially(~ test_col_vals_gt(.,
columns = vars(a), value = 0), ~ test_col_vals_lt(., columns = vars(c),
value = 10, na_pass = TRUE), ~ col_vals_not_null(., columns = vars(d)),
actions = al) %>% specially(fn = function(x) {
as.integer(x$date) <= as.integer(x$date_time)
}, actions = al)
Condition
Warning:
Failure to validate that column `a` is of type: numeric.
The `col_is_numeric()` validation failed beyond the absolute threshold level (1).
* failure level (1) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `d` should have been between `0` and `5000`.
The `col_vals_between()` validation failed beyond the absolute threshold level (1).
* failure level (1) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `d` should have been == `283.94`.
The `col_vals_equal()` validation failed beyond the absolute threshold level (1).
* failure level (12) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `date_time` should have been < `~date`.
The `col_vals_lt()` validation failed beyond the absolute threshold level (1).
* failure level (13) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `date_time` should have been <= `~date`.
The `col_vals_lte()` validation failed beyond the absolute threshold level (1).
* failure level (13) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `d` should not have been between `500` and `1000`.
The `col_vals_not_between()` validation failed beyond the absolute threshold level (1).
* failure level (4) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `d` should have been != `283.94`.
The `col_vals_not_equal()` validation failed beyond the absolute threshold level (1).
* failure level (1) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `c` should not have been NULL.
The `col_vals_not_null()` validation failed beyond the absolute threshold level (1).
* failure level (2) >= failure threshold (1)
Warning:
Exceedance of failed test units where values in `b` should have been NULL.
The `col_vals_null()` validation failed beyond the absolute threshold level (1).
* failure level (13) >= failure threshold (1)
Warning:
Exceedance of failed test units where there weren't distinct rows across all columns.
The `rows_distinct()` validation failed beyond the absolute threshold level (1).
* failure level (2) >= failure threshold (1)
Warning:
Exceedance of failed test units where there should have been conjoint 'pass' units across the following expressions: `~col_vals_gt(., columns = vars(a), value = 1)`, `~col_vals_lt(., columns = vars(c), value = 10, na_pass = TRUE)`, `~col_vals_not_null(., columns = vars(d))`.
The `conjointly()` validation failed beyond the absolute threshold level (1).
* failure level (1) >= failure threshold (1)

6 changes: 6 additions & 0 deletions tests/testthat/helper.R
@@ -0,0 +1,6 @@
expect_equal_unlist <- function(object, expected, ...) {
expect_equal(
unlist(object),
expected
)
}
6 changes: 3 additions & 3 deletions tests/testthat/postgres-2.R
@@ -24,8 +24,8 @@ test_that("scan_data works with dittodb-mocked Postgres database connection", {
DBI::dbDisconnect(con)
# stop_db_capturing()

expect_is(scan_results, "examination_page")
expect_is(scan_results, "shiny.tag.list")
expect_is(scan_results, "list")
expect_s3_class(scan_results, "examination_page")
expect_s3_class(scan_results, "shiny.tag.list")
expect_type(scan_results, "list")
})
})
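
Note on the `postgres-2.R` change: `expect_is()` is deprecated in testthat's third edition; class checks move to `expect_s3_class()` and base-type checks to `expect_type()` (a list-backed object such as a shiny tag list has base type "list"). Equivalent checks on a plain object:

    library(testthat)

    x <- data.frame(a = 1)
    expect_s3_class(x, "data.frame")   # S3 class check, replaces expect_is()
    expect_type(x, "list")             # base type check; a data frame is a list underneath
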