diff --git a/R/CohortConstruction.R b/R/CohortConstruction.R
index 2dfb86a..5baecad 100644
--- a/R/CohortConstruction.R
+++ b/R/CohortConstruction.R
@@ -249,7 +249,7 @@ generateCohort <- function(cohortId = NULL,
     connection <- DatabaseConnector::connect(connectionDetails)
     on.exit(DatabaseConnector::disconnect(connection))
   }
-  ParallelLogger::logInfo(i, "/", nrow(cohortDefinitionSet), "- Generating cohort: ", cohortName, " (id = ", cohortId, ")")
+  rlang::inform(paste0(i, "/", nrow(cohortDefinitionSet), " - Generating cohort: ", cohortName, " (id = ", cohortId, ")"))
   sql <- cohortDefinitionSet$sql[i]

   if (!isSubset) {
diff --git a/R/CohortCount.R b/R/CohortCount.R
index f14244f..a89e483 100644
--- a/R/CohortCount.R
+++ b/R/CohortCount.R
@@ -67,7 +67,7 @@ getCohortCounts <- function(connectionDetails = NULL,
   if (tolower(cohortTable) %in% tablesInServer) {
     counts <- DatabaseConnector::querySql(connection, sql, snakeCaseToCamelCase = TRUE)
     delta <- Sys.time() - start
-    ParallelLogger::logInfo(paste("Counting cohorts took", signif(delta, 3), attr(delta, "units")))
+    rlang::inform(paste("Counting cohorts took", signif(delta, 3), attr(delta, "units")))
    if (!is.null(cohortDefinitionSet)) {
      # If the user has NOT specified a list of cohortIds
      # to use to filter the cohortDefinitionSet, then
diff --git a/R/CohortDefinitionSet.R b/R/CohortDefinitionSet.R
index 7bfc34d..42a2113 100644
--- a/R/CohortDefinitionSet.R
+++ b/R/CohortDefinitionSet.R
@@ -244,7 +244,7 @@ getCohortDefinitionSet <- function(settingsFileName = "Cohorts.csv",
       path <- system.file(fileName, package = packageName)
     }
     if (verbose) {
-      ParallelLogger::logInfo(paste0(" -- Loading ", basename(fileName), " from ", path))
+      rlang::inform(paste0(" -- Loading ", basename(fileName), " from ", path))
     }
     if (!file.exists(path)) {
       if (grepl(".json$", tolower(basename(fileName))) && warnOnMissingJson) {
@@ -259,7 +259,7 @@ getCohortDefinitionSet <- function(settingsFileName = "Cohorts.csv",
   }

   # Read the settings file which holds the cohortDefinitionSet
-  ParallelLogger::logInfo("Loading cohortDefinitionSet")
+  rlang::inform("Loading cohortDefinitionSet")
   settings <- readCsv(file = getPath(fileName = settingsFileName),
                       warnOnCaseMismatch = FALSE)
   assert_settings_columns(names(settings), getPath(fileName = settingsFileName))
@@ -313,12 +313,12 @@ getCohortDefinitionSet <- function(settingsFileName = "Cohorts.csv",
   # Loading cohort subset definitions with their associated targets
   if (loadSubsets & nrow(subsetsToLoad) > 0) {
     if (dir.exists(subsetJsonFolder)) {
-      ParallelLogger::logInfo("Loading Cohort Subset Definitions")
+      rlang::inform("Loading Cohort Subset Definitions")

       ## Loading subsets that apply to the saved definition sets
       for (i in unique(subsetsToLoad$subsetDefinitionId)) {
         subsetFile <- file.path(subsetJsonFolder, paste0(i, ".json"))
-        ParallelLogger::logInfo("Loading Cohort Subset Defintion ", subsetFile)
+        rlang::inform(paste0("Loading Cohort Subset Definition ", subsetFile))
         subsetDef <- CohortSubsetDefinition$new(ParallelLogger::loadSettingsFromJson(subsetFile))
         # Find target cohorts for this subset definition
         subsetTargetIds <- unique(subsetsToLoad[subsetsToLoad$subsetDefinitionId == i, ]$subsetParent)
@@ -397,7 +397,7 @@ saveCohortDefinitionSet <- function(cohortDefinitionSet,

   # Export the cohortDefinitionSet to the settings folder
   if (verbose) {
-    ParallelLogger::logInfo("Exporting cohortDefinitionSet to ", settingsFileName)
+    rlang::inform(paste0("Exporting cohortDefinitionSet to ", settingsFileName))
   }
   # Write the settings file and ensure that the "sql" and "json" columns are
   # not included
@@ -425,7 +425,7 @@ saveCohortDefinitionSet <- function(cohortDefinitionSet,
     }
     if (verbose) {
-      ParallelLogger::logInfo("Exporting (", i, "/", nrow(cohortDefinitionSet), "): ", cohortName)
+      rlang::inform(paste0("Exporting (", i, "/", nrow(cohortDefinitionSet), "): ", cohortName))
     }

     if (!is.na(json) && nchar(json) > 0) {
@@ -441,7 +441,7 @@ saveCohortDefinitionSet <- function(cohortDefinitionSet,
     }
   }

-  ParallelLogger::logInfo("Cohort definition saved")
+  rlang::inform("Cohort definition saved")
 }

 .getSettingsFileRequiredColumns <- function() {
diff --git a/R/CohortSample.R b/R/CohortSample.R
index 6cb393b..e522244 100644
--- a/R/CohortSample.R
+++ b/R/CohortSample.R
@@ -262,7 +262,7 @@ sampleCohortDefinitionSet <- function(cohortDefinitionSet,
   )
   if (nrow(sampleTable) == 0) {
-    ParallelLogger::logInfo("No entires found for ", targetCohortId, " was it generated?")
+    rlang::inform(paste0("No entries found for ", targetCohortId, ". Was it generated?"))
     return(sampledCohortDefinition)
   }
   # Called only for side effects
diff --git a/R/CohortStats.R b/R/CohortStats.R
index e3ce475..307cbdf 100644
--- a/R/CohortStats.R
+++ b/R/CohortStats.R
@@ -71,7 +71,7 @@ insertInclusionRuleNames <- function(connectionDetails = NULL,

   # Insert the inclusion rules
   if (nrow(inclusionRules) > 0) {
-    ParallelLogger::logInfo("Inserting inclusion rule names")
+    rlang::inform("Inserting inclusion rule names")
     DatabaseConnector::insertTable(
       connection = connection,
       databaseSchema = cohortDatabaseSchema,
@@ -107,7 +107,7 @@ getStatsTable <- function(connectionDetails,
     databaseId <- NULL
   }

-  ParallelLogger::logInfo("- Fetching data from ", table)
+  rlang::inform(paste0("- Fetching data from ", table))
   sql <- "SELECT {@database_id != ''}?{CAST('@database_id' as VARCHAR(255)) as database_id,} t.* FROM @cohort_database_schema.@table t"
   data <- DatabaseConnector::renderTranslateQuerySql(
     sql = sql,
diff --git a/R/CohortTables.R b/R/CohortTables.R
index bf0e2b9..19a57c7 100644
--- a/R/CohortTables.R
+++ b/R/CohortTables.R
@@ -98,13 +98,13 @@ createCohortTables <- function(connectionDetails = NULL,
     for (i in 1:length(cohortTableNames)) {
       if (toupper(cohortTableNames[i]) %in% toupper(tables)) {
         createTableFlagList[i] <- FALSE
-        ParallelLogger::logInfo("Table \"", cohortTableNames[i], "\" already exists and in incremental mode, so not recreating it.")
+        rlang::inform(paste0("Table \"", cohortTableNames[i], "\" already exists and in incremental mode, so not recreating it."))
       }
     }
   }

   if (any(unlist(createTableFlagList, use.names = FALSE))) {
-    ParallelLogger::logInfo("Creating cohort tables")
+    rlang::inform("Creating cohort tables")
     createSampleTable <- ifelse(
       test = is.null(createTableFlagList$cohortSampleTable),
       yes = FALSE,
@@ -137,7 +137,7 @@
     DatabaseConnector::executeSql(connection, sql, progressBar = FALSE, reportOverallTime = FALSE)
     logCreateTableMessage <- function(schema, tableName) {
-      ParallelLogger::logInfo("- Created table ", schema, ".", tableName)
+      rlang::inform(paste0("- Created table ", schema, ".", tableName))
     }
     for (i in 1:length(createTableFlagList)) {
       if (createTableFlagList[[i]]) {
@@ -146,7 +146,7 @@
     }

     delta <- Sys.time() - start
-    ParallelLogger::logInfo("Creating cohort tables took ", round(delta, 2), attr(delta, "units"))
+    rlang::inform(paste0("Creating cohort tables took ", round(delta, 2), " ", attr(delta, "units")))
   }
 }

@@ -173,7 +173,7 @@ dropCohortStatsTables <- function(connectionDetails = NULL,

   # Export the stats
   dropTable <- function(table) {
-    ParallelLogger::logInfo("- Dropping ", table)
+    rlang::inform(paste0("- Dropping ", table))
     sql <- "TRUNCATE TABLE @cohort_database_schema.@table;
             DROP TABLE @cohort_database_schema.@table;"
     DatabaseConnector::renderTranslateExecuteSql(
diff --git a/R/Export.R b/R/Export.R
index b0c013b..971f3a8 100644
--- a/R/Export.R
+++ b/R/Export.R
@@ -73,7 +73,7 @@ exportCohortStatsTables <- function(connectionDetails,

   exportStats <- function(data, fileName) {
     fullFileName <- file.path(cohortStatisticsFolder, fileName)
-    ParallelLogger::logInfo("- Saving data to - ", fullFileName)
+    rlang::inform(paste0("- Saving data to ", fullFileName))
     if (incremental) {
       if (snakeCaseToCamelCase) {
         cohortDefinitionIds <- unique(data$cohortDefinitionId)
diff --git a/R/Incremental.R b/R/Incremental.R
index de400e4..cb578b0 100644
--- a/R/Incremental.R
+++ b/R/Incremental.R
@@ -70,7 +70,7 @@ isTaskRequired <- function(..., checksum, recordKeepingFile, verbose = TRUE) {
     if (verbose) {
       key <- list(...)
       key <- paste(sprintf("%s = '%s'", names(key), key), collapse = ", ")
-      ParallelLogger::logInfo("Skipping ", key, " because it is unchanged from earlier run")
+      rlang::inform(paste0("Skipping ", key, " because it is unchanged from earlier run"))
     }
     return(FALSE)
   } else {
@@ -115,7 +115,7 @@ getRequiredTasks <- function(..., checksum, recordKeepingFile) {
     tasks$checksum <- NULL
     if (length(idx) > 0) {
       text <- paste(sprintf("%s = %s", names(tasks), tasks[idx, ]), collapse = ", ")
-      ParallelLogger::logInfo("Skipping ", text, " because it is unchanged from earlier run")
+      rlang::inform(paste0("Skipping ", text, " because it is unchanged from earlier run"))
       tasks <- tasks[-idx, ]
     }
   }
diff --git a/R/NegativeControlCohorts.R b/R/NegativeControlCohorts.R
index 516662c..51e4691 100644
--- a/R/NegativeControlCohorts.R
+++ b/R/NegativeControlCohorts.R
@@ -163,7 +163,7 @@ generateNegativeControlOutcomeCohorts <- function(connectionDetails = NULL,
     stop(paste0("Table: ", cohortTable, " not found in schema: ", cohortDatabaseSchema, ". Please use `createCohortTable` to ensure the cohort table is created before generating cohorts."))
   }

-  ParallelLogger::logInfo("Generating negative control outcome cohorts")
+  rlang::inform("Generating negative control outcome cohorts")

   # Send the negative control outcome cohort set to the server for use
   # in processing. This temp table will hold the mapping between
diff --git a/tests/testthat/test-CohortDefinitionSet.R b/tests/testthat/test-CohortDefinitionSet.R
index 90cb9bd..6f48618 100644
--- a/tests/testthat/test-CohortDefinitionSet.R
+++ b/tests/testthat/test-CohortDefinitionSet.R
@@ -179,7 +179,7 @@ test_that("Call saveCohortDefinitionSet with missing json", {
     ))
   }

-  expect_output(
+  expect_message(
    saveCohortDefinitionSet(
      cohortDefinitionSet = cohortsToCreate,
      settingsFileName = file.path(tempdir(), "settings"),
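Note on the API difference driving the paste0() wrapping throughout this diff: ParallelLogger::logInfo() concatenates everything passed through its `...` into one log line, whereas rlang::inform() takes a single message string and treats its second positional argument as the condition class. A minimal sketch of the distinction (the variable `n` is illustrative, only the rlang package is assumed):

# ParallelLogger concatenates its arguments into one log line:
#   ParallelLogger::logInfo("Fetching ", 5, " rows")   # logs "Fetching 5 rows"
# rlang::inform(message, class, ...) does not: the second positional
# argument is the condition class, so pieces must be combined first.
library(rlang)
n <- 5
inform(paste0("Fetching ", n, " rows"))  # correct: one message string
# inform("Fetching ", n, " rows")        # wrong: `n` is taken as `class`

This is why every multi-argument logInfo() call gains a paste0() in the diff, while calls that already passed a single string are swapped one-for-one.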
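The expect_output() to expect_message() change in the tests follows from the same switch: logInfo() printed to the console, which expect_output() could capture, while inform() signals a condition of class "message" (written to stderr in non-interactive sessions), which expect_output() never sees. A minimal sketch, assuming testthat and rlang:

library(testthat)
library(rlang)
# inform() raises a "message" condition rather than printing to stdout,
# so expect_message() is the matching expectation:
expect_message(inform("Cohort definition saved"), "saved")
# expect_output(inform("Cohort definition saved"), "saved")
#   would typically fail in a non-interactive test run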