From 7fc29f7764ca09f70e748e87e22597efd0a617db Mon Sep 17 00:00:00 2001 From: aheinri5 <Anna@netzkritzler.de> Date: Sat, 6 Feb 2021 16:30:35 +0100 Subject: [PATCH] [base] WIP updating ensemble class --- R/gml.R | 318 ----------- R/ogs6_ensemble.R | 17 +- R/sim_utils.R | 9 +- R/vtu.R | 651 ----------------------- vignettes/ensemble_workflow_vignette.Rmd | 313 +++++++++++ vignettes/theis_vignette.Rmd | 301 ----------- 6 files changed, 332 insertions(+), 1277 deletions(-) delete mode 100644 R/gml.R delete mode 100644 R/vtu.R create mode 100644 vignettes/ensemble_workflow_vignette.Rmd delete mode 100644 vignettes/theis_vignette.Rmd diff --git a/R/gml.R b/R/gml.R deleted file mode 100644 index 732c4a2..0000000 --- a/R/gml.R +++ /dev/null @@ -1,318 +0,0 @@ - -#===== OGS6_gml ===== - - -#'OGS6_gml -#'@description Constructor for the OGS6_gml base class -#'@export -OGS6_gml <- R6::R6Class( - "OGS6_gml", - public = list( - - #'@description - #'Creates new OGS6_gml object - #'@param gml_path string: Optional: Path to .gml file - #'@param name string: Geometry name - #'@param points tibble: Must have 3 vectors named 'x', 'y' and 'z', may - #' have optional 'name' vector - #'@param polylines list(list("foo", c(1, 2))): - #'@param surfaces list(list("foo", c(1, 2, 3), c(2, 3, 4))): - initialize = function(gml_path = NULL, - name = NULL, - points = NULL, - polylines = NULL, - surfaces = NULL) { - - if(is.null(gml_path)){ - self$name <- name - self$points <- points - self$polylines <- polylines - self$surfaces <- surfaces - }else{ - if(!is.null(name) || - !is.null(points) || - !is.null(polylines) || - !is.null(surfaces)){ - warning(paste("`gml_path` was specified for OGS6_gml", - "initialization, so all other parameters", - "will be ignored!"), call. = FALSE) - } - - xml_doc <- validate_read_in_xml(gml_path) - - self$name <- xml2::xml_text(xml2::xml_find_first(xml_doc, - "//name")) - self$points <- read_in_points(xml_doc) - self$polylines <- read_in_polylines(xml_doc) - self$surfaces <- read_in_surfaces(xml_doc) - } - - private$.gml_path <- gml_path - private$validate() - } - ), - - active = list( - - #'@field gml_path - #'Getter for private parameter '.gml_path' - gml_path = function(value) { - private$gml_path - }, - - #'@field name - #'Access to private parameter '.name' - name = function(value) { - if(missing(value)) { - private$.name - }else{ - assertthat::assert_that(assertthat::is.string(value)) - private$.name <- value - } - }, - - #'@field points - #'Access to private parameter '.points' - points = function(value) { - if(missing(value)) { - private$.points - }else{ - private$.points <- validate_points(value) - } - }, - - #'@field polylines - #'Access to private parameter '.polylines' - polylines = function(value) { - if(missing(value)) { - private$.polylines - }else{ - if(!is.null(value)){ - value <- validate_polylines(value) - } - - private$.polylines <- value - } - }, - - #'@field surfaces - #'Access to private parameter '.surfaces' - surfaces = function(value) { - if(missing(value)) { - private$.surfaces - }else{ - if(!is.null(value)){ - value <- validate_surfaces(value) - } - private$.surfaces <- value - } - }, - - #'@field is_subclass - #'Getter for private parameter '.is_subclass' - is_subclass = function(value) { - private$.is_subclass - }, - - #'@field attr_names - #'Getter for private parameter '.attr_names' - attr_names = function(value) { - private$.attr_names - }, - - #'@field flatten_on_exp - #'Getter for private parameter '.flatten_on_exp' - flatten_on_exp = function(value) { - 
private$.flatten_on_exp - } - ), - - private = list( - - validate = function(){ - maximal_point_id <- length(self$points[[1]]) - 1 - - check_pnt <- function(pnt){ - if(pnt > maximal_point_id || - pnt < 0){ - stop(paste("Point with ID", pnt, "does not exist"), - call. = FALSE) - } - } - - #Check if polylines reference existing points - lapply(self$polylines, function(x){ - lapply(x[[2]], check_pnt) - }) - - #Check if surfaces reference existing points - lapply(self$surfaces, function(x){ - lapply(x[[2]], check_pnt) - if(length(x) == 3){ - lapply(x[[3]], check_pnt) - } - }) - }, - - .gml_path = NULL, - .name = NULL, - .points = NULL, - .polylines = NULL, - .surfaces = NULL, - .is_subclass = TRUE, - .attr_names = c("point", "name", "id", "element"), - .flatten_on_exp = character() - ) -) - - -#===== Validation utility ===== - - -#'validate_points -#'@description Checks if the input is a tibble, if this tibble has the right -#' number of elements, if those elements are named correctly and if there are -#' any overlapping points or duplicate point names -#'@param points tibble: Must have 3 vectors named 'x', 'y' and 'z', may have -#' optional 'name' vector -validate_points <- function(points) { - - assertthat::assert_that(inherits(points, "tbl_df")) - - names <- names(points) - - if (!((length(points) == 4 && names[[1]] == "x" && names[[2]] == "y" && - names[[3]] == "z" && names[[4]] == "name") || - (length(points) == 3 && names[[1]] == "x" && names[[2]] == "y" && - names[[3]] == "z"))){ - stop(paste(points, " column names do not fit to 'x, y, z, (name)' "), - call. = FALSE) - } - - assertthat::assert_that(is.numeric(points$x)) - assertthat::assert_that(is.numeric(points$y)) - assertthat::assert_that(is.numeric(points$z)) - - has_names <- (length(points) == 4) - - # #Find overlapping points and duplicate names - # for(i in 1:(length(points[[1]])-1)){ - # for(j in (i+1):length(points[[1]])){ - # if(points[[1]][[i]] == points[[1]][[j]] && - # points[[2]][[i]] == points[[2]][[j]] && - # points[[3]][[i]] == points[[3]][[j]]){ - # stop("Overlapping .gml points detected", call. = FALSE) - # } - # - # if(has_names){ - # if(points[[4]][[i]] == points[[4]][[j]] && - # points[[4]][[i]] != ""){ - # warning("Duplicate .gml point names detected", - # call. 
= FALSE) - # } - # } - # } - # } - - return(invisible(points)) -} - - -#'validate_polylines -#'@description Checks if the input is a list, if this list consists of other -#' lists and if those lists have the correct structure (length of 2, first -#' element is a string named 'name', second element is a numeric vector) -#'@param polylines list(list("foo", c(1, 2))): -validate_polylines <- function(polylines) { - - assertthat::assert_that(is.list(polylines)) - - polylines <- lapply(polylines, function(x){ - assertthat::assert_that(is.list(x)) - assertthat::assert_that(length(x) == 2) - assertthat::assert_that(assertthat::is.string(x[[1]])) - assertthat::assert_that(is.numeric(x[[2]])) - names(x)[[1]] <- c("name") - names(x[[2]]) <- rep("pnt", length(names(x[[2]]))) - return(x) - }) - - names(polylines) <- rep("polyline", length(polylines)) - return(invisible(polylines)) -} - - -#'validate_surfaces -#'@description Checks if the input is a list, if this list consists of other -#' lists and if those lists have the correct structure (length of 2 or 3, first -#' element is a string named 'name', second and third element are numeric -#' vectors) -#'@param surfaces list(list("foo", c(1, 2, 3), c(2, 3, 4))): -validate_surfaces <- function(surfaces) { - - assertthat::assert_that(is.list(surfaces)) - - validate_element <- function(element){ - assertthat::assert_that(is.numeric(element)) - assertthat::assert_that(length(element) == 3) - names(element) <- c("p1", "p2", "p3") - return(invisible(element)) - } - - surfaces <- lapply(surfaces, function(x){ - - assertthat::assert_that(is.list(x)) - assertthat::assert_that(length(x) == 2 || - length(x) == 3) - - names(x) <- c("name", rep("element", (length(x)-1))) - x[[2]] <- validate_element(x[[2]]) - - if(length(x) == 3){ - x[[3]] <- validate_element(x[[3]]) - # validate_pnt_values(x[[2]], x[[3]]) - } - - return(x) - }) - - names(surfaces) <- rep("surface", length(surfaces)) - - return(invisible(surfaces)) -} - - -#'validate_pnt_values -#'@description Checks if two numerical vectors of length 3 -#' (two surface elements) each consist of 3 different elements and have -#' exactly 2 matching elements between them. Think of the two vectors as -#' triangles, and the triangles together form a square which is our surface. -#'@param element_1 numeric, length = 3 -#'@param element_2 numeric, length = 3 -validate_pnt_values = function (element_1, element_2) { - - if(element_1[[1]] == element_1[[2]] || - element_1[[1]] == element_1[[3]] || - element_1[[2]] == element_1[[3]] || - element_2[[1]] == element_2[[2]] || - element_2[[1]] == element_2[[3]] || - element_2[[2]] == element_2[[3]]) { - stop("A surface element must consist of 3 different points", - call. = FALSE) - } - - equal_count <- 0 - - for(i in 1:length(element_1)) { - for(j in 1:length(element_2)) { - if(element_1[[i]] == element_2[[j]]) { - equal_count <- equal_count + 1 - break - } - } - } - - if(equal_count != 2) { - stop("Invalid surface detected", call. = FALSE) - } -} diff --git a/R/ogs6_ensemble.R b/R/ogs6_ensemble.R index 8734728..a3279c3 100644 --- a/R/ogs6_ensemble.R +++ b/R/ogs6_ensemble.R @@ -67,9 +67,12 @@ OGS6_Ensemble <- R6::R6Class( #' For ensembles, output will be written to logfiles. #'@param parallel flag: Should the function be run in parallel? #' This is implementented via the 'parallel' package. 
- run_simulation = function(parallel = FALSE){ + #'@param verbose flag + run_simulation = function(parallel = FALSE, + verbose = F){ assertthat::assert_that(assertthat::is.flag(parallel)) + assertthat::assert_that(assertthat::is.flag(verbose)) # Create ensemble directory assertthat::assert_that(!dir.exists(self$ens_path)) @@ -103,7 +106,8 @@ OGS6_Ensemble <- R6::R6Class( foreach::foreach(i = seq_along(ensemble)) %dopar% { ogs6_obj <- ensemble[[i]] r2ogs6::run_simulation(ogs6_obj, - write_logfile = TRUE) + write_logfile = TRUE, + verbose = verbose) } # Cleanup @@ -117,14 +121,17 @@ OGS6_Ensemble <- R6::R6Class( parallel::mclapply(self$ensemble, run_simulation, write_logfile = TRUE, + verbose = verbose, mc.cores = n_cores) } }else{ # For serial ensembles, we can use lapply - lapply(self$ensemble, - run_simulation, - write_logfile = TRUE) + exit_codes <- lapply(self$ensemble, + run_simulation, + write_logfile = TRUE, + verbose = verbose) + return(exit_codes) } }, diff --git a/R/sim_utils.R b/R/sim_utils.R index 25aecee..3047ccd 100644 --- a/R/sim_utils.R +++ b/R/sim_utils.R @@ -9,13 +9,16 @@ #'@param write_logfile flag: Should output be written to a logfile? #'@param ogs_bin_path string: Optional: OpenGeoSys 6 bin folder path. Defaults #' to `options("r2ogs6.default_ogs_bin_path")` +#'@param verbose flag #'@export run_simulation <- function(ogs6_obj, write_logfile = TRUE, - ogs_bin_path) { + ogs_bin_path, + verbose = F) { assertthat::assert_that(inherits(ogs6_obj, "OGS6")) assertthat::assert_that(assertthat::is.flag(write_logfile)) + assertthat::assert_that(assertthat::is.flag(verbose)) # Call all validators if(!ogs6_obj$get_status(print_status = FALSE)){ @@ -74,7 +77,9 @@ run_simulation <- function(ogs6_obj, assertthat::assert_that(!file.exists(ogs6_obj$logfile)) file.create(ogs6_obj$logfile) - cat("\nRunning sim...\n") + if(verbose){ + cat("\nRunning simulation '", ogs6_obj$sim_name, "'\n", sep = "") + } exit_code <- system2(command = ogs6_command_str, args = ogs6_args, diff --git a/R/vtu.R b/R/vtu.R deleted file mode 100644 index 042a125..0000000 --- a/R/vtu.R +++ /dev/null @@ -1,651 +0,0 @@ - -#===== OGS6_pvd ===== - - -#'OGS6_pvd -#'@description Constructor for the OGS6_pvd base class -#'@export -OGS6_pvd <- R6::R6Class( - "OGS6_pvd", - public = list( - - #'@description - #'Creates new OGS6_pvd object - #'@param pvd_path string: Path to .pvd file - initialize = function(pvd_path) { - - xml_doc <- validate_read_in_xml(pvd_path) - dataset_nodes <- xml2::xml_find_all(xml_doc, - "/VTKFile/Collection/DataSet") - - private$.pvd_path <- pvd_path - private$.datasets <- lapply(dataset_nodes, - node_to_object) - private$.OGS6_vtus <- lapply(self$abs_vtu_paths, - OGS6_vtu$new) - - }, - - #'@description - #'Returns .vtu path for specified timestep - #'@param timestep string: Timestep - vtu_by_timestep = function(timestep){ - - assertthat::assert_that(assertthat::is.number(timestep)) - - for(i in seq_len(length(self$timesteps))){ - if(self$timesteps[[i]] == timestep){ - return(self$vtu_paths[[i]]) - } - } - - warning(paste("No .vtu path found for timestep", timestep), - call. = FALSE) - }, - - #'@description - #'Returns timestep for specified .vtu path - #'@param vtu_path string: .vtu path - timestep_by_vtu = function(vtu_path){ - - assertthat::assert_that(assertthat::is.string(vtu_path)) - - for(i in seq_len(length(self$vtu_paths))){ - if(self$vtu_paths[[i]] == vtu_path){ - return(self$timesteps[[i]]) - } - } - - warning(paste("No timestep found for .vtu path", vtu_path), - call. 
= FALSE) - }, - - #'@description - #'Returns a tibble containing point data - #'@param coordinates list(numeric): List of coordinates (a coordinate - #' is a numeric vector of length 3) - #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements. Defaults to all. - #'@param start_at_timestep number: Optional: Timestep to start at. - #' Defaults to first timestep. - #'@param end_at_timestep number: Optional: Timestep to end at. Defaults - #' to last timestep. - get_point_data_at = function(coordinates, - Names, - start_at_timestep, - end_at_timestep){ - - coordinates <- validate_coordinates(coordinates) - - if(missing(Names)){ - Names <- as.character(self$point_data$keys()) - } - - assertthat::assert_that(is.character(Names)) - - # Use point locator to get data - point_ids <- lapply(coordinates, function(x){ - self$OGS6_vtus[[1]]$vtkPointLocator$FindClosestPoint(x) - }) - - return(self$get_point_data(point_ids = as.numeric(point_ids), - Names = Names, - start_at_timestep = start_at_timestep, - end_at_timestep = end_at_timestep)) - }, - - #'@description - #'Returns a tibble containing point data - #'@param point_ids numeric: Optional: Point IDs. Defaults to all. - #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements. Defaults to all. - #'@param start_at_timestep number: Optional: Timestep to start at. - #' Defaults to first timestep. - #'@param end_at_timestep number: Optional: Timestep to end at. Defaults - #' to last timestep. - get_point_data = function(point_ids, - Names, - start_at_timestep, - end_at_timestep){ - - if(missing(point_ids)){ - max_id <- self$OGS6_vtus[[1]]$number_of_points - 1 - point_ids <- seq(0, max_id) - } - - if(missing(Names)){ - Names <- as.character(self$OGS6_vtus[[1]]$point_data$keys()) - } - - private$get_data( - data_type = "points", - ids = point_ids, - Names = Names, - start_at_timestep = start_at_timestep, - end_at_timestep = end_at_timestep - ) - }, - - #'@description - #'Returns a tibble containing cell data - #'@param cell_ids numeric: Optional: Cell IDs. Defaults to all. - #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements. Defaults to all. - #'@param start_at_timestep number: Optional: Timestep to start at. - #' Defaults to first timestep. - #'@param end_at_timestep number: Optional: Timestep to end at. Defaults - #' to last timestep. - get_cell_data = function(cell_ids, - Names, - start_at_timestep, - end_at_timestep){ - - if(missing(cell_ids)){ - max_id <- self$OGS6_vtus[[1]]$number_of_cells - 1 - cell_ids <- seq(0, max_id) - } - - if(missing(Names)){ - Names <- as.character(self$OGS6_vtus[[1]]$cell_data$keys()) - } - - private$get_data( - data_type = "cells", - ids = cell_ids, - Names = Names, - start_at_timestep = start_at_timestep, - end_at_timestep = end_at_timestep - ) - } - ), - - active = list( - - #'@field pvd_path - #'Getter for private parameter '.pvd_path' - pvd_path = function() { - private$.pvd_path - }, - - #'@field datasets - #'Getter for private parameter '.datasets' - datasets = function() { - private$.datasets - }, - - #'@field vtu_paths - #'Getter for `datasets` `file` - vtu_paths = function() { - - vtu_paths <- lapply(private$.datasets, function(x){ - x[["file"]] - }) - }, - - #'@field abs_vtu_paths - #'Gets absolute .vtu paths, e.g. 
`dirname(pvd_path)` + `datasets` `file` - abs_vtu_paths = function() { - - abs_vtu_paths <- lapply(self$vtu_paths, function(x){ - abs_vtu_path <- paste0(dirname(self$pvd_path), "/", x) - return(invisible(abs_vtu_path)) - }) - }, - - #'@field timesteps - #'Gets timesteps from private parameter '.datasets' - timesteps = function() { - - timesteps <- lapply(private$.datasets, function(x){ - as.double(x[["timestep"]]) - }) - }, - - #'@field OGS6_vtus - #'Getter for private parameter '.OGS6_vtus' - OGS6_vtus = function() { - private$.OGS6_vtus - } - ), - - private = list( - - # Gets sublist from referenced .vtus by timesteps - get_relevant_vtus = function(start_at_timestep, - end_at_timestep){ - - assertthat::assert_that(assertthat::is.number(start_at_timestep)) - assertthat::assert_that(assertthat::is.number(end_at_timestep)) - - relevant_vtus <- list() - - for(i in seq_len(length(self$OGS6_vtus))){ - timestep <- self$timestep_by_vtu(self$vtu_paths[[i]]) - - if(timestep >= start_at_timestep && - timestep <= end_at_timestep){ - relevant_vtus <- c(relevant_vtus, list(self$OGS6_vtus[[i]])) - } - } - - return(relevant_vtus) - }, - - #Returns a dataframe with all of the CellData - get_data = function(data_type, - ids, - Names, - start_at_timestep, - end_at_timestep){ - - assertthat::assert_that(is.numeric(ids)) - assertthat::assert_that(is.character(Names)) - - if(missing(start_at_timestep)){ - start_at_timestep <- self$timesteps[[1]] - } - - if(missing(end_at_timestep)){ - end_at_timestep <- self$timesteps[[length(self$timesteps)]] - } - - vtus <- private$get_relevant_vtus(start_at_timestep, - end_at_timestep) - - tbl_rows <- list() - - for(i in seq_len(length(ids))){ - - timestep_rows <- lapply(vtus, function(vtu){ - - if (data_type == "points") { - data <- vtu$point_data - coords <- vtu$get_point_coords(ids[[i]]) - extra_columns <- list(x = coords[[1]], - y = coords[[2]], - z = coords[[3]]) - } else{ - data <- vtu$cell_data - extra_columns <- list() - } - - new_row <- list(id = ids[[i]]) - new_row <- c(new_row, extra_columns) - - values <- list() - - for(j in seq_len(length(Names))){ - - rid <- ids[[i]] + 1 - - if(length( - dim(data[[Names[[j]]]])) == 1){ - value <- data[[Names[[j]]]][[rid]] - - }else{ - value <- list(as.numeric( - data[[Names[[j]]]][rid,])) - } - - values <- c(values, list(value)) - names(values)[[length(values)]] <- Names[[j]] - } - - new_row <- c(new_row, values) - - new_row <- - c(new_row, - list(timestep = - self$timestep_by_vtu( - basename(vtu$vtu_path)))) - - new_row <- tibble::as_tibble_row(new_row) - return(new_row) - }) - - tbl_rows <- c(tbl_rows, timestep_rows) - } - - tbl <- dplyr::bind_rows(tbl_rows) - return(tbl) - }, - - .pvd_path = NULL, - .datasets = NULL, - .OGS6_vtus = NULL - ) -) - - -#===== OGS6_vtu ===== - - -#'OGS6_vtu -#'@description Constructor for the OGS6_vtu base class -#'@export -OGS6_vtu <- R6::R6Class( - "OGS6_vtu", - public = list( - - #'@description - #'Creates new OGS6_vtu object - #'@param vtu_path string: Path to .vtu file - initialize = function(vtu_path) { - - vtk_xml_ugr <- vtk$vtkXMLUnstructuredGridReader() - vtk_xml_ugr$SetFileName(vtu_path) - vtk_xml_ugr$Update() - self$vtkUnstructuredGrid <- vtk_xml_ugr$GetOutput() - - point_locator <- vtk$vtkPointLocator() - point_locator$SetDataSet(self$vtkUnstructuredGrid) - point_locator$BuildLocator() - private$.vtkPointLocator <- point_locator - - private$.vtu_path <- vtu_path - }, - - #'@description - #'Gets FieldData. 
- #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements, defaults to all in `FieldData` - #'@return list: List of format list(value_a = 1, value_b = 2), where the - #' names reference the `Name` attributes of the `DataArray` elements - get_field_data = function(Names){ - - if(missing(Names)){ - Names <- as.character(self$field_data$keys()) - } - - field_data <- lapply(Names, function(x){ - self$field_data[[x]] - }) - - return(field_data) - }, - - #'@description - #'Gets coordinates of specific points by their IDs. - #'@param point_ids numeric: Point IDs - #'@return If `point_ids` is a number, a coordinate array. If `point_ids` - #' is a numeric vector with length > 1, a list of coordinate arrays. - get_point_coords = function(point_ids){ - assertthat::assert_that(is.numeric(point_ids)) - - if(assertthat::is.number(point_ids)){ - return(self$points[(point_ids + 1),]) - } - - point_coords <- lapply(point_ids, function(x){ - return(self$points[(x + 1),]) - }) - - return(point_coords) - }, - - #'@description - #'Gets PointData at specified coordinates. - #'@param coordinates list(numeric): List of coordinates (a coordinate - #' is a numeric vector of length 3) - #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements, defaults to all in `PointData` - get_point_data_at = function(coordinates, - Names){ - - coordinates <- validate_coordinates(coordinates) - - if(missing(Names)){ - Names <- as.character(self$point_data$keys()) - } - - assertthat::assert_that(is.character(Names)) - - # Use point locator to get data - point_ids <- lapply(coordinates, function(x){ - self$vtkPointLocator$FindClosestPoint(x) - }) - - return(self$get_point_data(point_ids = as.numeric(point_ids), - Names = Names)) - }, - - #'@description - #'Gets PointData for points with IDs in `point_ids`. - #'@param point_ids numeric: Optional: Point IDs, defaults to all - #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements, defaults to all in `PointData` - #'@return tibble: Tibble where each row represents a point. - get_point_data = function(point_ids, - Names){ - - if(missing(point_ids)){ - max_point_id <- self$number_of_points() - 1 - point_ids <- seq(0, max_point_id) - } - - if(missing(Names)){ - Names <- as.character(self$point_data$keys()) - } - - private$get_data(data_type = "points", - ids = point_ids, - Names = Names) - - }, - - #'@description - #'Gets CellData for cells with IDs in `cell_ids`. - #'@param cell_ids numeric: Optional: Cell IDs, defaults to all - #'@param Names character: Optional: `Name` attributes of `DataArray` - #' elements, defaults to all in `CellData` - #'@return tibble: Tibble where each row represents a cell. 
- get_cell_data = function(cell_ids, - Names){ - - if(missing(cell_ids)){ - max_cell_id <- self$number_of_cells() - 1 - cell_ids <- seq(0, max_cell_id) - } - - if(missing(Names)){ - Names <- as.character(self$cell_data$keys()) - } - - private$get_data(data_type = "cells", - ids = cell_ids, - Names = Names) - - } - ), - - active = list( - - #'@field vtu_path - #'Getter for private parameter '.vtu_path' - vtu_path = function() { - private$.vtu_path - }, - - #'@field number_of_points - #'Getter for NumberOfPoints parameter of '.vtkUnstructuredGrid' - number_of_points = function() { - self$vtkUnstructuredGrid$GetNumberOfPoints() - }, - - #'@field number_of_cells - #'Getter for NumberOfCells parameter of '.vtkUnstructuredGrid' - number_of_cells = function() { - self$vtkUnstructuredGrid$GetNumberOfCells() - }, - - #'@field points - #'Getter for Points parameter of '.dsa_vtkUnstructuredGrid' - points = function() { - self$dsa_vtkUnstructuredGrid$Points - }, - - #'@field cells - #'Getter for Cells parameter of '.dsa_vtkUnstructuredGrid' - cells = function() { - self$dsa_vtkUnstructuredGrid$Cells - }, - - #'@field field_data - #'Getter for FieldData parameter of '.dsa_vtkUnstructuredGrid' - field_data = function() { - self$dsa_vtkUnstructuredGrid$FieldData - }, - - #'@field point_data - #'Getter for PointData parameter of '.dsa_vtkUnstructuredGrid' - point_data = function() { - self$dsa_vtkUnstructuredGrid$PointData - }, - - #'@field cell_data - #'Getter for CellData parameter of '.dsa_vtkUnstructuredGrid' - cell_data = function() { - self$dsa_vtkUnstructuredGrid$CellData - }, - - #'@field vtkPointLocator - #'Getter for private parameter '.vtkPointLocator' - vtkPointLocator = function() { - private$.vtkPointLocator - }, - - #'@field vtkUnstructuredGrid - #'Access to private parameter '.vtkUnstructuredGrid' - vtkUnstructuredGrid = function(value) { - if(missing(value)) { - private$.vtkUnstructuredGrid - }else{ - # Check class - private$.vtkUnstructuredGrid <- value - private$.dsa_vtkUnstructuredGrid <- - vtk$numpy_interface$dataset_adapter$WrapDataObject(value) - } - }, - - #'@field dsa_vtkUnstructuredGrid - #'Getter for private parameter '.dsa_vtkUnstructuredGrid' - dsa_vtkUnstructuredGrid = function() { - private$.dsa_vtkUnstructuredGrid - } - ), - - private = list( - - get_data = function(data_type, - ids, - Names){ - - assertthat::assert_that(is.numeric(ids)) - assertthat::assert_that(is.character(Names)) - - tbl_rows <- list() - - for (i in seq_len(length(ids))) { - if (data_type == "points") { - data <- self$point_data - coords <- self$get_point_coords(ids[[i]]) - extra_columns <- list(x = coords[[1]], - y = coords[[2]], - z = coords[[3]]) - } else{ - data <- self$cell_data - extra_columns <- list() - } - - new_row <- list(id = ids[[i]]) - new_row <- c(new_row, extra_columns) - - - values <- list() - - for (j in seq_len(length(Names))) { - rid <- ids[[i]] + 1 - - if (length(dim(data[[Names[[j]]]])) == 1) { - value <- data[[Names[[j]]]][[rid]] - - } else{ - value <- list(as.numeric(data[[Names[[j]]]][rid,])) - } - - values <- c(values, list(value)) - names(values)[[length(values)]] <- - Names[[j]] - } - - new_row <- c(new_row, - values) - - new_row <- tibble::as_tibble_row(new_row) - tbl_rows <- c(tbl_rows, list(new_row)) - } - - tbl <- dplyr::bind_rows(tbl_rows) - return(tbl) - }, - - .vtu_path = NULL, - .vtkPointLocator = NULL, - .vtkUnstructuredGrid = NULL, - .dsa_vtkUnstructuredGrid = NULL - ) -) - - -#===== validate_coordinates ===== - - -validate_coordinates <- function(coordinates){ - - 
if(!is.list(coordinates)){ - coordinates <- list(coordinates) - } - - # Check if contents of list are coordinates - lapply(coordinates, function(x){ - assertthat::assert_that(is.numeric(x)) - assertthat::assert_that(length(x) == 3) - }) - - return(coordinates) -} - - -#===== generate_structured_mesh ===== - - -#'generate_structured_mesh -#'@description Wrapper function to call generateStructuredMesh.exe -#' (VTK mesh generator). For full documentation see -#'https://www.opengeosys.org/docs/tools/meshing/structured-mesh-generation/ -#'@param args_str string: The arguments the script will be called with -#'@param ogs_bin_path string: Optional: Path to OpenGeoSys6 bin folder. -#' Defaults to options("r2ogs6.default_ogs_bin_path"). -#'@return string: .vtu file path -#'@export -generate_structured_mesh = function(args_str, - ogs_bin_path) { - - if(missing(ogs_bin_path)){ - ogs_bin_path <- unlist(options("r2ogs6.default_ogs_bin_path")) - } - - assertthat::assert_that(assertthat::is.string(ogs_bin_path)) - assertthat::assert_that(assertthat::is.string(args_str)) - - - # Get .vtu path from args_str - vtu_path <- stringr::str_extract(args_str, "-o [^ ]*") - vtu_path <- stringr::str_remove(vtu_path, "-o ") - - system(command = paste0(ogs_bin_path, - "generateStructuredMesh.exe ", - args_str)) - - return(invisible(vtu_path)) -} diff --git a/vignettes/ensemble_workflow_vignette.Rmd b/vignettes/ensemble_workflow_vignette.Rmd new file mode 100644 index 0000000..f012fa7 --- /dev/null +++ b/vignettes/ensemble_workflow_vignette.Rmd @@ -0,0 +1,313 @@ +--- +title: "Ensemble Guide" +output: rmarkdown::html_vignette +vignette: > + %\VignetteIndexEntry{Ensemble Guide} + %\VignetteEngine{knitr::rmarkdown} + %\VignetteEncoding{UTF-8} +--- + +```{r, include = FALSE} +knitr::opts_chunk$set( + collapse = TRUE, + comment = "#>" +) + +devtools::load_all(".") +``` + +```{r setup} +library(r2ogs6) +library(dplyr) +``` + +## Introduction +Hi there! This is a practical guide on the `OGS6_Ensemble` class from the `r2ogs6` package. I will show you how you can use this class to set up ensemble runs for OpenGeoSys 6, extract the results and plot them. + +If you want to follow this tutorial, you'll need to have `r2ogs6` installed and loaded. The prerequisites for `r2ogs6` are described in detail in the `r2ogs6 User Guide` vignette. + +Additionally, you'll need the following benchmark files: + +* [Theis' Problem](https://gitlab.opengeosys.org/ogs/ogs/-/tree/master/Tests/Data/Parabolic/LiquidFlow/AxiSymTheis) as described [here](https://www.opengeosys.org/docs/benchmarks/liquid-flow/liquid-flow-theis-problem/) + +* [Theis solution for well pumping](https://gitlab.opengeosys.org/ogs/ogs/-/tree/master/Tests/Data/Parabolic/ComponentTransport/Theis) as described [here](https://www.opengeosys.org/docs/benchmarks/hydro-component/theis/hc_theis/) + +General instructions on how to download OpenGeoSys 6 benchmark files can be found [here](https://www.opengeosys.org/docs/userguide/basics/introduction/#download-benchmarks). + +## Theis' problem + +We will consider the following parameters for our sensitivity analysis: + +* `permeability` + +* `porosity` + +* `storage` + + +### Setup + +First, we create a simulation object to base our ensemble on and read in the `.prj` file. 
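+Depending on your setup, this is also a good moment to tell `r2ogs6` where your OpenGeoSys 6 binaries live, since `run_simulation()` falls back to the `r2ogs6.default_ogs_bin_path` option when no path is given explicitly. A minimal sketch - the path below is just a placeholder for your system:
+
+```{r, eval = FALSE}
+# Placeholder path - point this to your OpenGeoSys 6 bin folder
+options(r2ogs6.default_ogs_bin_path = "your/path/to/ogs/bin/")
+```
+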
+ +```{r} +# Change this to fit your system +testdir_path <- system.file("extdata/test_tempdirs/", package = "r2ogs6") +sim_path <- paste0(testdir_path, "/axisym_theis_sim") + +ogs6_obj <- OGS6$new(sim_name = "axisym_theis", + sim_id = 1, + sim_path = sim_path) + +# Change this to fit your system +prj_path <- system.file("examples/Theis_problem/benchmark_files/", + "axisym_theis.prj", package = "r2ogs6") + +read_in_prj(ogs6_obj, prj_path) +``` + +We already know *which* parameters we want to change between the different simulations. But before we can create an ensemble, we need to know what their respective *values* should be. Say we don't want to hardcode each value, but instead examine the effects of changing our parameters by 1%, 10% and 50%. Below I've supplied a little function that simply takes a value and creates a tibble based on the given `deviations`. + +```{r} +# This returns a df so we can keep the data together for plotting +get_values <- function(value, + deviations = c(-0.5, -0.1, -0.01, 0, 0.01, 0.1, 0.5), + zero_to_one = FALSE){ + + values_and_devs <- tibble::tibble(value = numeric(), + dev = numeric()) + + for(i in seq_len(length(deviations))){ + if(zero_to_one && + !dplyr::between((value + (value * deviations[[i]])), 0, 1)){ + next + } + + values_and_devs <- + add_row(values_and_devs, + value = value + (value * deviations[[i]]), + dev = deviations[[i]]) + } + + return(invisible(values_and_devs)) +} + +# demo +paste(get_values(100)$value, collapse = " ") +``` +We can now use this function for a minimal working example. Let's set up an ensemble with only the `storage` parameter. First we create the corresponding vector... + +```{r} +# Create the tibble based on the original value +storage <- get_values(ogs6_obj$media[[1]]$properties[[4]]$value) + +paste(storage$value, collapse = " ") +``` +... then we pass the original reference as well as the vector to an ensemble object. + +```{r} +# First we define an ensemble object +ogs6_ens <- + OGS6_Ensemble$new( + ogs6_obj = ogs6_obj, + parameters = list(list(ogs6_obj$media[[1]]$properties[[4]]$value, + storage$value))) + +# Now we run the simulations +exit_codes <- ogs6_ens$run_simulation() +paste(exit_codes, collapse = " ") +``` + + +### Plot results +#### Single simulation + +Our simulations (hopefully) ran successfully - great! Now it'd be nice to see some results. Say we're interested in the `pressure` data of the first simulation. + +```{r} +# This will get a list of tibbles - one for each simulation. +storage_tbls <- + lapply(ogs6_ens$ensemble, function(x){ + x$pvds[[1]]$get_point_data(point_ids = c(0, 1, 2), + Names = c("pressure")) + }) +``` + +You may leave out the `point_ids` parameter. It defaults to all points in the dataset - I specify it here because there's about 500 which would slow down building this vignette significantly. + +```{r} +# Let's look at the first 10 rows of the dataset for the first simulation. +head(storage_tbls[[1]], 10) +``` + +You can see there's one row per timestep for each point ID. Say we want to plot the *average* pressure for each `timestep`, that is, the mean of the `pressure` values where the rows have the same `timestep`. 
+ + +```{r} +# Get average pressure from first dataframe +avg_pr_df <- storage_tbls[[1]] %>% + group_by(timestep) %>% + summarise(avg_pressure = mean(pressure)) +avg_pr_df +``` + + +```{r} +# Plot pressure over time for first simulation +ggplot(data = avg_pr_df) + + geom_point(mapping = aes(x = as.numeric(row.names(avg_pr_df)), + y = avg_pressure)) + xlab("Timestep") + + +``` +#### Multiple simulations + +```{r} +# Get list of dataframes that each look like avg_pr_df +avg_pr_dfs <- list() + +for(i in seq_len(length(storage_tbls))){ + old_df <- storage_tbls[[i]] + new_df <- old_df %>% + group_by(timestep) %>% + summarise(avg_pressure = mean(pressure)) + + # Add column so we can separate the values based on their old df + new_df$df_id <- rep(i, nrow(new_df)) + + # Add column so we can plot based on old df rownames + new_df$orig_rwns <- as.numeric(row.names(new_df)) + + avg_pr_dfs <- c(avg_pr_dfs, list(new_df)) +} + +# Combine dataframes into single dataframe for plot +avg_pr_combined_df <- bind_rows(avg_pr_dfs) + +# Make plot +ggplot(avg_pr_combined_df, + aes(x = orig_rwns, + y = avg_pressure)) + + geom_point(aes(color = as.factor(df_id))) + + geom_line(aes(color = as.factor(df_id))) + + xlab("Timestep") + + labs(color = "sim id") + + facet_grid(rows = vars(df_id)) + +``` + +Now we have the average `pressure` over time for each of the simulations (rows). Since they're pretty similar, let's put them in the same plot for a better comparison. + +```{r} +ggplot(avg_pr_combined_df, aes(x = orig_rwns, + y = avg_pressure, + group = df_id)) + + geom_point(aes(color = as.factor(df_id))) + + geom_line(aes(color = as.factor(df_id))) + + labs(color = "sim id") + + xlab("Timestep") +``` + + +#### Matrix plot + +So far we've only considered the `storage` parameter. Now we want to have a look at how the other two parameters influence the simulation. We will use our `get_values` function again and keep the default deviations. + +```{r} +permeability <- get_values(ogs6_obj$media[[1]]$properties[[1]]$value) + +# porosity will have length 4 because its value range is between 0 and 1 +porosity <- get_values(ogs6_obj$media[[1]]$properties[[3]]$value, + zero_to_one = TRUE) +``` + +Now we'll put them into an ensemble together. This time it's important we set the `OGS6_Ensemble` parameter `sequential_mode` to `TRUE` as this will change the supplied parameters *sequentially* which means in the end we have 19 (7 + 7 + 4) simulations which is equal to the sum of elements in the value vectors you supply (I've named them `values` below for clarity, naming them is optional though). + +The default `FALSE` would give an error message because our value vectors do not have the same length and even if they had, it wouldn't do what we want - the number of simulations would equal the length of *one* value vector (thus requiring them to be of the same length). Generally, set `sequential_mode` to `TRUE` if you want to examine the influence of parameters on a simulation *independently*. If you want to examine how the parameters influence *each other* as in wanting to test parameter combinations, the default mode is the way to go. 
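+For contrast, here is a small sketch of what feeding the *default* combination mode could look like. It is not run and only meant as an illustration: the equal-length value vectors are built as a full grid with `expand.grid()` (49 runs for two parameters with 7 values each), assuming the constructor accepts several parameters in default mode just as the earlier version of this vignette did.
+
+```{r, eval = FALSE}
+# Illustration only (not run): default mode pairs the i-th value of every
+# vector, so a full parameter grid has to be built beforehand
+grid <- expand.grid(per = permeability$value,
+                    sto = storage$value)
+
+ogs6_ens_grid <-
+    OGS6_Ensemble$new(
+        ogs6_obj = ogs6_obj,
+        parameters = list(list(ogs6_obj$media[[1]]$properties[[1]]$value,
+                               grid$per),
+                          list(ogs6_obj$media[[1]]$properties[[4]]$value,
+                               grid$sto)))
+```
+
+The actual ensemble for this vignette, run with `sequential_mode = TRUE`, follows below.
+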
+ +```{r} +# Change this to fit your system +testdir_path <- system.file("extdata/test_tempdirs/", package = "r2ogs6") +sim_path <- paste0(testdir_path, "/axisym_theis_sim_big") + +ogs6_obj$sim_path <- sim_path + +ogs6_ens_big <- + OGS6_Ensemble$new( + ogs6_obj = ogs6_obj, + parameters = list(per = list(ogs6_obj$media[[1]]$properties[[1]]$value, + values = permeability$value), + por = list(ogs6_obj$media[[1]]$properties[[3]]$value, + values = porosity$value), + sto = list(ogs6_obj$media[[1]]$properties[[4]]$value, + values = storage$value)), + sequential_mode = TRUE) + +exit_codes <- ogs6_ens_big$run_simulation() +paste(exit_codes) +``` + +This will take a short time. As soon as the simulations are done, we can extract the point data much like we did before. This time we want to plot the point x coodinates on the x axis so we're leaving out `point_ids` to get all points. Also we just want the data from the last timestep. + +```{r} +# This will get a list of tibbles - one for each simulation. +per_por_sto_tbls <- + lapply(ogs6_ens_big$ensemble, function(x){ + x$pvds[[1]]$get_point_data(Names = c("pressure"), + start_at_timestep = x$pvds[[1]]$last_timestep) + }) +``` + +Plotting time? Not quite. We can't really differentiate what values were used where yet. Since we set `sequential_mode` to `TRUE`, `OGS_Ensemble` allows us to utilize `relevant_parameter_at()` to fix this. It looks up which value vector the parameter we changed for each simulation came from. We will also put in an extra column `dev` so we can group by deviations. + +```{r} +# Combine deviation columns +deviations_comb <- c(permeability$dev, porosity$dev, storage$dev) + +# Get list of dataframes +x_pr_dfs <- list() + +for(i in seq_len(length(per_por_sto_tbls))){ + new_df <- per_por_sto_tbls[[i]] + + # Add name and deviation column + new_df$name <- ogs6_ens_big$relevant_parameter_at(i) + new_df$dev <- deviations_comb[[i]] + + x_pr_dfs <- c(x_pr_dfs, list(new_df)) +} + +# Combine dataframes into single dataframe +x_pr_combined_df <- bind_rows(x_pr_dfs) +``` + +*Now* it's plotting time. We group our variables corresponding to the deviations and then use a facet grid with the `name` parameter we added to the dataframes. + +```{r} +# Make plot +ggplot(x_pr_combined_df, + aes(x = x, + y = pressure, + group = dev)) + + geom_point(aes(color = as.factor(dev))) + + xlab("x point coordinate") + + labs(color = "%") + + facet_grid(cols = vars(name)) +``` +Ta-Daa! All nicely grouped. We can see `permeability` has the most influence on the pressure. Though they may seem suspicious, `porosity` and `storage` are being plotted correctly - the points are just being placed right on top of each other. Since `porosity` can't go over the value `1` which was the original value, our value vector only went from -50% to 0% which is why the line colors of `porosity` and `storage` differ. 
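+If the overlap looks too suspicious, a quick sketch like the following can back that claim up using the columns we just added: at 0% deviation every parameter keeps its original value, so the three runs should report the same pressures.
+
+```{r}
+# Sketch: at dev == 0 all three runs use the original parameter values,
+# so their mean pressures should coincide (hence the overlapping points)
+x_pr_combined_df %>%
+    filter(dev == 0) %>%
+    group_by(name) %>%
+    summarise(mean_pressure = mean(pressure))
+```
+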
+ + +## Theis solution for well pumping + +We will consider the following parameters for our sensitivity analysis: + +* `permeability` + +* `porosity` + +* `slope` + + +setup + +plot single + diff --git a/vignettes/theis_vignette.Rmd b/vignettes/theis_vignette.Rmd deleted file mode 100644 index d5a6e4d..0000000 --- a/vignettes/theis_vignette.Rmd +++ /dev/null @@ -1,301 +0,0 @@ ---- -title: "Ensemble Guide" -output: rmarkdown::html_vignette -vignette: > - %\VignetteIndexEntry{Ensemble Guide} - %\VignetteEngine{knitr::rmarkdown} - %\VignetteEncoding{UTF-8} ---- - -```{r, include = FALSE} -knitr::opts_chunk$set( - collapse = TRUE, - comment = "#>" -) -``` - -```{r setup} -library(r2ogs6) -library(tidyverse) -``` - -## Hi there! -hi this is a guide on the theis problem. - -Instructions on how to download benchmark files can be found [here](https://www.opengeosys.org/docs/userguide/basics/introduction/#download-benchmarks). - -I will - - - -## Theis' problem - - -This problem is described in detail [here](https://www.opengeosys.org/docs/benchmarks/liquid-flow/liquid-flow-theis-problem/). At the top of the page, there is a link which will take you to the benchmark files you need. - -We will consider the following parameters for our sensitivity analysis: - -* `permeability` - -* `porosity` - -* `storage` - - -### Setup - -First, we create a simulation object to base our ensemble on and read in the `.prj` file. - -```{r} -# Change this to fit your system -testdir_path <- system.file("extdata/test_tempdirs/", package = "r2ogs6") -sim_path <- paste0(testdir_path, "/axisym_theis_sim") - -ogs6_obj <- OGS6$new(sim_name = "axisym_theis", - sim_id = 1, - sim_path = sim_path) - -# Change this to fit your system -prj_path <- system.file("examples/Theis_problem/benchmark_files/", - "axisym_theis.prj", package = "r2ogs6") - -read_in_prj(ogs6_obj, prj_path) -``` - -We already know *which* parameters we want to change between the different simulations. But before we can create an ensemble, we need to know what their respective *values* should be. Say we don't want to hardcode each value, but instead examine the effects of changing our parameters by 1%, 10% and 50%. Below I've supplied a little function that simply takes a value and creates a numeric vector based on the given `deviations`. - -```{r} -get_values <- function(value, - deviations = c(-0.5, -0.1, -0.01, 0, 0.01, 0.1, 0.5), - zero_to_one = FALSE){ - - values <- lapply(deviations, function(x){ - value + (value * x) - }) - - if(zero_to_one){ - values <- values[dplyr::between(values, 0, 1)] - } - - return(invisible(values)) -} - -# demo -paste(get_values(100), collapse = " ") -``` -We can now use this function for a minimal working example. Let's set up an ensemble with only the `storage` parameter. First we create the corresponding vector... - -```{r} -# Create the vector based on the original value -storage <- get_values(ogs6_obj$media[[1]]$properties[[4]]$value) -paste(storage, collapse = " ") -``` -... then we pass the original reference as well as the vector to an ensemble object. - -```{r} -# First we define an ensemble object -ogs6_ens <- - OGS6_Ensemble$new( - ogs6_obj = ogs6_obj, - parameters = list(list(ogs6_obj$media[[1]]$properties[[4]]$value, storage)), - use_original_values = FALSE) - -# Now we run the simulations -ogs6_ens$run_simulation() -``` - - -### Plot results -#### Single simulation - -Our simulations (hopefully) ran successfully - great! Now it'd be nice to see some results. 
For now, we're interested in the `pressure` and `v`elocity data of the first simulation. - -```{r} -# This will get a list of tibbles - one for each simulation. -storage_tbls <- - lapply(ogs6_ens$ensemble, function(x){ - x$pvds[[1]]$get_point_data(point_ids = c(0, 1, 2), - Names = c("pressure", "v")) - }) -``` - -You may leave out the `point_ids` parameter. It defaults to all points in the dataset - I specify it here because there's about 500 which would slow down building this vignette significantly. - -```{r} -# Let's look at the first 10 rows of the dataset for the first simulation. -head(storage_tbls[[1]], 10) -``` - -You can see there's one row per timestep for each point ID. Say we want to plot the *average* pressure for each `timestep`, that is, the mean of the `pressure` values where the rows have the same `timestep`. - - -```{r} -# Get average pressure from first dataframe -avg_pr_df <- storage_tbls[[1]] %>% - group_by(timestep) %>% - summarise(avg_pressure = mean(pressure)) -avg_pr_df -``` - - -```{r} -# Plot pressure over time for first simulation -ggplot(data = avg_pr_df) + - geom_point(mapping = aes(x = as.numeric(row.names(avg_pr_df)), - y = avg_pressure)) + xlab("Timestep") - - -``` -#### Multiple simulations - -```{r} -# Get list of dataframes that each look like avg_pr_df -avg_pr_dfs <- list() - -for(i in seq_len(length(storage_tbls))){ - old_df <- storage_tbls[[i]] - new_df <- old_df %>% - group_by(timestep) %>% - summarise(avg_pressure = mean(pressure)) - - # Add column so we can separate the values based on their old df - new_df$df_id <- rep(i, nrow(new_df)) - - # Add column so we can plot based on old df rownames - new_df$orig_rwns <- as.numeric(row.names(new_df)) - avg_pr_dfs <- c(avg_pr_dfs, list(new_df)) -} - -# Combine dataframes into single dataframe for plot -avg_pr_combined_df <- bind_rows(avg_pr_dfs) - -# Make plot -ggplot(avg_pr_combined_df, - aes(x = orig_rwns, - y = avg_pressure)) + - geom_point(aes(color = df_id)) + - geom_line(aes(color = df_id)) + - xlab("Timestep") + - facet_grid(rows = vars(df_id)) - -``` - -Now we have the average `pressure` over time for each of the simulations (rows). Since they're pretty similar, let's put them in the same plot for a better comparison. - -```{r} -ggplot(avg_pr_combined_df, aes(x = orig_rwns, - y = avg_pressure, - group = df_id)) + - geom_point(aes(color = df_id)) + - geom_line(aes(color = df_id)) + - xlab("Timestep") -``` - - -#### Matrix plot - -So far we've only considered the `storage` parameter. Now we want to have a look at how the other two parameters influence the simulation and each other. Our `storage` vector had 7 values. Assuming the same length for `permeability` and `porosity` would lead us to a whopping 343 combinations, so we will use our `get_values` function to create smaller vectors. Say we choose a length of 4 with deviations of -1%, -10% and -50% as well as the original value. 
- -```{r} - -new_deviations <- c(-0.5, -0.1, -0.01, 0) - -permeability <- get_values(ogs6_obj$media[[1]]$properties[[1]]$value, - new_deviations) -porosity <- get_values(ogs6_obj$media[[1]]$properties[[3]]$value, - new_deviations) -storage <- get_values(ogs6_obj$media[[1]]$properties[[4]]$value, - new_deviations) - -# Total length of each vector we want to pass to the ensemble -vec_len <- length(permeability) * length(porosity) * length(storage) - -# Replicate elements based on vec_len -per_long <- rep(permeability, (vec_len / length(permeability))) -por_long <- rep(porosity, (vec_len / length(porosity))) -sto_long <- rep(storage, (vec_len / length(storage))) -``` - -Now we take these long vectors and feed them to a new ensemble. We can base this on the same `OGS6` object as before since our project file hasn't changed. - -```{r} -ogs6_ens_big <- - OGS6_Ensemble$new( - ogs6_obj = ogs6_obj, - parameters = list(list(ogs6_obj$media[[1]]$properties[[1]]$value, per_long), - list(ogs6_obj$media[[1]]$properties[[3]]$value, por_long), - list(ogs6_obj$media[[1]]$properties[[4]]$value, sto_long)), - use_original_values = FALSE) - -#ogs6_ens_big$run_simulation() -``` - -This will take a while on most systems since we're creating 64 objects and running the same number of simulations. Go grab a coffee (inclusive-)or two. As soon as the simulations are done, we can extract the point data just as we did before. - -```{r} -# This will get a list of tibbles - one for each simulation. -per_por_sto_tbls <- - lapply(ogs6_ens$ensemble, function(x){ - x$pvds[[1]]$get_point_data(point_ids = c(0, 1, 2), - Names = c("pressure", "v")) - }) -``` - -Plotting time! Well, not quite. This time we have to think a bit more about how we plot. There's 3 parameters now and we replicated their original vectors to get our combinations so our ensemble is all over the place. We'll start by putting the data from the tibbles we extracted into a combined dataframe first like we did in the `storage`-only run. - -```{r} -# Get list of dataframes -avg_pr_dfs <- list() - -for(i in seq_len(length(per_por_sto_tbls))){ - old_df <- per_por_sto_tbls[[i]] - new_df <- old_df %>% - group_by(timestep) %>% - summarise(avg_pressure = mean(pressure)) - - # Add column so we can separate the values based on their old df - new_df$df_id <- rep(i, nrow(new_df)) - - # Add column so we can plot based on old df rownames - new_df$orig_rwns <- as.numeric(row.names(new_df)) - avg_pr_dfs <- c(avg_pr_dfs, list(new_df)) -} - -# Combine dataframes into single dataframe for plot -avg_pr_combined_df <- bind_rows(avg_pr_dfs) - -``` - - - -```{r} -# # Make plot -# ggplot(avg_pr_combined_df, -# aes(x = orig_rwns, -# y = avg_pressure)) + -# geom_point(aes(color = df_id)) + -# geom_line(aes(color = df_id)) + -# xlab("Timestep") + -# facet_grid(rows = vars(df_id)) -``` - - -## Theis solution for well pumping - -.... - -This problem is described in detail [here](https://www.opengeosys.org/docs/benchmarks/hydro-component/theis/hc_theis/). At the top of the page, there is a link which will take you to the benchmark files you need. - -We will consider the following parameters for our sensitivity analysis: - -* `permeability` - -* `porosity` - -* `slope` - - -setup - -plot single - -- GitLab