Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Patch code coverage #51

Merged
merged 16 commits into from
Jun 25, 2024
7 changes: 4 additions & 3 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Package: phenor
Title: An R phenology Modelling Framework
Version: 1.3.2
Version: 1.4
Authors@R: c(person(
family = "Hufkens",
given = "Koen",
Expand All @@ -22,7 +22,6 @@ Imports:
stats,
GenSA,
BayesianTools,
rgenoud,
httr,
rvest,
jsonlite,
Expand All @@ -39,12 +38,14 @@ Imports:
Suggests:
knitr,
covr,
rmarkdown,
testthat
Remotes:
github::florianhartig/BayesianTools/BayesianTools
License: AGPL-3
LazyData: true
ByteCompile: true
RoxygenNote: 7.2.3
RoxygenNote: 7.3.1
URL: https://github.com/bluegreen-labs/phenor
BugReports: https://github.com/bluegreen-labs/phenor/issues
VignetteBuilder: knitr
3 changes: 1 addition & 2 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,6 @@ export(pr_fm_be)
export(pr_fm_cmip)
export(pr_fm_csv)
export(pr_fm_daymet_tiles)
export(pr_fm_eobs)
export(pr_fm_era5)
export(pr_fm_gee)
export(pr_fm_modis)
Expand All @@ -66,7 +65,7 @@ export(pr_plot_arrows)
export(pr_plot_comparison)
export(pr_predict)
export(rmse)
export(rotate_cmip5)
export(rotate_cmip)
export(shape_model_output)
export(triangular_temperature_response)
import(graphics)
Expand Down
9 changes: 9 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,12 @@
# version 1.4

* CRAN release
* removed rgenoud support
* removed E-OBS support
* fix VP issue
* increase code coverage
* remaining {terra} and {sf} conversions

# version 1.3.2

* provisional CMIP6 support
Expand Down
2 changes: 1 addition & 1 deletion R/cost_functions.r
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ cvmae <- function(
#'
#' The function is aimed to be maximized, to use it with optimizers which
#' minimize cost functions wrap the function as such:
#' `cost = function(...){abs(likelihood(...))}`
#' `cost = function(...)\{abs(likelihood(...))\}`
#'
#' @param par a vector of parameter values, this is functions specific
#' @param data nested data structure with validation data as returned
Expand Down
6 changes: 3 additions & 3 deletions R/helper_functions.R
Original file line number Diff line number Diff line change
Expand Up @@ -38,21 +38,21 @@ AICc <- function(measured, predicted, k){
"AICc" = AICc))
}

#' Rotate CMIP5 NetCDF data cubes
#' Rotate CMIP NetCDF data cubes
#'
#' Rotate NetCDF files faster than the default
#' raster rotate() command, as data is cropped
#' before any translations (reducing memory load)
#'
#' @param r CMIP5 raster layer, stack or brick
#' @param r CMIP raster layer, stack or brick
#' @param extent vector with coordinates defining the region of interest defined
#' as xmin, xmax, ymin, ymax in lat/lon (default = c(-74,-65, 40, 48))
#' @return Returns raster data in the same format (layer, stack, brick) as
#' the input, but rotated from a climatological longitude (0 - 360) format to
#' a normal longitude format (-180 to 180).
#' @export

rotate_cmip5 <- function(
rotate_cmip <- function(
r = NULL,
extent = c(-74, -65, 40, 48)
){
Expand Down
7 changes: 0 additions & 7 deletions R/phenor.r

This file was deleted.

4 changes: 2 additions & 2 deletions R/pr_dl_be.R
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@ pr_dl_be <- function(
){

# set server
server = "http://berkeleyearth.lbl.gov/auto/Global/Gridded"
server <- "https://berkeley-earth-temperature.s3.us-west-1.amazonaws.com/Global/Gridded"

# set the decadal splits
split = seq(1880,as.numeric(format(Sys.Date(),"%Y")),10)
split <- seq(1880,as.numeric(format(Sys.Date(),"%Y")),10)

# download 2 files if on split decade
if (year %in% split){
Expand Down
2 changes: 1 addition & 1 deletion R/pr_dl_cmip.R
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
#' (default = c("daily_maximum_near_surface_air_temperature",
#' "daily_minimum_near_surface_air_temperature",
#' "precipitation"))
#' @param time_out time out in seconds before the {ecmwfr} download returns
#' @param time_out time out in seconds before the \{ecmwfr\} download returns
#' to the prompt retaining the request running on the CDS server.
#' Data can be retrieved later from the CDS webpage once finished.
#' By default the time-out is set to 3600 seconds (1h). It is advised to
Expand Down
2 changes: 1 addition & 1 deletion R/pr_dl_era5.R
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
#' data for the year of interest and the preceding year to make predictions.
#' When specifying one year the preceding year will be appended, when
#' specifying multiple years no checks are in place!!
#' @param time_out time out in seconds before the {ecmwfr} download returns
#' @param time_out time out in seconds before the \{ecmwfr\} download returns
#' to the prompt retaining the request running on the CDS server.
#' Data can be retrieved later from the CDS webpage once finished.
#' By default the time-out is set to 3600 seconds (1h). It is advised to
Expand Down
16 changes: 9 additions & 7 deletions R/pr_dl_npn.R
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,10 @@ pr_dl_npn <- function(
request = "phenor"

# set url base
url = "http://www.usanpn.org/npn_portal/observations/getSummarizedData.json?"
url <- "http://www.usanpn.org/npn_portal/observations/getSummarizedData.json?"

# formulate the query
query = list(species_id = species,
query <- list(species_id = species,
phenophase_id = phenophase,
start_date = start,
end_date = end,
Expand All @@ -50,12 +50,14 @@ pr_dl_npn <- function(
request_src = request)

# download data using httr
data = httr::GET(url,
query = query,
httr::progress())
data <- httr::GET(
url,
query = query,
httr::progress()
)

# convert data to a clean data frame
data = as.data.frame(jsonlite::fromJSON(httr::content(data, as = "text")))
data <- as.data.frame(jsonlite::fromJSON(httr::content(data, as = "text")))

# check if the data is valid (contains data)
# if so return the data frame or write to disk for later processing
Expand All @@ -67,7 +69,7 @@ pr_dl_npn <- function(

# sprintf doesn't deal well with NULL
if(is.null(phenophase)){
phenophase = "NA"
phenophase <- "NA"
}

# write data to file as R data file
Expand Down
2 changes: 1 addition & 1 deletion R/pr_fit_comparison.R
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ pr_fit_comparison <- function(
control = control
))

if(inherits("try-error", par)) {
if(inherits(par, "try-error")) {
return(NULL)
}

Expand Down
23 changes: 0 additions & 23 deletions R/pr_fit_parameters.R
Original file line number Diff line number Diff line change
Expand Up @@ -77,29 +77,6 @@ pr_fit_parameters <- function(
)
}

if (tolower(method) == "genoud"){
# optimize model parameters using the
# GENetic Optimization Using Derivatives
# needs more tweaking to work out of the box
# on most models

# stop if no starting parameters are provided
if (is.null(par)){
stop('The genoud algorithm needs defined strating parameters!')
}
optim_par <- rgenoud::genoud(
fn = cost,
nvars = length(par),
max.generations = control$maxit,
Domains = cbind(lower,upper),
boundary.enforcement = 2,
data.type.int = FALSE,
data = data,
model = model,
...
)
}

# BayesianTools
if (tolower(method) == "bayesiantools"){

Expand Down
72 changes: 40 additions & 32 deletions R/pr_fm_be.R
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
#' # run with default settings
#' # extracts values of the referenced publication
#' # see github README
#' be_data <- pr_fm_be()
#' be_data <- pr_fm_be(
#' path = "/your/path",
#' year = 2010)
#'}

# create subset of layers to calculate phenology model output on
Expand All @@ -28,6 +30,13 @@ pr_fm_be <- function(
internal = TRUE
) {

# crop data if necessary, no checks yet
if (!is.null(extent)){
if(length(extent) != 4){
stop("not enough coordinate points to properly crop data!")
}
}

# download all data if it doesn't exist
pr_dl_be(path = path, year = year)

Expand All @@ -48,21 +57,28 @@ pr_fm_be <- function(

# grab raster data from netcdf files, with the climatology
# the long term mean and delta the differences
climatology <- raster::brick(sprintf("%s/%s",path,filenames[1]),
varname = "climatology")
delta <- do.call("stack",
be_raster <- terra::rast(file.path(path,filenames[1]))
be_raster <- terra::crop(be_raster, terra::ext(extent))

climatology <- terra::subset(
be_raster, grepl("climatology", names(be_raster)))

delta <- do.call("c",
lapply(filenames, FUN = function(x){
raster::brick(sprintf("%s/%s",path,x),
varname = "temperature")}))
r <- terra::rast(file.path(path,x))
r <- terra::crop(r, terra::ext(extent))
terra::subset(r, grepl("temperature",names(r)))
})
)

# get date elements from netcdf files
years <- do.call("c",lapply(filenames,FUN = function(x){
nc <- ncdf4::nc_open(sprintf("%s/%s",path,x))
nc <- ncdf4::nc_open(file.path(path, x))
ncdf4::ncvar_get(nc,"year")
}))

yday <- do.call("c",lapply(filenames,FUN = function(x){
nc <- ncdf4::nc_open(sprintf("%s/%s",path,x))
nc <- ncdf4::nc_open(file.path(path, x))
ncdf4::ncvar_get(nc,"day_of_year")
}))

Expand All @@ -77,17 +93,7 @@ pr_fm_be <- function(
}

# subset raster data, do this before cropping to make things faster
delta <- raster::subset(delta, layers)

# crop data if necessary, no checks yet
if (!is.null(extent)){
if(length(extent)==4){
climatology = raster::crop(climatology, raster::extent(extent))
delta = raster::crop(delta, raster::extent(extent))
} else {
stop("not enough coordinate points to properly crop data!")
}
}
delta <- terra::subset(delta, layers)

# shift data when offset is < 365
if (offset < length(layers)){
Expand All @@ -98,27 +104,29 @@ pr_fm_be <- function(

# calculate absolute temperatures not differences
# with the mean
temperature <- raster::stackApply(raster::stack(delta, climatology),
indices = c(doy,1:length(layers)),
fun = sum )
temperature <- terra::tapp(
c(delta, climatology),
index = c(doy,1:length(layers)),
fun = sum
)

# fill values
temperature[temperature == 0] <- NA

# grab coordinates
cells <- terra::cells(be_raster$land_mask)
location <- t(terra::xyFromCell(be_raster$land_mask, cells)[,2:1])

# convert temperature data to matrix
Ti <- t(raster::as.matrix(temperature))
Ti <- t(terra::as.matrix(temperature))

# extract georeferencing info to be passed along
ext <- raster::extent(temperature)
proj <- raster::projection(temperature)
ext <- terra::ext(temperature)
proj <- terra::crs(temperature)
size <- dim(temperature)

cat("calculating daylength \n")

# grab coordinates
location <- sp::SpatialPoints(sp::coordinates(temperature),
proj4string = sp::CRS(proj))
location <- t(sp::spTransform(location,
sp::CRS("+init=epsg:4326"))@coords[,2:1])

# create doy vector
if (offset < length(layers)){
doy <- c(offset:length(layers),1:(offset - 1))
Expand All @@ -134,7 +142,7 @@ pr_fm_be <- function(
Li <- t(do.call("rbind",Li))

# recreate the validation data structure (new format)
# but with concatted data
# but with concatenated data
data <- list("site" = NULL,
"location" = location,
"doy" = doy,
Expand Down
Loading
Loading