Explicitly Provide Name Repair Argument to as_tibble
Provide this argument in an attempt to fix issues like the one mentioned in #124 where the sf_query() function bombs out because of duplicated column names in the tibble.
StevenMMortimer committed Sep 17, 2022
1 parent e206c12 commit 9ec86a5
Showing 7 changed files with 19 additions and 20 deletions.
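The underlying failure is reproducible with tibble alone: the default name repair, "check_unique", errors when column names are duplicated, which is what happens when a query returns repeated field names. A minimal sketch of the before/after behavior (the record values are hypothetical, not taken from #124):

```r
library(tibble)

# A parsed record with a duplicated field name, as sf_query() can produce
rec <- list(Id = "001", Name = "Acme", Name = "Acme Holdings")

# Default name repair ("check_unique") refuses duplicated names:
# as_tibble_row(rec)
#> Error: Names must be unique.

# With .name_repair = "unique", duplicates are suffixed instead of erroring:
as_tibble_row(rec, .name_repair = "unique")
#> # A tibble: 1 × 3
#>   Id    Name...2 Name...3
#>   <chr> <chr>    <chr>
#> 1 001   Acme     Acme Holdings
```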
2 changes: 1 addition & 1 deletion R/analytics-report.R
@@ -734,7 +734,7 @@ sf_execute_report <- function(report_id,
   if(async){
     response_parsed <- response_parsed %>%
       set_null_elements_to_na_recursively() %>%
-      as_tibble_row() %>%
+      as_tibble_row(.name_repair = "unique") %>%
       mutate(across(any_of(c("completionDate", "requestDate")),
                     ~parse_datetime(as.character(.x)))) %>%
       type_convert(col_types = cols(.default = col_guess())) %>%
4 changes: 2 additions & 2 deletions R/attachments.R
@@ -303,7 +303,7 @@ sf_create_attachment_rest <- function(attachment_input_data,
     resultset <- safe_bind_rows(list(resultset, fromJSON(sprintf("[%s]", response_parsed))))
   }
   resultset <- resultset %>%
-    as_tibble() %>%
+    as_tibble(.name_repair = "unique") %>%
     sf_reorder_cols() %>%
     sf_guess_cols()
   return(resultset)
@@ -553,7 +553,7 @@ sf_update_attachment_rest <- function(attachment_input_data,
     resultset <- safe_bind_rows(list(resultset, this_resultset))
   }
   resultset <- resultset %>%
-    as_tibble() %>%
+    as_tibble(.name_repair = "unique") %>%
     sf_reorder_cols() %>%
     sf_guess_cols()
   return(resultset)
12 changes: 6 additions & 6 deletions R/bulk-operation.R
@@ -324,7 +324,7 @@ sf_create_job_bulk_v2 <- function(operation = c("insert", "delete",
   }
   catch_errors(httr_response)
   response_parsed <- content(httr_response, encoding="UTF-8")
-  job_info <- as_tibble(response_parsed)
+  job_info <- as_tibble(response_parsed, .name_repair = "unique")
   return(job_info)
 }
 
@@ -374,14 +374,14 @@ sf_get_job_bulk <- function(job_id,
     } else if(grepl('json', content_type)){
       response_parsed <- content(httr_response, as='parsed', type="application/json", encoding="UTF-8")
       response_parsed[sapply(response_parsed, is.null)] <- NA
-      job_info <- as_tibble(response_parsed)
+      job_info <- as_tibble(response_parsed, .name_repair = "unique")
     } else {
       message(sprintf("Unhandled content-type: %s", content_type))
       job_info <- content(httr_response, as='parsed', encoding="UTF-8")
     }
   } else if(api_type == "Bulk 2.0"){
     response_parsed <- content(httr_response, encoding="UTF-8")
-    job_info <- as_tibble(response_parsed)
+    job_info <- as_tibble(response_parsed, .name_repair = "unique")
   } else {
     catch_unknown_api(api_type, c("Bulk 1.0", "Bulk 2.0"))
   }
@@ -444,7 +444,7 @@ sf_get_all_jobs_bulk <- function(parameterized_search_list =
 
   if(length(response_parsed$records) > 0){
     resultset <- response_parsed$records %>%
-      map_df(as_tibble) %>%
+      map_df(~as_tibble(.x, .name_repair = "unique")) %>%
       mutate_all(as.character)
   } else {
     resultset <- tibble()
@@ -528,7 +528,7 @@ sf_get_all_query_jobs_bulk <- function(parameterized_search_list =
 
   if(length(response_parsed$records) > 0){
     resultset <- response_parsed$records %>%
-      map_df(as_tibble) %>%
+      map_df(~as_tibble(.x, .name_repair = "unique")) %>%
       mutate_all(as.character)
   } else {
     resultset <- tibble()
@@ -1179,7 +1179,7 @@ sf_batch_details_bulk <- function(job_id, batch_id,
   }
 
   res <- res %>%
-    as_tibble() %>%
+    as_tibble(.name_repair = "unique") %>%
     sf_reorder_cols() %>%
     sf_guess_cols(TRUE)
 
3 changes: 2 additions & 1 deletion R/describe-metadata.R
@@ -57,7 +57,8 @@ sf_describe_metadata <- function(verbose=FALSE){
     xml_ns_strip() %>%
     xml_find_all('.//result/partialSaveAllowed|.//result/testRequired') %>%
     map_dfc(.f=function(x){
-      as_tibble(t(unlist(as_list(read_xml(as(object=x, Class="character"))))))
+      as_tibble(t(unlist(as_list(read_xml(as(object=x, Class="character"))))),
+                .name_repair = "unique")
     })
   # add the organizationNamespace separately since it may be null
   organization_namespace <- response_parsed %>%
3 changes: 2 additions & 1 deletion R/retrieve-metadata.R
@@ -183,7 +183,8 @@ sf_retrieve_metadata_check_status <- function(id,
     xml_ns_strip() %>%
     xml_find_all('.//result/id|.//result/status|.//result/success|.//result/done') %>%
     map_dfc(.f=function(x){
-      as_tibble(t(unlist(as_list(read_xml(as(object=x, Class="character"))))))
+      as_tibble(t(unlist(as_list(read_xml(as(object=x, Class="character"))))),
+                .name_repair = "unique")
     })
   summary_elements$fileProperties <- list(file_properties)
 
2 changes: 1 addition & 1 deletion R/utils-httr.R
@@ -205,7 +205,7 @@ sf_rest_list <- function(url,
   }
   if(length(records_list) > 0){
     response_parsed <- records_list %>%
-      map_df(as_tibble) %>%
+      map_df(~as_tibble(.x, .name_repair = "unique")) %>%
       type_convert(col_types = cols(.default = col_guess()))
   } else {
     response_parsed <- tibble()
13 changes: 5 additions & 8 deletions R/utils-query.R
@@ -20,7 +20,7 @@ flatten_tbl_df <- function(x){
   x_tbl <- x %>%
     list_modify("errors" = NULL) %>%
     list.flatten() %>%
-    as_tibble_row()
+    as_tibble_row(.name_repair = "unique")
 
   # convert errors to list column (since it can have multiple elements)
   if(!is.null(errors)){
@@ -360,7 +360,7 @@ extract_records_from_xml_node <- function(node,
     as_list() %>%
     xml_drop_and_unlist_recursively() %>%
     drop_empty_recursively() %>%
-    as_tibble_row()
+    as_tibble_row(.name_repair = "unique")
   if(object_name_append){
     colnames(x) <- paste(object_name, colnames(x), sep='.')
   }
@@ -443,7 +443,7 @@ extract_records_from_xml_nodeset_of_records <- function(x,
     map(drop_empty_recursively)
   x <- x_list %>%
     map_df(.f=function(x, nms, obj_name_append, obj_name_as_col){
-      y <- as_tibble_row(x)
+      y <- as_tibble_row(x, .name_repair = "unique")
       if(!is.null(nms) && !any(sapply(nms, is.null))){
         if(obj_name_append){
           colnames(y) <- paste(nms, colnames(y), sep='.')
@@ -511,7 +511,7 @@ extract_nested_child_records <- function(x){
     map_depth(2, flatten_tbl_df) %>%
     pluck(1) %>%
     safe_bind_rows() %>%
-    as_tibble()
+    as_tibble(.name_repair = "unique")
 
   return(child_records)
 }
@@ -630,7 +630,7 @@ combine_parent_and_child_resultsets <- function(parents_df, child_df_list){
 #' @export
 safe_bind_rows <- function(l, fill=TRUE, idcol=NULL, ...){
   rbindlist(l = l, fill = fill, idcol = idcol, ...) %>%
-    as_tibble()
+    as_tibble(.name_repair = "unique")
 }
 
 #' Extract tibble based on the "records" element of a list
@@ -750,9 +750,6 @@ sf_reorder_cols <- function(df){
 #' @keywords internal
 #' @export
 sf_guess_cols <- function(df, guess_types=TRUE, dataType=NULL){
-
-
-
 
   if(guess_types){
     if(is.null(dataType) || any(is.na(dataType)) || (length(dataType) == 0)){
