ARROW-6337: [R] Changed as_tibble to as_data_frame in the R package

See [ARROW-6337](https://issues.apache.org/jira/browse/ARROW-6337) for background on this PR.

Closes apache#5399 from jameslamb/as_dataframe and squashes the following commits:

66137bb <James Lamb> ARROW-6337:  Changed as_tible to as_data_frame in the R package
066f002 <James Lamb> ARROW-6337:  Changed as_tible to as_dataframe in the R package

Authored-by: James Lamb <[email protected]>
Signed-off-by: Neal Richardson <[email protected]>
jameslamb authored and nealrichardson committed Sep 18, 2019
1 parent 1f88560 commit 4b5ca3d
Showing 11 changed files with 45 additions and 45 deletions.
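
To make the user-facing change concrete, here is a minimal before/after sketch of the renamed argument. It is not taken from the commit itself; "example.csv" is a placeholder path.

library(arrow)

# Before this commit:
# df  <- read_csv_arrow("example.csv", as_tibble = TRUE)
# tab <- read_csv_arrow("example.csv", as_tibble = FALSE)

# After this commit (same behaviour, renamed argument):
df  <- read_csv_arrow("example.csv", as_data_frame = TRUE)   # data.frame, the default
tab <- read_csv_arrow("example.csv", as_data_frame = FALSE)  # arrow::Table
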
12 changes: 6 additions & 6 deletions r/R/csv.R
@@ -63,10 +63,10 @@
#' parsing options provided in other arguments (e.g. `delim`, `quote`, etc.).
#' @param convert_options see [csv_convert_options()]
#' @param read_options see [csv_read_options()]
-#' @param as_tibble Should the function return a `data.frame` or an
+#' @param as_data_frame Should the function return a `data.frame` or an
#' [arrow::Table][Table]?
#'
-#' @return A `data.frame`, or an Table if `as_tibble = FALSE`.
+#' @return A `data.frame`, or an Table if `as_data_frame = FALSE`.
#' @export
#' @examples
#' \donttest{
@@ -95,7 +95,7 @@ read_delim_arrow <- function(file,
parse_options = NULL,
convert_options = NULL,
read_options = NULL,
-as_tibble = TRUE) {
+as_data_frame = TRUE) {

if (is.null(parse_options)) {
parse_options <- readr_to_csv_parse_options(
@@ -124,7 +124,7 @@ read_delim_arrow <- function(file,

tab <- reader$Read()$select(!!enquo(col_select))

-if (isTRUE(as_tibble)) {
+if (isTRUE(as_data_frame)) {
tab <- as.data.frame(tab)
}

@@ -147,7 +147,7 @@ read_csv_arrow <- function(file,
parse_options = NULL,
convert_options = NULL,
read_options = NULL,
-as_tibble = TRUE) {
+as_data_frame = TRUE) {

mc <- match.call()
mc$delim <- ","
@@ -171,7 +171,7 @@ read_tsv_arrow <- function(file,
parse_options = NULL,
convert_options = NULL,
read_options = NULL,
-as_tibble = TRUE) {
+as_data_frame = TRUE) {

mc <- match.call()
mc$delim <- "\t"
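
The csv.R changes cover read_delim_arrow(), read_csv_arrow(), and read_tsv_arrow(). A short usage sketch under the new name, assuming only a temporary file created here for illustration:

library(arrow)

tf <- tempfile(fileext = ".tsv")
write.table(iris, tf, sep = "\t", row.names = FALSE)

tab <- read_tsv_arrow(tf, as_data_frame = FALSE)  # arrow::Table
df  <- read_delim_arrow(tf, delim = "\t")         # data.frame, since as_data_frame defaults to TRUE
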
6 changes: 3 additions & 3 deletions r/R/feather.R
@@ -96,7 +96,7 @@ FeatherTableWriter$create <- function(stream) {
#' @inheritParams read_delim_arrow
#' @param ... additional parameters
#'
-#' @return A `data.frame` if `as_tibble` is `TRUE` (the default), or an
+#' @return A `data.frame` if `as_data_frame` is `TRUE` (the default), or an
#' [arrow::Table][Table] otherwise
#'
#' @export
@@ -112,7 +112,7 @@ FeatherTableWriter$create <- function(stream) {
#' df <- read_feather(tf, col_select = starts_with("Sepal"))
#' })
#' }
-read_feather <- function(file, col_select = NULL, as_tibble = TRUE, ...) {
+read_feather <- function(file, col_select = NULL, as_data_frame = TRUE, ...) {
reader <- FeatherTableReader$create(file, ...)

all_columns <- ipc___feather___TableReader__column_names(reader)
@@ -122,7 +122,7 @@ read_feather <- function(file, col_select = NULL, as_tibble = TRUE, ...) {
}

out <- reader$Read(columns)
-if (isTRUE(as_tibble)) {
+if (isTRUE(as_data_frame)) {
out <- as.data.frame(out)
}
out
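
The same toggle applies to read_feather(). A minimal sketch; write_feather() and the temporary file are assumptions used for illustration and are not part of this diff:

library(arrow)

tf <- tempfile(fileext = ".feather")
write_feather(iris, tf)                        # assumed writer, not touched by this commit

tab <- read_feather(tf, as_data_frame = FALSE) # arrow::Table
df  <- read_feather(tf)                        # data.frame (as_data_frame defaults to TRUE)
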
6 changes: 3 additions & 3 deletions r/R/json.R
@@ -22,7 +22,7 @@
#' @inheritParams read_delim_arrow
#' @param ... Additional options, passed to `json_table_reader()`
#'
-#' @return A `data.frame`, or an Table if `as_tibble = FALSE`.
+#' @return A `data.frame`, or an Table if `as_data_frame = FALSE`.
#' @export
#' @examples
#' \donttest{
@@ -37,10 +37,10 @@
#' df <- read_json_arrow(tf)
#' })
#' }
-read_json_arrow <- function(file, col_select = NULL, as_tibble = TRUE, ...) {
+read_json_arrow <- function(file, col_select = NULL, as_data_frame = TRUE, ...) {
tab <- json_table_reader(file, ...)$Read()$select(!!enquo(col_select))

-if (isTRUE(as_tibble)) {
+if (isTRUE(as_data_frame)) {
tab <- as.data.frame(tab)
}
tab
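
read_json_arrow() gets the same rename. A sketch modelled on the tests further down, with a throwaway JSON file created just for the example:

library(arrow)

tf <- tempfile(fileext = ".json")
writeLines('{ "hello": 3.5, "world": false }', tf)

tab <- read_json_arrow(tf, as_data_frame = FALSE)  # arrow::Table
df  <- read_json_arrow(tf)                         # data.frame, the default
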
6 changes: 3 additions & 3 deletions r/R/parquet.R
@@ -25,7 +25,7 @@
#' @param props [ParquetReaderProperties]
#' @param ... Additional arguments passed to `ParquetFileReader$create()`
#'
-#' @return A [arrow::Table][Table], or a `data.frame` if `as_tibble` is
+#' @return A [arrow::Table][Table], or a `data.frame` if `as_data_frame` is
#' `TRUE`.
#' @examples
#' \donttest{
@@ -35,13 +35,13 @@
#' @export
read_parquet <- function(file,
col_select = NULL,
-as_tibble = TRUE,
+as_data_frame = TRUE,
props = ParquetReaderProperties$create(),
...) {
reader <- ParquetFileReader$create(file, props = props, ...)
tab <- reader$ReadTable(!!enquo(col_select))

-if (as_tibble) {
+if (as_data_frame) {
tab <- as.data.frame(tab)
}
tab
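
And likewise for read_parquet(). In this sketch, write_parquet() and the sample file are assumptions used only to round-trip some data; they are not part of this diff:

library(arrow)

tf <- tempfile(fileext = ".parquet")
write_parquet(iris, tf)                         # assumed writer, not touched by this commit

tab <- read_parquet(tf, as_data_frame = FALSE)  # arrow::Table
df  <- read_parquet(tf)                         # data.frame (as_data_frame = TRUE by default)
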
10 changes: 5 additions & 5 deletions r/man/read_delim_arrow.Rd


6 changes: 3 additions & 3 deletions r/man/read_feather.Rd


6 changes: 3 additions & 3 deletions r/man/read_json_arrow.Rd


6 changes: 3 additions & 3 deletions r/man/read_parquet.Rd


18 changes: 9 additions & 9 deletions r/tests/testthat/test-csv.R
@@ -23,9 +23,9 @@ test_that("Can read csv file", {

write.csv(iris, tf, row.names = FALSE)

-tab1 <- read_csv_arrow(tf, as_tibble = FALSE)
-tab2 <- read_csv_arrow(mmap_open(tf), as_tibble = FALSE)
-tab3 <- read_csv_arrow(ReadableFile$create(tf), as_tibble = FALSE)
+tab1 <- read_csv_arrow(tf, as_data_frame = FALSE)
+tab2 <- read_csv_arrow(mmap_open(tf), as_data_frame = FALSE)
+tab3 <- read_csv_arrow(ReadableFile$create(tf), as_data_frame = FALSE)

iris$Species <- as.character(iris$Species)
tab0 <- Table$create(!!!iris)
@@ -34,15 +34,15 @@ test_that("Can read csv file", {
expect_equal(tab0, tab3)
})

-test_that("read_csv_arrow(as_tibble=TRUE)", {
+test_that("read_csv_arrow(as_data_frame=TRUE)", {
tf <- tempfile()
on.exit(unlink(tf))

write.csv(iris, tf, row.names = FALSE)

-tab1 <- read_csv_arrow(tf, as_tibble = TRUE)
-tab2 <- read_csv_arrow(mmap_open(tf), as_tibble = TRUE)
-tab3 <- read_csv_arrow(ReadableFile$create(tf), as_tibble = TRUE)
+tab1 <- read_csv_arrow(tf, as_data_frame = TRUE)
+tab2 <- read_csv_arrow(mmap_open(tf), as_data_frame = TRUE)
+tab3 <- read_csv_arrow(ReadableFile$create(tf), as_data_frame = TRUE)

iris$Species <- as.character(iris$Species)
expect_equivalent(iris, tab1)
@@ -170,9 +170,9 @@ test_that("read_csv_arrow() respects col_select", {

write.csv(iris, tf, row.names = FALSE, quote = FALSE)

-tab <- read_csv_arrow(tf, col_select = starts_with("Sepal"), as_tibble = FALSE)
+tab <- read_csv_arrow(tf, col_select = starts_with("Sepal"), as_data_frame = FALSE)
expect_equal(tab, Table$create(Sepal.Length = iris$Sepal.Length, Sepal.Width = iris$Sepal.Width))

-tib <- read_csv_arrow(tf, col_select = starts_with("Sepal"), as_tibble = TRUE)
+tib <- read_csv_arrow(tf, col_select = starts_with("Sepal"), as_data_frame = TRUE)
expect_equal(tib, tibble::tibble(Sepal.Length = iris$Sepal.Length, Sepal.Width = iris$Sepal.Width))
})
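
A compact testthat sketch restating the contract the updated tests exercise; this particular test is not in the diff:

library(arrow)
library(testthat)

test_that("as_data_frame toggles the return type of read_csv_arrow()", {
  tf <- tempfile()
  on.exit(unlink(tf))
  write.csv(iris, tf, row.names = FALSE)

  expect_is(read_csv_arrow(tf, as_data_frame = FALSE), "Table")
  expect_is(read_csv_arrow(tf, as_data_frame = TRUE), "data.frame")
})
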
2 changes: 1 addition & 1 deletion r/tests/testthat/test-feather.R
@@ -94,7 +94,7 @@ test_that("feather handles col_select = <tidyselect helper>", {
})

test_that("feather read/write round trip", {
-tab1 <- read_feather(feather_file, as_tibble = FALSE)
+tab1 <- read_feather(feather_file, as_data_frame = FALSE)
expect_is(tab1, "Table")

expect_equal(tib, as.data.frame(tab1))
12 changes: 6 additions & 6 deletions r/tests/testthat/test-json.R
@@ -27,9 +27,9 @@ test_that("Can read json file with scalars columns (ARROW-5503)", {
{ "hello": 0.0, "world": true, "yo": null }
', tf, useBytes=TRUE)

-tab1 <- read_json_arrow(tf, as_tibble = FALSE)
-tab2 <- read_json_arrow(mmap_open(tf), as_tibble = FALSE)
-tab3 <- read_json_arrow(ReadableFile$create(tf), as_tibble = FALSE)
+tab1 <- read_json_arrow(tf, as_data_frame = FALSE)
+tab2 <- read_json_arrow(mmap_open(tf), as_data_frame = FALSE)
+tab3 <- read_json_arrow(ReadableFile$create(tf), as_data_frame = FALSE)

expect_equal(tab1, tab2)
expect_equal(tab1, tab3)
@@ -98,9 +98,9 @@ test_that("Can read json file with nested columns (ARROW-5503)", {
{ "arr": [5.0, 6.0], "nuf": { "ps": 19 } }
', tf)

-tab1 <- read_json_arrow(tf, as_tibble = FALSE)
-tab2 <- read_json_arrow(mmap_open(tf), as_tibble = FALSE)
-tab3 <- read_json_arrow(ReadableFile$create(tf), as_tibble = FALSE)
+tab1 <- read_json_arrow(tf, as_data_frame = FALSE)
+tab2 <- read_json_arrow(mmap_open(tf), as_data_frame = FALSE)
+tab3 <- read_json_arrow(ReadableFile$create(tf), as_data_frame = FALSE)

expect_equal(tab1, tab2)
expect_equal(tab1, tab3)
