diff --git a/.github/workflows/R-CMD-check.yaml b/.github/workflows/R-CMD-check.yaml
index 921c120..d8604e9 100644
--- a/.github/workflows/R-CMD-check.yaml
+++ b/.github/workflows/R-CMD-check.yaml
@@ -117,91 +117,65 @@ jobs:
       - uses: ./.github/workflows/pkgdown-deploy
         if: github.event_name == 'push'
 
-  # Windows checks can be run in parallel and independently
-  # when they alone take as long as the smoke and full tests combined.
-  # To achieve this, remove the "needs:" element below.
-  rcc-windows:
-    # Begin custom: early run
-    needs: rcc-smoke
-    # End custom: early run
-
-    runs-on: ${{ matrix.config.os }}
-
-    name: ${{ matrix.config.os }} (${{ matrix.config.r }}) ${{ matrix.config.desc }}
-
-    # Begin custom: services
-    # End custom: services
+  versions-matrix:
+    runs-on: ubuntu-22.04
+    outputs:
+      matrix: ${{ steps.set-matrix.outputs.matrix }}
 
-    strategy:
-      fail-fast: false
-      matrix:
-        config:
-          - {os: windows-latest, r: 'release'}
-          # Use 3.6 to trigger usage of RTools35
-          - {os: windows-latest, r: '3.6'}
+    name: Collect versions
 
     steps:
       - uses: actions/checkout@v4
         with:
-          ref: ${{ needs.rcc-smoke.outputs.sha }}
-
-      - uses: ./.github/workflows/custom/before-install
-        if: hashFiles('.github/workflows/custom/before-install/action.yml') != ''
+          fetch-depth: 0
 
-      - uses: ./.github/workflows/install
+      - uses: ./.github/workflows/rate-limit
        with:
-          r-version: ${{ matrix.config.r }}
-          cache-version: rcc-windows-1
           token: ${{ secrets.GITHUB_TOKEN }}
-          needs: check
-
-      - uses: ./.github/workflows/custom/after-install
-        if: hashFiles('.github/workflows/custom/after-install/action.yml') != ''
-
-      - uses: ./.github/workflows/update-snapshots
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
 
-      - uses: ./.github/workflows/check
+      - uses: r-lib/actions/setup-r@v2
         with:
-          results: ${{ runner.os }}-r${{ matrix.config.r }}
+          install-r: false
+          use-public-rspm: true
 
-  rcc-full:
-    needs: rcc-smoke
+      - id: set-matrix
+        uses: ./.github/workflows/versions-matrix
 
-    runs-on: ${{ matrix.config.os }}
+  check-versions-matrix:
+    runs-on: ubuntu-22.04
+    needs: versions-matrix
 
-    name: ${{ matrix.config.os }} (${{ matrix.config.r }}) ${{ matrix.config.desc }}
+    name: Check versions deps
 
-    # Begin custom: services
-    # End custom: services
+    if: ${{ needs.versions-matrix.outputs.matrix != '' }}
 
-    strategy:
-      fail-fast: false
-      matrix:
-        config:
-          - {os: macos-latest, r: 'release'}
+    steps:
+      - name: Install json2yaml
+        run: |
+          sudo npm install -g json2yaml
 
-          - {os: ubuntu-20.04, r: 'release'}
+      - name: Check matrix definition
+        run: |
+          matrix='${{ needs.versions-matrix.outputs.matrix }}'
+          echo $matrix
+          echo $matrix | jq .
+          echo $matrix | json2yaml
 
-          # Use older ubuntu to maximise backward compatibility
-          - {os: ubuntu-22.04, r: 'devel', http-user-agent: 'release'}
-          - {os: ubuntu-22.04, r: 'release', covr: true, desc: 'with covr'}
-          - {os: ubuntu-22.04, r: 'oldrel-1'}
+  rcc-full:
+    needs:
+      - rcc-smoke
+      - versions-matrix
 
-          # Begin custom: R 3.6
-          - {os: ubuntu-22.04, r: 'oldrel-2'}
-          # End custom: R 3.6
+    runs-on: ${{ matrix.os }}
 
-          # Begin custom: R 3.5
-          - {os: ubuntu-22.04, r: 'oldrel-3'}
-          # End custom: R 3.5
+    name: ${{ matrix.os }} (${{ matrix.r }}) ${{ matrix.desc }}
 
-          # Begin custom: R 3.4
-          - {os: ubuntu-22.04, r: 'oldrel-4'}
-          # End custom: R 3.4
+    # Begin custom: services
+    # End custom: services
 
-          # Begin custom: matrix elements
-          # End custom: matrix elements
+    strategy:
+      fail-fast: false
+      matrix: ${{fromJson(needs.versions-matrix.outputs.matrix)}}
 
     steps:
       - uses: actions/checkout@v4
@@ -213,7 +187,7 @@ jobs:
 
       - uses: ./.github/workflows/install
         with:
-          r-version: ${{ matrix.config.r }}
+          r-version: ${{ matrix.r }}
           cache-version: rcc-full-1
           token: ${{ secrets.GITHUB_TOKEN }}
           needs: check
@@ -226,7 +200,7 @@ jobs:
 
       - uses: ./.github/workflows/check
         with:
-          results: ${{ runner.os }}-r${{ matrix.config.r }}
+          results: ${{ runner.os }}-r${{ matrix.r }}
 
   suggests-matrix:
     runs-on: ubuntu-22.04
@@ -252,13 +226,13 @@ jobs:
       - id: set-matrix
         uses: ./.github/workflows/dep-matrix-suggests
 
-  check-matrix:
+  check-suggests-matrix:
     runs-on: ubuntu-22.04
     needs: suggests-matrix
 
-    name: Check deps
+    name: Check suggests deps
 
-    if: ${{ needs.matrix.outputs.matrix != '' }}
+    if: ${{ needs.suggests-matrix.outputs.matrix != '' }}
 
     steps:
       - name: Install json2yaml
@@ -267,15 +241,15 @@ jobs:
 
       - name: Check matrix definition
         run: |
-          matrix='${{ needs.matrix.outputs.matrix }}'
+          matrix='${{ needs.suggests-matrix.outputs.matrix }}'
           echo $matrix
           echo $matrix | jq .
           echo $matrix | json2yaml
 
   rcc-suggests:
     needs:
-      - suggests-matrix
       - rcc-smoke
+      - suggests-matrix
 
     runs-on: ubuntu-22.04
 
diff --git a/.github/workflows/versions-matrix/action.R b/.github/workflows/versions-matrix/action.R
new file mode 100644
index 0000000..b2b94c9
--- /dev/null
+++ b/.github/workflows/versions-matrix/action.R
@@ -0,0 +1,57 @@
+# - {os: macos-latest, r: 'release'}
+#
+# - {os: ubuntu-20.04, r: 'release'}
+#
+# # Use older ubuntu to maximise backward compatibility
+# - {os: ubuntu-22.04, r: 'devel', http-user-agent: 'release'}
+# - {os: ubuntu-22.04, r: 'release', covr: true, desc: 'with covr'}
+# - {os: ubuntu-22.04, r: 'oldrel-1'}
+#
+# # Begin custom: R 3.6
+# - {os: ubuntu-22.04, r: 'oldrel-2'}
+# # End custom: R 3.6
+#
+# # Begin custom: R 3.5
+# - {os: ubuntu-22.04, r: 'oldrel-3'}
+# # End custom: R 3.5
+#
+# # Begin custom: R 3.4
+# - {os: ubuntu-22.04, r: 'oldrel-4'}
+# # End custom: R 3.4
+
+# FIXME: Dynamic lookup by parsing https://svn.r-project.org/R/tags/
+r_versions <- c("devel", paste0("4.", 4:0))
+macos <- data.frame(os = "macos-latest", r = r_versions[2:3])
+windows <- data.frame(os = "windows-latest", r = r_versions[1:3])
+linux_devel <- data.frame(os = "ubuntu-22.04", r = r_versions[1], `http-user-agent` = "release", check.names = FALSE)
+linux <- data.frame(os = "ubuntu-22.04", r = r_versions[-1])
+covr <- data.frame(os = "ubuntu-22.04", r = r_versions[2], covr = "true", desc = "with covr")
+
+include_list <- list(macos, windows, linux_devel, linux, covr)
+print(include_list)
+
+filter <- read.dcf("DESCRIPTION")[1,]["Config/gha/filter"]
+if (!is.na(filter)) {
+  filter_expr <- parse(text = filter)[[1]]
+  subset_fun_expr <- bquote(function(x) subset(x, .(filter_expr)))
+  subset_fun <- eval(subset_fun_expr)
+  include_list <- lapply(include_list, subset_fun)
+  print(include_list)
+}
+
+to_json <- function(x) {
+  if (nrow(x) == 0) return(character())
+  parallel <- vector("list", length(x))
+  for (i in seq_along(x)) {
+    parallel[[i]] <- paste0('"', names(x)[[i]], '":"', x[[i]], '"')
+  }
+  paste0("{", do.call(paste, c(parallel, sep = ",")), "}")
+}
+
+configs <- unlist(lapply(include_list, to_json))
+json <- paste0('{"include":[', paste(configs, collapse = ","), ']}')
+
+if (Sys.getenv("GITHUB_OUTPUT") != "") {
+  writeLines(paste0("matrix=", json), Sys.getenv("GITHUB_OUTPUT"))
+}
+writeLines(json)
diff --git a/.github/workflows/versions-matrix/action.yml b/.github/workflows/versions-matrix/action.yml
new file mode 100644
index 0000000..8adc819
--- /dev/null
+++ b/.github/workflows/versions-matrix/action.yml
@@ -0,0 +1,13 @@
+name: "Actions to compute a matrix with all R and OS versions"
+outputs:
+  matrix:
+    description: "Generated matrix"
+    value: ${{ steps.set-matrix.outputs.matrix }}
+
+runs:
+  using: "composite"
+  steps:
+    - id: set-matrix
+      run: |
+        Rscript ./.github/workflows/versions-matrix/action.R
+      shell: bash
diff --git a/DESCRIPTION b/DESCRIPTION
index 658de92..8dec641 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -29,3 +29,4 @@ Config/testthat/edition: 3
 Encoding: UTF-8
 Roxygen: list(markdown = TRUE)
 RoxygenNote: 7.3.1
+Config/gha/filter: os != "windows-latest"
diff --git a/src/value_conversion.h b/src/value_conversion.h
index 0992b7d..4aa71c2 100644
--- a/src/value_conversion.h
+++ b/src/value_conversion.h
@@ -35,7 +35,7 @@ class PointerWrapper {
 
   static SEXP Wrap( void* ptr) {
     // TODO cache this string?
-    auto ptr_sexp = R_MakeExternalPtr(ptr, Rf_ScalarString(mkChar(NAME.value)), R_NilValue);
+    auto ptr_sexp = R_MakeExternalPtr(ptr, Rf_ScalarString(Rf_mkChar(NAME.value)), R_NilValue);
     R_RegisterCFinalizerEx(ptr_sexp, Finalize, Rboolean::FALSE);
     return ptr_sexp;
   }
@@ -51,7 +51,7 @@ namespace ValueConversion {
 
 template <>
 SEXP ToR(const char* val) {
-  return Rf_ScalarString(mkChar(val));
+  return Rf_ScalarString(Rf_mkChar(val));
 }
 
 template <>
@@ -100,12 +100,12 @@ namespace ValueConversion {
 
 template <>
 SEXP ToR(duckdb_state val) {
-  return ScalarInteger(val);
+  return Rf_ScalarInteger(val);
 }
 
 template <>
 SEXP ToR(duckdb_type val) {
-  return ScalarInteger(val);
+  return Rf_ScalarInteger(val);
 }
 
 
diff --git a/tests/testthat/test-dbitest.R b/tests/testthat/test-dbitest.R
index beec448..be1ba31 100644
--- a/tests/testthat/test-dbitest.R
+++ b/tests/testthat/test-dbitest.R
@@ -1,294 +1 @@
 skip_on_cran()
-
-if (rlang::is_installed("DBItest")) {
-  DBItest::test_all(c(
-    # Also in adbcsqlite
-
-    # int/int64: https://github.com/r-dbi/DBItest/issues/311
-    # 1 value(s) outside integer range set to NA, nanoarrow_warning_lossy_conversion/simpleWarning/warning/condition
-    "data_64_bit_numeric",
-    # further 64-bit warnings already skipped below
-
-    # array_stream->get_schema(): [1]
-    "bind_multi_row_zero_length",
-    "arrow_bind_multi_row_zero_length",
-    "stream_bind_multi_row_zero_length",
-    "arrow_stream_bind_multi_row_zero_length",
-
-    # New in duckdbneo
-
-    # Logical as integers and not logicals
-    "data_logical",
-    "append_roundtrip_logical",
-    "arrow_append_table_arrow_roundtrip_logical",
-
-    # Genunely wrong output
-    "write_table_value_subset",
-    "write_table_value_shuffle_subset",
-    "arrow_fetch_arrow_chunk_multi_row_single_column",
-    "arrow_fetch_arrow_chunk_multi_row_multi_column",
-
-    # Output order
-    "write_table_value_shuffle",
-
-    # Fixable with using database file
-    "create_table_visible_in_other_connection",
-
-    # Error in `!nrow(res)`: invalid argument type
-    "arrow_write_table_arrow_value_subset",
-    "arrow_write_table_arrow_value_shuffle",
-    "arrow_write_table_arrow_value_shuffle_subset",
-
-    # dbDataType for blob objects unknown for type adbc_driver, adbc_xptr
-    "data_type_driver",
-    "data_type_connection",
-    "append_roundtrip_blob",
-    "arrow_append_table_arrow_roundtrip_blob",
-    "bind_raw", # Wrong output, probably still related
-    "bind_blob", # Wrong output, probably still related
-    "arrow_bind_raw", # Wrong output, probably still related
-    "arrow_bind_blob", # Wrong output, probably still related
-    "stream_bind_blob", # Wrong output, probably still related
-    "arrow_stream_bind_blob", # Wrong output, probably still related
-
-    # Only lists of raw vectors are currently supported
-    "append_roundtrip_raw",
-
-    # nanoarrow_infer_vector_type_schema(): Expected child of map type to be non-nullable but was nullable
-    "get_info_connection",
-
-    # From duckdb:
-    # NOT_IMPLEMENTED: Binding multiple rows at once is not supported yet
-    "send_query_params",
-    "get_query_params",
-    "bind_multi_row",
-    "arrow_bind_multi_row",
-    "stream_bind_multi_row",
-    "arrow_stream_bind_multi_row",
-    "arrow_send_query_params",
-    "arrow_get_query_arrow_params",
-
-    # INTERNAL: Conversion Error: timestamp field value out of range
-    "append_roundtrip_timestamp",
-    "append_roundtrip_timestamp_extended",
-    "bind_timestamp",
-    "arrow_bind_timestamp",
-    "stream_bind_timestamp",
-    "stream_bind_timestamp_lt",
-    "arrow_stream_bind_timestamp",
-    "arrow_stream_bind_timestamp_lt",
-    "arrow_append_table_arrow_roundtrip_timestamp",
-
-    # ArrowBasicArrayStreamValidate(): Expected struct child 1 to have length >= 11 but found child with length 1
-    "bind_timestamp_lt",
-    "arrow_bind_timestamp_lt",
-
-    # `dbAppendTable(con, table_name, unclass(test_in))` did not throw the expected error.
-    "append_table_invalid_value",
-
-    # duckdb
-    if (!TEST_RE2) {
-      c(
-        "send_query_immediate",
-        "clear_result_return_query_arrow",
-        "cannot_clear_result_twice_query_arrow",
-        "get_query_immediate",
-        "send_statement_params",
-        "execute_params",
-        "read_table_missing",
-        "read_table_row_names_false",
-        "read_table_row_names_true_exists",
-        "read_table_row_names_true_missing",
-        "read_table_row_names_na_exists",
-        "read_table_row_names_string_exists",
-        "read_table_row_names_string_missing",
-        "read_table_row_names_default",
-        "read_table_check_names",
-        "read_table_check_names_false",
-        "read_table_closed_connection",
-        "read_table_invalid_connection",
-        "read_table_error",
-        "read_table_name",
-        "create_table_error",
-        "create_table_temporary",
-        "create_table_row_names_non_null",
-        "append_table_missing",
-        "append_table_error",
-        "append_roundtrip_quotes",
-        "append_roundtrip_time",
-        "append_roundtrip_mixed",
-        "write_table_return",
-        "write_table_overwrite",
-        "write_table_append_incompatible",
-        "roundtrip_.*",
-        "write_table_name",
-        "write_table_value_df",
-        "temporary_table",
-        "roundtrip_keywords",
-        "roundtrip_quotes",
-        "roundtrip_quotes_table_names",
-        "roundtrip_integer",
-        "roundtrip_numeric",
-        "roundtrip_logical",
-        "roundtrip_null",
-        "roundtrip_64_bit_numeric",
-        "roundtrip_64_bit_roundtrip",
-        "write_table_row_names_false",
-        "write_table_row_names_true_exists",
-        "write_table_row_names_na_exists",
-        "write_table_row_names_string_exists",
-        "write_table_row_names_default",
-        "list_tables_temporary",
-        "exists_table",
-        "exists_table_temporary",
-        "exists_table_error",
-        "exists_table_name",
-        "remove_table_return",
-        "remove_table_missing",
-        "remove_table_closed_connection",
-        "remove_table_invalid_connection",
-        "remove_table_error",
-        "remove_table_temporary_arg",
-        "remove_table_missing_succeed",
-        "remove_table_temporary",
-        "remove_table_name",
-        "list_objects_temporary",
-        "list_fields",
-        "list_fields_temporary",
-        "list_fields_wrong_table",
-        "list_fields_quoted",
-        "list_fields_row_names",
-        "bind_return_value_statement",
-        "bind_multi_row_unequal_length",
-        "bind_multi_row_statement",
-        "bind_repeated_statement",
-        "bind_repeated_untouched_statement",
-        "arrow_bind_multi_row_unequal_length",
-        "stream_bind_return_value_statement",
-        "stream_bind_multi_row_statement",
-        "stream_bind_repeated_statement",
-        "stream_bind_repeated_untouched_statement",
-        "column_info",
-        "column_info_row_names",
-        "rows_affected_statement",
-        "begin_write_commit",
-        "begin_write_rollback",
-        "begin_write_disconnect",
-        "with_transaction_success",
-        "with_transaction_failure",
-        "with_transaction_break",
-        "arrow_send_query_immediate",
-        "arrow_get_query_arrow_immediate",
-        "arrow_read_table_arrow",
-        "arrow_read_table_arrow_missing",
-        "arrow_read_table_arrow_empty",
-        "arrow_read_table_arrow_closed_connection",
-        "arrow_read_table_arrow_invalid_connection",
-        "arrow_read_table_arrow_error",
-        "arrow_read_table_arrow_name",
-        "arrow_write_table_arrow_return",
-        "arrow_write_table_arrow_error_overwrite",
-        "arrow_write_table_arrow_append_incompatible",
-        "arrow_write_table_arrow_error",
-        "arrow_write_table_arrow_name",
-        "arrow_write_table_arrow_value_df",
-        "arrow_write_table_arrow_overwrite",
-        "arrow_write_table_arrow_overwrite_missing",
-        "arrow_write_table_arrow_append",
-        "arrow_write_table_arrow_append_new",
-        "arrow_write_table_arrow_temporary",
-        "arrow_write_table_arrow_visible_in_other_connection",
-        "arrow_write_table_arrow_roundtrip_quotes",
-        "arrow_write_table_arrow_roundtrip_quotes_table_names",
-        "arrow_write_table_arrow_roundtrip_integer",
-        "arrow_write_table_arrow_roundtrip_numeric",
-        "arrow_write_table_arrow_roundtrip_null",
-        "arrow_write_table_arrow_roundtrip_64_bit_roundtrip",
-        "arrow_write_table_arrow_roundtrip_character_empty",
-        "arrow_write_table_arrow_roundtrip_character_empty_after",
-        "arrow_write_table_arrow_roundtrip_date",
-        "arrow_write_table_arrow_roundtrip_date_extended",
-        "arrow_write_table_arrow_roundtrip_time",
-        "arrow_write_table_arrow_roundtrip_mixed",
-        "arrow_create_table_arrow_error",
-        "arrow_create_table_arrow_temporary",
-        "arrow_create_table_arrow_visible_in_other_connection",
-        "arrow_append_table_arrow_return",
-        "arrow_append_table_arrow_missing",
-        "arrow_append_table_arrow_error",
-        "arrow_append_table_arrow_roundtrip_quotes",
-        "arrow_append_table_arrow_roundtrip_64_bit_roundtrip",
-        "arrow_append_table_arrow_roundtrip_time",
-        "arrow_append_table_arrow_roundtrip_mixed",
-        "list_tables",
-        "list_tables_quote",
-        "list_fields_object",
-        "list_objects",
-        "list_objects_quote",
-        "list_objects_features",
-        "remove_table_list",
-        "remove_table_other_con",
-        "exists_table_list",
-        # 1.8.1
-        "write_table_name_quoted",
-        "remove_table_name_quoted",
-        "arrow_write_table_arrow_name_quoted",
-        "arrow_write_table_arrow_roundtrip_keywords",
-        "arrow_write_table_arrow_roundtrip_quotes_column_names",
-        "arrow_write_table_arrow_roundtrip_logical",
-        "arrow_write_table_arrow_roundtrip_character",
-        "arrow_write_table_arrow_roundtrip_character_native",
-        "arrow_write_table_arrow_roundtrip_factor",
-        "arrow_write_table_arrow_roundtrip_blob",
-        "arrow_write_table_arrow_roundtrip_timestamp",
-        NULL
-      )
-    },
-    "package_name", # wontfix
-    "package_dependencies", # wontfix
-    "reexport", # wontfix
-
-    "constructor", # wontfix
-    "send_query_only_one_result_set", # wontfix
-    "send_statement_only_one_result_set", # wontfix
-    "send_query_stale_warning", # wontfix
-    "send_statement_stale_warning", # wontfix
-
-    if (packageVersion("DBItest") < "1.7.0.9004") "roundtrip_timestamp", # broken test
-
-    "data_64_bit_numeric_warning", # 64 bit, not now
-    "data_64_bit_lossless",
-    "roundtrip_64_bit_character",
-    "connect_bigint_integer",
-    "connect_bigint_character",
-    "connect_bigint_integer64",
-    "append_roundtrip_64_bit_numeric",
-    "append_roundtrip_64_bit_character",
-    "append_roundtrip_64_bit_roundtrip",
-    #
-    "column_info_consistent", # won't fix: https://github.com/r-dbi/DBItest/issues/181
-
-    "read_table", # these are temporarily skipped because factors can be round tripped
-    "read_table_empty",
-    "read_table_row_names_na_missing",
-    "write_table_error",
-    "overwrite_table",
-    "overwrite_table_missing",
-    "append_table",
-    "append_table_new",
-    "table_visible_in_other_connection",
-    "roundtrip_character",
-    "roundtrip_factor",
-    "write_table_row_names_true_missing",
-    "write_table_row_names_string_missing",
-    "write_table_row_names_na_missing",
-    "append_roundtrip_factor",
-    "bind_factor",
-    "arrow_bind_factor",
-    "arrow_send_query_stale_warning", # wontfix
-    "arrow_send_query_only_one_result_set", # wontfix
-    "arrow_write_table_arrow_roundtrip_timestamp_extended", # precision
-    "arrow_append_table_arrow_roundtrip_timestamp_extended", # precision
-    NULL
-  ))
-}