rhub/0000755000176200001440000000000014762422102011212 5ustar liggesusersrhub/tests/0000755000176200001440000000000014603437121012355 5ustar liggesusersrhub/tests/testthat/0000755000176200001440000000000014762422102014214 5ustar liggesusersrhub/tests/testthat/test-setup.R0000644000176200001440000000630614605206035016462 0ustar liggesuserstest_that("check_rpkg_root", {
  expect_silent(check_rpkg_root("/foo/bar", "/foo/bar"))
  expect_snapshot(error = TRUE, {
    check_rpkg_root("/pkg/root", "/git/root")
  })
})

test_that("rhub_setup", {
  withr::local_options(cli.ansi = FALSE)
  # we do this here, so the web server process starts with the same
  # working directory as the tests
  http$url()
  # we need to do this because we are wrapping text and also using
  # `transform` in `expect_snapshot()`.
  withr::local_options(cli.width = Inf)

  # check this before changing wd
  wf_hash <- cli::hash_file_sha1(test_path("fixtures/rhub.yaml"))

  # Do everything in a temporary package
  tmp <- tempfile()
  on.exit(unlink(tmp, recursive = TRUE), add = TRUE)
  dir.create(tmp)
  file.copy(test_path("fixtures/pkg"), tmp, recursive = TRUE)
  pkg <- file.path(tmp, "pkg")
  withr::local_dir(pkg)

  # must be a git repo as well
  dir.create(".git")

  # fails to download workflow file
  withr::local_envvar(RHUB_WORKFLOW_URL = http$url("/badbadbad"))
  expect_snapshot(error = TRUE, {
    rhub_setup()
  }, transform = function(x) redact_abs_path(redact_port(x)))

  # no workflow file, copy there
  withr::local_envvar(RHUB_WORKFLOW_URL = http$url("/rhub.yaml"))
  expect_snapshot({
    rhub_setup()
  }, transform = redact_abs_path)
  expect_equal(cli::hash_file_sha1(".github/workflows/rhub.yaml"), wf_hash)

  # workflow file is up to date
  expect_snapshot({
    rhub_setup()
  }, transform = redact_abs_path)
  expect_equal(cli::hash_file_sha1(".github/workflows/rhub.yaml"), wf_hash)

  # workflow file is outdated
  cat("This is a change", file = ".github/workflows/rhub.yaml")
  wf_upd_hash <- cli::hash_file_sha1(".github/workflows/rhub.yaml")
  expect_snapshot(error = TRUE, {
    rhub_setup()
  }, transform = redact_abs_path)
  expect_equal(
    cli::hash_file_sha1(".github/workflows/rhub.yaml"),
    wf_upd_hash
  )

  # workflow file is outdated, overwrite
  expect_snapshot({
    rhub_setup(overwrite = TRUE)
  }, transform = redact_abs_path)
  expect_equal(cli::hash_file_sha1(".github/workflows/rhub.yaml"), wf_hash)
})

test_that("setup_find_r_package", {
  withr::local_options(cli.ansi = FALSE)
  # we need to do this because we are wrapping text and also using
  # `transform` in `expect_snapshot()`.
  withr::local_options(cli.width = Inf)

  pkg <- test_path("fixtures/pkg")
  withr::local_dir(pkg)
  expect_snapshot({
    setup_find_r_package()
  }, transform = redact_abs_path)

  unlink(file.path(tempdir(), "DESCRIPTION"))
  tmp <- tempfile()
  on.exit(unlink(tmp, recursive = TRUE), add = TRUE)
  dir.create(tmp)
  withr::local_dir(tmp)
  expect_snapshot(error = TRUE, {
    setup_find_r_package()
  })
})

test_that("setup_find_git_root", {
  withr::local_options(cli.ansi = FALSE)
  # we need to do this because we are wrapping text and also using
  # `transform` in `expect_snapshot()`.
  withr::local_options(cli.width = Inf)

  unlink(file.path(tempdir(), ".git"), recursive = TRUE)
  tmp <- tempfile()
  on.exit(unlink(tmp, recursive = TRUE), add = TRUE)
  dir.create(tmp)
  withr::local_dir(tmp)
  expect_snapshot(error = TRUE, {
    setup_find_git_root()
  })

  dir.create(".git")
  expect_snapshot({
    setup_find_git_root()
  }, transform = redact_abs_path)
})
rhub/tests/testthat/test-utils.R0000644000176200001440000001305314762410770016466 0ustar liggesusers# We don't use expect_snapshot because it adds an extra !
at the # beginning of the error message test_that("pkg_error", { err <- tryCatch( throw(pkg_error( "!" = "This is not good!", "i" = "You should not use {.code foo}, use {.code bar} instead.", .data = list(foo = 1:3), call. = FALSE )), error = function(e) e ) expect_snapshot(err) expect_equal(err$foo, 1:3) }) # We don't use expect_snapshot because it adds an extra ! at the # beginning of the error message test_that("stop", { err <- tryCatch( stop(pkg_error( "!" = "This is not good!", "i" = "You should not use {.code foo}, use {.code bar} instead.", call. = FALSE )), error = function(e) e ) expect_snapshot(err) }) test_that("stop with message", { err <- tryCatch( stop("Ooopsie daily!"), error = function(e) e ) expect_snapshot(err) }) test_that("stopifnot", { expect_snapshot(error = TRUE, { stopifnot(1 == 2) }) }) test_that("zip", { expect_snapshot({ zip(character(), character()) zip(letters[1:5], LETTERS[1:5]) zip("1", letters[1:5]) }) }) test_that("first_char", { expect_equal(first_char("foo"), "f") expect_equal(first_char("f"), "f") expect_equal(first_char(letters), letters) expect_equal(first_char(paste(letters, LETTERS)), letters) expect_equal(first_char(""), "") expect_equal(first_char(character()), character()) }) test_that("last_char", { expect_equal(last_char("foo"), "o") expect_equal(last_char("f"), "f") expect_equal(last_char(letters), letters) expect_equal(last_char(paste(letters, LETTERS)), LETTERS) expect_equal(last_char(""), "") expect_equal(last_char(character()), character()) }) test_that("unquote", { keep <- list( "foo", "'foo", "foo'", "\"foo'", "'foo\"", letters, paste0("'", letters), paste0(letters, "'"), character() ) for (k in keep) expect_equal(unquote(k), k, info = k) expect_snapshot({ unquote("'quoted'") unquote(c("'quoted'", "not", '"quoted"')) }) }) test_that("has_emoji", { mockery::stub(has_emoji, "cli::is_utf8_output", FALSE) expect_false(has_emoji()) mockery::stub(has_emoji, "cli::is_utf8_output", TRUE) withr::local_options(pkg.emoji = TRUE) expect_true(has_emoji()) withr::local_options(pkg.emoji = FALSE) expect_false(has_emoji()) withr::local_options(pkg.emoji = NULL) mockery::stub(has_emoji, "Sys.info", list(sysname = "Darwin")) expect_true(has_emoji()) mockery::stub(has_emoji, "Sys.info", list(sysname = "Linux")) expect_false(has_emoji()) }) test_that("parse_url", { expect_snapshot({ parse_url("https://github.com/r-hub/rhub") parse_url("https://user@github.com/r-hub/rhub") parse_url("https://user:pass@github.com/r-hub/rhub") parse_url("https://github.com/r-hub/rhub?q=foo&p=bar") parse_url("git@github.com:/r-hub/rhub") parse_url("git@github.com:/r-hub/rhub.git") }) expect_snapshot(error = TRUE, { parse_url("this is not a URL at all") }) }) test_that("read_file", { tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) cnt <- as.raw(c(0xc3, 0xa9, 0xc3, 0xa1)) writeBin(cnt, tmp) cnt2 <- read_file(tmp) expect_equal(Encoding(cnt2), "UTF-8") expect_equal(charToRaw(cnt2), cnt) writeBin(cnt[1:3], tmp) expect_error(read_file(tmp), "not UTF-8") }) cli::test_that_cli("ansi_align_width", configs = c("plain", "ansi"), { expect_snapshot({ paste0("--", ansi_align_width(c("foo", "bar", "foobar")), "--") paste0( "--", ansi_align_width(c("foo", "bar", cli::col_red("foobar"))), "--" ) ansi_align_width(character()) }) }) test_that("random_id", { expect_true(is.character(random_id())) expect_true(nchar(random_id()) >= 5) }) test_that("readline", { args <- NULL mockery::stub( readline, "base::readline", function(...) args <<- list(...) 
) readline(prompt = "prompt") expect_equal(args, list("prompt")) }) test_that("is_interactive", { withr::local_options(rlib_interactive = TRUE) expect_true(is_interactive()) withr::local_options(rlib_interactive = FALSE) expect_false(is_interactive()) withr::local_options(rlib_interactive = NULL) withr::local_options(knitr.in.progress = TRUE) expect_false(is_interactive()) withr::local_options(knitr.in.progress = NULL) withr::local_options(rstudio.notebook.executing = TRUE) expect_false(is_interactive()) withr::local_options(rstudio.notebook.executing = NULL) withr::local_envvar(TESTTHAT = "true") expect_false(is_interactive()) withr::local_envvar(TESTTHAT = NA_character_) mockery::stub(is_interactive, "interactive", FALSE) expect_false(is_interactive()) mockery::stub(is_interactive, "interactive", TRUE) expect_true(is_interactive()) }) test_that("update", { orig <- list(a = 1, b = 2) expect_equal(update(orig, list()), orig) expect_equal(update(orig, list(a = 2, c = 3)), list(a = 2, b = 2, c = 3)) }) test_that("get_maintainer_email", { pkg <- test_path("fixtures/pkg") expect_equal(get_maintainer_email(pkg), "Josiah.Carberry@example.com") pkg2 <- test_path("fixtures/pkg_0.0.0.9000.tar.gz") expect_equal(get_maintainer_email(pkg2), "first.last@example.com") bad <- tempfile() on.exit(unlink(bad, recursive = TRUE), add = TRUE) dir.create(bad) expect_error(get_maintainer_email(bad), "file found") bad2 <- test_path("fixtures/bad.tar.gz") expect_error(get_maintainer_email(bad2), "file in package") }) test_that("is_dir", { tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) file.create(tmp) expect_true(is_dir(tempdir())) expect_false(is_dir(tmp)) }) rhub/tests/testthat/test-gh.R0000644000176200001440000000461214605236573015730 0ustar liggesuserstest_that("parse_gh_url", { expect_snapshot({ parse_gh_url("https://github.com/r-lib/cli") parse_gh_url("http://github.com/r-lib/cli") parse_gh_url("https://my.private.gh/user/repo") parse_gh_url("http://my.private.gh/user/repo") }) }) test_that("gh_headers", { expect_snapshot({ gh_headers("mytoken") }) }) test_that("gh_query_process_response", { resp <- readRDS(test_path("fixtures/gh-response.rds")) expect_snapshot({ gh_query_process_response(resp) }) }) test_that("gh_rest_get, async_gh_rest_get", { called <- FALSE mockery::stub( gh_rest_get, "async_gh_rest_get", function(...) called <<- TRUE ) gh_rest_get("https://github.com", "/repos/r-hub/rhub", "secret") expect_true(called) resp <- readRDS(test_path("fixtures/gh-response.rds")) mockery::stub( async_gh_rest_get, "http_get", function(...) async_constant(resp) ) json <- synchronise(async_gh_rest_get( "https://api.github.com", "/repos/r-hub/rhub/actions/workflows", "secret" )) expect_snapshot(json) }) test_that("gh_rest_post, async_gh_rest_post", { called <- FALSE mockery::stub( gh_rest_post, "async_gh_rest_post", function(...) called <<- TRUE ) data <- "foobar" gh_rest_post( "https://api.github.com", "/repos/r-lib/ps/actions/workflows/rhub.yaml/dispatches", "secret", data ) expect_true(called) resp <- readRDS(test_path("fixtures/gh-response-post.rds")) mockery::stub( async_gh_rest_post, "http_post", function(...) async_constant(resp) ) json <- synchronise(async_gh_rest_post( "https://api.github.com", "/repos/r-lib/ps/actions/workflows/rhub.yaml/dispatches", "secret", data )) expect_snapshot(json) }) test_that("gh_gql_get, async_gh_gql_get", { called <- FALSE mockery::stub( gh_gql_get, "async_gh_gql_get", function(...) 
called <<- TRUE ) query <- "{ repository(owner: \"r-hub\", name: \"rhub\") { pullRequest(number: 579) { headRefOid } } } " url <- parse_gh_url("https://github.com/r-lib/ps") gh_gql_get(url$graphql, query, "secret") expect_true(called) resp <- readRDS(test_path("fixtures/gh-response-gql.rds")) mockery::stub( async_gh_gql_get, "http_post", function(...) async_constant(resp) ) json <- synchronise(async_gh_gql_get(url$graphql, query, "secret")) expect_snapshot(json) }) rhub/tests/testthat/fixtures/0000755000176200001440000000000014762412555016100 5ustar liggesusersrhub/tests/testthat/fixtures/rhub.yaml0000644000176200001440000000557614605171703017732 0ustar liggesusers# R-hub's generic GitHub Actions workflow file. It's canonical location is at # https://github.com/r-hub/actions/blob/v1/workflows/rhub.yaml # You can update this file to a newer version using the rhub2 package: # # rhub::rhub_setup() # # It is unlikely that you need to modify this file manually. name: R-hub run-name: "${{ github.event.inputs.id }}: ${{ github.event.inputs.name || format('Manually run by {0}', github.triggering_actor) }}" on: workflow_dispatch: inputs: config: description: 'A comma separated list of R-hub platforms to use.' type: string default: 'linux,windows,macos' name: description: 'Run name. You can leave this empty now.' type: string id: description: 'Unique ID. You can leave this empty now.' type: string jobs: setup: runs-on: ubuntu-latest outputs: containers: ${{ steps.rhub-setup.outputs.containers }} platforms: ${{ steps.rhub-setup.outputs.platforms }} steps: # NO NEED TO CHECKOUT HERE - uses: r-hub/actions/setup@v1 with: config: ${{ github.event.inputs.config }} id: rhub-setup linux-containers: needs: setup if: ${{ needs.setup.outputs.containers != '[]' }} runs-on: ubuntu-latest name: ${{ matrix.config.label }} strategy: fail-fast: false matrix: config: ${{ fromJson(needs.setup.outputs.containers) }} container: image: ${{ matrix.config.container }} steps: - uses: r-hub/actions/checkout@v1 - uses: r-hub/actions/platform-info@v1 with: token: ${{ secrets.RHUB_TOKEN }} job-config: ${{ matrix.config.job-config }} - uses: r-hub/actions/setup-deps@v1 with: token: ${{ secrets.RHUB_TOKEN }} job-config: ${{ matrix.config.job-config }} - uses: r-hub/actions/run-check@v1 with: token: ${{ secrets.RHUB_TOKEN }} job-config: ${{ matrix.config.job-config }} other-platforms: needs: setup if: ${{ needs.setup.outputs.platforms != '[]' }} runs-on: ${{ matrix.config.os }} name: ${{ matrix.config.label }} strategy: fail-fast: false matrix: config: ${{ fromJson(needs.setup.outputs.platforms) }} steps: - uses: r-hub/actions/checkout@v1 - uses: r-hub/actions/setup-r@v1 with: job-config: ${{ matrix.config.job-config }} token: ${{ secrets.RHUB_TOKEN }} - uses: r-hub/actions/platform-info@v1 with: token: ${{ secrets.RHUB_TOKEN }} job-config: ${{ matrix.config.job-config }} - uses: r-hub/actions/setup-deps@v1 with: job-config: ${{ matrix.config.job-config }} token: ${{ secrets.RHUB_TOKEN }} - uses: r-hub/actions/run-check@v1 with: job-config: ${{ matrix.config.job-config }} token: ${{ secrets.RHUB_TOKEN }} rhub/tests/testthat/fixtures/gh-response-gql.rds0000644000176200001440000000234014605236573021624 0ustar liggesusersUoE&i F@.0R엽nE!q?Vb-ώi;̬U8p?!q+wpެwےwyޛ7G5˲VUke|W5exoOTd$c'p-Y&-q‡vlB5O<%9&^8C,ᔘ#% D'D*7s=]0yӲ>3ky2ZL4 gMh;`뵐G9t^",Xg` fI2g 1 &RP͈<6,M뉐qv8{zi2СݠAȧNkng7 ]2YPȰK|%k! 
MFvXQ1% zf865d$/#W ی_RKҜxbN–$h$MSc`+ȉa^טJ(u§\8$ " !zAΒ\nM"@d9R0;3+@jo+M^R^Bdz|qǍ =wTب'{;wrڥaː`Č g#A"l2w茁p-E;my:kCS|xqX&` heo*D mӱߗ`H1.v}j6nU5N$D IJ}sgυ< Bqw VF@&l }߄WlikNzsiuҗҤd9v CO*!rMe =IQtH4 :M:C8iSEKTj-lz<r<[G#MOD}0#3rO*!Is!a6Tb c=_<{̆j%BqEvd}rIN(5d i@Pc%=TjAb9I;mQ-Z=(ʢݽN{6[][VkՅeNYcJW,$p5=>‚(&Aj'G9{a CW{M3YZҠ HrQmU&G0̐ ]~κmrx<3Car6s:$~=CDHY :qq~q̟g)g0;fAbix ^u!c0,KĺP 8Albk'PT9НT TMbǻ;m?1z{kؔL7ե6'N6 mBPXIәK"eĉef{Q?:'ccQQj=$=:}(=hna!$T4%'fg%b>\s964mFbϫ7)ϗtf<y"O))4>|Ǔ4ΧgzID\b 6_Jd&`wwlfzGMȼz9 ͫDkeKXb#ƯW@s~ / ppL=`|r@*k:oSG;MJn7xjyڠG:mNQLe9en1%[vGռn;j@oLOb<@@f\ q ^:V'p Y',Mįˎ5f4,gZ2prhub/tests/testthat/fixtures/validated_emails.csv0000644000176200001440000000023114605451765022102 0ustar liggesusers"csardi.gabor@gmail.com","token1" "csardi.gabor+new@gmail.com","token2" "csardi.gabor+another@gmail.com","token3" "csardi.gabor+fake@gmail.com","token4" rhub/tests/testthat/fixtures/rc-response-list-repos.rds0000644000176200001440000000122114605460203023132 0ustar liggesusersTAo0m n͒f+jLZŦNhr5KcvVB ~gG6m.cJ$'{ywM0 FX4 E=9W`\0J<|ou2VJ㈈`Ates=A8{ 71HQx%KVmEB[u6[^`o[f os`Kհl?x԰LEi$ ]컜A"5>DJ$ u ,Vi*֪op&{ ߱LpəP$8T]]!k`6MpFd;mg=DHeO(OOfp*ZȂs[OXeR7`.눑=/ofͽϵ:Q;y49Y;f q,xD55(f sA)rX6YȒ u*8BW^8u'tc*GuK:.p#p*bCRzpaxg [(1*&4yeίC Ud$y5h+Pέ #ɫ@L<'\f",*E$ 8f =6%ăT;~c}SjK#rhub/tests/testthat/fixtures/rc-response-submit.rds0000644000176200001440000000144014605461355022350 0ustar liggesusersUAk@ۺD*,٤-ۦ"**"vKMd&N&vRWU'xW ^&Z)K L{߼͛]ӴV)RYW8#ZEٮqv'OD&iFcj[}ޱLl$=iGI+0>_k:;2tKeO)] ̹3nc]6t3~ ƿ96u"ou.ҋIX.jC1neCǞŌÐ[\Ѐ23tK='0Kb. RA%8p05Ӭ7fgQ慩Oea I.gyRڑ/I+..,]_d.f`s\iim̗gZ_m=9]j_^-Og2"hBʨ ð2 9.68ͲFD2ܥ\p_UtKR/qY-aM9ε\㞞E*"vy<>"=a6jQӀJEPGn}4^Rݬ/枈.$e`!QE3z')gFi(s@4]s9 RW >-0( S!eio fM]1պƳy{|Q\s8V~ tc%+X.TT3{j<`x Υ#_VW2lDɪ}RV7l_pqQ;㖐 IBlrhub/tests/testthat/fixtures/manifest.json0000644000176200001440000010410114762412555020576 0ustar liggesusers{ "updated": "2024-04-09 06:45:24", "containers": [ { "tag": "ghcr.io/r-hub/containers/atlas:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/atlas:latest", "id": "ghcr.io/r-hub/containers/atlas@sha256:73712127ae04572054645364f1751207654803d30d14db26d2f2ce97a0c637cf", "size": 1578504864, "created": "2024-04-09T06:12:37.255431954Z", "/etc/os-release": "NAME=\"Fedora Linux\"\nVERSION=\"38 (Container Image)\"\nID=fedora\nVERSION_ID=38\nVERSION_CODENAME=\"\"\nPLATFORM_ID=\"platform:f38\"\nPRETTY_NAME=\"Fedora Linux 38 (Container Image)\"\nANSI_COLOR=\"0;38;2;60;110;180\"\nLOGO=fedora-logo-icon\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:38\"\nDEFAULT_HOSTNAME=\"fedora\"\nHOME_URL=\"https://fedoraproject.org/\"\nDOCUMENTATION_URL=\"https://docs.fedoraproject.org/en-US/fedora/f38/system-administrators-guide/\"\nSUPPORT_URL=\"https://ask.fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=38\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=38\nSUPPORT_END=2024-05-14\nVARIANT=\"Container Image\"\nVARIANT_ID=container\n", "uname -a": "Linux 2b4494639b7d 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Fedora Linux 38 (Container Image)\n\nMatrix products: default\nBLAS/LAPACK: /usr/lib64/atlas/libsatlas.so.3.10; LAPACK version 3.11.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] 
LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/centos7:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/centos7:latest", "id": "ghcr.io/r-hub/containers/centos7@sha256:e1696fae6bb2bb8a29e97ebcd6c5a700b7bd29afc4fe31ec89162e991ecd5198", "size": 2397242127, "created": "2024-04-09T06:14:01.814230204Z", "/etc/os-release": "NAME=\"CentOS Linux\"\nVERSION=\"7 (Core)\"\nID=\"centos\"\nID_LIKE=\"rhel fedora\"\nVERSION_ID=\"7\"\nPRETTY_NAME=\"CentOS Linux 7 (Core)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:centos:centos:7\"\nHOME_URL=\"https://www.centos.org/\"\nBUG_REPORT_URL=\"https://bugs.centos.org/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-7\"\nCENTOS_MANTISBT_PROJECT_VERSION=\"7\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"7\"\n\n", "uname -a": "Linux 5251f7226e27 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R version 4.4.0 alpha (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: CentOS Linux 7 (Core)\n\nMatrix products: default\nBLAS/LAPACK: /usr/lib64/libopenblasp-r0.3.3.so; LAPACK version 3.8.0\n\nlocale:\n [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 \n [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 \n [7] LC_PAPER=en_US.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.4.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/clang-asan:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/clang-asan:latest", "id": "ghcr.io/r-hub/containers/clang-asan@sha256:830c2567561ffbc0af80f10d173442d81fc6f1f6ae29a9bc11145dda1d659fe2", "size": 2840842353, "created": "2024-04-09T06:12:55.880961968Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux e331221f2aa7 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=C \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and 
not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/clang16:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/clang16:latest", "id": "ghcr.io/r-hub/containers/clang16@sha256:bbeefb96a170c9586336e557bb12d629485724688cc9f0243700ebd2d847a8b7", "size": 2027913227, "created": "2024-04-09T06:13:13.151212646Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux ced42ddabb38 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-06 r86351)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/gcc13:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/gcc13:latest", "id": "ghcr.io/r-hub/containers/gcc13@sha256:a82f19afc006347129a47764ba28029dcb3fb2dfdc871452808065dd1e303f98", "size": 1484164831, "created": "2024-04-09T06:14:04.791818391Z", "/etc/os-release": "NAME=\"Fedora Linux\"\nVERSION=\"38 (Container Image)\"\nID=fedora\nVERSION_ID=38\nVERSION_CODENAME=\"\"\nPLATFORM_ID=\"platform:f38\"\nPRETTY_NAME=\"Fedora Linux 38 (Container Image)\"\nANSI_COLOR=\"0;38;2;60;110;180\"\nLOGO=fedora-logo-icon\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:38\"\nDEFAULT_HOSTNAME=\"fedora\"\nHOME_URL=\"https://fedoraproject.org/\"\nDOCUMENTATION_URL=\"https://docs.fedoraproject.org/en-US/fedora/f38/system-administrators-guide/\"\nSUPPORT_URL=\"https://ask.fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=38\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=38\nSUPPORT_END=2024-05-14\nVARIANT=\"Container Image\"\nVARIANT_ID=container\n", "uname -a": "Linux c93de595a41d 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Fedora Linux 38 (Container Image)\n\nMatrix products: default\nBLAS/LAPACK: /usr/lib64/libopenblasp-r0.3.21.so; LAPACK version 3.9.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base 
packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/nold:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/nold:latest", "id": "ghcr.io/r-hub/containers/nold@sha256:11a8b83aea1357836bd71145af72a532350ac3a7ece7454f9423b09d91796228", "size": 862279216, "created": "2024-04-09T06:12:12.256536782Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux 9d280b3dde4b 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 \n [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 \n [7] LC_PAPER=en_US.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/ubuntu-clang:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/ubuntu-clang:latest", "id": "ghcr.io/r-hub/containers/ubuntu-clang@sha256:e51927635b5a53770815df9899e46bf5f9a35135f219c65281c954a5654f4767", "size": 1945200899, "created": "2024-04-09T06:12:48.161471464Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux 82704c467c32 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n[1] C\n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/ubuntu-gcc12:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/ubuntu-gcc12:latest", "id": "ghcr.io/r-hub/containers/ubuntu-gcc12@sha256:c6ff20afa6fa31735ee53742f510502c9b43506893467d0ecb7a573728623b36", "size": 1248830456, "created": 
"2024-04-09T06:16:20.406482824Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux b3575bcafd19 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n[1] C\n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" }, { "tag": "ghcr.io/r-hub/containers/ubuntu-gcc12:latest", "id": "ghcr.io/r-hub/containers/ubuntu-gcc12@sha256:6331177e064d023b93c759829cda12940a9b746d7998d88b0c81199147308625", "size": 1248652478, "created": "2024-04-08T06:13:33.032270912Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux 9568e23fe0c8 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-06 r86351)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n[1] C\n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/ubuntu-next:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/ubuntu-next:latest", "id": "ghcr.io/r-hub/containers/ubuntu-next@sha256:1cb48ff4a5b4347f8f19c1500384f3c94b1411a995e9963fe717718509dc0671", "size": 1011441646, "created": "2024-04-09T06:12:59.601235931Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux 231944fe9e74 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R version 4.4.0 alpha (2024-04-07 r86351)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: 
default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=C \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.4.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/ubuntu-release:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/ubuntu-release:latest", "id": "ghcr.io/r-hub/containers/ubuntu-release@sha256:55ee8f90e44d17e1ef1577f146e8fc1230d4bcc6571d104160ab934992d931c2", "size": 1006497583, "created": "2024-04-09T06:13:01.492561439Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux fd334ff5b20e 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R version 4.3.3 (2024-02-29)\nPlatform: x86_64-pc-linux-gnu (64-bit)\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=C \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.3.3\n" } ] }, { "tag": "ghcr.io/r-hub/containers/clang17:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/clang17:latest", "id": "ghcr.io/r-hub/containers/clang17@sha256:ea84d4755f2f6a734cb5c81200bc15c23c8d16219054b960e60d282c0798f5b5", "size": 2728052070, "created": "2024-04-09T06:13:02.31898816Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux 7926a1db6191 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-06 r86351)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] 
LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/valgrind:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/valgrind:latest", "id": "ghcr.io/r-hub/containers/valgrind@sha256:61b2e645e1ed133d19bf301ec0c5fcac7e17520cbbb6fc61e830837862249a61", "size": 1510253875, "created": "2024-04-09T06:12:17.372118144Z", "/etc/os-release": "NAME=\"Fedora Linux\"\nVERSION=\"38 (Container Image)\"\nID=fedora\nVERSION_ID=38\nVERSION_CODENAME=\"\"\nPLATFORM_ID=\"platform:f38\"\nPRETTY_NAME=\"Fedora Linux 38 (Container Image)\"\nANSI_COLOR=\"0;38;2;60;110;180\"\nLOGO=fedora-logo-icon\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:38\"\nDEFAULT_HOSTNAME=\"fedora\"\nHOME_URL=\"https://fedoraproject.org/\"\nDOCUMENTATION_URL=\"https://docs.fedoraproject.org/en-US/fedora/f38/system-administrators-guide/\"\nSUPPORT_URL=\"https://ask.fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=38\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=38\nSUPPORT_END=2024-05-14\nVARIANT=\"Container Image\"\nVARIANT_ID=container\n", "uname -a": "Linux a5c2a7f80cfa 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Fedora Linux 38 (Container Image)\n\nMatrix products: default\nBLAS/LAPACK: /usr/lib64/libopenblasp-r0.3.21.so; LAPACK version 3.9.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/mkl:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/mkl:latest", "id": "ghcr.io/r-hub/containers/mkl@sha256:ef906ff992318523e416f4cbd64318da2fea988e494346802182b4dbb8ef2d13", "size": 6056784467, "created": "2024-04-09T06:18:14.857992465Z", "/etc/os-release": "NAME=\"Fedora Linux\"\nVERSION=\"38 (Container Image)\"\nID=fedora\nVERSION_ID=38\nVERSION_CODENAME=\"\"\nPLATFORM_ID=\"platform:f38\"\nPRETTY_NAME=\"Fedora Linux 38 (Container Image)\"\nANSI_COLOR=\"0;38;2;60;110;180\"\nLOGO=fedora-logo-icon\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:38\"\nDEFAULT_HOSTNAME=\"fedora\"\nHOME_URL=\"https://fedoraproject.org/\"\nDOCUMENTATION_URL=\"https://docs.fedoraproject.org/en-US/fedora/f38/system-administrators-guide/\"\nSUPPORT_URL=\"https://ask.fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=38\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=38\nSUPPORT_END=2024-05-14\nVARIANT=\"Container Image\"\nVARIANT_ID=container\n", "uname -a": "Linux 2263b0d2665a 
6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Fedora Linux 38 (Container Image)\n\nMatrix products: default\nBLAS/LAPACK: /opt/intel/oneapi/mkl/2023.2.0/lib/intel64/libmkl_gf_lp64.so.2; LAPACK version 3.10.1\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/donttest:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/donttest:latest", "id": "ghcr.io/r-hub/containers/donttest@sha256:ac9f1517391287c1e341fbb4e7896ef2e9fcebc0a82473a78b0f90564e26e564", "size": 1088057712, "created": "2024-04-09T06:13:16.076828942Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux e3d53d796a51 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-06 r86351)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 \n [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 \n [7] LC_PAPER=en_US.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/intel:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/intel:latest", "id": "ghcr.io/r-hub/containers/intel@sha256:3661a0f9d00ba84e5a3700342d075bb120ed9d25de6c7a27d3eaa82d1aff7d12", "size": 10266420507, "created": "2024-04-09T06:25:47.645001386Z", "/etc/os-release": "NAME=\"Fedora Linux\"\nVERSION=\"38 (Container Image)\"\nID=fedora\nVERSION_ID=38\nVERSION_CODENAME=\"\"\nPLATFORM_ID=\"platform:f38\"\nPRETTY_NAME=\"Fedora Linux 38 (Container 
Image)\"\nANSI_COLOR=\"0;38;2;60;110;180\"\nLOGO=fedora-logo-icon\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:38\"\nDEFAULT_HOSTNAME=\"fedora\"\nHOME_URL=\"https://fedoraproject.org/\"\nDOCUMENTATION_URL=\"https://docs.fedoraproject.org/en-US/fedora/f38/system-administrators-guide/\"\nSUPPORT_URL=\"https://ask.fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=38\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=38\nSUPPORT_END=2024-05-14\nVARIANT=\"Container Image\"\nVARIANT_ID=container\n", "uname -a": "Linux ba7c264ea546 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Fedora Linux 38 (Container Image)\n\nMatrix products: default\nBLAS/LAPACK: /opt/intel/oneapi/mkl/2023.2.0/lib/intel64/libmkl_intel_lp64.so.2; LAPACK version 3.10.1\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/nosuggests:latest", "builds": [ { "tag": "ghcr.io/r-hub/containers/nosuggests:latest", "id": "ghcr.io/r-hub/containers/nosuggests@sha256:ab3185b14949e623daefc9b2438727aa5e82dbff880d5cd43727d6e956a911aa", "size": 1779022921, "created": "2024-04-09T06:12:53.124367631Z", "/etc/os-release": "NAME=\"Fedora Linux\"\nVERSION=\"38 (Container Image)\"\nID=fedora\nVERSION_ID=38\nVERSION_CODENAME=\"\"\nPLATFORM_ID=\"platform:f38\"\nPRETTY_NAME=\"Fedora Linux 38 (Container Image)\"\nANSI_COLOR=\"0;38;2;60;110;180\"\nLOGO=fedora-logo-icon\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:38\"\nDEFAULT_HOSTNAME=\"fedora\"\nHOME_URL=\"https://fedoraproject.org/\"\nDOCUMENTATION_URL=\"https://docs.fedoraproject.org/en-US/fedora/f38/system-administrators-guide/\"\nSUPPORT_URL=\"https://ask.fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=38\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=38\nSUPPORT_END=2024-05-14\nVARIANT=\"Container Image\"\nVARIANT_ID=container\n", "uname -a": "Linux 14d5c71c0aa2 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-08 r86370)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Fedora Linux 38 (Container Image)\n\nMatrix products: default\nBLAS/LAPACK: /usr/lib64/libopenblasp-r0.3.21.so; LAPACK version 3.9.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] }, { "tag": "ghcr.io/r-hub/containers/clang18:latest", "builds": [ { "tag": 
"ghcr.io/r-hub/containers/clang18:latest", "id": "ghcr.io/r-hub/containers/clang18@sha256:feab9e4efe20107844dad9cf746056dd8b67a97b3b7df16b49822a8c4c1674ac", "size": 2854098910, "created": "2024-04-09T06:12:55.002344268Z", "/etc/os-release": "PRETTY_NAME=\"Ubuntu 22.04.4 LTS\"\nNAME=\"Ubuntu\"\nVERSION_ID=\"22.04\"\nVERSION=\"22.04.4 LTS (Jammy Jellyfish)\"\nVERSION_CODENAME=jammy\nID=ubuntu\nID_LIKE=debian\nHOME_URL=\"https://www.ubuntu.com/\"\nSUPPORT_URL=\"https://help.ubuntu.com/\"\nBUG_REPORT_URL=\"https://bugs.launchpad.net/ubuntu/\"\nPRIVACY_POLICY_URL=\"https://www.ubuntu.com/legal/terms-and-policies/privacy-policy\"\nUBUNTU_CODENAME=jammy\n", "uname -a": "Linux 24d98552f728 6.5.0-1017-azure #17~22.04.1-Ubuntu SMP Sat Mar 9 04:50:38 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux", "sessionInfo()": "R Under development (unstable) (2024-04-06 r86351)\nPlatform: x86_64-pc-linux-gnu\nRunning under: Ubuntu 22.04.4 LTS\n\nMatrix products: default\nBLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 \nLAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.20.so; LAPACK version 3.10.0\n\nlocale:\n [1] LC_CTYPE=en_GB.UTF-8 LC_NUMERIC=C \n [3] LC_TIME=en_GB.UTF-8 LC_COLLATE=en_GB.UTF-8 \n [5] LC_MONETARY=en_GB.UTF-8 LC_MESSAGES=en_GB.UTF-8 \n [7] LC_PAPER=en_GB.UTF-8 LC_NAME=C \n [9] LC_ADDRESS=C LC_TELEPHONE=C \n[11] LC_MEASUREMENT=en_GB.UTF-8 LC_IDENTIFICATION=C \n\ntime zone: Etc/UTC\ntzcode source: system (glibc)\n\nattached base packages:\n[1] stats graphics grDevices utils datasets methods base \n\nloaded via a namespace (and not attached):\n[1] compiler_4.5.0\n" } ] } ] } rhub/tests/testthat/fixtures/gh-response-post.rds0000644000176200001440000000167114605236573022034 0ustar liggesusersuTn7JJPCNb KEh@sG!u\s w(rM_ho4-K 9u*^Z*U7?>ĄKTYzռ!Qi,Zi1nu$VrK6[0 |kd{{y5b<>n$Q7C\yF=bB2NII4]BvQvF W D.K?gS0ڕ$X*=u#7/+ܾsH,Dk +oC$a[tD9O0Jc[)h{ &[4xB5V4M9RiHjo~7ݩRo-RXWuW1+i)\"@g9D1 y_řCK34ң񱛝`,Wi:) i8oY!hDZ4Xr󝒾uDZcz +`_flht2;7)K$GDzbUZL W(sj&ME% ɜuH6i,RGf`X8lT%qtKgvAL^ D|Vԗ h &* $Z$EШ®`RIxܨ/FOݗ\ErCspdӤ & v^oIdž~Qo5Xͮ=*+3cR7Fӻ_Y׻𗿿OԽ~w}Y%Xf[iT"jL[wARiR.OQ0ifceqe* E`5q7ndSTs*B }NDWӆmrhub/tests/testthat/fixtures/gh-response.rds0000644000176200001440000000260414605236573021046 0ustar liggesusersWnFVl*Tȥ-&)Y#u+h wXQ\v얊T q, pB闤%=W9Do֭#eRq16ڔHUot;ar4m_+.X*rE56sFӵy5MEӰke:nlz5=RU.Q q|\5rDM0h@18దlU  Au0x c+9FmOq 6[) +>a۬Q<07ۉ*"q&0??%݇$ aJch1lΌr9}#'IBw$@B}?אJR0T M &be~HDŽ(^3`@$q$x8̟Ά"}ӀC/Bx,ǥEb"F"*Y VdcG>HƍPҁ}G:J fr6p)vîӰOLy *$?>ĺLHiЩρXxoþ`ɖKK჋;Uõӹwz߲tpnՍfT,qgeiļL1nmBU>0iK!]axo|go/|Uҫ󫆐VUkj}lQ!I4 O!Q$MX挗P$SIحn2~2۪*P0MݽAgVfhe0anm< 8Q072g`c2 1+At Tr=Sjݵ-\]]pIΆ}*@f}M3ugGX;ݘ~2c-Thg \]tsmn,wq^BO YP0 WUD/LG ݧ+W]T9/*AVd|ǀՋ,GcNѱYm t fNLp^ 1bDГ^0׳:"Ö[y N*-GCkxnZmEl#h=V׍ݴ]ӝrhub/tests/testthat/fixtures/pkg/0000755000176200001440000000000014762410770016656 5ustar liggesusersrhub/tests/testthat/fixtures/pkg/R/0000755000176200001440000000000014605464211017052 5ustar liggesusersrhub/tests/testthat/fixtures/pkg/R/pkg.R0000644000176200001440000000000014605464211017744 0ustar liggesusersrhub/tests/testthat/fixtures/pkg/NAMESPACE0000644000176200001440000000005614604774755020111 0ustar liggesusers# Generated by roxygen2: do not edit by hand rhub/tests/testthat/fixtures/pkg/DESCRIPTION0000644000176200001440000000070114762410770020362 0ustar liggesusersPackage: pkg Title: What the Package Does (One Line, Title Case) Version: 0.0.0.9000 Authors@R: person("Josiah", "Carberry", , "Josiah.Carberry@example.com", role = c("aut", "cre"), comment = c(ORCID = "0000-0002-1825-0097")) Description: What the package does (one 
paragraph). License: `use_mit_license()`, `use_gpl3_license()` or friends to pick a license Encoding: UTF-8 Roxygen: list(markdown = TRUE) RoxygenNote: 7.3.1.9000 rhub/tests/testthat/fixtures/bad.tar.gz0000644000176200001440000000400014604775735017756 0ustar liggesusersbad0000644000076500000000000000000314604771617011733 0ustar00gaborcsardiwheelérhub/tests/testthat/fixtures/pkg_0.0.0.9000.tar.gz0000644000176200001440000000105414605464305021106 0ustar liggesusersTmk0gbCډo N:idcѪؒPاVsɺS*YVry xQB#iAa 8*M$[!SEdƞCݮ"4[ \~ zO1DPΒS3N` !7*<_?Bu  *ܱ/T='FjW)SQM 3ML*vcI6^u2ܮ{fכMl׵&TUZդ5T{oYJR..Nǽ鞔yU iAHIFy@ XzlgMy*2{k-sqLi$جSy!4}TJDYj0yA#/G1@8OZq/}zbuZ?[/Wq2}; QDaX [ h=aAĈfL׆=Վu-ZEQFrhub/tests/testthat/fixtures/gh-response-doctor-rest.rds0000644000176200001440000000251014605256640023302 0ustar liggesusersVAoE$NҦ TXU*']C&NHRA-ώ%ef6q*!q΅ĩ\+ޮwE3y{{o Ø5Jss\E]ߌQ2.k e, ~ IwaӘ GaЭĢ X$*Bv T CPr9ShO58ӳL2\PZ}[c?4~B˦噭LtjMa{֧͘'X!!ܧU6xmBcY6[0< MƎr@-ֱĪu}ުn<6ViQgի*Z6vh ؽiH%mƘl Gi&&o2OW^%V<(Ӵ"'54z!5mA.-Kd5a;niutaNfnnc :/#yngՓO'(A@;MڣCDAԟ&* pBQiq7E ):DcQ C'8,8FgqbnaG?Oh\kӘSuNZf>e]53Ht}AAАa_υO31nWǁE"t:" B"4EoP"y'bI?;ڊ󳤪b>TуFn6ͭvkfUo~-zk™'v#'.H6Pzk"$=]& (J9 gjM%+#p 0,^<+}m历 Ɨ',0$P 1Bf}sEWS'Ȣ.n%H>?~%hTaӣɶIc >O'@f} ؟%\ 9 /fe mS.?)#(܉kֻ8ݼ. {`8yH S rhub/tests/testthat/test-check.R0000644000176200001440000000233514605244077016405 0ustar liggesuserstest_that("rhub_check", { withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) mockery::stub(rhub_check, "doctor_find_pat", "secret") presp <- gh_query_process_response( readRDS(test_path("fixtures/gh-response-post.rds")) ) mockery::stub(rhub_check, "gh_rest_post", presp) mockery::stub(rhub_check, "random_id", "kleptomaniac-harlequinbug") expect_snapshot({ rhub_check( "https://github.com/r-lib/ps", platforms = c("linux", "clang18") ) }) # error presp$status_code <- 401L presp$content <- list(message = "I am so, so sorry!") mockery::stub(rhub_check, "gh_rest_post", presp) expect_snapshot(error = TRUE, { rhub_check( "https://github.com/r-lib/ps", platforms = c("linux", "clang18") ) }) # looks up current branch if it is needed presp$status_code <- 204L mockery::stub(rhub_check, "gh_rest_post", presp) mockery::stub(rhub_check, "setup_find_git_root", getwd()) mockery::stub(rhub_check, "doctor_find_gh_url", "https://github.com/r-lib/ps") mockery::stub(rhub_check, "gert::git_branch", "main") expect_snapshot({ rhub_check(platforms = c("linux", "clang18")) }) }) rhub/tests/testthat/helpers.R0000644000176200001440000000723614762411677016030 0ustar liggesusershttp_app <- function(wd = getwd()) { `%||%` <- function(l, r) if (is.null(l)) r else l force(test_dir) app <- webfakes::httpbin_app() # An error with a JSON response that has a 'message' app$get("/rhub-error", function(req, res) { msg <- req$query[["msg"]] status <- as.integer(req$query[["status"]] %||% 401) res$ set_status(status)$ send_json(object = list(message = msg)) }) # An error with an invalid JSON response app$get("/rhub-error2", function(req, res) { status <- as.integer(req$query[["status"]] %||% 401) res$ set_status(status)$ send_json(text = "[this is not valid json]") }) # An error with a JSON response, without a 'message' app$get("/rhub-error3", function(req, res) { status <- as.integer(req$query[["status"]] %||% 401) res$ set_status(status)$ send_json(object = list(foo = "bar")) }) app$get( c( "/rhub.yaml", "/platforms.json", "/platforms2.json", "/manifest.json" ), function(req, res) { yaml <- testthat::test_path("fixtures", basename(req$path)) 
res$send_file(yaml) } ) # SSE sse <- function(req, res) { `%||%` <- function(l, r) if (is.null(l)) r else l if (is.null(res$locals$sse)) { progress <- !is.null(req$query$progress) error <- !is.null(req$query$error) duration <- as.double(req$query$duration %||% 2) delay <- as.double(req$query$delay %||% 0) numevents <- as.integer(req$query$numevents %||% 5) pause <- max(duration / numevents, 0.01) res$locals$sse <- list( sent = 0, numevents = numevents, pause = pause, progress = progress, error = error ) res$ set_header("cache-control", "no-cache")$ set_header("content-type", "text/event-stream")$ set_header("access-control-allow-origin", "*")$ set_header("connection", "keep-alive")$ set_status(200) if (delay > 0) { return(res$delay(delay)) } } msg <- paste0( "event: ", res$locals$sse$sent + 1L, "\n", "message: live long and prosper\n\n" ) res$locals$sse$sent <- res$locals$sse$sent + 1L res$write(msg) if (res$locals$sse$progress) { msg <- paste0( "event: progress\n", "data: \"This is {.code it}: ", res$locals$sse$sent, "\"\n\n" ) res$write(msg) } if (res$locals$sse$sent == res$locals$sse$numevents) { if (res$locals$sse$progress) { msg <- if (res$locals$sse$error) { paste0( "event: error\n", "data: \"This is a {.code failure}.\"\n\n" ) } else { paste0( "event: result\n", "data: \"All is {.code good}.\"\n\n" ) } res$write(msg) } res$send("") } else { res$delay(res$locals$sse$pause) } } app$get("/sse", sse) app$post("/sse", sse) app } http <- webfakes::new_app_process(http_app()) redact_port <- function(x) { x <- gsub(":[0-9]+", ":", x) x <- gsub("://", ":/", x, fixed = TRUE) } redact_ae_header <- function(x) { gsub( "\"Accept[-]Encoding\": \"[^\"]*\"", "\"Accept-Encoding\": \"\"", x ) } redact_abs_path <- function(x) { wd <- normalizePath(getwd()) wd2 <- normalizePath(getwd(), winslash = "/") x2 <- gsub(wd, "", x, fixed = TRUE) x3 <- gsub(wd2, "", x2, fixed = TRUE) x3 } # for the rematch tests df <- function(...) { args <- list(...) structure( args, names = names(args), row.names = seq_along(args[[1]]), class = c("data.frame") ) } asdf <- function(...) 
{ as.data.frame(df(...)) } rhub/tests/testthat/test-platforms.R0000644000176200001440000000635114762413216017336 0ustar liggesuserstest_that("get_platforms", { # empty cache rm(list = ls(the_cache), envir = the_cache) withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) plt <- get_platforms() plt[[1]] <- gsub("\r\n", "\n", plt[[1]], fixed = TRUE) plt[[2]] <- gsub("\r\n", "\n", plt[[2]], fixed = TRUE) expect_snapshot({ cli::hash_obj_sha1(plt[[1]]) cli::hash_obj_sha1(plt[[2]]) }) }) test_that("rhub_platforms", { # empty cache rm(list = ls(the_cache), envir = the_cache) withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) expect_snapshot({ rhub_platforms() }) # if a platform refers to a container that does not exit # that's an error, but it is not an error here, here we test the printing withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms2.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) library(pillar) expect_snapshot({ rhub_platforms()[] }) }) test_that("format.rhub_platforms", { # empty cache rm(list = ls(the_cache), envir = the_cache) withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) plt <- rhub_platforms() # VM with a single R version plt$r_version[[1]] <- "R 4.4.0" # Container w/o an alias plt$aliases[[nrow(plt)]] <- character() expect_snapshot({ print(plt) }) }) test_that("summary.rhub_platforms", { # empty cache rm(list = ls(the_cache), envir = the_cache) withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) plt <- rhub_platforms() expect_snapshot({ summary(plt) }) }) test_that("select_platforms", { # empty cache rm(list = ls(the_cache), envir = the_cache) # error getting platforms withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/bad-bad-bad-ooops"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) expect_snapshot(error = TRUE, { select_platforms() }) withr::local_envvar( RHUB_PLATFORMS_URL = http$url("/platforms.json"), RHUB_CONTAINERS_URL = http$url("/manifest.json") ) # non-interactive sessions need explicit platforms withr::local_options(rlib_interactive = FALSE) expect_snapshot(error = TRUE, { select_platforms() }) # non-interactive mode withr::local_options(rlib_interactive = FALSE) expect_snapshot({ select_platforms(c("linux", "clang18")) }) expect_snapshot(error = TRUE, { select_platforms(c("linux", "clang18", "thisisnotit")) }) # interactive mode uses readline() withr::local_options(rlib_interactive = TRUE) mockery::stub(select_platforms, "readline", function(prompt) { cat(prompt) cat(" 1, 3, 9\n") " 1, 3, 9" }) expect_snapshot({ select_platforms() }) mockery::stub(select_platforms, "readline", function(prompt) { cat(prompt) cat("0\n") "0" }) expect_snapshot(error = TRUE, { select_platforms() }) mockery::stub(select_platforms, "readline", function(prompt) { cat(prompt) cat("10000\n") "10000" }) expect_snapshot(error = TRUE, { select_platforms() }) }) rhub/tests/testthat/test-api.R0000644000176200001440000000316114605470545016100 0ustar liggesuserstest_that("query GET", { withr::local_envvar(RHUB_SERVER = http$url()) expect_snapshot({ cat(rawToChar(query("/get")$content)) }, transform = function(x) redact_port(redact_ae_header(x))) }) test_that("query HTTP errors", { withr::local_envvar(RHUB_SERVER = http$url()) expect_snapshot(error = TRUE, { 
query("/rhub-error?msg=iamsosorryabouththat") }) expect_snapshot(error = TRUE, { query("/rhub-error2") }) expect_snapshot(error = TRUE, { query("/rhub-error3") }) }) test_that("query POST", { withr::local_envvar(RHUB_SERVER = http$url()) data <- charToRaw(jsonlite::toJSON(list(foo = "bar", foobar = 1:3))) expect_snapshot({ cat(rawToChar(query("/post", method = "POST", data = data)$content)) }, transform = function(x) redact_port(redact_ae_header(x))) }) test_that("query, unknown verb", { withr::local_envvar(RHUB_SERVER = http$url()) expect_snapshot(error = TRUE, { query("/anything", method = "REPORT") query("/anything", method = "REPORT", sse = TRUE) }, transform = redact_port) }) test_that("query SSE", { withr::local_envvar(RHUB_SERVER = http$url()) data <- charToRaw(jsonlite::toJSON(list(foo = "bar", foobar = 1:3))) expect_snapshot({ query("/sse", sse = TRUE)$sse query("/sse", method = "POST", data = data, sse = TRUE)$sse }) # progress, result expect_snapshot({ resp <- query("/sse?progress=true&numevents=2", sse = TRUE) cat(rawToChar(resp$content)) }) # progress, result, error expect_snapshot(error = TRUE, { resp <- query("/sse?progress=true&numevents=2&error=true", sse = TRUE) cat(rawToChar(resp$content)) }) }) rhub/tests/testthat/test-cli.R0000644000176200001440000000050014605205726016065 0ustar liggesuserstest_that("cli_status", { withr::local_options(cli.ansi = FALSE) expect_snapshot({ pid <- cli_status("This is a status message") cli::cli_status_clear(pid, result = "clear") }) expect_snapshot({ pid <- cli_status("This is a status message") cli::cli_status_clear(pid, result = "failed") }) }) rhub/tests/testthat/test-http-cache.R0000644000176200001440000000103414605236573017345 0ustar liggesuserstest_that("async_cached_http_get", { # empty cache rm(list = ls(the_cache), envir = the_cache) resp <- synchronise(async_cached_http_get(http$url("/get"))) expect_equal(length(the_cache), 1) expect_equal(resp, get(ls(the_cache), envir = the_cache)) # will not perform HTTP request now mockery::stub(async_cached_http_get, "http_get", function(...) 
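    # a real http_get() call here would mean the cache was bypassed, so fail hard: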
stop("no"))
  resp2 <- synchronise(async_cached_http_get(http$url("/get")))
  expect_equal(length(the_cache), 1)
  expect_equal(resp2, get(ls(the_cache), envir = the_cache))
})rhub/tests/testthat/test-rematch.R0000644000176200001440000000450414605176475016751 0ustar liggesuserstest_that("corner cases", {
  res <- re_match(.text <- c("foo", "bar"), "")
  expect_equal(res, df(.text = .text, .match = c("", "")))

  res <- re_match(.text <- c("foo", "", "bar"), "")
  expect_equal(res, df(.text = .text, .match = c("", "", "")))

  res <- re_match(.text <- character(), "")
  expect_equal(res, df(.text = .text, .match = character()))

  res <- re_match(.text <- character(), "foo")
  expect_equal(res, df(.text = .text, .match = character()))

  res <- re_match(.text <- character(), "foo (g1) (g2)")
  expect_equal(
    res,
    df(character(), character(), .text = .text, .match = character())
  )

  res <- re_match(.text <- character(), "foo (g1) (?<name>g2)")
  expect_equal(
    res,
    df(character(), name = character(), .text = .text, .match = character())
  )

  res <- re_match(.text <- "not", "foo")
  expect_equal(res, df(.text = .text, .match = NA_character_))
})

test_that("not so corner cases", {
  dates <- c("2016-04-20", "1977-08-08", "not a date",
    "2016", "76-03-02", "2012-06-30", "2015-01-21 19:58")
  isodate <- "([0-9]{4})-([0-1][0-9])-([0-3][0-9])"
  expect_equal(
    as.data.frame(re_match(text = dates, pattern = isodate)),
    asdf(
      c("2016", "1977", NA, NA, NA, "2012", "2015"),
      c("04", "08", NA, NA, NA, "06", "01"),
      c("20", "08", NA, NA, NA, "30", "21"),
      .text = dates,
      .match = c(dates[1:2], NA, NA, NA, "2012-06-30", "2015-01-21")
    )
  )

  isodaten <- "(?<year>[0-9]{4})-(?<month>[0-1][0-9])-(?<day>[0-3][0-9])"
  expect_equal(
    re_match(text = dates, pattern = isodaten),
    df(
      year = c("2016", "1977", NA, NA, NA, "2012", "2015"),
      month = c("04", "08", NA, NA, NA, "06", "01"),
      day = c("20", "08", NA, NA, NA, "30", "21"),
      .text = dates,
      .match = c(dates[1:2], NA, NA, NA, "2012-06-30", "2015-01-21")
    )
  )
})

test_that("UTF8", {
  res <- re_match(.text <- "Gábor Csárdi", "Gábor")
  expect_equal(res, df(.text = .text, .match = "Gábor"))
})

test_that("text is scalar & capture groups", {
  res <- re_match(.text <- "foo bar", "(\\w+) (\\w+)")
  expect_equal(
    as.data.frame(res),
    asdf("foo", "bar", .text = .text, .match = "foo bar")
  )

  res <- re_match(.text <- "foo bar", "(?<g1>\\w+) (?<g2>\\w+)")
  expect_equal(
    res,
    df(g1 = "foo", g2 = "bar", .text = .text, .match = "foo bar")
  )
})
rhub/tests/testthat/test-assertions.R0000644000176200001440000000417314604736532017515 0ustar liggesuserstest_that("is_character", {
  expect_snapshot({
    is_character(character())
    is_character("a")
    is_character(c("a", "b", "c"))
  })
  expect_snapshot(error = TRUE, {
    x <- 1
    assert_that(is_character(x))
    x <- mtcars
    assert_that(is_character(x))
    x <- NULL
    assert_that(is_character(x))
    x <- c("a", "b", NA_character_)
    assert_that(is_character(x))
  })
})

test_that("is_optional_character", {
  expect_snapshot({
    is_optional_character(NULL)
    is_optional_character(character())
    is_optional_character("a")
    is_optional_character(c("a", "b", "c"))
  })
  expect_snapshot(error = TRUE, {
    x <- 1
    assert_that(is_optional_character(x))
    x <- mtcars
    assert_that(is_optional_character(x))
    x <- c("a", "b", NA_character_)
    assert_that(is_optional_character(x))
  })
})

test_that("is_string", {
  expect_snapshot({
    is_string("a")
  })
  expect_snapshot(error = TRUE, {
    x <- 1
    assert_that(is_string(x))
    x <- mtcars
    assert_that(is_string(x))
    x <- NULL
    assert_that(is_string(x))
    x <- NA_character_
    assert_that(is_string(x))
    x <- c("a", "b", NA_character_)
    assert_that(is_string(x))
    x <- character()
assert_that(is_string(x)) x <- c("a", "b") assert_that(is_string(x)) }) }) test_that("is_optional_string", { expect_snapshot({ is_optional_string("a") is_optional_string(NULL) }) expect_snapshot(error = TRUE, { x <- 1 assert_that(is_optional_string(x)) x <- mtcars assert_that(is_optional_string(x)) x <- NA_character_ assert_that(is_optional_string(x)) x <- c("a", "b", NA_character_) assert_that(is_optional_string(x)) x <- character() assert_that(is_optional_string(x)) x <- c("a", "b") assert_that(is_optional_string(x)) }) }) test_that("is_optional_gh_url", { expect_snapshot({ is_optional_gh_url(NULL) is_optional_gh_url("https://github.com") is_optional_gh_url("http://github.com") }) expect_snapshot(error = TRUE, { gh_url <- 1:10 assert_that(is_optional_gh_url(gh_url)) gh_url <- "foobar" assert_that(is_optional_gh_url(gh_url)) }) })rhub/tests/testthat/_snaps/0000755000176200001440000000000014762413351015505 5ustar liggesusersrhub/tests/testthat/_snaps/doctor.md0000644000176200001440000003644414762413607017340 0ustar liggesusers# rhub_doctor Code rhub_setup() Message Setting up R-hub v2. > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. v Created workflow file '/.github/workflows/rhub.yaml'. Notes: * The workflow file must be added to the default branch of the GitHub repository. * GitHub actions must be enabled for the repository. They are disabled for forked repositories by default. Next steps: * Add the workflow file to git using `git add `. * Commit it to git using `git commit`. * Push the commit to GitHub using `git push`. * Call `rhub::rhub_doctor()` to check that you have set up R-hub correctly. * Call `rhub::rhub_check()` to check your package. --- Code rhub_doctor() Message > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. > WOOT! You are ready to run `rhub::rhub_check()` on this package. # doctor_find_gh_url Code doctor_find_gh_url(".") Condition Error: ! Cannot determine GitHub URL from git remote in repository at '.'. Is your repository on GitHub? i If this repository is on GitHub, call `git remote add origin ` to add GitHub as a remote. i Alternatively, specify the GitHub URL of the repository in the `gh_url` argument. i If it is not on GitHub, then you'll need to put it there. Create a new repository at . # doctor_find_pat Code doctor_find_pat("https://github.com") Message > Do you have a GitHub personal access token (PAT)? x Do you have a GitHub personal access token (PAT)? Condition Error: ! Could not find a GitHub personal access token (PAT) for . i I also could not find a working git installation. If you don't want to install git, but you have a PAT, you can set the GITHUB_PAT_GITHUB_COM environment variable to the PAT. i You can read more about PATs at . --- Code doctor_find_pat("https://github.com") Message > Do you have a GitHub personal access token (PAT)? x Do you have a GitHub personal access token (PAT)? Condition Error: ! Could not find a GitHub personal access token (PAT) for . i If you have a GitHub PAT, you can use `gitcreds::gitcreds_set()` to add it to the git credential store, so R-hub can use it. i If you don't have a PAT, you can create one by running `usethis::create_github_token()`. i You can read more about PATs at . --- Code doctor_find_pat("https://github.com") Message > Do you have a GitHub personal access token (PAT)? 
x Do you have a GitHub personal access token (PAT)? Condition Error: ! oops --- Code doctor_find_pat("https://github.com") Message > Do you have a GitHub personal access token (PAT)? v Found GitHub PAT. Output [1] "secret" # doctor_check_github Code doctor_check_github("https://github.com/r-lib/ps", resp) Message > Is the package on GitHub at ? v Found repository on GitHub at . --- Code doctor_check_github("https://github.com/r-lib/ps", resp2) Message > Is the package on GitHub at ? x Is the package on GitHub at ? Condition Error: ! Remote repository at does not seem like a GitHub repository. i R-hub only supports GitHub packages in GitHub repositories currently. i If you think that this is a bug in the rhub package, please let us know! # doctor_check_pat_scopes Code doctor_check_pat_scopes(resp) Message > Does your GitHub PAT have the right scopes? x Does your GitHub PAT have the right scopes? Condition Error: ! Could not use the PAT to authenticate to GitHub i Make sure that the URL and your PAT are correct. --- Code doctor_check_pat_scopes(resp2) Message > Does your GitHub PAT have the right scopes? x Does your GitHub PAT have the right scopes? Condition Error: ! Your PAT does not have a `repo` scope. i Without a `repo` scope R-hub cannot start jobs on GitHub. i Change the scopes of the PAT on the GitHub web page, or create a new PAT. --- Code doctor_check_pat_scopes(resp3) Message > Does your GitHub PAT have the right scopes? v GitHub PAT has the right scopes. # doctor_check_workflow Code doctor_check_workflow(url, list(), list()) Message > Does the default branch of your git repo have the R-hub workflow file? x Does the default branch of your git repo have the R-hub workflow file? Condition Error: ! Could not find R-hub's workflow file in the repository at . i The workflow file must be at '.github/workflows/rhub.yaml'. i If you have added and committed the workflow file, you need to push the commit to GitHub with `git push`. --- Code doctor_check_workflow(url, list(is_forked = TRUE), list()) Message > Does the default branch of your git repo have the R-hub workflow file? x Does the default branch of your git repo have the R-hub workflow file? Condition Error: ! Could not find R-hub's workflow file in the repository at . i The workflow file must be at '.github/workflows/rhub.yaml'. i If you have added and committed the workflow file, you need to push the commit to GitHub with `git push`. i This repository is a fork. Make sure you enabled GitHub Actions on it, in the Actions tab of the repository web page. --- Code doctor_check_workflow(url, list(workflow = "ok"), list(workflow = list(state = "bad"))) Message > Does the default branch of your git repo have the R-hub workflow file? x Does the default branch of your git repo have the R-hub workflow file? Condition Error: ! The workflow is disabled. i You need to enable it, click on the `...` button at the top right corner of the web page of the workflow. --- Code doctor_check_workflow(url, list(workflow = "ok"), list(workflow = list(state = "active"))) Message > Does the default branch of your git repo have the R-hub workflow file? v Found R-hub workflow in default branch, and it is active. 
# doctor_async_gql Code synchronise(doctor_async_gql("https://github.com/r-lib/ps", "secret")) Output $status_code [1] 200 $headers $headers$server [1] "GitHub.com" $headers$date [1] "Tue, 09 Apr 2024 15:19:50 GMT" $headers$`content-type` [1] "application/json; charset=utf-8" $headers$`x-oauth-scopes` [1] "delete:packages, delete_repo, read:org, repo, workflow, write:packages" $headers$`x-accepted-oauth-scopes` [1] "repo" $headers$`x-github-media-type` [1] "github.v4; format=json" $headers$`x-ratelimit-limit` [1] "5000" $headers$`x-ratelimit-remaining` [1] "4999" $headers$`x-ratelimit-reset` [1] "1712679590" $headers$`x-ratelimit-used` [1] "1" $headers$`x-ratelimit-resource` [1] "graphql" $headers$`access-control-expose-headers` [1] "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset" $headers$`access-control-allow-origin` [1] "*" $headers$`strict-transport-security` [1] "max-age=31536000; includeSubdomains; preload" $headers$`x-frame-options` [1] "deny" $headers$`x-content-type-options` [1] "nosniff" $headers$`x-xss-protection` [1] "0" $headers$`referrer-policy` [1] "origin-when-cross-origin, strict-origin-when-cross-origin" $headers$`content-security-policy` [1] "default-src 'none'" $headers$vary [1] "Accept-Encoding, Accept, X-Requested-With" $headers$`content-encoding` [1] "gzip" $headers$`x-github-request-id` [1] "F96D:2784C9:156C97B:157F2DD:66155C96" $is_repo [1] TRUE $workflow_binary [1] FALSE $workflow [1] "# R-hub's generic GitHub Actions workflow file. It's canonical location is at\n# https://github.com/r-hub/actions/blob/v1/workflows/rhub.yaml\n# You can update this file to a newer version using the rhub2 package:\n#\n# rhub::rhub_setup()\n#\n# It is unlikely that you need to modify this file manually.\n\nname: R-hub\nrun-name: \"${{ github.event.inputs.id }}: ${{ github.event.inputs.name || format('Manually run by {0}', github.triggering_actor) }}\"\n\non:\n workflow_dispatch:\n inputs:\n config:\n description: 'A comma separated list of R-hub platforms to use.'\n type: string\n default: 'linux,windows,macos'\n name:\n description: 'Run name. You can leave this empty now.'\n type: string\n id:\n description: 'Unique ID. 
You can leave this empty now.'\n type: string\n\njobs:\n\n setup:\n runs-on: ubuntu-latest\n outputs:\n containers: ${{ steps.rhub-setup.outputs.containers }}\n platforms: ${{ steps.rhub-setup.outputs.platforms }}\n\n steps:\n # NO NEED TO CHECKOUT HERE\n - uses: r-hub/actions/setup@main\n with:\n config: ${{ github.event.inputs.config }}\n id: rhub-setup\n\n linux-containers:\n needs: setup\n if: ${{ needs.setup.outputs.containers != '[]' }}\n runs-on: ubuntu-latest\n name: ${{ matrix.config.label }}\n strategy:\n fail-fast: false\n matrix:\n config: ${{ fromJson(needs.setup.outputs.containers) }}\n container:\n image: ${{ matrix.config.container }}\n\n steps:\n - uses: r-hub/actions/checkout@main\n - uses: r-hub/actions/platform-info@main\n with:\n token: ${{ secrets.RHUB_TOKEN }}\n job-config: ${{ matrix.config.job-config }}\n - uses: r-hub/actions/setup-deps@main\n with:\n token: ${{ secrets.RHUB_TOKEN }}\n job-config: ${{ matrix.config.job-config }}\n - uses: r-hub/actions/run-check@main\n with:\n token: ${{ secrets.RHUB_TOKEN }}\n job-config: ${{ matrix.config.job-config }}\n\n other-platforms:\n needs: setup\n if: ${{ needs.setup.outputs.platforms != '[]' }}\n runs-on: ${{ matrix.config.os }}\n name: ${{ matrix.config.label }}\n strategy:\n fail-fast: false\n matrix:\n config: ${{ fromJson(needs.setup.outputs.platforms) }}\n\n steps:\n - uses: r-hub/actions/checkout@main\n - uses: r-hub/actions/setup-r@main\n with:\n job-config: ${{ matrix.config.job-config }}\n token: ${{ secrets.RHUB_TOKEN }}\n - uses: r-hub/actions/platform-info@main\n with:\n token: ${{ secrets.RHUB_TOKEN }}\n job-config: ${{ matrix.config.job-config }}\n - uses: r-hub/actions/setup-deps@main\n with:\n job-config: ${{ matrix.config.job-config }}\n token: ${{ secrets.RHUB_TOKEN }}\n - uses: r-hub/actions/run-check@main\n with:\n job-config: ${{ matrix.config.job-config }}\n token: ${{ secrets.RHUB_TOKEN }}\n" $sha [1] "1ee32843d8fd8dbae325ce50458e8eba96ab894f" $branch [1] "main" $is_fork [1] FALSE $errors NULL # doctor_async_rest Code synchronise(doctor_async_rest("https://github.com/r-lib/ps", "secret")) Output $status_code [1] 200 $headers $headers$server [1] "GitHub.com" $headers$date [1] "Tue, 09 Apr 2024 15:24:07 GMT" $headers$`content-type` [1] "application/json; charset=utf-8" $headers$`cache-control` [1] "private, max-age=60, s-maxage=60" $headers$vary [1] "Accept, Authorization, Cookie, X-GitHub-OTP" $headers$etag [1] "W/\"c65a62f3c04bdadcf817bd671991e78e3759e96c2894ab8a301a2e469c6a2ea6\"" $headers$`x-oauth-scopes` [1] "delete:packages, delete_repo, read:org, repo, workflow, write:packages" $headers$`x-accepted-oauth-scopes` [1] "" $headers$`x-github-media-type` [1] "github.v3; format=json" $headers$`x-github-api-version-selected` [1] "2022-11-28" $headers$`x-ratelimit-limit` [1] "5000" $headers$`x-ratelimit-remaining` [1] "4975" $headers$`x-ratelimit-reset` [1] "1712676928" $headers$`x-ratelimit-used` [1] "25" $headers$`x-ratelimit-resource` [1] "core" $headers$`access-control-expose-headers` [1] "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset" $headers$`access-control-allow-origin` [1] "*" $headers$`strict-transport-security` [1] "max-age=31536000; includeSubdomains; preload" $headers$`x-frame-options` [1] "deny" $headers$`x-content-type-options` [1] "nosniff" 
$headers$`x-xss-protection` [1] "0" $headers$`referrer-policy` [1] "origin-when-cross-origin, strict-origin-when-cross-origin" $headers$`content-security-policy` [1] "default-src 'none'" $headers$vary [1] "Accept-Encoding, Accept, X-Requested-With" $headers$`content-encoding` [1] "gzip" $headers$`x-github-request-id` [1] "F98C:2CED39:16FCAA5:17100BF:66155D97" $workflow $workflow$id [1] 57922738 $workflow$node_id [1] "W_kwDOCDHXuc4Dc9Sy" $workflow$name [1] "R-hub" $workflow$path [1] ".github/workflows/rhub.yaml" $workflow$state [1] "active" $workflow$created_at [1] "2023-05-22T13:01:04.000+02:00" $workflow$updated_at [1] "2023-05-22T13:01:04.000+02:00" $workflow$url [1] "https://api.github.com/repos/r-lib/ps/actions/workflows/57922738" $workflow$html_url [1] "https://github.com/r-lib/ps/blob/main/.github/workflows/rhub.yaml" $workflow$badge_url [1] "https://github.com/r-lib/ps/workflows/R-hub/badge.svg" $errors NULL rhub/tests/testthat/_snaps/gh.md0000644000176200001440000005415614762413607016444 0ustar liggesusers# parse_gh_url Code parse_gh_url("https://github.com/r-lib/cli") Output $host [1] "github.com" $api [1] "https://api.github.com" $graphql [1] "https://api.github.com/graphql" $user [1] "r-lib" $repo [1] "cli" $slug [1] "r-lib/cli" $pat_url [1] "https://github.com/r-lib/cli" Code parse_gh_url("http://github.com/r-lib/cli") Output $host [1] "github.com" $api [1] "http://api.github.com" $graphql [1] "http://api.github.com/graphql" $user [1] "r-lib" $repo [1] "cli" $slug [1] "r-lib/cli" $pat_url [1] "http://github.com/r-lib/cli" Code parse_gh_url("https://my.private.gh/user/repo") Output $host [1] "my.private.gh" $api [1] "https://my.private.gh/api/v3" $graphql [1] "https://my.private.gh/api/graphql" $user [1] "user" $repo [1] "repo" $slug [1] "user/repo" $pat_url [1] "https://my.private.gh/user/repo" Code parse_gh_url("http://my.private.gh/user/repo") Output $host [1] "my.private.gh" $api [1] "http://my.private.gh/api/v3" $graphql [1] "http://my.private.gh/api/graphql" $user [1] "user" $repo [1] "repo" $slug [1] "user/repo" $pat_url [1] "http://my.private.gh/user/repo" # gh_headers Code gh_headers("mytoken") Output Accept Authorization "application/vnd.github+json" "Bearer mytoken" # gh_query_process_response Code gh_query_process_response(resp) Output $url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows" $status_code [1] 200 $type [1] "application/json; charset=utf-8" $headers $headers$server [1] "GitHub.com" $headers$date [1] "Tue, 09 Apr 2024 11:54:50 GMT" $headers$`content-type` [1] "application/json; charset=utf-8" $headers$`cache-control` [1] "public, max-age=60, s-maxage=60" $headers$vary [1] "Accept, Accept-Encoding, Accept, X-Requested-With" $headers$etag [1] "W/\"1d4178504dffed82cfb18f9c41c9d471a51bacdec453d315cb4d5fdd76b0ccf9\"" $headers$`x-github-media-type` [1] "github.v3; format=json" $headers$`x-github-api-version-selected` [1] "2022-11-28" $headers$`access-control-expose-headers` [1] "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset" $headers$`access-control-allow-origin` [1] "*" $headers$`strict-transport-security` [1] "max-age=31536000; includeSubdomains; preload" $headers$`x-frame-options` [1] "deny" $headers$`x-content-type-options` [1] "nosniff" $headers$`x-xss-protection` [1] "0" $headers$`referrer-policy` [1] 
"origin-when-cross-origin, strict-origin-when-cross-origin" $headers$`content-security-policy` [1] "default-src 'none'" $headers$`content-encoding` [1] "gzip" $headers$`x-ratelimit-limit` [1] "60" $headers$`x-ratelimit-remaining` [1] "55" $headers$`x-ratelimit-reset` [1] "1712666671" $headers$`x-ratelimit-resource` [1] "core" $headers$`x-ratelimit-used` [1] "5" $headers$`accept-ranges` [1] "bytes" $headers$`content-length` [1] "455" $headers$`x-github-request-id` [1] "F003:3D4DBC:AF8CD6D:B04554D:66152C8A" $modified [1] NA $times redirect namelookup connect pretransfer starttransfer 0.000000 0.020028 0.056345 0.099050 0.302180 total 0.302317 $content $content$total_count [1] 5 $content$workflows $content$workflows[[1]] $content$workflows[[1]]$id [1] 33705939 $content$workflows[[1]]$node_id [1] "W_kwDOBAK7O84CAk_T" $content$workflows[[1]]$name [1] "R-CMD-check" $content$workflows[[1]]$path [1] ".github/workflows/R-CMD-check.yaml" $content$workflows[[1]]$state [1] "active" $content$workflows[[1]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[1]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[1]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705939" $content$workflows[[1]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/R-CMD-check.yaml" $content$workflows[[1]]$badge_url [1] "https://github.com/r-hub/rhub/workflows/R-CMD-check/badge.svg" $content$workflows[[2]] $content$workflows[[2]]$id [1] 33705940 $content$workflows[[2]]$node_id [1] "W_kwDOBAK7O84CAk_U" $content$workflows[[2]]$name [1] "pkgdown" $content$workflows[[2]]$path [1] ".github/workflows/pkgdown.yaml" $content$workflows[[2]]$state [1] "active" $content$workflows[[2]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[2]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[2]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705940" $content$workflows[[2]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/pkgdown.yaml" $content$workflows[[2]]$badge_url [1] "https://github.com/r-hub/rhub/workflows/pkgdown/badge.svg" $content$workflows[[3]] $content$workflows[[3]]$id [1] 33705941 $content$workflows[[3]]$node_id [1] "W_kwDOBAK7O84CAk_V" $content$workflows[[3]]$name [1] "Commands" $content$workflows[[3]]$path [1] ".github/workflows/pr-commands.yaml" $content$workflows[[3]]$state [1] "active" $content$workflows[[3]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[3]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[3]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705941" $content$workflows[[3]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/pr-commands.yaml" $content$workflows[[3]]$badge_url [1] "https://github.com/r-hub/rhub/workflows/Commands/badge.svg" $content$workflows[[4]] $content$workflows[[4]]$id [1] 33705942 $content$workflows[[4]]$node_id [1] "W_kwDOBAK7O84CAk_W" $content$workflows[[4]]$name [1] "test-coverage" $content$workflows[[4]]$path [1] ".github/workflows/test-coverage.yaml" $content$workflows[[4]]$state [1] "active" $content$workflows[[4]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[4]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[4]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705942" $content$workflows[[4]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/test-coverage.yaml" $content$workflows[[4]]$badge_url [1] 
"https://github.com/r-hub/rhub/workflows/test-coverage/badge.svg" $content$workflows[[5]] $content$workflows[[5]]$id [1] 33708295 $content$workflows[[5]]$node_id [1] "W_kwDOBAK7O84CAlkH" $content$workflows[[5]]$name [1] "pages-build-deployment" $content$workflows[[5]]$path [1] "dynamic/pages/pages-build-deployment" $content$workflows[[5]]$state [1] "active" $content$workflows[[5]]$created_at [1] "2022-08-31T10:58:47.000Z" $content$workflows[[5]]$updated_at [1] "2022-08-31T10:58:47.000Z" $content$workflows[[5]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33708295" $content$workflows[[5]]$html_url [1] "https://github.com/r-hub/rhub/actions/workflows/pages/pages-build-deployment" $content$workflows[[5]]$badge_url [1] "https://github.com/r-hub/rhub/actions/workflows/pages/pages-build-deployment/badge.svg" # gh_rest_get, async_gh_rest_get Code json Output $url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows" $status_code [1] 200 $type [1] "application/json; charset=utf-8" $headers $headers$server [1] "GitHub.com" $headers$date [1] "Tue, 09 Apr 2024 11:54:50 GMT" $headers$`content-type` [1] "application/json; charset=utf-8" $headers$`cache-control` [1] "public, max-age=60, s-maxage=60" $headers$vary [1] "Accept, Accept-Encoding, Accept, X-Requested-With" $headers$etag [1] "W/\"1d4178504dffed82cfb18f9c41c9d471a51bacdec453d315cb4d5fdd76b0ccf9\"" $headers$`x-github-media-type` [1] "github.v3; format=json" $headers$`x-github-api-version-selected` [1] "2022-11-28" $headers$`access-control-expose-headers` [1] "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset" $headers$`access-control-allow-origin` [1] "*" $headers$`strict-transport-security` [1] "max-age=31536000; includeSubdomains; preload" $headers$`x-frame-options` [1] "deny" $headers$`x-content-type-options` [1] "nosniff" $headers$`x-xss-protection` [1] "0" $headers$`referrer-policy` [1] "origin-when-cross-origin, strict-origin-when-cross-origin" $headers$`content-security-policy` [1] "default-src 'none'" $headers$`content-encoding` [1] "gzip" $headers$`x-ratelimit-limit` [1] "60" $headers$`x-ratelimit-remaining` [1] "55" $headers$`x-ratelimit-reset` [1] "1712666671" $headers$`x-ratelimit-resource` [1] "core" $headers$`x-ratelimit-used` [1] "5" $headers$`accept-ranges` [1] "bytes" $headers$`content-length` [1] "455" $headers$`x-github-request-id` [1] "F003:3D4DBC:AF8CD6D:B04554D:66152C8A" $modified [1] NA $times redirect namelookup connect pretransfer starttransfer 0.000000 0.020028 0.056345 0.099050 0.302180 total 0.302317 $content $content$total_count [1] 5 $content$workflows $content$workflows[[1]] $content$workflows[[1]]$id [1] 33705939 $content$workflows[[1]]$node_id [1] "W_kwDOBAK7O84CAk_T" $content$workflows[[1]]$name [1] "R-CMD-check" $content$workflows[[1]]$path [1] ".github/workflows/R-CMD-check.yaml" $content$workflows[[1]]$state [1] "active" $content$workflows[[1]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[1]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[1]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705939" $content$workflows[[1]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/R-CMD-check.yaml" $content$workflows[[1]]$badge_url [1] 
"https://github.com/r-hub/rhub/workflows/R-CMD-check/badge.svg" $content$workflows[[2]] $content$workflows[[2]]$id [1] 33705940 $content$workflows[[2]]$node_id [1] "W_kwDOBAK7O84CAk_U" $content$workflows[[2]]$name [1] "pkgdown" $content$workflows[[2]]$path [1] ".github/workflows/pkgdown.yaml" $content$workflows[[2]]$state [1] "active" $content$workflows[[2]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[2]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[2]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705940" $content$workflows[[2]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/pkgdown.yaml" $content$workflows[[2]]$badge_url [1] "https://github.com/r-hub/rhub/workflows/pkgdown/badge.svg" $content$workflows[[3]] $content$workflows[[3]]$id [1] 33705941 $content$workflows[[3]]$node_id [1] "W_kwDOBAK7O84CAk_V" $content$workflows[[3]]$name [1] "Commands" $content$workflows[[3]]$path [1] ".github/workflows/pr-commands.yaml" $content$workflows[[3]]$state [1] "active" $content$workflows[[3]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[3]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[3]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705941" $content$workflows[[3]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/pr-commands.yaml" $content$workflows[[3]]$badge_url [1] "https://github.com/r-hub/rhub/workflows/Commands/badge.svg" $content$workflows[[4]] $content$workflows[[4]]$id [1] 33705942 $content$workflows[[4]]$node_id [1] "W_kwDOBAK7O84CAk_W" $content$workflows[[4]]$name [1] "test-coverage" $content$workflows[[4]]$path [1] ".github/workflows/test-coverage.yaml" $content$workflows[[4]]$state [1] "active" $content$workflows[[4]]$created_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[4]]$updated_at [1] "2022-08-31T10:20:12.000Z" $content$workflows[[4]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33705942" $content$workflows[[4]]$html_url [1] "https://github.com/r-hub/rhub/blob/main/.github/workflows/test-coverage.yaml" $content$workflows[[4]]$badge_url [1] "https://github.com/r-hub/rhub/workflows/test-coverage/badge.svg" $content$workflows[[5]] $content$workflows[[5]]$id [1] 33708295 $content$workflows[[5]]$node_id [1] "W_kwDOBAK7O84CAlkH" $content$workflows[[5]]$name [1] "pages-build-deployment" $content$workflows[[5]]$path [1] "dynamic/pages/pages-build-deployment" $content$workflows[[5]]$state [1] "active" $content$workflows[[5]]$created_at [1] "2022-08-31T10:58:47.000Z" $content$workflows[[5]]$updated_at [1] "2022-08-31T10:58:47.000Z" $content$workflows[[5]]$url [1] "https://api.github.com/repos/r-hub/rhub/actions/workflows/33708295" $content$workflows[[5]]$html_url [1] "https://github.com/r-hub/rhub/actions/workflows/pages/pages-build-deployment" $content$workflows[[5]]$badge_url [1] "https://github.com/r-hub/rhub/actions/workflows/pages/pages-build-deployment/badge.svg" # gh_rest_post, async_gh_rest_post Code json Output $url [1] "https://api.github.com/repos/r-lib/ps/actions/workflows/rhub.yaml/dispatches" $status_code [1] 204 $type [1] NA $headers $headers$server [1] "GitHub.com" $headers$date [1] "Tue, 09 Apr 2024 12:09:42 GMT" $headers$`x-oauth-scopes` [1] "delete:packages, delete_repo, read:org, repo, workflow, write:packages" $headers$`x-accepted-oauth-scopes` [1] "" $headers$`x-github-media-type` [1] "github.v3; format=json" $headers$`x-github-api-version-selected` [1] "2022-11-28" 
$headers$`x-ratelimit-limit` [1] "5000" $headers$`x-ratelimit-remaining` [1] "4979" $headers$`x-ratelimit-reset` [1] "1712665652" $headers$`x-ratelimit-used` [1] "21" $headers$`x-ratelimit-resource` [1] "core" $headers$`access-control-expose-headers` [1] "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset" $headers$`access-control-allow-origin` [1] "*" $headers$`strict-transport-security` [1] "max-age=31536000; includeSubdomains; preload" $headers$`x-frame-options` [1] "deny" $headers$`x-content-type-options` [1] "nosniff" $headers$`x-xss-protection` [1] "0" $headers$`referrer-policy` [1] "origin-when-cross-origin, strict-origin-when-cross-origin" $headers$`content-security-policy` [1] "default-src 'none'" $headers$vary [1] "Accept-Encoding, Accept, X-Requested-With" $headers$`x-github-request-id` [1] "F0EC:3056D9:20B42811:20D7C1CF:66153006" $modified [1] NA $times redirect namelookup connect pretransfer starttransfer 0.000000 0.013760 0.048724 0.086173 0.337608 total 0.341246 $content raw(0) # gh_gql_get, async_gh_gql_get Code json Output $url [1] "https://api.github.com/repos/r-lib/ps/actions/workflows/rhub.yaml" $status_code [1] 200 $type [1] "application/json; charset=utf-8" $headers $headers$server [1] "GitHub.com" $headers$date [1] "Tue, 09 Apr 2024 13:01:07 GMT" $headers$`content-type` [1] "application/json; charset=utf-8" $headers$`cache-control` [1] "private, max-age=60, s-maxage=60" $headers$vary [1] "Accept, Authorization, Cookie, X-GitHub-OTP" $headers$etag [1] "W/\"c65a62f3c04bdadcf817bd671991e78e3759e96c2894ab8a301a2e469c6a2ea6\"" $headers$`x-oauth-scopes` [1] "delete:packages, delete_repo, read:org, repo, workflow, write:packages" $headers$`x-accepted-oauth-scopes` [1] "" $headers$`x-github-media-type` [1] "github.v3; format=json" $headers$`x-github-api-version-selected` [1] "2022-11-28" $headers$`x-ratelimit-limit` [1] "5000" $headers$`x-ratelimit-remaining` [1] "4982" $headers$`x-ratelimit-reset` [1] "1712669308" $headers$`x-ratelimit-used` [1] "18" $headers$`x-ratelimit-resource` [1] "core" $headers$`access-control-expose-headers` [1] "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset" $headers$`access-control-allow-origin` [1] "*" $headers$`strict-transport-security` [1] "max-age=31536000; includeSubdomains; preload" $headers$`x-frame-options` [1] "deny" $headers$`x-content-type-options` [1] "nosniff" $headers$`x-xss-protection` [1] "0" $headers$`referrer-policy` [1] "origin-when-cross-origin, strict-origin-when-cross-origin" $headers$`content-security-policy` [1] "default-src 'none'" $headers$vary [1] "Accept-Encoding, Accept, X-Requested-With" $headers$`content-encoding` [1] "gzip" $headers$`x-github-request-id` [1] "F324:3E972F:3CB25779:3CEEE5ED:66153C13" $modified [1] NA $times redirect namelookup connect pretransfer starttransfer 0.000000 0.045222 0.085429 0.123010 0.299343 total 0.303010 $content $content$id [1] 57922738 $content$node_id [1] "W_kwDOCDHXuc4Dc9Sy" $content$name [1] "R-hub" $content$path [1] ".github/workflows/rhub.yaml" $content$state [1] "active" $content$created_at [1] 
"2023-05-22T13:01:04.000+02:00" $content$updated_at [1] "2023-05-22T13:01:04.000+02:00" $content$url [1] "https://api.github.com/repos/r-lib/ps/actions/workflows/57922738" $content$html_url [1] "https://github.com/r-lib/ps/blob/main/.github/workflows/rhub.yaml" $content$badge_url [1] "https://github.com/r-lib/ps/workflows/R-hub/badge.svg" rhub/tests/testthat/_snaps/setup.md0000644000176200001440000001233314762413611017170 0ustar liggesusers# check_rpkg_root Code check_rpkg_root("/pkg/root", "/git/root") Condition Error: ! R-hub currently requires that your R package is at the root of the git repository. i Your R package is at '/pkg/root'. i Your git repository root is at '/git/root'. # rhub_setup Code rhub_setup() Message Setting up R-hub v2. > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. Condition Error: ! Failed to download R-hub worflow file from GitHub. i URL: /badbadbad>. i HTTP status: 404. i Make sure that you are online and GitHub is up. --- Code rhub_setup() Message Setting up R-hub v2. > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. v Created workflow file '/.github/workflows/rhub.yaml'. Notes: * The workflow file must be added to the default branch of the GitHub repository. * GitHub actions must be enabled for the repository. They are disabled for forked repositories by default. Next steps: * Add the workflow file to git using `git add `. * Commit it to git using `git commit`. * Push the commit to GitHub using `git push`. * Call `rhub::rhub_doctor()` to check that you have set up R-hub correctly. * Call `rhub::rhub_check()` to check your package. --- Code rhub_setup() Message Setting up R-hub v2. > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. v Workflow file '/.github/workflows/rhub.yaml' already exists and it is current. Notes: * The workflow file must be added to the default branch of the GitHub repository. * GitHub actions must be enabled for the repository. They are disabled for forked repositories by default. Next steps: * Add the workflow file to git using `git add `. * Commit it to git using `git commit` (if not committed already). * Push the commit to GitHub using `git push` (if not pushed already). * Call `rhub::rhub_doctor()` to check that you have set up R-hub correctly. * Call `rhub::rhub_check()` to check your package. --- Code rhub_setup() Message Setting up R-hub v2. > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. Condition Error: ! Workflow file already exists at '/.github/workflows/rhub.yaml'. i Use `overwrite = TRUE` for overwriting it. --- Code rhub_setup(overwrite = TRUE) Message Setting up R-hub v2. > Is the current directory part of an R package? v Found R package at ''. > Is the current directory part of a git repository? v Found git repository at ''. i Updated existing workflow file at '/.github/workflows/rhub.yaml', as requested Notes: * The workflow file must be added to the default branch of the GitHub repository. * GitHub actions must be enabled for the repository. They are disabled for forked repositories by default. Next steps: * Add the workflow file to git using `git add `. * Commit it to git using `git commit`. 
* Push the commit to GitHub using `git push`. * Call `rhub::rhub_doctor()` to check that you have set up R-hub correctly. * Call `rhub::rhub_check()` to check your package. # setup_find_r_package Code setup_find_r_package() Message > Is the current directory part of an R package? v Found R package at ''. Output [1] "" --- Code setup_find_r_package() Message > Is the current directory part of an R package? x Is the current directory part of an R package? Condition Error: ! The current directory is not part of an R package. i You can create an R package in the current directory if you run `usethis::create_package('.')`. i Alternatively, if you want to use R-hub for a package that is already on GitHub, supply the `gh_url` argument to `rhub_setup()`. # setup_find_git_root Code setup_find_git_root() Message > Is the current directory part of a git repository? x Is the current directory part of a git repository? Condition Error: ! The current R package is not in a git repository. i You can create a git repository for the current package or project if you run `usethis::use_git()`. i Alternatively, if you want to use R-hub for a package that is already on GitHub, supply the `gh_url` argument to `rhub_setup()`. --- Code setup_find_git_root() Message > Is the current directory part of a git repository? v Found git repository at ''. Output [1] "" rhub/tests/testthat/_snaps/rc.md0000644000176200001440000002325114762413610016434 0ustar liggesusers# rc_new_token Code rc_new_token() Condition Error: ! No email or no token and not in interactive mode Code rc_new_token(email = "user@example.com") Condition Error: ! No email or no token and not in interactive mode Code rc_new_token(token = "secret") Condition Error: ! No email or no token and not in interactive mode --- Code rc_new_token("user@example.com", "secret") Message v Added token for "user@example.com". i R-hub tokens are stored at ''. # rc_list_repos Code rc_list_repos(email = "csardi.gabor@gmail.com") Output repo_name 1 uncrystallised-groundhog-callr 2 uncrystallised-groundhog-dotenv 3 uncrystallised-groundhog-rhub 4 uncrystallised-groundhog-tiff repo_url 1 https://github.com/r-hub2/uncrystallised-groundhog-callr 2 https://github.com/r-hub2/uncrystallised-groundhog-dotenv 3 https://github.com/r-hub2/uncrystallised-groundhog-rhub 4 https://github.com/r-hub2/uncrystallised-groundhog-tiff builds_url 1 https://github.com/r-hub2/uncrystallised-groundhog-callr/actions 2 https://github.com/r-hub2/uncrystallised-groundhog-dotenv/actions 3 https://github.com/r-hub2/uncrystallised-groundhog-rhub/actions 4 https://github.com/r-hub2/uncrystallised-groundhog-tiff/actions # rc_submit Code rc_submit() Condition Error: ! You need to set `confirmation` to "TRUE" to submit packages to R-hub from non-interactive R sessions. --- Code rc_submit(pkg, confirmation = TRUE) (rc_submit(pkg, confirmation = TRUE)) Output $result [1] "OK" $repo_url [1] "https://github.com/r-hub2/uncrystallised-groundhog-rhub" $actions_url [1] "https://github.com/r-hub2/uncrystallised-groundhog-rhub/actions" $id [1] "eonian-crustacean" $name [1] "linux,clang18" --- Code (rc_submit(pkg)) Output -- Confirmation ---------------------------------------------------------------- Message ! Your package will be publicly readable at . > You will need a GitHub account to view the build logs. > Only continue if you are fine with this. > See the `rhub_setup()` function for an alternative way of using R-hub. Output Please type 'yes' to continue: no Message Condition Error: ! Aborted R-hub submission. 
--- Code (rc_submit(pkg)) Output -- Confirmation ---------------------------------------------------------------- Message ! Your package will be publicly readable at . > You will need a GitHub account to view the build logs. > Only continue if you are fine with this. > See the `rhub_setup()` function for an alternative way of using R-hub. Output Please type 'yes' to continue: yes Message Output $result [1] "OK" $repo_url [1] "https://github.com/r-hub2/uncrystallised-groundhog-rhub" $actions_url [1] "https://github.com/r-hub2/uncrystallised-groundhog-rhub/actions" $id [1] "eonian-crustacean" $name [1] "linux,clang18" --- Code rc_submit(pkg) Condition Error: ! Could not query R package name at 'fixtures/bad.tar.gz'. i Make sure that `path` is an R package or a directory containing an R package. --- Code (rc_submit(pkg)) Output -- R CMD build ----------------------------------------------------------------- * checking for file ' ... OK * preparing 'pkg': * checking DESCRIPTION meta-information ... OK * checking for LF line-endings in source and make files and shell scripts * checking for empty or unneeded directories * building 'pkg_0.0.0.9000.tar.gz' -- Confirmation ---------------------------------------------------------------- Message ! Your package will be publicly readable at . > You will need a GitHub account to view the build logs. > Only continue if you are fine with this. > See the `rhub_setup()` function for an alternative way of using R-hub. Output Please type 'yes' to continue: yes Message Output $result [1] "OK" $repo_url [1] "https://github.com/r-hub2/uncrystallised-groundhog-rhub" $actions_url [1] "https://github.com/r-hub2/uncrystallised-groundhog-rhub/actions" $id [1] "eonian-crustacean" $name [1] "linux,clang18" --- Code (rc_submit(pkg)) Output -- Confirmation ---------------------------------------------------------------- Message ! Your package will be publicly readable at . > You will need a GitHub account to view the build logs. > Only continue if you are fine with this. > See the `rhub_setup()` function for an alternative way of using R-hub. Output Please type 'yes' to continue: yes Message Condition Error: ! Invalid response from R-hub server, please report this. # guess_email Code guess_email() Message i Using maintainer email address "user@example.com". Output [1] "user@example.com" Code guess_email(message = FALSE) Output [1] "user@example.com" --- Code guess_email() Message i Using email address "another@example.com". Output [1] "another@example.com" --- Code guess_email(message = FALSE) Output [1] "another@example.com" # get_auth_header Code get_auth_header("csardi.gabor@gmail.com") Output Authorization "Bearer token1" --- Code get_auth_header("user@example.com") Condition Error: ! Can't find token for email address "user@example.com". i Call `rhub::rc_new_token()` to get a token. # get_email_to_validate Code get_email_to_validate(".") Output -- Choose email address to request token for (or 0 to exit) 1: v csardi.gabor@gmail.com 2: v csardi.gabor+new@gmail.com 3: v csardi.gabor+another@gmail.com 4: v csardi.gabor+fake@gmail.com 5: user@example.com 6: maint@example.com 7: New email address Selection: 0 Condition Error: ! 
Cancelled requesting new token --- Code get_email_to_validate(".") Output -- Choose email address to request token for (or 0 to exit) 1: v csardi.gabor@gmail.com 2: v csardi.gabor+new@gmail.com 3: v csardi.gabor+another@gmail.com 4: v csardi.gabor+fake@gmail.com 5: user@example.com 6: maint@example.com 7: New email address Selection: 5 [1] "user@example.com" --- Code get_email_to_validate(".") Output -- Choose email address to request token for (or 0 to exit) 1: v csardi.gabor@gmail.com 2: v csardi.gabor+new@gmail.com 3: v csardi.gabor+another@gmail.com 4: v csardi.gabor+fake@gmail.com 5: user@example.com 6: maint@example.com 7: New email address Selection: 7 Email address: custom@example.com [1] "custom@example.com" --- Code get_email_to_validate(".") Output Email address: custom@example.com [1] "custom@example.com" # list_validated_emails2 Code list_validated_emails2(message = FALSE) Output email token 1 csardi.gabor@gmail.com token1 2 csardi.gabor+new@gmail.com token2 3 csardi.gabor+another@gmail.com token3 4 csardi.gabor+fake@gmail.com token4 Code list_validated_emails2(message = TRUE) Message > R-hub tokens are stored at 'fixtures/validated_emails.csv'. Output email token 1 csardi.gabor@gmail.com token1 2 csardi.gabor+new@gmail.com token2 3 csardi.gabor+another@gmail.com token3 4 csardi.gabor+fake@gmail.com token4 --- Code list_validated_emails2(message = FALSE) Message i No R-hub tokens found. Code list_validated_emails2(message = TRUE) Message i No R-hub tokens found. # email_file Code email_file() Output [1] "/config/validated_emails.csv" # rc_new_token_interactive Code rc_new_token_interactive(email = "maint@example.com") Message i Please check your emails for the R-hub access token. Output [[1]] [1] "maint@example.com" [[2]] [1] "token" --- Code rc_new_token_interactive() Message i Please check your emails for the R-hub access token. Output [[1]] [1] "user@example.com" [[2]] [1] "token" # email_add_token Code read_token_file(ef) Output email token 1 newemail@example.com new-token --- Code read_token_file(ef) Output email token 1 newemail@example.com new-token 2 newemail2@example.com new-token2 --- Code read_token_file(ef) Output email token 1 newemail@example.com new-new-token 2 newemail2@example.com new-token2 rhub/tests/testthat/_snaps/api.md0000644000176200001440000001063514762413605016607 0ustar liggesusers# query GET Code cat(rawToChar(query("/get")$content)) Output { "args": {}, "headers": { "Host": "127.0.0.1:", "Accept-Encoding": "", "accept": "application/json", "content-type": "application/json", "user-agent": "R-hub client" }, "origin": "127.0.0.1", "path": "/get", "url": "http://127.0.0.1:/get" } # query HTTP errors Code query("/rhub-error?msg=iamsosorryabouththat") Condition Error: ! iamsosorryabouththat Caused by error: ! Unauthorized (HTTP 401). --- Code query("/rhub-error2") Condition Error: ! Unauthorized (HTTP 401). --- Code query("/rhub-error3") Condition Error: ! Unauthorized (HTTP 401). 
# query POST Code cat(rawToChar(query("/post", method = "POST", data = data)$content)) Output { "args": {}, "data": "{\"foo\":[\"bar\"],\"foobar\":[1,2,3]}", "files": {}, "form": {}, "headers": { "Host": "127.0.0.1:", "Accept-Encoding": "", "accept": "application/json", "content-type": "application/json", "user-agent": "R-hub client", "Content-Length": "32" }, "json": { "foo": [ "bar" ], "foobar": [ 1, 2, 3 ] }, "method": "post", "path": "/post", "origin": "127.0.0.1", "url": "http://127.0.0.1:/post" } # query, unknown verb Code query("/anything", method = "REPORT") Condition Error: ! Unexpected HTTP verb, internal rhub error Code query("/anything", method = "REPORT", sse = TRUE) Condition Error: ! Unexpected HTTP verb, internal rhub error # query SSE Code query("/sse", sse = TRUE)$sse Output [[1]] event message "1" "live long and prosper" [[2]] event message "2" "live long and prosper" [[3]] event message "3" "live long and prosper" [[4]] event message "4" "live long and prosper" [[5]] event message "5" "live long and prosper" Code query("/sse", method = "POST", data = data, sse = TRUE)$sse Output [[1]] event message "1" "live long and prosper" [[2]] event message "2" "live long and prosper" [[3]] event message "3" "live long and prosper" [[4]] event message "4" "live long and prosper" [[5]] event message "5" "live long and prosper" --- Code resp <- query("/sse?progress=true&numevents=2", sse = TRUE) Message > This is `it`: 1 > This is `it`: 2 v Done. Code cat(rawToChar(resp$content)) Output event: 1 message: live long and prosper event: progress data: "This is {.code it}: 1" event: 2 message: live long and prosper event: progress data: "This is {.code it}: 2" event: result data: "All is {.code good}." --- Code resp <- query("/sse?progress=true&numevents=2&error=true", sse = TRUE) Message > This is `it`: 1 > This is `it`: 2 x This is a `failure`. Condition Error: ! Aborting Code cat(rawToChar(resp$content)) Output event: 1 message: live long and prosper event: progress data: "This is {.code it}: 1" event: 2 message: live long and prosper event: progress data: "This is {.code it}: 2" event: result data: "All is {.code good}." rhub/tests/testthat/_snaps/assertions.md0000644000176200001440000001134014762413606020223 0ustar liggesusers# is_character Code is_character(character()) Output [1] TRUE Code is_character("a") Output [1] TRUE Code is_character(c("a", "b", "c")) Output [1] TRUE --- Code x <- 1 assert_that(is_character(x)) Condition Error: ! `x` must be a character vector without `NA`, but it is a number. Code x <- mtcars assert_that(is_character(x)) Condition Error: ! `x` must be a character vector without `NA`, but it is a data frame. Code x <- NULL assert_that(is_character(x)) Condition Error: ! `x` must be a character vector without `NA`, but it is NULL. Code x <- c("a", "b", NA_character_) assert_that(is_character(x)) Condition Error: ! `x` must be a character vector without `NA`, but it has 1 `NA` value. # is_optional_character Code is_optional_character(NULL) Output [1] TRUE Code is_optional_character(character()) Output [1] TRUE Code is_optional_character("a") Output [1] TRUE Code is_optional_character(c("a", "b", "c")) Output [1] TRUE --- Code x <- 1 assert_that(is_optional_character(x)) Condition Error: ! `x` must be a character vector without `NA`, or NULL, but it is a number. Code x <- mtcars assert_that(is_optional_character(x)) Condition Error: ! `x` must be a character vector without `NA`, or NULL, but it is a data frame. 
Code x <- c("a", "b", NA_character_) assert_that(is_optional_character(x)) Condition Error: ! `x` must not have `NA` values, but it has 1 `NA` value. # is_string Code is_string("a") Output [1] TRUE --- Code x <- 1 assert_that(is_string(x)) Condition Error: ! `x` must be a string (character scalar), but it is a number. Code x <- mtcars assert_that(is_string(x)) Condition Error: ! `x` must be a string (character scalar), but it is a data frame. Code x <- NULL assert_that(is_string(x)) Condition Error: ! `x` must be a string (character scalar), but it is NULL. Code x <- NA_character_ assert_that(is_string(x)) Condition Error: ! `x` must not be `NA`. Code x <- c("a", "b", NA_character_) assert_that(is_string(x)) Condition Error: ! `x` must be a string (character scalar), but it is a character vector. Code x <- character() assert_that(is_string(x)) Condition Error: ! `x` must be a string (character scalar), but it is an empty character vector. Code x <- c("a", "b") assert_that(is_string(x)) Condition Error: ! `x` must be a string (character scalar), but it is a character vector. # is_optional_string Code is_optional_string("a") Output [1] TRUE Code is_optional_string(NULL) Output [1] TRUE --- Code x <- 1 assert_that(is_optional_string(x)) Condition Error: ! `x` must be a string (character scalar) or NULL, but it is a number. Code x <- mtcars assert_that(is_optional_string(x)) Condition Error: ! `x` must be a string (character scalar) or NULL, but it is a data frame. Code x <- NA_character_ assert_that(is_optional_string(x)) Condition Error: ! `x` must be a string (character scalar) or NULL, but it is a character `NA`. Code x <- c("a", "b", NA_character_) assert_that(is_optional_string(x)) Condition Error: ! `x` must be a string (character scalar) or NULL, but it is a character vector. Code x <- character() assert_that(is_optional_string(x)) Condition Error: ! `x` must be a string (character scalar) or NULL, but it is an empty character vector. Code x <- c("a", "b") assert_that(is_optional_string(x)) Condition Error: ! `x` must be a string (character scalar) or NULL, but it is a character vector. # is_optional_gh_url Code is_optional_gh_url(NULL) Output [1] TRUE Code is_optional_gh_url("https://github.com") Output [1] TRUE Code is_optional_gh_url("http://github.com") Output [1] TRUE --- Code gh_url <- 1:10 assert_that(is_optional_gh_url(gh_url)) Condition Error: ! `gh_url` must be a character string. You supplied an integer vector. Code gh_url <- "foobar" assert_that(is_optional_gh_url(gh_url)) Condition Error: ! `gh_url` must be an HTTP or HTTPS URL. You supplied: "foobar". rhub/tests/testthat/_snaps/utils.md0000644000176200001440000000634614762413611017177 0ustar liggesusers# pkg_error Code err Output Error: ! This is not good! i You should not use `foo`, use `bar` instead. # stop Code err Output Error: ! This is not good! i You should not use `foo`, use `bar` instead. # stop with message Code err Output Error in `stop("Ooopsie daily!")`: ! Ooopsie daily! # stopifnot Code stopifnot(1 == 2) Condition Error: ! `1` must equal `2`. 
# zip Code zip(character(), character()) Output list() Code zip(letters[1:5], LETTERS[1:5]) Output [[1]] [1] "a" "A" [[2]] [1] "b" "B" [[3]] [1] "c" "C" [[4]] [1] "d" "D" [[5]] [1] "e" "E" Code zip("1", letters[1:5]) Output [[1]] [1] "1" "a" [[2]] [1] "1" "b" [[3]] [1] "1" "c" [[4]] [1] "1" "d" [[5]] [1] "1" "e" # unquote Code unquote("'quoted'") Output [1] "quoted" Code unquote(c("'quoted'", "not", "\"quoted\"")) Output [1] "quoted" "not" "quoted" # parse_url Code parse_url("https://github.com/r-hub/rhub") Output protocol host path 1 https github.com /r-hub/rhub Code parse_url("https://user@github.com/r-hub/rhub") Output protocol host path 1 https github.com /r-hub/rhub Code parse_url("https://user:pass@github.com/r-hub/rhub") Output protocol host path 1 https github.com /r-hub/rhub Code parse_url("https://github.com/r-hub/rhub?q=foo&p=bar") Output protocol host path 1 https github.com /r-hub/rhub?q=foo&p=bar Code parse_url("git@github.com:/r-hub/rhub") Output protocol host path 1 https github.com /r-hub/rhub Code parse_url("git@github.com:/r-hub/rhub.git") Output protocol host path 1 https github.com /r-hub/rhub --- Code parse_url("this is not a URL at all") Condition Error in `parse_url()`: ! Invalid URL: # ansi_align_width [plain] Code paste0("--", ansi_align_width(c("foo", "bar", "foobar")), "--") Output [1] "--foo --" "--bar --" "--foobar--" Code paste0("--", ansi_align_width(c("foo", "bar", cli::col_red("foobar"))), "--") Output [1] "--foo --" "--bar --" "--foobar--" Code ansi_align_width(character()) Output character(0) # ansi_align_width [ansi] Code paste0("--", ansi_align_width(c("foo", "bar", "foobar")), "--") Output [1] "--foo --" "--bar --" "--foobar--" Code paste0("--", ansi_align_width(c("foo", "bar", cli::col_red("foobar"))), "--") Output [1] "--foo --" "--bar --" [3] "--\033[31mfoobar\033[39m--" Code ansi_align_width(character()) Output character(0) rhub/tests/testthat/_snaps/cli.md0000644000176200001440000000062614762413606016605 0ustar liggesusers# cli_status Code pid <- cli_status("This is a status message") Message > This is a status message Code cli::cli_status_clear(pid, result = "clear") --- Code pid <- cli_status("This is a status message") Message > This is a status message Code cli::cli_status_clear(pid, result = "failed") Message x This is a status message rhub/tests/testthat/_snaps/platforms.md0000644000176200001440000003664714762413607020062 0ustar liggesusers# get_platforms Code cli::hash_obj_sha1(plt[[1]]) Output [1] "c98c9abd5bf37f3c6e63ede7dc475cc598dd716e" Code cli::hash_obj_sha1(plt[[2]]) Output [1] "b638216bc57a197de6cbec7db245b42b09f4813f" # rhub_platforms Code rhub_platforms() Output -- Virtual machines --------------------------------------------------------- 1 [VM] linux All R versions on GitHub Actions ubuntu-latest 2 [VM] macos All R versions on GitHub Actions macos-latest 3 [VM] macos-arm64 All R versions on GitHub Actions macos-14 4 [VM] windows All R versions on GitHub Actions windows-latest -- Containers --------------------------------------------------------------- 5 [CT] atlas [ATLAS] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... 
ghcr.io/r-hub/containers/atlas:latest 6 [CT] clang-asan [asan, clang-ASAN, clang-UBSAN, ubsan] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang-asan:latest 7 [CT] clang16 [clang16] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang16:latest 8 [CT] clang17 [clang17] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang17:latest 9 [CT] clang18 [clang18] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang18:latest 10 [CT] donttest [donttest] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/donttest:latest 11 [CT] gcc13 [gcc13] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/gcc13:latest 12 [CT] intel [Intel] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/intel:latest 13 [CT] mkl [MKL] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/mkl:latest 14 [CT] nold [noLD] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/nold:latest 15 [CT] nosuggests [noSuggests] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/nosuggests:latest 16 [CT] ubuntu-clang [r-devel-linux-x86_64-debian-clang] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-clang:latest 17 [CT] ubuntu-gcc12 [r-devel-linux-x86_64-debian-gcc] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-gcc12:latest 18 [CT] ubuntu-next [r-next, r-patched, r-patched-linux-x86_64] R version 4.4.0 alpha (2024-04-07 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-next:latest 19 [CT] ubuntu-release [r-release, r-release-linux-x86_64, ubuntu] R version 4.3.3 (2024-02-29) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-release:latest 20 [CT] valgrind [valgrind] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/valgrind:latest --- Code rhub_platforms()[] Output # A data frame: 3 x 9 name description aliases type os_type container github_os r_version os_name * 1 windo~ os Windows windows-~ * 2 ubunt~ cont~ Linux ghcr.io/~ R Under ~ Ubuntu~ 3 ubunt~ cont~ Linux ghcr.io/~ # format.rhub_platforms Code print(plt) Output -- Virtual machines --------------------------------------------------------- 1 [VM] linux R 4.4.0 2 [VM] macos All R versions on GitHub Actions macos-latest 3 [VM] macos-arm64 All R versions on GitHub Actions macos-14 4 [VM] windows All R versions on GitHub Actions windows-latest -- Containers --------------------------------------------------------------- 5 [CT] atlas [ATLAS] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... 
ghcr.io/r-hub/containers/atlas:latest 6 [CT] clang-asan [asan, clang-ASAN, clang-UBSAN, ubsan] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang-asan:latest 7 [CT] clang16 [clang16] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang16:latest 8 [CT] clang17 [clang17] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang17:latest 9 [CT] clang18 [clang18] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/clang18:latest 10 [CT] donttest [donttest] R Under development (unstable) (2024-04-06 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/donttest:latest 11 [CT] gcc13 [gcc13] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/gcc13:latest 12 [CT] intel [Intel] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/intel:latest 13 [CT] mkl [MKL] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/mkl:latest 14 [CT] nold [noLD] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/nold:latest 15 [CT] nosuggests [noSuggests] R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/nosuggests:latest 16 [CT] ubuntu-clang [r-devel-linux-x86_64-debian-clang] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-clang:latest 17 [CT] ubuntu-gcc12 [r-devel-linux-x86_64-debian-gcc] R Under development (unstable) (2024-04-08 r86370) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-gcc12:latest 18 [CT] ubuntu-next [r-next, r-patched, r-patched-linux-x86_64] R version 4.4.0 alpha (2024-04-07 r86351) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-next:latest 19 [CT] ubuntu-release [r-release, r-release-linux-x86_64, ubuntu] R version 4.3.3 (2024-02-29) on Ubuntu 22.04.4 LTS ghcr.io/r-hub/containers/ubuntu-release:latest 20 [CT] valgrind R Under development (unstable) (2024-04-08 r86370) on Fedora Linux 38 (Con... ghcr.io/r-hub/containers/valgrind:latest # summary.rhub_platforms Code summary(plt) Output 1 [VM] linux R-* (any version) ubuntu-latest on Git 2 [VM] macos R-* (any version) macos-latest on GitH 3 [VM] macos-arm64 R-* (any version) macos-14 on GitHub 4 [VM] windows R-* (any version) windows-latest on Gi... 5 [CT] atlas R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 6 [CT] clang-asan R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 7 [CT] clang16 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 8 [CT] clang17 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 9 [CT] clang18 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 10 [CT] donttest R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 11 [CT] gcc13 R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 12 [CT] intel R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 13 [CT] mkl R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 14 [CT] nold R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 15 [CT] nosuggests R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 
16 [CT] ubuntu-clang R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 17 [CT] ubuntu-gcc12 R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 18 [CT] ubuntu-next R-4.4.0 alpha (2024-04-07 r86351) Ubuntu 22.04.4 LTS 19 [CT] ubuntu-release R-4.3.3 (2024-02-29) Ubuntu 22.04.4 LTS 20 [CT] valgrind R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... # select_platforms Code select_platforms() Condition Error: ! Failed to download the list of R-hub platforms. i Make sure that you are online and Github is also online. Caused by error: ! Not Found (HTTP 404). --- Code select_platforms() Condition Error: ! `platforms` argument is missing for `rhub_check()`. i You need to specify `platforms` in non-interactive sessions --- Code select_platforms(c("linux", "clang18")) Output [1] "linux" "clang18" --- Code select_platforms(c("linux", "clang18", "thisisnotit")) Condition Error: ! Unknown platform: "thisisnotit". i See `rhub::rhub_platforms()` for the list of platforms --- Code select_platforms() Message Available platforms (see `rhub::rhub_platforms()` for details): 1 [VM] linux R-* (any version) ubuntu-latest on Git 2 [VM] macos R-* (any version) macos-latest on GitH 3 [VM] macos-arm64 R-* (any version) macos-14 on GitHub 4 [VM] windows R-* (any version) windows-latest on Gi... 5 [CT] atlas R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 6 [CT] clang-asan R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 7 [CT] clang16 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 8 [CT] clang17 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 9 [CT] clang18 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 10 [CT] donttest R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 11 [CT] gcc13 R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 12 [CT] intel R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 13 [CT] mkl R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 14 [CT] nold R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 15 [CT] nosuggests R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 16 [CT] ubuntu-clang R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 17 [CT] ubuntu-gcc12 R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 18 [CT] ubuntu-next R-4.4.0 alpha (2024-04-07 r86351) Ubuntu 22.04.4 LTS 19 [CT] ubuntu-release R-4.3.3 (2024-02-29) Ubuntu 22.04.4 LTS 20 [CT] valgrind R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... Output Selection (comma separated numbers, 0 to cancel): 1, 3, 9 [1] "linux" "macos-arm64" "clang18" --- Code select_platforms() Message Available platforms (see `rhub::rhub_platforms()` for details): 1 [VM] linux R-* (any version) ubuntu-latest on Git 2 [VM] macos R-* (any version) macos-latest on GitH 3 [VM] macos-arm64 R-* (any version) macos-14 on GitHub 4 [VM] windows R-* (any version) windows-latest on Gi... 5 [CT] atlas R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 6 [CT] clang-asan R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 7 [CT] clang16 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 8 [CT] clang17 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 9 [CT] clang18 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 10 [CT] donttest R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 11 [CT] gcc13 R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 12 [CT] intel R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 13 [CT] mkl R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 14 [CT] nold R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 15 [CT] nosuggests R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 
16 [CT] ubuntu-clang R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 17 [CT] ubuntu-gcc12 R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 18 [CT] ubuntu-next R-4.4.0 alpha (2024-04-07 r86351) Ubuntu 22.04.4 LTS 19 [CT] ubuntu-release R-4.3.3 (2024-02-29) Ubuntu 22.04.4 LTS 20 [CT] valgrind R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... Output Selection (comma separated numbers, 0 to cancel): 0 Condition Error: ! R-hub check cancelled --- Code select_platforms() Message Available platforms (see `rhub::rhub_platforms()` for details): 1 [VM] linux R-* (any version) ubuntu-latest on Git 2 [VM] macos R-* (any version) macos-latest on GitH 3 [VM] macos-arm64 R-* (any version) macos-14 on GitHub 4 [VM] windows R-* (any version) windows-latest on Gi... 5 [CT] atlas R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 6 [CT] clang-asan R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 7 [CT] clang16 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 8 [CT] clang17 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 9 [CT] clang18 R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 10 [CT] donttest R-devel (2024-04-06 r86351) Ubuntu 22.04.4 LTS 11 [CT] gcc13 R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 12 [CT] intel R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 13 [CT] mkl R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 14 [CT] nold R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 15 [CT] nosuggests R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... 16 [CT] ubuntu-clang R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 17 [CT] ubuntu-gcc12 R-devel (2024-04-08 r86370) Ubuntu 22.04.4 LTS 18 [CT] ubuntu-next R-4.4.0 alpha (2024-04-07 r86351) Ubuntu 22.04.4 LTS 19 [CT] ubuntu-release R-4.3.3 (2024-02-29) Ubuntu 22.04.4 LTS 20 [CT] valgrind R-devel (2024-04-08 r86370) Fedora Linux 38 (Con... Output Selection (comma separated numbers, 0 to cancel): 10000 Condition Error: ! Invalid platform number: "10000". rhub/tests/testthat/_snaps/check.md0000644000176200001440000000141414762413606017107 0ustar liggesusers# rhub_check Code rhub_check("https://github.com/r-lib/ps", platforms = c("linux", "clang18")) Message v Check started: linux, clang18 (kleptomaniac-harlequinbug). See for live output! --- Code rhub_check("https://github.com/r-lib/ps", platforms = c("linux", "clang18")) Condition Error: ! :( Failed to start check: I am so, so sorry!. i If you think this is a bug in the rhub package, please open an issues at . --- Code rhub_check(platforms = c("linux", "clang18")) Message v Check started: linux, clang18 (kleptomaniac-harlequinbug). See for live output! rhub/tests/testthat/test-rc.R0000644000176200001440000001576014605502050015726 0ustar liggesuserstest_that("rc_new_token", { # need interactive if missing arg withr::local_options(rlib_interactive = FALSE) expect_snapshot(error = TRUE, { rc_new_token() rc_new_token(email = "user@example.com") rc_new_token(token = "secret") }) # calls interactive function in interactive mode withr::local_options(rlib_interactive = TRUE) called <- FALSE mockery::stub( rc_new_token, "rc_new_token_interactive", function(...) 
called <<- TRUE ) rc_new_token() expect_true(called) # otherwise adds the supplied token args <- NULL mockery::stub(rc_new_token, "email_add_token", function(email, token) { args <<- list(email, token) }) mockery::stub(rc_new_token, "email_file", "") expect_snapshot({ rc_new_token("user@example.com", "secret") }) expect_equal(args, list("user@example.com", "secret")) }) test_that("rc_list_local_tokens", { mockery::stub(rc_list_local_tokens, "list_validated_emails2", 113) expect_equal(rc_list_local_tokens(), 113) }) test_that("rc_list_repos", { mockery::stub( rc_list_repos, "get_auth_header", c("Authorization" = "Bearer secret") ) resp <- readRDS(test_path("fixtures/rc-response-list-repos.rds")) mockery::stub(rc_list_repos, "query", resp) expect_snapshot({ rc_list_repos(email = "csardi.gabor@gmail.com") }) }) test_that("rc_submit", { # confirmation is non-interactive withr::local_options(rlib_interactive = FALSE) expect_snapshot(error = TRUE, { rc_submit() }) mockery::stub(rc_submit, "select_platforms", c("linux", "clang18")) mockery::stub(rc_submit, "random_id", "kleptomaniac-harlequinbug") resp <- readRDS(test_path("fixtures/rc-response-submit.rds")) mockery::stub(rc_submit, "query", resp) mockery::stub(rc_submit, "get_auth_header", c("Authorization" = "Bearer token")) pkg <- test_path("fixtures/pkg_0.0.0.9000.tar.gz") expect_snapshot({ rc_submit(pkg, confirmation = TRUE) (rc_submit(pkg, confirmation = TRUE)) }) # confirmation, abort withr::local_options(rlib_interactive = TRUE) ans <- "no" mockery::stub(rc_submit, "readline", function(prompt) { cat(prompt) cat(ans) ans }) expect_snapshot(error = TRUE, { (rc_submit(pkg)) }) # confirmation, yes ans <- "yes" expect_snapshot({ (rc_submit(pkg)) }) # bad package file pkg <- test_path("fixtures/bad.tar.gz") expect_snapshot(error = TRUE, { rc_submit(pkg) }) # need to build if directory withr::local_options(useFancyQuotes = FALSE) tmp <- tempfile() on.exit(unlink(tmp, recursive = TRUE), add = TRUE) dir.create(tmp) file.copy(test_path("fixtures/pkg"), tmp, recursive = TRUE) pkg <- file.path(tmp, "pkg") expect_snapshot({ (rc_submit(pkg)) }, transform = function(x) { x <- sub( "checking for file .*[.][.][.]", "checking for file ' ...", x ) x <- gsub("\x91", "'", x, fixed = TRUE, useBytes = TRUE) x <- gsub("\x92", "'", x, fixed = TRUE, useBytes = TRUE) x <- gsub("\xe2\x80\x98", "'", x, fixed = TRUE, useBytes = TRUE) x <- gsub("\xe2\x80\x99", "'", x, fixed = TRUE, useBytes = TRUE) x }) # error response mockery::stub(rc_submit, "query", list()) pkg <- test_path("fixtures/pkg_0.0.0.9000.tar.gz") expect_snapshot(error = TRUE, { (rc_submit(pkg)) }) }) # == Internals ============================================================ test_that("guess_email", { # maint mockery::stub(guess_email, "get_maintainer_email", "user@example.com") expect_snapshot({ guess_email() guess_email(message = FALSE) }) mockery::stub( guess_email, "get_maintainer_email", function(...) 
stop("no") ) mockery::stub(guess_email, "email_address", "another@example.com") expect_snapshot(guess_email()) expect_snapshot(guess_email(message = FALSE)) }) test_that("get_auth_header", { valid <- read_token_file(test_path("fixtures/validated_emails.csv")) mockery::stub(get_auth_header, "list_validated_emails2", valid) # ok expect_snapshot({ get_auth_header("csardi.gabor@gmail.com") }) # not ok expect_snapshot(error = TRUE, { get_auth_header("user@example.com") }) }) test_that("get_email_to_validate", { valid <- read_token_file(test_path("fixtures/validated_emails.csv")) mockery::stub(get_email_to_validate, "list_validated_emails2", valid) mockery::stub(get_email_to_validate, "email_address", "user@example.com") mockery::stub(get_email_to_validate, "get_maintainer_email", "maint@example.com") mockery::stub(get_email_to_validate, "menu", function(choices, title) { cat(title, sep = "\n\n") cat(paste0(seq_along(choices), ": ", choices), sep = "\n") cat("\nSelection: ") cat(ans) ans }) ans <- 0 expect_snapshot(error = TRUE, { get_email_to_validate(".") }) ans <- 5L expect_snapshot({ get_email_to_validate(".") }) mockery::stub(get_email_to_validate, "readline", function(prompt) { cat(prompt) ea <- "custom@example.com" cat(ea) ea }) ans <- 7L expect_snapshot({ get_email_to_validate(".") }) # single validated address mockery::stub(get_email_to_validate, "list_validated_emails2", valid[FALSE, ]) mockery::stub(get_email_to_validate, "email_address", function() stop("no")) mockery::stub(get_email_to_validate, "get_maintainer_email", function() stop("no")) expect_snapshot({ get_email_to_validate(".") }) }) test_that("list_validated_emails2", { ef <- test_path("fixtures/validated_emails.csv") mockery::stub(list_validated_emails2, "email_file", ef) expect_snapshot({ list_validated_emails2(message = FALSE) list_validated_emails2(message = TRUE) }) ef <- tempfile() mockery::stub(list_validated_emails2, "email_file", ef) withr::local_options(rlib_interactive = TRUE) expect_snapshot({ list_validated_emails2(message = FALSE) list_validated_emails2(message = TRUE) }) }) test_that("email_file", { mockery::stub(email_file, "user_data_dir", "/config") expect_snapshot(email_file()) }) test_that("rc_new_token_interactive", { mockery::stub(rc_new_token_interactive, "query", "done") mockery::stub(rc_new_token_interactive, "readline", "token") mockery::stub( rc_new_token_interactive, "rc_new_token", function(email, token) list(email, token) ) expect_snapshot({ rc_new_token_interactive(email = "maint@example.com") }) mockery::stub(rc_new_token_interactive, "get_email_to_validate", "user@example.com") expect_snapshot({ rc_new_token_interactive() }) }) test_that("email_add_token", { tmp <- tempfile() ef <- file.path(tmp, "emails.csv") on.exit(unlink(tmp), add = TRUE) # file does not exist yet mockery::stub(email_add_token, "email_file", ef) email_add_token("newemail@example.com", "new-token") expect_snapshot(read_token_file(ef)) # file exists already, append email_add_token("newemail2@example.com", "new-token2") expect_snapshot(read_token_file(ef)) # file exists already, replace email_add_token("newemail@example.com", "new-new-token") expect_snapshot(read_token_file(ef)) }) rhub/tests/testthat/test-doctor.R0000644000176200001440000001155514605256751016630 0ustar liggesuserstest_that("rhub_doctor", { withr::local_options(cli.ansi = FALSE) # we do this here, so the web server process starts witg the same # working directory as the tests http$url() # we need to do this because we are wrapping text and also using # 
`transform` in `export_snapshot()`. withr::local_options(cli.width = Inf) # Do everything in a temporary package tmp <- tempfile() on.exit(unlink(tmp, recursive = TRUE), add = TRUE) dir.create(tmp) file.copy(test_path("fixtures/pkg"), tmp, recursive = TRUE) pkg <- file.path(tmp, "pkg") withr::local_dir(pkg) # must be a git repo as well dir.create(".git") withr::local_envvar(RHUB_WORKFLOW_URL = http$url("/rhub.yaml")) expect_snapshot({ rhub_setup() }, transform = redact_abs_path) mockery::stub(rhub_doctor, "doctor_find_gh_url", "https://github.com/r-lib/ps") mockery::stub(rhub_doctor, "doctor_find_pat", "secret") mockery::stub(rhub_doctor, "doctor_async_gql", list()) mockery::stub(rhub_doctor, "doctor_async_rest", list()) mockery::stub(rhub_doctor, "doctor_check_github", NULL) mockery::stub(rhub_doctor, "doctor_check_pat_scopes", NULL) mockery::stub(rhub_doctor, "doctor_check_workflow", NULL) expect_snapshot({ rhub_doctor() }, transform = redact_abs_path) }) test_that("doctor_find_gh_url", { # error mockery::stub( doctor_find_gh_url, "gert::git_info", list(remote = NA_character_) ) expect_snapshot(error = TRUE, { doctor_find_gh_url(".") }) # success mockery::stub( doctor_find_gh_url, "gert::git_info", list(remote = "origin") ) mockery::stub( doctor_find_gh_url, "gert::git_remote_info", list(url = "https://github.com/blahblah") ) expect_equal(doctor_find_gh_url("."), "https://github.com/blahblah") }) test_that("doctor_find_pat", { withr::local_options(cli.ansi = FALSE) # no git mockery::stub( doctor_find_pat, "gitcreds::gitcreds_get", function(...) { throw(pkg_error("oops", .class = "gitcreds_nogit_error")) } ) expect_snapshot(error = TRUE, { doctor_find_pat("https://github.com") }) # no credentials mockery::stub( doctor_find_pat, "gitcreds::gitcreds_get", function(...) { throw(pkg_error("oops", .class = "gitcreds_no_credentials")) } ) expect_snapshot(error = TRUE, { doctor_find_pat("https://github.com") }) # other error mockery::stub( doctor_find_pat, "gitcreds::gitcreds_get", function(...) { throw(pkg_error("oops")) } ) expect_snapshot(error = TRUE, { doctor_find_pat("https://github.com") }) # ok mockery::stub( doctor_find_pat, "gitcreds::gitcreds_get", list(password = "secret") ) expect_snapshot({ doctor_find_pat("https://github.com") }) }) test_that("doctor_check_github", { withr::local_options(cli.ansi = FALSE) # ok resp <- list(headers = list("x-ratelimit-limit" = 10000)) expect_snapshot({ doctor_check_github("https://github.com/r-lib/ps", resp) }) # not ok resp2 <- list(headers = list()) expect_snapshot(error = TRUE, { doctor_check_github("https://github.com/r-lib/ps", resp2) }) }) test_that("doctor_check_pat_scopes", { withr::local_options(cli.ansi = FALSE) # no pat? 
resp <- list(headers = list()) expect_snapshot(error = TRUE, { doctor_check_pat_scopes(resp) }) # bad scopes resp2 <- list(headers = list("x-oauth-scopes" = "foo, bar")) expect_snapshot(error = TRUE, { doctor_check_pat_scopes(resp2) }) # ok resp3 <- list(headers = list("x-oauth-scopes" = "foo, repo, bar")) expect_snapshot({ doctor_check_pat_scopes(resp3) }) }) test_that("doctor_check_workflow", { withr::local_options(cli.ansi = FALSE) url <- "https://github.com/r-lib/ps" # no workflow expect_snapshot(error = TRUE, { doctor_check_workflow(url, list(), list()) }) # no workflow, forked expect_snapshot(error = TRUE, { doctor_check_workflow(url, list(is_forked = TRUE), list()) }) # not active expect_snapshot(error = TRUE, { doctor_check_workflow( url, list(workflow = "ok"), list(workflow = list(state = "bad")) ) }) # ok expect_snapshot({ doctor_check_workflow( url, list(workflow = "ok"), list(workflow = list(state = "active")) ) }) }) test_that("doctor_async_gql", { resp <- readRDS(test_path("fixtures/gh-response-doctor-gql.rds")) mockery::stub( doctor_async_gql, "async_gh_gql_get", function(...) async_constant(resp) ) expect_snapshot({ synchronise(doctor_async_gql("https://github.com/r-lib/ps", "secret")) }) }) test_that("doctor_async_rest", { resp <- readRDS(test_path("fixtures/gh-response-doctor-rest.rds")) mockery::stub( doctor_async_rest, "async_gh_rest_get", function(...) async_constant(resp) ) expect_snapshot({ synchronise(doctor_async_rest("https://github.com/r-lib/ps", "secret")) }) })rhub/tests/testthat.R0000644000176200001440000000060414604730177014350 0ustar liggesusers# This file is part of the standard setup for testthat. # It is recommended that you do not modify it. # # Where should you do additional test configuration? # Learn more about the roles of various files in: # * https://r-pkgs.org/testing-design.html#sec-tests-files-overview # * https://testthat.r-lib.org/articles/special-files.html library(testthat) library(rhub) test_check("rhub") rhub/MD50000644000176200001440000001331714762422102011527 0ustar liggesusers352de3f2e4fa99c535738aba28b56b51 *DESCRIPTION b28f100bf3316e47fe62778c59ba1306 *LICENSE 9c3a94c48dba131f6c64aa7d1136e08b *NAMESPACE 05be7889c99abce56b0665894963aaf5 *NEWS.md 6dd76b1e3d7ef84704a3196945e6b0bb *R/a-rstudio-detect.R d119cdcfc02a46af86b116ad1e0118b5 *R/aa-assertthat.R 4dfc64e41e97114c9d22c4a099b7925a *R/aaa-async.R 2d0c266d7aa23dd05790d828729d4a1b *R/api.R c7086873b9943dcb8cecfe485011297d *R/assertions.R 191097c0d492fc5579d7decca50b7e7b *R/check.R 72317b595a2350471dbfc2cc4d091643 *R/cli.R f356a614687076967f07bd1bcbd636e3 *R/compat-vctrs.R 272fef6c707bb16d5d9807deaba56e5d *R/doctor.R 7ee87bc71cf3f00ef4a20ad53f505771 *R/errors.R 3bdf7ad2604492768652185787c5f420 *R/gh.R 2f08b1f4f21ef81772d91b5c2c071ed6 *R/http-cache.R f4b03fe2947cb7a359be7fb3930c75b2 *R/onload.R 1ba6815bc3eced9ab03c7f2ff83bf838 *R/platforms.R 2abcb2239aa4b49216c66a82e50df952 *R/rc.R c4ab122c1305e63f77250d04d5664a73 *R/rematch.R 87f4a0d692b81da00a893ac74ebcea5c *R/rhub-app.R c7ccdcf352bef81924322f763e30899b *R/rhubv1.R dcb314511bd4d5f7b445c7482d4407e3 *R/rhubv2.R 2f622fd3b6d83a82fcf1ea77d00f7724 *R/setup.R 1fcecdfece7678649e2efe4902f50c02 *R/utils.R 674c4c343fb1b12dc925048c4af8fe8f *inst/header.md 236930da695cfc889e6048228b2f7f62 *man/check.Rd ef07180ec10f9b18e1dbb643db82476b *man/check_for_cran.Rd a249b995fb5baba3df2abbca3d241442 *man/check_shortcuts.Rd 841166b4294b0f56f8d0e39f8fc44361 *man/figures/logo.png 1857b2185307df0f29ce6e09d4e11061 *man/figures/rhub-check-dark.svg 
404630c957086b5872ddf4a87883c234 *man/figures/rhub-check.svg c4cda033515183da83bfd11ea4b87864 *man/figures/rhub-doctor-dark.svg e5c65e56d63aebf0325a45bf50d38e11 *man/figures/rhub-doctor.svg dc2dda6a3e0528851c77fec81d21a173 *man/figures/rhub-platforms-dark.svg 57cf3e309dba6987fa8e494183078eb9 *man/figures/rhub-platforms.svg 4821160dacf85ecd444ed95cf7a929a4 *man/figures/rhub-setup-dark.svg bec9a4e9dd9f28d6e3abefc0262bd0f6 *man/figures/rhub-setup.svg fb04f2c5f19236b8844a1feddee04f19 *man/get_check.Rd 634e43fb9acdfc0e05c59d64183068ca *man/last_check.Rd 47c9430df2fe7d13252b9f0b9291abe0 *man/list_my_checks.Rd dd2f5b1a67e523f562d20cabc023527f *man/list_package_checks.Rd aaccd0081db841db32e36a9308d7150b *man/list_validated_emails.Rd fade433e4b204824670104ad695924c1 *man/local_check_linux.Rd e0bfc461025789e619250d53706694fd *man/local_check_linux_images.Rd 2732be71083611a42a644ae6e5509749 *man/platforms.Rd 2ab6c1b2509b60b700d309720f65f827 *man/rc_list_local_tokens.Rd d2127a46eabddb34288a0bd3b263fdcd *man/rc_list_repos.Rd 60ddbe463aa3eceae05764590512d22a *man/rc_new_token.Rd 09597174b08c5f66fcf0ace3c60a8d45 *man/rc_submit.Rd b4eeb02f0f17ef90d89de49891de515d *man/rhub-package.Rd ae77fe9e3645fa6b2120a5fb9a73a682 *man/rhub_check.Rd 77e2300aed993644619a769cad090f0a *man/rhub_doctor.Rd 776fb296a7f5a456824831cd9b4f62d7 *man/rhub_platforms.Rd 3b83f66786e2639648c7c6e2e031cb96 *man/rhub_setup.Rd 2d97c9ffeabadeecc8271b904f16b98b *man/rhubv2.Rd c3e9aabbd570a7825225dd29273f0dae *man/roxygen/meta.R 0c130939818b337755d4d5f13be7e428 *man/validate_email.Rd 8df98a8066203e627bfe45c1e3d55829 *tests/testthat.R 945a8cd93733dc290668d5d5d12bb75c *tests/testthat/_snaps/api.md cbb700f904899bc679c1f0b663697d1d *tests/testthat/_snaps/assertions.md 36e487f8cec7331f09b7685431f133bf *tests/testthat/_snaps/check.md e2fcdfb42369b260a80fc748c3c26ae2 *tests/testthat/_snaps/cli.md dc53408f00844ffe443a802c0f188588 *tests/testthat/_snaps/doctor.md 338fa3120f0b91b274169bf9f4f0ae18 *tests/testthat/_snaps/gh.md 739cfb9f7ef18cc4c0eeaaef5fadc3a6 *tests/testthat/_snaps/platforms.md 8c96c599d723c6c342b3af088343bce1 *tests/testthat/_snaps/rc.md 474b017eccd6c83f5e4f2553e20c263c *tests/testthat/_snaps/setup.md 31badbe78fc9bea3e896b96869ba174c *tests/testthat/_snaps/utils.md ab7ce86dd1310da3b1c4f4b3ef9b3727 *tests/testthat/fixtures/bad.tar.gz 96f882cdafd1e0af1ee421624dd445df *tests/testthat/fixtures/gh-response-doctor-gql.rds 0b97b120c5ee0ab037c2ad67727e08c7 *tests/testthat/fixtures/gh-response-doctor-rest.rds 8eedac4d54838281b7dfb6fef257ef31 *tests/testthat/fixtures/gh-response-gql.rds f375e1f1e06d6140355e7b27e87808d0 *tests/testthat/fixtures/gh-response-post.rds d002e8363566f64edf49cca547da0064 *tests/testthat/fixtures/gh-response.rds 8acb67ddd2064ebbfcde6b0161a9709d *tests/testthat/fixtures/manifest.json 0104f5201326a521470e00a3c4d7bb6f *tests/testthat/fixtures/pkg/DESCRIPTION dc21c19f0d6968ee25d441b2cf46017d *tests/testthat/fixtures/pkg/NAMESPACE d41d8cd98f00b204e9800998ecf8427e *tests/testthat/fixtures/pkg/R/pkg.R dbac35f974f1dcaa98cc5564a3089f37 *tests/testthat/fixtures/pkg_0.0.0.9000.tar.gz 5ba9594745a83fa79eaf3bb4c64ef9ec *tests/testthat/fixtures/platforms.json 944b8f9543f48a375587df1b84079ec2 *tests/testthat/fixtures/platforms2.json e3c96b55a57e8184896090a509b7ad0b *tests/testthat/fixtures/rc-response-list-repos.rds bd09934fa0b4a97ae03787e75028663c *tests/testthat/fixtures/rc-response-submit.rds c551fc6e34069e7b6747c58145e9cc70 *tests/testthat/fixtures/rhub.yaml 7e2c435f83a4e705c5743455a01ad21b 
*tests/testthat/fixtures/validated_emails.csv 6380686ca56d27d813a80ff46d01f4e6 *tests/testthat/helpers.R 5bbf852ca90159832c27f1f2ababe71d *tests/testthat/test-api.R 061bfbf005d79cccfee7679cdb1b142c *tests/testthat/test-assertions.R 3bb0cb80492453f0a72e6815c7c1e130 *tests/testthat/test-check.R da961ab1b9d0ac7e6f288e50432a95d0 *tests/testthat/test-cli.R c1f9f5101fc7255d8b4b08469e33518a *tests/testthat/test-doctor.R 15724bcb31fbbeb63d7c93b3d0fec331 *tests/testthat/test-gh.R 22016d8fe2e8e7536b691aff8f69b524 *tests/testthat/test-http-cache.R 312c6576982e699ed8b3244966f8fe62 *tests/testthat/test-platforms.R bbf99e4ca1037ee93daad4ada996a810 *tests/testthat/test-rc.R 4c4d4dd16d2e8cb03fa4c164855d53db *tests/testthat/test-rematch.R aad241e1482fb0764679d294da2998d6 *tests/testthat/test-setup.R 0bdfeed2e415e59b926508df8d490195 *tests/testthat/test-utils.R rhub/R/0000755000176200001440000000000014762413400011414 5ustar liggesusersrhub/R/doctor.R0000644000176200001440000002226414762410770013046 0ustar liggesusers #' Check if the current or the specified package is ready to use with R-hub #' #' Errors if the package or repository is not set up correctly, and #' advises on possible solutions. #' #' @param gh_url Use `NULL` for the package in the current working #' directory. Alternatively, use the URL of a GitHub repository that #' contains an R package that was set up to use with R-hub. #' #' @export rhub_doctor <- function(gh_url = NULL) { assert_that( is_optional_gh_url(gh_url) ) rpkg_root <- if (is.null(gh_url)) setup_find_r_package() git_root <- if (is.null(gh_url)) setup_find_git_root() if (is.null(gh_url)) check_rpkg_root(rpkg_root, git_root) gh_url <- gh_url %||% doctor_find_gh_url(repo = git_root) pat <- doctor_find_pat(gh_url) # ----------------------------------------------------------------------- # Do these up front, concurrently # We need the following pieces: # 1 check if we are indeed talking to GitHub # 2 check that the token is valid, and we have access to the repo # 3 check that the token has the right scopes # 4 check that the workflow file exists on the default branch # 5 check that the workflow exists (e.g. not a fork with disabled actions) # 6 check that the workflow is enabled # 7 check that the workflow file is the latest version # # Unfortunately we cannot do all this with a single graphql query, because # (AFAICT) you cannot currently query the workflows of a repository with # GraphQL. # # So we'll have # - a graphql query for (1), (2), (3), (4), (7) # - a REST query for (5) and (6) resp <- synchronise(when_all( gql = doctor_async_gql(gh_url, token = pat), wfl = doctor_async_rest(gh_url, token = pat) )) doctor_check_github(gh_url, resp$gql) doctor_check_pat_scopes(resp$gql) doctor_check_workflow(gh_url, resp$gql, resp$wfl) cli::cli_alert( "WOOT! You are ready to run {.run rhub::rhub_check()} on this package.", wrap = TRUE ) invisible(NULL) } # TODO: multiple remotes, what if it is not origin? # TODO: what if there is a remote, but it does not have a URL? doctor_find_gh_url <- function(repo) { remote <- gert::git_info(repo)$remote if (is.na(remote)) { throw(pkg_error( call. = FALSE, "Cannot determine GitHub URL from git remote in repository at {.file {repo}}. Is your repository on GitHub?", i = "If this repository is on GitHub, call {.code git remote add origin } to add GitHub as a remote.", i = "Alternatively, specify the GitHub URL of the repository in the {.arg gh_url} argument.", i = "If it is not on GitHub, then you'll need to put it there. 
Create a new repository at {.url https://github.com/new}." )) } gert::git_remote_info(repo = repo)$url } doctor_find_pat <- function(pat_url) { pid <- cli_status("Do you have a GitHub personal access token (PAT)?") # TODO: get GH URL from git remote, if any tryCatch({ url <- parse_gh_url(pat_url)$pat_url pat <- gitcreds::gitcreds_get(url = url)$password }, gitcreds_nogit_error = function(e) { cli::cli_status_clear(pid, result = "failed") env <- gitcreds::gitcreds_cache_envvar(url) throw(pkg_error( call. = FALSE, "Could not find a GitHub personal access token (PAT) for {.url {pat_url}}.", i = "I also could not find a working git installation. If you don't want to install git, but you have a PAT, you can set the {.env {env}} environment variable to the PAT.", i = "You can read more about PATs at {.url https://usethis.r-lib.org/articles/git-credentials.html}." )) }, gitcreds_no_credentials = function(e) { cli::cli_status_clear(pid, result = "failed") env <- gitcreds::gitcreds_cache_envvar(url) throw(pkg_error( call. = FALSE, "Could not find a GitHub personal access token (PAT) for {.url {pat_url}}.", i = "If you have a GitHub PAT, you can use {.run gitcreds::gitcreds_set()} to add it to the git credential store, so R-hub can use it.", i = "If you don't have a PAT, you can create one by running {.run usethis::create_github_token()}.", i = "You can read more about PATs at {.url https://usethis.r-lib.org/articles/git-credentials.html}." )) }, error = function(e) { cli::cli_status_clear(pid, result = "failed") throw(e) } ) cli::cli_status_clear(pid, result = "clear") cli::cli_alert_success("Found GitHub PAT.") pat } doctor_check_github <- function(gh_url, resp) { pid <- cli_status(cli::format_inline("Is the package on GitHub at {.url {gh_url}}?")) if (!"x-ratelimit-limit" %in% names(resp$headers)) { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. = FALSE, "Remote repository at {.url {gh_url}} does not seem like a GitHub repository.", i = "R-hub only supports GitHub packages in GitHub repositories currently.", i = "If you think that this is a bug in the {.pkg rhub} package, please let us know!" )) } cli::cli_status_clear(pid, result = "clear") cli::cli_alert_success( "Found repository on GitHub at {.url {gh_url}}.", wrap = TRUE ) } # we can assume a GH response at this point doctor_check_pat_scopes <- function(resp) { pid <- cli_status("Does your GitHub PAT have the right scopes?") scopes <- trimws(strsplit( resp[["headers"]][["x-oauth-scopes"]] %||% "NOPE", ",", fixed = TRUE )[[1]]) if (identical(scopes, "NOPE")) { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. = FALSE, "Could not use the PAT to authenticate to GitHub", i = "Make sure that the URL and your PAT are correct." )) } if (!"repo" %in% scopes) { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. = FALSE, "Your PAT does not have a {.code repo} scope.", i = "Without a {.code repo} scope R-hub cannot start jobs on GitHub.", i = "Change the scopes of the PAT on the GitHub web page, or create a new PAT." )) } cli::cli_status_clear(pid, result = "clear") cli::cli_alert_success("GitHub PAT has the right scopes.") } doctor_check_workflow <- function(gh_url, gql, rest) { pid <- cli_status( "Does the default branch of your git repo have the R-hub workflow file?" ) if (is.null(gql$workflow)) { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. 
= FALSE, "Could not find R-hub's workflow file in the repository at {.url {gh_url}}.", i = "The workflow file must be at {.path .github/workflows/rhub.yaml}.", i = "If you have added and committed the workflow file, you need to push the commit to GitHub with {.code git push}.", i = if (isTRUE(gql$is_forked)) "This repository is a fork. Make sure you enabled GitHub Actions on it, in the {.emph Actions} tab of the repository web page." )) } if (rest$workflow$state != "active") { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. = FALSE, "The workflow is disabled.", i = "You need to enable it, click on the {.code ...} button at the top right corner of the web page of the workflow." )) } cli::cli_status_clear(pid, result = "clear") cli::cli_alert_success( "Found R-hub workflow in default branch, and it is active." ) } # We need the following pieces: # - check if we are indeed talking to GitHub # - check that the token is valid, and we have access to the repo # - check that the token has the right scopes # - check that the workflow file exists on the default branch # - check that the workflow file is the latest version doctor_async_gql <- function(gh_url, token) { url <- parse_gh_url(gh_url) query <- glue::glue("{ repository(owner: \"\", name: \"\") { workflow_file: object(expression: \"HEAD:.github/workflows/rhub.yaml\") { ... on Blob { isBinary text } } sha: object(expression: \"HEAD\") { oid } branch: defaultBranchRef { name } isFork } }", .open = "<", .close = ">") async_gh_gql_get(url$graphql, query, token)$ then(function(resp) { data <- resp$content$data list( status_code = resp$status_code, headers = resp$headers, is_repo = !is.null(data$repository), workflow_binary = data$repository$workflow_file$isBinary, workflow = data$repository$workflow_file$text, sha = data$repository$sha$oid, branch = data$repository$branch$name, is_fork = data$repository$isFork, errors = resp$content$errors ) }) } # Goal is to # - check if workflow exist (e.g. not a form with disabled actions) # - check that workflow is enabled doctor_async_rest <- function(gh_url, token) { url <- parse_gh_url(gh_url) ep <- glue::glue("/repos/{url$user}/{url$repo}/actions/workflows/rhub.yaml") async_gh_rest_get(url$api, ep, token)$ then(function(resp) { list( status_code = resp$status_code, headers = resp$headers, workflow = resp$content, errors = resp$content$errors ) }) } rhub/R/rematch.R0000644000176200001440000000165214603437121013166 0ustar liggesusersre_match <- function(text, pattern, perl = TRUE, ...) { stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern)) text <- as.character(text) match <- regexpr(pattern, text, perl = perl, ...) 
start <- as.vector(match) length <- attr(match, "match.length") end <- start + length - 1L matchstr <- substring(text, start, end) matchstr[ start == -1 ] <- NA_character_ res <- data.frame( stringsAsFactors = FALSE, .text = text, .match = matchstr ) if (!is.null(attr(match, "capture.start"))) { gstart <- attr(match, "capture.start") glength <- attr(match, "capture.length") gend <- gstart + glength - 1L groupstr <- substring(text, gstart, gend) groupstr[ gstart == -1 ] <- NA_character_ dim(groupstr) <- dim(gstart) res <- cbind(groupstr, res, stringsAsFactors = FALSE) } names(res) <- c(attr(match, "capture.names"), ".text", ".match") res } rhub/R/http-cache.R0000644000176200001440000000101414603437121013553 0ustar liggesusers the_cache <- new.env(parent = emptyenv()) async_cached_http_get <- function(url, headers = character(), options = list()) { hash <- cli::hash_md5(paste0("http-get-", url)) if (hash %in% names(the_cache)) { async_constant(the_cache[[hash]]) } else { http_get(url, headers = headers, options = options)$ then(http_stop_for_status)$ then(function(response) { json <- rawToChar(response$content) the_cache[[hash]] <- json json }) } } rhub/R/gh.R0000644000176200001440000000422214605501143012133 0ustar liggesusersparse_gh_url <- function(url) { pcs <- parse_url(url) host <- pcs$host if (pcs$host == "github.com") { api <- paste0(pcs$protocol, "://api.github.com") graphql <- paste0(pcs$protocol, "://api.github.com/graphql") } else { api <- paste0(pcs$protocol, "://", pcs$host, "/api/v3") graphql <- paste0(pcs$protocol, "://", pcs$host, "/api/graphql") } cmps <- strsplit(pcs$path, "/", fixed = TRUE)[[1]] if (length(cmps) >= 1 && cmps[1] == "") cmps <- cmps[-1] if (length(cmps) < 2) cmps <- c(cmps, "", "")[1:2] cmps[2] <- sub("[.]git$", "", cmps[2]) list( host = host, api = api, graphql = graphql, user = cmps[1], repo = cmps[2], slug = paste0(cmps[1], "/", cmps[2]), pat_url = paste0(pcs$protocol, "://", host, "/", cmps[1], "/", cmps[2]) ) } gh_headers <- function(token) { c( Accept = "application/vnd.github+json", Authorization = paste0("Bearer ", token) ) } gh_query_process_response <- function(resp) { if (grepl("^application/json\\b", resp$type)) { resp$content <- jsonlite::fromJSON( rawToChar(resp$content), simplifyVector = FALSE ) } resp$headers <- curl::parse_headers_list(resp$headers) resp } gh_rest_get <- function(host, endpoint, token) { synchronise(async_gh_rest_get(host, endpoint, token = token)) } async_gh_rest_get <- function(host, endpoint, token) { url <- paste0(host, endpoint) headers <- gh_headers(token) http_get(url, headers = headers)$ then(gh_query_process_response) } gh_rest_post <- function(host, endpoint, token, data) { synchronise(async_gh_rest_post(host, endpoint, token, data)) } async_gh_rest_post <- function(host, endpoint, token, data) { url <- paste0(host, endpoint) headers <- gh_headers(token) http_post(url, data = data, headers = headers)$ then(gh_query_process_response) } gh_gql_get <- function(host, query, token) { synchronise(async_gh_gql_get(host, query, token)) } async_gh_gql_get <- function(host, query, token) { headers <- gh_headers(token) data <- jsonlite::toJSON(list(query = query), auto_unbox = TRUE) http_post(host, headers = headers, data = data)$ then(gh_query_process_response) } rhub/R/aaa-async.R0000644000176200001440000043347014762412721013414 0ustar liggesusers# nocov start #' Create an async function #' #' Create an async function, that returns a deferred value, from a #' regular function. 
If `fun` is already an async function, then it does #' nothing, just returns it. #' #' The result function will have the same arguments, with the same default #' values, and the same environment as the original input function. #' #' @param fun Original function. #' @return Async version of the original function. #' #' @noRd #' @examples #' f <- function(x) 42 #' af <- async(f) #' is_async(f) #' is_async(af) #' f() #' synchronise(dx <- af()) #' dx async <- function(fun) { fun <- as.function(fun) if (is_async(fun)) return(fun) async_fun <- fun body(async_fun) <- bquote({ mget(ls(environment(), all.names = TRUE), environment()) fun2 <- function() { evalq( .(body(fun)), envir = parent.env(environment()) ) } deferred$new( type = "async", action = function(resolve) resolve(fun2()) ) }) # This is needed, otherwise async_fun might not find 'deferred' async_env <- new.env(parent = environment(async_fun)) async_env$deferred <- deferred environment(async_fun) <- async_env mark_as_async(async_fun) } mark_as_async <- function(fun) { attr(body(fun), "async")$async <- TRUE ## These are not valid any more, anyway attr(fun, "srcref") <- NULL attr(body(fun), "srcref") <- NULL fun } #' Checks if a function is async #' #' If `fun` is not a function, an error is thrown. #' #' Currently, it checks for the `async` attribute, which is set by #' [async()]. #' #' @param fun Function. #' @return Logical scalar, whether `fun` is async. #' #' @noRd #' @examples #' f <- function(x) 42 #' af <- async(f) #' is_async(f) #' is_async(af) #' f() #' synchronise(dx <- af()) #' dx is_async <- function(fun) { assert_that(is.function(fun)) is.list(a <- attr(body(fun), "async")) && identical(a$async, TRUE) } is_string <- function(x) { is.character(x) && length(x) == 1 && !is.na(x) } on_failure(is_string) <- function(call, env) { paste0(deparse(call$x), " is not a string (length 1 character)") } is_flag <- function(x) { is.logical(x) && length(x) == 1 && !is.na(x) } on_failure(is_flag) <- function(call, env) { paste0(deparse(call$x), " is not a flag (length 1 logical)") } is_action_function <- function(x) { is.function(x) && length(formals(x)) %in% 1:2 } on_failure(is_action_function) <- function(call, env) { paste0(deparse(call$x), " is not a function with two arguments") } is_time_interval <- function(x) { inherits(x, "difftime") || (is.numeric(x) && length(x) == 1 && !is.na(x) && x >= 0) } on_failure(is_time_interval) <- function(call, env) { paste0(deparse(call$x), " is not a valid time interval") } is_count <- function(x) { is.numeric(x) && length(x) == 1 && !is.na(x) && as.integer(x) == x } on_failure(is_count) <- function(call, env) { paste0(deparse(call$x), " is not a count (non-negative integer)") } is_flag <- function(x) { is.logical(x) && length(x) == 1 && !is.na(x) } on_failure(is_flag) <- function(call, env) { paste0(deparse(call$x), " must be a flag (length 1 logical)") } #' Retry an asynchronous function with exponential backoff #' #' Keeps trying until the function's deferred value resolves without #' error, or `times` tries have been performed, or `time_limit` seconds #' have passed since the start of the first try. #' #' Note that all unnamed arguments are passed to `task`. #' #' @param task An asynchronous function. #' @param ... Arguments to pass to `task`. #' @param .args More arguments to pass to `task`. #' @param times Maximum number of tries. #' @param time_limit Maximum number of seconds to try. 
#' @param custom_backoff If not `NULL` then a callback function to #' calculate waiting time, after the `i`the try. `i` is passed as an #' argument. If `NULL`, then the default is used, which is a uniform #' random number of seconds between 1 and 2^i. #' @param on_progress Callback function for a progress bar. Retries are #' announced here, if not `NULL`. `on_progress` is called with two #' arguments. The first is a named list with entries: #' * `event`: string that is either `"retry"` or `"givenup"`, #' * `tries`: number of tried so far, #' * `spent`: number of seconds spent trying so far, #' * `error`: the error object for the last failure, #' * `retry_in`: number of seconds before the next try. #' The second argument is `progress_data`. #' @param progress_data `async_backoff()` will pass this object to #' `on_progress` as the second argument. #' @return Deferred value for the operation with retries. #' #' @family async control flow #' @noRd #' @examples #' \donttest{ #' afun <- function() { #' wait_100_ms <- function(i) 0.1 #' async_backoff( #' function() if (runif(1) < 0.8) stop("nope") else "yes!", #' times = 5, #' custom_backoff = wait_100_ms #' ) #' } #' #' # There is a slight chance that it fails #' tryCatch(synchronise(afun()), error = function(e) e) #' } async_backoff <- function(task, ..., .args = list(), times = Inf, time_limit = Inf, custom_backoff = NULL, on_progress = NULL, progress_data = NULL) { task <- async(task) args <- c(list(...), .args) times <- times time_limit <- time_limit custom_backoff <- custom_backoff %||% default_backoff on_progress <- on_progress progress_data <- progress_data did <- 0L started <- NULL limit <- NULL self <- deferred$new( type = "backoff", call = sys.call(), action = function(resolve) { started <<- Sys.time() limit <<- started + time_limit do.call(task, args)$then(self) }, parent_reject = function(value, resolve) { did <<- did + 1L now <- Sys.time() if (did < times && now < limit) { wait <- custom_backoff(did) if (!is.null(on_progress)) { on_progress(list( event = "retry", tries = did, spent = now - started, error = value, retry_in = wait ), progress_data) } delay(wait)$ then(function() do.call(task, args))$ then(self) } else { if (!is.null(on_progress)) { on_progress(list( event = "givenup", tries = did, spent = now - started, error = value, retry_in = NA_real_ ), progress_data) } stop(value) } } ) } async_backoff <- mark_as_async(async_backoff) default_backoff <- function(i) { as.integer(stats::runif(1, min = 1, max = 2^i) * 1000) / 1000 } #' Asynchronous function call, in a worker pool #' #' The function will be called on another process, very much like #' [callr::r()]. #' #' @param func Function to call. See also the notes at [callr::r()]. #' @param args Arguments to pass to the function. They will be copied #' to the worker process. #' @return Deferred object. #' #' @noRd call_function <- function(func, args = list()) { func; args id <- NULL deferred$new( type = "pool-task", call = sys.call(), action = function(resolve) { resolve reject <- environment(resolve)$private$reject id <<- get_default_event_loop()$add_pool_task( function(err, res) if (is.null(err)) resolve(res) else reject(err), list(func = func, args = args)) }, on_cancel = function(reason) { if (!is.null(id)) { get_default_event_loop()$cancel(id) } } ) } call_function <- mark_as_async(call_function) #' Make a minimal deferred that resolves to the specified value #' #' This is sometimes useful to start a deferred chain. 
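#'
#' For example (a minimal sketch, relying only on the `async()`, `$then()`
#' and `synchronise()` helpers used elsewhere in this file), a constant
#' deferred can seed a short chain of transformations:
#'
#' ```
#' afun <- async(function() {
#'   # start the chain from a constant value and transform it in a `then()` step
#'   async_constant(21)$
#'     then(function(x) x * 2)
#' })
#' synchronise(afun())   # resolves to 42
#' ```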
#' #' Note that the evaluation of `value` is forced when the deferred value #' is created. #' #' @param value The value to resolve to. #' @return A deferred value. #' #' @noRd #' @examples #' afun <- async(function() { #' async_constant(1/100)$ #' then(function(x) delay(x))$ #' then(function(x) print(x)) #' }) #' synchronise(afun()) async_constant <- function(value = NULL) { force(value) deferred$new( type = "constant", call = sys.call(), function(resolve) resolve(value)) } async_constant <- mark_as_async(async_constant) async_env <- new.env(parent = emptyenv()) async_env$loops <- list() get_default_event_loop <- function() { num_loops <- length(async_env$loops) if (num_loops == 0) { err <- make_error( "You can only call async functions from an async context", class = "async_synchronization_barrier_error" ) stop(err) } async_env$loops[[num_loops]] } push_event_loop <- function() { num_loops <- length(async_env$loops) if (num_loops > 0) async_env$loops[[num_loops]]$suspend() new_el <- event_loop$new() async_env$loops <- c(async_env$loops, list(new_el)) new_el } pop_event_loop <- function() { num_loops <- length(async_env$loops) async_env$loops[[num_loops]] <- NULL if (num_loops > 1) async_env$loops[[num_loops - 1]]$wakeup() } #' Async debugging utilities #' #' Helper function to help with the non-trivial debugging of async code. #' #' Async debugging can be turned on by setting the `async_debug` global #' option to `TRUE`: #' ``` #' options(async_debug = TRUE) #' ``` #' Setting this value to `FALSE` will turn off debugging. #' #' If debugging is on, a [synchronise()] call will stop at the beginning #' of the event loop. No deferred actions of other callbacks have run at #' this point. [synchronise()] stops by calling [base::browser()]. All the #' usual [browser()] commands (see its manual) can be used here, plus some #' extra commands to help async debugging. The extra commands: #' #' `async_debug_shortcuts()` adds handy shortcuts to most of the helper #' functions. E.g. `async_next()` can be invoked as `.an` (without the #' parens). You only need to run it once per R session. Note that it adds #' the shortcuts to the global environment. #' #' `async_debug_remove_shortcuts()` removes the shortcuts from the global #' environment. #' #' `.an` (or `async_next()`) runs the next iteration of the event loop. #' Note that it does not return until _something_ happens in the event loop: #' an action or a parent callback is executed, or HTTP or other I/O is #' performed. Also note, that a single iteration of the event loop typically #' runs multiple action, parent or other callbacks. Once the iteration is #' done, the control is returned to the browser. #' #' `.as` (or `async_step()`) is similar to `.an`, but it also starts the #' debugging of the action or parent callbacks. I.e. another [browser()] is #' called at the beginning of _all_ callbacks in the next iteration of the #' event loop. #' #' `.asb` (or `async_step_back()`) stops the debugging of the callbacks. #' It does not actually exdecutes anything from the event loop, so to go #' back to the main async browser, you also need to execute `c` (continue). #' #' `.al` (or `async_list()`) lists all deferred values in the current async #' phase. (Only the ones that already exist, some may be created in the #' future.) It returns a data frame with columns: #' #' * `id`: The integer id of the deferred value. #' * `parents`: Integer vector, the parents of the deferred value. 
#' * `label`: A character label, that is used by `async_tree()` to nicely #' format information about a deferred value. #' * `call`: The call (language object) that created the deferred value. #' * `children`: The list of children, an integer vector. A deferred value #' can only have one child, unless it is shared. #' * `type`: The type of the deferred value. This is an arbitrary label, #' specified when the deferred value was created. #' * `running`: Whether the deferred value is already running. #' * `state`: The state of the deferred value, `"pending"`, `"fulfilled"` or #' `"rejected"`. This is typically pending, since resolved deferred #' values are removed from the async DAG (in the next event loop #' iteration.) #' * `cancelled`: Whether the deferred value was cancelled. #' * `shared`: Whether the deferred value is shared. #' * `filename`: The file name for the source code that created the #' deferred value. Only present if this code was parsed with source #' references enabled. #' * `position`: The start file position, in line:column format, as a #' string. Only present if this code was parsed with source references #' enabled. #' #' `.at` (or `async_tree()`) prints the DAG of the deferred values. #' #' `async_debug()` can be used to debug the action and/or parent callbacks #' of the specified deferred value. #' #' `async_wait_for()` runs the event loop until the specified deferred #' value is resolved (i.e. fulfilled or rejected). #' #' `.aw` (or `async_where()`) prints a call stack and marks the frame the #' corresponds to an action or parent callback. #' #' @param el Event loop, defaults to the current event loop. #' @param def Deferred value that is used at the root of the DAG. Defaults #' to the deferred value corresponding to the result of the async phase. #' @param id Integer scalar, the if of the deferred to debug or to wait for. #' @param action Whether to debug the action callback. #' @param parent Whether to debug the parent callbacks. #' @param calls The calls to print, result of `sys.calls()`. Defaults to #' the current call stack. #' @param parents The parent frames in the call stack, result of #' `sys.parents()`. Defaults to the current parents. #' @param frm The async frame to mark. Defaults to the most recent async #' frame in the stack. #' #' @name async_debug #' @noRd NULL #' @noRd #' @aliases .an #' @rdname async_debug async_next <- function(el = NULL) { el <- el %||% find_sync_frame()$new_el if (is.null(el)) stop("No async context") ## TODO: some visual indication that something has happened? if (! el$run("once")) message("[ASYNC] async phase complete") } # nocov start #' @noRd #' @aliases .as #' @rdname async_debug async_step <- function() { el <- find_sync_frame()$new_el if (is.null(el)) stop("No async context") ## TODO: some visual indication that something has happened? old <- options(async_debug_steps = TRUE) on.exit(options(old)) if (! 
el$run("once")) { message("[ASYNC] async phase complete") } } #' @noRd #' @aliases .asb #' @rdname async_debug async_step_back <- function() { options(async_debug_steps = FALSE) message("[ASYNC] step back, you still need to 'c'ontinue") } # nocov end #' @noRd #' @aliases .al #' @rdname async_debug async_list <- function(def = NULL) { def <- def %||% find_sync_frame()$res if (is.null(def)) stop("No async context") info <- list() find_parents <- function(def) { info <<- c(info, list(get_private(def)$get_info())) prn <- get_private(def)$parents lapply(prn, find_parents) } find_parents(def) do.call(rbind, info) } #' @noRd #' @aliases .at #' @rdname async_debug async_tree <- function(def = NULL) { def <- def %||% find_sync_frame()$res data <- async_list(def) root <- as.character(get_private(def)$id) cli::tree(data, root = root) } #' @noRd #' @rdname async_debug async_debug <- function(id, action = TRUE, parent = TRUE) { def <- find_deferred(id) if (is.null(def)) stop("Cannot find deferred `", id, "`") prv <- get_private(def) if (prv$state != "pending") { message("[ASYNC] ", id, " already resolved") return(invisible()) } what <- character() if (action) { if (prv$running) { message("[ASYNC] ", id, " action already running") } else if (is.null(prv$action)) { message("[ASYNC] ", id, " has no action") } else { ## TODO: make a copy? Or should the deferred make a copy? debug1(prv$action) what <- "action" } } if (parent) { ## TODO: make copies? debug_all(prv$parent_resolve) debug_all(prv$parent_reject) what <- c(what, "parent callbacks") } if (length(what) == 1) { message("[ASYNC] ", id, " debugging ", what) } if (length(what) == 2) { message("[ASYNC] ", id, " debugging ", what[1], " and ", what[2]) } invisible(def) } #' @noRd #' @rdname async_debug async_wait_for <- function(id) { el <- find_sync_frame()$new_el if (is.null(el)) stop("No async context") def <- find_deferred(id) if (is.null(def)) stop("Cannot find deferred `", id, "`") priv <- get_private(def) while (priv$state == "pending") el$run("once") message("[ASYNC] ", id, " resolved") } #' @noRd #' @aliases .aw #' @rdname async_debug async_where <- function(calls = sys.calls(), parents = sys.parents(), frm = get_async_frames()) { afrm <- viapply(frm, "[[", "frame") num <- seq_along(calls) src <- lapply(calls, get_source_position) res <- data.frame( stringsAsFactors = FALSE, call = I(calls), parent = parents, filename = vcapply(src, "[[", "filename"), position = vcapply(src, "[[", "position"), async = num %in% afrm ) res$def_id <- NA_integer_ res$def_id[afrm] <- viapply(frm, function(x) x$deferred) res$def_cb_type <- NA_character_ res$def_cb_type[afrm] <- vcapply(frm, function(x) x$type) res$def_call <- I(list(NULL)) res$def_call[afrm] <- lapply(frm, "[[", "call") def_src <- lapply(res$def_call[afrm], get_source_position) res$def_filename <- NA_character_ res$def_filename[afrm] <- vcapply(def_src, "[[", "filename") res$def_position <- NA_character_ res$def_position[afrm] <- vcapply(def_src, "[[", "position") class(res) <- c("async_where", class(res)) res } # nocov start #' @noRd print.async_where <- function(x, ...) { cat(format(x, ...)) invisible(x) } # nocov end #' @noRd format.async_where <- function(x, ...) { paste0(paste( formatC(seq_len(nrow(x)), width = 3), vcapply(x$call, expr_name), paste0(" ", x$filename, ":", x$position), ifelse (! 
x$async, "", paste0("\n ", x$def_id, " ", x$def_cb_type, " ", x$def_call, " ", x$def_filename, ":", x$def_position)), collapse = "\n" ), "\n") } get_async_frames <- function() { drop_nulls(lapply(seq_along(sys.frames()), function(i) { if (! is.null(data <- sys.frame(i)$`__async_data__`)) { list(frame = i + data$skip %||% 1L, deferred = data[[1]], type = data[[2]], call = get_private(data[[3]])$mycall) } })) } find_sync_frame <- function() { for (i in seq_along(sys.frames())) { cand <- sys.frame(-i) if (isTRUE(cand$`__async_synchronise_frame__`)) return(cand) } } find_async_data_frame <- function() { frames <- sys.frames() for (i in seq_along(frames)) { cand <- sys.frame(-i) if (!is.null(data <- cand$`__async_data__`)) { return(list(frame = length(frames) - i + 1L, data = data)) } } } find_deferred <- function(id, def = NULL) { def <- def %||% find_sync_frame()$res if (is.null(def)) stop("No async context") search_parents <- function(def) { if (get_private(def)$id == id) return(def) prn <- get_private(def)$parents for (p in lapply(prn, search_parents)) { if (!is.null(p)) return(p) } } search_parents(def) } # nocov start debug1 <- function(fun) { debugonce(fun) } #' @noRd #' @rdname async_debug async_debug_shortcuts <- function() { as <- function(name, fun) { makeActiveBinding(name, fun, .GlobalEnv) } as(".an", async_next) as(".as", async_step) as(".asb", async_step_back) as(".al", async_list) as(".at", async_tree) as(".aw", async_where) } #' @noRd #' @rdname async_debug async_debug_remove_shortcuts <- function() { tryCatch( rm(list = c(".an", ".as", ".asb", ".al", ".at", ".aw"), envir = .GlobalEnv), error = function(x) x) } # nocov end debug_all <- function(fun) { debug(fun) } #' Deferred value #' #' @section Usage: #' ``` #' dx <- deferred$new(action = NULL, on_progress = NULL, on_cancel = NULL, #' parents = NULL, parent_resolve = NULL, parent_reject = NULL, #' type = NULL) #' dx$then(on_fulfilled) #' dx$catch(...) #' dx$finally(on_finally) #' dx$cancel(reason = "Cancelled") #' dx$share() #' ``` #' #' @section Arguments: #' * `action`: Function to call when the deferred value starts running. #' it needs to have at least two arguments: `resolve` and `reject`, #' and the third `progress` argument is optional. See details below. #' * `on_progress`: A function to call to report progress. See details #' below. #' * `on_cancel`: A function to call when the deferred is cancelled. See #' details below. #' * `parents`: A list of deferred values that will be the parents of the #' deferred value being created. If some of them are already owned, #' an error is thrown. #' * `parent_resolve`: A function to call when a parent is resolved. #' See details below. #' * `parent_reject`: A function to call when a parent throws an error. #' See details below. #' * `type`: A label that can be used to indicate the type of the deferred #' value to create. This might be useful for debugging, but otherwise #' it is not used. #' * `on_fulfilled`: Function to call when the parent deferred is resolved. #' Essentially this is the `parent_resolve` function of the `then()` #' deferred. #' * `...` Error handlers, as in `tryCatch()`, see details below. #' * `on_finally`: Function to call, after the deferred value is resolved #' or after it has thrown an error. It will be called without arguments. #' * `reason` Error message or error object that will be used to cancel the #' deferred. #' #' @section Deferred values: #' #' Asynchronous computation is represented by deferred values. 
#' A deferred value is an [R6](https://github.com/wch/R6) object.
#'
#' ```
#' deferred$new(action = NULL, on_progress = NULL, on_cancel = NULL,
#'    parents = NULL, parent_resolve = NULL, parent_reject = NULL,
#'    type = NULL)
#' ```
#'
#' Creates a new deferred value. `action` is a function that is called
#' once the deferred value is _started_ (i.e. _not_ when `dx` is created).
#' It must have one or two arguments: `resolve`, or `resolve` and `progress`.
#' It should call `resolve` when it is done, with the final value of the
#' deferred as the argument. (See examples below.) If it has two arguments,
#' then the second one is a callback function for creating progress bars.
#' The deferred value may report its progress through this function.
#' See details in the _Progress bars_ section below.
#'
#' `action` is called when the evaluation of the deferred value is started.
#' Only deferred values that are needed to calculate the value of the
#' async phase are evaluated. (See also _Lazy Evaluation_ below.)
#'
#' Note that `action` is optional: for some deferred values, no action is
#' taken when they are started. (These typically depend on their parent
#' nodes.)
#'
#' `on_cancel` is a function that is called without arguments when a
#' deferred value is cancelled. This includes explicit cancellation by
#' calling its `$cancel()` method, or auto-cancellation (see below).
#'
#' `parents` is a list of deferred values that need to be computed before
#' the current deferred value. When a parent deferred is resolved, the
#' `parent_resolve` function is called. When a parent deferred throws an
#' error, the `parent_reject` function is called.
#'
#' `parent_resolve` is a function with (up to) two arguments:
#' `value` and `resolve`. It will be called with the value of the
#' parent and the `resolve` callback of the deferred.
#' `parent_resolve` can resolve the deferred by calling the supplied `resolve`
#' callback, or it can keep waiting on other parents and/or external
#' computation. It may throw an error to fail the deferred.
#'
#' `parent_resolve` allows some shorthands as well:
#' * `NULL`: the deferred is resolved with the value of the parent.
#' * A function with no arguments: this function is called, and the deferred
#'   resolves to its return value.
#' * A function with one argument: this function is called with the value
#'   of the parent as the argument, and the deferred is resolved to its
#'   return value.
#' * A function with arguments `value` and `resolve`. This function is
#'   called with the value of the parent, and the resolve callback of the
#'   deferred.
#'
#' `parent_reject` is a function with (up to) two arguments:
#' `value`, `resolve`. It will be called with the error object
#' thrown by the parent.
#'
#' `parent_reject` can resolve the deferred by calling the supplied
#' `resolve` callback, or it can keep waiting on other parents and/or
#' external computation. It may throw an error to fail the deferred. It may
#' also re-throw the error received from the parent, if it does not wish
#' to handle it.
#'
#' `parent_reject` accepts some shorthands as well:
#' * `NULL`: the deferred throws the same error as the parent.
#' * A function with no arguments: this function is called, and the deferred
#'   resolves to its return value.
#' * A function with one argument: this function is called with the value
#'   of the parent as the argument, and the deferred is resolved to its
#'   return value.
#' * A function with arguments `value` and `resolve`. This function is
#'   called with the value of the parent, and the resolve callback of the
#'   deferred.
#' * A list of named error handlers, corresponding to the error handlers
#'   of `$catch()` (and `tryCatch()`). If these error handlers handle the
#'   parent's error, the deferred is resolved with the result of the
#'   handlers. Otherwise the deferred will be failed with the parent's
#'   error. The error handlers may also throw a new error.
#'
#' @section Error handling:
#'
#' The action function of the deferred, and also the `parent_resolve` and
#' `parent_reject` handlers may throw errors if the deferred cannot be
#' computed. Errors can be handled with the `$catch()` member function:
#'
#' ```
#' dx$catch(...)
#' ```
#'
#' It takes the same named error handler arguments as `tryCatch()`.
#'
#' Technically, `$catch()` creates a new deferred value, and this new
#' deferred value is resolved to the result of the error handlers. If the
#' handlers do not handle the error, then the new deferred will fail
#' with the same error.
#'
#' The `$finally()` method can be used to create finalizer code that
#' runs when a deferred is resolved or when it fails. It can be used to
#' close database connections or other resources:
#'
#' ```
#' dx$finally(on_finally)
#' ```
#'
#' Technically, `$finally()` creates a new deferred, which will resolve
#' or fail the same way as the original one, but before doing that it will
#' call the `on_finally` function with no arguments.
#'
#' @section Builtin async functions:
#'
#' The async package comes with some basic async functions:
#' * [delay()] sets a timer and then resolves to `TRUE`.
#' * [async_constant()] resolves successfully to its argument.
#' * [http_get()] and [http_head()] make HTTP GET and HEAD requests.
#'
#' @section Combining async values:
#'
#' Async computation (just like ordinary sync computation) usually
#' consists of several steps that need to be performed in the specified
#' order. The `$then()` method specifies that a step of computation needs
#' to be performed after the deferred value is known:
#'
#' ```
#' dx$then(on_fulfilled)
#' ```
#'
#' `on_fulfilled` is a function with zero or one formal arguments.
#' It will be called once the result of the deferred is known, with its
#' result. (The result is omitted if it has no arguments.)
#'
#' `$then()` creates another deferred value, that will resolve to the
#' result of the `on_fulfilled` callback. Should this callback return
#' with a deferred value, then the `$then()` deferred value will be a
#' child of this newly created deferred, and only resolve after that.
#'
#' See also [when_all()], [when_some()] and [when_any()], which can combine
#' multiple deferred values into one.
#'
#' You cannot call `$then()` (or [when_any()], [when_all()], etc.) on the
#' same deferred value multiple times, unless it is a shared deferred
#' value. See _Ownership_ below.
#'
#' The [async_reflect()], [async_retry()], [async_sequence()],
#' [async_timeout()], [async_until()] and [async_whilst()] functions are
#' helpers for more complex async control flow.
#'
#' @section Ownership:
#'
#' async follows a strong ownership model. Each deferred value must be
#' owned by exactly one other deferred value (unless they are shared, see
#' below).
#'
#' After a `dx2 <- dx$then()` call, the `dx` deferred is _owned_ by the
#' newly created deferred value. (The same applies to [when_any()], etc.)
#' This means that it is not possible to call `$then()` on the same
#' deferred value multiple times.
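#' For illustration, a minimal sketch of the ownership rule (not run; it
#' only uses `delay()`, `$then()`, `$share()`, [when_all()] and
#' [synchronise()], which are documented in this file):
#'
#' ```r
#' afun <- function() {
#'   dx <- delay(1/100)$share()
#'   # Without the $share() call above, the second $then() would fail,
#'   # because `dx` would already be owned by its first child.
#'   d1 <- dx$then(function(value) "first child")
#'   d2 <- dx$then(function(value) "second child")
#'   when_all(d1, d2)
#' }
#' synchronise(afun())
#' ```
#'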
#' The deferred value that is synchronized
#' by calling [synchronise()] on it, is owned by [synchronise()], see
#' _Synchronization_ below.
#'
#' The deferred values of an async phase form a directed graph, which we
#' call the async DAG (directed, acyclic graph). Usually (when no deferred
#' is shared, see below), this DAG is a rooted tree; the root of the tree
#' is the synchronised deferred, the final result of the async phase.
#'
#' @section Shared Deferred Values:
#'
#' In the rare cases when the strong ownership model is too restrictive,
#' a deferred value can be marked as _shared_:
#'
#' ```
#' dx$share()
#' ```
#'
#' This has the following implications:
#' * A shared deferred value can have multiple children (owners) in the
#'   async DAG.
#' * A shared deferred value is started after its first child is started.
#' * A shared deferred value is not auto-cancelled when all of its children
#'   are finished. (Because it might have more children in the future.)
#' * A shared deferred value is still auto-cancelled at the end of the
#'   event loop.
#'
#' Use shared deferred values sparingly, only when they are really needed,
#' as they forbid auto-cancellation, so deferred values will hold on to
#' resources longer, until the async phase is finished.
#'
#' @section Synchronization:
#'
#' async allows embedding asynchronous computation in synchronous code.
#' The execution of such a program has a sync phase and async phases. When the
#' program starts, it is in the sync phase. In the sync phase you cannot
#' create deferred values. (But you can still define (async) functions that
#' will create deferred values when called.)
#'
#' To enter into an async phase, call [synchronise()] on an expression that
#' evaluates to a deferred value. The async phase will last until this
#' deferred value is computed or an error is thrown (and the error reaches
#' [synchronise()]).
#'
#' [synchronise()] creates an event loop, which manages the computation of
#' the deferred values in this particular async phase.
#'
#' Async phases can be embedded into each other. I.e. a program may call
#' [synchronise()] while in the async phase. The outer async phase's event
#' loop then stops until the inner async phase terminates. Deferred values
#' cannot be passed through a `synchronise()` barrier, to another (sync or
#' async) phase. Should this happen, an error is reported on the first
#' operation on the leaked deferred value.
#'
#' In a typical application, a function is implemented asynchronously, and
#' then used synchronously by the interactive user, or another piece of
#' synchronous code, via [synchronise()] calls. The following example makes
#' three HTTP requests in parallel:
#'
#' ```
#' http_status3 <- function() {
#'   http_status <- function(url) {
#'     http_get(url)$then(function(response) response$status_code)
#'   }
#'   r1 <- http_status("https://eu.httpbin.org/status/403")
#'   r2 <- http_status("https://eu.httpbin.org/status/404")
#'   r3 <- http_status("https://eu.httpbin.org/status/200")
#'   when_all(r1, r2, r3)
#' }
#' synchronise(http_status3())
#' ```
#'
#' This async function can also be used asynchronously, as a parent of
#' another deferred value, in an async phase.
#'
#' @section Lazy evaluation:
#'
#' async does not evaluate deferred values that are not part of the async
#' DAG of the async phase. These are clearly not needed to compute the
#' result of the async phase, so it would be a waste of resources working on
#' them. (It is also unclear how their errors should be handled.)
#'
#' In the following example, `d1` and `d2` are created, but they are not
#' part of the async DAG, so they are never evaluated.
#'
#' ```
#' do <- function() {
#'   d1 <- delay(1/100)$then(function() print("d1"))
#'   d2 <- d1$then(function() print("d2"))
#'   d3 <- delay(1/100)$then(function() print("d3"))
#'   d4 <- d3$then(function() print("d4"))
#'   d4
#' }
#' invisible(synchronise(do()))
#' ```
#'
#' @section Cancellation:
#'
#' The computation of a deferred can be cancelled when it is not needed
#' any more:
#'
#' ```
#' dx$cancel(reason = "Cancelled")
#' ```
#'
#' This will _fail_ the children of the deferred, unless they have been
#' completed already. It will also auto-cancel the parent DAG of the
#' deferred, unless they are shared deferreds, see the next section.
#'
#' @section Auto-cancellation:
#'
#' In an async phase, it might happen that parts of the async DAG are not
#' needed for the final result any more. E.g. if a parent of a `when_all()`
#' node throws an error, then the other parents don't have to be computed.
#' In this case the event loop of the phase automatically cancels these
#' deferred values. Similarly, if a single parent of a [when_any()] node is
#' resolved, the other parents can be cancelled.
#'
#' In general, if a node of the async DAG is resolved, the whole directed
#' DAG, rooted at that node, can be cancelled (except for nodes that were
#' already resolved and nodes that have already failed).
#'
#' Auto-cancellation is very convenient, as you can be sure that resources
#' are freed as soon as they are not needed. Some practical examples:
#'
#' * Making HTTP requests to many mirror web sites, to check their response
#'   time. As soon as the first reply is in, the rest of the HTTP requests
#'   are cancelled.
#' * In multi-process computation, as soon as one process fails, the rest are
#'   automatically cancelled. (Unless the failure is handled, of course.)
#'
#' async also has another type of cancellation, when [synchronise()] is
#' interrupted externally, either by the user or some system error. In this
#' case all processes and resources that were created in the event loop
#' are cancelled and freed.
#'
#' Shared deferred values (see `$share()`) are not auto-cancelled when their
#' children are resolved or errored, but they are always cancelled at the
#' end of the async phase.
#'
#' @section Progress bars:
#'
#' A deferred value may report on its progress, if its action has a progress
#' callback. The progress callback is called with a list that describes
#' an event. We suggest that it always has an `event` entry, which is a
#' simple string. The rest of the list entries can be defined as needed,
#' but typically there will be a counter counting ticks, or a ratio
#' describing what part of the computation is already done. See [http_get()]
#' for an async function that reports progress.
#'
#' @section Collections helper functions:
#'
#' async provides some utilities that make it easier to deal with
#' collections of deferred values:
#'
#' The current iterators:
#' * [async_map()] applies an async function to all elements of a vector or
#'   list (collection).
#' * [async_detect()] finds an element of a collection that passes an async
#'   truth test.
#' * [async_every()] checks if every element of a collection satisfies an
#'   async predicate. [async_some()] checks if any element does that.
#' * [async_filter()] keeps elements that pass an async truth test.
#'
#' @section Control flow helper functions:
#'
#' Control flow with deferred values can be challenging.
Some helpers: #' * [async_reflect()] creates an async function that always succeeds. #' This is useful if you want to apply it to a collection, and don't #' want to stop at the first error. #' * [async_retry()] tries an async function a number of times. #' [async_retryable()] turns a regular function into a retryable one. #' * [async_sequence()] chains two async functions. Calling their sequence #' is equivalent calling '$then()` on them, but [async_sequence()] is #' easier to use programmatically. #' * [async_until()] and [async_whilst()] let you call an async function #' repeatedly, until or while a (syncronous) condition holds. #' * [async_timeout()] runs an async function with a timeout. #' #' @section Examples: #' Please see the README and the vignettes for examples. #' @name deferred #' @noRd NULL #' @importFrom R6 R6Class #' @noRd deferred <- R6Class( "deferred", public = list( initialize = function(action = NULL, on_progress = NULL, on_cancel = NULL, parents = NULL, parent_resolve = NULL, parent_reject = NULL, type = NULL, call = sys.call(-1), event_emitter = NULL) async_def_init(self, private, action, on_progress, on_cancel, parents, parent_resolve, parent_reject, type, call, event_emitter), then = function(on_fulfilled) def_then(self, private, on_fulfilled), catch = function(...) def_catch(self, private, ...), finally = function(on_finally) def_finally(self, private, on_finally), cancel = function(reason = "Cancelled") def_cancel(self, private, reason), share = function() { private$shared <<- TRUE; invisible(self) }, event_emitter = NULL ), private = list( action = NULL, running = FALSE, id = NULL, type = NULL, state = c("pending", "fulfilled", "rejected")[1], event_loop = NULL, value = NULL, children = list(), progress_callback = NULL, cancel_callback = NULL, cancelled = FALSE, dead_end = FALSE, parents = NULL, parent_resolve = NULL, parent_reject = NULL, shared = FALSE, mycall = NULL, run_action = function() def__run_action(self, private), null = function() def__null(self, private), resolve = function(value) def__resolve(self, private, value), reject = function(reason) def__reject(self, private, reason), progress = function(data) def__progress(self, private, data), make_error_object = function(err) def__make_error_object(self, private, err), maybe_cancel_parents = function(reason) def__maybe_cancel_parents(self, private, reason), add_as_parent = function(child) def__add_as_parent(self, private, child), update_parent = function(old, new) def__update_parent(self, private, old, new), get_info = function() def__get_info(self, private) ) ) async_def_init <- function(self, private, action, on_progress, on_cancel, parents, parent_resolve, parent_reject, type, call, event_emitter) { private$type <- type private$id <- get_id() private$event_loop <- get_default_event_loop() private$parents <- parents private$action <- action private$mycall <- call self$event_emitter <- event_emitter "!DEBUG NEW `private$id` (`type`)" assert_that(is.null(on_progress) || is.function(on_progress)) private$progress_callback <- on_progress assert_that(is.null(on_cancel) || is.function(on_cancel)) private$cancel_callback <- on_cancel ## Handle the parents private$parent_resolve <- def__make_parent_resolve(parent_resolve) private$parent_reject <- def__make_parent_reject(parent_reject) for (prt in parents) { prt_pvt <- get_private(prt) prt_pvt$add_as_parent(self) } invisible(self) } def__run_action <- function(self, private) { if (private$running) return() action <- private$action private$running <- TRUE 
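  ## The action was saved into the local `action` above; clear the field so
  ## the deferred holds no reference to it once it has been started.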
private$action <- NULL "!DEBUG ACTION `private$type` `private$id`" if (!is.null(action)) { if (!is.function(action)) { action <- as.function(action) formals(action) <- alist(resolve = NULL, progress = NULL) } assert_that(is_action_function(action)) action_args <- names(formals(action)) args <- list(private$resolve) if (!is.na(pr_arg <- match("progress", action_args))) { args$progress <- private$progress } private$event_loop$add_next_tick( function() { if (isTRUE(getOption("async_debug_steps", FALSE))) debug1(action) `__async_data__` <- list(private$id, "action", self, skip = 2L) do.call(action, args) }, function(err, res) if (!is.null(err)) private$reject(err)) } ## If some parents are done, we want them to notify us. ## We also start the ones that are not running yet for (prt in private$parents) { prt_priv <- get_private(prt) if (prt_priv$state != "pending") { def__call_then( if (prt_priv$state == "fulfilled") "parent_resolve" else "parent_reject", self, prt_priv$value) } prt_priv$run_action() } } def_then <- function(self, private, on_fulfilled = NULL, on_rejected = NULL) { force(self) force(private) if (! identical(private$event_loop, get_default_event_loop())) { err <- make_error( "Cannot create deferred chain across synchronization barrier", class = "async_synchronization_barrier_error") stop(err) } if (!is_deferred(on_fulfilled)) { parent_resolve <- def__make_parent_resolve(on_fulfilled) parent_reject <- def__make_parent_reject(on_rejected) deferred$new(parents = list(self), type = paste0("then-", private$id), parent_resolve = parent_resolve, parent_reject = parent_reject, call = sys.call(-1)) } else { private$add_as_parent(on_fulfilled) child_private <- get_private(on_fulfilled) child_private$parents <- c(child_private$parents, self) self } } def_catch <- function(self, private, ...) { def_then(self, private, on_rejected = list(...)) } def_finally <- function(self, private, on_finally) { force(on_finally) def_then( self, private, on_fulfilled = function(value) { on_finally() value }, on_rejected = function(reason) { on_finally() stop(reason) } ) } def_cancel <- function(self, private, reason) { if (private$state != "pending") return() cancel_cond <- structure( list(message = reason %||% "Deferred computation cancelled", call = NULL), class = c("async_cancelled", "error", "condition") ) private$reject(cancel_cond) invisible(self) } def__null <- function(self, private) { self$.__enclos_env__$private$dead_end <- TRUE invisible(self) } def__resolve <- function(self, private, value) { if (private$cancelled) return() if (private$state != "pending") return() if (is_deferred(value)) { private$parent_resolve <- def__make_parent_resolve(NULL) private$parent_reject <- def__make_parent_reject(NULL) # we need this in case self was shared and had multiple children val_pvt <- get_private(value) val_pvt$id <- private$id val_pvt$shared <- private$shared val_pvt$dead_end <- private$dead_end # This should not happen, though for (child in private$children) { ch_pvt <- get_private(child) ch_pvt$update_parent(self, value) } val_pvt$run_action() } else { if (!private$dead_end && !length(private$children) && !private$shared) { ## This cannot happen currently "!DEBUG ??? 
DEAD END `private$id`" # nocov warning("Computation going nowhere...") # nocov } "!DEBUG +++ RESOLVE `private$id`" private$state <- "fulfilled" private$value <- value for (child in private$children) { def__call_then("parent_resolve", child, value) } private$maybe_cancel_parents(private$value) private$parents <- NULL } } #' Create an error object for a rejected deferred computation #' #' * Make sure that the error is an error object. #' * Make sure that the error has the correct classes. #' #' @param self self #' @param private private self #' @return error object #' #' @noRd #' @keywords internal def__make_error_object <- function(self, private, err) { class(err) <- unique(c("async_rejected", class(err))) err } def__make_parent_resolve <- function(fun) { if (is.null(fun)) { function(value, resolve) resolve(value) } else if (!is.function(fun)) { fun <- as.function(fun) function(value, resolve) resolve(fun(value)) } else if (num_args(fun) == 0) { function(value, resolve) resolve(fun()) } else if (num_args(fun) == 1) { function(value, resolve) resolve(fun(value)) } else if (identical(names(formals(fun)), c("value", "resolve"))) { fun } else { stop("Invalid parent_resolve callback") } } def__make_parent_reject <- function(fun) { if (is.null(fun)) { function(value, resolve) stop(value) } else if (is.list(fun)) { def__make_parent_reject_catch(fun) } else if (!is.function(fun)) { fun <- as.function(fun) function(value, resolve) resolve(fun(value)) } else if (num_args(fun) == 0) { function(value, resolve) resolve(fun()) } else if (num_args(fun) == 1) { function(value, resolve) resolve(fun(value)) } else if (identical(names(formals(fun)), c("value", "resolve"))) { fun } else { stop("Invalid parent_reject callback") } } def__make_parent_reject_catch <- function(handlers) { handlers <- lapply(handlers, as.function) function(value, resolve) { ok <- FALSE ret <- tryCatch({ quo <- as.call(c(list(quote(tryCatch), quote(stop(value))), handlers)) ret <- eval(quo) ok <- TRUE ret }, error = function(x) x) if (ok) resolve(ret) else stop(ret) } } def__reject <- function(self, private, reason) { if (private$cancelled) return() if (private$state != "pending") return() ## 'reason' cannot be a deferred here "!DEBUG !!! REJECT `private$id`" private$state <- "rejected" private$value <- private$make_error_object(reason) if (inherits(private$value, "async_cancelled")) { private$cancelled <- TRUE } if (!is.null(private$cancel_callback)) { private$cancel_callback(conditionMessage(private$value)) } for (child in private$children) { def__call_then("parent_reject", child, private$value) } private$maybe_cancel_parents(private$value) private$parents <- NULL } def__maybe_cancel_parents <- function(self, private, reason) { for (parent in private$parents) { if (is.null(parent)) next parent_priv <- get_private(parent) if (parent_priv$state != "pending") next if (parent_priv$shared) next parent$cancel(reason) } } def__call_then <- function(which, x, value) { force(value); private <- get_private(x) if (!private$running) return() if (private$state != "pending") return() cb <- private[[which]] private$event_loop$add_next_tick( function() { if (isTRUE(getOption("async_debug_steps", FALSE))) { debug1(private[[which]]) # nocov } `__async_data__` <- list(private$id, "parent", x) private[[which]](value, private$resolve) }, function(err, res) if (!is.null(err)) private$reject(err)) } def__add_as_parent <- function(self, private, child) { "!DEBUG EDGE [`private$id` -> `get_private(child)$id`]" if (! 
identical(private$event_loop, get_private(child)$event_loop)) { err <- make_error( "Cannot create deferred chain across synchronization barrier", class = "async_synchronization_barrier_error") stop(err) } if (length(private$children) && !private$shared) { stop("Deferred value is already owned") } private$children <- c(private$children, child) if (get_private(child)$running) private$run_action() if (private$state == "pending") { ## Nothing to do } else if (private$state == "fulfilled") { def__call_then("parent_resolve", child, private$value) } else { def__call_then("parent_reject", child, private$value) } } def__update_parent <- function(self, private, old, new) { for (i in seq_along(private$parents)) { if (identical(private$parents[[i]], old)) { private$parents[[i]] <- new } } new_pvt <- get_private(new) new_pvt$add_as_parent(self) } def__progress <- function(self, private, data) { if (private$state != "pending") return() if (is.null(private$progress_callback)) return() private$progress_callback(data) } def__get_info <- function(self, private) { res <- data.frame( stringsAsFactors = FALSE, id = private$id, parents = I(list(viapply(private$parents, function(x) get_private(x)$id))), label = as.character(private$id), call = I(list(private$mycall)), children = I(list(viapply(private$children, function(x) get_private(x)$id))), type = private$type %||% "unknown", running = private$running, state = private$state, cancelled = private$cancelled, shared = private$shared ) src <- get_source_position(private$mycall) res$filename <- src$filename res$position <- src$position res$label <- paste0( res$id, " ", if (private$state == "fulfilled") paste0(cli::symbol$tick, " "), if (private$state == "rejected") paste0(cli::symbol$cross, " "), deparse(private$mycall)[1], " @ ", res$filename, ":", res$position) res } #' Is object a deferred value? #' #' @param x object #' @return Whether it is a deferred value. #' #' @noRd #' @examples #' is_deferred(1:10) #' afun <- function() { #' print(is_deferred(dx <- delay(1/100))) #' dx #' } #' synchronise(afun()) is_deferred <- function(x) { inherits(x, "deferred") } #' Delay async computation for the specified time #' #' Since R is single-threaded, the deferred value might be resolved (much) #' later than the specified time period. #' #' @param delay Time interval in seconds, the amount of time to delay #' to delay the execution. It can be a fraction of a second. #' @return A deferred object. #' #' @noRd #' @examples #' \donttest{ #' ## Two HEAD requests with 1/2 sec delay between them #' resp <- list() #' afun <- async(function() { #' http_head("https://eu.httpbin.org?q=2")$ #' then(function(value) resp[[1]] <<- value$status_code)$ #' then(function(...) delay(1/2))$ #' then(function(...) http_head("https://eu.httpbin.org?q=2"))$ #' then(function(value) resp[[2]] <<- value$status_code) #' }) #' synchronise(afun()) #' resp #' } delay <- function(delay) { force(delay) id <- NULL deferred$new( type = "delay", call = sys.call(), action = function(resolve) { assert_that(is_time_interval(delay)) force(resolve) id <<- get_default_event_loop()$add_delayed( delay, function() TRUE, function(err, res) resolve(TRUE) ) }, on_cancel = function(reason) { if (!is.null(id)) get_default_event_loop()$cancel(id) } ) } delay <- mark_as_async(delay) #' Find the value of a match, asynchronously #' #' All predicates are running in parallel, and the returned match #' is not guaranteed to be the first one. #' #' @param .x A list or atomic vector. #' @param .p An asynchronous predicate function. 
#' @param ... Additional arguments to the predicate function. #' @param .limit Number of elements to process simulateneously. #' If it is 1, then the predicate is applied sequentially. #' @return A deferred value for the result. #' #' @family async iterators #' @noRd #' @examples #' \donttest{ #' synchronise(async_detect( #' c("https://eu.httpbin.org/status/404", "https://eu.httpbin.org", #' "https://eu.httpbin.org/status/403"), #' async_sequence(http_head, function(x) x$status_code == 200) #' )) #' } async_detect <- function(.x, .p, ..., .limit = Inf) { if (.limit < length(.x)) { async_detect_limit(.x, .p, ..., .limit = .limit) } else { async_detect_nolimit(.x, .p, ...) } } async_detect <- mark_as_async(async_detect) async_detect_nolimit <- function(.x, .p, ...) { defs <- lapply(.x, async(.p), ...) nx <- length(defs) done <- FALSE self <- deferred$new( type = "async_detect", call = sys.call(), action = function(resolve) { lapply(seq_along(defs), function(idx) { defs[[idx]]$then(function(val) if (isTRUE(val)) idx)$then(self) }) if (nx == 0) resolve(NULL) }, parent_resolve = function(value, resolve) { if (!done && !is.null(value)) { done <<- TRUE resolve(.x[[value]]) } else if (!done) { nx <<- nx - 1L if (nx == 0) resolve(NULL) } } ) } async_detect_limit <- function(.x, .p, ..., .limit = .limit) { len <- length(.x) nx <- len .p <- async(.p) args <- list(...) done <- FALSE nextone <- .limit + 1L firsts <- lapply(.x[seq_len(.limit)], .p, ...) self <- deferred$new( type = "async_detect (limit)", call = sys.call(), action = function(resolve) { lapply(seq_along(firsts), function(idx) { firsts[[idx]]$then(function(val) if (isTRUE(val)) idx)$then(self) }) if (nx == 0) resolve(NULL) }, parent_resolve = function(value, resolve) { if (!done && !is.null(value)) { done <<- TRUE resolve(.x[[value]]) } else if (!done) { nx <<- nx - 1L if (nx == 0) { resolve(NULL) } else if (nextone <= len) { idx <- nextone dx <- .p(.x[[nextone]], ...) 
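          ## Map a TRUE predicate result to this element's index and feed it
          ## back into the collector deferred, just like the initial batch.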
dx$then(function(val) if (isTRUE(val)) idx)$then(self) nextone <<- nextone + 1L } } } ) self } #' @importFrom R6 R6Class event_loop <- R6Class( "event_loop", public = list( initialize = function() el_init(self, private), add_http = function(handle, callback, file = NULL, progress = NULL, data = NULL) el_add_http(self, private, handle, callback, file, progress, data), http_setopt = function(total_con = NULL, host_con = NULL, multiplex = NULL) el_http_setopt(self, private, total_con, host_con, multiplex), add_process = function(conns, callback, data) el_add_process(self, private, conns, callback, data), add_r_process = function(conns, callback, data) el_add_r_process(self, private, conns, callback, data), add_pool_task = function(callback, data) el_add_pool_task(self, private, callback, data), add_delayed = function(delay, func, callback, rep = FALSE) el_add_delayed(self, private, delay, func, callback, rep), add_next_tick = function(func, callback, data = NULL) el_add_next_tick(self, private, func, callback, data), cancel = function(id) el_cancel(self, private, id), cancel_all = function() el_cancel_all(self, private), run = function(mode = c("default", "nowait", "once")) el_run(self, private, mode = match.arg(mode)), suspend = function() el_suspend(self, private), wakeup = function() el_wakeup(self, private) ), private = list( create_task = function(callback, ..., id = NULL, type = "foobar") el__create_task(self, private, callback, ..., id = id, type = type), ensure_pool = function() el__ensure_pool(self, private), get_poll_timeout = function() el__get_poll_timeout(self, private), run_pending = function() el__run_pending(self, private), run_timers = function() el__run_timers(self, private), is_alive = function() el__is_alive(self, private), update_time = function() el__update_time(self, private), io_poll = function(timeout) el__io_poll(self, private, timeout), update_curl_data = function() el__update_curl_data(self, private), id = NULL, time = Sys.time(), stop_flag = FALSE, tasks = list(), timers = Sys.time()[numeric()], pool = NULL, curl_fdset = NULL, # return value of multi_fdset() curl_poll = TRUE, # should we poll for curl sockets? curl_timer = NULL, # call multi_run() before this next_ticks = character(), worker_pool = NULL, http_opts = NULL ) ) el_init <- function(self, private) { private$id <- new_event_loop_id() invisible(self) } el_add_http <- function(self, private, handle, callback, progress, file, data) { self; private; handle; callback; progress; outfile <- file; data id <- private$create_task(callback, list(handle = handle, data = data), type = "http") private$ensure_pool() if (!is.null(outfile)) cat("", file = outfile) content <- NULL curl::multi_add( handle = handle, pool = private$pool, done = function(response) { task <- private$tasks[[id]] task$data$data$event_emitter$emit("end") private$tasks[[id]] <- NULL response$content <- do.call(c, as.list(content)) response$file <- outfile task$callback(NULL, response) }, data = function(bytes, ...) { task <- private$tasks[[id]] task$data$data$event_emitter$emit("data", bytes) if (!is.null(outfile)) { ## R runs out of connections very quickly, especially because they ## are not removed until a gc(). However, calling gc() is ## expensive, so we only do it if we have to. This is a temporary ## solution until we can use our own connections, that are not ## so limited in their numbers. 
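        ## Append this chunk to the output file; if opening the connection
        ## fails, run gc() once to release finalized connections and retry.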
con <- tryCatch( file(outfile, open = "ab"), error = function(e) { gc(); file(outfile, open = "ab") } # nocov ) writeBin(bytes, con) close(con) } else { content <<- c(content, list(bytes)) } }, fail = function(error) { task <- private$tasks[[id]] private$tasks[[id]] <- NULL error <- make_error(message = error) class(error) <- unique(c("async_rejected", "async_http_error", class(error))) task$callback(error, NULL) } ) id } el_add_process <- function(self, private, conns, callback, data) { self; private; conns; callback; data data$conns <- conns private$create_task(callback, data, type = "process") } el_add_r_process <- function(self, private, conns, callback, data) { self; private; conns; callback; data data$conns <- conns private$create_task(callback, data, type = "r-process") } el_add_pool_task <- function(self, private, callback, data) { self; private; callback; data id <- private$create_task(callback, data, type = "pool-task") if (is.null(async_env$worker_pool)) { async_env$worker_pool <- worker_pool$new() } async_env$worker_pool$add_task(data$func, data$args, id, private$id) id } el_add_delayed <- function(self, private, delay, func, callback, rep) { force(self); force(private); force(delay); force(func); force(callback) force(rep) id <- private$create_task( callback, data = list(delay = delay, func = func, rep = rep), type = "delayed" ) # This has to be real time, because our event loop time might # be very much in the past when his is called. private$timers[id] <- Sys.time() + as.difftime(delay, units = "secs") id } el_add_next_tick <- function(self, private, func, callback, data) { force(self) ; force(private) ; force(callback); force(data) data$func <- func id <- private$create_task(callback, data = data, type = "nexttick") private$next_ticks <- c(private$next_ticks, id) } el_cancel <- function(self, private, id) { private$next_ticks <- setdiff(private$next_ticks, id) private$timers <- private$timers[setdiff(names(private$timers), id)] if (id %in% names(private$tasks) && private$tasks[[id]]$type == "http") { curl::multi_cancel(private$tasks[[id]]$data$handle) } else if (id %in% names(private$tasks) && private$tasks[[id]]$type %in% c("process", "r-process")) { private$tasks[[id]]$data$process$kill() } else if (id %in% names(private$tasks) && private$tasks[[id]]$type == "pool-task") { async_env$worker_pool$cancel_task(id) } private$tasks[[id]] <- NULL invisible(self) } el_cancel_all <- function(self, private) { http <- curl::multi_list(pool = private$pool) lapply(http, curl::multi_cancel) private$next_ticks <- character() private$timers <- Sys.time()[numeric()] ## Need to cancel pool tasks, these are interrupts for the workers types <- vcapply(private$tasks, "[[", "type") ids <- vcapply(private$tasks, "[[", "id") for (id in ids[types == "pool-task"]) { self$cancel(id) } private$tasks <- list() invisible(self) } el_run <- function(self, private, mode) { ## This is closely modeled after the libuv event loop, on purpose, ## because some time we might switch to that. alive <- private$is_alive() if (! 
alive) private$update_time() while (alive && !private$stop_flag) { private$update_time() private$update_curl_data() private$run_timers() ran_pending <- private$run_pending() ## private$run_idle() ## private$run_prepare() timeout <- 0 if ((mode == "once" && !ran_pending) || mode == "default") { timeout <- private$get_poll_timeout() } private$io_poll(timeout) ## private$run_check() ## private$run_closing_handles() if (mode == "once") { ## If io_poll returned without doing anything, that means that ## we have some timers that are due, so run those. ## At this point we have surely made progress private$update_time() private$run_timers() } alive <- private$is_alive() if (mode == "once" || mode == "nowait") break } private$stop_flag <- FALSE alive } el_suspend <- function(self, private) { ## TODO } el_wakeup <- function(self, private) { ## TODO } el__run_pending <- function(self, private) { next_ticks <- private$next_ticks private$next_ticks <- character() for (id in next_ticks) { task <- private$tasks[[id]] private$tasks[[id]] <- NULL call_with_callback(task$data$func, task$callback, info = task$data$error_info) } ## Check for workers from the pool finished before, while another ## event loop was active finished_pool <- FALSE pool <- async_env$worker_pool if (!is.null(pool)) { done_pool <- pool$list_tasks(event_loop = private$id, status = "done") finished_pool <- nrow(done_pool) > 0 for (tid in done_pool$id) { task <- private$tasks[[tid]] private$tasks[[tid]] <- NULL res <- pool$get_result(tid) err <- res$error res <- res[c("result", "stdout", "stderr")] task$callback(err, res) } } length(next_ticks) > 0 || finished_pool } el__io_poll <- function(self, private, timeout) { types <- vcapply(private$tasks, "[[", "type") ## The things we need to poll, and their types ## We put the result here as well pollables <- data.frame( stringsAsFactors = FALSE, id = character(), pollable = I(list()), type = character(), ready = character() ) ## HTTP. if (private$curl_poll) { curl_pollables <- data.frame( stringsAsFactors = FALSE, id = "curl", pollable = I(list(processx::curl_fds(private$curl_fdset))), type = "curl", ready = "silent") pollables <- rbind(pollables, curl_pollables) } ## Processes proc <- types %in% c("process", "r-process") if (sum(proc)) { conns <- unlist(lapply( private$tasks[proc], function(t) t$data$conns), recursive = FALSE) proc_pollables <- data.frame( stringsAsFactors = FALSE, id = names(private$tasks)[proc], pollable = I(conns), type = types[proc], ready = rep("silent", sum(proc))) pollables <- rbind(pollables, proc_pollables) } ## Pool px_pool <- if (!is.null(async_env$worker_pool)) { async_env$worker_pool$get_poll_connections() } if (length(px_pool)) { pool_pollables <- data.frame( stringsAsFactors = FALSE, id = names(px_pool), pollable = I(px_pool), type = rep("pool", length(px_pool)), ready = rep("silent", length(px_pool))) pollables <- rbind(pollables, pool_pollables) } if (!is.null(private$curl_timer) && private$curl_timer <= private$time) { curl::multi_run(timeout = 0L, poll = TRUE, pool = private$pool) private$curl_timer <- NULL } if (nrow(pollables)) { ## OK, ready to poll pollables$ready <- unlist(processx::poll(pollables$pollable, timeout)) ## Any HTTP? 
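    ## If the curl file descriptors signalled activity, let curl process its
    ## sockets; timeout = 0L and poll = TRUE keep this step non-blocking.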
if (private$curl_poll && pollables$ready[match("curl", pollables$type)] == "event") { curl::multi_run(timeout = 0L, poll = TRUE, pool = private$pool) } ## Any processes proc_ready <- pollables$type %in% c("process", "r-process") & pollables$ready == "ready" for (id in pollables$id[proc_ready]) { p <- private$tasks[[id]] private$tasks[[id]] <- NULL ## TODO: this should be async p$data$process$wait(1000) p$data$process$kill() res <- list( status = p$data$process$get_exit_status(), stdout = read_all(p$data$stdout, p$data$encoding), stderr = read_all(p$data$stderr, p$data$encoding), timeout = FALSE ) error <- FALSE if (p$type == "r-process") { res$result <- tryCatch({ p$data$process$get_result() }, error = function(e) { error <<- TRUE; e }) } unlink(c(p$data$stdout, p$data$stderr)) if (p$data$error_on_status && (error || res$status != 0)) { err <- make_error("process exited with non-zero status") err$data <- res res <- NULL } else { err <- NULL } p$callback(err, res) } ## Worker pool pool_ready <- pollables$type == "pool" & pollables$ready == "ready" if (sum(pool_ready)) { pool <- async_env$worker_pool done <- pool$notify_event(as.integer(pollables$id[pool_ready]), event_loop = private$id) mine <- intersect(done, names(private$tasks)) for (tid in mine) { task <- private$tasks[[tid]] private$tasks[[tid]] <- NULL res <- pool$get_result(tid) err <- res$error res <- res[c("result", "stdout", "stderr")] task$callback(err, res) } } } else if (length(private$timers) || !is.null(private$curl_timer)) { Sys.sleep(timeout / 1000) } } el__create_task <- function(self, private, callback, data, ..., id, type) { id <- id %||% get_uuid() private$tasks[[id]] <- list( type = type, id = id, callback = callback, data = data, error = NULL, result = NULL ) id } el__ensure_pool <- function(self, private) { getopt <- function(nm) { anm <- paste0("async_http_", nm) if (!is.null(v <- getOption(anm))) return(v) if (!is.na(v <- Sys.getenv(toupper(anm), NA_character_))) return(v) NULL } if (is.null(private$pool)) { private$http_opts <- list( total_con = getopt("total_con") %||% 100, host_con = getopt("host_con") %||% 6, multiplex = getopt("multiplex") %||% TRUE ) private$pool <- curl::new_pool( total_con = private$http_opts$total_con, host_con = private$http_opts$host_con, multiplex = private$http_opts$multiplex ) } } el_http_setopt <- function(self, private, total_con, host_con, multiplex) { private$ensure_pool() if (!is.null(total_con)) private$http_opts$total_con <- total_con if (!is.null(host_con)) private$http_opts$host_con <- host_con if (!is.null(multiplex)) private$http_opts$multiplex <- multiplex curl::multi_set( pool = private$pool, total_con = private$http_opts$total_con, host_con = private$http_opts$host_con, multiplex = private$http_opts$multiplex ) } el__get_poll_timeout <- function(self, private) { t <- if (length(private$next_ticks)) { ## TODO: can this happen at all? Probably not, but it does not hurt... 
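    ## Pending next-tick callbacks are due immediately, so do not block in poll().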
0 # nocov } else { max(0, min(Inf, private$timers - private$time)) } if (!is.null(private$curl_timer)) { t <- min(t, private$curl_timer - private$time) } t <- max(t, 0) if (is.finite(t)) as.integer(t * 1000) else -1L } el__run_timers <- function(self, private) { expired <- names(private$timers)[private$timers <= private$time] expired <- expired[order(private$timers[expired])] for (id in expired) { task <- private$tasks[[id]] if (private$tasks[[id]]$data$rep) { ## If it is repeated, then re-init private$timers[id] <- private$time + as.difftime(task$data$delay, units = "secs") } else { ## Otherwise remove private$tasks[[id]] <- NULL private$timers <- private$timers[setdiff(names(private$timers), id)] } call_with_callback(task$data$func, task$callback) } } el__is_alive <- function(self, private) { length(private$tasks) > 0 || length(private$timers) > 0 || length(private$next_ticks) > 0 } el__update_time <- function(self, private) { private$time <- Sys.time() } el__update_curl_data <- function(self, private) { private$curl_fdset <- curl::multi_fdset(private$pool) num_fds <- length(unique(unlist(private$curl_fdset[1:3]))) private$curl_poll <- num_fds > 0 private$curl_timer <- if ((t <- private$curl_fdset$timeout) != -1) { private$time + as.difftime(t / 1000.0, units = "secs") } } #' Generic Event Emitter #' #' This is a generic class that can be used to create event emitters. #' It is mostly modelled after the 'node.js' `EventEmitter` class #' #' @section Usage: #' ``` #' ee <- event_emitter$new(async = TRUE) #' ee$listen_on(event, callback) #' ee$listen_off(event, callback) #' ee$listen_once(event, callback) #' ee$emit(event, ...) #' ee$get_event_names() #' ee$get_listener_count(event) #' ee$remove_all_listeners(event) #' ``` #' #' @section Arguments: #' * `async`: Whether to call listeners asynchronously, i.e. in the next #' tick of the event loop. #' * `event`: String, name of the event. #' * `callback`: Function, listener to call when the event is emitted. #' Its arguments must match the arguments passed to the `$emit()` #' method. It is possible to add the same callback function multiple #' times as a listener. It will be called as many times, as many times #' it was added. #' * `...`: Arguments to pass to the listeners. They can be named or #' unnnamed. #' #' @section Details: #' #' `ee$listen_on()` adds `callback` as a new listener for `event`. It is #' always added to the end of the listener list. Listeners will be called in #' the order they were added. It returns a reference to the `event_emitter` #' object, so calls can be chained. #' #' `ee$listen_off()` removes the first instance of `callback` from the #' listener list of `event`. It uses [base::identical()] to find the #' listener to remove. If `callback` is not among the listeners, nothing #' happens. Note that if you call this method from an event handler, that #' does not affect the already emitted events. It returns a reference to #' the `event_emitter` object, so calls can be chained. #' #' `ee$listen_once` is similar to `ee$listen_on()`, but the callback will #' be only called for a single event, and then it will be removed. #' (Technically, the listener is removed before the callback is called.) #' It returns a reference to the `event_emitter` object, so calls can be #' chained. #' #' `ee$emit()` emits an event. All listeners in its listener list will be #' called, in the order they were added. The arguments are passed to the #' listeners, so they have to be compatible with them. 
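#'
#' A small sketch of how these methods compose (not run; it only uses the
#' methods documented here):
#'
#' ```r
#' ee <- event_emitter$new(async = FALSE)
#' ee$listen_on("message", function(text) cat("got:", text, "\n"))
#' ee$listen_once("message", function(text) cat("once:", text, "\n"))
#' ee$emit("message", "hello")   # both listeners are called
#' ee$emit("message", "again")   # only the permanent listener remains
#' ee$get_listener_count("message")
#' ```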
#' #' `ee$get_event_names()` returns the names of the active events, #' in a character vector. An event is active if it has at least one #' listener. #' #' `ee$get_listener_count()` returns the number of listeners for an event. #' #' `ee$remove_all_listener()` removes all listeners for an an event. #' #' @section Error handling: #' Errors are handled by special `error` events. If a listener errors, #' and the event emitter has an active `error` event (i.e. some listeners #' exist for `error`, then _all_ listeners are called, in the order they #' were specified. They receive the originally thrown error object as the #' single argument. The error object has an `event` entry, which contains #' the event name the failed listener was called on. #' #' If the event emitter does not have any listeners for the `error` event, #' then it throws an error. This error propagates to the next #' synchronization barrier, i.e. the last `synchronise()` or #' `run_event_loop()` call, which fails. #' #' In an error happen within an `error` listener, then the same happens, #' the last `synchronise()` or `run_event_loop()` call fails. You can #' wrap the body of the error listeners in a `tryCatch()` call, #' if you want to avoid this. #' #' @noRd #' @importFrom R6 R6Class event_emitter <- R6Class( "event_emitter", public = list( initialize = function(async = TRUE) ee_init(self, private, async), listen_on = function(event, callback) ee_listen_on(self, private, event, callback), listen_off = function(event, callback) ee_listen_off(self, private, event, callback), listen_once = function(event, callback) ee_listen_once(self, private, event, callback), emit = function(event, ...) ee_emit(self, private, event, ...), get_event_names = function() ee_get_event_names(self, private), get_listener_count = function(event) ee_get_listener_count(self, private, event), remove_all_listeners = function(event) ee_remove_all_listeners(self, private, event) ), private = list( lsts = NULL, async = NULL, cleanup_events = function() ee__cleanup_events(self, private), error_callback = function(err, res) ee__error_callback(self, private, err, res) ) ) ee_init <- function(self, private, async) { assert_that(is_flag(async)) private$lsts <- structure(list(), names = character()) private$async <- async invisible(self) } ee_listen_on <- function(self, private, event, callback) { assert_that(is_string(event), is.function(callback)) private$lsts[[event]] <- c(private$lsts[[event]], list(list(cb = callback, once = FALSE))) invisible(self) } ee_listen_off <- function(self, private, event, callback) { assert_that(is_string(event), is.function(callback)) for (idx in seq_along(private$lsts[[event]])) { if (identical(private$lsts[[event]][[idx]]$cb, callback)) { private$lsts[[event]] <- private$lsts[[event]][-idx] break } } invisible(self) } ee_listen_once <- function(self, private, event, callback) { assert_that(is_string(event), is.function(callback)) private$lsts[[event]] <- c(private$lsts[[event]], list(list(cb = callback, once = TRUE))) invisible(self) } ee_emit <- function(self, private, event, ...) { assert_that(is_string(event)) list(...) 
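  ## `list(...)` above forces the arguments. Below, one-shot (`listen_once`)
  ## listeners are removed from the list before any listener is called.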
tocall <- private$lsts[[event]] once <- vlapply(tocall, "[[", "once") if (any(once)) private$lsts[[event]] <- tocall[!once] ## a for loop is not good here, because it does not create ## a closure for lst lapply(tocall, function(lst) { lst if (private$async) { get_default_event_loop()$add_next_tick( function() lst$cb(...), private$error_callback, data = list(error_info = list(event = event))) } else { call_with_callback( function() lst$cb(...), private$error_callback, info = list(event = event)) } }) invisible(self) } ee_get_event_names <- function(self, private) { private$cleanup_events() names(private$lsts) } ee_get_listener_count <- function(self, private, event) { assert_that(is_string(event)) length(private$lsts[[event]]) } ee_remove_all_listeners <- function(self, private, event) { assert_that(is_string(event)) private$lsts[[event]] <- NULL invisible(self) } ee__cleanup_events <- function(self, private) { len <- viapply(private$lsts, length) private$lsts <- private$lsts[len > 0] } ee__error_callback <- function(self, private, err, res) { if (is.null(err)) return() tocall <- private$lsts[["error"]] once <- vlapply(tocall, "[[", "once") if (any(once)) private$lsts[["error"]] <- tocall[!once] if (length(tocall)) { for (lst in tocall) lst$cb(err) } else { stop(err) } } #' Do every or some elements of a list satisfy an asynchronous predicate? #' #' @param .x A list or atomic vector. #' @param .p An asynchronous predicate function. #' @param ... Additional arguments to the predicate function. #' @return A deferred value for the result. #' #' @family async iterators #' @noRd #' @examples #' # Check if all numbers are odd #' # Note the use of force() here. Otherwise x will be evaluated later, #' # and by then its value might change. #' is_odd <- async(function(x) { #' force(x) #' delay(1/1000)$then(function() as.logical(x %% 2)) #' }) #' synchronise(async_every(c(1,3,5,7,10,11), is_odd)) #' synchronise(async_every(c(1,3,5,7,11), is_odd)) async_every <- function(.x, .p, ...) { defs <- lapply(.x, async(.p), ...) nx <- length(defs) done <- FALSE deferred$new( type = "async_every", call = sys.call(), parents = defs, action = function(resolve) if (nx == 0) resolve(TRUE), parent_resolve = function(value, resolve) { if (!done && !isTRUE(value)) { done <<- TRUE resolve(FALSE) } else if (!done) { nx <<- nx - 1L if (nx == 0) resolve(TRUE) } } ) } async_every <- mark_as_async(async_every) #' Keep or drop elements using an asyncronous predicate function #' #' `async_filter` keep the elements for which `.p` is true. (Tested #' via `isTRUE()`. `async_reject` is the opposite, it drops them. #' #' @param .x A list or atomic vector. #' @param .p An asynchronous predicate function. #' @param ... Additional arguments to the predicate function. #' @return A deferred value for the result. #' #' @family async iterators #' @noRd #' @examples #' \donttest{ #' ## Filter out non-working URLs #' afun <- async(function(urls) { #' test_url <- async_sequence( #' http_head, function(x) identical(x$status_code, 200L)) #' async_filter(urls, test_url) #' }) #' urls <- c("https://eu.httpbin.org/get", #' "https://eu.httpbin.org/status/404") #' synchronise(afun(urls)) #' } async_filter <- function(.x, .p, ...) { when_all(.list = lapply(.x, async(.p), ...))$ then(function(res) .x[vlapply(res, isTRUE)]) } async_filter <- mark_as_async(async_filter) #' @rdname async_filter #' @noRd async_reject <- function(.x, .p, ...) { when_all(.list = lapply(.x, async(.p), ...))$ then(function(res) .x[! 
vlapply(res, isTRUE)]) } async_reject <- mark_as_async(async_reject) #' HTTP event emitter for server-sent events #' #' Server-sent events are a technique to stream events from a web server #' to a client, through an open HTTP connection. #' #' This class implements an event emitter on an async HTTP query created #' with [http_get()] and friends, that fires an `"event"` event when the #' server sends an event. An `"end"` event is emitted when the server #' closes the connection. #' #' An event is a named character vector, the names are the keys of the #' events. #' #' Example using our built-in toy web app: #' ```r #' http <- webfakes::new_app_process(async:::sseapp()) #' stream_events <- function() { #' query <- http_get(http$url("/sse")) #' sse <- sse_events$new(query) #' sse$ #' listen_on("event", function(event) { #' writeLines("Got an event:") #' print(event) #' })$ #' listen_on("end", function() { #' writeLines("Done.") #' }) #' query #' } #' #' response <- synchronise(stream_events()) #' ``` #' #' #' @noRd sse_events <- R6Class( "sse_events", inherit = event_emitter, public = list( initialize = function(http_handle) { super$initialize(async = FALSE) http_handle$event_emitter$listen_on("data", function(bytes) { private$data <- c(private$data, bytes) private$emit_events() }) http_handle$event_emitter$listen_on("end", function() { self$emit("end") }) } ), private = list( data = NULL, sep = as.raw(c(0xaL, 0xaL)), emit_events = function() { evs <- chunk_sse_events(private$data, private$sep) private$data <- evs$rest for (ev in evs$events) { self$emit("event", ev) } } ) ) chunk_sse_events <- function(data, sep = NULL) { # skip leading \n no <- 0L while (no <= length(data) && data[no + 1] == 0x0a) { no <- no + 1L } if (no > 0) { data <- data[(no + 1L):length(data)] } sep <- sep %||% as.raw(c(0xaL, 0xaL)) mtch <- grepRaw(sep, data, fixed = TRUE, all = TRUE) # shortcut for no events if (length(mtch) == 0) { return(list(events = list(), rest = data)) } events <- vector("list", length(mtch)) for (p in seq_along(mtch)) { from <- if (p == 1) 1L else mtch[p - 1] + 2L to <- mtch[p] - 1L events[[p]] <- parse_sse_event(data[from:to]) } events <- drop_nulls(events) restfrom <- mtch[length(mtch)] + 2L rest <- if (restfrom <= length(data)) { data[restfrom:length(data)] } else { raw() } list(events = events, rest = rest) } parse_sse_event <- function(data) { txt <- rawToChar(data) Encoding(txt) <- "UTF-8" lines <- strsplit(txt, "\n", fixed = TRUE)[[1]] lines <- lines[lines != ""] if (length(lines) == 0) { return(NULL) } keys <- sub(":.*$", "", lines) vals <- sub("^[^:]*:[ ]*", "", lines) structure(vals, names = keys) } drop_nulls <- function(x) { x[!vapply(x, is.null, logical(1))] } sseapp <- function() { app <- webfakes::new_app() app$get("/sse", function(req, res) { `%||%` <- function(l, r) if (is.null(l)) r else l if (is.null(res$locals$sse)) { duration <- as.double(req$query$duration %||% 2) delay <- as.double(req$query$delay %||% 0) numevents <- as.integer(req$query$numevents %||% 5) pause <- max(duration / numevents, 0.01) res$locals$sse <- list( sent = 0, numevents = numevents, pause = pause ) res$ set_header("cache-control", "no-cache")$ set_header("content-type", "text/event-stream")$ set_header("access-control-allow-origin", "*")$ set_header("connection", "keep-alive")$ set_status(200) if (delay > 0) { return(res$delay(delay)) } } msg <- paste0( "event: ", res$locals$sse$sent + 1L, "\n", "message: live long and prosper\n\n" ) res$locals$sse$sent <- res$locals$sse$sent + 1L res$write(msg) if 
(res$locals$sse$sent == res$locals$sse$numevents) { res$send("") } else { res$delay(res$locals$sse$pause) } }) } #' Asynchronous HTTP GET request #' #' Start an HTTP GET request in the background, and report its completion #' via a deferred. #' #' @section HTTP event emitters: #' An async HTTP deferred object is also an event emitter, see #' [event_emitter]. Use `$event_emitter` to access the event emitter API, #' and call `$event_emitter$listen_on()` etc. to listen on HTTP events, #' etc. #' #' * `"data"` is emitted when we receive data from the server, the data is #' passed on to the listeners as a raw vector. Note that zero-length #' raw vectors might also happen. #' * `"end"` is emitted at the end of the HTTP data stream, without #' additional arguments (Also on error.) #' #' Here is an example, that uses the web server from the webfakes #' package: #' ```r #' http <- webfakes::new_app_process(webfakes::httpbin_app()) #' stream_http <- function() { #' query <- http_get(http$url("/drip?duration=3&numbytes=10")) #' query$event_emitter$ #' listen_on("data", function(bytes) { #' writeLines(paste("Got", length(bytes), "byte(s):")) #' print(bytes) #' })$ #' listen_on("end", function() { #' writeLines("Done.") #' }) #' query #' } #' #' response <- synchronise(stream_http()) #' ``` #' #' @param url URL to connect to. #' @param headers HTTP headers to send. #' @param file If not `NULL`, it must be a string, specifying a file. #' The body of the response is written to this file. #' @param options Options to set on the handle. Passed to #' [curl::handle_setopt()]. #' @param on_progress Progress handler function. It is only used if the #' response body is written to a file. See details below. #' @return Deferred object. #' #' @section Progress bars: #' #' `http_get` can report on the progress of the download, via the #' `on_progress` argument. This is called with a list, with entries: #' * `url`: the specified url to download #' * `handle`: the curl handle of the request. This can be queried using #' [curl::handle_data()] to get the response status_code, the final #' URL (after redirections), timings, etc. #' * `file`: the `file` argument. #' * `total`: total bytes of the response. If this is unknown, it is set #' to zero. #' * `current`: already received bytes of the response. #' #' @family asyncronous HTTP calls #' @noRd #' @examples #' \donttest{ #' afun <- async(function() { #' http_get("https://eu.httpbin.org/status/200")$ #' then(function(x) x$status_code) #' }) #' synchronise(afun()) #' } http_get <- function(url, headers = character(), file = NULL, options = list(), on_progress = NULL) { url; headers; file; options; on_progress options <- get_default_curl_options(options) make_deferred_http( function() { assert_that(is_string(url)) handle <- curl::new_handle(url = url) curl::handle_setheaders(handle, .list = headers) if (!is.null(on_progress)) { options$noprogress <- FALSE fun <- options$progressfunction <- function(down, up) { on_progress(list( url = url, handle = handle, file = file, total = down[[1]], current = down[[2]] )) TRUE } ## This is a workaround for curl not PROTECT-ing the progress ## callback function reg.finalizer(handle, function(...) fun, onexit = TRUE) } curl::handle_setopt(handle, .list = options) list(handle = handle, options = options) }, file ) } http_get <- mark_as_async(http_get) #' Asynchronous HTTP HEAD request #' #' An async HTTP deferred object is also an event emitter, see #' [http_get()] for details, and also [event_emitter]. 
#' #' @inheritParams http_get #' @return Deferred object. #' #' @family asyncronous HTTP calls #' @noRd #' @examples #' \donttest{ #' afun <- async(function() { #' dx <- http_head("https://eu.httpbin.org/status/200")$ #' then(function(x) x$status_code) #' }) #' synchronise(afun()) #' #' # Check a list of URLs in parallel #' afun <- function(urls) { #' when_all(.list = lapply(urls, http_head))$ #' then(function(x) lapply(x, "[[", "status_code")) #' } #' urls <- c("https://google.com", "https://eu.httpbin.org") #' synchronise(afun(urls)) #' } http_head <- function(url, headers = character(), file = NULL, options = list(), on_progress = NULL) { url; headers; file; options; on_progress options <- get_default_curl_options(options) make_deferred_http( function() { assert_that(is_string(url)) handle <- curl::new_handle(url = url) curl::handle_setheaders(handle, .list = headers) curl::handle_setopt(handle, customrequest = "HEAD", nobody = TRUE, .list = options) list(handle = handle, options = options) }, file ) } http_head <- mark_as_async(http_head) #' Asynchronous HTTP POST request #' #' Start an HTTP POST request in the background, and report its completion #' via a deferred value. #' #' An async HTTP deferred object is also an event emitter, see #' [http_get()] for details, and also [event_emitter]. #' #' @inheritParams http_get #' @param data Data to send. Either a raw vector, or a character string #' that will be converted to raw with [base::charToRaw]. At most one of #' `data`, `data_file` and `data_form` can be non `NULL`. #' @param data_file Data file to send. At most one of `data`, `data_file` #' and `data_form` can be non `NULL`. #' @param data_form Form data to send. A name list, where each element #' is created with either [curl::form_data()] or [curl::form_file()]. #' At most one of `data`, `data_file` and `data_form` can be non `NULL`. #' @param on_progress Progress handler function. It is only used if the #' response body is written to a file. See details at [http_get()]. #' #' @noRd #' @examples #' json <- jsonlite::toJSON(list(baz = 100, foo = "bar")) #' #' do <- function() { #' headers <- c("content-type" = "application/json") #' http_post("https://eu.httpbin.org/post", data = json, headers = headers)$ #' then(http_stop_for_status)$ #' then(function(x) { #' jsonlite::fromJSON(rawToChar(x$content))$json #' }) #' } #' #' synchronise(do()) http_post <- function(url, data = NULL, data_file = NULL, data_form = NULL, headers = character(), file = NULL, options = list(), on_progress = NULL) { url; data; data_file; data_form; headers; file; options; on_progress if ((!is.null(data) + !is.null(data_file) + !is.null(data_form)) > 1) { stop( "At most one of `data`, `data_file` and `data_form` ", "can be non `NULL`." 
) } if (!is.null(data_file)) { data <- readBin(data_file, "raw", file.size(data_file)) } if (!is.null(data) && !is.raw(data)) data <- charToRaw(data) options <- get_default_curl_options(options) make_deferred_http( function() { assert_that(is_string(url)) handle <- curl::new_handle(url = url) curl::handle_setheaders(handle, .list = headers) curl::handle_setopt(handle, customrequest = "POST", postfieldsize = length(data), postfields = data, .list = options) if (!is.null(data_form)) { curl::handle_setform(handle, .list = data_form) } list(handle = handle, options = options) }, file ) } http_post <- mark_as_async(http_post) http_delete <- function(url, headers = character(), file = NULL, options = list()) { url; headers; options; make_deferred_http( function() { assert_that(is_string(url)) handle <- curl::new_handle(url = url) curl::handle_setheaders(handle, .list = headers) curl::handle_setopt(handle, customrequest = "DELETE", .list = options) list(handle = handle, options = options) }, file ) } http_delete <- mark_as_async(http_delete) #' @importFrom utils modifyList get_default_curl_options <- function(options) { getopt <- function(nm) { if (!is.null(v <- options[[nm]])) return(v) anm <- paste0("async_http_", nm) if (!is.null(v <- getOption(anm))) return(v) if (!is.na(v <- Sys.getenv(toupper(anm), NA_character_))) return (v) } modifyList( options, drop_nulls(list( timeout = as.integer(getopt("timeout") %||% 0), connecttimeout = as.integer(getopt("connecttimeout") %||% 300), low_speed_time = as.integer(getopt("low_speed_time") %||% 0), low_speed_limit = as.integer(getopt("low_speed_limit") %||% 0), cainfo = getopt("cainfo") )) ) } http_events <- R6Class( "http_events", inherit = event_emitter, public = list( listen_on = function(event, callback) { private$check(event) super$listen_on(event, callback) }, listen_off = function(event, callback) { private$check(event) super$listen_off(event, callback) } ), private = list( check = function(event) { stopifnot(event %in% c("data", "end")) } ) ) make_deferred_http <- function(cb, file) { cb; file id <- NULL ee <- http_events$new() deferred$new( type = "http", call = sys.call(), action = function(resolve, progress) { resolve; progress ## This is a temporary hack until we have proper pollables ## Then the deferred will have a "work" callback, which will ## be able to throw. reject <- environment(resolve)$private$reject ho <- cb() id <<- get_default_event_loop()$add_http( ho$handle, function(err, res) if (is.null(err)) resolve(res) else reject(err), progress, file, data = c(ho$options, list(event_emitter = ee)) ) }, on_cancel = function(reason) { if (!is.null(id)) get_default_event_loop()$cancel(id) }, event_emitter = ee ) } #' Throw R errors for HTTP errors #' #' Status codes below 400 are considered successful, others will trigger #' errors. Note that this is different from the `httr` package, which #' considers the 3xx status code errors as well. #' #' @param resp HTTP response from [http_get()], [http_head()], etc. #' @return The HTTP response invisibly, if it is considered successful. #' Otherwise an error is thrown. 
#' #' @noRd #' @examples #' \donttest{ #' afun <- async(function() { #' http_get("https://eu.httpbin.org/status/404")$ #' then(http_stop_for_status) #' }) #' #' tryCatch(synchronise(afun()), error = function(e) e) #' } http_stop_for_status <- function(resp) { if (!is.integer(resp$status_code)) stop("Not an HTTP response") if (resp$status_code < 400) return(invisible(resp)) stop(http_error(resp)) } http_error <- function(resp, call = sys.call(-1)) { status <- resp$status_code reason <- http_status(status)$reason message <- sprintf("%s (HTTP %d).", reason, status) status_type <- (status %/% 100) * 100 if (length(resp[["content"]]) == 0 && !is.null(resp$file) && file.exists(resp$file)) { tryCatch({ n <- file.info(resp$file, extra_cols = FALSE)$size resp$content <- readBin(resp$file, what = raw(), n = n) }, error = identity) } http_class <- paste0("async_http_", unique(c(status, status_type, "error"))) structure( list(message = message, call = call, response = resp), class = c(http_class, "error", "condition") ) } http_status <- function(status) { status_desc <- http_statuses[as.character(status)] if (is.na(status_desc)) { stop("Unknown http status code: ", status, call. = FALSE) } status_types <- c("Information", "Success", "Redirection", "Client error", "Server error") status_type <- status_types[[status %/% 100]] # create the final information message message <- paste(status_type, ": (", status, ") ", status_desc, sep = "") list( category = status_type, reason = status_desc, message = message ) } http_statuses <- c( "100" = "Continue", "101" = "Switching Protocols", "102" = "Processing (WebDAV; RFC 2518)", "200" = "OK", "201" = "Created", "202" = "Accepted", "203" = "Non-Authoritative Information", "204" = "No Content", "205" = "Reset Content", "206" = "Partial Content", "207" = "Multi-Status (WebDAV; RFC 4918)", "208" = "Already Reported (WebDAV; RFC 5842)", "226" = "IM Used (RFC 3229)", "300" = "Multiple Choices", "301" = "Moved Permanently", "302" = "Found", "303" = "See Other", "304" = "Not Modified", "305" = "Use Proxy", "306" = "Switch Proxy", "307" = "Temporary Redirect", "308" = "Permanent Redirect (experimental Internet-Draft)", "400" = "Bad Request", "401" = "Unauthorized", "402" = "Payment Required", "403" = "Forbidden", "404" = "Not Found", "405" = "Method Not Allowed", "406" = "Not Acceptable", "407" = "Proxy Authentication Required", "408" = "Request Timeout", "409" = "Conflict", "410" = "Gone", "411" = "Length Required", "412" = "Precondition Failed", "413" = "Request Entity Too Large", "414" = "Request-URI Too Long", "415" = "Unsupported Media Type", "416" = "Requested Range Not Satisfiable", "417" = "Expectation Failed", "418" = "I'm a teapot (RFC 2324)", "420" = "Enhance Your Calm (Twitter)", "422" = "Unprocessable Entity (WebDAV; RFC 4918)", "423" = "Locked (WebDAV; RFC 4918)", "424" = "Failed Dependency (WebDAV; RFC 4918)", "424" = "Method Failure (WebDAV)", "425" = "Unordered Collection (Internet draft)", "426" = "Upgrade Required (RFC 2817)", "428" = "Precondition Required (RFC 6585)", "429" = "Too Many Requests (RFC 6585)", "431" = "Request Header Fields Too Large (RFC 6585)", "444" = "No Response (Nginx)", "449" = "Retry With (Microsoft)", "450" = "Blocked by Windows Parental Controls (Microsoft)", "451" = "Unavailable For Legal Reasons (Internet draft)", "499" = "Client Closed Request (Nginx)", "500" = "Internal Server Error", "501" = "Not Implemented", "502" = "Bad Gateway", "503" = "Service Unavailable", "504" = "Gateway Timeout", "505" = "HTTP Version Not 
Supported", "506" = "Variant Also Negotiates (RFC 2295)", "507" = "Insufficient Storage (WebDAV; RFC 4918)", "508" = "Loop Detected (WebDAV; RFC 5842)", "509" = "Bandwidth Limit Exceeded (Apache bw/limited extension)", "510" = "Not Extended (RFC 2774)", "511" = "Network Authentication Required (RFC 6585)", "598" = "Network read timeout error (Unknown)", "599" = "Network connect timeout error (Unknown)" ) #' Set curl HTTP options in an event loop #' #' The event loop must be already running. In other words, you can only #' call this function from async functions. #' #' The default values are set when the first deferred HTTP operation of the #' event loop is created, and they are taken from the `async_http_total_con`, #' `async_http_host_con` and `async_http_multiplex` options. #' #' @param total_con,host_con,multiplex They are passed to #' [curl::multi_set()]. If an argument is `NULL` (the default) then it is #' ignored. #' @noRd #' @family asyncronous HTTP calls http_setopt <- function(total_con = NULL, host_con = NULL, multiplex = NULL) { get_default_event_loop()$http_setopt(total_con, host_con, multiplex) invisible() } #' Apply an asynchronous function to each element of a vector #' #' @param .x A list or atomic vector. #' @param .f Asynchronous function to apply. #' @param ... Additional arguments to `.f`. #' @param .args More additional arguments to `.f`. #' @param .limit Number of elements to process simulateneously. #' @return Deferred value that is resolved after all deferred values #' from the application of `.f` are resolved. #' #' @family async iterators #' @noRd #' @examples #' synchronise(async_map( #' seq(10, 100, by = 10) / 100, #' function(wait) delay(wait)$then(function() "OK") #' )) async_map <- function(.x, .f, ..., .args = list(), .limit = Inf) { if (.limit < length(.x)) { async_map_limit(.x, .f, ..., .args = .args, .limit = .limit) } else { defs <- do.call(lapply, c(list(.x, async(.f), ...), .args)) when_all(.list = defs) } } async_map <- mark_as_async(async_map) async_map_limit <- function(.x, .f, ..., .args = list(), .limit = Inf) { len <- length(.x) nx <- len .f <- async(.f) args <- c(list(...), .args) nextone <- .limit + 1L firsts <- lapply_args(.x[seq_len(.limit)], .f, .args = args) result <- structure( vector(mode = "list", length = len), names = names(.x) ) self <- deferred$new( type = "async_map (limit)", call = sys.call(), action = function(resolve) { self; nx; firsts lapply(seq_along(firsts), function(idx) { firsts[[idx]]$then(function(val) list(idx, val))$then(self) }) if (nx == 0) resolve(result) }, parent_resolve = function(value, resolve) { self; nx; nextone; result; .f nx <<- nx - 1L result[ value[[1]] ] <<- value[2] if (nx == 0) { resolve(result) } else if (nextone <= len) { idx <- nextone dx <- do.call(".f", c(list(.x[[nextone]]), args)) dx$then(function(val) list(idx, val))$then(self) nextone <<- nextone + 1L } } ) self } ## nocov start .onLoad <- function(libname, pkgname) { if (Sys.getenv("DEBUGME") != "" && requireNamespace("debugme", quietly = TRUE)) { debugme::debugme() } } ## nocov end #' Asynchronous external process execution #' #' Start an external process in the background, and report its completion #' via a deferred. #' #' @inheritParams processx::run #' @param error_on_status Whether to reject the referred value if the #' program exits with a non-zero status. #' @return Deferred object. 
#' #' @family asynchronous external processes #' @noRd #' @examples #' \dontrun{ #' afun <- function() { #' run_process("ls", "-l")$ #' then(function(x) strsplit(x$stdout, "\r?\n")[[1]]) #' } #' synchronise(afun()) #' } run_process <- function(command = NULL, args = character(), error_on_status = TRUE, wd = NULL, env = NULL, windows_verbatim_args = FALSE, windows_hide_window = FALSE, encoding = "", ...) { command; args; error_on_status; wd; env; windows_verbatim_args; windows_hide_window; encoding; list(...) id <- NULL deferred$new( type = "process", call = sys.call(), action = function(resolve) { resolve reject <- environment(resolve)$private$reject stdout <- tempfile() stderr <- tempfile() px <- processx::process$new(command, args = args, stdout = stdout, stderr = stderr, poll_connection = TRUE, env = env, cleanup = TRUE, cleanup_tree = TRUE, wd = wd, encoding = encoding, ...) pipe <- px$get_poll_connection() id <<- get_default_event_loop()$add_process( list(pipe), function(err, res) if (is.null(err)) resolve(res) else reject(err), list(process = px, stdout = stdout, stderr = stderr, error_on_status = error_on_status, encoding = encoding)) }, on_cancel = function(reason) { if (!is.null(id)) get_default_event_loop()$cancel(id) } ) } run_process <- mark_as_async(run_process) #' Asynchronous call to an R function, in a background R process #' #' Start a background R process and evaluate a function call in it. #' It uses [callr::r_process] internally. #' #' @inheritParams callr::r_bg #' @noRd #' #' @examples #' \dontrun{ #' afun <- function() { #' run_r_process(function() Sys.getpid()) #' } #' synchronise(afun()) #' } run_r_process <- function(func, args = list(), libpath = .libPaths(), repos = c(getOption("repos"), c(CRAN = "https://cloud.r-project.org")), cmdargs = c("--no-site-file", "--slave", "--no-save", "--no-restore"), system_profile = FALSE, user_profile = FALSE, env = callr::rcmd_safe_env()) { func; args; libpath; repos; cmdargs; system_profile; user_profile; env id <- NULL deferred$new( type = "r-process", call = sys.calls(), action = function(resolve) { resolve reject <- environment(resolve)$private$reject stdout <- tempfile() stderr <- tempfile() opts <- callr::r_process_options( func = func, args = args, libpath = libpath, repos = repos, cmdargs = cmdargs, system_profile = system_profile, user_profile = user_profile, env = env, stdout = stdout, stderr = stderr, extra = list(cleanup_tree = TRUE)) rx <- callr::r_process$new(opts) pipe <- rx$get_poll_connection() id <<- get_default_event_loop()$add_r_process( list(pipe), function(err, res) if (is.null(err)) resolve(res) else reject(err), list(process = rx, stdout = stdout, stderr = stderr, error_on_status = TRUE, encoding = "")) }, on_cancel = function(reason) { if (!is.null(id)) get_default_event_loop()$cancel(id) } ) } run_r_process <- mark_as_async(run_r_process) #' A deferred value that resolves when the specified number of deferred #' values resolve, or is rejected when one of them is rejected #' #' These functions are similar to [when_some()] and [when_any()], but they #' do not ignore errors. If a deferred is rejected, then `async_race_some()` and #' `async_race()` are rejected as well. #' #' `async_race()` is a special case of `count = `: it resolves or is rejected #' as soon as one deferred resolves or is rejected. #' #' async has auto-cancellation, so if the required number of deferred values #' are resolved, or any deferred value is rejected, the rest are cancelled. 
#' #' @param count Number of deferred values that need to resolve. #' @param ... Deferred values. #' @param .list More deferred values. #' @return A deferred value, that is conditioned on all deferred values #' in `...` and `.list`. #' #' @noRd async_race_some <- function(count, ..., .list = list()) { when_some_internal(count, ..., .list = .list, .race = TRUE) } async_race_some <- mark_as_async(async_race_some) #' @noRd #' @rdname async_race_some async_race <- function(..., .list = list()) { when_some_internal(1L, ..., .list = .list, .race = TRUE)$ then(function(x) x[[1]]) } async_race <- mark_as_async(async_race) #' Make an asynchronous function that always succeeds #' #' This is sometimes useful, if the function is applied to entries in #' a vector or list. #' #' @param task Function to transform. #' @return Async function returning a deferred value that is never #' rejected. Instead its value is a list with entries `error` and #' `result`. If the original deferred was resolved, then `error` is #' `NULL`. If the original deferred was rejected, then `result` is #' `NULL`. #' #' @family async control flow #' @noRd #' @examples #' badfun <- async(function() stop("oh no!")) #' safefun <- async_reflect(badfun) #' synchronise(when_all(safefun(), "good")) async_reflect <- function(task) { task <- async(task) function(...) { task(...)$ then(function(value) list(error = NULL, result = value))$ catch(error = function(reason) list(error = reason, result = NULL)) } } async_reflect <- mark_as_async(async_reflect) #' Replicate an async function a number of times #' #' Similar to [base::replicate()], with some differences: #' * it takes an async function, instead of an expression, and #' * it always returns a list. #' #' @param n Number of replications. #' @param task Async function to call. #' @param ... Additional arguments to `task`. #' @param .limit Number of concurrent async processes to create. #' @return Resolves to a list of the results of the `n` `task` calls. #' #' @noRd #' @examples #' \donttest{ #' ## perform an HTTP request three times, and list the reponse times #' do <- function() { #' async_replicate(3, #' function() http_get("https://eu.httpbin.org")$then(function(x) x$times)) #' } #' synchronise(do()) #' } async_replicate <- function(n, task, ..., .limit = Inf) { assert_that( is_count(n), .limit == Inf || is_count(.limit), .limit >= 1L) force(list(...)) task <- async(task) if (n == 0) { async_constant(list()) } else if (n <= .limit) { async_replicate_nolimit(n, task, ...) } else { async_replicate_limit(n, task, ..., .limit = .limit) } } async_replicate_nolimit <- function(n, task, ...) { defs <- lapply(seq_len(n), function(i) task(...)) when_all(.list = defs) } async_replicate_limit <- function(n, task, ..., .limit = .limit) { n; .limit defs <- nextone <- result <- NULL self <- deferred$new( type = "async_replicate", call = sys.call(), action = function(resolve) { defs <<- lapply(seq_len(n), function(i) task(...)) result <<- vector(n, mode = "list") lapply(seq_len(.limit), function(idx) { defs[[idx]]$then(function(val) list(idx, val))$then(self) }) nextone <<- .limit + 1L }, parent_resolve = function(value, resolve) { result[ value[[1]] ] <<- value[2] if (nextone > n) { resolve(result) } else { idx <- nextone defs[[nextone]]$then(function(val) list(idx, val))$then(self) nextone <<- nextone + 1L } } ) self } #' Retry an asynchronous function a number of times #' #' Keeps trying until the function's deferred value resolves without #' error, or `times` tries have been performed. 
#' #' @param task An asynchronous function. #' @param times Number of tries. #' @param ... Arguments to pass to `task`. #' @return Deferred value for the operation with retries. #' #' @family async control flow #' @noRd #' @examples #' \donttest{ #' ## Try a download at most 5 times #' afun <- async(function() { #' async_retry( #' function() http_get("https://eu.httpbin.org"), #' times = 5 #' )$then(function(x) x$status_code) #' }) #' #' synchronise(afun()) #' } async_retry <- function(task, times, ...) { task <- async(task) times <- times force(list(...)) self <- deferred$new( type = "retry", call = sys.call(), parents = list(task(...)), parent_reject = function(value, resolve) { times <<- times - 1L if (times > 0) { task(...)$then(self) } else { stop(value) } } ) } async_retry <- mark_as_async(async_retry) #' Make an asynchronous function retryable #' #' @param task An asynchronous function. #' @param times Number of tries. #' @return Asynchronous retryable function. #' #' @family async control flow #' @noRd #' @examples #' \donttest{ #' ## Create a downloader that retries five times #' http_get_5 <- async_retryable(http_get, times = 5) #' ret <- synchronise( #' http_get_5("https://eu.httpbin.org/get?q=1")$ #' then(function(x) rawToChar(x$content)) #' ) #' cat(ret) #' } async_retryable <- function(task, times) { task <- async(task) force(times) function(...) { async_retry(task, times, ...) } } #' Compose asynchronous functions #' #' This is equivalent to using the `$then()` method of a deferred, but #' it is easier to use programmatically. #' #' @param ... Asynchronous functions to compose. #' @param .list More asynchronous functions to compose. #' @return Asynchronous function, the composition of all input functions. #' They are performed left to right, the ones in `.list` are the last #' ones. #' #' @family async control flow #' @noRd #' @examples #' \donttest{ #' check_url <- async_sequence( #' http_head, function(x) identical(x$status_code, 200L)) #' synchronise(check_url("https://eu.httpbin.org/status/404")) #' synchronise(check_url("https://eu.httpbin.org/status/200")) #' } async_sequence <- function(..., .list = NULL) { funcs <- c(list(...), .list) if (length(funcs) == 0) stop("Function list empty in `async_sequence`") function(...) { dx <- async(funcs[[1]])(...) for (i in seq_along(funcs)[-1]) dx <- dx$then(funcs[[i]]) dx } } async_sequence <- mark_as_async(async_sequence) #' @noRd #' @rdname async_every async_some <- function(.x, .p, ...) { defs <- lapply(.x, async(.p), ...) nx <- length(defs) done <- FALSE deferred$new( type = "async_some", call = sys.call(), parents = defs, action = function(resolve) if (nx == 0) resolve(FALSE), parent_resolve = function(value, resolve) { if (!done && isTRUE(value)) { done <<- TRUE resolve(TRUE) } else if (!done) { nx <<- nx - 1L if (nx == 0) resolve(FALSE) } } ) } async_some <- mark_as_async(async_some) #' Synchronously wrap asynchronous code #' #' Evaluate an expression in an async phase. It creates an event loop, #' then evaluates the supplied expression. If its result is a deferred #' value, it keeps running the event loop, until the deferred value is #' resolved, and returns its resolved value. #' #' If an error is not handled in the async phase, `synchronise()` will #' re-throw that error. #' #' `synchronise()` cancels all async processes on interrupt or external #' error. #' #' @param expr Async function call expression. If it does not evaluate #' to a deferred value, then it is just returned.
#' #' @noRd #' @examples #' \donttest{ #' http_status <- function(url, ...) { #' http_get(url, ...)$ #' then(function(x) x$status_code) #' } #' #' synchronise(http_status("https://eu.httpbin.org/status/418")) #' } synchronise <- function(expr) { new_el <- push_event_loop() on.exit({ new_el$cancel_all(); pop_event_loop() }, add = TRUE) ## Mark this frame as a synchronization point, for debugging `__async_synchronise_frame__` <- TRUE ## This is to allow `expr` to contain `async_list()` etc ## calls that look for the top promise. Without this there ## is no top promise. This is a temporary top promise that ## is never started. res <- async_constant(NULL) res <- expr if (!is_deferred(res)) return(res) ## We need an extra final promise that cannot be replaced, ## so priv stays the same. res <- res$then(function(x) x) priv <- get_private(res) if (! identical(priv$event_loop, new_el)) { err <- make_error( "Cannot create deferred chain across synchronization barrier", class = "async_synchronization_barrier_error") stop(err) } priv$null() priv$run_action() if (isTRUE(getOption("async_debug"))) start_browser() while (priv$state == "pending") new_el$run("once") if (priv$state == "fulfilled") priv$value else stop(priv$value) } start_browser <- function() { async_debug_shortcuts() on.exit(async_debug_remove_shortcuts(), add = TRUE) cat("This is a standard `browser()` call, but you can also use the\n") cat("following extra commands:\n") cat("- .an / async_next(): next event loop iteration.\n") cat("- .as / async_step(): next event loop, debug next action or parent callback.\n") cat("- .asb / async_step_back(): stop debugging of callbacks.\n") cat("- .al / async_list(): deferred values in the current async phase.\n") cat("- .at / async_tree(): DAG of the deferred values.\n") cat("- .aw / async_where(): print call stack, mark async callback.\n") cat("- async_wait_for(): run until deferred is resolved.\n") cat("- async_debug(): debug action and/or parent callbacks of deferred.\n") cat("\n") browser(skipCalls = 1) } #' Run event loop to completion #' #' Creates a new event loop, evaluates `expr` in it, and then runs the #' event loop to completion. It stops when the event loop does not have #' any tasks. #' #' The expression typically creates event loop tasks. It should not create #' deferred values, though, because those will never be evaluated. #' #' Unhandled errors propagate to the `run_event_loop()` call, which fails. #' #' In case of an (unhandled) error, all event loop tasks will be cancelled. #' #' @param expr Expression to run after creating a new event loop. #' @return `NULL`, always. If the event loop is to return some value, #' you can use lexical scoping, see the example below. #' #' @noRd #' @examples #' counter <- 0L #' do <- function() { #' callback <- function() { #' counter <<- counter + 1L #' if (runif(1) < 1/10) t$cancel() #' } #' t <- async_timer$new(1/1000, callback) #' } #' run_event_loop(do()) #' counter run_event_loop <- function(expr) { new_el <- push_event_loop() on.exit({ new_el$cancel_all(); pop_event_loop() }, add = TRUE) ## Mark this frame as a synchronization point, for debugging `__async_synchronise_frame__` <- TRUE expr new_el$run() invisible() } distill_error <- function(err) { if (is.null(err$aframe)) return(err) err$aframe <- list( frame = err$aframe$frame, deferred = err$aframe$data[[1]], type = err$aframe$data[[2]], call = get_private(err$aframe$data[[3]])$mycall ) err } # nocov start #' @noRd print.async_rejected <- function(x, ...) 
{ cat(format(x, ...)) invisible(x) } # nocov end #' @noRd format.async_rejected <- function(x, ...) { x <- distill_error(x) src <- get_source_position(x$aframe$call) paste0( "" ) } #' @noRd summary.async_rejected <- function(object, ...) { x <- distill_error(object) fmt_out <- format(object, ...) stack <- async_where(calls = x$calls, parents = x$parents, frm = list(x$aframe)) stack_out <- format(stack) structure( paste0(fmt_out, "\n\n", stack_out), class = "async_rejected_summary") } # nocov start #' @noRd print.async_rejected_summary <- function(x, ...) { cat(x) invisible(x) } # nocov end #' Asynchronous function call with a timeout #' #' If the deferred value is not resolved before the timeout expires, #' `async_timeout()` throws an `async_timeout` error. #' #' @param task Asynchronous function. #' @param timeout Timeout as a `difftime` object, or number of seconds. #' @param ... Additional arguments to `task`. #' @return A deferred value. An `async_timeout` error is thrown if it is #' not resolved within the specified timeout. #' #' @family async utilities #' @noRd #' @examples #' ## You can catch the error, asynchronously #' synchronise( #' async_timeout(function() delay(1/10)$then(function() "OK"), 1/1000)$ #' catch(async_timeout = function(e) "Timed out", #' error = function(e) "Other error") #' ) #' #' ## Or synchronously #' tryCatch( #' synchronise( #' async_timeout(function() delay(1/10)$then(function() "OK"), 1/1000) #' ), #' async_timeout = function(e) "Timed out. :(", #' error = function(e) paste("Other error:", e$message) #' ) async_timeout <- function(task, timeout, ...) { task <- async(task) force(timeout) list(...) done <- FALSE self <- deferred$new( type = "timeout", call = sys.call(), action = function(resolve) { task(...)$then(function(x) list("ok", x))$then(self) delay(timeout)$then(function() list("timeout"))$then(self) }, parent_resolve = function(value, resolve) { if (!done) { done <<- TRUE if (value[[1]] == "ok") { resolve(value[[2]]) } else { cnd <- structure( list(message = "Async operation timed out"), class = c("async_timeout", "error", "condition") ) stop(cnd) } } } ) } async_timeout <- mark_as_async(async_timeout) #' Repeated timer #' #' The supplied callback function will be called by the event loop #' every `delay` seconds. #' #' @section Usage: #' ``` #' t <- async_timer$new(delay, callback) #' t$cancel() #' ``` #' #' @section Arguments: #' * `delay`: Time interval in seconds, the amount of time to delay #' the execution. It can be a fraction of a second. #' * `callback`: Callback function without arguments. It will be called #' from the event loop every `delay` seconds. #' #' @section Details: #' #' An `async_timer` is an `[event_emitter]` object with a `timeout` event. #' It is possible to add multiple listeners to this event, once the timer #' is created. Note, however, that removing all listeners does not cancel #' the timer, `timeout` events will be still emitted as usual. #' For proper cancellation you'll need to call the `cancel()` method. #' #' It is only possible to create `async_timer` objects in an asynchronous #' context, i.e. within a `synchronise()` or `run_event_loop()` call. #' A `synchronise()` call finishes as soon as its returned deferred value #' is resolved (or rejected), even if some timers are still active. The #' active timers will be automatically cancelled in this case. #' #' @section Errors: #' Errors are handled the same way as for generic event emitters. I.e.
to #' catch errors thrown in the `callback` function, you need to add a #' listener to the `error` event, see the example below. #' #' @section Congestion: #' `async_timer` is _not_ a real-time timer. In particular, if `callback` #' does not return in time, before the next timer event, then all future #' timer events will be delayed. Even if `callback` returns promptly, the #' event loop might be busy with other events, and then the next timer #' event is not emitted in time. In general there is no guarantee about #' the timing of the timer events. #' #' @importFrom R6 R6Class #' @noRd #' @examples #' ## Call 10 times a second, cancel with 1/10 probability #' counter <- 0L #' do <- function() { #' cb <- function() { #' cat("called\n") #' counter <<- counter + 1L #' if (runif(1) < 0.1) t$cancel() #' } #' t <- async_timer$new(1/10, cb) #' } #' #' run_event_loop(do()) #' counter #' #' ## Error handling #' counter <- 0L #' do <- function() { #' cb <- function() { #' cat("called\n") #' counter <<- counter + 1L #' if (counter == 2L) stop("foobar") #' if (counter == 3L) t$cancel() #' } #' t <- async_timer$new(1/10, cb) #' handler <- function(err) { #' cat("Got error:", sQuote(conditionMessage(err)), ", handled\n") #' } #' t$listen_on("error", handler) #' } #' #' run_event_loop(do()) #' counter #' #' ## Error handling at the synchronization point #' counter <- 0L #' do <- function() { #' cb <- function() { #' cat("called\n") #' counter <<- counter + 1L #' if (counter == 2L) stop("foobar") #' if (counter == 3L) t$cancel() #' } #' t <- async_timer$new(1/10, cb) #' } #' #' tryCatch(run_event_loop(do()), error = function(x) x) #' counter async_timer <- R6Class( "async_timer", inherit = event_emitter, public = list( initialize = function(delay, callback) async_timer_init(self, private, super, delay, callback), cancel = function() async_timer_cancel(self, private) ), private = list( id = NULL ) ) async_timer_init <- function(self, private, super, delay, callback) { assert_that( is_time_interval(delay), is.function(callback) && length(formals(callback)) == 0) ## event emitter super$initialize() private$id <- get_default_event_loop()$add_delayed( delay, function() self$emit("timeout"), function(err, res) { if (!is.null(err)) self$emit("error", err) # nocov }, rep = TRUE) self$listen_on("timeout", callback) invisible(self) } async_timer_cancel <- function(self, private) { self; private self$remove_all_listeners("timeout") get_default_event_loop()$cancel(private$id) invisible(self) } #' It runs each task in series but stops as soon as any of the tasks is #' successful. If one of the tasks is successful, the callback will be #' passed the result of the successful task. If all tasks fail, the #' callback will be passed the error and result (if any) of the final #' attempt. #' @param ... Deferred values to run in series. #' @param .list More deferred values to run, `.list` is easier to use #' programmatically. #' @return Resolves to the result of the first successful deferred. #' Otherwise throws an error. The error objects of all failed deferreds #' will be in the `errors` member of the error object.
#' #' @family async control flow #' @noRd #' @examples #' do <- function() { #' async_try_each( #' async(function() stop("doh"))(), #' async(function() "cool")(), #' async(function() stop("doh2"))(), #' async(function() "cool2")() #' ) #' } #' synchronise(do()) async_try_each <- function(..., .list = list()) { defs <- c(list(...), .list) wh <- nx <- NULL errors <- list() self <- deferred$new( type = "async_try_each", call = sys.call(), action = function(resolve) { nx <<- length(defs) if (nx == 0) resolve(NULL) wh <<- 1L defs[[wh]]$then(self) }, parent_resolve = function(value, resolve) { resolve(value) }, parent_reject = function(value, resolve) { errors <<- c(errors, list(value)) if (wh == nx) { err <- structure( list(errors = errors, message = "async_try_each failed"), class = c("async_rejected", "error", "condition")) stop(err) } else { wh <<- wh + 1 defs[[wh]]$then(self) } } ) self } async_try_each <- mark_as_async(async_try_each) #' Repeatedly call task until its test function returns `TRUE` #' #' @param test Synchronous test function. #' @param task Asynchronous function to call repeatedly. #' @param ... Arguments to pass to `task`. #' @return Deferred value, that is resolved when the iteration is done. #' #' @family async control flow #' @noRd #' @examples #' ## Keep calling until it "returns" a number less than 0.1 #' calls <- 0 #' number <- Inf #' synchronise(async_until( #' function() number < 0.1, #' function() { #' calls <<- calls + 1 #' number <<- runif(1) #' } #' )) #' calls async_until <- function(test, task, ...) { force(test) task <- async(task) self <- deferred$new( type = "async_until", call = sys.call(), parents = list(task(...)), parent_resolve = function(value, resolve) { if (test()) { resolve(value) } else { task(...)$then(self) } } ) self } async_until <- mark_as_async(async_until) `%||%` <- function(l, r) if (is.null(l)) r else l vlapply <- function(X, FUN, ..., FUN.VALUE = logical(1)) { vapply(X, FUN, FUN.VALUE = FUN.VALUE, ...) } viapply <- function(X, FUN, ..., FUN.VALUE = integer(1)) { vapply(X, FUN, FUN.VALUE = FUN.VALUE, ...) } vcapply <- function(X, FUN, ..., FUN.VALUE = character(1)) { vapply(X, FUN, FUN.VALUE = FUN.VALUE, ...) } make_error <- function(message, class = "simpleError", call = NULL) { class <- c(class, "error", "condition") structure( list(message = as.character(message), call = call), class = class ) } num_args <- function(fun) { length(formals(fun)) } get_private <- function(x) { x$.__enclos_env__$private } #' Call `func` and then call `callback` with the result #' #' `callback` will be called with two arguments, the first one will be the #' error object if `func()` threw an error, or `NULL` otherwise. The second #' argument is `NULL` on error, and the result of `func()` otherwise. #' #' @param func Function to call. #' @param callback Callback to call with the result of `func()`, #' or the error thrown. #' @param info Extra info to add to the error object. Must be a named list.
#' #' @noRd #' @keywords internal call_with_callback <- function(func, callback, info = NULL) { recerror <- NULL result <- NULL tryCatch( withCallingHandlers( result <- func(), error = function(e) { recerror <<- e recerror$aframe <<- recerror$aframe %||% find_async_data_frame() recerror$calls <<- recerror$calls %||% sys.calls() if (is.null(recerror[["call"]])) recerror[["call"]] <<- sys.call() recerror$parents <<- recerror$parents %||% sys.parents() recerror[names(info)] <<- info handler <- getOption("async.error") if (is.function(handler)) handler() } ), error = identity ) callback(recerror, result) } get_id <- local({ id <- 0L function() { id <<- id + 1L id } }) new_event_loop_id <- local({ id <- 0L function() { id <<- id + 1L id } }) lapply_args <- function(X, FUN, ..., .args = list()) { do.call("lapply", c(list(X = X, FUN = FUN), list(...), .args)) } drop_nulls <- function(x) { x[!vlapply(x, is.null)] } #' @importFrom utils getSrcDirectory getSrcFilename getSrcLocation get_source_position <- function(call) { list( filename = file.path( c(getSrcDirectory(call), "?")[1], c(getSrcFilename(call), "?")[1]), position = paste0( getSrcLocation(call, "line", TRUE) %||% "?", ":", getSrcLocation(call, "column", TRUE) %||% "?") ) } file_size <- function(...) { file.info(..., extra_cols = FALSE)$size } read_all <- function(filename, encoding) { if (is.null(filename)) return(NULL) r <- readBin(filename, what = raw(0), n = file_size(filename)) s <- rawToChar(r) Encoding(s) <- encoding s } crash <- function () { get("attach")(structure(list(), class = "UserDefinedDatabase")) } str_trim <- function(x) { sub("\\s+$", "", sub("^\\s+", "", x)) } expr_name <- function(expr) { if (is.null(expr)) { return("NULL") } if (is.symbol(expr)) { return(as.character(expr)) } if (is.call(expr)) { cl <- as.list(expr)[[1]] if (is.symbol(cl)) { return(as.character(cl)) } else { return(paste0(format(cl), collapse = "")) } } if (is.atomic(expr) && length(expr) == 1) { return(as.character(expr)) } gsub("\n.*$", "...", as.character(expr)) } get_uuid <- function() { async_env$pid <- async_env$pid %||% Sys.getpid() async_env$counter <- async_env$counter %||% 0 async_env$counter <- async_env$counter + 1L paste0(async_env$pid, "-", async_env$counter) } #' Deferred value for a set of deferred values #' #' Create a deferred value that is resolved when all listed deferred values #' are resolved. Note that the error of an input deferred value #' triggers the error `when_all` as well. #' #' async has auto-cancellation, so if one deferred value errors, the rest #' of them will be automatically cancelled. #' #' @param ... Deferred values. #' @param .list More deferred values. #' @return A deferred value, that is conditioned on all deferred values #' in `...` and `.list`. 
#' #' @seealso [when_any()], [when_some()] #' @noRd #' @examples #' \donttest{ #' ## Check that the contents of two URLs are the same #' afun <- async(function() { #' u1 <- http_get("https://eu.httpbin.org") #' u2 <- http_get("https://eu.httpbin.org/get") #' when_all(u1, u2)$ #' then(function(x) identical(x[[1]]$content, x[[2]]$content)) #' }) #' synchronise(afun()) #' } when_all <- function(..., .list = list()) { defs <- c(list(...), .list) nx <- 0L self <- deferred$new( type = "when_all", call = sys.call(), action = function(resolve) { self; nx; defs lapply(seq_along(defs), function(idx) { idx if (is_deferred(defs[[idx]])) { nx <<- nx + 1L defs[[idx]]$then(function(val) list(idx, val))$then(self) } }) if (nx == 0) resolve(defs) }, parent_resolve = function(value, resolve) { defs[ value[[1]] ] <<- value[2] nx <<- nx - 1L if (nx == 0L) resolve(defs) } ) } when_all <- mark_as_async(when_all) #' Resolve a deferred as soon as some deferred values from a list resolve #' #' `when_some` creates a deferred value that is resolved as soon as the #' specified number of deferred values resolve. #' #' `when_any` is a special case for a single deferred value, i.e. `count = 1`. #' #' If the specified number of deferred values cannot be resolved, then #' `when_any` throws an error. #' #' async has auto-cancellation, so if the required number of deferred values #' are resolved, or too many of them throw errors, the rest of them are #' cancelled. #' #' If `when_any` throws an error, then all the underlying error objects #' are returned in the `errors` member of the error object thrown by #' `when_any`. #' #' @param count Number of deferred values that need to resolve. #' @param ... Deferred values. #' @param .list More deferred values. #' @return A deferred value, that is conditioned on all deferred values #' in `...` and `.list`. #' #' @seealso [when_all()] #' @noRd #' @examples #' \donttest{ #' ## Use the URL that returns first #' afun <- function() { #' u1 <- http_get("https://eu.httpbin.org") #' u2 <- http_get("https://eu.httpbin.org/get") #' when_any(u1, u2)$then(function(x) x$url) #' } #' synchronise(afun()) #' } when_some <- function(count, ..., .list = list()) { when_some_internal(count, ..., .list = .list, .race = FALSE) } when_some <- mark_as_async(when_some) when_some_internal <- function(count, ..., .list, .race) { force(count) force(.race) defs <- c(list(...), .list) num_defs <- length(defs) num_failed <- 0L ifdef <- vlapply(defs, is_deferred) resolved <- defs[!ifdef] errors <- list() cancel_all <- function() lapply(defs[ifdef], function(x) x$cancel()) deferred$new( type = "when_some", call = sys.call(), parents = defs[ifdef], action = function(resolve) { if (num_defs < count) { stop("Cannot resolve enough deferred values") } else if (length(resolved) >= count) { resolve(resolved[seq_len(count)]) } }, parent_resolve = function(value, resolve) { resolved <<- c(resolved, list(value)) if (length(resolved) == count) { resolve(resolved) } }, parent_reject = function(value, resolve) { if (.race) { stop(value) } num_failed <<- num_failed + 1L errors <<- c(errors, list(value)) if (num_failed + count == num_defs + 1L) { err <- structure( list(errors = errors, message = "when_some / when_any failed"), class = c("async_rejected", "error", "condition")) stop(err) } } ) } #' @noRd #' @rdname when_some when_any <- function(..., .list = list()) { when_some(1, ..., .list = .list)$then(function(x) x[[1]]) } when_any <- mark_as_async(when_any) #' Repeatedly call task, while test returns true #' #' @param test Synchronous test function.
#' @param task Asynchronous function to call repeatedly. #' @param ... Arguments to pass to `task`. #' @return Deferred value, that is resolved when the iteration is done. #' #' @family async control flow #' @noRd #' @examples #' ## Keep calling while result is bigger than 0.1 #' calls <- 0 #' number <- Inf #' synchronise(async_whilst( #' function() number >= 0.1, #' function() { #' calls <<- calls + 1 #' number <<- runif(1) #' } #' )) #' calls async_whilst <- function(test, task, ...) { force(test) task <- async(task) self <- deferred$new( type = "async_whilst", call = sys.call(), action = function(resolve) { if (!test()) { resolve(NULL) } else { task(...)$then(self) } }, parent_resolve = function(value, resolve) { if (!test()) { resolve(value) } else { task(...)$then(self) } } ) self } async_whilst <- mark_as_async(async_whilst) #' Worker pool #' #' The worker pool functions are independent of the event loop, to allow #' independent testing. #' #' @family worker pool functions #' @name worker_pool #' @noRd #' @keywords internal #' @importFrom R6 R6Class NULL worker_pool <- R6Class( public = list( initialize = function() wp_init(self, private), add_task = function(func, args, id, event_loop) wp_add_task(self, private, func, args, id, event_loop), get_fds = function() wp_get_fds(self, private), get_pids = function() wp_get_pids(self, private), get_poll_connections = function() wp_get_poll_connections(self, private), notify_event = function(pids, event_loop) wp_notify_event(self, private, pids, event_loop), start_workers = function() wp_start_workers(self, private), kill_workers = function() wp_kill_workers(self, private), cancel_task = function(id) wp_cancel_task(self, private, id), cancel_all_tasks = function() wp_cancel_all_tasks(self, private), get_result = function(id) wp_get_result(self, private, id), list_workers = function() wp_list_workers(self, private), list_tasks = function(event_loop = NULL, status = NULL) wp_list_tasks(self, private, event_loop, status) ), private = list( workers = list(), tasks = list(), finalize = function() self$kill_workers(), try_start = function() wp__try_start(self, private), interrupt_worker = function(pid) wp__interrupt_worker(self, private, pid) ) ) wp_init <- function(self, private) { self$start_workers() invisible(self) } wp_start_workers <- function(self, private) { num <- worker_pool_size() ## See if we need to start more if (NROW(private$workers) >= num) return(invisible()) ## Yeah, start some more to_start <- num - NROW(private$workers) sess <- lapply(1:to_start, function(x) callr::r_session$new(wait = FALSE)) fd <- viapply(sess, function(x) processx::conn_get_fileno(x$get_poll_connection())) new_workers <- data.frame( stringsAsFactors = FALSE, session = I(sess), task = NA_character_, pid = viapply(sess, function(x) x$get_pid()), fd = fd, event_loop = NA_integer_ ) private$workers <- rbind(private$workers, new_workers) invisible() } wp_add_task <- function(self, private, func, args, id, event_loop) { private$tasks <- rbind( private$tasks, data.frame( stringsAsFactors = FALSE, event_loop = event_loop, id = id, func = I(list(func)), args = I(list(args)), status = "waiting", result = I(list(NULL))) ) private$try_start() invisible() } ## We only need to poll the sessions that actually do something... 
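# The worker pool above is normally driven by the async event loop, but its
# methods do not depend on it, so it can also be exercised by hand (this is
# what makes it independently testable). The commented-out sketch below is
# only an illustration under that assumption, not part of the package API;
# it uses nothing beyond the methods defined in this file, and the polling
# step is elided:
#
#   pool <- worker_pool$new()
#   pool$add_task(function(x) x + 1, args = list(1), id = "t1", event_loop = 1L)
#   ## ... poll the connections from pool$get_poll_connections() until the
#   ## worker running "t1" signals an event, then:
#   done <- pool$notify_event(pool$get_pids(), event_loop = 1L)
#   if ("t1" %in% done) msg <- pool$get_result("t1")  # raw message from the worker
#   pool$kill_workers()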
wp_get_fds <- function(self, private) { sts <- vcapply(private$workers$session, function(x) x$get_state()) private$workers$fd[sts %in% c("starting", "busy")] } wp_get_pids <- function(self, private) { sts <- vcapply(private$workers$session, function(x) x$get_state()) private$workers$pid[sts %in% c("starting", "busy")] } wp_get_poll_connections <- function(self, private) { sts <- vcapply(private$workers$session, function(x) x$get_state()) busy <- sts %in% c("starting", "busy") structure( lapply(private$workers$session[busy], function(x) x$get_poll_connection()), names = private$workers$pid[busy]) } wp_notify_event <- function(self, private, pids, event_loop) { done <- NULL dead <- integer() which <- match(pids, private$workers$pid) for (w in which) { msg <- private$workers$session[[w]]$read() if (is.null(msg)) next if (msg$code == 200 || (msg$code >= 500 && msg$code < 600)) { if (msg$code >= 500 && msg$code < 600) dead <- c(dead, w) wt <- match(private$workers$task[[w]], private$tasks$id) if (is.na(wt)) stop("Internal error, no such task") private$tasks$result[[wt]] <- msg private$tasks$status[[wt]] <- "done" private$workers$task[[w]] <- NA_character_ done <- c(done, private$tasks$id[[wt]]) } } if (length(dead)) { private$workers <- private$workers[-dead,] self$start_workers() } private$try_start() done } worker_pool_size <- function() { getOption("async.worker_pool_size") %||% as.integer(Sys.getenv("ASYNC_WORKER_POOL_SIZE", 4)) } wp_kill_workers <- function(self, private) { lapply(private$workers$session, function(x) x$kill()) private$workers <- NULL invisible() } wp_cancel_task <- function(self, private, id) { wt <- match(id, private$tasks$id) if (is.na(wt)) stop("Unknown task") if (private$tasks$status[[wt]] == "running") { wk <- match(id, private$workers$task) if (!is.na(wk)) private$interrupt_worker(private$workers$pid[wk]) } private$tasks <- private$tasks[-wt, ] invisible() } wp_cancel_all_tasks <- function(self, private) { stop("`cancel_all_tasks` method is not implemented yet") } wp_get_result <- function(self, private, id) { wt <- match(id, private$tasks$id) if (is.na(wt)) stop("Unknown task") if (private$tasks$status[[wt]] != "done") stop("Task not done yet") result <- private$tasks$result[[wt]] private$tasks <- private$tasks[-wt, ] result } wp_list_workers <- function(self, private) { private$workers[, setdiff(colnames(private$workers), "session")] } wp_list_tasks <- function(self, private, event_loop, status) { dont_show <- c("func", "args", "result") ret <- private$tasks if (!is.null(event_loop)) ret <- ret[ret$event_loop %in% event_loop, ] if (!is.null(status)) ret <- ret[ret$status %in% status, ] ret[, setdiff(colnames(private$tasks), dont_show)] } ## Internals ------------------------------------------------------------- #' @importFrom utils head wp__try_start <- function(self, private) { sts <- vcapply(private$workers$session, function(x) x$get_state()) if (all(sts != "idle")) return() can_work <- sts == "idle" can_run <- private$tasks$status == "waiting" num_start <- min(sum(can_work), sum(can_run)) will_run <- head(which(can_run), num_start) will_work <- head(which(can_work), num_start) for (i in seq_along(will_run)) { wt <- will_run[[i]] ww <- will_work[[i]] func <- private$tasks$func[[wt]] args <- private$tasks$args[[wt]] private$workers$session[[ww]]$call(func, args) private$tasks$status[[wt]] <- "running" private$workers$task[[ww]] <- private$tasks$id[[wt]] } invisible() } #' Interrupt a worker process #' #' We need to make sure that the worker is in a usable state 
after this. #' #' For speed, we try to interrupt with a SIGINT first, and if that does #' not work, then we kill the worker and start a new one. #' #' When we interrupt with a SIGINT a number of things can happen: #' 1. we successfully interrupted a computation, then #' we'll just poll_io(), and read() and we'll get back an #' interrupt error. #' 2. The computation has finished, so we did not interrupt it. #' In this case the background R process will apply the interrupt #' to the next computation (at least on Unix) so the bg process #' needs to run a quick harmless call to absorb the interrupt. #' We can use `Sys.sleep()` for this, and `write_input()` directly #' for speed and simplicity. #' 3. The process has crashed already, in this case `interrupt()` will #' return `FALSE`. `poll_io()` will return with "ready" immediately, #' `read()` will return with an error, and `write_input()` throws #' an error. #' 4. We could not interrupt the process, because it was in a #' non-interruptable state. In this case we kill it, and start a #' new process. `poll_io()` will not return with "ready" in this case. #' #' @param self self #' @param private private self #' @param pid pid of process #' @noRd wp__interrupt_worker <- function(self, private, pid) { ww <- match(pid, private$workers$pid) if (is.na(ww)) stop("Unknown task in interrupt_worker() method") kill <- FALSE sess <- private$workers$session[[ww]] int <- sess$interrupt() pr <- sess$poll_io(100)["process"] if (pr == "ready") { msg <- sess$read() if (! inherits(msg, "interrupt")) { tryCatch({ sess$write_input("base::Sys.sleep(0)\n") sess$read_output() sess$read_error() }, error = function(e) kill <<- TRUE) } private$workers$task[[ww]] <- NA_character_ } else { kill <- TRUE } if (kill) { sess$close() private$workers <- private$workers[-ww, ] ## Make sure that we have enough workers running self$start_workers() } invisible() } #' External process via a process generator #' #' Wrap any [processx::process] object into a deferred value. The #' process is created by a generator function. #' #' @param process_generator Function that returns a [processx::process] #' object. See details below about the current requirements for the #' returned process. #' @param error_on_status Whether to fail if the process terminates #' with a non-zero exit status. #' @param ... Extra arguments, passed to `process_generator`. #' @return Deferred object. #' #' Current requirements for `process_generator`: #' * It must take a `...` argument, and pass it to #' `processx::process$new()`. #' * It must use the `poll_connection = TRUE` argument. #' These requirements might be relaxed in the future. #' #' If you want to obtain the standard output and/or error of the #' process, then `process_generator` must redirect them to files. #' If you want to discard them, `process_generator` can set them to #' `NULL`. #' #' `process_generator` should not use pipes (`"|"`) for the standard #' output or error, because the process will stop running if the #' pipe buffer gets full. We currently never read out the pipe buffer. #' #' @noRd #' @examples #' \dontrun{ #' lsgen <- function(dir = ".", ...) { #' processx::process$new( #' "ls", #' dir, #' poll_connection = TRUE, #' stdout = tempfile(), #' stderr = tempfile(), #' ... #' ) #' } #' afun <- function() { #' external_process(lsgen) #' } #' synchronise(afun()) #' } external_process <- function(process_generator, error_on_status = TRUE, ...) { process_generator; error_on_status; args <- list(...) 
args$encoding <- args$encoding %||% "" args$cleanup_tree <- args$cleanup_tree %||% TRUE id <- NULL deferred$new( type = "external_process", call = sys.call(), action = function(resolve) { resolve reject <- environment(resolve)$private$reject px <- do.call(process_generator, args) stdout <- px$get_output_file() stderr <- px$get_error_file() pipe <- px$get_poll_connection() id <<- get_default_event_loop()$add_process( list(pipe), function(err, res) if (is.null(err)) resolve(res) else reject(err), list(process = px, stdout = stdout, stderr = stderr, error_on_status = error_on_status, encoding = args$encoding) ) }, on_cancel = function(reason) { if (!is.null(id)) get_default_event_loop()$cancel(id) } ) } # nocov end rhub/R/onload.R0000644000176200001440000000007514604762156013027 0ustar liggesusers.onLoad <- function(libname, pkgname) { err$onload_hook() }rhub/R/rhub-app.R0000644000176200001440000000021714604753117013264 0ustar liggesusers# nocov start rhub_app <- function() { app <- webfakes::new_app() app$use("json body parser" = webfakes::mw_json()) app } # nocov end rhub/R/rhubv2.R0000644000176200001440000000075214603437121012753 0ustar liggesusers#' @title The rhub package #' #' @description Tools for R package developers #' #' @details #' ```{r man-readme, child = "README.Rmd"} #' ``` #' #' @keywords internal #' @name rhub-package #' @rdname rhub-package #' @aliases rhub NULL #' @title R-hub v2 #' @description Start here to learn about R-hub v2, especially if you #' used the previous version of R-hub before. #' #' @details #' ```{r include = FALSE, child = "vignettes/rhubv2.Rmd"} #' ``` #' @name rhubv2 #' @rdname rhubv2 NULLrhub/R/errors.R0000644000176200001440000010622314604762771013074 0ustar liggesusers # nocov start # # Standalone file for better error handling ---------------------------- # # If can allow package dependencies, then you are probably better off # using rlang's functions for errors. # # The canonical location of this file is in the processx package: # https://github.com/r-lib/processx/blob/main/R/errors.R # # ## Dependencies # - rstudio-detect.R for better printing in RStudio # # ## Features # # - Throw conditions and errors with the same API. # - Automatically captures the right calls and adds them to the conditions. # - Sets `.Last.error`, so you can easily inspect the errors, even if they # were not caught. # - It only sets `.Last.error` for the errors that are not caught. # - Hierarchical errors, to allow higher level error messages, that are # more meaningful for the users, while also keeping the lower level # details in the error object. (So in `.Last.error` as well.) # - `.Last.error` always includes a stack trace. (The stack trace is # common for the whole error hierarchy.) The trace is accessible within # the error, e.g. `.Last.error$trace`. The trace of the last error is # also at `.Last.error.trace`. # - Can merge errors and traces across multiple processes. # - Pretty-print errors and traces, if the cli package is loaded. # - Automatically hides uninformative parts of the stack trace when # printing. # # ## API # # ``` # new_cond(..., call. = TRUE, srcref = NULL, domain = NA) # new_error(..., call. = TRUE, srcref = NULL, domain = NA) # throw(cond, parent = NULL, frame = environment()) # throw_error(cond, parent = NULL, frame = environment()) # chain_error(expr, err, call = sys.call(-1)) # chain_call(.NAME, ...) # chain_clean_call(.NAME, ...) 
# onload_hook() # add_trace_back(cond, frame = NULL) # format$advice(x) # format$call(call) # format$class(x) # format$error(x, trace = FALSE, class = FALSE, advice = !trace, ...) # format$error_heading(x, prefix = NULL) # format$header_line(x, prefix = NULL) # format$srcref(call, srcref = NULL) # format$trace(x, ...) # ``` # # ## Roadmap: # - better printing of anonymous function in the trace # # ## NEWS: # # ### 1.0.0 -- 2019-06-18 # # * First release. # # ### 1.0.1 -- 2019-06-20 # # * Add `rlib_error_always_trace` option to always add a trace # # ### 1.0.2 -- 2019-06-27 # # * Internal change: change topenv of the functions to baseenv() # # ### 1.1.0 -- 2019-10-26 # # * Register print methods via onload_hook() function, call from .onLoad() # * Print the error manually, and the trace in non-interactive sessions # # ### 1.1.1 -- 2019-11-10 # # * Only use `trace` in parent errors if they are `rlib_error`s. # Because e.g. `rlang_error`s also have a trace, with a slightly # different format. # # ### 1.2.0 -- 2019-11-13 # # * Fix the trace if a non-thrown error is re-thrown. # * Provide print_this() and print_parents() to make it easier to define # custom print methods. # * Fix annotating our throw() methods with the incorrect `base::`. # # ### 1.2.1 -- 2020-01-30 # # * Update wording of error printout to be less intimidating, avoid jargon # * Use default printing in interactive mode, so RStudio can detect the # error and highlight it. # * Add the rethrow_call_with_cleanup function, to work with embedded # cleancall. # # ### 1.2.2 -- 2020-11-19 # # * Add the `call` argument to `catch_rethrow()` and `rethrow()`, to be # able to omit calls. # # ### 1.2.3 -- 2021-03-06 # # * Use cli instead of crayon # # ### 1.2.4 -- 2021-04-01 # # * Allow omitting the call with call. = FALSE in `new_cond()`, etc. # # ### 1.3.0 -- 2021-04-19 # # * Avoid embedding calls in trace with embed = FALSE. # # ### 2.0.0 -- 2021-04-19 # # * Versioned classes and print methods # # ### 2.0.1 -- 2021-06-29 # # * Do not convert error messages to native encoding before printing, # to be able to print UTF-8 error messages on Windows. # # ### 2.0.2 -- 2021-09-07 # # * Do not translate error messages, as this converts them to the native # encoding. We keep messages in UTF-8 now. # # ### 3.0.0 -- 2022-04-19 # # * Major rewrite, use rlang compatible error objects. New API. # # ### 3.0.1 -- 2022-06-17 # # * Remove the `rlang_error` and `rlang_trace` classes, because our new # deparsed `call` column in the trace is not compatible with rlang. # # ### 3.0.2 -- 2022-08-01 # # * Use a `procsrcref` column for processed source references. # Otherwise testthat (and probably other rlang based packages), will # pick up the `srcref` column, and they expect an `srcref` object there. # # ### 3.1.0 -- 2022-10-04 # # * Add ANSI hyperlinks to stack traces, if we have a recent enough # cli package that supports this. # # ### 3.1.1 -- 2022-11-17 # # * Use `[[` instead of `$` to fix some partial matches. # * Use fully qualified `base::stop()` to enable overriding `stop()` # in a package. (Makes sense if compat files use `stop()`. # * The `is_interactive()` function is now exported. # # ### 3.1.2 -- 2022-11-18 # # * The `parent` condition can now be an interrupt. err <- local({ # -- dependencies ----------------------------------------------------- rstudio_detect <- rstudio$detect # -- condition constructors ------------------------------------------- #' Create a new condition #' #' @noRd #' @param ... 
Parts of the error message, they will be converted to #' character and then concatenated, like in [stop()]. #' @param call. A call object to include in the condition, or `TRUE` #' or `NULL`, meaning that [throw()] should add a call object #' automatically. If `FALSE`, then no call is added. #' @param srcref Alternative source reference object to use instead of #' the one of `call.`. #' @param domain Translation domain, see [stop()]. We set this to #' `NA` by default, which means that no translation occurs. This #' has the benefit that the error message is not re-encoded into #' the native locale. #' @return Condition object. Currently a list, but you should not rely #' on that. new_cond <- function(..., call. = TRUE, srcref = NULL, domain = NA) { message <- .makeMessage(..., domain = domain) structure( list(message = message, call = call., srcref = srcref), class = c("condition")) } #' Create a new error condition #' #' It also adds the `rlib_error` class. #' #' @noRd #' @param ... Passed to [new_cond()]. #' @param call. Passed to [new_cond()]. #' @param srcref Passed tp [new_cond()]. #' @param domain Passed to [new_cond()]. #' @return Error condition object with classes `rlib_error`, `error` #' and `condition`. new_error <- function(..., call. = TRUE, srcref = NULL, domain = NA) { cond <- new_cond(..., call. = call., domain = domain, srcref = srcref) class(cond) <- c("rlib_error_3_1", "rlib_error", "error", "condition") cond } # -- throwing conditions ---------------------------------------------- #' Throw a condition #' #' If the condition is an error, it will also call [stop()], after #' signalling the condition first. This means that if the condition is #' caught by an exiting handler, then [stop()] is not called. #' #' @noRd #' @param cond Condition object to throw. If it is an error condition, #' then it calls [stop()]. #' @param parent Parent condition. #' @param frame The throwing context. Can be used to hide frames from #' the backtrace. throw <- throw_error <- function(cond, parent = NULL, frame = environment()) { if (!inherits(cond, "condition")) { cond <- new_error(cond) } if (!is.null(parent) && !inherits(parent, "condition")) { throw(new_error("Parent condition must be a condition object")) } if (isTRUE(cond[["call"]])) { cond[["call"]] <- sys.call(-1) %||% sys.call() } else if (identical(cond[["call"]], FALSE)) { cond[["call"]] <- NULL } cond <- process_call(cond) if (!is.null(parent)) { cond$parent <- process_call(parent) } # We can set an option to always add the trace to the thrown # conditions. This is useful for example in context that always catch # errors, e.g. in testthat tests or knitr. This options is usually not # set and we signal the condition here always_trace <- isTRUE(getOption("rlib_error_always_trace")) .hide_from_trace <- 1L # .error_frame <- cond if (!always_trace) signalCondition(cond) if (is.null(cond$`_pid`)) cond$`_pid` <- Sys.getpid() if (is.null(cond$`_timestamp`)) cond$`_timestamp` <- Sys.time() # If we get here that means that the condition was not caught by # an exiting handler. That means that we need to create a trace. # If there is a hand-constructed trace already in the error object, # then we'll just leave it there. if (is.null(cond$trace)) cond <- add_trace_back(cond, frame = frame) # Set up environment to store .Last.error, it will be just before # baseenv(), so it is almost as if it was in baseenv() itself, like # .Last.value. We save the print methods here as well, and then they # will be found automatically. if (! 
"org:r-lib" %in% search()) { do.call("attach", list(new.env(), pos = length(search()), name = "org:r-lib")) } env <- as.environment("org:r-lib") env$.Last.error <- cond env$.Last.error.trace <- cond$trace # If we always wanted a trace, then we signal the condition here if (always_trace) signalCondition(cond) # If this is not an error, then we'll just return here. This allows # throwing interrupt conditions for example, with the same UI. if (! inherits(cond, "error")) return(invisible()) .hide_from_trace <- NULL # Top-level handler, this is intended for testing only for now, # and its design might change. if (!is.null(th <- getOption("rlib_error_handler")) && is.function(th)) { return(th(cond)) } # In non-interactive mode, we print the error + the traceback # manually, to make sure that it won't be truncated by R's error # message length limit. out <- format( cond, trace = !is_interactive(), class = FALSE, full = !is_interactive() ) writeLines(out, con = default_output()) # Dropping the classes and adding "duplicate_condition" is a workaround # for the case when we have non-exiting handlers on throw()-n # conditions. These would get the condition twice, because stop() # will also signal it. If we drop the classes, then only handlers # on "condition" objects (i.e. all conditions) get duplicate signals. # This is probably quite rare, but for this rare case they can also # recognize the duplicates from the "duplicate_condition" extra class. class(cond) <- c("duplicate_condition", "condition") # Turn off the regular error printing to avoid printing # the error twice. opts <- options(show.error.messages = FALSE) on.exit(options(opts), add = TRUE) base::stop(cond) } # -- rethrow with parent ----------------------------------------------- #' Re-throw an error with a better error message #' #' Evaluate `expr` and if it errors, then throw a new error `err`, #' with the original error set as its parent. #' #' @noRd #' @param expr Expression to evaluate. #' @param err Error object or message to use for the child error. #' @param call Call to use in the re-thrown error. See [throw()]. chain_error <- function(expr, err, call = sys.call(-1), srcref = NULL) { .hide_from_trace <- 1 force(call) srcref <- srcref %||% utils::getSrcref(sys.call()) withCallingHandlers({ expr }, error = function(e) { .hide_from_trace <- 0:1 e$srcref <- srcref e$procsrcref <- NULL if (!inherits(err, "condition")) { err <- new_error(err, call. = call) } throw_error(err, parent = e) }) } # -- rethrowing conditions from C code --------------------------------- #' Version of .Call that throw()s errors #' #' It re-throws error from compiled code. If the error had class #' `simpleError`, like all errors, thrown via `error()` in C do, it also #' adds the `c_error` class. #' #' @noRd #' @param .NAME Compiled function to call, see [.Call()]. #' @param ... Function arguments, see [.Call()]. #' @return Result of the call. chain_call <- function(.NAME, ...) { .hide_from_trace <- 1:3 # withCallingHandlers + do.call + .handleSimpleError (?) call <- sys.call() call1 <- sys.call(-1) srcref <- utils::getSrcref(call) withCallingHandlers( do.call(".Call", list(.NAME, ...)), error = function(e) { .hide_from_trace <- 0:1 e$srcref <- srcref e$procsrcref <- NULL e[["call"]] <- call name <- native_name(.NAME) err <- new_error("Native call to `", name, "` failed", call. 
= call1) cerror <- if (inherits(e, "simpleError")) "c_error" class(err) <- c(cerror, "rlib_error_3_1", "rlib_error", "error", "condition") throw_error(err, parent = e) } ) } package_env <- topenv() #' Version of entrace_call that supports cleancall #' #' This function is the same as [entrace_call()], except that it #' uses cleancall's [.Call()] wrapper, to enable resource cleanup. #' See https://github.com/r-lib/cleancall#readme for more about #' resource cleanup. #' #' @noRd #' @param .NAME Compiled function to call, see [.Call()]. #' @param ... Function arguments, see [.Call()]. #' @return Result of the call. chain_clean_call <- function(.NAME, ...) { .hide_from_trace <- 1:3 call <- sys.call() call1 <- sys.call(-1) srcref <- utils::getSrcref(call) withCallingHandlers( package_env$call_with_cleanup(.NAME, ...), error = function(e) { .hide_from_trace <- 0:1 e$srcref <- srcref e$procsrcref <- NULL e[["call"]] <- call name <- native_name(.NAME) err <- new_error("Native call to `", name, "` failed", call. = call1) cerror <- if (inherits(e, "simpleError")) "c_error" class(err) <- c(cerror, "rlib_error_3_1", "rlib_error", "error", "condition") throw_error(err, parent = e) } ) } # -- create traceback ------------------------------------------------- #' Create a traceback #' #' [throw()] calls this function automatically if an error is not caught, #' so there is currently not much use to call it directly. #' #' @param cond Condition to add the trace to #' @param frame Use this context to hide some frames from the traceback. #' #' @return A condition object, with the trace added. add_trace_back <- function(cond, frame = NULL) { idx <- seq_len(sys.parent(1L)) frames <- sys.frames()[idx] # TODO: remove embedded objects from calls calls <- as.list(sys.calls()[idx]) parents <- sys.parents()[idx] namespaces <- unlist(lapply( seq_along(frames), function(i) { if (is_operator(calls[[i]])) { "o" } else { env_label(topenvx(environment(sys.function(i)))) } } )) pids <- rep(cond$`_pid` %||% Sys.getpid(), length(calls)) mch <- match(format(frame), sapply(frames, format)) if (is.na(mch)) { visibles <- TRUE } else { visibles <- c(rep(TRUE, mch), rep(FALSE, length(frames) - mch)) } scopes <- vapply(idx, FUN.VALUE = character(1), function(i) { tryCatch( get_call_scope(calls[[i]], namespaces[[i]]), error = function(e) "" ) }) namespaces <- ifelse(scopes %in% c("::", ":::"), namespaces, NA_character_) funs <- ifelse( is.na(namespaces), ifelse(scopes != "", paste0(scopes, " "), ""), paste0(namespaces, scopes) ) funs <- paste0( funs, vapply(calls, function(x) format_name(x[[1]])[1], character(1)) ) visibles <- visibles & mark_invisible_frames(funs, frames) pcs <- lapply(calls, function(c) process_call(list(call = c))) calls <- lapply(pcs, "[[", "call") srcrefs <- I(lapply(pcs, "[[", "srcref")) procsrcrefs <- I(lapply(pcs, "[[", "procsrcref")) cond$trace <- new_trace( calls, parents, visibles = visibles, namespaces = namespaces, scopes = scopes, srcrefs = srcrefs, procsrcrefs = procsrcrefs, pids ) cond } is_operator <- function(cl) { is.call(cl) && length(cl) >= 1 && is.symbol(cl[[1]]) && grepl("^[^.a-zA-Z]", as.character(cl[[1]])) } mark_invisible_frames <- function(funs, frames) { visibles <- rep(TRUE, length(frames)) hide <- lapply(frames, "[[", ".hide_from_trace") w_hide <- unlist(mapply(seq_along(hide), hide, FUN = function(i, w) { i + w }, SIMPLIFY = FALSE)) w_hide <- w_hide[w_hide <= length(frames)] visibles[w_hide] <- FALSE hide_from <- which(funs %in% names(invisible_frames)) for (start in hide_from) { 
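    # Starting from each call that matches a known entry point in
    # `invisible_frames` (e.g. `base::stop`), hide the frames that follow it,
    # for as long as they match the expected internal call sequence.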
hide_this <- invisible_frames[[ funs[start] ]] for (i in seq_along(hide_this)) { if (start + i > length(funs)) break if (funs[start + i] != hide_this[i]) break visibles[start + i] <- FALSE } } visibles } invisible_frames <- list( "base::source" = c("base::withVisible", "base::eval", "base::eval"), "base::stop" = "base::.handleSimpleError", "cli::cli_abort" = c( "rlang::abort", "rlang:::signal_abort", "base::signalCondition"), "rlang::abort" = c("rlang:::signal_abort", "base::signalCondition") ) call_name <- function(x) { if (is.call(x)) { if (is.symbol(x[[1]])) { as.character(x[[1]]) } else if (x[[1]][[1]] == quote(`::`) || x[[1]][[1]] == quote(`:::`)) { as.character(x[[1]][[2]]) } else { NULL } } else { NULL } } get_call_scope <- function(call, ns) { if (is.na(ns)) return("global") if (!is.call(call)) return("") if (is.call(call[[1]]) && (call[[1]][[1]] == quote(`::`) || call[[1]][[1]] == quote(`:::`))) return("") if (ns == "base") return("::") if (! ns %in% loadedNamespaces()) return("") name <- call_name(call) nsenv <- asNamespace(ns)$.__NAMESPACE__. if (is.null(nsenv)) return("::") if (is.null(nsenv$exports)) return(":::") if (exists(name, envir = nsenv$exports, inherits = FALSE)) { "::" } else if (exists(name, envir = asNamespace(ns), inherits = FALSE)) { ":::" } else { "local" } } topenvx <- function(x) { topenv(x, matchThisEnv = err_env) } new_trace <- function (calls, parents, visibles, namespaces, scopes, srcrefs, procsrcrefs, pids) { trace <- data.frame( stringsAsFactors = FALSE, parent = parents, visible = visibles, namespace = namespaces, scope = scopes, srcref = srcrefs, procsrcref = procsrcrefs, pid = pids ) trace[["call"]] <- calls class(trace) <- c("rlib_trace_3_0", "rlib_trace", "tbl", "data.frame") trace } env_label <- function(env) { nm <- env_name(env) if (nzchar(nm)) { nm } else { env_address(env) } } env_address <- function(env) { class(env) <- "environment" sub("^.*(0x[0-9a-f]+)>$", "\\1", format(env), perl = TRUE) } env_name <- function(env) { if (identical(env, err_env)) { return(env_name(package_env)) } if (identical(env, globalenv())) { return(NA_character_) } if (identical(env, baseenv())) { return("base") } if (identical(env, emptyenv())) { return("empty") } nm <- environmentName(env) if (isNamespace(env)) { return(nm) } nm } # -- S3 methods ------------------------------------------------------- format_error <- function(x, trace = FALSE, class = FALSE, advice = !trace, full = trace, header = TRUE, ...) { if (has_cli()) { format_error_cli(x, trace, class, advice, full, header, ...) } else { format_error_plain(x, trace, class, advice, full, header, ...) } } print_error <- function(x, trace = TRUE, class = TRUE, advice = !trace, ...) { writeLines(format_error(x, trace, class, advice, ...)) } format_trace <- function(x, ...) { if (has_cli()) { format_trace_cli(x, ...) } else { format_trace_plain(x, ...) } } print_trace <- function(x, ...) 
{ writeLines(format_trace(x, ...)) } cnd_message <- function(cond) { paste(cnd_message_(cond, full = FALSE), collapse = "\n") } cnd_message_ <- function(cond, full = FALSE) { if (has_cli()) { cnd_message_cli(cond, full) } else { cnd_message_plain(cond, full) } } # -- format API ------------------------------------------------------- format_advice <- function(x) { if (has_cli()) { format_advice_cli(x) } else { format_advice_plain(x) } } format_call <- function(call) { if (has_cli()) { format_call_cli(call) } else { format_call_plain(call) } } format_class <- function(x) { if (has_cli()) { format_class_cli(x) } else { format_class_plain(x) } } format_error_heading <- function(x, prefix = NULL) { if (has_cli()) { format_error_heading_cli(x, prefix) } else { format_error_heading_plain(x, prefix) } } format_header_line <- function(x, prefix = NULL) { if (has_cli()) { format_header_line_cli(x, prefix) } else { format_header_line_plain(x, prefix) } } format_srcref <- function(call, srcref = NULL) { if (has_cli()) { format_srcref_cli(call, srcref) } else { format_srcref_plain(call, srcref) } } # -- condition message with cli --------------------------------------- cnd_message_robust <- function(cond) { class(cond) <- setdiff(class(cond), "rlib_error_3_1") conditionMessage(cond) %||% (if (inherits(cond, "interrupt")) "interrupt") %||% "" } cnd_message_cli <- function(cond, full = FALSE) { msg <- cnd_message_robust(cond) exp <- paste0(cli::col_yellow("!"), " ") add_exp <- is.null(names(cond$message)) && substr(cli::ansi_strip(msg[1]), 1, 1) != "!" c( paste0(if (add_exp) exp, msg), if (inherits(cond$parent, "condition")) { msg <- if (full && inherits(cond$parent, "rlib_error_3_1")) { format(cond$parent, trace = FALSE, full = TRUE, class = FALSE, header = FALSE, advice = FALSE ) } else if (inherits(cond$parent, "interrupt")) { "interrupt" } else { conditionMessage(cond$parent) } add_exp <- substr(cli::ansi_strip(msg[1]), 1, 1) != "!" if (add_exp) msg[1] <- paste0(exp, msg[1]) c(format_header_line_cli(cond$parent, prefix = "Caused by error"), msg ) } ) } # -- condition message w/o cli ---------------------------------------- cnd_message_plain <- function(cond, full = FALSE) { exp <- "! " msg <- cnd_message_robust(cond) add_exp <- is.null(names(cond$message)) && substr(msg[1], 1, 1) != "!" c( paste0(if (add_exp) exp, msg), if (inherits(cond$parent, "condition")) { msg <- if (full && inherits(cond$parent, "rlib_error_3_1")) { format(cond$parent, trace = FALSE, full = TRUE, class = FALSE, header = FALSE, advice = FALSE ) } else if (inherits(cond$parent, "interrupt")) { "interrupt" } else { conditionMessage(cond$parent) } add_exp <- substr(msg[1], 1, 1) != "!" if (add_exp) { msg[1] <- paste0(exp, msg[1]) } c(format_header_line_plain(cond$parent, prefix = "Caused by error"), msg ) } ) } # -- printing error with cli ------------------------------------------ # Error parts: # - "Error:" or "Error in " prefix, the latter if the error has a call # - the call, possibly syntax highlightedm possibly trimmed (?) # - source ref, with link to the file, potentially in a new line in cli # - error message, just `conditionMessage()` # - advice about .Last.error and/or .Last.error.trace format_error_cli <- function(x, trace = TRUE, class = TRUE, advice = !trace, full = trace, header = TRUE, ...) 
{ p_class <- if (class) format_class_cli(x) p_header <- if (header) format_header_line_cli(x) p_msg <- cnd_message_cli(x, full) p_advice <- if (advice) format_advice_cli(x) else NULL p_trace <- if (trace && !is.null(x$trace)) { c("---", "Backtrace:", format_trace_cli(x$trace)) } c(p_class, p_header, p_msg, p_advice, p_trace) } format_header_line_cli <- function(x, prefix = NULL) { p_error <- format_error_heading_cli(x, prefix) p_call <- format_call_cli(x[["call"]]) p_srcref <- format_srcref_cli(conditionCall(x), x$procsrcref %||% x$srcref) paste0(p_error, p_call, p_srcref, if (!is.null(conditionCall(x))) ":") } format_class_cli <- function(x) { cls <- unique(setdiff(class(x), "condition")) cls # silence codetools cli::format_inline("{.cls {cls}}") } format_error_heading_cli <- function(x, prefix = NULL) { str_error <- if (is.null(prefix)) { cli::style_bold(cli::col_yellow("Error")) } else { cli::style_bold(paste0(prefix)) } if (is.null(conditionCall(x))) { paste0(str_error, ": ") } else { paste0(str_error, " in ") } } format_call_cli <- function(call) { if (is.null(call)) { NULL } else { cl <- trimws(format(call)) if (length(cl) > 1) cl <- paste0(cl[1], " ", cli::symbol$ellipsis) cli::format_inline("{.code {cl}}") } } format_srcref_cli <- function(call, srcref = NULL) { ref <- get_srcref(call, srcref) if (is.null(ref)) return("") link <- if (ref$file != "") { if (Sys.getenv("R_CLI_HYPERLINK_STYLE") == "iterm") { cli::style_hyperlink( cli::format_inline("{basename(ref$file)}:{ref$line}:{ref$col}"), paste0("file://", ref$file, "#", ref$line, ":", ref$col) ) } else { cli::style_hyperlink( cli::format_inline("{basename(ref$file)}:{ref$line}:{ref$col}"), paste0("file://", ref$file), params = c(line = ref$line, col = ref$col) ) } } else { paste0("line ", ref$line) } cli::col_silver(paste0(" at ", link)) } str_advice <- "Type .Last.error to see the more details." format_advice_cli <- function(x) { cli::col_silver(str_advice) } format_trace_cli <- function(x, ...) { x$num <- seq_len(nrow(x)) scope <- ifelse( is.na(x$namespace), ifelse(x$scope != "", paste0(x$scope, " "), ""), paste0(x$namespace, x$scope) ) visible <- if ("visible" %in% names(x)) { x$visible } else { rep(TRUE, nrow(x)) } srcref <- if ("srcref" %in% names(x) || "procsrcref" %in% names(x)) { vapply( seq_len(nrow(x)), function(i) format_srcref_cli(x[["call"]][[i]], x$procsrcref[[i]] %||% x$srcref[[i]]), character(1) ) } else { unname(vapply(x[["call"]], format_srcref_cli, character(1))) } lines <- paste0( cli::col_silver(format(x$num), ". "), ifelse (visible, "", "| "), scope, vapply(seq_along(x$call), function(i) { format_trace_call_cli(x$call[[i]], x$namespace[[i]]) }, character(1)), srcref ) lines[!visible] <- cli::col_silver(cli::ansi_strip( lines[!visible], link = FALSE )) lines } format_trace_call_cli <- function(call, ns = "") { envir <- tryCatch(asNamespace(ns), error = function(e) .GlobalEnv) cl <- trimws(format(call)) if (length(cl) > 1) { cl <- paste0(cl[1], " ", cli::symbol$ellipsis) } # Older cli does not have 'envir'. if ("envir" %in% names(formals(cli::code_highlight))) { fmc <- cli::code_highlight(cl, envir = envir)[1] } else { fmc <- cli::code_highlight(cl)[1] } cli::ansi_strtrim(fmc, cli::console_width() - 5) } # ---------------------------------------------------------------------- format_error_plain <- function(x, trace = TRUE, class = TRUE, advice = !trace, full = trace, header = TRUE, ...) 
{ p_class <- if (class) format_class_plain(x) p_header <- if (header) format_header_line_plain(x) p_msg <- cnd_message_plain(x, full) p_advice <- if (advice) format_advice_plain(x) else NULL p_trace <- if (trace && !is.null(x$trace)) { c("---", "Backtrace:", format_trace_plain(x$trace)) } c(p_class, p_header, p_msg, p_advice, p_trace) } format_trace_plain <- function(x, ...) { x$num <- seq_len(nrow(x)) scope <- ifelse( is.na(x$namespace), ifelse(x$scope != "", paste0(x$scope, " "), ""), paste0(x$namespace, x$scope) ) visible <- if ("visible" %in% names(x)) { x$visible } else { rep(TRUE, nrow(x)) } srcref <- if ("srcref" %in% names(x) || "procsrfref" %in% names(x)) { vapply( seq_len(nrow(x)), function(i) format_srcref_plain(x[["call"]][[i]], x$procsrcref[[i]] %||% x$srcref[[i]]), character(1) ) } else { unname(vapply(x[["call"]], format_srcref_plain, character(1))) } lines <- paste0( paste0(format(x$num), ". "), ifelse (visible, "", "| "), scope, vapply(x[["call"]], format_trace_call_plain, character(1)), srcref ) lines } format_advice_plain <- function(x, ...) { str_advice } format_header_line_plain <- function(x, prefix = NULL) { p_error <- format_error_heading_plain(x, prefix) p_call <- format_call_plain(x[["call"]]) p_srcref <- format_srcref_plain(conditionCall(x), x$procsrcref %||% x$srcref) paste0(p_error, p_call, p_srcref, if (!is.null(conditionCall(x))) ":") } format_error_heading_plain <- function(x, prefix = NULL) { str_error <- if (is.null(prefix)) "Error" else prefix if (is.null(conditionCall(x))) { paste0(str_error, ": ") } else { paste0(str_error, " in ") } } format_class_plain <- function(x) { cls <- unique(setdiff(class(x), "condition")) paste0("<", paste(cls, collapse = "/"), ">") } format_call_plain <- function(call) { if (is.null(call)) { NULL } else { cl <- trimws(format(call)) if (length(cl) > 1) cl <- paste0(cl[1], " ...") paste0("`", cl, "`") } } format_srcref_plain <- function(call, srcref = NULL) { ref <- get_srcref(call, srcref) if (is.null(ref)) return("") link <- if (ref$file != "") { paste0(basename(ref$file), ":", ref$line, ":", ref$col) } else { paste0("line ", ref$line) } paste0(" at ", link) } format_trace_call_plain <- function(call) { fmc <- trimws(format(call)[1]) if (length(fmc) > 1) { fmc <- paste0(fmc[1], " ...") } strtrim(fmc, getOption("width") - 5) } # -- utilities --------------------------------------------------------- cli_version <- function() { # this loads cli! 
package_version(asNamespace("cli")[[".__NAMESPACE__."]]$spec[["version"]]) } has_cli <- function() { "cli" %in% loadedNamespaces() && cli_version() >= "3.3.0" } `%||%` <- function(l, r) if (is.null(l)) r else l bytes <- function(x) { nchar(x, type = "bytes") } process_call <- function(cond) { cond[c("call", "srcref", "procsrcref")] <- list( call = if (is.null(cond[["call"]])) { NULL } else if (is.character(cond[["call"]])) { cond[["call"]] } else { deparse(cond[["call"]], nlines = 2) }, srcref = NULL, procsrcref = get_srcref(cond[["call"]], cond$procsrcref %||% cond$srcref) ) cond } get_srcref <- function(call, srcref = NULL) { ref <- srcref %||% utils::getSrcref(call) if (is.null(ref)) return(NULL) if (inherits(ref, "processed_srcref")) return(ref) file <- utils::getSrcFilename(ref, full.names = TRUE)[1] if (is.na(file)) file <- "" line <- utils::getSrcLocation(ref) %||% "" col <- utils::getSrcLocation(ref, which = "column") %||% "" structure( list(file = file, line = line, col = col), class = "processed_srcref" ) } is_interactive <- function() { opt <- getOption("rlib_interactive") if (isTRUE(opt)) { TRUE } else if (identical(opt, FALSE)) { FALSE } else if (tolower(getOption("knitr.in.progress", "false")) == "true") { FALSE } else if (tolower(getOption("rstudio.notebook.executing", "false")) == "true") { FALSE } else if (identical(Sys.getenv("TESTTHAT"), "true")) { FALSE } else { interactive() } } no_sink <- function() { sink.number() == 0 && sink.number("message") == 2 } rstudio_stdout <- function() { rstudio <- rstudio_detect() rstudio$type %in% c( "rstudio_console", "rstudio_console_starting", "rstudio_build_pane", "rstudio_job", "rstudio_render_pane" ) } default_output <- function() { if ((is_interactive() || rstudio_stdout()) && no_sink()) { stdout() } else { stderr() } } onload_hook <- function() { reg_env <- Sys.getenv("R_LIB_ERROR_REGISTER_PRINT_METHODS", "TRUE") if (tolower(reg_env) != "false") { registerS3method("format", "rlib_error_3_1", format_error, baseenv()) registerS3method("format", "rlib_trace_3_0", format_trace, baseenv()) registerS3method("print", "rlib_error_3_1", print_error, baseenv()) registerS3method("print", "rlib_trace_3_0", print_trace, baseenv()) registerS3method("conditionMessage", "rlib_error_3_1", cnd_message, baseenv()) } } native_name <- function(x) { if (inherits(x, "NativeSymbolInfo")) { x$name } else { format(x) } } # There is no format() for 'name' in R 3.6.x and before format_name <- function(x) { if (is.name(x)) { as.character(x) } else { format(x) } } # -- public API -------------------------------------------------------- err_env <- environment() parent.env(err_env) <- baseenv() structure( list( .internal = err_env, new_cond = new_cond, new_error = new_error, throw = throw, throw_error = throw_error, chain_error = chain_error, chain_call = chain_call, chain_clean_call = chain_clean_call, add_trace_back = add_trace_back, process_call = process_call, onload_hook = onload_hook, is_interactive = is_interactive, format = list( advice = format_advice, call = format_call, class = format_class, error = format_error, error_heading = format_error_heading, header_line = format_header_line, srcref = format_srcref, trace = format_trace ) ), class = c("standalone_errors", "standalone")) }) # These are optional, and feel free to remove them if you prefer to # call them through the `err` object. 
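#
# A minimal usage sketch (comments only, not run): `throw()` signals the
# condition and, for errors, then calls `stop()`, so an exiting handler
# receives the original condition object:
#
#   cond <- tryCatch(
#     throw(new_error("something went wrong", call. = FALSE)),
#     error = function(e) e
#   )
#   conditionMessage(cond)   # "something went wrong"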
new_cond <- err$new_cond new_error <- err$new_error throw <- err$throw throw_error <- err$throw_error chain_error <- err$chain_error chain_call <- err$chain_call chain_clean_call <- err$chain_clean_call # nocov end rhub/R/a-rstudio-detect.R0000644000176200001440000001073614603437121014723 0ustar liggesusers rstudio <- local({ standalone_env <- environment() parent.env(standalone_env) <- baseenv() # -- Collect data ------------------------------------------------------ data <- NULL get_data <- function() { envs <- c( "R_BROWSER", "R_PDFVIEWER", "RSTUDIO", "RSTUDIO_TERM", "RSTUDIO_CONSOLE_COLOR", "ASCIICAST") d <- list( pid = Sys.getpid(), envs = Sys.getenv(envs), api = tryCatch( asNamespace("rstudioapi")$isAvailable(), error = function(err) FALSE ), tty = isatty(stdin()), gui = .Platform$GUI, args = commandArgs(), search = search() ) d$ver <- if (d$api) asNamespace("rstudioapi")$getVersion() d$desktop <- if (d$api) asNamespace("rstudioapi")$versionInfo()$mode d } # -- Auto-detect environment ------------------------------------------- is_rstudio <- function() { Sys.getenv("RSTUDIO") == "1" } detect <- function(clear_cache = FALSE) { # Cached? if (clear_cache) data <<- list() if (!is.null(data)) return(get_caps(data)) # Otherwise get data new <- get_data() # Cache unless told otherwise cache <- TRUE new$type <- if (new$envs[["RSTUDIO"]] != "1") { # 1. Not RStudio at all "not_rstudio" } else if (new$gui == "RStudio" && new$api) { # 2. RStudio console, properly initialized "rstudio_console" } else if (new$gui == "RStudio" && ! new$api) { # 3. RStudio console, initilizing cache <- FALSE "rstudio_console_starting" } else if (new$tty && new$envs[["ASCIICAST"]] != "true") { # 4. R in the RStudio terminal # This could also be a subprocess of the console or build pane # with a pseudo-terminal. There isn't really a way to rule that # out, without inspecting some process data with ps::ps_*(). # At least we rule out asciicast "rstudio_terminal" } else if (! new$tty && new$envs[["RSTUDIO_TERM"]] == "" && new$envs[["R_BROWSER"]] == "false" && new$envs[["R_PDFVIEWER"]] == "false" && is_build_pane_command(new$args)) { # 5. R in the RStudio build pane # https://github.com/rstudio/rstudio/blob/master/src/cpp/session/ # modules/build/SessionBuild.cpp#L231-L240 "rstudio_build_pane" } else { # Otherwise it is a subprocess of the console, terminal or # build pane, and it is hard to say which, so we do not try. 
"rstudio_subprocess" } if (cache) data <<- new get_caps(new) } is_build_pane_command <- function(args) { cmd <- gsub("[\"']", "", args[[length(args)]]) rcmd <- sub("[(].*$", "", cmd) rcmd %in% c("devtools::build", "devtools::test", "devtools::check") } # -- Capabilities ------------------------------------------------------ caps <- list() caps$not_rstudio <- function(data) { list( type = "not_rstudio", dynamic_tty = FALSE, ansi_tty = FALSE, ansi_color = FALSE, num_colors = 1L ) } caps$rstudio_console <- function(data) { list( type = "rstudio_console", dynamic_tty = TRUE, ansi_tty = FALSE, ansi_color = data$envs[["RSTUDIO_CONSOLE_COLOR"]] != "", num_colors = as.integer(data$envs[["RSTUDIO_CONSOLE_COLOR"]]) ) } caps$rstudio_console_starting <- function(data) { res <- caps$rstudio_console(data) res$type <- "rstudio_console_starting" res } caps$rstudio_terminal <- function(data) { list( type = "rstudio_terminal", dynamic_tty = TRUE, ansi_tty = TRUE, ansi_color = data$envs[["RSTUDIO_CONSOLE_COLOR"]] != "", num_colors = as.integer(data$envs[["RSTUDIO_CONSOLE_COLOR"]]) ) } caps$rstudio_build_pane <- function(data) { list( type = "rstudio_build_pane", dynamic_tty = TRUE, ansi_tty = FALSE, ansi_color = data$envs[["RSTUDIO_CONSOLE_COLOR"]] != "", num_colors = as.integer(data$envs[["RSTUDIO_CONSOLE_COLOR"]]) ) } caps$rstudio_subprocess <- function(data) { list( type = "rstudio_subprocess", dynamic_tty = FALSE, ansi_tty = FALSE, ansi_color = FALSE, num_colors = 1L ) } get_caps <- function(data, type = data$type) { ret <- caps[[type]](data) ret$version <- data$ver ret } structure( list( .internal = standalone_env, is_rstudio = is_rstudio, detect = detect ), class = c("standalone_rstudio_detect", "standalone") ) }) rhub/R/platforms.R0000644000176200001440000001663114762413400013555 0ustar liggesusers get_platforms <- function() { url_platforms <- Sys.getenv( "RHUB_PLATFORMS_URL", "https://raw.githubusercontent.com/r-hub/actions/v1/setup/platforms.json" ) url_containers <- Sys.getenv( "RHUB_CONTAINERS_URL", "https://r-hub.github.io/containers/manifest.json", ) ret <- synchronise(when_all( async_cached_http_get(url_platforms), async_cached_http_get(url_containers) )) ret } #' List R-hub platforms #' #' @return Data frame with columns: #' * `name`: platform name. Use this in the `platforms` argument of #' [rhub_check()]. #' * `aliases`: alternative platform names. They can also be used in the #' `platforms` argument of [rhub_check()]. #' * `type`: `"os"` or `"container"`. #' * `os_type`: Linux, macOS or Windows currently. #' * `container`: URL of the container image for container platforms. #' * `github_os`: name of the OS on GitHub Actions for non-container #' platforms. #' * `r_version`: R version string. If `"*"` then any supported R version #' can be selected for this platform. #' * `os_name`: name of the operating system, including Linux distribution #' name and version for container actions. 
#' #' @export rhub_platforms <- function() { ret <- get_platforms() platforms <- jsonlite::fromJSON(ret[[1]]) containers <- jsonlite::fromJSON(ret[[2]], simplifyVector = FALSE)$containers res <- data_frame( name = platforms[["name"]], description = platforms[["description"]] %||% NA_character_, aliases = lapply(zip(platforms[["cran-names"]], platforms[["aliases"]]), unique), type = platforms[["type"]], os_type = platforms[["os-type"]], container = platforms[["container"]], github_os = platforms[["os"]], r_version = platforms[["r-version"]], os_name = NA_character_ ) wcnt <- res$type == "container" cnt_tags <- vcapply(containers, "[[", "tag") res$r_version[wcnt] <- vcapply(res$container[wcnt], function(x) { if (! x %in% cnt_tags) return(NA_character_) sess <- containers[[match(x, cnt_tags)]]$builds[[1]]$`sessionInfo()` strsplit(sess, "\n", fixed = TRUE)[[1]][1] }) res$os_name[wcnt] <- vcapply(res$container[wcnt], function(x) { if (! x %in% cnt_tags) return(NA_character_) osr <- containers[[match(x, cnt_tags)]]$builds[[1]]$`/etc/os-release` osr <- strsplit(osr, "\n", fixed = TRUE)[[1]] pn <- grep("^PRETTY_NAME", osr, value = TRUE)[1] pn <- sub("^PRETTY_NAME=", "", pn) pn <- unquote(pn) pn }) res <- res[order(res$type == "container", res$name), ] res <- add_class(res, "rhub_platforms") res } #' @export format.rhub_platforms <- function(x, ...) { ret <- character() wvms <- which(x$type == "os") wcts <- which(x$type == "container") counter <- 1L grey <- cli::make_ansi_style("gray70", grey = TRUE) if (length(wvms)) { vm <- if (has_emoji()) "\U1F5A5 " else "[VM] " ret <- c(ret, cli::rule("Virtual machines")) for (p in wvms) { note <- if (!is.na(x$description[p])) paste0(", ", x$description[p]) ret <- c( ret, paste0( format(counter, width = 2), " ", vm, " ", cli::style_bold(cli::col_blue(x$name[p])) ), if (x$r_version[p] == "*") { grey(paste0(" All R versions on GitHub Actions ", x$github_os[p], note)) } else { grey(paste0(" ", x$r_version[p], note)) } ) counter <- counter + 1L } } if (length(wcts)) { if (length(ret)) ret <- c(ret, "") ret <- c(ret, cli::rule("Containers")) for (p in wcts) { ct <- if (has_emoji()) "\U1F40B" else "[CT] " rv <- x$r_version[p] os <- x$os_name[p] al <- sort(unique(x$aliases[[p]])) al <- if (length(al)) { grey(paste0(" [", paste(al, collapse = ", "), "]")) } else { "" } ret <- c( ret, paste0( format(counter, width = 2), " ", ct, " ", cli::style_bold(cli::col_blue(x$name[p])), al ), grey(paste0( " ", if (!is.na(rv)) rv, if (!is.na(rv) && !is.na(os)) " on ", if (!is.na(os)) os )), cli::style_italic(grey(paste0(" ", x$container[p]))) ) counter <- counter + 1L } } ret } #' @export print.rhub_platforms <- function(x, ...) { writeLines(cli::ansi_strtrim(format(x, ...))) } #' @export `[.rhub_platforms` <- function(x, i, j, drop = FALSE) { class(x) <- setdiff(class(x), "rhub_platforms") NextMethod("[") } #' @export summary.rhub_platforms <- function(object, ...) { class(object) <- c("rhub_platforms_summary", class(object)) object } #' @export format.rhub_platforms_summary <- function(x, ...) 
{ num <- format(seq_len(nrow(x))) icon <- if (!has_emoji()) { ifelse(x$type == "os", "[VM]", "[CT]") } else { ifelse(x$type == "os", "\U1F5A5", "\U1F40B") # nocov } name <- cli::style_bold(cli::col_blue(x$name)) rv <- abbrev_version(x$r_version) note <- ifelse(is.na(x$description), "", paste0(", ", x$description)) os <- ifelse( is.na(x$os_name), paste0(x$github_os, " on GitHub", note), paste0(x$os_name, note) ) lines <- paste( ansi_align_width(num), ansi_align_width(icon), ansi_align_width(name), ansi_align_width(rv), ansi_align_width(os) ) trimws(lines, which = "right") } #' @export print.rhub_platforms_summary <- function(x, ...) { writeLines(cli::ansi_strtrim(format(x, ...))) } abbrev_version <- function(x) { sel <- grepl("^R Under development", x) x[sel] <- sub("R Under development [(]unstable[)]", "R-devel", x[sel]) sel <- grepl("R version [0-9.]+ Patched", x) x[sel] <- sub("R version ([0-9.]+) Patched", "R-\\1 (patched)", x[sel]) sel <- grepl("R version [0-9.]+", x) x[sel] <- sub("R version ([0-9.]+)", "R-\\1", x[sel]) x[x == "*"] <- "R-* (any version)" x } select_platforms <- function(platforms = NULL) { tryCatch( plat <- rhub_platforms(), error = function(e) { throw(parent = e, pkg_error( "Failed to download the list of R-hub platforms.", i = "Make sure that you are online and Github is also online." )) } ) if (is.null(platforms)) { if (!is_interactive()) { throw(pkg_error( "{.arg platforms} argument is missing for {.fun rhub_check}.", i = "You need to specify {.arg platforms} in non-interactive sessions" )) } cli::cli_text() cli::cli_text( "Available platforms (see {.code rhub::rhub_platforms()} for details):" ) cli::cli_text() cli::cli_verbatim(paste( cli::ansi_strtrim(format(summary(plat))), collapse = "\n" )) pnums <- trimws(readline( prompt = "\nSelection (comma separated numbers, 0 to cancel): " )) if (pnums == "" || pnums == "0") { throw(pkg_error("R-hub check cancelled")) } pnums <- unique(trimws(strsplit(pnums, ",", fixed = TRUE)[[1]])) pnums2 <- suppressWarnings(as.integer(pnums)) bad <- is.na(pnums2) | pnums2 < 1 | pnums2 > nrow(plat) if (any(bad)) { throw(pkg_error( "Invalid platform number{?s}: {.val {pnums[bad]}}." )) } platforms <- plat$name[pnums2] } else { platforms <- unique(platforms) bad <- !platforms %in% unlist(c(plat$name, plat$aliases)) if (any(bad)) { throw(pkg_error( "Unknown platform{?s}: {.val {platforms[bad]}}.", i = "See {.run rhub::rhub_platforms()} for the list of platforms" )) } } platforms } rhub/R/check.R0000644000176200001440000000434514762410770012631 0ustar liggesusers #' Check a package on R-hub #' #' @param gh_url GitHub URL of a package to check, or `NULL` to check #' the package in the current directory. #' @param platforms Platforms to use, a character vector. Use `NULL` to #' select from a list in interactive sessions. See [rhub_platforms()]. #' @param r_versions Which R version(s) to use for the platforms that #' supports multiple R versions. This argument is not implemented yet. #' @param branch Branch to use to run R-hub. Defaults to the current #' branch if `gh_url` is `NULL`. Otherwise defaults to `"main"`. Note that #' this branch also need to include the `rhub.yaml` workflow file. 
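#'
#' @examples
#' \dontrun{
#' # Not run: requires a GitHub repository with the `rhub.yaml` workflow
#' # on the selected branch, plus a GitHub token. The platform names are
#' # illustrative; see `rhub_platforms()` for the current list.
#' rhub_check(platforms = c("linux", "macos", "windows"))
#' }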
#' @return `NULL`, invisibly. The function is called for its side effect of starting a check. #' #' @export rhub_check <- function(gh_url = NULL, platforms = NULL, r_versions = NULL, branch = NULL) { assert_that( is_optional_gh_url(gh_url), is_optional_character(platforms), is_optional_string(branch) ) git_root <- if (is.null(gh_url)) setup_find_git_root() gh_url <- gh_url %||% doctor_find_gh_url(repo = git_root) pat <- doctor_find_pat(gh_url) if (is.null(branch)) { if (!is.null(git_root)) { branch <- gert::git_branch(repo = git_root) } else { branch <- "main" } } platforms <- select_platforms(platforms) url <- parse_gh_url(gh_url) ep <- glue::glue("/repos/{url$user}/{url$repo}/actions/workflows/rhub.yaml/dispatches") config <- list(platforms = platforms) name <- paste(platforms, collapse = ", ") id <- random_id() data <- list( ref = branch, inputs = list( config = jsonlite::toJSON(config, auto_unbox = TRUE), name = name, id = id ) ) jsondata <- jsonlite::toJSON(data, auto_unbox = TRUE) resp <- gh_rest_post(url$api, ep, token = pat, data = jsondata) if (resp$status_code != 204) { throw(pkg_error( ":( Failed to start check: {resp$content$message}.", i = "If you think this is a bug in the {.pkg rhub} package, please open an issue at {.url https://github.com/r-hub/rhub/issues}." )) } aurl <- paste0("https://", url$host, "/", url$user, "/", url$repo, "/actions") cli::cli_text() cli::cli_bullets(c( "v" = "Check started: {name} ({id}).", " " = "See {.url {aurl}} for live output!" )) invisible(NULL) } rhub/R/utils.R0000644000176200001440000001046514605453245012704 0ustar liggesusers pkg_error <- function(..., .data = NULL, .class = NULL, .envir = parent.frame(), call. = TRUE) { .hide_from_trace <- TRUE cnd <- new_error( call. = call., cli::format_error( .envir = .envir, c( ... ) ) ) if (length(.data)) cnd[names(.data)] <- .data if (length(.class)) class(cnd) <- c(.class, class(cnd)) cnd } stop <- function(..., call. = TRUE, domain = NA) { .hide_from_trace <- TRUE args <- list(...) if (length(args) == 1L && inherits(args[[1L]], "condition")) { throw( add_class(args[[1]], c("rlib_error_3_1", "rlib_error"), "end"), frame = parent.frame() ) } else { throw(new_error(..., call. = call., domain = domain)) } } stopifnot <- function(...) { assert_that(..., env = parent.frame()) } add_class <- function(obj, classes, where = c("start", "end")) { where <- match.arg(where) nc <- c( if (where == "start") classes, class(obj), if (where == "end") classes ) class(obj) <- unique(nc) obj } zip <- function(x, y) { mapply(FUN = c, x, y, SIMPLIFY = FALSE, USE.NAMES = FALSE) } first_char <- function(x) { substr(x, 1, 1) } last_char <- function(x) { substr(x, nchar(x), nchar(x)) } unquote <- function(x) { as.character(ifelse( first_char(x) == last_char(x) & first_char(x) %in% c("'", '"'), substr(x, 2L, nchar(x) - 1L), x )) } has_emoji <- function() { if (!cli::is_utf8_output()) return(FALSE) if (isTRUE(opt <- getOption("pkg.emoji"))) return(TRUE) if (identical(opt, FALSE)) return(FALSE) if (Sys.info()[["sysname"]] != "Darwin") return(FALSE) TRUE } parse_url <- function(url) { re_url <- paste0( "^(?<protocol>[a-zA-Z0-9]+)://", "(?:(?<username>[^@/:]+)(?::(?<password>[^@/]+))?@)?", "(?<host>[^/]+)", "(?<path>.*)$" # don't worry about query params here...
) mch <- re_match(url, re_url) if (is.na(mch[[1]])) { ssh_re_url <- "^git@(?<host>[^:]+):(?<path>.*)[.]git$" mch <- re_match(url, ssh_re_url) # try without the trailing .git as well if (is.na(mch[[1]])) { ssh_re_url2 <- "^git@(?<host>[^:]+):(?<path>.*)$" mch <- re_match(url, ssh_re_url2) } if (is.na(mch[[1]])) { cli::cli_abort("Invalid URL: {.url {url}}") } # Used for accessing the server's API mch$protocol <- "https" } mch[c("protocol", "host", "path")] } read_file <- function(path) { bin <- readBin(path, "raw", file.size(path)) chr <- rawToChar(bin) if (is.na(iconv(chr, "UTF-8", "UTF-8"))) { throw(pkg_error("{.path {path}} is not UTF-8, giving up. :(")) } Encoding(chr) <- "UTF-8" chr } ansi_align_width <- function(text) { if (length(text) == 0) return(text) width <- max(cli::ansi_nchar(text, type = "width")) cli::ansi_align(text, width = width) } random_id <- function() { r <- paste0(sample(c(letters, LETTERS, 0:9), 20, replace = TRUE), collapse = "") gsub(" ", "-", cli::hash_animal(r, n_adj = 1)$hash) } readline <- function(prompt) { base::readline(prompt) } is_interactive <- function() { opt <- getOption("rlib_interactive") if (isTRUE(opt)) { TRUE } else if (identical(opt, FALSE)) { FALSE } else if (tolower(getOption("knitr.in.progress", "false")) == "true") { FALSE } else if (tolower(getOption("rstudio.notebook.executing", "false")) == "true") { FALSE } else if (identical(Sys.getenv("TESTTHAT"), "true")) { FALSE } else { interactive() } } update <- function (original, new) { if (length(new)) { original[names(new)] <- new } original } get_maintainer_email <- function(path = ".") { path <- normalizePath(path) if (is_dir(path)) { if (!file.exists(file.path(path, "DESCRIPTION"))) { stop("No 'DESCRIPTION' file found") } parse_email(desc::desc_get_maintainer(path)) } else { dir.create(tmp <- tempfile()) files <- utils::untar(path, list = TRUE, tar = "internal") desc <- grep("^[^/]+/DESCRIPTION$", files, value = TRUE) if (length(desc) < 1) stop("No 'DESCRIPTION' file in package") utils::untar(path, desc, exdir = tmp, tar = "internal") parse_email(desc::desc_get_maintainer(file.path(tmp, desc))) } } is_dir <- function(x) { file.info(x)$isdir } #' @importFrom rematch re_match parse_email <- function(x) { unname( re_match(pattern = "<(?<email>[^>]+)>", x)[, "email"] ) } rhub/R/assertions.R0000644000176200001440000000416414604736674013750 0ustar liggesusers is_character <- function(x) { if (!is.character(x)) { structure( FALSE, msg = "{.arg {(.arg)}} must be a character vector without {.code NA}, but it is {.type {x}}.", env = environment() ) } else if (anyNA(x)) { structure( FALSE, msg = "{.arg {(.arg)}} must be a character vector without {.code NA}, but it has {sum(is.na(x))} {.code NA} value{?s}.", env = environment() ) } else { TRUE } } is_optional_character <- function(x) { if (is.null(x) || is_character(x)) return(TRUE) if (!is.character(x)) { structure( FALSE, msg = "{.arg {(.arg)}} must be a character vector without {.code NA}, or NULL, but it is {.type {x}}.", env = environment() ) } else if (anyNA(x)) { structure( FALSE, msg = "{.arg {(.arg)}} must not have {.code NA} values, but it has {sum(is.na(x))} {.code NA} value{?s}.", env = environment() ) } } is_string <- function(x) { if (is.character(x) && length(x) == 1 && !is.na(x)) return(TRUE) if (is.character(x) && length(x) == 1 && is.na(x)) { structure( FALSE, msg = "{.arg {(.arg)}} must not be {.code NA}.", env = environment() ) } else { structure( FALSE, msg = "{.arg {(.arg)}} must be a string (character scalar), but it is {.type {x}}.", env = environment() ) } }
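
# A hedged sketch (comments only, not run) of how these predicates are
# consumed; `rhub_check()` in check.R uses the same pattern, and
# `check_platforms()` below is a made-up helper for illustration only.
# When a predicate returns the `structure(FALSE, msg = ...)` form,
# `assert_that()` turns the attached cli template into the error message:
#
#   check_platforms <- function(platforms) {
#     assert_that(is_optional_character(platforms))
#     invisible(platforms)
#   }
#   check_platforms(c("linux", "windows"))   # passes silently
#   check_platforms(1:3)                     # fails: not a character vector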
is_optional_string <- function(x) { if (is.null(x) || is_string(x)) return(TRUE) structure( FALSE, msg = "{.arg {(.arg)}} must be a string (character scalar) or NULL, but it is {.type {x}}.", env = environment() ) } is_optional_gh_url <- function(x) { if (is.null(x)) return(TRUE) if (!is_string(x)) { structure( FALSE, msg = "{.arg {(.arg)}} must be a character string. You supplied {.type {x}}.", env = environment() ) } else if (!grepl("^https?://", x)) { structure( FALSE, msg = "{.arg {(.arg)}} must be an HTTP or HTTPS URL. You supplied: {.val {x}}.", env = environment() ) } else { TRUE } } rhub/R/compat-vctrs.R0000644000176200001440000003177514603437121014176 0ustar liggesusers # nocov start compat_vctrs <- local({ # Modified from https://github.com/r-lib/rlang/blob/master/R/compat-vctrs.R # Construction ------------------------------------------------------------ # Constructs data frames inheriting from `"tbl"`. This allows the # pillar package to take over printing as soon as it is loaded. # The data frame otherwise behaves like a base data frame. data_frame <- function(...) { new_data_frame(df_list(...), .class = "tbl") } new_data_frame <- function(.x = list(), ..., .size = NULL, .class = NULL) { n_cols <- length(.x) if (n_cols != 0 && is.null(names(.x))) { stop("Columns must be named.", call. = FALSE) } if (is.null(.size)) { if (n_cols == 0) { .size <- 0 } else { .size <- vec_size(.x[[1]]) } } structure( .x, class = c(.class, "data.frame"), row.names = .set_row_names(.size), ... ) } df_list <- function(..., .size = NULL) { vec_recycle_common(list(...), size = .size) } # Binding ----------------------------------------------------------------- vec_rbind <- function(...) { xs <- vec_cast_common(list(...)) do.call(base::rbind, xs) } vec_cbind <- function(...) { xs <- list(...) ptype <- vec_ptype_common(lapply(xs, `[`, 0)) class <- setdiff(class(ptype), "data.frame") xs <- vec_recycle_common(xs) out <- do.call(base::cbind, xs) new_data_frame(out, .class = class) } # Slicing ----------------------------------------------------------------- vec_size <- function(x) { if (is.data.frame(x)) { nrow(x) } else { length(x) } } vec_rep <- function(x, times) { i <- rep.int(seq_len(vec_size(x)), times) vec_slice(x, i) } vec_recycle_common <- function(xs, size = NULL) { sizes <- vapply(xs, vec_size, integer(1)) n <- unique(sizes) if (length(n) == 1 && is.null(size)) { return(xs) } n <- setdiff(n, 1L) ns <- length(n) if (ns == 0) { if (is.null(size)) { return(xs) } } else if (ns == 1) { if (is.null(size)) { size <- n } else if (ns != size) { stop("Inputs can't be recycled to `size`.", call. = FALSE) } } else { stop("Inputs can't be recycled to a common size.", call. = FALSE) } to_recycle <- sizes == 1L xs[to_recycle] <- lapply(xs[to_recycle], vec_rep, size) xs } vec_slice <- function(x, i) { if (is.logical(i)) { i <- which(i) } stopifnot(is.numeric(i) || is.character(i)) if (is.null(x)) { return(NULL) } if (is.data.frame(x)) { # We need to be a bit careful to be generic. First empty all # columns and expand the df to final size. 
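      # Taking a zero-column subset first preserves the data frame's class and
      # attributes; the sliced columns are then written back into it below.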
out <- x[i, 0, drop = FALSE] # Then fill in with sliced columns out[seq_along(x)] <- lapply(x, vec_slice, i) # Reset automatic row names to work around `[` weirdness if (is.numeric(attr(x, "row.names"))) { row_names <- .set_row_names(nrow(out)) } else { row_names <- attr(out, "row.names") } return(out) } d <- vec_dims(x) if (d == 1) { if (is.object(x)) { out <- x[i] } else { out <- x[i, drop = FALSE] } } else if (d == 2) { out <- x[i, , drop = FALSE] } else { j <- rep(list(quote(expr = )), d - 1) out <- eval(as.call(list(quote(`[`), quote(x), quote(i), j, drop = FALSE))) } out } vec_dims <- function(x) { d <- dim(x) if (is.null(d)) { 1L } else { length(d) } } vec_as_location <- function(i, n, names = NULL) { out <- seq_len(n) names(out) <- names # Special-case recycling to size 0 if (is_logical(i, n = 1) && !length(out)) { return(out) } unname(out[i]) } vec_init <- function(x, n = 1L) { vec_slice(x, rep_len(NA_integer_, n)) } vec_assign <- function(x, i, value) { if (is.null(x)) { return(NULL) } if (is.logical(i)) { i <- which(i) } stopifnot( is.numeric(i) || is.character(i) ) value <- vec_recycle(value, vec_size(i)) value <- vec_cast(value, to = x) d <- vec_dims(x) if (d == 1) { x[i] <- value } else if (d == 2) { x[i, ] <- value } else { stop("Can't slice-assign arrays.", call. = FALSE) } x } vec_recycle <- function(x, size) { if (is.null(x) || is.null(size)) { return(NULL) } n_x <- vec_size(x) if (n_x == size) { x } else if (size == 0L) { vec_slice(x, 0L) } else if (n_x == 1L) { vec_slice(x, rep(1L, size)) } else { stop("Incompatible lengths: ", n_x, ", ", size, call. = FALSE) } } # Coercion ---------------------------------------------------------------- vec_cast_common <- function(xs, to = NULL) { ptype <- vec_ptype_common(xs, ptype = to) lapply(xs, vec_cast, to = ptype) } vec_cast <- function(x, to) { if (is.null(x)) { return(NULL) } if (is.null(to)) { return(x) } if (vec_is_unspecified(x)) { return(vec_init(to, vec_size(x))) } stop_incompatible_cast <- function(x, to) { stop( sprintf("Can't convert <%s> to <%s>.", .rlang_vctrs_typeof(x), .rlang_vctrs_typeof(to) ), call. = FALSE ) } lgl_cast <- function(x, to) { lgl_cast_from_num <- function(x) { if (any(!x %in% c(0L, 1L))) { stop_incompatible_cast(x, to) } as.logical(x) } switch( .rlang_vctrs_typeof(x), logical = x, integer = , double = lgl_cast_from_num(x), stop_incompatible_cast(x, to) ) } int_cast <- function(x, to) { int_cast_from_dbl <- function(x) { out <- suppressWarnings(as.integer(x)) if (any((out != x) | xor(is.na(x), is.na(out)))) { stop_incompatible_cast(x, to) } else { out } } switch( .rlang_vctrs_typeof(x), logical = as.integer(x), integer = x, double = int_cast_from_dbl(x), stop_incompatible_cast(x, to) ) } dbl_cast <- function(x, to) { switch( .rlang_vctrs_typeof(x), logical = , integer = as.double(x), double = x, stop_incompatible_cast(x, to) ) } chr_cast <- function(x, to) { switch( .rlang_vctrs_typeof(x), character = x, stop_incompatible_cast(x, to) ) } list_cast <- function(x, to) { switch( .rlang_vctrs_typeof(x), list = x, stop_incompatible_cast(x, to) ) } df_cast <- function(x, to) { # Check for extra columns if (length(setdiff(names(x), names(to))) > 0 ) { stop("Can't convert data frame because of missing columns.", call. 
= FALSE) } # Avoid expensive [.data.frame method out <- as.list(x) # Coerce common columns common <- intersect(names(x), names(to)) out[common] <- Map(vec_cast, out[common], to[common]) # Add new columns from_type <- setdiff(names(to), names(x)) out[from_type] <- lapply(to[from_type], vec_init, n = vec_size(x)) # Ensure columns are ordered according to `to` out <- out[names(to)] new_data_frame(out) } rlib_df_cast <- function(x, to) { new_data_frame(df_cast(x, to), .class = "tbl") } tib_cast <- function(x, to) { new_data_frame(df_cast(x, to), .class = c("tbl_df", "tbl")) } switch( .rlang_vctrs_typeof(to), logical = lgl_cast(x, to), integer = int_cast(x, to), double = dbl_cast(x, to), character = chr_cast(x, to), list = list_cast(x, to), base_data_frame = df_cast(x, to), rlib_data_frame = rlib_df_cast(x, to), tibble = tib_cast(x, to), stop_incompatible_cast(x, to) ) } vec_ptype_common <- function(xs, ptype = NULL) { if (!is.null(ptype)) { return(vec_ptype(ptype)) } xs <- Filter(function(x) !is.null(x), xs) if (length(xs) == 0) { return(NULL) } if (length(xs) == 1) { out <- vec_ptype(xs[[1]]) } else { xs <- map(xs, vec_ptype) out <- Reduce(vec_ptype2, xs) } vec_ptype_finalise(out) } vec_ptype_finalise <- function(x) { if (is.data.frame(x)) { x[] <- lapply(x, vec_ptype_finalise) return(x) } if (inherits(x, "rlang_unspecified")) { logical() } else { x } } vec_ptype <- function(x) { if (vec_is_unspecified(x)) { return(.rlang_vctrs_unspecified()) } if (is.data.frame(x)) { out <- new_data_frame(lapply(x, vec_ptype)) attrib <- attributes(x) attrib$row.names <- attr(out, "row.names") attributes(out) <- attrib return(out) } vec_slice(x, 0) } vec_ptype2 <- function(x, y) { stop_incompatible_type <- function(x, y) { stop( sprintf("Can't combine types <%s> and <%s>.", .rlang_vctrs_typeof(x), .rlang_vctrs_typeof(y)), call. 
= FALSE ) } x_type <- .rlang_vctrs_typeof(x) y_type <- .rlang_vctrs_typeof(y) if (x_type == "unspecified" && y_type == "unspecified") { return(.rlang_vctrs_unspecified()) } if (x_type == "unspecified") { return(y) } if (y_type == "unspecified") { return(x) } df_ptype2 <- function(x, y) { set_partition <- function(x, y) { list( both = intersect(x, y), only_x = setdiff(x, y), only_y = setdiff(y, x) ) } # Avoid expensive [.data.frame x <- as.list(vec_slice(x, 0)) y <- as.list(vec_slice(y, 0)) # Find column types names <- set_partition(names(x), names(y)) if (length(names$both) > 0) { common_types <- Map(vec_ptype2, x[names$both], y[names$both]) } else { common_types <- list() } only_x_types <- x[names$only_x] only_y_types <- y[names$only_y] # Combine and construct out <- c(common_types, only_x_types, only_y_types) out <- out[c(names(x), names$only_y)] new_data_frame(out) } rlib_df_ptype2 <- function(x, y) { new_data_frame(df_ptype2(x, y), .class = "tbl") } tib_ptype2 <- function(x, y) { new_data_frame(df_ptype2(x, y), .class = c("tbl_df", "tbl")) } ptype <- switch( x_type, logical = switch( y_type, logical = x, integer = y, double = y, stop_incompatible_type(x, y) ), integer = switch( .rlang_vctrs_typeof(y), logical = x, integer = x, double = y, stop_incompatible_type(x, y) ), double = switch( .rlang_vctrs_typeof(y), logical = x, integer = x, double = x, stop_incompatible_type(x, y) ), character = switch( .rlang_vctrs_typeof(y), character = x, stop_incompatible_type(x, y) ), list = switch( .rlang_vctrs_typeof(y), list = x, stop_incompatible_type(x, y) ), base_data_frame = switch( .rlang_vctrs_typeof(y), base_data_frame = , s3_data_frame = df_ptype2(x, y), rlib_data_frame = rlib_df_ptype2(x, y), tibble = tib_ptype2(x, y), stop_incompatible_type(x, y) ), rlib_data_frame = switch( .rlang_vctrs_typeof(y), base_data_frame = , rlib_data_frame = , s3_data_frame = rlib_df_ptype2(x, y), tibble = tib_ptype2(x, y), stop_incompatible_type(x, y) ), tibble = switch( .rlang_vctrs_typeof(y), base_data_frame = , rlib_data_frame = , tibble = , s3_data_frame = tib_ptype2(x, y), stop_incompatible_type(x, y) ), stop_incompatible_type(x, y) ) vec_slice(ptype, 0) } .rlang_vctrs_typeof <- function(x) { if (is.object(x)) { class <- class(x) if (identical(class, "rlang_unspecified")) { return("unspecified") } if (identical(class, "data.frame")) { return("base_data_frame") } if (identical(class, c("tbl", "data.frame"))) { return("rlib_data_frame") } if (identical(class, c("tbl_df", "tbl", "data.frame"))) { return("tibble") } if (inherits(x, "data.frame")) { return("s3_data_frame") } class <- paste0(class, collapse = "/") stop(sprintf("Unimplemented class <%s>.", class), call. = FALSE) } type <- typeof(x) switch( type, NULL = return("null"), logical = if (vec_is_unspecified(x)) { return("unspecified") } else { return(type) }, integer = , double = , character = , raw = , list = return(type) ) stop(sprintf("Unimplemented type <%s>.", type), call. = FALSE) } vec_is_unspecified <- function(x) { !is.object(x) && typeof(x) == "logical" && length(x) && all(vapply(x, identical, logical(1), NA)) } .rlang_vctrs_unspecified <- function(x = NULL) { structure( rep(NA, length(x)), class = "rlang_unspecified" ) } .rlang_vctrs_s3_method <- function(generic, class, env = parent.frame()) { fn <- get(generic, envir = env) ns <- asNamespace(topenv(fn)) tbl <- ns$.__S3MethodsTable__. 
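  # Walk the class vector in order and return the first method found, looking
  # first in the namespace's registered S3 methods table and then in the
  # global environment; return NULL if no class has a method.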
for (c in class) { name <- paste0(generic, ".", c) if (exists(name, envir = tbl, inherits = FALSE)) { return(get(name, envir = tbl)) } if (exists(name, envir = globalenv(), inherits = FALSE)) { return(get(name, envir = globalenv())) } } NULL } environment() }) data_frame <- compat_vctrs$data_frame as_data_frame <- function(x) { if (is.matrix(x)) { x <- as.data.frame(x, stringsAsFactors = FALSE) } else { x <- compat_vctrs$vec_recycle_common(x) } compat_vctrs$new_data_frame(x, .class = "tbl") } # nocov end rhub/R/rhubv1.R0000644000176200001440000000643014604740074012756 0ustar liggesusers# nocov start deprecated <- function() { message( "This function is deprecated and defunct since rhub v2.\n", "Please see `?rhubv2` on transitioning to the new rhub functions." ) } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export get_check <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export check_for_cran <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export #' @rdname check_shortcuts check_on_linux <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_windows <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_macos <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_debian <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_ubuntu <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_fedora <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_solaris <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_on_centos <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_with_roldrel <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_with_rrelease <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_with_rpatched <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_with_rdevel <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_with_valgrind <- function(...) { deprecated() } #' @export #' @rdname check_shortcuts check_with_sanitizers <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export check <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export validate_email <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export list_validated_emails <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export last_check <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export list_my_checks <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export list_package_checks <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export local_check_linux <- function(...) { deprecated() } #' This function is deprecated and defunct. 
Please see [rhubv2]. #' #' @param ... Deprecated. #' @export local_check_linux_images <- function(...) { deprecated() } #' This function is deprecated and defunct. Please see [rhubv2]. #' #' @param ... Deprecated. #' @export platforms <- function(...) { deprecated() } # nocov end rhub/R/cli.R0000644000176200001440000000035614603437121012312 0ustar liggesusers cli_status <- function(msg, ..., .auto_close = FALSE) { msg cli::cli_status( msg = "{.alert {msg}}", msg_done = "{.alert-success {msg}}", msg_failed = "{.alert-danger {msg}}", .auto_close = .auto_close, ... ) } rhub/R/api.R0000644000176200001440000000520414604751617012323 0ustar liggesusersbaseurl <- function() { Sys.getenv("RHUB_SERVER", "https://builder2.rhub.io/api/-") } default_headers <- c( "accept" = "application/json", "content-type" = "application/json", "user-agent" = "R-hub client" ) #' @importFrom jsonlite toJSON query <- function(endpoint, method = "GET", headers = character(), data = NULL, data_form = NULL, sse = FALSE) { url <- paste0(baseurl(), endpoint) headers <- update(default_headers, headers) response <- if (sse) { query_sse(method, url, headers, data, data_form) } else { query_plain(method, url, headers, data, data_form) } if (response$status_code >= 400) { cnd <- http_error(response) tryCatch({ bdy <- jsonlite::fromJSON( rawToChar(response$content), simplifyVector = FALSE ) }, error = function(err) { stop(cnd) }) if ("message" %in% names(bdy)) { throw(new_error(bdy[["message"]]), parent = cnd) } else { stop(cnd) } } response } query_sse <- function(method, url, headers, data, data_form) { synchronise( query_sse_async(method, url, headers, data, data_form) ) } query_sse_async <- function(method, url, headers, data, data_form) { if (method == "GET") { q <- http_get(url, headers = headers) } else if (method == "POST") { q <- http_post( url, headers = headers, data = data, data_form = data_form ) } else { stop("Unexpected HTTP verb, internal rhub error") } msgs <- list() handle_sse <- function(evt) { msgs <<- c(msgs, list(evt)) if (evt[["event"]] == "progress") { # ignore malformed event messages tryCatch({ msg <- jsonlite::fromJSON(evt[["data"]]) cli::cli_alert(msg, .envir = emptyenv()) }, error = function(e) NULL) } else if (evt[["event"]] == "result") { cli::cli_alert_success("Done.") } else if (evt[["event"]] == "error") { tryCatch({ msg <- jsonlite::fromJSON(evt[["data"]]) cli::cli_alert_danger(msg, .envir = emptyenv()) }, error = function(e) cli::cli_alert_danger("Error from server")) stop("Aborting") } } evs <- sse_events$new(q) evs$listen_on("event", handle_sse) q$then(function(response) { response$sse <- msgs response }) } query_plain <- function(method, url, headers, data, data_form) { response <- if (method == "GET") { synchronise(http_get(url, headers = headers)) } else if (method == "POST") { synchronise(http_post( url, headers = headers, data = data, data_form = data_form )) } else { stop("Unexpected HTTP verb, internal rhub error") } response }rhub/R/rc.R0000644000176200001440000002643414605470156012163 0ustar liggesusers# ========================================================================= # API # ========================================================================= #' Request a new token for submissions to the R Consortium runners #' #' To build and check R packages on the RC runners of R-hub, you'll need #' to verify your email address. R-hub will send a token to your email #' address, and this token will be stored on your computer. 
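#'
#' A rough illustration (the address and token below are placeholders, not
#' real values): call the function with no arguments for the interactive
#' flow, or pass both arguments if you already received a token.
#' ```
#' rc_new_token()
#' rc_new_token("jane@example.com", "token-received-by-email")
#' ```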
#' #' You need to store a token on every computer you want to submit #' jobs from, either using the same token from the email you got, or #' you can request additional tokens for the new machines. Your old token #' will stay valid as well. #' #' If you already have a token from a previous version of R-hub, you can #' reuse that and you don't need to do anything. #' #' Run #' ``` #' rhub:::email_file() #' ``` #' to see the file rhub uses to store your tokens. #' #' @param email Email address to verify. We try to detect this, but #' if the detection fails, you can specify it explicitly. #' If this argument is missing (or `NULL`), then you can specify it #' interactively. #' @param token Token to add. If you already received a token in an email #' from R-hub, you can specify that here. #' #' @export #' @family RC runners API rc_new_token <- function(email = NULL, token = NULL) { if (is.null(email) || is.null(token)) { if (!is_interactive()) { throw(pkg_error("No email or no token and not in interactive mode")) } return(rc_new_token_interactive(email, token)) } email_add_token(email, token) cli::cli_alert_success("Added token for {.val {email}}.", wrap = TRUE) cli::cli_alert_info("R-hub tokens are stored at {.path {email_file()}}.") invisible() } # ------------------------------------------------------------------------- #' Show your tokens for the R Consortium runners #' #' Lists all tokens stored on the local machine. #' #' @return Data frame with string columns `email` and `token`. #' @export #' @family RC runners API rc_list_local_tokens <- function() { list_validated_emails2(message = FALSE, msg_if_empty = FALSE) } # ------------------------------------------------------------------------- #' List your repositories created by the R Consortium runners #' #' Lists repositories created by [rc_submit()] submissions. #' #' @param email Email address. We try to detect this, but #' if the detection fails, you can specify it explicitly. #' #' @return Data frame with columns: #' #' * `repo_name`: Name of the repository. #' * `repo_url`: URL of the repository. #' * `builds_url`: URL to the builds of the repository. #' #' Additional columns and customized printing will probably be added #' later to the result. #' #' @export #' @family RC runners API rc_list_repos <- function(email = NULL) { email <- email %||% guess_email(message = TRUE) resp <- query("/repos", headers = get_auth_header(email)) jsonlite::fromJSON(rawToChar(resp$content)) } # ------------------------------------------------------------------------- #' Submit a package to the R Consortium runners #' #' @param path Path to package file or package directory. #' @param platforms Platforms to check. See [rhub_platforms()] for a #' current list. If not specified, then you can select the platforms #' interactively. Must be specified in non-interactive sessions. #' @param email Email address. You must have a token on the local machine #' that corresponds to the email address, see [rc_new_token()]. #' If not specified (or `NULL`) then the email address of the package #' maintainer is used. #' @param confirmation You must set this to `TRUE` to submit a package #' from a non-interactive session. #' @return A list with data about the submission, invisibly. #' Currently it has: #' #' * `result`: Should be the string `"OK"`. #' * `repo_url`: URL to the repository. #' * `actions_url`: URL to the builds inside the repository. #' * `id`: Build id. This is a string with a randomly generated id.
#' * `name`: Build name, this is a string, the concatenation of the #' build platforms. #' #' More fields might be added later. #' #' @export #' @family RC runners API #' @seealso [rhub_platforms()] for a list of supported platforms. rc_submit <- function(path = ".", platforms = NULL, email = NULL, confirmation = NULL) { if (!isTRUE(confirmation) && !is_interactive()) { throw(pkg_error( "You need to set {.arg confirmation} to {.val TRUE} to submit packages to R-hub from non-interactive R sessions." )) } tryCatch({ pkg_name <- suppressWarnings(desc::desc_get("Package", file = path)[[1]]) if (is.na(pkg_name)) stop() }, error = function(e) { throw(pkg_error( "Could not query R package name at {.path {path}}.", i = paste( "Make sure that {.arg path} is an R package or a directory", "containing an R package." ) )) }) email <- email %||% get_maintainer_email(path = path) platforms <- select_platforms(platforms) if (is_dir(path)) { path <- pkgbuild::build(path = path) } id <- random_id() ep <- paste0("/job/", pkg_name) form <- list( config = curl::form_data(paste(platforms, collapse = ",")), id = curl::form_data(id), package = curl::form_file(path) ) if (!isTRUE(confirmation)) { cat(cli::col_cyan(cli::rule("Confirmation"))) cli::cli_bullets(c( "!" = "Your package will be publicly readable at {.url https://github.com/r-hub2}.", ">" = "You will need a GitHub account to view the build logs.", ">" = "Only continue if you are fine with this.", ">" = "See the {.fn rhub_setup} function for an alternative way of using R-hub." )) ans <- trimws(readline( prompt = "\nPlease type 'yes' to continue: " )) cli::cli_text() if (ans != 'yes' && ans != "'yes'") { throw(pkg_error("Aborted R-hub submission.")) } } resp <- query( method = "POST", ep, sse = TRUE, data_form = form, headers = c( get_auth_header(email), "content-type" = "multipart/form-data", "accept" = "text/event-stream", "cache-control" = "no-cache", "connection" = "keep-alive" ) ) resevt <- Filter(function(x) x[["event"]] == "result", resp$sse) if (length(resevt) == 0) { stop("Invalid response from R-hub server, please report this.") } retval <- jsonlite::fromJSON( resevt[[1]][["data"]], simplifyVector = FALSE ) invisible(retval) } # ========================================================================= # Internals # ========================================================================= guess_email <- function(path = ".", message = TRUE) { maint <- tryCatch(get_maintainer_email(path), error = function(e) NULL) if (!is.null(maint)) { if (message) { cli::cli_alert_info( wrap = TRUE, "Using maintainer email address {.val {maint}}." ) } return(maint) } guess <- email_address() if (message) { cli::cli_alert_info( wrap = TRUE, "Using email address {.val {guess}}." ) } guess } get_auth_header <- function(email) { valid <- list_validated_emails2(message = FALSE) if (! email %in% valid$email) { throw(pkg_error( "Can't find token for email address {.val {email}}.", i = "Call {.code rhub::rc_new_token()} to get a token." )) } token <- valid$token[match(email, valid$email)] c("Authorization" = paste("Bearer", token)) } #' @importFrom cli symbol #' @importFrom utils menu #' @importFrom whoami email_address get_email_to_validate <- function(path) { ## Find out email first. List currently validated addresses, ## Offer address by whoami::email_address(), and also the ## maintainer address, if any. 
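  ## The choices below are assembled in this order: addresses that already
  ## have a token, then the whoami guess and the package maintainer address
  ## (when they are new), and finally a "New email address" entry.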
valid <- list_validated_emails2(msg_if_empty = FALSE) guess <- tryCatch(email_address(), error = function(e) NULL) maint <- tryCatch(get_maintainer_email(path), error = function(e) NULL) choices <- rbind( if (nrow(valid)) cbind(valid = TRUE, valid), if (!is.null(guess) && ! guess %in% valid$email) { data_frame(valid = FALSE, email = guess, token = NA) }, if (!is.null(maint) && ! maint %in% valid$email && maint != guess) { data_frame(valid = FALSE, email = maint, token = NA) }, data_frame(valid = NA, email = "New email address", token = NA) ) ## Only show the menu if there is more than one thing there if (nrow(choices) != 1) { choices_str <- paste( sep = " ", ifelse( choices$valid & !is.na(choices$valid), cli::col_green(cli::symbol$tick), " " ), choices$email ) cat("\n") title <- cli::col_yellow(paste0( cli::symbol$line, cli::symbol$line, " Choose email address to request token for (or 0 to exit)" )) ch <- menu(choices_str, title = title) if (ch == 0) throw(pkg_error("Cancelled requesting new token")) } else { ch <- 1 } ## Get another address if that is selected if (is.na(choices$valid[ch])) { cat("\n") email <- readline("Email address: ") } else { email <- choices$email[ch] } email } list_validated_emails2 <- function(message = is_interactive(), msg_if_empty = TRUE) { file <- email_file() res <- if (file.exists(file)) { if (message) { cli::cli_alert( "R-hub tokens are stored at {.path {email_file()}}." ) } read_token_file(file) } else { data.frame( email = character(), token = character(), stringsAsFactors = FALSE ) } if (is_interactive() && nrow(res) == 0) { if (msg_if_empty) { cli::cli_alert_info("No R-hub tokens found.") } invisible(res) } else { res } } #' @importFrom rappdirs user_data_dir email_file <- function() { rhub_data_dir <- user_data_dir("rhub", "rhub") file.path(rhub_data_dir, "validated_emails.csv") } rc_new_token_interactive <- function(email = NULL, token = NULL, path = ".") { if (is.null(email)) email <- get_email_to_validate(path) ## Token next. For this we need to make an API query. if (is.null(token)) { query( method = "POST", "/user/validate", headers = c("content-type" = "application/x-www-form-urlencoded"), data = jsonlite::toJSON(list(email = jsonlite::unbox(email))) ) cli::cli_alert_info( "Please check your emails for the R-hub access token." ) token <- readline("Token: ") } ## We got everything now rc_new_token(email, token) } #' @importFrom utils read.csv write.table email_add_token <- function(email, token) { file <- email_file() if (!file.exists(file)) { parent <- dirname(file) if (!file.exists(parent)) dir.create(parent, recursive = TRUE) tokens <- data.frame( V1 = character(), V2 = character(), stringsAsFactors = FALSE ) } else { tokens <- read.csv(file, stringsAsFactors = FALSE, header = FALSE) } if (! 
email %in% tokens[,1]) { tokens <- rbind(tokens, c(email, token)) } else { tokens[match(email, tokens[,1]), 2] <- token } write_token_file(tokens, file) invisible() } read_token_file <- function(path) { structure( read.csv(path, stringsAsFactors = FALSE, header = FALSE), names = c("email", "token") ) } write_token_file <- function(tokens, path) { write.table( tokens, file = path, sep = ",", col.names = FALSE, row.names = FALSE ) }rhub/R/setup.R0000644000176200001440000001155214605003227012701 0ustar liggesuserscheck_rpkg_root <- function(rpkg_root, git_root) { if (rpkg_root != git_root) { throw(pkg_error( "R-hub currently requires that your R package is at the root of the git repository.", i = "Your R package is at {.path {rpkg_root}}.", i = "Your git repository root is at {.path {git_root}}." )) } } #' Setup the current R package for use with R-hub #' #' It adds or updates the R-hub workflow file to the current package, #' and advises on next steps. #' #' @param overwrite if `TRUE`, [rhub_setup()] will overwrite an already #' existing workflow file. #' @return Nothing. #' #' @export rhub_setup <- function(overwrite = FALSE) { cli::cli_bullets("Setting up R-hub v2.") rpkg_root <- setup_find_r_package() git_root <- setup_find_git_root() check_rpkg_root(rpkg_root, git_root) url <- Sys.getenv( "RHUB_WORKFLOW_URL", "https://raw.githubusercontent.com/r-hub/actions/v1/workflows/rhub.yaml" ) resp <- synchronise(http_get(url)) if (resp$status_code != 200) { throw(pkg_error( "Failed to download R-hub workflow file from GitHub.", i = "URL: {.url {url}}.", i = "HTTP status: {resp$status_code}.", i = "Make sure that you are online and GitHub is up." )) } wf <- resp$content wfc <- rawToChar(wf) Encoding(wfc) <- "UTF-8" updated <- FALSE wf_file <- file.path(git_root, ".github", "workflows", "rhub.yaml") if (file.exists(wf_file)) { wf_current <- read_file(wf_file) if (wfc != wf_current) { if (overwrite) { dir.create(dirname(wf_file), showWarnings = FALSE, recursive = TRUE) writeBin(wf, wf_file) updated <- TRUE cli::cli_bullets(c( i = "Updated existing workflow file at {.file {wf_file}}, as requested" )) } else { throw(pkg_error( "Workflow file already exists at {.file {wf_file}}.", i = "Use {.code overwrite = TRUE} to overwrite it." )) } } else { cli::cli_bullets(c( v = "Workflow file {.file {wf_file}} already exists and it is current." )) } } else { dir.create(dirname(wf_file), showWarnings = FALSE, recursive = TRUE) writeBin(wf, wf_file) updated <- TRUE cli::cli_bullets(c( v = "Created workflow file {.file {wf_file}}." )) } cli::cli_text() cli::cli_bullets(c( "Notes:", "*" = "The workflow file must be added to the {.emph default} branch of the GitHub repository.", "*" = "GitHub actions must be enabled for the repository. They are disabled for forked repositories by default." )) cli::cli_text() cli::cli_bullets(c( "Next steps:", "*" = "Add the workflow file to git using {.code git add <filename>}.", "*" = if (updated) "Commit it to git using {.code git commit}.", "*" = if (!updated) "Commit it to git using {.code git commit} (if not committed already).", "*" = if (updated) "Push the commit to GitHub using {.code git push}.", "*" = if (!updated) "Push the commit to GitHub using {.code git push} (if not pushed already).", "*" = "Call {.run rhub::rhub_doctor()} to check that you have set up R-hub correctly.", "*" = "Call {.run rhub::rhub_check()} to check your package."
)) invisible(NULL) } setup_find_r_package <- function() { pid <- cli_status("Is the current directory part of an R package?") tryCatch( rpkg_root <- rprojroot::find_root(rprojroot::is_r_package), error = function(e) { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. = FALSE, "The current directory is not part of an R package.", i = "You can create an R package in the current directory if you run {.run usethis::create_package('.')}.", i = "Alternatively, if you want to use R-hub for a package that is already on GitHub, supply the {.arg gh_url} argument to {.fun rhub_setup}." )) } ) cli::cli_status_clear(pid, result = "clear") cli::cli_alert_success("Found R package at {.file {rpkg_root}}.") rpkg_root } setup_find_git_root <- function() { pid <- cli_status( "Is the current directory part of a git repository?" ) tryCatch( git_root <- rprojroot::find_root(rprojroot::is_git_root), error = function(e) { cli::cli_status_clear(pid, result = "failed") throw(pkg_error( call. = FALSE, "The current R package is not in a git repository.", i = "You can create a git repository for the current package or project if you run {.run usethis::use_git()}.", i = "Alternatively, if you want to use R-hub for a package that is already on GitHub, supply the {.arg gh_url} argument to {.fun rhub_setup}." )) } ) cli::cli_status_clear(result = "clear") cli::cli_alert_success("Found git repository at {.file {git_root}}.") git_root } rhub/R/aa-assertthat.R0000644000176200001440000001212214604736426014311 0ustar liggesusers# nocov start # This is for the embedded async `on_failure<-` <- function (x, value) { stopifnot( is.function(x), identical(names(formals(value)), c("call", "env")) ) attr(x, "msg") <- value x } assert_that <- function(..., env = parent.frame(), msg = NULL) { asserts <- eval(substitute(alist(...))) for (assertion in asserts) { res <- tryCatch({ eval(assertion, env) }, assertError = function(e) { structure(FALSE, msg = e$message) }) check_result(res) if (res) next if (is.null(msg)) { msg <- get_message(res, assertion, env) evalenv <- attr(res, "env") %||% env } else { evalenv <- env } throw(assert_error( assertion, res, msg, call. = sys.call(-1), .envir = evalenv, ), frame = env) } invisible(TRUE) } assert_error <- function(assertion, result, msg, .data = NULL, .class = NULL, .envir = parent.frame(), call. = TRUE) { myenv <- new.env(parent = .envir) myenv$.arg <- if (length(assertion) >= 2) deparse(assertion[[2]]) myenv$.arg2 <- if (length(assertion) >= 3) deparse(assertion[[3]]) .hide_from_trace <- TRUE cnd <- new_error( call. = call., cli::format_error( .envir = myenv, msg ) ) if (length(.data)) cnd[names(.data)] <- .data if (length(class)) class(cnd) <- unique(c(.class, "assertError", class(cnd))) cnd } check_result <- function(x) { if (!is.logical(x)) { throw(pkg_error( "{.fun assert_that}: assertion must return a logical value.", "i" = "it returned {.type {x}} instead." )) } if (length(x) != 1) { throw(pkg_error( "{.fun assert_that}: assertion must return a scalar.", "i" = "it returned a vector of length {length(x)}." )) } if (any(is.na(x))) { throw(pkg_error( "{.fun assert_that}: assertion must not return {.code NA}." 
)) } TRUE } get_message <- function(res, call, env = parent.frame()) { if (has_attr(res, "msg")) { return(attr(res, "msg")) } f <- eval(call[[1]], env) if (is.call(call) && !is.primitive(f)) call <- match.call(f, call) fname <- deparse(call[[1]]) base_fs[[fname]] %||% fail_default(call, env) } # The default failure message works in the same way as stopifnot, so you can # continue to use any function that returns a logical value: you just won't # get a friendly error message. # The code below says you get the first 60 characters plus a ... fail_default <- function(call, env) { call_string <- deparse(call, width.cutoff = 60L) if (length(call_string) > 1L) { call_string <- paste0(call_string[1L], "...") } paste0(call_string, " is not true") } has_attr <- function(x, which) { if (!is.null(attr(x, which, exact = TRUE))) return(TRUE) structure( FALSE, msg = "{.arg {(.arg)}} must have attribute {.code {which}}.", env = environment() ) } "%has_attr%" <- has_attr base_fs <- new.env(parent = emptyenv()) # nocov start logical_is_not <- function(failed) { paste0("{.arg {(.arg)}} must ", failed, " {.arg {(.arg2)}}.") } base_fs$"==" <- logical_is_not("equal") base_fs$"<" <- logical_is_not("be less than") base_fs$">" <- logical_is_not("be greater than") base_fs$">=" <- logical_is_not("be greater than or equal to") base_fs$"<=" <- logical_is_not("be less than or equal to") base_fs$"!=" <- logical_is_not("not be equal to") is_not <- function(thing) { paste0("{.arg {(.arg)}} must be ", thing, ".") } # nocov end # Vectors base_fs$is.atomic <- is_not("an atomic vector") base_fs$is.character <- is_not("a character vector") base_fs$is.complex <- is_not("a complex vector") base_fs$is.double <- is_not("a numeric vector") base_fs$is.integer <- is_not("an integer vector") base_fs$is.numeric <- is_not("a numeric or integer vector") base_fs$is.raw <- is_not("a raw vector") base_fs$is.vector <- is_not("an atomic vector without attributes") # Factors base_fs$is.factor <- is_not("a factor") base_fs$is.ordered <- is_not("an ordered factor") # More complicated data structures base_fs$is.array <- is_not("an array") base_fs$is.data.frame <- is_not("a data frame") base_fs$is.list <- is_not("a list") base_fs$is.matrix <- is_not("a matrix") base_fs$is.null <- is_not("{.code NULL}") # Functions and environments base_fs$is.environment <- is_not("an environment") base_fs$is.function <- is_not("a function") base_fs$is.primitive <- is_not("a primitive function") # Computing on the language base_fs$is.call <- is_not("a quoted call") base_fs$is.expression <- is_not("an expression object") base_fs$is.name <- is_not("a name") base_fs$is.pairlist <- is_not("a pairlist") base_fs$is.recursive <- is_not("a recursive object") base_fs$is.symbol <- is_not("a name") # Catch all base_fs$"&&" <- "{.arg {(.arg)}} and {.arg {(.arg2)}} must both be true." base_fs$"||" <- "One of {.arg {(.arg)}} and {.arg {(.arg2)}} must be true." base_fs$any <- "At least one of {.arg {(.arg)}} must be true." base_fs$all <- "All of {.arg {(.arg)}} must be true." base_fs$file.exists <- "Path {.arg {(.arg)}} must exist." base_fs$identical <- "{.arg {(.arg)}} must be identical to {.arg {(.arg2)}}." 
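# A rough sketch of how these templates surface (assuming cli renders the
# {.arg} fields with backticks):
#   assert_that(is.character(1:3))
#   #> Error: `1:3` must be a character vector.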
# nocov end rhub/NAMESPACE0000644000176200001440000000263614603437121012441 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("[",rhub_platforms) S3method(format,rhub_platforms) S3method(format,rhub_platforms_summary) S3method(print,rhub_platforms) S3method(print,rhub_platforms_summary) S3method(summary,rhub_platforms) export(check) export(check_for_cran) export(check_on_centos) export(check_on_debian) export(check_on_fedora) export(check_on_linux) export(check_on_macos) export(check_on_solaris) export(check_on_ubuntu) export(check_on_windows) export(check_with_rdevel) export(check_with_roldrel) export(check_with_rpatched) export(check_with_rrelease) export(check_with_sanitizers) export(check_with_valgrind) export(get_check) export(last_check) export(list_my_checks) export(list_package_checks) export(list_validated_emails) export(local_check_linux) export(local_check_linux_images) export(platforms) export(rc_list_local_tokens) export(rc_list_repos) export(rc_new_token) export(rc_submit) export(rhub_check) export(rhub_doctor) export(rhub_platforms) export(rhub_setup) export(validate_email) importFrom(R6,R6Class) importFrom(cli,symbol) importFrom(jsonlite,toJSON) importFrom(rappdirs,user_data_dir) importFrom(rematch,re_match) importFrom(utils,getSrcDirectory) importFrom(utils,getSrcFilename) importFrom(utils,getSrcLocation) importFrom(utils,head) importFrom(utils,menu) importFrom(utils,modifyList) importFrom(utils,read.csv) importFrom(utils,write.table) importFrom(whoami,email_address) rhub/LICENSE0000644000176200001440000000007214603437121012217 0ustar liggesusersYEAR: 2019-2024 COPYRIGHT HOLDER: R Consortium, Posit PBC rhub/NEWS.md0000644000176200001440000000513314762416740012325 0ustar liggesusers# rhub 2.0.1 * rhub now avoids a message from recent versions of the R6 package, about the finalize method of an R6 class having to be private. * Using platform aliases works correctly now when selecting platforms. # rhub 2.0.0 ## R-hub v2 This is a completely new system, see `?rhubv2` manual page or the 'Getting started with R-hub v2' article at https://r-hub.github.io/rhub to start. Previous functions are now deprecated and defunct. They will be removed in the next version of the package. # rhub 1.1.2 * Replace `platform` parameter with `platforms` in `check()` (#497). * Update shortcut for Mac OS platform (@echasnovski, #393) * `cran_summary()` now works even if there is a NOTE/WARNING/ERROR in one platform and nothing on other platforms (@fabian-s, #259). * Check results have now a `get_ids()` method, to easily query the id(s) of the check. # rhub 1.1.1 ## Enhancements * `cran_summary()` now messages that we recommend to fix all NOTEs, WARNINGs and ERRORs before a CRAN submission when the check results aren't 0 NOTE, 0 WARNING, 0 ERROR. * `cran_summary()` now outputs informative messages when any of the builds of the group hasn't completed (yet, or at all). ## Bug fixes * `cran_summary()` now works for packages whose R CMD Check result include no NOTE/WARNING/ERROR, and gives an informative error message when not all builds are completed yet. * `cran_summary()` now prints lines to screen without unwanted indentation. # rhub 1.1.0 ## New features * New `local_check_linux()` function to run an R-hub check on the local host's Docker. New `local_check_linux_images()` function to list R-hub Docker images. * New `check_on_solaris()` shortcut to check on Solaris X86, without building the PDF manual or the vignettes. 
* New `get_check()` function that works with check ids, or a check group id. * `list_package_checks()` and `list_my_checks()` now output a `tibble`, that is nicely formatted when printed to the screen. * The output of `get_check()`, `check()`, `check_on_`, `check_for_cran()`, etc. functions gained * an `urls()` method returning a `data.frame` with URLs to the html and text logs, as well as the artifacts, of the check(s); * a `browse()` method replacing the `web()` method for opening the URLs corresponding to a `rhub_check` object. * New `cran_summary()` method to print a summary for a group or set of checks. ## Bug fixes * In printing methods the submitted time is now always correct thanks to explicitly specifying units for `as.numeric.difftime` (@jimhester, #94 and @schloerke, #135). # rhub 1.0.2 First public release. rhub/inst/0000755000176200001440000000000014603437121012170 5ustar liggesusersrhub/inst/header.md0000644000176200001440000000151514603437121013744 0ustar liggesusers # rhub > R-hub v2 [![Lifecycle: experimental](https://img.shields.io/badge/lifecycle-experimental-orange.svg)](https://lifecycle.r-lib.org/articles/stages.html#experimental) [![R-CMD-check](https://github.com/r-hub/rhub/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/r-hub/rhub/actions/workflows/R-CMD-check.yaml) [![](https://www.r-pkg.org/badges/version/rhub)](https://www.r-pkg.org/pkg/rhub) [![Codecov test coverage](https://codecov.io/gh/r-hub/rhub/branch/main/graph/badge.svg)](https://app.codecov.io/gh/r-hub/rhub?branch=main) R-hub v2 uses GitHub Actions to run `R CMD check` and similar package checks. The rhub package helps you set up R-hub v2 for your R package, and start running checks. --- rhub/man/0000755000176200001440000000000014762416757012010 5ustar liggesusersrhub/man/rc_list_local_tokens.Rd0000644000176200001440000000077414603437121016461 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rc.R \name{rc_list_local_tokens} \alias{rc_list_local_tokens} \title{Show your tokens for the R Consortium runners} \usage{ rc_list_local_tokens() } \value{ Data frame with string columns \code{email} and \code{token}. } \description{ Lists all tokens stored on the local machine. } \seealso{ Other RC runners API: \code{\link{rc_list_repos}()}, \code{\link{rc_new_token}()}, \code{\link{rc_submit}()} } \concept{RC runners API} rhub/man/list_validated_emails.Rd0000644000176200001440000000057314603437121016604 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{list_validated_emails} \alias{list_validated_emails} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ list_validated_emails(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/list_package_checks.Rd0000644000176200001440000000056514603437121016231 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{list_package_checks} \alias{list_package_checks} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ list_package_checks(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. 
} rhub/man/list_my_checks.Rd0000644000176200001440000000054614603437121015262 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{list_my_checks} \alias{list_my_checks} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ list_my_checks(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/platforms.Rd0000644000176200001440000000052714603437121014270 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{platforms} \alias{platforms} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ platforms(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/validate_email.Rd0000644000176200001440000000054614603437121015222 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{validate_email} \alias{validate_email} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ validate_email(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/rhub_doctor.Rd0000644000176200001440000000106314603437121014567 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/doctor.R \name{rhub_doctor} \alias{rhub_doctor} \title{Check if the current or the specified package is ready to use with R-hub} \usage{ rhub_doctor(gh_url = NULL) } \arguments{ \item{gh_url}{Use \code{NULL} for the package in the current working directory. Alternatively, use the URL of a GitHub repository that contains an R package that was set up to use with R-hub.} } \description{ Errors if the package or repository is not set up correctly, and advises on possible solutions. } rhub/man/check_for_cran.Rd0000644000176200001440000000054614603437121015210 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{check_for_cran} \alias{check_for_cran} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ check_for_cran(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/local_check_linux_images.Rd0000644000176200001440000000060414603437121017250 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{local_check_linux_images} \alias{local_check_linux_images} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ local_check_linux_images(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. 
} rhub/man/figures/0000755000176200001440000000000014762410770013441 5ustar liggesusersrhub/man/figures/rhub-setup.svg0000644000176200001440000002146714762414175016275 0ustar liggesusersSettingupR-hubv2.FoundRpackageat/private/tmp/cli.Foundgitrepositoryat/private/tmp/cli.Createdworkflowfile/private/tmp/cli/.github/workflows/rhub.yaml.Notes:TheworkflowfilemustbeaddedtothedefaultbranchoftheGitHubrepository.GitHubactionsmustbeenabledfortherepository.Theyaredisabledforforkedrepositoriesbydefault.Nextsteps:Addtheworkflowfiletogitusing`gitadd<filename>`.Commitittogitusing`gitcommit`.PushthecommittoGitHubusing`gitpush`.Call`rhub::rhub_doctor()`tocheckthatyouhavesetupR-hubcorrectly.Call`rhub::rhub_check()`tocheckyourpackage.rhub/man/figures/rhub-check-dark.svg0000644000176200001440000006061614762414203017120 0ustar liggesusersFoundgitrepositoryat/private/tmp/cli.FoundGitHubPAT.Availableplatforms(see`rhub::rhub_platforms()`fordetails):1[VM]linuxR-*(anyversion)ubuntu-latestonG…2[VM]m1-sanR-*(anyversion)macos-latestonGi…3[VM]macosR-*(anyversion)macos-13onGitHub4[VM]macos-arm64R-*(anyversion)macos-latestonGi…5[VM]windowsR-*(anyversion)windows-lateston6[CT]atlasR-devel(2025-03-01r87860)FedoraLinux38(C…7[CT]c23R-devel(2025-02-28r87848)Ubuntu22.04.5LTS8[CT]clang-asanR-devel(2025-03-01r87860)Ubuntu22.04.5LTS9[CT]clang-ubsanR-devel(2025-03-01r87860)Ubuntu22.04.5LTS10[CT]clang16R-devel(2025-02-28r87848)Ubuntu22.04.5LTS11[CT]clang17R-devel(2025-02-28r87848)Ubuntu22.04.5LTS12[CT]clang18R-devel(2025-02-28r87848)Ubuntu22.04.5LTS13[CT]clang19R-devel(2025-02-28r87848)Ubuntu22.04.5LTS14[CT]clang20R-devel(2025-02-28r87848)Ubuntu22.04.5LTS15[CT]donttestR-devel(2025-02-28r87848)Ubuntu22.04.5LTS16[CT]gcc-asanR-devel(2025-03-01r87860)FedoraLinux40(C…17[CT]gcc13R-devel(2025-03-01r87860)FedoraLinux38(C…18[CT]gcc14R-devel(2025-03-01r87860)FedoraLinux40(C…19[CT]gcc15R-devel(2025-03-01r87860)FedoraLinux43(C…20[CT]intelR-devel(2025-03-01r87860)FedoraLinux38(C…21[CT]mklR-devel(2025-03-01r87860)FedoraLinux38(C…22[CT]noldR-devel(2025-03-01r87860)Ubuntu22.04.5LTS23[CT]noremapR-devel(2025-02-28r87848)Ubuntu22.04.5LTS24[CT]nosuggestsR-devel(2025-03-01r87860)FedoraLinux38(C…25[CT]rchkR-devel(2025-03-01r87860)Ubuntu22.04.5LTS26[CT]ubuntu-clangR-devel(2025-03-01r87860)Ubuntu22.04.5LTS27[CT]ubuntu-gcc12R-devel(2025-03-01r87860)Ubuntu22.04.5LTS28[CT]ubuntu-nextR-4.4.3(patched)(2025-02-28r87848)Ubuntu22.04.5LTS29[CT]ubuntu-releaseR-4.4.3(2025-02-28)Ubuntu22.04.5LTS30[CT]valgrindR-devel(2025-03-01r87860)FedoraLinux38(C…Selection(commaseparatednumbers,0tocancel):1,5Checkstarted:linux,windows(cinnamic-elver).See<https://github.com/r-lib/cli/actions>forliveoutput!rhub/man/figures/rhub-doctor.svg0000644000176200001440000001155714762414176016427 0ustar liggesusersFoundRpackageat/private/tmp/cli.Foundgitrepositoryat/private/tmp/cli.FoundGitHubPAT.FoundrepositoryonGitHubat<https://github.com/r-lib/cli>.GitHubPAThastherightscopes.FoundR-hubworkflowindefaultbranch,anditisactive.WOOT!Youarereadytorun`rhub::rhub_check()`onthispackage.rhub/man/figures/rhub-setup-dark.svg0000644000176200001440000002146414762414175017211 0ustar 
liggesusersSettingupR-hubv2.FoundRpackageat/private/tmp/cli.Foundgitrepositoryat/private/tmp/cli.Createdworkflowfile/private/tmp/cli/.github/workflows/rhub.yaml.Notes:TheworkflowfilemustbeaddedtothedefaultbranchoftheGitHubrepository.GitHubactionsmustbeenabledfortherepository.Theyaredisabledforforkedrepositoriesbydefault.Nextsteps:Addtheworkflowfiletogitusing`gitadd<filename>`.Commitittogitusing`gitcommit`.PushthecommittoGitHubusing`gitpush`.Call`rhub::rhub_doctor()`tocheckthatyouhavesetupR-hubcorrectly.Call`rhub::rhub_check()`tocheckyourpackage.rhub/man/figures/rhub-doctor-dark.svg0000644000176200001440000001155414762414176017343 0ustar liggesusersFoundRpackageat/private/tmp/cli.Foundgitrepositoryat/private/tmp/cli.FoundGitHubPAT.FoundrepositoryonGitHubat<https://github.com/r-lib/cli>.GitHubPAThastherightscopes.FoundR-hubworkflowindefaultbranch,anditisactive.WOOT!Youarereadytorun`rhub::rhub_check()`onthispackage.rhub/man/figures/logo.png0000644000176200001440000007170114303631026015103 0ustar liggesusersPNG  IHDRޫhgAMA a cHRMz&u0`:pQ<bKGD pHYs B(xtIME 2TrIDATxyeUwOwϾeB! ;vD@QAţ砲h "(a }2oU~Tg23I澮$3S]V><0TuFPί}]|F0Q\\p袋6&PF  dY .?"'xB,ZHX, ;w#A 8q;ž}Dw466KL<؍@Ō`G?QZZ*ZvZq(_^\ ^࿀p*C,]TK"LɤxW/NAUp8 xK$͛'z!ovG0??$I 9#=hرcw!v=hvG]]O~"ƍgG0Z{@~qW+W ML#o&VZ%-[& "nF{1F0/E`l6q駋{N7DB⬳v]e V3@ fDkkkΉmmmG}ǀhZX#A!a:+ 555ؾ};v)QFDnEJ`3 W\!}jbŊ+^ V[@uq#jKbO>YW\/K.rskwqVPפw$DJeV-fBmF~O>SN9UUٱc3Co PlGz!pQ\ |fzছnb̘1Ht(H,O76eE^$+pʼQrwj>l͛Gss3{)4d dĬF|hXЫnϚ5KwG:4W>n@´,I4E,uq촊!”)S8s?~<{? }7>k/y`}c}'555믿ロ3<5d'O6@6ä́ $g?&yp\{챜s9x<on BӀ@vKGF3*;4 ,x d!.6:yO_s5s=wyxҒ|&+=,]4t{p,\3<H(q%ܖ'dW\.y=òe˨̟)^_YϪ-9#&F9qDYY~:-" k׮T*8 zڎtÑ]^Ym ,&\pEEE>urnlX-E^ŋ3o1ϱ|rVX1SӴ_B#18܃o\s W]uչ BTU%Jxȩi+Ic &%UIMXYz8ab|q|-NsA}SZZ?\s-&N#Pٽ'8nn-EݡiTT*E2$Ji6TNvb¬Cu-`׾--g\k=6cy_g}6n;vXCttxT8\.8̯,ɤG+G ҐeAu.p8eY<ϒcj):D")!@tn0eM_ǒyCDJYǦh%I-U8+))SOeDQCxky 5: K7ESLk+Sp!rJD)diGqQVTE 0α;v6h s`vTU`rqExbq~˦M>?^ADžlBO]淨ܾl2?>e~Bfy &PQ`&C #X 6n?/}7gf.x<.\gfcǎH$2 ?DVl1NP( 媽袋xꩧ{1c!iSAA4V!032Db)L^xB!o6mwuO=pqu!D!tt?w$I p7X'CS&HC.Y,2)I'ļy}ݗ)[ݬ~-  XlW]u>,KX,2B jY1"Khڅ=v?+%K#Zo~*zLt'Yoh 痿e^>C$I4͙,I0yR e !bh]>p,{'՜hS{^/^i;v8bei@0_w\ny}\wuu#I>wvY3+(Fj v 5H&sg3}\qǪ3d{n'@אC}g/$I3w\?1~;Gui$I 2?$ŬX Ќ>%ʇL4M0i/Ϥbaĉ,]ɓ'SWW'566Vg-芧!K; ==zM7_SN9i~槒$IbrF 0\,IPszILMNs~oL[-sz'Xá5u23}>/{ _BΦɲ&goTb p&1 J} ̓骚`RcL}{3 K,!J;ݬΩl1wWl6[ْ%KƎ; PHY2󏩡0w_^$qUN6jgWc$gB[/α3CtXWWW9m$r,[ŝ9 0sLo?1s4?!hH$J4&SHʜwt5!9u(t' vj]tt >pMKA!G_iGaXƁ )lNMhʴl@n .䦛nU+hF(""r'y~h-FQ5ݗAz9Vjj8jz95ՅhkkCQ>;@ M^{x=A±魀d BUSYh'̪ӭ9A*"Jp:|b/bZ![\@>8L׿uJt4M# ŰX,|>RBňECl6 r7jT 2{(TFmP-@r7c*q(4J@QN'~HwL477cqyfw%[ '_j˹+7/PU`0H<bq8M4I%-LYeK޻UdB%@k$@EQp\|`/o8~ C D^Ƽ)ÜNl\t٢^d]wn/>3Le#wQ!$H n;/1^D^(.[<묳?>mmmٳǐ-.RDN?o }M$9̙_ϥ^3@`T)V>"KW8SwIRg宿/IBÎ"aȝ_6Ua77^hQhFQQEE4o{:@O!`['^Λ6p٩cpQ^'^/oe#?dI"|cqExt%IJ1vHTaͺVvt6|5M=ΜY̚Qa<'R*&~:N~h`(}f3B#F06 MG_=3Id͆V@H\Ոh348루ɵAKdIB9o}7wsI;\lKwݮ]Ėmm e̚ooXR*#}crA!o⽵YP}GuS:JVm_rC"+t_LK@J X )HDk0Ϯ#yF(dæSJ 6nn1eʊZQǧ[[L2%}-QxeacInhT=:LNس7@{GMH,]o|=s:bXA&SZ-bHV5 /c՘Hi#9F0$P`z̈́$A,Pߐ=(}!6jXHYhh&y@!@.fU1ml1>ˊjAB/4AJ(FJш%!qM)B$HHL!MO*(jj(Kk>sL~5E$#N{&MM"l&ֺ:ƒtu;ڸd|0 N?_\O}"ݕbAQ)U#WD M|~?Z"C@ KƧ 4&'/\#IHf8诅P8I2fU5AUupꛣ(6[f I9@^Ɨ$)'nUzi£^׶7vF75 ')~"j+LA3#Mԏh<{8AB(w6icޗk0ڃf'K{ٽ?ƚ Ir"I˄c;OpjqQ4RIplr \.+,'] W{s3ȒĄZﰟ-̎wG> ?3cfYh Vb9IBTU%Ht95KK\Y$, p9̝Rn435zl[,;g?&kNڂ.4d2I$VIs'ʰZej?`q3+6}L 3{R {{"ofy*{٥ ;`U>I(R(i+Aev;4'4[RUU4mVN%MKXd?fXQ dn*J\YqE0-OC4YZgH$V+6-eZNn!no35jLZӄaYsXxeij+9{lX6nR#4GJՆ܄4 EQH&i⪪ϕ$, Nݞ&lO},9*ijd''4 `b&+6Z"nd6֚:U7~6UC|@2U9fQ\\hUid1 JrL AX8`_a锖L5﾿xYB*=;͜w{Z?X45Mu[©,3wqC1WJ/V0fAbcX,Z1C$q32͔;*~qB~OXUkUΝYCx÷=6ҟ~ STA"}-1)54Vkz,2[妬 m45Gҟ۟cX&/昣\Vj+*[7ܻzO.S։s⣘3G  ҰK!G$ UU 
B]|R'TƻƆ}A4TU=,4sΓHAн?離Ieؾ@0!?p&vLES;gj3;Pkk9q]|}-IE;f)/vr2;q<'ϭI7B(Ob օ4BEQD^@$I[0{1{'EF)L ]L-=nsgW1eR){;ػh\CU%4Mov)-qR]UDy {V}t'f9aVUIckA4i%Є3äQ>FWaiwB0 (y4 YxkYhF$Y͋졥#nZjX|t(h^*BQ"0~K$6"3?ħ[Z(/vbj8"aZcZ4i$S 1-$[%^Ёp[dg˜5Ԅ`Le{>, |0 ]ID[˲۫Xˊfn%|+NdJAQ4T!R4UoHi$Sj]l"q ;|/_A^#}$s$0 q 8I,I:‰^urUTAUKL+4`0H,4yt7-"$uꂇ]xfO,vHf aN`8H\P5 |I^!ɤ/( Bٌʾۢ0е|P$8N~?,DBæp`Z39c|`>y;"pXFv⡇z/=; eרL3#L&By+Nb!b7ج|T9irE^!T* P\.AbI(**!P5A/UvX |2s&&#>rCY8fj97^:SɤCW Gv G[̈́@;åN'S[i:y /t-,&0Fh4)C8m=q_=w3Ǘh(ț),Þ{@ Fh6qpΕsq0,E)3}TH/!Ihf4{=YpC W7T’vC`ȎXʞ:*l61sz}HaR?0lZBvd^-=aIGwDVoٷv"dJo}Yǝ8OK(E^Sim1>XgK]`a%n㪽;s<߯ C~*QWd6Z"$v6G)AZ/S'RQK0nwccXڜ$ S $=RA7?7;Z0d?ñ9rvI^8P>m?Pg_^ֺ&XIlkPU𥳦0ulqq>]̇>*- 0}j9fq?,,-ΜӓYveB v41~n,wDJքDJm`2Znu盼pͦ|rݎ6z|5on չح$Z'y3G|=t. Q?#Kc֍ü4y WvŔb&I0N8N(r:|>JKK)--rq|>0HdP=D"T%kw聰o\ts&=F~qi,Mu;ںw0eP4/X˓mG9J`joFtttJ[[`D"Á르2JJJx<Ƚ%N^$g+=9xmH4~a iXB^Ȯ|=WgE^$kYgEX権m }ÏAhkk#҅4a^/#'Kb!ppX/$);3:K;>sL _[x{ui:iY@,vf$ޑ`,ʆ-RuH$fq[[[,6|,.++,,Jblȝ#_^.z YZdz4Ue)rK )u(Jv=5Go׶R̕ʒ;YL #%صh,(ewH A*$ŚQg Hv:M^Y#F{BE/ͶM1ӎ׶sr_<߿c7H`5]ld}r&LR"Iu|nOk CoɤƎ]-ttt5B ǃ׫1fdęQo=$ p܌J;I]$lkR"ya4!`>fIO8oIBdNgC v޿0}>ߐŃEO$(Ӑee/92͔eu?p,fp y3/ Ieos$|HD)R$=^nwA$D"'oعsPM׶g I׊:xe{A^8P7o4$ IX%ށIմUCUw$BH4k`(\)% H**>| &ɂ#/ f`2Li9@XBIDF:5XM(Vflnbݴ"O+;ӟ&Aǎf݇#zk$o(hsQ^+U{w_?M($,\B<.+㫽9P OW  AHv@S{"15\qʱ|쩘[$5lӞ Yw[-2NBK*ug[U%W&QhkkKBGo҅;p5.95榖x:vOtCvϩӣ [l2;C53~"&PU塼܍i]j[[[ ߀m6$ĺ{ zVZz}{wla 5^XPk!jƗpʼ.k&eFʜ(/bn7VUUb1dqap="0މR7?7 ӼRJC,= 5^BisSwt2gL+kqd^NuUQZlri)QeXPH$EF ]:m zy22IlR K詝!~I'OS&"h@Һ~!`Oc^P?|nN$aBM߼d&^)$eM걩AkyֶeS&2kfE)*YT"ju#HYY!)2`Z==ur)n,KzDIW" #r6<3EEE&Ip{xRrhP`t'k {"$jN*@XB(DUU\.'~FS-l9-4DH$8tC Y)xV~Zti#UxV>$nt6e~g?׫_QfX;I$[ L\:莊Ǝktub̪lj]za_Ht'`{bI[ĬpI\8ëbeCvim8^|~<||xf)&Mnۍ5nJy;^.˺i>Ldx<U%?wTfhzSAfliC4\pxv4w̚gaZ.nYLІ;$iW6vbkp$q&yeÑƵڥ Lw?hؓ;P薡,[Lp5[g"6bI$Hi"XQ$0l8Jс(Þ4M#F\flGC$U% ز,cwIRts$jx4ܥYO-$N&޻B ֹB*+?ir3'E=+??0:G$cph>I {#Qt26o>hZzwLW =Iy6?_U&L8|N=Hcn~pޣͬzLqbANl+v-*"HJbtAIwB<N$J,J2@ 2J3`: w Q6ȫi^s29awP .g#ňb W3nVoB'y%ֱH9_*zWCk_>zp8F70Cn#YPb: *$Btݲᇂ~43HfG ii;TAIf wo%׋$Iw'vm|j%ӍF6jaOx-Mޞ[AI\2 Abr . 7-ks6g:k9Kwy =QIh4% f95FӥFM|O\#s`ۇ v6ȫjA'dϠxcޡ 0fN$N1"q4Mjvq\^CfP<3y{ԲX,idO%V5=wP5ELsvg3P8lH$5ÙHL&|ء( 4~7ti9&Ñs{C9;To"`vX8Atxނ$|>Ng%x<ݐء3@pX3#?7\LfŒv}ap')b#ŔYPt:NO%-K,-4 4g{>RkC H` S7L̊$M@ &>A_f飡y>k Ӝ6H,IG߭|rp82 J740CI_52L\cXH%Yz*% $q5|\PMaǑ; UAIfݲױ|2/aGӼWd4OڰS0j,ޫ Ϫi U# ~GSc1h˱%.&4UU@rFQPbY"no##d Eߏ6Mir!aEBI{7ǴnXG%AGq ޱ(yshXA7=G?E'2߮#kF٧?)FYt((-bwj ;y;) ];*t0^z|;]dv=CZ2y`f !Y,h$; o Yuɵ9B5 DR%RI$tVTަ0vw>Hk0NJp;Ԗ6)c=N,LlIs&Ldͷ* 1'B eyaIB,4n63fPi#NCc(pGm6MPa8p SƇx͝n?M1)UԎ>/bp;LqƂ\px&`O e1BT/<'g< D4 ˕.0*zK1 gH kVO{-1|Ra4M`TH wF3nji唕t5yoۻh%$}z,I;Gb v{gΛFE 1qwLJw,VwvS@9Dޞ8,:-KbݷYXa: TY R>#)6nnao}9*L%mxSm$&m ?|̪ |Ty;gyyʪj\hJxK J,_Aߥi'Yªr}A__YmąBb6;EF<WE@{#?/mO=I'0veͪ>d[i{'L"Nz$]m#Di^l?$q{,K$*OU~"5P`!SU Q\4m7{%}á]H}j-y>,iAE d-%1K^Ou MUHvswlO_goן Xq͘[t2S>8X7Rxe[yE`fdY☣XM!pS{zxɛyB31at)^83!m6^Ex3ʪ.v7ѧO~_DM+?! a}zv>D`(]% $XDbXB`x8jי,+zVβreT-8ɟ<O=Hbmj2/S*i, xS6niM7[hBnc3N&OEjG^B6I҃`=MY43Njqwy3aq8|ɥ̽N2}L?,|m BQR]$(A;+77߼ ߄w}LJi$ͯ]SJc|J4@ @,K) m1ohN`YD8K4koS@3aA$DJ٠s:s/_$1K:CKD*$:$IiHt=C)ԯT5F-9_yXŬ.}=e,ظҴft蠭6 xK; 歭#IS'JD[{:>VV a_k;OLxgM#w_0PH2ۊ{YHhB*keoQрRJ>=݆tRRRjo)V)%.G]s-z1Jv>( Bp8(--׋D$$pvhooׇ'hJ(>H. !صDB8Y8IF3F I`w7CY#@SFSUZ׭ES.j C\TTDqq1eeeiB/31AݻS9G.Iٍ\͊Mv7L} MOM%C giv_~VNހ&AQ8rgR\. "Fј8K' $1j)ؽ^L$B{v z-4:8^/%%%RRRBQQQC5 J[* ]! 
]9L$^UDK[dJJ*ax.I-4OM-[6$DPlݓ*RP !DցM蔚QP4E25v>ڽs ͆5%Lcu=pkbB!DZ0 ɌS,X"֟tѣ)$)1lϪUx'n\6l&u{(P-0C),4Ibm9Ȳӑ{˕1+Ֆq;9ۅrW0 Dдۚ(J'FK,Nݞub0,}v""FWZđ IYKqXH2ujHQls;X?L8?lضD[k0!Xu5Psca nҒ솷guv^; gU};fWe ֙Ѧ,1$F>*H,ݿ%ֿGi4!=˝ueUYfW",I,=a,~ q3+=XP(t}l2!2uv hkkK zrφĒ,ؾm[}}>#?łLl߲naĒ6 S&ffO*Ǚ Evt.14~_A  #PZVhifߛo+jY#X"$ome_D1d=4gfR ㊩)2uB0~\155/)펬 l|̞TjZՊ$eMfs;7*$Y:Q3K_[@Cc+֯WQEq:YX$vM̪J]|)|~h$ =^/cN?Q$YF 42"/MQ4b`KxQ@'IBU4} 㮮FF6'D{;BS}WLU~l9m,YuI]otژ4Y3+\oz %{"^p/O5[ (8hGdjK=<ϟ6Sʐ%P(D8NU qg/ec :GU%N,21 1} D:/2)/K+)_d*Ӧ}%./Nh U2byT1qB >W9 l\ y`Vh =N+ef+)ev uv!1xnjaƗßHdؓ"ĉddryG Bwg# C2}j]eTMh`(I8ܑȁg崦:M\ `83c#E@90t#ĒĤK>Gii@63 5Rpw;bx( IE;Ef^ z?@bEfNix4-ݻHi,V&,:%sPB^(aX|l|ʥvSXt/@2Ge6D"'|BMF"nGl !H$tttQ;y?OO\4*βrJ0P&AnӿKePC'|׿un>+p, ^#\},[q$I' )T*E4l٪qݸ/c3%ڿ.B :GzV;7BU|q)~Rs"F}T6>m[iW,N'-dU_eigǨ2Nسg==uuumV`3Mg?uK/8`XVHdʰ.MUD;GwBC頻ܫcH7&(XV\.n;xjjY1ϲٿ{Di:Mlœ2xkhYMQ;i?u;VhbڗBɟadzO ܁~ϑ$.JøΡv8MȜPPtmva]zqWĒITÁt0FJс&e=ցB"{$[لTE! H$~V+,z,ihSΟ4M EQ(---mźuo~ÓO>I[[^!?ݓ %,[QF"8c3vDMjxL15!hkmEQ*#N&:N&aX(++&tCC?0> ;tihݭXI/zZF_@eGG_;r0aB^F^5M#LJlC˄AP($I$O$X,<OZ,` ( iZy(!2vEQH$(n7`xD"upr-<裴7Y <WdڵTUU1f̘!B^Mӈu TJ*2s 04$F(VhEQxw۸{صkBO;}C֨ƀOQEQ&mڴsϱo>ƏOEEŐ.$VTp88X MVKzS0p8=4'9P(D$A4>Ǔr +ɤ$6oy͛;Q-K\k@є)S׿ΕW^IU4-ZB/`Ǿi`&v5ta&xY0GW3%ܝ^Gvj=N|8' lz Bc0c)d Tz]YX6JJrL477cqyftSy0 l.p6ppb.\nK2c&wwc^9:(Ha"1 gLfteQȫ*X,mY,n톅H`I66{hc3 -IUlee\㦗rQ̝VX,Ƌ/ywQ /۬YZ`sod…CfTU5]niL3^M^{#K3M$1{R)^1S|D"ϛ81Lh4J3%t:?+TLoW>'I"9ڵmBPYsL3*- i|w}7[4efmQte@IR֯_yhnnf„ ~*]2|bIy1ExZKbho~n0uC=JRR)TUf҇|+xw~Rwt$(B"Wds+w1c|1%A;vp]w{d2?Vt9b}g[W_8{^}U&Ms;UEO]Ik >6$)I̘RpPgjT2 ߣ߯'aoSc뻻1꭭̙\ם~[n~P(C܊NwL @mss_~Y)--eܸq&M*wײ%uG㍿!¢Y8, ϕe|`Q9v>X=\A6^du/ַ~Gcc^|;3taW)`ެiĝ;w>l۶1cPSS]e$Dn>(֨8| = ,}g}^ˌ3yJ(ļ<+E¥Т ÚX1͛yijjţV(cb񖖖k.c{EIm\腀QJ|CߑP$ !@U,V e-Ydj,_ /_W455E'ПR䅡^Dikkꫯ^ &qd6֖ Nϩ7Xw#I5ٶm;G~_fnm$hl}99P\`ө*ıX{[o?455{+vR~n_(D'Ā/z;3O?U(I_^߮GM9ivޜ<Ϧj `˶6v  %:w9Xd|>3eR.cUPQ좩=jiB0}\1kz !)-Q;pwDAĉˋ]ζSwaM>3ODUR ,)UUmG;ˮq=g/)%b aμ|Ev6`îv-$,7ܹ}{2?I2p#C}ݴlqĉnVo~@$2AՄͷ0Q.D"tNq$)EXv?j$MرX} !$$s4/Xdb?gj \-Mk#<-_WBP=-AL :@OlF777׾G}-#!bCjgAʭW͹mc"}huJd6̀ VkbͺΖ@?$I)"SY򝸺M8MͦOc/1So/?444h9ہ;%-!3 $9 [l-dO;aڠTMv ^x_]: UFeV+ӉH TU%Ju!i9+P^Ht(;).ځq,1c| ز'՚6~pT$V;'?!;P7T8l .[| ]8iծ^xh$ĥǨJ6j'49qPſ~_;o:$aX GiAfcwN&iy;s0{D옒ʂ7|;osܬj?m>qBckD wh+KYd8NN'6 Yݽ?;Ma+IX FUen5=s¬j4!ض7@,"qh^Gc/%yp-?ِ_nVyDa?z2!q\:eykM3ni=F4iY8T;YU]"@UUdzɲ&A}4/ԋJg6 #?FVVlh#'JT"QUpgZK/L"X>) 60}W1%\~e\oP;v b$R*R2e>GND"&'d6V u!^y}WD6̙M(g vƾ(ф팭.b޴ YIS4?7˫Rh(pظcЧI\ &L5\}UTU4MKwi4Qb`ز#국98p(1#rBJш'U)I݊ii??l۾tYi~ >p_h@ }_or8qbƔfA0=ݣv [U !NF*)YavZq;1yin[MD+z{8H#0'+_|E֮]Kuu5Gkwނ`D{PRdXޮ7{mwΝ;Bl@Cכ&8 l njDƴƍ7{7l )G-[|GRo[TwH&vuľ'NXb_&J1i$|c:z:e_}U2~ ] $IL\JyB2?2еGDP!pW(Ft"7!&ٳ^`ӦM5QFM>ih9R ʜYx,Ɂӧ?g?[nEӴρ2m[ #T*5iݺu]d%%%C~RvpYBPSUČC2;v=mݖ){a**o([|}b|\bQ59UT YhooG[o婧2d~/o0BCCCy=4 b.-f[d%JĶBƌ1wN!겙L&:e~2f #?ZݡMӴ;v(yپ};cǎ:*S䱳!L2e]!x,:~!h'`i~ׯGӴ=  PaG!3dWZz L4 ?yԢ";N4B\. 
IU]]˗/;:x2p #< bz[oi$ JKx6ZZ$w~!> F1a|IN뫃`zߟ':::oq G!:Nofɒ%9okQFF!6Z_΃VTz˗/A}ه!!.[SRRe]u]ٳsɤJ w%W4 ^[-tXp3iB cFyoO} \.śo)E7ﲆ(^zp}݀n{G00 o6m7-[[oUTUUmDo:%ߋ=2p8o(N*^|hnn˗/ӧO7kL;Nob'|xꩧD47?{E,Ƚ#8GжC,]TK"LdRNuW"G0u(--^{Xv!ɻ~zq׋2{F0# p,( &N(No߾(.1ydAv6 KRUnderdevelopment(unstable)(2025-03-01r87860)onFedoraLinux38(ContaRUnderdevelopment(unstable)(2025-02-28r87848)onUbuntu22.04.5LTSRUnderdevelopment(unstable)(2025-03-01r87860)onUbuntu22.04.5LTSRUnderdevelopment(unstable)(2025-03-01r87860)onFedoraLinux40(Conta──Virtualmachines───────────────────────────────────────────────────────────1[VM]linuxAllRversionsonGitHubActionsubuntu-latest2[VM]m1-sanAllRversionsonGitHubActionsmacos-latest,ASAN+UBSANonmacOS3[VM]macosAllRversionsonGitHubActionsmacos-134[VM]macos-arm64AllRversionsonGitHubActionsmacos-latest5[VM]windowsAllRversionsonGitHubActionswindows-latest──Containers─────────────────────────────────────────────────────────────────6[CT]atlas[ATLAS]ghcr.io/r-hub/containers/atlas:latest7[CT]c23[C23]ghcr.io/r-hub/containers/c23:latest8[CT]clang-asan[asan,clang-ASAN]ghcr.io/r-hub/containers/clang-asan:latest9[CT]clang-ubsan[clang-UBSAN,ubsan]ghcr.io/r-hub/containers/clang-ubsan:latest10[CT]clang16[clang16]ghcr.io/r-hub/containers/clang16:latest11[CT]clang17[clang17]ghcr.io/r-hub/containers/clang17:latest12[CT]clang18[clang18]ghcr.io/r-hub/containers/clang18:latest13[CT]clang19[clang19]ghcr.io/r-hub/containers/clang19:latest14[CT]clang20[clang20]ghcr.io/r-hub/containers/clang20:latest15[CT]donttest[donttest]ghcr.io/r-hub/containers/donttest:latest16[CT]gcc-asan[gcc-ASAN,gcc-UBSAN]ghcr.io/r-hub/containers/gcc-asan:latest17[CT]gcc13[gcc13]ghcr.io/r-hub/containers/gcc13:latest18[CT]gcc14[gcc14]ghcr.io/r-hub/containers/gcc14:latest19[CT]gcc15[gcc15]RUnderdevelopment(unstable)(2025-03-01r87860)onFedoraLinux43(Contaghcr.io/r-hub/containers/gcc15:latest20[CT]intel[Intel]ghcr.io/r-hub/containers/intel:latest21[CT]mkl[MKL]ghcr.io/r-hub/containers/mkl:latest22[CT]nold[noLD]ghcr.io/r-hub/containers/nold:latest23[CT]noremap[noRemap]ghcr.io/r-hub/containers/noremap:latest24[CT]nosuggests[noSuggests]ghcr.io/r-hub/containers/nosuggests:latest25[CT]rchk[rchk]ghcr.io/r-hub/containers/rchk:latest26[CT]ubuntu-clang[r-devel-linux-x86_64-debian-clang]ghcr.io/r-hub/containers/ubuntu-clang:latest27[CT]ubuntu-gcc12[r-devel-linux-x86_64-debian-gcc]ghcr.io/r-hub/containers/ubuntu-gcc12:latest28[CT]ubuntu-next[r-next,r-patched,r-patched-linux-x86_64]Rversion4.4.3Patched(2025-02-28r87848)onUbuntu22.04.5LTSghcr.io/r-hub/containers/ubuntu-next:latest29[CT]ubuntu-release[r-release,r-release-linux-x86_64,ubuntu]Rversion4.4.3(2025-02-28)onUbuntu22.04.5LTSghcr.io/r-hub/containers/ubuntu-release:latest30[CT]valgrind[valgrind]ghcr.io/r-hub/containers/valgrind:latestrhub/man/figures/rhub-platforms.svg0000644000176200001440000005633414762414200017132 0ustar 
liggesusersRUnderdevelopment(unstable)(2025-03-01r87860)onFedoraLinux38(ContaRUnderdevelopment(unstable)(2025-02-28r87848)onUbuntu22.04.5LTSRUnderdevelopment(unstable)(2025-03-01r87860)onUbuntu22.04.5LTSRUnderdevelopment(unstable)(2025-03-01r87860)onFedoraLinux40(Conta──Virtualmachines───────────────────────────────────────────────────────────1[VM]linuxAllRversionsonGitHubActionsubuntu-latest2[VM]m1-sanAllRversionsonGitHubActionsmacos-latest,ASAN+UBSANonmacOS3[VM]macosAllRversionsonGitHubActionsmacos-134[VM]macos-arm64AllRversionsonGitHubActionsmacos-latest5[VM]windowsAllRversionsonGitHubActionswindows-latest──Containers─────────────────────────────────────────────────────────────────6[CT]atlas[ATLAS]ghcr.io/r-hub/containers/atlas:latest7[CT]c23[C23]ghcr.io/r-hub/containers/c23:latest8[CT]clang-asan[asan,clang-ASAN]ghcr.io/r-hub/containers/clang-asan:latest9[CT]clang-ubsan[clang-UBSAN,ubsan]ghcr.io/r-hub/containers/clang-ubsan:latest10[CT]clang16[clang16]ghcr.io/r-hub/containers/clang16:latest11[CT]clang17[clang17]ghcr.io/r-hub/containers/clang17:latest12[CT]clang18[clang18]ghcr.io/r-hub/containers/clang18:latest13[CT]clang19[clang19]ghcr.io/r-hub/containers/clang19:latest14[CT]clang20[clang20]ghcr.io/r-hub/containers/clang20:latest15[CT]donttest[donttest]ghcr.io/r-hub/containers/donttest:latest16[CT]gcc-asan[gcc-ASAN,gcc-UBSAN]ghcr.io/r-hub/containers/gcc-asan:latest17[CT]gcc13[gcc13]ghcr.io/r-hub/containers/gcc13:latest18[CT]gcc14[gcc14]ghcr.io/r-hub/containers/gcc14:latest19[CT]gcc15[gcc15]RUnderdevelopment(unstable)(2025-03-01r87860)onFedoraLinux43(Contaghcr.io/r-hub/containers/gcc15:latest20[CT]intel[Intel]ghcr.io/r-hub/containers/intel:latest21[CT]mkl[MKL]ghcr.io/r-hub/containers/mkl:latest22[CT]nold[noLD]ghcr.io/r-hub/containers/nold:latest23[CT]noremap[noRemap]ghcr.io/r-hub/containers/noremap:latest24[CT]nosuggests[noSuggests]ghcr.io/r-hub/containers/nosuggests:latest25[CT]rchk[rchk]ghcr.io/r-hub/containers/rchk:latest26[CT]ubuntu-clang[r-devel-linux-x86_64-debian-clang]ghcr.io/r-hub/containers/ubuntu-clang:latest27[CT]ubuntu-gcc12[r-devel-linux-x86_64-debian-gcc]ghcr.io/r-hub/containers/ubuntu-gcc12:latest28[CT]ubuntu-next[r-next,r-patched,r-patched-linux-x86_64]Rversion4.4.3Patched(2025-02-28r87848)onUbuntu22.04.5LTSghcr.io/r-hub/containers/ubuntu-next:latest29[CT]ubuntu-release[r-release,r-release-linux-x86_64,ubuntu]Rversion4.4.3(2025-02-28)onUbuntu22.04.5LTSghcr.io/r-hub/containers/ubuntu-release:latest30[CT]valgrind[valgrind]ghcr.io/r-hub/containers/valgrind:latestrhub/man/figures/rhub-check.svg0000644000176200001440000006062114762414203016175 0ustar 
liggesusersFoundgitrepositoryat/private/tmp/cli.FoundGitHubPAT.Availableplatforms(see`rhub::rhub_platforms()`fordetails):1[VM]linuxR-*(anyversion)ubuntu-latestonG…2[VM]m1-sanR-*(anyversion)macos-latestonGi…3[VM]macosR-*(anyversion)macos-13onGitHub4[VM]macos-arm64R-*(anyversion)macos-latestonGi…5[VM]windowsR-*(anyversion)windows-lateston6[CT]atlasR-devel(2025-03-01r87860)FedoraLinux38(C…7[CT]c23R-devel(2025-02-28r87848)Ubuntu22.04.5LTS8[CT]clang-asanR-devel(2025-03-01r87860)Ubuntu22.04.5LTS9[CT]clang-ubsanR-devel(2025-03-01r87860)Ubuntu22.04.5LTS10[CT]clang16R-devel(2025-02-28r87848)Ubuntu22.04.5LTS11[CT]clang17R-devel(2025-02-28r87848)Ubuntu22.04.5LTS12[CT]clang18R-devel(2025-02-28r87848)Ubuntu22.04.5LTS13[CT]clang19R-devel(2025-02-28r87848)Ubuntu22.04.5LTS14[CT]clang20R-devel(2025-02-28r87848)Ubuntu22.04.5LTS15[CT]donttestR-devel(2025-02-28r87848)Ubuntu22.04.5LTS16[CT]gcc-asanR-devel(2025-03-01r87860)FedoraLinux40(C…17[CT]gcc13R-devel(2025-03-01r87860)FedoraLinux38(C…18[CT]gcc14R-devel(2025-03-01r87860)FedoraLinux40(C…19[CT]gcc15R-devel(2025-03-01r87860)FedoraLinux43(C…20[CT]intelR-devel(2025-03-01r87860)FedoraLinux38(C…21[CT]mklR-devel(2025-03-01r87860)FedoraLinux38(C…22[CT]noldR-devel(2025-03-01r87860)Ubuntu22.04.5LTS23[CT]noremapR-devel(2025-02-28r87848)Ubuntu22.04.5LTS24[CT]nosuggestsR-devel(2025-03-01r87860)FedoraLinux38(C…25[CT]rchkR-devel(2025-03-01r87860)Ubuntu22.04.5LTS26[CT]ubuntu-clangR-devel(2025-03-01r87860)Ubuntu22.04.5LTS27[CT]ubuntu-gcc12R-devel(2025-03-01r87860)Ubuntu22.04.5LTS28[CT]ubuntu-nextR-4.4.3(patched)(2025-02-28r87848)Ubuntu22.04.5LTS29[CT]ubuntu-releaseR-4.4.3(2025-02-28)Ubuntu22.04.5LTS30[CT]valgrindR-devel(2025-03-01r87860)FedoraLinux38(C…Selection(commaseparatednumbers,0tocancel):1,5Checkstarted:linux,windows(cinnamic-elver).See<https://github.com/r-lib/cli/actions>forliveoutput!rhub/man/local_check_linux.Rd0000644000176200001440000000055714603437121015732 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{local_check_linux} \alias{local_check_linux} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ local_check_linux(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/rhub_setup.Rd0000644000176200001440000000074514603437121014443 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/setup.R \name{rhub_setup} \alias{rhub_setup} \title{Setup the current R package for use with R-hub} \usage{ rhub_setup(overwrite = FALSE) } \arguments{ \item{overwrite}{if \code{TRUE}, \code{\link[=rhub_setup]{rhub_setup()}} will overwrite an already existing workflow file.} } \value{ Nothing. } \description{ It adds or updates the R-hub workflow file to the current package, and advises on next steps. } rhub/man/rhubv2.Rd0000644000176200001440000001235414762411337013501 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv2.R \name{rhubv2} \alias{rhubv2} \title{R-hub v2} \description{ Start here to learn about R-hub v2, especially if you used the previous version of R-hub before. } \section{R-hub v2}{ \subsection{Introduction}{ R-hub v2, i.e. version 2 or later of the rhub package, is a completely new check system. In this article we highlight the differences between the old and the new system. There are two ways to use R-hub v2. 
The recommended way is to store your R package in a GitHub repository and use the \verb{rhub_*()} functions to start checks on GitHub Actions, using your own GitHub account. Alternatively, if you don't want to store your R package on GitHub, you can use the \verb{rc_*()} functions to run checks in a shared GitHub organization at https://github.com/r-hub2, using the R Consortium runners. See more about the R Consortium runners below. } \subsection{Transitioning from R-hub v1}{ \subsection{Requirements for using R-hub v2}{ \itemize{ \item First, you need a GitHub account. \item Second, you need to have your R package in a GitHub repository. In your local git clone make sure that the \code{origin} git remote is set to the GitHub repository. \item Third, you need a GitHub \href{https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token}{Personal Access Token} and you need to store it in the git credential store on your machine. You can use \code{gitcreds::gitcreds_set()} to add the token to the git credential store. } Call \code{rhub_setup()} from the local git clone to set up R-hub v2 for your package. This adds a GitHub Actions workflow to your local repository. Push this change to GitHub, into your default git branch, and then you are ready to start checks with \code{rhub_check()}. } \subsection{Differences from R-hub v1}{ \itemize{ \item The check picks up the package from GitHub, so it does not use changes in your local git clone. You need to push the changes to GitHub first. You can use a non-default branch with the \code{branch} argument of \code{rhub_check()}. \item You won't get an email about the check results. But you'll receive regular GitHub notifications about check failures, unless you opt out. GitHub can also turn these into emails if you like. \item There is no live output from the check at the R console. See the 'Actions' tab of your repository on GitHub for a live check log. \item Many more specialized platforms are available. \item Most platforms use binary packages, so checks, and in particular dependency installation, are much faster. } } \subsection{Private repositories}{ GitHub Actions is free for public repositories. For private repositories you also get some minutes for free, depending on the GitHub subscription you have. See \href{https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions}{About billing for GitHub Actions} for details. } \subsection{Branches}{ You can run checks on any branch that you push to GitHub, but the R-hub workflow file (\code{.github/workflows/rhub.yaml} within your repo) must be present in \strong{both} the default branch (usually \code{main}) and the branch you want to run the check on. } } \subsection{The R Consortium runners}{ If you don't want to put your package on GitHub, you can still use the rhub package to run package checks on any supported platform using a shared pool of runners in the https://github.com/r-hub2 GitHub organization. The process is similar to the first version of R-hub: \itemize{ \item Set your working directory to the R package you want to check. \item Obtain a token from R-hub, to verify your email address: \if{html}{\out{
}}\preformatted{rc_new_token() }\if{html}{\out{
}} (You do not need to do this, if you already submitted packages to a previous version of R-hub from the same machine, using the same email address. Call \code{rc_list_local_tokens()} to check if you already have tokens.) \item Submit a build with \if{html}{\out{
}}\preformatted{rc_submit() }\if{html}{\out{
}} \item Select the platforms you want to use, and follow the instructions and the link provided to see your check results. } \subsection{Limitations of the R Consortium runners}{ \itemize{ \item Your package will be public for the world, and will be stored in the https://github.com/r-hub2 organization. Your check output and results will be public for anyone with a GitHub account. If you want to keep your package private, you can put it in a private GitHub repository, and use the \code{rhub_setup()} and \code{rhub_check()} functions instead. \item The R Consortium runners are shared among all users, so you might need to wait for your builds to start. \item You have to wait at least five minutes between submissions with \code{rc_submit()}. \item Currently you need to create a GitHub account to see the check logs of your package. You don't need a GitHub account to submit the checks. } To avoid these limitations (except for the need for a GitHub account), put your package in a GitHub repository, and use the \code{rhub_setup()} and \code{rhub_check()} functions instead of \code{rc_submit()} and the R Consortium runners. } } } rhub/man/rc_submit.Rd0000644000176200001440000000321314603437121014243 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rc.R \name{rc_submit} \alias{rc_submit} \title{Submit a package to the R Consortium runners} \usage{ rc_submit(path = ".", platforms = NULL, email = NULL, confirmation = NULL) } \arguments{ \item{path}{Path to package file or package directory.} \item{platforms}{Platforms to check. See \code{\link[=rhub_platforms]{rhub_platforms()}} for a current list. If not specified, then you can select the platforms interactively. Must be specified in non-interactive sessions.} \item{email}{Email address. You must have a token on the local machine that corresponds to the email address, see \code{\link[=rc_new_token]{rc_new_token()}}. If not specified (or \code{NULL}) then the email address of the package maintainer is used.} \item{confirmation}{You must set this to \code{TRUE} to submit a package from a non-interactive session.} } \value{ A list with data about the submission, invisibly. Currently it has: \itemize{ \item \code{result}: Should be the string \code{"OK"}. \item \code{repo_url}: URL to the repository. \item \code{actions_url}: URL to the builds inside the repository. \item \code{id}: Build id. This is a string with a randomly generated id. \item \code{name}: Build name. This is a string, the concatenation of the build platforms. } More fields might be added later. } \description{ Submit a package to the R Consortium runners } \seealso{ \code{\link[=rhub_platforms]{rhub_platforms()}} for a list of supported platforms. Other RC runners API: \code{\link{rc_list_local_tokens}()}, \code{\link{rc_list_repos}()}, \code{\link{rc_new_token}()} } \concept{RC runners API} rhub/man/rc_new_token.Rd0000644000176200001440000000265514603437121014742 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rc.R \name{rc_new_token} \alias{rc_new_token} \title{Request a new token for submissions to the R Consortium runners} \usage{ rc_new_token(email = NULL, token = NULL) } \arguments{ \item{email}{Email address to verify. We try to detect this, but if the detection fails, you can specify it explicitly. If this argument is missing (or \code{NULL}), then you can specify it interactively.} \item{token}{Token to add. 
If you already received a token in an email from R-hub, you can specify that here.} } \description{ To build and check R packages on the RC runners of R-hub, you'll need to verify your email address. R-hub will send a token to your email address, and this token will be stored on your computer. } \details{ You need to store a token on every computer you want to submit jobs from, either by reusing the token from the email you got, or by requesting additional tokens for the new machines. Your old token will stay valid as well. If you already have a token from a previous version of R-hub, you can reuse that and you don't need to do anything. Run \if{html}{\out{
}}\preformatted{rhub:::email_file() }\if{html}{\out{
}} to see the file rhub uses to store your tokens. } \seealso{ Other RC runners API: \code{\link{rc_list_local_tokens}()}, \code{\link{rc_list_repos}()}, \code{\link{rc_submit}()} } \concept{RC runners API} rhub/man/last_check.Rd0000644000176200001440000000053214603437121014355 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{last_check} \alias{last_check} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ last_check(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/check.Rd0000644000176200001440000000051314603437121013331 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{check} \alias{check} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ check(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/roxygen/0000755000176200001440000000000014603437121013461 5ustar liggesusersrhub/man/roxygen/meta.R0000644000176200001440000000111514603437121014530 0ustar liggesusersif (exists(".knitr_asciicast_process", envir = .GlobalEnv)) { rm(list = ".knitr_asciicast_process", envir = .GlobalEnv) } asciicast::init_knitr_engine( echo = TRUE, echo_input = FALSE, timeout = as.integer(Sys.getenv("ASCIICAST_TIMEOUT", 10)), startup = quote(options(cli.num_colors = 256)) ) knitr::opts_chunk$set( asciicast_knitr_output = "html", asciicast_include_style = FALSE, cache = TRUE, cache.path = file.path(getwd(), "man/_cache/"), fig.path = file.path(getwd(), "man/figures"), error = TRUE ) list( markdown = TRUE, restrict_image_formats = TRUE ) rhub/man/get_check.Rd0000644000176200001440000000052714603437121014175 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{get_check} \alias{get_check} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ get_check(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/rhub-package.Rd0000644000176200001440000005044314762411337014623 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv2.R \name{rhub-package} \alias{rhub-package} \alias{rhub} \title{The rhub package} \description{ Tools for R package developers } \details{ \subsection{Installation}{ Install rhub from CRAN: \if{html}{\out{
}}\preformatted{pak::pkg_install("rhub") }\if{html}{\out{
}} } \subsection{Usage}{ \subsection{Requirements}{ \itemize{ \item A GitHub account. \item Your R package must be in a GitHub repository. \item You need a GitHub \href{https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token}{Personal Access Token}. You can use the \href{https://gitcreds.r-lib.org/}{gitcreds package} to add the token to the git credential store. } See the \href{#the-r-consortium-runners}{R Consortium runners} section for using rhub if your package is not on GitHub. } \subsection{Private repositories}{ rhub uses GitHub Actions, which is free for public repositories. For private repositories you also get some minutes for free, depending on the GitHub subscription you have. See \href{https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions}{About billing for GitHub Actions} for details. } \subsection{Setup}{ \enumerate{ \item Switch to the directory of your package, and call \code{rhub::rhub_setup()} to add the R-hub workflow file to your package. } \if{html}{\out{
}}\preformatted{rhub::rhub_setup() }\if{html}{\out{
}}\if{html}{\out{
#> Setting up R-hub v2.                                                            
#>  Found R package at /private/tmp/cli.                                          
#>  Found git repository at /private/tmp/cli.                                     
#>  Created workflow file /private/tmp/cli/.github/workflows/rhub.yaml.           
#>                                                                                 
#> Notes:                                                                          
#>  The workflow file must be added to the default branch of the GitHub           
#>   repository.                                                                   
#>  GitHub actions must be enabled for the repository. They are disabled for      
#>   forked repositories by default.                                               
#>                                                                                 
#> Next steps:                                                                     
#>  Add the workflow file to git using `git add <filename>`.                      
#>  Commit it to git using `git commit`.                                          
#>  Push the commit to GitHub using `git push`.                                   
#>  Call `rhub::rhub_doctor()` to check that you have set up R-hub correctly.     
#>  Call `rhub::rhub_check()` to check your package.                              
}} \enumerate{ \item Run \verb{git commit} and \verb{git push} to push the workflow file to GitHub. \item Run \code{rhub::rhub_doctor()} to check if everything is set up correctly: } \if{html}{\out{
}}\preformatted{rhub::rhub_doctor() }\if{html}{\out{
}}\if{html}{\out{
#>  Found R package at /private/tmp/cli.                                          
#>  Found git repository at /private/tmp/cli.                                     
#>  Found GitHub PAT.                                                             
#>  Found repository on GitHub at <https://github.com/r-lib/cli>.                 
#>  GitHub PAT has the right scopes.                                              
#>  Found R-hub workflow in default branch, and it is active.                     
#> → WOOT! You are ready to run `rhub::rhub_check()` on this package.              
}} } \subsection{Run checks}{ Use \code{rhub::rhub_platforms()} to get a list of supported platforms and checks: \if{html}{\out{
}}\preformatted{rhub::rhub_platforms() }\if{html}{\out{
}}\if{html}{\out{
#> ── Virtual machines ─────────────────────────────────────────────────────────── 
#>  1 [VM]  linux                                                                  
#>    All R versions on GitHub Actions ubuntu-latest                               
#>  2 [VM]  macos                                                                  
#>    All R versions on GitHub Actions macos-latest                                
#>  3 [VM]  macos-arm64                                                            
#>    All R versions on GitHub Actions macos-14                                    
#>  4 [VM]  windows                                                                
#>    All R versions on GitHub Actions windows-latest                              
#>                                                                                 
#> ── Containers ───────────────────────────────────────────────────────────────── 
#>  5 [CT]  atlas  [ATLAS]                                                         
#>    R Under development (unstable) (2024-03-19 r86153) on Fedora Linux 38 (Conta…
#>    ghcr.io/r-hub/containers/atlas:latest                                        
#>  6 [CT]  clang-asan  [asan, clang-ASAN, clang-UBSAN, ubsan]                     
#>    R Under development (unstable) (2024-03-19 r86153) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/clang-asan:latest                                   
#>  7 [CT]  clang16  [clang16]                                                     
#>    R Under development (unstable) (2024-03-18 r86148) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/clang16:latest                                      
#>  8 [CT]  clang17  [clang17]                                                     
#>    R Under development (unstable) (2024-03-18 r86148) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/clang17:latest                                      
#>  9 [CT]  clang18  [clang18]                                                     
#>    R Under development (unstable) (2024-03-18 r86148) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/clang18:latest                                      
#> 10 [CT]  donttest  [donttest]                                                   
#>    R Under development (unstable) (2024-03-18 r86148) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/donttest:latest                                     
#> 11 [CT]  gcc13  [gcc13]                                                         
#>    R Under development (unstable) (2024-03-19 r86153) on Fedora Linux 38 (Conta…
#>    ghcr.io/r-hub/containers/gcc13:latest                                        
#> 12 [CT]  intel  [Intel]                                                         
#>    R Under development (unstable) (2024-03-19 r86153) on Fedora Linux 38 (Conta…
#>    ghcr.io/r-hub/containers/intel:latest                                        
#> 13 [CT]  mkl  [MKL]                                                             
#>    R Under development (unstable) (2024-03-19 r86153) on Fedora Linux 38 (Conta…
#>    ghcr.io/r-hub/containers/mkl:latest                                          
#> 14 [CT]  nold  [noLD]                                                           
#>    R Under development (unstable) (2024-03-19 r86153) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/nold:latest                                         
#> 15 [CT]  nosuggests  [noSuggests]                                               
#>    R Under development (unstable) (2024-03-19 r86153) on Fedora Linux 38 (Conta…
#>    ghcr.io/r-hub/containers/nosuggests:latest                                   
#> 16 [CT]  ubuntu-clang  [r-devel-linux-x86_64-debian-clang]                      
#>    R Under development (unstable) (2024-03-19 r86153) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/ubuntu-clang:latest                                 
#> 17 [CT]  ubuntu-gcc12  [r-devel-linux-x86_64-debian-gcc]                        
#>    R Under development (unstable) (2024-03-19 r86153) on Ubuntu 22.04.4 LTS     
#>    ghcr.io/r-hub/containers/ubuntu-gcc12:latest                                 
#> 18 [CT]  ubuntu-next  [r-next, r-patched, r-patched-linux-x86_64]               
#>    R version 4.3.3 Patched (2024-02-29 r86153) on Ubuntu 22.04.4 LTS            
#>    ghcr.io/r-hub/containers/ubuntu-next:latest                                  
#> 19 [CT]  ubuntu-release  [r-release, r-release-linux-x86_64, ubuntu]            
#>    R version 4.3.3 (2024-02-29) on Ubuntu 22.04.4 LTS                           
#>    ghcr.io/r-hub/containers/ubuntu-release:latest                               
#> 20 [CT]  valgrind  [valgrind]                                                   
#>    R Under development (unstable) (2024-03-19 r86153) on Fedora Linux 38 (Conta…
#>    ghcr.io/r-hub/containers/valgrind:latest                                     
}} Run \code{rhub::rhub_check()} to start R-hub v2 checks on GitHub Actions: \if{html}{\out{
}}\preformatted{rhub::rhub_check() }\if{html}{\out{
}}\if{html}{\out{
#>  Found git repository at /private/tmp/cli.                                     
#>  Found GitHub PAT.                                                             
#>                                                                                 
#> Available platforms (see `rhub::rhub_platforms()` for details):                 
#>                                                                                 
#>  1 [VM] linux          R-* (any version)                     ubuntu-latest on G…
#>  2 [VM] macos          R-* (any version)                     macos-latest on Gi…
#>  3 [VM] macos-arm64    R-* (any version)                     macos-14 on GitHub 
#>  4 [VM] windows        R-* (any version)                     windows-latest on …
#>  5 [CT] atlas          R-devel (2024-03-19 r86153)           Fedora Linux 38 (C…
#>  6 [CT] clang-asan     R-devel (2024-03-19 r86153)           Ubuntu 22.04.4 LTS 
#>  7 [CT] clang16        R-devel (2024-03-18 r86148)           Ubuntu 22.04.4 LTS 
#>  8 [CT] clang17        R-devel (2024-03-18 r86148)           Ubuntu 22.04.4 LTS 
#>  9 [CT] clang18        R-devel (2024-03-18 r86148)           Ubuntu 22.04.4 LTS 
#> 10 [CT] donttest       R-devel (2024-03-18 r86148)           Ubuntu 22.04.4 LTS 
#> 11 [CT] gcc13          R-devel (2024-03-19 r86153)           Fedora Linux 38 (C…
#> 12 [CT] intel          R-devel (2024-03-19 r86153)           Fedora Linux 38 (C…
#> 13 [CT] mkl            R-devel (2024-03-19 r86153)           Fedora Linux 38 (C…
#> 14 [CT] nold           R-devel (2024-03-19 r86153)           Ubuntu 22.04.4 LTS 
#> 15 [CT] nosuggests     R-devel (2024-03-19 r86153)           Fedora Linux 38 (C…
#> 16 [CT] ubuntu-clang   R-devel (2024-03-19 r86153)           Ubuntu 22.04.4 LTS 
#> 17 [CT] ubuntu-gcc12   R-devel (2024-03-19 r86153)           Ubuntu 22.04.4 LTS 
#> 18 [CT] ubuntu-next    R-4.3.3 (patched) (2024-02-29 r86153) Ubuntu 22.04.4 LTS 
#> 19 [CT] ubuntu-release R-4.3.3 (2024-02-29)                  Ubuntu 22.04.4 LTS 
#> 20 [CT] valgrind       R-devel (2024-03-19 r86153)           Fedora Linux 38 (C…
#>                                                                                 
#> Selection (comma separated numbers, 0 to cancel): 1, 5                          
#>                                                                                 
#>  Check started: linux, atlas (apricot-flycatcher).                             
#>   See <https://github.com/r-lib/cli/actions> for live output!                   
}} } } \subsection{The R Consortium runners}{ If you don't want to put your package on GitHub, you can still use the rhub package to run package checks on any supported platform using a shared pool of runners in the https://github.com/r-hub2 GitHub organization. The process is similar to the first version of R-hub: \itemize{ \item Set your working directory to the R package you want to check. \item Obtain a token from R-hub, to verify your email address: \if{html}{\out{
}}\preformatted{rc_new_token() }\if{html}{\out{
}} (You do not need to do this, if you already submitted packages to a previous version of R-hub from the same machine, using the same email address. Call \code{rc_list_local_tokens()} to check if you already have tokens.) \item Submit a build with \if{html}{\out{
}}\preformatted{rc_submit() }\if{html}{\out{
}} \item Select the platforms you want to use, and follow the instructions and the link provided to see your check results. } \subsection{Limitations of the R Consortium runners}{ \itemize{ \item Your package will be public for the world, and will be stored in the https://github.com/r-hub2 organization. Your check output and results will be public for anyone with a GitHub account. If you want to keep your package private, you can put it in a private GitHub repository, and use the \code{rhub_setup()} and \code{rhub_check()} functions instead. \item The R Consortium runners are shared among all users, so you might need to wait for your builds to start. \item You have to wait at least five minutes between submissions with \code{rc_submit()}. \item Currently you need to create a GitHub account to see the check logs of your package. You don't need a GitHub account to submit the checks. } To avoid these limitations (except for the need for a GitHub account), put your package in a GitHub repository, and use the \code{rhub_setup()} and \code{rhub_check()} functions instead of \code{rc_submit()} and the R Consortium runners. } } \subsection{Code of Conduct}{ Please note that the rhub package is released with a \href{https://r-hub.github.io/rhub/dev/CODE_OF_CONDUCT.html}{Contributor Code of Conduct}. By contributing to this project, you agree to abide by its terms. } \subsection{License}{ MIT © R Consortium } } \keyword{internal} rhub/man/rc_list_repos.Rd0000644000176200001440000000160214603437121015123 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rc.R \name{rc_list_repos} \alias{rc_list_repos} \title{List your repositories created by the R Consortium runners} \usage{ rc_list_repos(email = NULL) } \arguments{ \item{email}{Email address. We try to detect this, but if the detection fails, you can specify it explicitly.} } \value{ Data frame with columns: \itemize{ \item \code{repo_name}: Name of the repository. \item \code{repo_url}: URL of the repository. \item \code{builds_url}: URL to the builds of the repository. } Additional columns and customized printing will probably be added later to the result. } \description{ Lists repositories created by \code{\link[=rc_submit]{rc_submit()}} submissions. } \seealso{ Other RC runners API: \code{\link{rc_list_local_tokens}()}, \code{\link{rc_new_token}()}, \code{\link{rc_submit}()} } \concept{RC runners API} rhub/man/rhub_check.Rd0000644000176200001440000000167714762411337014364 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/check.R \name{rhub_check} \alias{rhub_check} \title{Check a package on R-hub} \usage{ rhub_check(gh_url = NULL, platforms = NULL, r_versions = NULL, branch = NULL) } \arguments{ \item{gh_url}{GitHub URL of a package to check, or \code{NULL} to check the package in the current directory.} \item{platforms}{Platforms to use, a character vector. Use \code{NULL} to select from a list in interactive sessions. See \code{\link[=rhub_platforms]{rhub_platforms()}}.} \item{r_versions}{Which R version(s) to use for the platforms that support multiple R versions. This argument is not implemented yet.} \item{branch}{Branch to use to run R-hub. Defaults to the current branch if \code{gh_url} is \code{NULL}. Otherwise defaults to \code{"main"}. 
Note that this branch also needs to include the \code{rhub.yaml} workflow file.} } \value{ TODO } \description{ Check a package on R-hub } rhub/man/check_shortcuts.Rd0000644000176200001440000000175414603437121015457 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rhubv1.R \name{check_on_linux} \alias{check_on_linux} \alias{check_on_windows} \alias{check_on_macos} \alias{check_on_debian} \alias{check_on_ubuntu} \alias{check_on_fedora} \alias{check_on_solaris} \alias{check_on_centos} \alias{check_with_roldrel} \alias{check_with_rrelease} \alias{check_with_rpatched} \alias{check_with_rdevel} \alias{check_with_valgrind} \alias{check_with_sanitizers} \title{This function is deprecated and defunct. Please see \link{rhubv2}.} \usage{ check_on_linux(...) check_on_windows(...) check_on_macos(...) check_on_debian(...) check_on_ubuntu(...) check_on_fedora(...) check_on_solaris(...) check_on_centos(...) check_with_roldrel(...) check_with_rrelease(...) check_with_rpatched(...) check_with_rdevel(...) check_with_valgrind(...) check_with_sanitizers(...) } \arguments{ \item{...}{Deprecated.} } \description{ This function is deprecated and defunct. Please see \link{rhubv2}. } rhub/man/rhub_platforms.Rd0000644000176200001440000000204314603437121015303 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/platforms.R \name{rhub_platforms} \alias{rhub_platforms} \title{List R-hub platforms} \usage{ rhub_platforms() } \value{ Data frame with columns: \itemize{ \item \code{name}: platform name. Use this in the \code{platforms} argument of \code{\link[=rhub_check]{rhub_check()}}. \item \code{aliases}: alternative platform names. They can also be used in the \code{platforms} argument of \code{\link[=rhub_check]{rhub_check()}}. \item \code{type}: \code{"os"} or \code{"container"}. \item \code{os_type}: Linux, macOS or Windows currently. \item \code{container}: URL of the container image for container platforms. \item \code{github_os}: name of the OS on GitHub Actions for non-container platforms. \item \code{r_version}: R version string. If \code{"*"} then any supported R version can be selected for this platform. \item \code{os_name}: name of the operating system, including Linux distribution name and version for container actions. } } \description{ List R-hub platforms } rhub/DESCRIPTION0000644000176200001440000000246214762422102012724 0ustar liggesusersPackage: rhub Title: Tools for R Package Developers Version: 2.0.1 Authors@R: c( person("Gábor", "Csárdi",, "csardi.gabor@gmail.com", role = c("aut", "cre")), person("Maëlle", "Salmon", role = "aut", email = "maelle.salmon@yahoo.se", comment = c(ORCID = "0000-0002-2815-0399")), person("R Consortium", role = c("fnd"))) Description: R-hub v2 uses GitHub Actions to run 'R CMD check' and similar package checks. The 'rhub' package helps you set up R-hub v2 for your R package, and start running checks. 
License: MIT + file LICENSE URL: https://github.com/r-hub/rhub, https://r-hub.github.io/rhub/ BugReports: https://github.com/r-hub/rhub/issues RoxygenNote: 7.3.2 Depends: R (>= 4.0) Imports: callr, cli, curl, desc, gert, glue, gitcreds, jsonlite, pkgbuild, processx, rappdirs, rematch, R6, rprojroot, utils, whoami Suggests: asciicast, debugme, knitr, mockery, pillar, rmarkdown, testthat (>= 3.0.0), webfakes, withr Encoding: UTF-8 Config/testthat/edition: 3 NeedsCompilation: no Packaged: 2025-03-06 22:13:05 UTC; gaborcsardi Author: Gábor Csárdi [aut, cre], Maëlle Salmon [aut] (), R Consortium [fnd] Maintainer: Gábor Csárdi Repository: CRAN Date/Publication: 2025-03-06 22:40:02 UTC