Skip to content

Commit 9771b5f

Browse files
kstawiski and claude
committed
Fix R CMD check vignette-tangling ERROR via per-chunk purl=FALSE
Both vignettes set `eval = FALSE` globally in their setup chunk, but R CMD check's "running R code from vignettes" stage purls each chunk to a .R file and sources it before any setup runs. The setup-level opts_chunk$set() therefore does not prevent execution of broken example code (`install.packages("torch")` in deep-learning.Rmd and an illustrative TCGA subset-construction in getting-started.Rmd). Fix: mark every non-setup chunk with `eval = FALSE, purl = FALSE`. purl=FALSE is honoured at tangle time, so the extracted .R file is empty and R CMD check has nothing to execute. The knitted vignette HTML still shows all code as documentation (preserved behaviour), and the vignette is retained as a first-class package resource. Also add purl=FALSE to setup chunks for defence in depth. R CMD check --no-manual now reports Status: OK with 0 ERRORs, 0 WARNINGs, 0 NOTEs (full build including vignettes). Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
1 parent ecafa2e commit 9771b5f

2 files changed

Lines changed: 20 additions & 18 deletions

File tree

vignettes/deep-learning.Rmd

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,8 @@ vignette: >
1111
knitr::opts_chunk$set(
1212
collapse = TRUE,
1313
comment = "#>",
14-
eval = FALSE
14+
eval = FALSE,
15+
purl = FALSE
1516
)
1617
```
1718

@@ -25,23 +26,23 @@ export/import, and fine‑tuning.
2526
- `torch`
2627
- `mlr3torch`
2728

28-
```{r install}
29+
```{r install, eval=FALSE, purl=FALSE}
2930
install.packages("torch")
3031
torch::install_torch()
3132
install.packages("mlr3torch")
3233
```
3334

3435
# Load Package and Data
3536

36-
```{r load}
37+
```{r load, eval=FALSE, purl=FALSE}
3738
library(OmicSelector)
3839
3940
data("original_TCGA_data", package = "OmicSelector")
4041
```
4142

4243
# Prepare a Small TCGA Subset
4344

44-
```{r prep}
45+
```{r prep, eval=FALSE, purl=FALSE}
4546
feature_cols <- grep("^hsa\\.", names(original_TCGA_data), value = TRUE)
4647
feature_cols <- head(feature_cols, 300)
4748
@@ -58,7 +59,7 @@ subset_df <- subset_df[sample(nrow(subset_df)), ]
5859

5960
# Create Pipeline
6061

61-
```{r pipeline}
62+
```{r pipeline, eval=FALSE, purl=FALSE}
6263
pipeline <- OmicPipeline$new(
6364
data = subset_df,
6465
target = "sample_type",
@@ -72,7 +73,7 @@ pipeline <- OmicPipeline$new(
7273
This pipeline adds a torch autoencoder *before* feature selection and
7374
trains an MLP via mlr3torch.
7475

75-
```{r learner}
76+
```{r learner, eval=FALSE, purl=FALSE}
7677
learner <- pipeline$create_graph_learner(
7778
filter = "anova",
7879
model = "mlp",
@@ -90,7 +91,7 @@ learner <- pipeline$create_graph_learner(
9091

9192
# Run Nested CV
9293

93-
```{r benchmark}
94+
```{r benchmark, eval=FALSE, purl=FALSE}
9495
result <- pipeline$benchmark(
9596
learners = learner,
9697
outer_folds = 3,
@@ -105,14 +106,14 @@ print(result)
105106

106107
# Fit Final Model
107108

108-
```{r fit}
109+
```{r fit, eval=FALSE, purl=FALSE}
109110
fit <- pipeline$fit(learner, seed = 42)
110111
fit$selected_features
111112
```
112113

113114
# Export / Import Checkpoint
114115

115-
```{r checkpoint}
116+
```{r checkpoint, eval=FALSE, purl=FALSE}
116117
export_omicfit_checkpoint(fit, "mlp_checkpoint.pt")
117118
118119
# Load into a trained learner (for inference)
@@ -121,7 +122,7 @@ fit <- import_omicfit_checkpoint(fit, "mlp_checkpoint.pt")
121122

122123
# Fine‑tune on a New Task
123124

124-
```{r finetune}
125+
```{r finetune, eval=FALSE, purl=FALSE}
125126
task <- pipeline$get_task()
126127
127128
finetuned <- finetune_mlr3torch_checkpoint(

vignettes/getting-started.Rmd

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,8 @@ vignette: >
1111
knitr::opts_chunk$set(
1212
collapse = TRUE,
1313
comment = "#>",
14-
eval = FALSE
14+
eval = FALSE,
15+
purl = FALSE
1516
)
1617
```
1718

@@ -27,7 +28,7 @@ This vignette shows a full workflow on a **tiny TCGA subset**:
2728

2829
# Load Package and Data
2930

30-
```{r load}
31+
```{r load, eval=FALSE, purl=FALSE}
3132
library(OmicSelector)
3233
3334
# TCGA miRNA dataset shipped with the package
@@ -37,7 +38,7 @@ data("original_TCGA_data", package = "OmicSelector")
3738

3839
# Prepare a Tiny TCGA Subset
3940

40-
```{r prep}
41+
```{r prep, eval=FALSE, purl=FALSE}
4142
feature_cols <- grep("^hsa\\.", names(original_TCGA_data), value = TRUE)
4243
feature_cols <- head(feature_cols, 200)
4344
@@ -56,7 +57,7 @@ subset_df <- subset_df[sample(nrow(subset_df)), ]
5657

5758
# Create Pipeline
5859

59-
```{r pipeline}
60+
```{r pipeline, eval=FALSE, purl=FALSE}
6061
pipeline <- OmicPipeline$new(
6162
data = subset_df,
6263
target = "sample_type",
@@ -67,7 +68,7 @@ pipeline <- OmicPipeline$new(
6768

6869
# Build GraphLearner (with Screening)
6970

70-
```{r learner}
71+
```{r learner, eval=FALSE, purl=FALSE}
7172
learner <- pipeline$create_graph_learner(
7273
filter = "anova",
7374
model = "rpart",
@@ -79,7 +80,7 @@ learner <- pipeline$create_graph_learner(
7980

8081
# Optional: Deep Learning + Autoencoder
8182

82-
```{r learner-dl}
83+
```{r learner-dl, eval=FALSE, purl=FALSE}
8384
# Requires torch + mlr3torch
8485
dl_learner <- pipeline$create_graph_learner(
8586
filter = "anova",
@@ -98,7 +99,7 @@ dl_learner <- pipeline$create_graph_learner(
9899

99100
# Run Nested CV
100101

101-
```{r benchmark}
102+
```{r benchmark, eval=FALSE, purl=FALSE}
102103
result <- pipeline$benchmark(
103104
learners = learner,
104105
outer_folds = 3,
@@ -114,7 +115,7 @@ print(result)
114115

115116
# Fit Final Model and Extract Features
116117

117-
```{r fit}
118+
```{r fit, eval=FALSE, purl=FALSE}
118119
fit <- pipeline$fit(
119120
learner = learner,
120121
seed = 42,

0 commit comments

Comments (0)