Skip to content

Commit

Permalink
Code Review Fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
HubertR21 committed Feb 20, 2024
1 parent 733527d commit 745a570
Show file tree
Hide file tree
Showing 6 changed files with 12 additions and 11 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ Authors@R:
Description: The forester package is an open-source AutoML package implemented in R designed for
training high-quality tree-based models on tabular data. It fully supports regression, binary
classification, and multiclass classification tasks, and provides a limited support for
teh sruvival analysis task. A single line of code allows the use of unprocessed datasets, informs
the survival analysis task. A single line of code allows the use of unprocessed datasets, informs
about potential issues concerning them, and handles feature engineering automatically.
Moreover, hyperparameter tuning is performed by Bayesian optimization, which provides
high-quality outcomes. The results are later served as a ranked list of models. Finally, the
Expand Down
4 changes: 2 additions & 2 deletions R/custom_preprocessing.R
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,8 @@ custom_preprocessing <- function(data,
type = 'auto',
na_indicators = c(''),
removal_parameters = list(
active_modules = c(duplicate_cols = TRUE, id_like_cols = TRUE,
static_cols = TRUE, sparse_cols = TRUE,
active_modules = c(duplicate_cols = TRUE, id_like_cols = TRUE,
static_cols = TRUE, sparse_cols = TRUE,
corrupt_rows = TRUE, correlated_cols = TRUE),
id_names = c('id', 'nr', 'number', 'idx', 'identification', 'index'),
static_threshold = 0.99,
Expand Down
6 changes: 3 additions & 3 deletions R/plot_classification.R
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ plot.binary_clf <- function(x,
test_y <- data.frame(metric = rownames(test_scores), value = unlist(test_scores))
test_data <- x$score_test[1:10, ]
test_data$name <- factor(test_data$name, levels = unique(test_data$name))
test_all <- cbind(test_data[rep(seq_len(nrow(test_data)), each = 3),],
test_all <- cbind(test_data[rep(seq_len(nrow(test_data)), each = 3), ],
data.frame(metric = rownames(test_scores), value = unlist(test_scores)))
test_all <- test_all[, c('name', 'engine', 'tuning', 'metric', 'value')]

Expand All @@ -92,7 +92,7 @@ plot.binary_clf <- function(x,
valid_y <- data.frame(metric = rownames(valid_scores), value = unlist(valid_scores))
valid_data <- x$score_valid[1:10, ]
valid_data$name <- factor(valid_data$name, levels = unique(valid_data$name))
valid_all <- cbind(valid_data[rep(seq_len(nrow(valid_data)), each = 3),],
valid_all <- cbind(valid_data[rep(seq_len(nrow(valid_data)), each = 3), ],
data.frame(metric = rownames(valid_scores), value = unlist(valid_scores)))
valid_all <- valid_all[, c('name', 'engine', 'tuning', 'metric', 'value')]

Expand Down Expand Up @@ -124,7 +124,7 @@ plot.binary_clf <- function(x,
return(p)
}

plot_test <- comparison_plot(TRUE , test_all)
plot_test <- comparison_plot(TRUE, test_all)
plot_valid <- comparison_plot(FALSE, valid_all)
return(patchwork::wrap_plots(list(plot_test, plot_valid), ncol = 1))
}
Expand Down
4 changes: 2 additions & 2 deletions R/plot_multiclass.R
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ plot.multiclass <- function(x,
test_y <- data.frame(metric = rownames(test_scores), value = unlist(test_scores))
test_data <- x$score_test[1:10, ]
test_data$name <- factor(test_data$name, levels = unique(test_data$name))
test_all <- cbind(test_data[rep(seq_len(nrow(test_data)), each = 4),],
test_all <- cbind(test_data[rep(seq_len(nrow(test_data)), each = 4), ],
data.frame(metric = rownames(test_scores), value = unlist(test_scores)))
test_all <- test_all[, c('name', 'engine', 'tuning', 'metric', 'value')]

Expand All @@ -92,7 +92,7 @@ plot.multiclass <- function(x,
valid_y <- data.frame(metric = rownames(valid_scores), value = unlist(valid_scores))
valid_data <- x$score_valid[1:10, ]
valid_data$name <- factor(valid_data$name, levels = unique(valid_data$name))
valid_all <- cbind(valid_data[rep(seq_len(nrow(valid_data)), each = 4),],
valid_all <- cbind(valid_data[rep(seq_len(nrow(valid_data)), each = 4), ],
data.frame(metric = rownames(valid_scores), value = unlist(valid_scores)))
valid_all <- valid_all[, c('name', 'engine', 'tuning', 'metric', 'value')]

Expand Down
2 changes: 1 addition & 1 deletion R/report.R
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ report <- function(train_output,
'guides for installation from GitHub repository README. The',
'report() function is unable to work properly without it. \n\n',
verbose = TRUE)
stop('Package not found: tinytex, to use it please follow guides for installation from GitHub repository README. The report() fucntion is unable to work properly wihtout it. \n\n')
stop('Package not found: tinytex, to use it please follow guides for installation from GitHub repository README. The report() function is unable to work properly without it. \n\n')
})
if (train_output$type == 'regression') {
input_file_path <- system.file('rmd', 'report_regression.Rmd', package = 'forester')
Expand Down
5 changes: 3 additions & 2 deletions misc/old_tests_03_02_2023/testthat/test-predict_models_all.R
Original file line number Diff line number Diff line change
Expand Up @@ -103,14 +103,15 @@ test_that('test-predict_models_all', {
)
test_data <-
prepare_data(split_data$test,
y = 'Price',c('ranger', 'xgboost', 'decision_tree','lightgbm', 'catboost'),
y = 'Price',
engine = c('ranger', 'xgboost', 'decision_tree', 'lightgbm', 'catboost'),
predict = TRUE,
train = split_data$train)
suppressWarnings(
model <-
train_models(train_data,
y = 'Price',
engine = c('ranger', 'xgboost', 'decision_tree','lightgbm', 'catboost'),
engine = c('ranger', 'xgboost', 'decision_tree', 'lightgbm', 'catboost'),
type = type)
)
suppressWarnings(
Expand Down

0 comments on commit 745a570

Please sign in to comment.