diff --git a/NAMESPACE b/NAMESPACE
index e5df44c7..138ecf21 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -51,6 +51,8 @@ export(activation)
 export(activation_2)
 export(adjust_deg_free)
 export(all_neighbors)
+export(average_before_softmax)
+export(balance_probabilities)
 export(batch_size)
 export(buffer)
 export(cal_method_class)
@@ -119,6 +121,7 @@ export(no_global_pruning)
 export(num_breaks)
 export(num_clusters)
 export(num_comp)
+export(num_estimators)
 export(num_hash)
 export(num_knots)
 export(num_leaves)
@@ -172,6 +175,7 @@ export(shrinkage_variance)
 export(signed_hash)
 export(significance_threshold)
 export(smoothness)
+export(softmax_temperature)
 export(spline_degree)
 export(splitting_rule)
 export(stop_iter)
@@ -182,6 +186,7 @@ export(svm_margin)
 export(target_weight)
 export(threshold)
 export(token)
+export(training_set_limit)
 export(tree_depth)
 export(trees)
 export(trim_amount)
diff --git a/NEWS.md b/NEWS.md
index c1384934..b9e49d5c 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -2,6 +2,8 @@
 
 * A bug was fixed where some space-filling designs did not respect the `original` argument (#409).
 
+* Parameters were added for the `tab_pfn` model: `num_estimators()`, `softmax_temperature()`, `balance_probabilities()`, `average_before_softmax()`, and `training_set_limit()`.
+
 # dials 1.4.2
 
 * `prop_terms()` is a new parameter object used for recipes that do supervised feature selection (#395).
diff --git a/R/param_schedulers.R b/R/param_schedulers.R
index 1cfb5fcc..f0824f05 100644
--- a/R/param_schedulers.R
+++ b/R/param_schedulers.R
@@ -1,5 +1,5 @@
 #' Parameters for neural network learning rate schedulers
-#
+#'
 #' These parameters are used for constructing neural network models.
 #'
 #' @inheritParams Laplace
diff --git a/R/param_tab_pfn.R b/R/param_tab_pfn.R
new file mode 100644
index 00000000..36fd3d5b
--- /dev/null
+++ b/R/param_tab_pfn.R
@@ -0,0 +1,70 @@
+#' Parameters for TabPFN models
+#'
+#' These parameters are used for constructing prior-data fitted network (TabPFN)
+#' models.
+#'
+#' @inheritParams Laplace
+#' @inheritParams select_features
+#'
+#' @details
+#' These parameters are often used with TabPFN models via `parsnip::tab_pfn()`.
+#' @name tab-pfn-param
+#' @export
+num_estimators <- function(range = c(1, 25), trans = NULL) {
+  new_quant_param(
+    type = "integer",
+    range = range,
+    inclusive = c(TRUE, TRUE),
+    trans = trans,
+    label = c(num_estimators = "# Estimators"),
+    finalize = NULL
+  )
+}
+
+#' @rdname tab-pfn-param
+#' @export
+softmax_temperature <- function(range = c(0, 10), trans = NULL) {
+  new_quant_param(
+    type = "double",
+    range = range,
+    inclusive = c(FALSE, TRUE),
+    trans = trans,
+    label = c(softmax_temperature = "Softmax Temperature"),
+    finalize = NULL
+  )
+}
+
+#' @rdname tab-pfn-param
+#' @export
+balance_probabilities <- function(values = c(TRUE, FALSE)) {
+  new_qual_param(
+    type = "logical",
+    values = values,
+    label = c(balance_probabilities = "Balance Probabilities?"),
+    finalize = NULL
+  )
+}
+
+#' @rdname tab-pfn-param
+#' @export
+average_before_softmax <- function(values = c(TRUE, FALSE)) {
+  new_qual_param(
+    type = "logical",
+    values = values,
+    label = c(average_before_softmax = "Average Before Softmax?"),
+    finalize = NULL
+  )
+}
+
+#' @rdname tab-pfn-param
+#' @export
+training_set_limit <- function(range = c(2L, 10000L), trans = NULL) {
+  new_quant_param(
+    type = "integer",
+    range = range,
+    inclusive = c(TRUE, TRUE),
+    trans = trans,
+    label = c(training_set_limit = "Training Set Size"),
+    finalize = NULL
+  )
+}
diff --git a/_pkgdown.yml b/_pkgdown.yml
index 5a19beef..d1ec29a2 100644
--- a/_pkgdown.yml
+++ b/_pkgdown.yml
@@ -82,6 +82,7 @@ reference:
       - neighbors
       - num_clusters
       - num_comp
+      - num_estimators
       - num_knots
       - penalty
       - predictor_prop
diff --git a/man/scheduler-param.Rd b/man/scheduler-param.Rd
index 0410054c..867c54bf 100644
--- a/man/scheduler-param.Rd
+++ b/man/scheduler-param.Rd
@@ -11,8 +11,7 @@
 \alias{rate_decay}
 \alias{rate_schedule}
 \alias{values_scheduler}
-\title{Parameters for neural network learning rate schedulers
-These parameters are used for constructing neural network models.}
+\title{Parameters for neural network learning rate schedulers}
 \format{
 An object of class \code{character} of length 5.
 }
@@ -47,7 +46,6 @@ transformation, \code{NULL}.}
 in examples below.}
 }
 \description{
-Parameters for neural network learning rate schedulers
 These parameters are used for constructing neural network models.
 }
 \details{
diff --git a/man/tab-pfn-param.Rd b/man/tab-pfn-param.Rd
new file mode 100644
index 00000000..ead8a341
--- /dev/null
+++ b/man/tab-pfn-param.Rd
@@ -0,0 +1,40 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/param_tab_pfn.R
+\name{tab-pfn-param}
+\alias{tab-pfn-param}
+\alias{num_estimators}
+\alias{softmax_temperature}
+\alias{balance_probabilities}
+\alias{average_before_softmax}
+\alias{training_set_limit}
+\title{Parameters for TabPFN models}
+\usage{
+num_estimators(range = c(1, 25), trans = NULL)
+
+softmax_temperature(range = c(0, 10), trans = NULL)
+
+balance_probabilities(values = c(TRUE, FALSE))
+
+average_before_softmax(values = c(TRUE, FALSE))
+
+training_set_limit(range = c(2L, 10000L), trans = NULL)
+}
+\arguments{
+\item{range}{A two-element vector holding the \emph{defaults} for the smallest and
+largest possible values, respectively. If a transformation is specified,
+these values should be in the \emph{transformed units}.}
+
+\item{trans}{A \code{trans} object from the \code{scales} package, such as
+\code{scales::transform_log10()} or \code{scales::transform_reciprocal()}. If not provided,
+the default is used which matches the units used in \code{range}. If no
+transformation, \code{NULL}.}
+
+\item{values}{A vector of possible values (TRUE or FALSE).}
+}
+\description{
+These parameters are used for constructing prior-data fitted network (TabPFN)
+models.
+}
+\details{
+These parameters are often used with TabPFN models via \code{parsnip::tab_pfn()}.
+}
diff --git a/tests/testthat/test-params.R b/tests/testthat/test-params.R
index 431faf52..dd110e5b 100644
--- a/tests/testthat/test-params.R
+++ b/tests/testthat/test-params.R
@@ -143,6 +143,15 @@ test_that("param ranges", {
   expect_equal(mtry_prop(c(.1, .2))$range, list(lower = .1, upper = .2))
   expect_equal(dropout(c(.1, .2))$range, list(lower = .1, upper = .2))
   expect_equal(prop_terms(c(.1, .2))$range, list(lower = .1, upper = .2))
+  expect_equal(num_estimators(c(1L, 10L))$range, list(lower = 1L, upper = 10L))
+  expect_equal(
+    softmax_temperature(c(0.1, 2.0))$range,
+    list(lower = 0.1, upper = 2.0)
+  )
+  expect_equal(
+    training_set_limit(c(2L, 10L))$range,
+    list(lower = 2L, upper = 10L)
+  )
 })
 
 
@@ -179,4 +188,6 @@ test_that("param values", {
   expect_equal(all_neighbors(TRUE)$values, TRUE)
   expect_equal(cal_method_class()$values, values_cal_cls)
   expect_equal(cal_method_reg()$values, values_cal_reg)
+  expect_equal(balance_probabilities(TRUE)$values, TRUE)
+  expect_equal(average_before_softmax(TRUE)$values, TRUE)
 })