From 1c2ffec356956674906cd8bec3ce859855a63e9d Mon Sep 17 00:00:00 2001 From: Matthew Wardrop Date: Fri, 20 Dec 2024 11:29:30 -0800 Subject: [PATCH] wip --- formulaic/model_spec.py | 27 +++++++++++++++++---------- tests/test_model_spec.py | 7 ++++++- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/formulaic/model_spec.py b/formulaic/model_spec.py index 66ce16a3..472ac62e 100644 --- a/formulaic/model_spec.py +++ b/formulaic/model_spec.py @@ -596,19 +596,17 @@ def subset(self, terms_spec: FormulaSpec, **formula_kwargs: Any) -> ModelSpec: def differentiate(self, *wrt: str, use_sympy: bool = False) -> ModelSpec: """ - EXPERIMENTAL: Take the gradient of this model spec. When used a linear - regression, evaluating a trained model on model matrices generated by - this formula is equivalent to estimating the gradient of that fitted - form with respect to `vars`. + Take the gradient of the formula associated with this model spec with + respect to the variables in `wrt`. + + When used in a linear regression context, making predictions based on the + generated model matrices is equivalent to estimating the gradient of the + fitted model with respect to `wrt`. Args: wrt: The variables with respect to which the gradient should be taken. use_sympy: Whether to use sympy to perform symbolic differentiation. - - Notes: - This method is provisional and may be removed in any future major - version. """ return self.update( formula=self.formula.differentiate(*wrt, use_sympy=use_sympy), ) @@ -789,8 +787,17 @@ def map_formula_structure_onto_model_spec( def differentiate(self, *wrt: str, use_sympy: Any = False) -> ModelSpecs: """ - This method proxies the experimental `ModelSpec.differentiate(...)` API. - See `ModelSpec.differentiate` for more details. + Take the gradient of the formulae associated with this `ModelSpecs` + instance with respect to the variables in `wrt`.
+ + When used in a linear regression context, making predictions based on the + generated model matrices is equivalent to estimating the gradient of the + fitted model with respect to `wrt`. + + Args: + wrt: The variables with respect to which the gradient should be + taken. + use_sympy: Whether to use sympy to perform symbolic differentiation. """ return cast( ModelSpecs, diff --git a/tests/test_model_spec.py b/tests/test_model_spec.py index 083b1395..d4ecb85f 100644 --- a/tests/test_model_spec.py +++ b/tests/test_model_spec.py @@ -174,9 +174,14 @@ def test_get_linear_constraints(self, model_spec): assert lc.constraint_values == [3] assert lc.variable_names == model_spec.column_names - def test_differentiate(self, model_spec, formula): + def test_differentiate(self, data, model_spec, formula): assert model_spec.differentiate("a").formula == formula.differentiate("a") + model_spec2 = Formula("log(a)").get_model_matrix(data).model_spec + mm = model_spec2.differentiate("a").get_model_matrix(data) + assert mm.model_spec.column_names == model_spec2.column_names + assert mm.model_spec.formula == ["0", "(1/a)"] + def test_empty(self): model_spec = ModelSpec([])