Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 17 additions & 10 deletions formulaic/model_spec.py
Original file line number Diff line number Diff line change
Expand Up @@ -596,19 +596,17 @@ def subset(self, terms_spec: FormulaSpec, **formula_kwargs: Any) -> ModelSpec:

def differentiate(self, *wrt: str, use_sympy: bool = False) -> ModelSpec:
"""
EXPERIMENTAL: Take the gradient of this model spec. When used a linear
regression, evaluating a trained model on model matrices generated by
this formula is equivalent to estimating the gradient of that fitted
form with respect to `vars`.
Take the gradient of the formula associated with this model spec with
respect to the variables in `wrt`.

When used in a linear regression context, making predictions based on the
generated model matrices is equivalent to estimating the gradient of the
fitted model with respect to `wrt`.

Args:
wrt: The variables with respect to which the gradient should be
taken.
use_sympy: Whether to use sympy to perform symbolic differentiation.

Notes:
This method is provisional and may be removed in any future major
version.
"""
return self.update(
formula=self.formula.differentiate(*wrt, use_sympy=use_sympy),
Expand Down Expand Up @@ -789,8 +787,17 @@ def map_formula_structure_onto_model_spec(

def differentiate(self, *wrt: str, use_sympy: Any = False) -> ModelSpecs:
"""
This method proxies the experimental `ModelSpec.differentiate(...)` API.
See `ModelSpec.differentiate` for more details.
Take the gradient of the formulae associated with this `ModelSpecs`
instance with respect to the variables in `wrt`.

When used in a linear regression context, making predictions based on the
generated model matrices is equivalent to estimating the gradient of the
fitted model with respect to `wrt`.

Args:
wrt: The variables with respect to which the gradient should be
taken.
use_sympy: Whether to use sympy to perform symbolic differentiation.
"""
return cast(
ModelSpecs,
Expand Down
7 changes: 6 additions & 1 deletion tests/test_model_spec.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,9 +174,14 @@ def test_get_linear_constraints(self, model_spec):
assert lc.constraint_values == [3]
assert lc.variable_names == model_spec.column_names

def test_differentiate(self, model_spec, formula):
def test_differentiate(self, data, model_spec, formula):
assert model_spec.differentiate("a").formula == formula.differentiate("a")

model_spec2 = Formula("log(a)").get_model_matrix(data).model_spec
mm = model_spec2.differentiate("a").get_model_matrix(data)
assert mm.model_spec.column_names == model_spec2.column_names
assert mm.model_spec.formula == ["0", "(1/a)"]

def test_empty(self):
model_spec = ModelSpec([])

Expand Down