Changes from all commits (23 commits; pull request closed)
167f345  initial commit; solved issue #237 (Jun 25, 2025)
c3eba70  Refactor kwargs to clearly pass encoder_type='linear'; Refactoring SK… (Jun 26, 2025)
59210d7  Add type hints (Jul 8, 2025)
f47f12e  Retain some parameters for encoder (Jul 8, 2025)
9316b5d  configure width, layers, or other metamodel args in _build_metamodel (Jul 13, 2025)
915df88  Run format.sh (Jul 13, 2025)
6225d48  Expanding encoder_kwargs in other metamodels (Jul 15, 2025)
3ee5fce  _build_metamodel keep optional args in kwargs (Jul 15, 2025)
1fe9940  Add kwargs in Sklearn to allow special args for all metamodels (Jul 18, 2025)
b73136e  Expand for tasksplitmodel (Jul 19, 2025)
9c48790  Expand init and _build for more classes (Jul 19, 2025)
cdcc4be  Add all needed args in base constructor (Jul 20, 2025)
a24e323  Expanding init and _build_metamodel in lightning modules to all explicit (Jul 23, 2025)
467fa3c  Finish Expanding (Jul 23, 2025)
6762d15  restore base constructor in SKlearn to use kwargs, since subclass nee… (Jul 23, 2025)
5f0ba2c  Expand encoder kwargs to width layer link_fn thoroughly in SKlearn (Jul 24, 2025)
62b44be  Keep width and layers (no encoder prefix); set both link_fn and encod… (Jul 25, 2025)
f0b0de3  Expand encoder kwargs in test_naive (Jul 25, 2025)
4fa3077  Restore kwargs in Base init for compatibility (Jul 25, 2025)
4c1e686  every class (except the four about correlation) in Lightning modules p… (Jul 26, 2025)
333b996  small adjustment (Jul 26, 2025)
307b92f  Add unittest for lightning modules invalid params (Jul 27, 2025)
e99e87c  deprecate univariate arg entirely. Only used for internal configurat… (cnellington, Aug 5, 2025)
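Taken together, the commits flatten the nested encoder_kwargs dictionary into explicit width, layers, and encoder_link_fn constructor arguments across the SKLearn wrapper and Lightning modules, and retire the user-facing univariate argument. A minimal before/after sketch of the wrapper API (the numeric values and the old-style call are illustrative, not taken from this diff):

import numpy as np
from contextualized.easy import ContextualizedRegressor

C = np.random.normal(0, 1, (100, 2))  # context features
X = np.random.normal(0, 1, (100, 2))  # predictors
Y = np.random.normal(0, 1, 100)       # targets

# Before this PR: encoder settings rode along in a nested dict.
# model = ContextualizedRegressor(encoder_kwargs={"width": 25, "layers": 2})

# After this PR: encoder settings are explicit top-level kwargs.
model = ContextualizedRegressor(width=25, layers=2)
model.fit(C, X, Y)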
contextualized/analysis/embeddings.py (9 changes: 2 additions & 7 deletions)

@@ -134,10 +134,7 @@ def plot_lowdim_rep(
     order = np.argsort(tag_names)
     tag_names = np.array(tag_names)[order]
     tag = np.array([list(order).index(int(x)) for x in tag])
-    good_tags = [
-        np.sum(tag == i) > min_samples
-        for i in range(len(tag_names))
-    ]
+    good_tags = [np.sum(tag == i) > min_samples for i in range(len(tag_names))]
     tag_names = np.array(tag_names)[good_tags]
     good_idxs = np.array([good_tags[int(tag[i])] for i in range(len(tag))])
     tag = tag[good_idxs]
@@ -224,8 +224,6 @@ def plot_lowdim_rep(
         plt.legend(handles=[nan_legend], loc="best")
 
     if cbar_label is not None:
-        color_bar.ax.set_ylabel(
-            cbar_label, fontsize=cbar_fontsize
-        )
+        color_bar.ax.set_ylabel(cbar_label, fontsize=cbar_fontsize)
     if figname is not None:
         plt.savefig(f"{figname}.pdf", dpi=300, bbox_inches="tight")
contextualized/easy/tests.py (10 changes: 10 additions & 0 deletions)

@@ -395,6 +395,16 @@ def test_regressor_normalization(self):
         preds = model.predict(C, X)
         assert preds.shape == Y.shape
 
+    def test_linear_encoder_pass(self):
+        C = np.random.normal(0, 1, (100, 2))
+        X = np.random.normal(0, 1, (100, 2))
+        Y = np.random.normal(0, 1, 100)
+
+        try:
+            model = ContextualizedRegressor(encoder_type="linear")
+            model.fit(C, X, Y)
+        except Exception as e:
+            self.fail(f"Linear encoder crashed with exception: {e}")
 
 
 if __name__ == "__main__":
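The new test_linear_encoder_pass is a smoke test: it fails only if constructing and fitting a ContextualizedRegressor with encoder_type="linear" raises. To run it in isolation from the repository root (assuming Python 3.7+ for unittest's -k filter):

python -m unittest -k test_linear_encoder_pass contextualized.easy.tests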
contextualized/easy/wrappers/SKLearnWrapper.py (41 changes: 19 additions & 22 deletions)

@@ -98,7 +98,9 @@ def __init__(
                 "link_fn",
                 "univariate",
                 "encoder_type",
-                "encoder_kwargs",
+                "width",
+                "layers",
+                "encoder_link_fn",
                 "model_regularizer",
                 "num_archetypes",
                 "learning_rate",
@@ -142,16 +144,16 @@ def __init__(
             "encoder_link_fn",
         ]
         self.constructor_kwargs = self._organize_constructor_kwargs(**kwargs)
-        self.constructor_kwargs["encoder_kwargs"]["width"] = kwargs.pop(
-            "width", self.constructor_kwargs["encoder_kwargs"]["width"]
+        self.constructor_kwargs["width"] = kwargs.pop(
+            "width", self.constructor_kwargs["width"]
         )
-        self.constructor_kwargs["encoder_kwargs"]["layers"] = kwargs.pop(
-            "layers", self.constructor_kwargs["encoder_kwargs"]["layers"]
+        self.constructor_kwargs["layers"] = kwargs.pop(
+            "layers", self.constructor_kwargs["layers"]
         )
-        self.constructor_kwargs["encoder_kwargs"]["link_fn"] = kwargs.pop(
+        self.constructor_kwargs["encoder_link_fn"] = kwargs.pop(
             "encoder_link_fn",
-            self.constructor_kwargs["encoder_kwargs"].get(
-                "link_fn", self.default_encoder_link_fn
+            self.constructor_kwargs.get(
+                "encoder_link_fn", self.default_encoder_link_fn
             ),
         )
         self.not_constructor_kwargs = {
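The kwargs.pop pattern above gives an explicitly passed value precedence over the organized default, and consumes the key so it is not forwarded a second time. A self-contained sketch of the idiom with illustrative values:

# constructor_kwargs as returned by _organize_constructor_kwargs (illustrative).
constructor_kwargs = {"width": 25, "layers": 3}
kwargs = {"width": 8}  # the user passed width=8 to the wrapper

# Explicit user values win; otherwise the organized default is kept.
constructor_kwargs["width"] = kwargs.pop("width", constructor_kwargs["width"])
constructor_kwargs["layers"] = kwargs.pop("layers", constructor_kwargs["layers"])

assert constructor_kwargs == {"width": 8, "layers": 3}
assert "width" not in kwargs  # consumed, so it cannot be forwarded twice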
@@ -303,19 +305,13 @@ def maybe_add_constructor_kwarg(kwarg, default_val):
             constructor_kwargs[kwarg] = kwargs.get(kwarg, default_val)
 
         maybe_add_constructor_kwarg("link_fn", LINK_FUNCTIONS["identity"])
-        maybe_add_constructor_kwarg("univariate", False)
         maybe_add_constructor_kwarg("encoder_type", self.default_encoder_type)
         maybe_add_constructor_kwarg("loss_fn", LOSSES["mse"])
-        maybe_add_constructor_kwarg(
-            "encoder_kwargs",
-            {
-                "width": kwargs.get("encoder_width", self.default_encoder_width),
-                "layers": kwargs.get("encoder_layers", self.default_encoder_layers),
-                "link_fn": kwargs.get("encoder_link_fn", self.default_encoder_link_fn),
-            },
-        )
+        maybe_add_constructor_kwarg("width", self.default_encoder_width)
+        maybe_add_constructor_kwarg("layers", self.default_encoder_layers)
+        maybe_add_constructor_kwarg("encoder_link_fn", self.default_encoder_link_fn)
         if kwargs.get("subtype_probabilities", False):
-            constructor_kwargs["encoder_kwargs"]["link_fn"] = LINK_FUNCTIONS["softmax"]
+            constructor_kwargs["encoder_link_fn"] = LINK_FUNCTIONS["softmax"]
 
         # Make regularizer
         if "model_regularizer" in self.acceptable_kwargs["model"]:
@@ -449,7 +445,9 @@ def predict(
         preds = np.mean(predictions, axis=0)
         if self.normalize and self.scalers["Y"] is not None:
             if individual_preds:
-                preds = np.array([self.scalers["Y"].inverse_transform(p) for p in preds])
+                preds = np.array(
+                    [self.scalers["Y"].inverse_transform(p) for p in preds]
+                )
             else:
                 preds = self.scalers["Y"].inverse_transform(preds)
         return preds
@@ -488,12 +486,11 @@ def predict_params(
             get_dataloader = lambda i: self.models[i].dataloader(
                 self._maybe_scale_C(C),
                 np.zeros((len(C), self.x_dim)),
-                np.zeros((len(C), self.y_dim))
+                np.zeros((len(C), self.y_dim)),
             )
         else:
             get_dataloader = lambda i: self.models[i].dataloader(
-                self._maybe_scale_C(C),
-                np.zeros((len(C), self.x_dim))
+                self._maybe_scale_C(C), np.zeros((len(C), self.x_dim))
             )
         predictions = [
             self.trainers[i].predict_params(self.models[i], get_dataloader(i), **kwargs)
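The zero arrays above are placeholders: predicting context-specific parameters depends only on the contexts C, but the model's dataloader still expects X (and, for Y-aware models, Y) with a matching number of rows. A sketch of the pattern as a hypothetical helper:

import numpy as np

def params_dataloader(model, C, x_dim, y_dim=None):
    # Only C drives parameter prediction; X and Y are zero-filled
    # placeholders shaped to satisfy the dataloader signature.
    X_placeholder = np.zeros((len(C), x_dim))
    if y_dim is None:
        return model.dataloader(C, X_placeholder)
    return model.dataloader(C, X_placeholder, np.zeros((len(C), y_dim)))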