From 8caf353d8fd85acf485d5d617203ded58c18e872 Mon Sep 17 00:00:00 2001
From: PabloNA97
Date: Sun, 24 Aug 2025 16:05:44 +0200
Subject: [PATCH] fix: move the dropout layer after batch normalization

Applying dropout before batch normalization alters the batch statistics that
BatchNorm1d uses to normalize the activations, so the dropout layer is now
added after the normalization layer instead of before it.
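
For illustration, a minimal sketch in plain PyTorch (not the mlcolvar code
itself; layer sizes, option values, and the ReLU activation are made up) of
the per-layer ordering this patch produces, Linear -> activation ->
BatchNorm1d -> Dropout:

    import torch

    # Hypothetical example mirroring the module ordering after this patch.
    layers = [2, 16, 16, 1]          # input -> hidden -> hidden -> output
    dropout = [0.1, 0.1, None]       # no dropout after the last layer
    batchnorm = [True, True, False]  # no batchnorm after the last layer

    modules = []
    for i in range(len(layers) - 1):
        modules.append(torch.nn.Linear(layers[i], layers[i + 1]))
        if i < len(layers) - 2:       # activation on hidden layers only
            modules.append(torch.nn.ReLU())
        if batchnorm[i]:              # 1) normalize the activations first
            modules.append(torch.nn.BatchNorm1d(layers[i + 1]))
        if dropout[i] is not None:    # 2) then drop units, so the batch-norm
            modules.append(torch.nn.Dropout(p=dropout[i]))  # statistics are
                                                            # computed on
                                                            # undropped values
    nn = torch.nn.Sequential(*modules)

    x = torch.randn(8, 2)             # batch of 8 samples, 2 input features
    print(nn(x).shape)                # torch.Size([8, 1])

With this ordering the running mean and variance tracked by BatchNorm1d are
estimated from the full, undropped activations, so they match the statistics
seen at inference time when dropout is disabled.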
---
mlcolvar/core/nn/feedforward.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/mlcolvar/core/nn/feedforward.py b/mlcolvar/core/nn/feedforward.py
index f84596dd..89e5a593 100644
--- a/mlcolvar/core/nn/feedforward.py
+++ b/mlcolvar/core/nn/feedforward.py
@@ -79,10 +79,10 @@ def __init__(
n_layers = len(layers) - 1
# -- activation
activation_list = parse_nn_options(activation, n_layers, last_layer_activation)
- # -- dropout
- dropout_list = parse_nn_options(dropout, n_layers, last_layer_activation)
# -- batchnorm
batchnorm_list = parse_nn_options(batchnorm, n_layers, last_layer_activation)
+ # -- dropout
+ dropout_list = parse_nn_options(dropout, n_layers, last_layer_activation)
# Create network
modules = []
@@ -93,11 +93,11 @@ def __init__(
if activ is not None:
modules.append(get_activation(activ))
- if drop is not None:
- modules.append(torch.nn.Dropout(p=drop))
-
if norm:
modules.append(torch.nn.BatchNorm1d(layers[i + 1]))
+
+ if drop is not None:
+ modules.append(torch.nn.Dropout(p=drop))
# store model and attributes
self.nn = torch.nn.Sequential(*modules)