Merged
17 commits
633358d  Add Tanh activation function to the module exports (kebtes, May 13, 2025)
fcada70  Fix formatting in Activation class by adding missing newline and ensu… (kebtes, May 13, 2025)
5593d99  Add get_params and set_params methods to LeakyReLU class for paramete… (kebtes, May 13, 2025)
dfa1149  Add get_params method to ReLU class for parameter management (kebtes, May 13, 2025)
75b5ddb  Add get_params method to Sigmoid class for parameter management (kebtes, May 13, 2025)
c24b80d  Add get_params method to Tanh class for parameter management (kebtes, May 13, 2025)
784bf6d  Add get_params method to Softmax class for parameter management (kebtes, May 13, 2025)
6a91f6c  Add get_params and set_params methods to Layer class for parameter ma… (kebtes, May 13, 2025)
5c9d806  Add model attribute management methods and JSON serialization for sav… (kebtes, May 13, 2025)
40588d7  Add set_params method to Dense class for parameter management (kebtes, May 13, 2025)
f0d4e82  Add get_params and set_params methods to Optimizer class for paramete… (kebtes, May 13, 2025)
5dc9ef0  Add get_params and set_params methods to GradientDescent class for pa… (kebtes, May 13, 2025)
77e7478  Add get_params and set_params methods to Momentum class for parameter… (kebtes, May 13, 2025)
8f5c7a0  Add __init__.py to expose LAYER_CLASSES for utility module (kebtes, May 13, 2025)
73bb877  Add layer_config.py to define LAYER_CLASSES for activations, layers, … (kebtes, May 13, 2025)
5061d80  Add model attribute management and file handling tests (kebtes, May 13, 2025)
72a3a19  Remove specific CSV file from LFS tracking in .gitattributes (kebtes, May 13, 2025)
1 change: 0 additions & 1 deletion .gitattributes
@@ -1,2 +1 @@
 *.csv filter=lfs diff=lfs merge=lfs -text
-resources/mnist/mnist_train.csv filter=lfs diff=lfs merge=lfs -text
4 changes: 3 additions & 1 deletion nnf/activations/__init__.py
@@ -3,5 +3,7 @@
 from nnf.activations.softmax import Softmax
 from nnf.activations.sigmoid import Sigmoid
 from nnf.activations.leaky_relu import LeakyReLU
+from nnf.activations.tanh import Tanh

-__all__ = ['Activation', 'ReLU', 'Softmax', 'Sigmoid', 'LeakyReLU']
+
+__all__ = ['Activation', 'ReLU', 'Softmax', 'Sigmoid', 'LeakyReLU', 'Tanh']
5 changes: 4 additions & 1 deletion nnf/activations/base.py
@@ -13,4 +13,7 @@ def forward(self, inputs):
         raise NotImplementedError

     def backward(self, dvalues):
-        raise NotImplementedError
+        raise NotImplementedError
+
+    def get_params(self):
+        return super().get_params()
17 changes: 17 additions & 0 deletions nnf/activations/leaky_relu.py
@@ -1,4 +1,6 @@
 import numpy as np
+from typing import override, Dict
+
 from nnf.activations.base import Activation

 class LeakyReLU(Activation):
@@ -50,3 +52,18 @@ def backward(self, dvalues):
         # For inputs <= 0, multiply the gradient by alpha
         self.dinputs[self.inputs <= 0] *= self.alpha
         return self.dinputs
+
+    @override
+    def get_params(self):
+        return {
+            "type" : "LeakyReLU",
+            "attrs" : {
+                "alpha": self.alpha
+            }
+        }
+
+    @override
+    def set_params(self, params : Dict):
+        for key, val in params.items():
+            setattr(self, key, val)
+
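To make the intent of the new get_params/set_params pair concrete, here is a minimal round-trip sketch. It is only an illustration: it assumes LeakyReLU() can be constructed without arguments and that the class is importable from nnf.activations as re-exported in __init__.py above; the constructor signature itself is not shown in this diff.

from nnf.activations import LeakyReLU

# Capture the activation's configuration as a plain dict ...
original = LeakyReLU()
original.alpha = 0.2                    # pretend this slope was tuned
params = original.get_params()          # {"type": "LeakyReLU", "attrs": {"alpha": 0.2}}

# ... and restore it onto a fresh instance via set_params.
restored = LeakyReLU()
restored.set_params(params["attrs"])
assert restored.alpha == original.alpha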
11 changes: 10 additions & 1 deletion nnf/activations/relu.py
@@ -1,4 +1,6 @@
 import numpy as np
+from typing import override
+
 from nnf.activations.base import Activation

 class ReLU(Activation):
@@ -25,4 +27,11 @@ def backward(self, dvalues):

         self.dinputs = dvalues.copy()
         self.dinputs[self.inputs <= 0] = 0
-        return self.dinputs
+        return self.dinputs
+
+    @override
+    def get_params(self):
+        return {
+            "type" : "ReLU",
+            "attrs" : {}
+        }
11 changes: 10 additions & 1 deletion nnf/activations/sigmoid.py
@@ -1,4 +1,6 @@
 import numpy as np
+from typing import override
+
 from nnf.activations.base import Activation

 class Sigmoid(Activation):
@@ -26,4 +28,11 @@ def backward(self, dvalues):

         # derivative of the sigmoid: f'(x) = f(x) * (1 - f(x))
         self.dinputs = dvalues * (self.output * (1 - self.output))
-        return self.dinputs
+        return self.dinputs
+
+    @override
+    def get_params(self):
+        return {
+            "type" : "Sigmoid",
+            "attrs" : {}
+        }
9 changes: 9 additions & 0 deletions nnf/activations/softmax.py
@@ -33,6 +33,8 @@
 """

 import numpy as np
+from typing import override
+
 from nnf.activations.base import Activation

 class Softmax(Activation):
@@ -84,3 +86,10 @@ def backward(self, dvalues):
         """
         self.dinputs = dvalues # usually combined with loss
         return self.dinputs
+
+    @override
+    def get_params(self):
+        return {
+            "type" : "Softmax",
+            "attrs" : {}
+        }
9 changes: 9 additions & 0 deletions nnf/activations/tanh.py
@@ -31,6 +31,8 @@
 """

 import numpy as np
+from typing import override
+
 from nnf.activations.base import Activation

 class Tanh(Activation):
@@ -70,3 +72,10 @@ def backward(self, dvalues):
         """
         self.dinputs = dvalues * (1 - self.output ** 2)
         return self.dinputs
+
+    @override
+    def get_params(self):
+        return {
+            "type" : "Tanh",
+            "attrs" : {}
+        }
7 changes: 7 additions & 0 deletions nnf/layers/base.py
@@ -1,4 +1,5 @@
 from abc import ABC, abstractmethod
+from typing import Dict

 class Layer(ABC):
     def __init__(self):
@@ -19,4 +20,10 @@ def forward(self, inputs):

     @abstractmethod
     def backward(self, dvalues):
         pass
+
+    def get_params(self):
+        return {}
+
+    def set_params(self, params : Dict):
+        pass
36 changes: 33 additions & 3 deletions nnf/layers/dense.py
@@ -1,4 +1,6 @@
 import numpy as np
+from typing import override, Dict, List
+
 from nnf.layers.base import Layer

 class Dense(Layer):
@@ -8,12 +10,12 @@ class Dense(Layer):
     --------------------------------------------
     """

-    def __init__(self, n_inputs, n_neurons):
+    def __init__(self, n_inputs = 1, n_neurons = 1):
         """
         The function initializes weights with random values and biases with zeros.
         """
         super().__init__()

         self.n_inputs = n_inputs
         self.n_neurons = n_neurons

@@ -30,6 +32,7 @@ def __init__(self, n_inputs, n_neurons):
         self.dbiases = None

         # Parameters
+
         self.params = self.weights.size + self.biases.size

     def forward(self, inputs):
@@ -52,4 +55,31 @@ def backward(self, dvalues):
         self.dbiases = np.sum(dvalues, axis=0, keepdims=True)

         self.dinputs = np.dot(dvalues, self.weights.T)
-        return self.dinputs
+        return self.dinputs
+
+    @override
+    def get_params(self):
+        return {
+            "type" : "Dense",
+            "attrs" : {
+                "n_inputs" : self.n_inputs,
+                "n_neurons" : self.n_neurons,
+                "trainable" : self.trainable,
+                "weights" : self.weights,
+                "biases" : self.biases,
+                "dweights" : self.dweights,
+                "dbiases" : self.dbiases
+            }
+        }
+
+    @override
+    def set_params(self, params : Dict):
+        for key, val in params.items():
+            # If the key names one of these attributes and its value was
+            # serialized as a list (e.g. "weights", "biases"), convert it back
+            # to a NumPy array before setting it; otherwise set it as-is.
+            if key in ("n_inputs", "n_neurons", "weights", "biases", "dweights", "dbiases") and isinstance(val, list):
+                setattr(self, key, np.array(val))
+            else:
+                setattr(self, key, val)
+
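The commit messages above mention JSON serialization for saving models, but the model-level save/load code is outside this diff. As a rough sketch only, the following shows how a single Dense layer's params could round-trip through JSON, under two assumptions not confirmed by the diff: Dense is importable as nnf.layers.Dense, and Layer.__init__ (collapsed in the hunk above) defines the trainable flag that get_params reads.

import json
import numpy as np
from nnf.layers import Dense   # assumed import path

# Serialize one Dense layer's params to JSON, converting ndarrays to lists.
layer = Dense(3, 2)
params = layer.get_params()
attrs = {k: (v.tolist() if isinstance(v, np.ndarray) else v)
         for k, v in params["attrs"].items()}
blob = json.dumps({"type": params["type"], "attrs": attrs})

# Rebuild the layer: the defaults added to __init__ in this PR allow Dense(),
# and set_params turns the list-valued weights/biases back into NumPy arrays.
restored = Dense()
restored.set_params(json.loads(blob)["attrs"])
assert np.allclose(restored.weights, layer.weights)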