-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathneuralNetwork.cu
More file actions
78 lines (58 loc) · 2.39 KB
/
neuralNetwork.cu
File metadata and controls
78 lines (58 loc) · 2.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
#include <stdio.h>
// Logistic (sigmoid) activation: maps any real x into (0, 1).
// Uses double literals (1.0, not 1.0f) so the whole expression stays in
// double precision; the original mixed float constants into double math.
__device__ double sigmoid(double x){
    return 1.0 / (1.0 + exp(-x));
}
// Derivative of the sigmoid expressed in terms of the ACTIVATION value:
// if s = sigmoid(z), then ds/dz = s * (1 - s). Callers must pass the
// already-sigmoided value (as forwardFeed stores in hiddenLayer/outputLayer),
// not the pre-activation z. Double literals used for consistent precision.
__device__ double dSigmoid(double x){
    return x * (1.0 - x);
}
// Forward pass of a 2-layer MLP (inputs -> hidden -> outputs), sigmoid
// activations throughout, for one training sample.
//
// Row-major weight layouts (shared with backpropogate):
//   inputLayer    : [numSamples][numInputs]; row trainingSetIndex is read
//   hiddenWeights : [numHiddenNodes][numInputs]  -> hiddenWeights[j*numInputs + k]
//   outputWeights : [numHiddenNodes][numOutputs] -> outputWeights[k*numOutputs + j]
// Writes hiddenLayer[0..numHiddenNodes) and outputLayer[0..numOutputs).
//
// The per-sample computation is sequential, so intended launch is <<<1,1>>>.
// The guard below makes wider launches safe: extra threads exit instead of
// redundantly re-writing the same global locations (same final values).
__global__ void forwardFeed(double* inputLayer, double* hiddenWeights, double* hiddenLayer, double* outputLayer, double* outputWeights, double* outputLayerBias, double* hiddenLayerBias, int numHiddenNodes, int numInputs, int numOutputs, int trainingSetIndex){
    if (blockIdx.x != 0 || threadIdx.x != 0) return;
    int i = trainingSetIndex;
    // Hidden-layer activations: bias + dot(sample inputs, hidden weights row).
    for(int j = 0; j < numHiddenNodes; j++){
        double activation = hiddenLayerBias[j];
        for(int k = 0; k < numInputs; k++){
            activation += inputLayer[(i * numInputs) + k] * hiddenWeights[(j * numInputs) + k];
        }
        hiddenLayer[j] = sigmoid(activation);
    }
    // Output-layer activations: bias + dot(hidden activations, output weights column).
    for(int j = 0; j < numOutputs; j++){
        double activation = outputLayerBias[j];
        for(int k = 0; k < numHiddenNodes; k++){
            activation += hiddenLayer[k] * outputWeights[(k * numOutputs) + j];
        }
        outputLayer[j] = sigmoid(activation);
    }
}
// trainingOutputs holds the expected (target) values; backpropogate compares
// them against outputLayer and nudges weights/biases toward the targets.
// One gradient-descent step (backpropagation) for the 2-layer MLP evaluated
// by forwardFeed. Must run AFTER forwardFeed has populated hiddenLayer and
// outputLayer for the same trainingSetIndex. Intended launch: <<<1,1>>>.
//
// Weight layouts match forwardFeed:
//   hiddenWeights : [numHiddenNodes][numInputs]  -> hiddenWeights[j*numInputs + k]
//   outputWeights : [numHiddenNodes][numOutputs] -> outputWeights[k*numOutputs + j]
//
// Fixes vs. the original:
//   * deltaHidden read outputWeights[(j*1)+k], hard-coding numOutputs == 1;
//     it now uses the real row stride (j*numOutputs)+k.
//   * the hiddenWeights update indexed with (k*numOutputs)+j, which does not
//     match the (j*numInputs)+k layout the forward pass reads, so gradients
//     were applied to the wrong weights for any non-trivial sizes.
//   * scratch arrays were hard-coded to sizes 1 and 4; they are now sized by
//     named upper bounds with an explicit guard against overrun.
//   * the "+=" updates are not idempotent, so concurrent threads would apply
//     the step multiple times; a single-thread guard makes any launch safe.
//
// Precondition: numOutputs <= kMaxOutputs and numHiddenNodes <= kMaxHidden.
__global__ void backpropogate(double* trainingInputs, double* hiddenLayer, double* hiddenWeights, double* outputLayer, double* outputWeights, double* trainingOutputs, double* hiddenLayerBias, double* outputLayerBias, int numHiddenNodes, int numInputs, int numOutputs, int trainingSetIndex, double lr){
    if (blockIdx.x != 0 || threadIdx.x != 0) return;
    const int kMaxOutputs = 32;  // generalized from the original hard-coded 1
    const int kMaxHidden  = 64;  // generalized from the original hard-coded 4
    // Refuse to run rather than overrun the fixed-size scratch arrays below.
    if (numOutputs > kMaxOutputs || numHiddenNodes > kMaxHidden) return;
    int i = trainingSetIndex;
    // Output-layer error terms: (target - prediction) * sigmoid'(activation).
    double deltaOutput[kMaxOutputs];
    for(int j = 0; j < numOutputs; j++){
        double dError = (trainingOutputs[i * numOutputs + j] - outputLayer[j]);
        deltaOutput[j] = dError * dSigmoid(outputLayer[j]);
    }
    // Hidden-layer error terms: propagate output deltas back through outputWeights.
    double deltaHidden[kMaxHidden];
    for(int j = 0; j < numHiddenNodes; j++){
        double dError = 0.0;
        for(int k = 0; k < numOutputs; k++){
            dError += deltaOutput[k] * outputWeights[(j * numOutputs) + k];
        }
        deltaHidden[j] = dError * dSigmoid(hiddenLayer[j]);
    }
    // Gradient step on output-layer biases and weights.
    for(int j = 0; j < numOutputs; j++){
        outputLayerBias[j] += deltaOutput[j] * lr;
        for(int k = 0; k < numHiddenNodes; k++){
            outputWeights[(k * numOutputs) + j] += hiddenLayer[k] * deltaOutput[j] * lr;
        }
    }
    // Gradient step on hidden-layer biases and weights (indexing now matches
    // the (j*numInputs)+k layout forwardFeed reads).
    for(int j = 0; j < numHiddenNodes; j++){
        hiddenLayerBias[j] += deltaHidden[j] * lr;
        for(int k = 0; k < numInputs; k++){
            hiddenWeights[(j * numInputs) + k] += trainingInputs[(i * numInputs) + k] * deltaHidden[j] * lr;
        }
    }
}