6 changes: 3 additions & 3 deletions machine-learning-ex1/gradientDescent.m
@@ -16,9 +16,9 @@
% Hint: While debugging, it can be useful to print out the values
% of the cost function (computeCost) and gradient here.
%
-temp0 = theta(1) - alpha*sum(X*theta-y)/m;
-temp1 = theta(2) - alpha*sum((X*theta-y).*X(:,2))/m;
-theta = [temp0;temp1];
+
+
+theta = theta - alpha*X'*(X*theta-y)/m;

% ============================================================
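The vectorized update replaces the two per-parameter temp variables with a single matrix expression, and it generalizes unchanged to any number of features. A minimal sanity check, using toy data invented here (X carries the usual bias column of ones):

% Toy check that the vectorized step equals the per-parameter step.
X = [1 1; 1 2; 1 3]; y = [2; 3; 4];        % hypothetical data
theta = [0; 0]; alpha = 0.1; m = length(y);

temp0 = theta(1) - alpha*sum(X*theta - y)/m;
temp1 = theta(2) - alpha*sum((X*theta - y).*X(:,2))/m;
loop_step = [temp0; temp1];

vec_step = theta - alpha*X'*(X*theta - y)/m;
disp(max(abs(loop_step - vec_step)));      % expect 0, up to round-off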

10 changes: 2 additions & 8 deletions machine-learning-ex2/costFunction.m
@@ -19,19 +19,13 @@
%
% Note: grad should have the same dimensions as theta
%
-% for i=1:m
-%     hx = sigmoid(X(i,:)*theta);
-%     J = J + (-y(i)*log(hx)-(1-y(i))*log(1-hx));
-% end
-% J = J/m;


%Vectorization
hx = sigmoid(X*theta);
J = sum(-y.*log(hx)-(1-y).*log(1-hx))/m;

-for j=1:size(theta)
-    grad(j) = sum((hx-y).*X(:,j))/m;
-end
+grad = X'*(hx-y)/m;


% =============================================================
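The same equivalence holds for the logistic gradient: the removed loop over grad(j) and the matrix form X'*(hx-y)/m compute identical values. A small check with invented data and an inline stand-in for the course's sigmoid.m:

% Toy check: looped gradient vs. vectorized gradient (hypothetical data).
sigmoid_fn = @(z) 1 ./ (1 + exp(-z));      % inline stand-in for sigmoid.m
X = [1 0.5; 1 -1.2; 1 2.0]; y = [1; 0; 1];
theta = [0.1; -0.3]; m = length(y);

hx = sigmoid_fn(X*theta);
grad_loop = zeros(size(theta));
for j = 1:numel(theta)
    grad_loop(j) = sum((hx - y).*X(:,j))/m;
end
grad_vec = X'*(hx - y)/m;
disp(max(abs(grad_loop - grad_vec)));      % expect 0, up to round-off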
8 changes: 1 addition & 7 deletions machine-learning-ex2/costFunctionReg.m
@@ -7,7 +7,6 @@
% Initialize some useful values
m = length(y); % number of training examples
n = length(theta);

% You need to return the following variables correctly
J = 0;
grad = zeros(size(theta));
@@ -17,17 +16,12 @@
% You should set J to the cost.
% Compute the partial derivatives and set grad to the partial
% derivatives of the cost w.r.t. each parameter in theta

%Vectorization
hx = sigmoid(X*theta);
J = sum(-y.*log(hx)-(1-y).*log(1-hx))/m + lambda*sum(theta(2:n).^2)/(2*m);

+grad = X'*(hx-y)/m + lambda*theta/m;
grad(1) = sum((hx-y).*X(:,1))/m;

-for j=2:n
-    grad(j) = sum((hx-y).*X(:,j))/m + lambda*theta(j)/m;
-end

% =============================================================

end
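The committed version first regularizes every parameter, then overwrites grad(1) with the unregularized bias gradient. An equivalent one-pass alternative, sketched here against the same in-scope variables (hx, X, y, theta, lambda, m) and not the graded file's required shape, masks the bias entry instead:

% Sketch: exclude theta(1) from regularization by masking it out,
% instead of overwriting grad(1) afterwards. Same result either way.
theta_reg = [0; theta(2:end)];             % zero the bias entry
grad = X'*(hx - y)/m + lambda*theta_reg/m;
J = sum(-y.*log(hx) - (1-y).*log(1-hx))/m + lambda*sum(theta_reg.^2)/(2*m);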
59 changes: 59 additions & 0 deletions machine-learning-ex3/displayData.m
@@ -0,0 +1,59 @@
function [h, display_array] = displayData(X, example_width)
%DISPLAYDATA Display 2D data in a nice grid
% [h, display_array] = DISPLAYDATA(X, example_width) displays 2D data
% stored in X in a nice grid. It returns the figure handle h and the
% displayed array if requested.

% Set example_width automatically if not passed in
if ~exist('example_width', 'var') || isempty(example_width)
    example_width = round(sqrt(size(X, 2)));
end

% Gray Image
colormap(gray);

% Compute rows, cols
[m n] = size(X);
example_height = (n / example_width);

% Compute number of items to display
display_rows = floor(sqrt(m));
display_cols = ceil(m / display_rows);

% Between images padding
pad = 1;

% Setup blank display
display_array = - ones(pad + display_rows * (example_height + pad), ...
                       pad + display_cols * (example_width + pad));

% Copy each example into a patch on the display array
curr_ex = 1;
for j = 1:display_rows
    for i = 1:display_cols
        if curr_ex > m,
            break;
        end
        % Copy the patch

        % Get the max value of the patch
        max_val = max(abs(X(curr_ex, :)));
        display_array(pad + (j - 1) * (example_height + pad) + (1:example_height), ...
                      pad + (i - 1) * (example_width + pad) + (1:example_width)) = ...
            reshape(X(curr_ex, :), example_height, example_width) / max_val;
        curr_ex = curr_ex + 1;
    end
    if curr_ex > m,
        break;
    end
end

% Display Image
h = imagesc(display_array, [-1 1]);

% Do not show axis
axis image off

drawnow;

end
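A minimal usage sketch, assuming the course's ex3data1.mat (with X as 5000 rows of 400-pixel images) is on the path:

% Show 16 random 20x20 digit images in one grid.
load('ex3data1.mat');              % provides X (5000x400) and y
idx = randperm(size(X, 1));
displayData(X(idx(1:16), :));      % example_width defaults to round(sqrt(400)) = 20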
69 changes: 69 additions & 0 deletions machine-learning-ex3/ex3.m
@@ -0,0 +1,69 @@
%% Machine Learning Online Class - Exercise 3 | Part 1: One-vs-all

% Instructions
% ------------
%
% This file contains code that helps you get started on the exercise.
% You will need to complete the following functions in this exercise:
%
% lrCostFunction.m (logistic regression cost function)
% oneVsAll.m
% predictOneVsAll.m
% predict.m
%
% For this exercise, you will not need to change any code in this file,
% or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc

%% Setup the parameters you will use for this part of the exercise
input_layer_size = 400; % 20x20 Input Images of Digits
num_labels = 10; % 10 labels, from 1 to 10
% (note that we have mapped "0" to label 10)

%% =========== Part 1: Loading and Visualizing Data =============
% We start the exercise by first loading and visualizing the dataset.
% You will be working with a dataset that contains handwritten digits.
%

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

load('ex3data1.mat'); % training data stored in arrays X, y
m = size(X, 1);

% Randomly select 100 data points to display
rand_indices = randperm(m);
sel = X(rand_indices(1:100), :);

displayData(sel);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============ Part 2: Vectorize Logistic Regression ============
% In this part of the exercise, you will reuse your logistic regression
% code from the last exercise. Your task here is to make sure that your
% regularized logistic regression implementation is vectorized. After
% that, you will implement one-vs-all classification for the handwritten
% digit dataset.
%

fprintf('\nTraining One-vs-All Logistic Regression...\n')

lambda = 0.1;
[all_theta] = oneVsAll(X, y, num_labels, lambda);

fprintf('Program paused. Press enter to continue.\n');
pause;
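For orientation, oneVsAll.m (one of the graded files) trains one regularized logistic classifier per label. A condensed sketch of that idea, assuming lrCostFunction(theta, X, y, lambda) returns [J, grad] and fmincg from the course materials is on the path; this is a sketch, not the graded implementation:

% One-vs-all sketch: one binary classifier per class label.
[m, n] = size(X);
all_theta = zeros(num_labels, n + 1);
X_b = [ones(m, 1) X];                      % prepend bias column
options = optimset('GradObj', 'on', 'MaxIter', 50);
for c = 1:num_labels
    initial_theta = zeros(n + 1, 1);
    % Train class c against the rest, using (y == c) as binary labels.
    all_theta(c, :) = fmincg(@(t) lrCostFunction(t, X_b, (y == c), lambda), ...
                             initial_theta, options)';
end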


%% ================ Part 3: Predict for One-Vs-All ================
% After training the one-vs-all classifier, we use it to predict the
% label for each training example and measure the training set accuracy.
pred = predictOneVsAll(all_theta, X);

fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
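predictOneVsAll reduces to scoring every class and taking the per-row arg max; a sketch assuming all_theta is num_labels x (n+1) as produced above:

% Pick, for each example, the class whose classifier scores highest.
scores = [ones(size(X, 1), 1) X] * all_theta';   % m x num_labels
[~, pred] = max(scores, [], 2);                  % column index = label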

Binary file added machine-learning-ex3/ex3.pdf
Binary file not shown.
88 changes: 88 additions & 0 deletions machine-learning-ex3/ex3_nn.m
@@ -0,0 +1,88 @@
%% Machine Learning Online Class - Exercise 3 | Part 2: Neural Networks

% Instructions
% ------------
%
% This file contains code that helps you get started on the exercise.
% You will need to complete the following functions in this exercise:
%
% lrCostFunction.m (logistic regression cost function)
% oneVsAll.m
% predictOneVsAll.m
% predict.m
%
% For this exercise, you will not need to change any code in this file,
% or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc

%% Setup the parameters you will use for this exercise
input_layer_size = 400; % 20x20 Input Images of Digits
hidden_layer_size = 25; % 25 hidden units
num_labels = 10; % 10 labels, from 1 to 10
% (note that we have mapped "0" to label 10)

%% =========== Part 1: Loading and Visualizing Data =============
% We start the exercise by first loading and visualizing the dataset.
% You will be working with a dataset that contains handwritten digits.
%

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

load('ex3data1.mat');
m = size(X, 1);

% Randomly select 100 data points to display
sel = randperm(size(X, 1));
sel = sel(1:100);

displayData(X(sel, :));

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ================ Part 2: Loading Parameters ================
% In this part of the exercise, we load some pre-initialized
% neural network parameters.

fprintf('\nLoading Saved Neural Network Parameters ...\n')

% Load the weights into variables Theta1 and Theta2
load('ex3weights.mat');

%% ================= Part 3: Implement Predict =================
% After training the neural network, we would like to use it to predict
% the labels. You will now implement the "predict" function to use the
% neural network to predict the labels of the training set. This lets
% you compute the training set accuracy.

pred = predict(Theta1, Theta2, X);

fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);

fprintf('Program paused. Press enter to continue.\n');
pause;
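The predict.m you implement is a plain feedforward pass; a sketch assuming sigmoid.m from ex2 and the loaded weights Theta1 (25x401) and Theta2 (10x26):

% Feedforward sketch: input -> hidden -> output, arg max over outputs.
a1 = [ones(size(X, 1), 1) X];                        % inputs plus bias
a2 = [ones(size(X, 1), 1) sigmoid(a1 * Theta1')];    % hidden layer plus bias
[~, pred] = max(sigmoid(a2 * Theta2'), [], 2);       % most probable label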

% To give you an idea of the network's output, you can also run
% through the examples one at a time to see what it is predicting.

% Randomly permute examples
rp = randperm(m);

for i = 1:m
    % Display
    fprintf('\nDisplaying Example Image\n');
    displayData(X(rp(i), :));

    pred = predict(Theta1, Theta2, X(rp(i),:));
    fprintf('\nNeural Network Prediction: %d (digit %d)\n', pred, mod(pred, 10));

    % Pause
    fprintf('Program paused. Press enter to continue.\n');
    pause;
end
