Skip to content

Commit 2c4e65f

Browse files
octave compatibility fixes
1 parent 4743c1b commit 2c4e65f

File tree

7 files changed

+18
-45
lines changed

7 files changed

+18
-45
lines changed

NN/nnchecknumgrad.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,9 @@ function nnchecknumgrad(nn, x, y)
88
nn_m = nn; nn_p = nn;
99
nn_m.W{l}(i, j) = nn.W{l}(i, j) - epsilon;
1010
nn_p.W{l}(i, j) = nn.W{l}(i, j) + epsilon;
11-
rng(0);
11+
rand('state',0)
1212
nn_m = nnff(nn_m, x, y);
13-
rng(0);
13+
rand('state',0)
1414
nn_p = nnff(nn_p, x, y);
1515
dW = (nn_p.L - nn_m.L) / (2 * epsilon);
1616
e = abs(dW - nn.dW{l}(i, j));

NN/nnupdatefigures.m

Lines changed: 2 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -43,40 +43,21 @@ function nnupdatefigures(nn,fhandle,L,opts,i)
4343
legend(p1, M,'Location','NorthEast');
4444
set(p1, 'Xlim',[0,opts.numepochs + 1])
4545

46-
if i ==2 % speeds up plotting by factor of ~2
47-
set(gca,'LegendColorbarListeners',[]);
48-
setappdata(gca,'LegendColorbarManualSpace',1);
49-
setappdata(gca,'LegendColorbarReclaimSpace',1);
50-
end
51-
5246
p2 = subplot(1,2,2);
5347
plot(plot_x,plot_yfrac);
5448
xlabel('Number of epochs'); ylabel('Misclassification rate');
5549
title('Misclassification rate')
5650
legend(p2, M,'Location','NorthEast');
5751
set(p2, 'Xlim',[0,opts.numepochs + 1])
5852

59-
if i ==2 % speeds up plotting by factor of ~2
60-
set(gca,'LegendColorbarListeners',[]);
61-
setappdata(gca,'LegendColorbarManualSpace',1);
62-
setappdata(gca,'LegendColorbarReclaimSpace',1);
63-
end
64-
6553
else
6654

6755
p = plot(plot_x,plot_ye);
6856
xlabel('Number of epochs'); ylabel('Error');title('Error');
6957
legend(p, M,'Location','NorthEast');
7058
set(gca, 'Xlim',[0,opts.numepochs + 1])
71-
72-
if i ==2 % speeds up plotting by factor of ~2
73-
set(gca,'LegendColorbarListeners',[]);
74-
setappdata(gca,'LegendColorbarManualSpace',1);
75-
setappdata(gca,'LegendColorbarReclaimSpace',1);
76-
77-
end
78-
59+
7960
end
8061
drawnow;
8162
end
82-
end
63+
end

tests/test_example_CNN.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
%% ex1 Train a 6c-2s-12c-2s Convolutional neural network
1010
%will run 1 epoch in about 200 seconds and get around 11% error.
1111
%With 100 epochs you'll get around 1.2% error
12-
rng(0)
12+
rand('state',0)
1313
cnn.layers = {
1414
struct('type', 'i') %input layer
1515
struct('type', 'c', 'outputmaps', 6, 'kernelsize', 5) %convolution layer

tests/test_example_DBN.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
test_y = double(test_y);
88

99
%% ex1 train a 100 hidden unit RBM and visualize its weights
10-
rng(0);
10+
rand('state',0)
1111
dbn.sizes = [100];
1212
opts.numepochs = 1;
1313
opts.batchsize = 100;
@@ -18,7 +18,7 @@
1818
figure; visualize(dbn.rbm{1}.W'); % Visualize the RBM weights
1919

2020
%% ex2 train a 100-100 hidden unit DBN and use its weights to initialize a NN
21-
rng(0);
21+
rand('state',0)
2222
%train dbn
2323
dbn.sizes = [100 100];
2424
opts.numepochs = 1;

tests/test_example_NN.m

Lines changed: 7 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
test_x = normalize(test_x, mu, sigma);
1212

1313
%% ex1 vanilla neural net
14-
rng(0);
14+
rand('state',0)
1515
nn = nnsetup([784 100 10]);
1616
opts.numepochs = 1; % Number of full sweeps through data
1717
opts.batchsize = 100; % Take a mean gradient step over this many samples
@@ -21,16 +21,8 @@
2121

2222
assert(er < 0.08, 'Too big error');
2323

24-
% Make an artificial one and verify that we can predict it
25-
x = zeros(1,28,28);
26-
x(:, 14:15, 6:22) = 1;
27-
x = reshape(x,1,28^2);
28-
figure; visualize(x');
29-
predicted = nnpredict(nn,x)-1;
30-
31-
assert(predicted == 1);
3224
%% ex2 neural net with L2 weight decay
33-
rng(0);
25+
rand('state',0)
3426
nn = nnsetup([784 100 10]);
3527

3628
nn.weightPenaltyL2 = 1e-4; % L2 weight decay
@@ -44,7 +36,7 @@
4436

4537

4638
%% ex3 neural net with dropout
47-
rng(0);
39+
rand('state',0)
4840
nn = nnsetup([784 100 10]);
4941

5042
nn.dropoutFraction = 0.5; % Dropout fraction
@@ -57,7 +49,7 @@
5749
assert(er < 0.1, 'Too big error');
5850

5951
%% ex4 neural net with sigmoid activation function
60-
rng(0);
52+
rand('state',0)
6153
nn = nnsetup([784 100 10]);
6254

6355
nn.activation_function = 'sigm'; % Sigmoid activation function
@@ -71,7 +63,7 @@
7163
assert(er < 0.1, 'Too big error');
7264

7365
%% ex5 plotting functionality
74-
rng(0);
66+
rand('state',0)
7567
nn = nnsetup([784 20 10]);
7668
opts.numepochs = 5; % Number of full sweeps through data
7769
nn.output = 'softmax'; % use softmax output
@@ -90,7 +82,7 @@
9082
vy = train_y(1:10000,:);
9183
ty = train_y(10001:end,:);
9284

93-
rng(0);
85+
rand('state',0)
9486
nn = nnsetup([784 20 10]);
9587
nn.output = 'softmax'; % use softmax output
9688
opts.numepochs = 5; % Number of full sweeps through data
@@ -99,4 +91,4 @@
9991
nn = nntrain(nn, tx, ty, opts, vx, vy); % nntrain takes validation set as last two arguments (optionally)
10092

10193
[er, bad] = nntest(nn, test_x, test_y);
102-
assert(er < 0.1, 'Too big error');
94+
assert(er < 0.1, 'Too big error');

tests/test_example_SAE.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
%% ex1 train a 100 hidden unit SDAE and use it to initialize a FFNN
1010
% Setup and train a stacked denoising autoencoder (SDAE)
11-
rng(0);
11+
rand('state',0)
1212
sae = saesetup([784 100]);
1313
sae.ae{1}.activation_function = 'sigm';
1414
sae.ae{1}.learningRate = 1;
@@ -33,7 +33,7 @@
3333

3434
%% ex2 train a 100-100 hidden unit SDAE and use it to initialize a FFNN
3535
% Setup and train a stacked denoising autoencoder (SDAE)
36-
rng(0);
36+
rand('state',0)
3737
sae = saesetup([784 100 100]);
3838
sae.ae{1}.activation_function = 'sigm';
3939
sae.ae{1}.learningRate = 1;

tests/test_nn_gradients_are_numerically_correct.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,10 @@
1717
nn.output = output{1};
1818
nn.dropoutFraction = dropoutFraction{1};
1919

20-
rng(0)
20+
rand('state',0)
2121
nn = nnff(nn, batch_x, y);
2222
nn = nnbp(nn);
2323
nnchecknumgrad(nn, batch_x, y);
2424
end
2525
end
26-
end
26+
end

0 commit comments

Comments (0)