Commit d7619d48 authored by Shawn Nithyan Stanley's avatar Shawn Nithyan Stanley
Browse files

Replace connected_layer.c

parent 82640837
......@@ -49,9 +49,9 @@ matrix forward_connected_layer(layer l, matrix x)
*l.x = copy_matrix(x);
// TODO: 3.1 - run the network forward
matrix y = make_matrix(x.rows, l.w.cols); // Going to want to change this!
//matrix y = make_matrix(x.rows, l.w.cols); // Going to want to change this!
matrix y = matmul(x, l.w);
y = forward_bias(y, l.b);
return y;
......@@ -71,9 +71,12 @@ matrix backward_connected_layer(layer l, matrix dy)
// updates for our weights, which are stored in l.dw
// Calculate dL/dx and return it
matrix dx = copy_matrix(x); // Change this
axpy_matrix(1.0, backward_bias(dy), l.db);
matrix dw = matmul(transpose_matrix(x), dy); // Change this
axpy_matrix(1.0, dw, l.dw);
matrix dx = matmul(dy, transpose_matrix(l.w));
return dx;
......@@ -93,6 +96,12 @@ void update_connected_layer(layer l, float rate, float momentum, float decay)
// we want it to be (-momentum * update) so we just need to scale it a little
// Do the same for biases as well but no need to use weight decay on biases
axpy_matrix(decay, l.w, l.dw);
axpy_matrix(-rate, l.dw, l.w);
scal_matrix(momentum, l.dw);
axpy_matrix(-rate, l.db, l.b);
scal_matrix(momentum, l.db);
layer make_connected_layer(int inputs, int outputs)
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment