import numpy as np  # needed by np.dot in the Dense.backward method below

# fit method from the NeuralNetwork class, which calls each layer's backward method
def fit(self, X, y, n_epochs=1000):
    self._initialize(X)
    losses = []
    for epoch in range(n_epochs):
        # forward pass through every layer
        last_output = X
        for layer in self.layers:
            last_output = layer.forward(last_output)

        # loss for this epoch and its derivative w.r.t. the network output
        loss = self.loss(last_output, y)
        losses.append(loss)
        last_d = self.dloss(last_output, y)

        # backward pass: push the derivative through the layers in reverse order
        for layer in reversed(self.layers):
            last_d = layer.backward(last_d, self.lr)

        print(f"Epoch: {epoch}; Loss: {loss};")

    return losses

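# A minimal, self-contained sketch of the same per-epoch loop (forward pass,
# loss, backward pass, weight update) for a single dense layer with a sigmoid
# activation and MSE loss.  It does not use the classes from this paste;
# sigmoid/MSE and every name below are example choices.  dL_dW points in the
# direction of steepest increase of the loss, so the conventional update
# subtracts it, and the printed loss ends up clearly below its starting value.
rng = np.random.default_rng(0)
X_demo = rng.normal(size=(32, 3))                 # 32 samples, 3 features
true_W = rng.normal(size=(3, 1))
y_demo = 1.0 / (1.0 + np.exp(-X_demo @ true_W))   # toy targets in (0, 1)

W_demo = rng.normal(size=(3, 1)) * 0.1
lr_demo = 0.5
for epoch in range(200):
    z = X_demo @ W_demo                           # forward: pre-activation
    a = 1.0 / (1.0 + np.exp(-z))                  # forward: sigmoid activation
    loss = np.mean((a - y_demo) ** 2)             # MSE loss

    dL_da = 2.0 * (a - y_demo) / y_demo.size      # dLoss/da
    dL_dz = dL_da * a * (1.0 - a)                 # chain rule through the sigmoid
    dL_dW = X_demo.T @ dL_dz                      # dLoss/dW, shape (3, 1)

    W_demo -= lr_demo * dL_dW                     # subtract: step against the gradient
print(loss)                                       # clearly smaller than at epoch 0
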
# backward method for backpropagation in the Dense class
def backward(self, last_derivative, lr):
    """
    Parameters
    - last_derivative : derivative propagated back from the layer that follows
      this one (i.e. from the previous step of the backward pass)
    - lr (learning rate) : determines how large a step is taken on the weights
    """
    w = self.weights

    dloss_1 = self.dactivate(last_derivative)  # derivative through the activation
    d_w = np.dot(self.layer_input.T, dloss_1)  # derivative w.r.t. this layer's weights

    self.weights -= -np.dot(lr, d_w)  # note: the double negative makes this self.weights += lr * d_w

    return np.dot(w, dloss_1.T)  # derivative handed to the next layer in the backward pass

# The problem is:
"""
The gradient should point in the direction of steepest increase of the loss,
so I should subtract it from the weights, but when I do that the loss increases.
What's wrong? (I currently use self.weights -= -np.dot(...) because with that
sign the loss decreases, even though it shouldn't.)
"""