# fit method from the NeuralNetwork class, which calls each layer's backward method
def fit(self, X, y, n_epochs=1000):
    self._initialize(X)
    losses = []
    for epoch in range(n_epochs):
        # forward pass: feed the input through every layer
        last_output = X
        for layer in self.layers:
            last_output = layer.forward(last_output)
        loss = self.loss(last_output, y)
        losses.append(loss)
        # backward pass: propagate the loss derivative from the last layer back to the first
        last_d = self.dloss(last_output, y)
        for layer in reversed(self.layers):
            last_d = layer.backward(last_d, self.lr)
        print(f"Epoch: {epoch}; Loss: {loss};")
    return losses

# backward method for backpropagation in the Dense class
def backward(self, last_derivative, lr):
    """
    Parameters
    - last_derivative : derivative propagated back from the layer above (the previous layer in the backward pass)
    - lr (learning rate) : determines how large the changes made to the network's weights are
    """
    w = self.weights
    dloss_1 = self.dactivate(last_derivative)
    d_w = np.dot(self.layer_input.T, dloss_1)
    self.weights -= -np.dot(lr, d_w)
    return np.dot(w, dloss_1.T)

# The problem is:
"""
The gradient should point in the direction of steepest ascent, so I should subtract it from the
weights, but when I do that the loss increases. What's wrong?
(I use self.weights -= -np.dot(...) because that way the loss decreases, even though it shouldn't.)
"""
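
For reference, a conventional chain-rule backward pass for a dense layer looks like the sketch below: the incoming derivative is multiplied elementwise by the activation derivative evaluated at the layer's cached pre-activation, the weight gradient (scaled by lr) is subtracted, and the derivative with respect to the layer's input is passed on to the layer below. This is a minimal sketch, not the original classes: the sigmoid activation, the X @ W layout without a bias, and the names Dense / dsigmoid are assumptions made so the example is self-contained.

# Minimal, self-contained sketch -- NOT the original classes. The sigmoid
# activation, the X @ W layout without a bias, and the toy data below are
# assumptions made so the example runs on its own.
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def dsigmoid(z):
    s = sigmoid(z)
    return s * (1.0 - s)

class Dense:
    def __init__(self, n_in, n_out):
        self.weights = np.random.default_rng(0).normal(scale=0.1, size=(n_in, n_out))

    def forward(self, X):
        self.layer_input = X                  # cached for the backward pass
        self.z = np.dot(X, self.weights)      # pre-activation
        return sigmoid(self.z)

    def backward(self, last_derivative, lr):
        # chain rule: dL/dz = dL/da * a'(z), multiplied elementwise
        delta = last_derivative * dsigmoid(self.z)
        # dL/dW has the same shape as the weights: (n_in, n_out)
        d_w = np.dot(self.layer_input.T, delta)
        # gradient w.r.t. this layer's input, computed with the forward-pass
        # weights (i.e. before they are updated), shape (batch, n_in)
        d_input = np.dot(delta, self.weights.T)
        # gradient descent: step *against* the gradient, so subtract it
        self.weights -= lr * d_w
        return d_input

# quick check on a toy squared-error problem: the loss should shrink
rng = np.random.default_rng(1)
X = rng.normal(size=(32, 3))
y = sigmoid(np.dot(X, rng.normal(size=(3, 1))))
layer = Dense(3, 1)
for _ in range(500):
    out = layer.forward(X)
    d_loss = 2.0 * (out - y) / len(X)         # derivative of the mean squared error
    layer.backward(d_loss, lr=0.5)
print(np.mean((layer.forward(X) - y) ** 2))   # small if the update direction is right

In this formulation the activation derivative (here dsigmoid) is evaluated at the layer's cached pre-activation rather than applied to the incoming derivative itself, and the plain subtraction self.weights -= lr * d_w is what moves the weights downhill.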