Task 5: Update weights

Once you have calculated the gradients, you can update the weights using gradient descent. In this task, implement the update function defined below.
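For reference, a standard mini-batch gradient-descent step subtracts the scaled gradient from each parameter. The scaling shown here assumes the gradients produced by backprop are summed over the mini-batch (which is presumably why n_examples is passed to update); if backprop already averages them, the factor is simply the learning rate:

$$
W^{(l)} \leftarrow W^{(l)} - \frac{\eta}{n}\,\frac{\partial L}{\partial W^{(l)}},
\qquad
b^{(l)} \leftarrow b^{(l)} - \frac{\eta}{n}\,\frac{\partial L}{\partial b^{(l)}}
$$

where $\eta$ is the learning rate and $n$ is the number of examples in the mini-batch.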
In [ ]: def update(grads, eta, params, n_examples):
            """Update parameters

            Arguments:
                grads (dict): The gradients calculated during backprop
                eta (float): The learning rate
                params (dict): The current parameter values
                n_examples (int): The number of examples in the mini-batch

            Returns:
                updated_params (dict): A dictionary of the updated parameters
            """
            updated_params = {
                'W1': None,
                'b1': None,
                'W2': None,
                'b2': None
            }
            # YOUR CODE HERE
            raise NotImplementedError()
            return updated_params

In [ ]: # free

In [ ]: sizes_grade = [2, 3, 2]
        # set seed for repeatable test initialisation
        np.random.seed(0)
        params_grade = init(sizes_grade)
        eta_grade = 0.01
        X_grade = np.array([[0.1, -0.2], [-0.1, 0.2]])
        y_grade = np.array([[0, 1], [1, 0]])
        n_examples_grade = X_grade.shape[0]
        outputs_grade = forward(params_grade, X_grade)
        grads_grade = backprop(outputs_grade, params_grade, X_grade, y_grade)
        updated_params_grade = update(grads_grade, eta_grade, params_grade, n_examples_grade)

        updated_params_test = {
            'W1': np.array([[ 1.76399035,  0.4001897 ,  0.97879742],
                            [ 2.2410172 ,  1.86749301, -0.97739675]]),
            'b1': np.array([ 0.00061999, -0.0003249 ,  0.00059436]),
            'W2': np.array([[ 0.95023747, -0.15153479],
                            [-0.10303594,  0.41038059],
                            [ 0.14385645,  1.45441192]]),
            'b2': np.array([-8.94551854e-05, -1.81529826e-04])
        }

        for param, value in updated_params_test.items():
            npt.assert_allclose(updated_params_grade[param], value, rtol=1e-5)
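For illustration only (not the assignment's reference solution), a minimal sketch of such an update step could look like the following. It assumes grads uses the same keys as params ('W1', 'b1', 'W2', 'b2') and holds gradients summed over the mini-batch; update_sketch is a hypothetical name, not part of the scaffold.

def update_sketch(grads, eta, params, n_examples):
    # Hypothetical sketch, assuming grads[key] is the gradient of the loss
    # with respect to params[key], summed over the mini-batch.
    # Each parameter takes a step of size eta / n_examples along the
    # negative gradient direction.
    return {key: value - (eta / n_examples) * grads[key]
            for key, value in params.items()}

Returning a new dictionary rather than mutating params keeps the original parameter values intact, which is consistent with the test above comparing the returned dictionary against fixed expected arrays.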