# (批量)梯度下降法 — (Batch) Gradient Descent


import numpy as np
import matplotlib.pyplot as plt
import scipy.io
import math
import sklearn
import sklearn.datasets

# Project-local helpers: parameter/gradient loading, forward/backward
# propagation, cost, prediction, plotting, and dataset loading.
from opt_utils import load_params_and_grads, initialize_parameters, forward_propagation, backward_propagation
from opt_utils import compute_cost, predict, predict_dec, plot_decision_boundary, load_dataset
from testCases_v3 import *

# %matplotlib inline
plt.rcParams['figure.figsize'] = (7.0, 4.0)  # set default size of plots
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
17
# GRADED FUNCTION: update_parameters_with_gd
def update_parameters_with_gd(parameters, grads, learning_rate):
    """
    Update parameters using one step of (batch) gradient descent.

    Arguments:
    parameters -- python dictionary containing your parameters to be updated:
                    parameters['W' + str(l)] = Wl
                    parameters['b' + str(l)] = bl
    grads -- python dictionary containing your gradients to update each parameter:
                    grads['dW' + str(l)] = dWl
                    grads['db' + str(l)] = dbl
    learning_rate -- the learning rate, scalar.

    Returns:
    parameters -- python dictionary containing your updated parameters

    Note: the input dictionary is updated in place (its entries are rebound
    to new arrays) and is also returned for convenience.
    """
    # Each layer contributes one 'W' and one 'b' entry, so the number of
    # layers is half the number of keys.
    L = len(parameters) // 2

    # Gradient-descent rule for every layer l: theta = theta - alpha * d_theta.
    # Layers are 1-indexed in the parameter/gradient dictionaries.
    for l in range(1, L + 1):
        parameters['W' + str(l)] = parameters['W' + str(l)] - learning_rate * grads['dW' + str(l)]
        parameters['b' + str(l)] = parameters['b' + str(l)] - learning_rate * grads['db' + str(l)]

    return parameters
# End of code listing