# Batch gradient descent: one update per epoch, using the gradient
# of the loss over the entire training set.
for i in range(nb_epochs):
    params_grad = evaluate_gradient(loss_function, data, params)
    params = params - learning_rate * params_grad
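
The snippets on this page are pseudocode: evaluate_gradient, loss_function, data, and params are placeholders. As a concrete illustration, here is a minimal runnable sketch of the batch variant on least-squares linear regression; everything in it (true_w, the learning rate, and so on) is a hypothetical stand-in, not part of the original snippet.

import numpy as np

# Synthetic regression problem: 100 examples, 3 features.
rng = np.random.default_rng(0)
X = rng.normal(size=(100, 3))
true_w = np.array([2.0, -1.0, 0.5])
y = X @ true_w + 0.01 * rng.normal(size=100)

params = np.zeros(3)
learning_rate = 0.1
nb_epochs = 200

for i in range(nb_epochs):
    # Gradient of the mean squared error over the whole dataset.
    params_grad = 2.0 / len(X) * X.T @ (X @ params - y)
    params = params - learning_rate * params_grad

print(params)  # converges toward true_w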


# Stochastic gradient descent: one update per training example,
# with the data reshuffled at the start of every epoch.
import numpy as np

for i in range(nb_epochs):
    np.random.shuffle(data)
    for example in data:
        params_grad = evaluate_gradient(loss_function, example, params)
        params = params - learning_rate * params_grad
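
For comparison, a per-example SGD sketch on the same synthetic problem; it assumes the X, y, and rng defined in the batch sketch above. Note the much smaller learning rate: single-example gradients are noisy, so each step must be smaller.

# Per-example SGD on the X, y from the batch sketch above.
params = np.zeros(3)
learning_rate = 0.01

for i in range(20):
    for j in rng.permutation(len(X)):  # fresh example order each epoch
        x_j, y_j = X[j], y[j]
        # Gradient of the squared error on a single example.
        params_grad = 2.0 * x_j * (x_j @ params - y_j)
        params = params - learning_rate * params_grad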


# Mini-batch gradient descent: one update per batch of 50 examples,
# trading SGD's noisy updates for more stable averaged gradients.
for i in range(nb_epochs):
    np.random.shuffle(data)
    for batch in get_batches(data, batch_size=50):
        params_grad = evaluate_gradient(loss_function, batch, params)
        params = params - learning_rate * params_grad
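
get_batches is not defined in any of these snippets. One plausible sketch, assuming data is a list (or array) of examples that the outer loop has already shuffled:

def get_batches(data, batch_size=50):
    # Yield consecutive slices of the already-shuffled dataset.
    for start in range(0, len(data), batch_size):
        yield data[start:start + batch_size]

With batches of around 50 examples, evaluate_gradient can average the per-example gradients, which makes each update far less noisy than SGD while remaining much cheaper than a full pass over the data.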

 
