先借助公式 y=0.1x+0.3 随机生成1000个点，再用均方误差和随机梯度下降求 W 和 b

tensorflow笔记-----1-----tensorflow实现线性回归

#! /usr/bin/python
# -*-coding:utf-8 -*-
__author__ = "chunming"
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Generate 1000 sample points scattered around the line y = 0.1*x + 0.3,
# with Gaussian noise added to the y values.
num = 1000
data = []
for _ in range(num):
    sample_x = np.random.normal(0.0, 1.0)          # x drawn from N(0, 1)
    sample_y = 0.1 * sample_x + 0.3 + np.random.normal(0.0, 0.03)  # noisy target
    data.append([sample_x, sample_y])
# Split the [x, y] pairs into two parallel lists for training/plotting.
xdata = [point[0] for point in data]
ydata = [point[1] for point in data]

# Fit y = W*x + b by minimizing the mean-squared error with gradient descent.
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name="W")  # slope, random init in [-1, 1)
b = tf.Variable(tf.zeros([1]), name="b")                      # intercept, init 0
y = W * xdata + b                                             # model prediction for all samples
loss = tf.reduce_mean(tf.square(y - ydata), name="loss")      # mean squared error
optimizer = tf.train.GradientDescentOptimizer(0.05)           # learning rate 0.05
train = optimizer.minimize(loss, name="train")

# Use a context manager so the session is always released
# (the original created a Session and never closed it).
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print("初始化W:", sess.run(W), "初始化b:", sess.run(b), "初始化loss:", sess.run(loss))
    for i in range(1000):
        sess.run(train)
        print("第", i, "次的W:", sess.run(W), "b:", sess.run(b), "loss:", sess.run(loss))
    # Fetch the learned parameters before the session closes so the
    # plotting code below does not depend on a live session.
    w_final, b_final = sess.run(W), sess.run(b)

# Scatter the samples and overlay the fitted regression line.
plt.scatter(xdata, ydata, c="r")
plt.plot(xdata, np.array(xdata) * w_final + b_final)
plt.show()

 

相关文章:

  • 2021-08-15
  • 2022-01-29
  • 2021-11-08
  • 2022-12-23
  • 2022-12-23
  • 2021-08-11
  • 2021-12-09
  • 2021-07-09
猜你喜欢
  • 2019-02-02
  • 2018-09-30
  • 2022-03-05
  • 2021-10-31
  • 2017-11-24
相关资源
相似解决方案