import numpy as np
import tensorflow as tf
def data_create_3d(w1, w2, b, amount, size, turb):
    """Sample noisy points from the plane z = w1*x1 + w2*x2 + b.

    Args:
        w1, w2: true weights of the plane.
        b: true intercept.
        amount: number of samples to draw.
        size: standard deviation used for both the inputs and the raw noise.
        turb: multiplier applied to the noise term (turbulence strength).

    Returns:
        Three 1-D arrays of length ``amount``: the first coordinate, the
        second coordinate, and the noisy target values.
    """
    # Draw the two input coordinates first, then the noise, so a seeded
    # RNG reproduces the exact same data as before.
    samples = np.random.normal(size=(amount, 2), scale=size)
    weights = np.array((w1, w2))
    noise = np.random.normal(size=amount, scale=size) * turb
    targets = samples @ weights + b + noise
    return samples[:, 0], samples[:, 1], targets
# Print the ground-truth parameters the gradient descent below should recover.
print("目标值->","w1:", 2, "w2:", 3, "b:", 20)
# Generate 1000 noisy samples of the plane z = 2*x + 3*y + 20
# (input scale 10, noise multiplier 3); x, y, z are module-level arrays
# read by loss() below.
x, y, z = data_create_3d(2, 3, 20, 1000, 10, 3)
def loss(w1, w2, b):
    """Mean squared error of the model w1*x + w2*y + b against targets z.

    Reads the module-level data arrays ``x``, ``y`` and ``z``. Returns a
    scalar (a tf.Tensor when the arguments are tf.Variables, so gradients
    can flow through it).
    """
    n = len(x)  # generalized from the hard-coded 1000 samples
    f = 0
    for i in range(n):
        f += (w1 * x[i] + w2 * y[i] + b - z[i]) ** 2
    # BUG FIX: the original wrote `f / 1000` as a bare expression and
    # discarded the result, so it returned the SUM of squared errors
    # instead of the intended mean.
    return f / n
if __name__ == "__main__":
    # Initial guesses for the parameters of z = w1*x + w2*y + b.
    w1 = tf.Variable(1.0)
    w2 = tf.Variable(2.0)
    b = tf.Variable(25.0)

    cnt = 0            # iteration counter
    foot1 = 0.000001   # learning rate for w1
    foot2 = 0.000001   # learning rate for w2
    foot3 = 0.0001     # learning rate for b
    yuzhi = 100        # stop when the loss changes by less than this

    while True:
        cnt += 1
        # persistent=True so the tape can be queried once per variable.
        with tf.GradientTape(persistent=True) as tape:
            losss = loss(w1, w2, b)
        dw1 = tape.gradient(losss, w1)
        dw2 = tape.gradient(losss, w2)
        db = tape.gradient(losss, b)
        del tape  # release the persistent tape's resources

        # Loss before the update — reuse the value already computed on the
        # tape instead of recomputing it.
        pre = losss
        w1.assign_sub(foot1 * dw1)
        w2.assign_sub(foot2 * dw2)
        b.assign_sub(foot3 * db)
        derta = abs(loss(w1, w2, b) - pre)

        print("=========\n迭代次数:", cnt)
        tf.print("loss:", loss(w1, w2, b))
        # BUG FIX: the original `tf.print("w1:"w1)` was missing a comma
        # (a SyntaxError); the dead a=/d=/c= bindings of tf.print's
        # return value (always None) are dropped.
        tf.print("w1:", w1)
        tf.print("w2:", w2)
        tf.print("b:", b)

        if derta < yuzhi:
            print("迭代结束!")
            break
|