TensorFlow Programming Model
(To be expanded later.) The example below generates noisy points around the line y = 0.1x + 0.3, fits a linear model y = w * x + b with TensorFlow 1.x using gradient descent, and plots the result.
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'  # silence TensorFlow's C++ startup logging
import numpy as np

# Generate synthetic points scattered around the line y = 0.1 * x + 0.3
num_points = 1000
data_array = []
for i in range(num_points):
    x1 = np.random.normal(0.0, 0.5)
    y1 = x1 * 0.1 + 0.3 + np.random.normal(0.0, 0.03)
    data_array.append([x1, y1])
x_data = [v[0] for v in data_array]
y_data = [v[1] for v in data_array]

# Plot the raw data
import matplotlib.pyplot as plt
plt.plot(x_data, y_data, 'ro', label='Original data')
plt.legend()
plt.show()
# Build the graph: linear model y = w * x + b with a mean-squared-error loss
import tensorflow as tf
w = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
y = w * x_data + b
loss = tf.reduce_mean(tf.square(y - y_data))  # (1/N) * sum((w*x_i + b - y_i)^2)
optimizer = tf.train.GradientDescentOptimizer(0.5)
train = optimizer.minimize(loss)

# Execute the graph in a session
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
for step in range(8):  # number of training steps; a few suffice for this toy model
    sess.run(train)
    print(step, sess.run(w), sess.run(b))
    print(step, sess.run(loss))
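The split between graph construction and execution is the core of the TF 1.x programming model: loss is a node in a graph, not a number, and values only exist when sess.run executes the graph. As a minimal illustration of this split (a sketch reusing the loss and sess objects defined above):

print(loss)            # a symbolic Tensor, e.g. Tensor("Mean:0", shape=(), dtype=float32)
print(sess.run(loss))  # an actual float, produced by executing the graph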
# Graphic display: original points and the fitted line
plt.plot(x_data, y_data, 'ro', label='Original data')
plt.plot(x_data, sess.run(w) * x_data + sess.run(b), label='Fitted line')
plt.xlabel('x')
plt.xlim(-2, 2)
plt.ylim(0.1, 0.6)
plt.ylabel('y')
plt.legend()
plt.show()
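The same model can also be written so that the data is fed in at run time instead of being baked into the graph as constants. The following is only a minimal sketch of that variant: the names x_ph, y_ph, w2, b2 are made up for illustration, and it reuses the x_data and y_data lists generated above.

x_ph = tf.placeholder(tf.float32, shape=[None])
y_ph = tf.placeholder(tf.float32, shape=[None])
w2 = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b2 = tf.Variable(tf.zeros([1]))
y_pred = w2 * x_ph + b2
loss2 = tf.reduce_mean(tf.square(y_pred - y_ph))
train2 = tf.train.GradientDescentOptimizer(0.5).minimize(loss2)

with tf.Session() as sess2:
    sess2.run(tf.global_variables_initializer())
    for step in range(8):
        # the data is supplied at run time through feed_dict
        sess2.run(train2, feed_dict={x_ph: x_data, y_ph: y_data})
    print(sess2.run([w2, b2]))  # should again approach [0.1] and [0.3]

Feeding through placeholders decouples the graph from any particular dataset, which is how larger TF 1.x programs push mini-batches through one fixed graph.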