|
|
|
# Shorthand for numpy's random module, used below to draw initial weights.
# (Original line was garbled by a stray diff-hunk header; statement reconstructed.)
rng = numpy.random
|
|
|
# Parameters
learning_rate = 0.01     # gradient-descent step size
training_epochs = 1000   # number of full passes over the training data
# Progress-printing interval in epochs. The original assigned 50 and then
# immediately overwrote it with 10; the dead first assignment is removed and
# the effective value (10) is kept.
display_step = 10
|
|
|
|
|
|
|
# Training Data: 17 (x, y) observation pairs for fitting y ~ W*x + b.
# NOTE(review): the second row of train_X was lost to a stray diff-hunk header
# in this file; it is reconstructed from the canonical upstream example
# (train_Y below matches that example exactly) — confirm against history.
train_X = numpy.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167,
                         7.042,10.791,5.313,7.997,5.654,9.27,3.1])
train_Y = numpy.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221,
                         2.827,3.465,1.65,2.904,2.42,2.94,1.3])
# Number of training examples (used to normalize the cost below).
n_samples = train_X.shape[0]
|
|
|
|
|
|
|
# tf Graph Input
# Scalar float placeholders; no shape is given, so feed_dict may also supply
# whole arrays.
X = tf.placeholder("float")
Y = tf.placeholder("float")

# Set model weights
# NOTE(review): initialized from numpy's RNG, so the starting point differs
# on every run unless a numpy seed is set elsewhere in the file.
W = tf.Variable(rng.randn(), name="weight")
b = tf.Variable(rng.randn(), name="bias")

# Construct a linear model: pred = X * W + b
# NOTE(review): the auto-generated op names produced here ("Placeholder",
# "Placeholder_1", "Add") appear to be looked up by name in the else-branch
# further down, so op creation order and type must not change — confirm
# before refactoring.
mul = tf.multiply(X, W)
pred = tf.add(mul, b)
|
|
|
|
|
|
|
# Mean squared error: cost = sum((pred - Y)^2) / (2 * n_samples)
sub = pred-Y
# NOTE(review): `pow` shadows the builtin pow() from here on — consider
# renaming if builtin pow is needed later in the file.
pow = tf.pow(sub, 2)

# NOTE(review): `reduce` likewise shadows a common name.
reduce = tf.reduce_sum(pow)
# The division op's auto-generated name ("truediv") appears to be looked up
# by name in the else-branch below — keep this expression form.
cost = reduce/(2*n_samples)
# Gradient descent
# Note, minimize() knows to modify W and b because Variable objects are trainable=True by default
grad = tf.train.GradientDescentOptimizer(learning_rate)
optimizer = grad.minimize(cost)
|
|
|
# Dead-code toggle: flip `False` to `True` to rebuild the graph from scratch
# (and re-export save_model.meta via the commented line below) instead of
# importing the previously saved meta graph.
if False:
    # tf Graph Input
    X = tf.placeholder("float")
    Y = tf.placeholder("float")

    # Set model weights — fixed initial values here, unlike the random
    # initialization used in the live path above.
    W = tf.Variable(-0.06, name="weight")
    b = tf.Variable(-0.73, name="bias")

    # Construct a linear model
    mul = tf.multiply(X, W)
    pred = tf.add(mul, b)

    # Mean squared error
    sub = pred-Y
    pow = tf.pow(sub, 2)

    reduce = tf.reduce_sum(pow)
    cost = reduce/(2*n_samples)
    # Gradient descent
    # Note, minimize() knows to modify W and b because Variable objects are trainable=True by default
    grad = tf.train.GradientDescentOptimizer(learning_rate)
    optimizer = grad.minimize(cost)
    # tf.train.export_meta_graph(filename='save_model.meta');
else:
    # Restore the exported graph definition and rebind the module-level
    # Python names to the ops/tensors found in it by their auto-generated
    # node names.
    new_saver = tf.train.import_meta_graph("save_model.meta")
    # NOTE(review): _nodes_by_name is a private tf.Graph attribute and may
    # break across TF versions; graph.get_operation_by_name() /
    # get_tensor_by_name() are the public equivalents — confirm and migrate.
    nodes = tf.get_default_graph()._nodes_by_name;
    optimizer = nodes["GradientDescent"]            # the minimize() train op
    cost = nodes["truediv"].outputs[0]              # the cost tensor
    X = nodes["Placeholder"].outputs[0]
    Y = nodes["Placeholder_1"].outputs[0]
    W = nodes["weight"].outputs[0]
    b = nodes["bias"].outputs[0]
    pred = nodes["Add"].outputs[0]
|
|
|
|
|
|
|
# Initialize the variables (i.e. assign their default value)
# NOTE(review): this only builds the init op; it presumably still needs to be
# executed via sess.run(init) before W/b are usable — confirm in the (unseen)
# training loop below.
init = tf.global_variables_initializer()
|
|
|
|