From 649623b003e9522c8e0c524376e1d622a70d0dbb Mon Sep 17 00:00:00 2001
From: Oceania2018
Date: Thu, 21 Feb 2019 09:08:38 -0600
Subject: [PATCH] fix name_scope default_name.

---
 data/linear_regression.zip                    | Bin 0 -> 3417 bytes
 .../Operations/OpDefLibrary.cs                |   4 +-
 .../Train/Saving/BaseSaverBuilder.cs          |   2 +-
 src/TensorFlowNET.Core/Train/Saving/Saver.cs  |   2 +-
 src/TensorFlowNET.Core/ops.name_scope.cs      |   5 +-
 .../LinearRegression.cs                       |   2 +-
 .../python/linear_regression.py               |  59 +++++++++++-------
 7 files changed, 43 insertions(+), 31 deletions(-)
 create mode 100644 data/linear_regression.zip

diff --git a/data/linear_regression.zip b/data/linear_regression.zip
new file mode 100644
index 0000000000000000000000000000000000000000..2d17085f1e286605dcb1cedd0f4e5d50fc36e564
GIT binary patch
literal 3417
[base85 binary payload omitted]

diff --git a/test/TensorFlowNET.Examples/LinearRegression.cs b/test/TensorFlowNET.Examples/LinearRegression.cs
index 48713be9..5d02122a 100644
--- a/test/TensorFlowNET.Examples/LinearRegression.cs
+++ b/test/TensorFlowNET.Examples/LinearRegression.cs
@@ -57,7 +57,7 @@ namespace TensorFlowNET.Examples
             var grad = tf.train.GradientDescentOptimizer(learning_rate);
             var optimizer = grad.minimize(cost);*/
 
-            var new_saver = tf.train.import_meta_graph("save_model.meta", import_scope: "import");
+            var new_saver = tf.train.import_meta_graph("linear_regression.meta");
 
             var X = graph.OperationByName("Placeholder");
             var Y = graph.OperationByName("Placeholder_1");
diff --git a/test/TensorFlowNET.Examples/python/linear_regression.py b/test/TensorFlowNET.Examples/python/linear_regression.py
index eb9bfc87..f8e63cc9 100644
--- a/test/TensorFlowNET.Examples/python/linear_regression.py
+++ b/test/TensorFlowNET.Examples/python/linear_regression.py
@@ -14,7 +14,7 @@ rng = numpy.random
 # Parameters
 learning_rate = 0.01
 training_epochs = 1000
-display_step = 50
+display_step = 10
 
 # Training Data
 train_X = numpy.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167,
@@ -23,28 +23,41 @@ train_Y = numpy.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221,
                          2.827,3.465,1.65,2.904,2.42,2.94,1.3])
 n_samples = train_X.shape[0]
 
-# tf Graph Input
-X = tf.placeholder("float")
-Y = tf.placeholder("float")
-
-# Set model weights
-W = tf.Variable(rng.randn(), name="weight")
-b = tf.Variable(rng.randn(), name="bias")
-
-# Construct a linear model
-mul = tf.multiply(X, W)
-pred = tf.add(mul, b)
-
-# Mean squared error
-sub = pred-Y
-pow = tf.pow(sub, 2)
-
-reduce = tf.reduce_sum(pow)
-cost = reduce/(2*n_samples)
-# Gradient descent
-# Note, minimize() knows to modify W and b because Variable objects are trainable=True by default
-grad = tf.train.GradientDescentOptimizer(learning_rate)
-optimizer = grad.minimize(cost)
+if False:
+    # tf Graph Input
+    X = tf.placeholder("float")
+    Y = tf.placeholder("float")
+
+    # Set model weights
+    W = tf.Variable(-0.06, name="weight")
+    b = tf.Variable(-0.73, name="bias")
+
+    # Construct a linear model
+    mul = tf.multiply(X, W)
+    pred = tf.add(mul, b)
+
+    # Mean squared error
+    sub = pred-Y
+    pow = tf.pow(sub, 2)
+
+    reduce = tf.reduce_sum(pow)
+    cost = reduce/(2*n_samples)
+    # Gradient descent
+    # Note, minimize() knows to modify W and b because Variable objects are trainable=True by default
+    grad = tf.train.GradientDescentOptimizer(learning_rate)
+    optimizer = grad.minimize(cost)
+    # tf.train.export_meta_graph(filename='save_model.meta')
+else:
+    # tf Graph Input
+    new_saver = tf.train.import_meta_graph("save_model.meta")
+    nodes = tf.get_default_graph()._nodes_by_name
+    optimizer = nodes["GradientDescent"]
+    cost = nodes["truediv"].outputs[0]
+    X = nodes["Placeholder"].outputs[0]
+    Y = nodes["Placeholder_1"].outputs[0]
+    W = nodes["weight"].outputs[0]
+    b = nodes["bias"].outputs[0]
+    pred = nodes["Add"].outputs[0]
 
 # Initialize the variables (i.e. assign their default value)
 init = tf.global_variables_initializer()
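
Note (illustration, not part of the patch): both examples now rebuild the graph from a
.meta file and look nodes up by their original, unscoped names, which is why the
import_scope: "import" argument is dropped in LinearRegression.cs above. The sketch
below shows the same pattern in TF 1.x Python using the public lookup API instead of
the private _nodes_by_name dict; the file name save_model.meta is taken from the diff,
and "Add:0"/"GradientDescent" are the node names the Python script above relies on.

    import tensorflow as tf

    # Rebuild the graph structure recorded in the .meta file.
    # (Returns a Saver only if the exported graph contained one, else None.)
    new_saver = tf.train.import_meta_graph("save_model.meta")

    graph = tf.get_default_graph()

    # Public equivalents of the nodes["..."] lookups above.
    # "<op_name>:0" addresses the first output tensor of an op.
    X = graph.get_tensor_by_name("Placeholder:0")
    pred = graph.get_tensor_by_name("Add:0")
    optimizer = graph.get_operation_by_name("GradientDescent")

Because import_meta_graph only restores the graph definition, variable values still
have to come from a checkpoint (via Saver.restore) or from running an initializer, as
the script does with tf.global_variables_initializer().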