48. Getting Started with TensorFlow, Part 2: Fitting a Linear Model

import tensorflow as tf
import numpy as np
# Generate 100 random 2-dimensional points with values in [0, 1)
x_data = np.float32(np.random.rand(2, 100))

Next compute the labels via an inner product: x_data holds 100 samples, each a 2-dimensional vector; for every sample, take the dot product with [0.1, 0.2] (multiply the components and sum them), then add the constant 0.3. The sampled x_data and the resulting y_data look like this:
x_data =
[[ 0.27559635 0.35930911 0.77030689 0.71818703 0.49122271 0.43190494
   0.644674   0.32078174 0.64300877 0.81156862 0.30409896 0.56623858
   0.97026539 0.60468578 0.34658566 0.86189109 0.5117926  0.46588144
   0.27591956 0.49946061 0.47744861 0.80954593 0.10624354 0.80820572
   0.24388497 0.75275064 0.11153043 0.01847375 0.70894343 0.86511648
   0.05999189 0.94242656 0.35399687 0.53131646 0.80690706 0.28856653
   0.2685678  0.86655128 0.49340782 0.84330899 0.26634833 0.94808429
   0.32813659 0.60548925 0.37914801 0.93819922 0.16300483 0.28346273
   0.25481561 0.59326059 0.64435166 0.71002674 0.47835174 0.16478723
   0.83618289 0.89197201 0.77212745 0.83254766 0.1176443  0.45999372
   0.17508474 0.99125117 0.19204263 0.88548642 0.16025347 0.58622926
   0.14167576 0.6784007  0.77483946 0.90998834 0.79064935 0.76124579
   0.11255023 0.63665706 0.84133714 0.01388079 0.7478959  0.34365693
   0.66228282 0.56429321 0.97419363 0.46427366 0.71639329 0.67420006
   0.72603422 0.35240087 0.43481046 0.04478104 0.83910578 0.03852031
   0.63502115 0.54327303 0.05116724 0.75744271 0.23107423 0.25379685
   0.1543453  0.65174055 0.60880935 0.41448417]
 [ 0.53852242 0.24170624 0.51843584 0.41295227 0.22256    0.05581184
   0.42046902 0.49984431 0.83284378 0.44403863 0.43289256 0.04277489
   0.97858369 0.32724616 0.69393569 0.80431139 0.20591183 0.10109164
   0.07850602 0.61202133 0.04476574 0.99151891 0.1714984  0.83303201
   0.1769124  0.97038633 0.71856993 0.97560126 0.80447757 0.48544171
   0.95302963 0.21392477 0.72407377 0.32749009 0.87037027 0.23632777
   0.09344739 0.49172315 0.27751547 0.3205907  0.42732051 0.0938397
   0.65851027 0.75118226 0.30484736 0.69336241 0.96847987 0.08743072
   0.5060789  0.128803   0.12509818 0.77400607 0.99729323 0.25656971
   0.28877217 0.26310787 0.22661451 0.38361222 0.64689898 0.26246113
   0.41836309 0.96913052 0.34863174 0.26865834 0.96321774 0.02932074
   0.51096094 0.93037766 0.3862699  0.77660888 0.50103205 0.35242727
   0.96469277 0.71796703 0.90261179 0.9502635  0.2554118  0.41087386
   0.13807607 0.10848427 0.27238116 0.81126028 0.35296583 0.5478636
   0.35726911 0.54948765 0.3683508  0.57419771 0.28177765 0.80673724
   0.14792147 0.5277251  0.17725706 0.5937981  0.86571193 0.09969555
   0.92383957 0.95939624 0.76108253 0.30095646]]

y_data =
[ 0.43526412 0.38427216 0.48071786 0.45440916 0.39363427 0.35435286
  0.4485612  0.43204704 0.53086963 0.46996459 0.41698841 0.36517884
  0.59274328 0.42591781 0.4734457  0.54705139 0.39236163 0.36680647
  0.34329316 0.47235033 0.35669801 0.57925838 0.34492403 0.54742697
  0.35977098 0.56935233 0.45486703 0.49696763 0.53178986 0.48359999
  0.49660512 0.43702761 0.48021444 0.41862966 0.55476476 0.37612221
  0.34554626 0.48499976 0.40484388 0.44844904 0.41209894 0.41357637
  0.46451571 0.51078538 0.39888427 0.53249241 0.50999646 0.34583242
  0.42669734 0.38508666 0.3894548  0.52580389 0.54729382 0.36779267
  0.44137272 0.44181877 0.42253565 0.45997721 0.44114423 0.3984916
  0.40118109 0.59295122 0.38893061 0.44228031 0.50866889 0.36448708
  0.41635976 0.5539156  0.45473793 0.54632061 0.47927135 0.44661003
  0.50419358 0.50725911 0.56465607 0.49144078 0.42587195 0.41654046
  0.3938435  0.37812617 0.45189559 0.50867942 0.4422325  0.47699273
  0.44405724 0.44513762 0.41715121 0.41931765 0.44026611 0.46519948
  0.39308641 0.45987232 0.34056814 0.49450389 0.49624981 0.34531879
  0.50020244 0.5570533  0.51309744 0.40163971]

The y_data shown above is produced from x_data by:

y_data = np.dot([0.100, 0.200], x_data) + 0.300
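Written out per sample, the broadcasted np.dot is just 0.1*x1 + 0.2*x2 + 0.3. A quick numpy check (not part of the original script) confirms this:

# Element-wise version of the label rule: y[i] = 0.1*x1[i] + 0.2*x2[i] + 0.3
y_check = 0.100 * x_data[0, :] + 0.200 * x_data[1, :] + 0.300
print(np.allclose(y_check, y_data))   # True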
Now build the linear model y = W * x_data + b, use the mean squared error as the loss, and minimize it with gradient descent:

b = tf.Variable(tf.zeros([1]))                          # bias, initialized to 0
W = tf.Variable(tf.random_uniform([1, 2], -1.0, 1.0))   # weights, uniform in [-1, 1]
y = tf.matmul(W, x_data) + b                            # model output, shape (1, 100)

loss = tf.reduce_mean(tf.square(y - y_data))            # mean squared error
optimizer = tf.train.GradientDescentOptimizer(0.5)      # learning rate 0.5
train = optimizer.minimize(loss)

init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

for step in np.arange(0, 201):                          # 201 training steps
    sess.run(train)
    if step % 20 == 0:                                  # report progress every 20 steps
        print(step, sess.run(W), sess.run(b))
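Note that tf.Session, tf.random_uniform, and tf.train.GradientDescentOptimizer belong to the TensorFlow 1.x API. On TensorFlow 2.x, where those symbols were removed, a rough equivalent sketch using eager execution and tf.GradientTape (assuming the x_data and y_data defined above) would look like this:

# TensorFlow 2.x sketch of the same fit: no graph, no Session
W = tf.Variable(tf.random.uniform([1, 2], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
optimizer = tf.keras.optimizers.SGD(learning_rate=0.5)
y_target = y_data.astype(np.float32)          # cast labels to match the float32 model
for step in range(201):
    with tf.GradientTape() as tape:
        y_pred = tf.matmul(W, x_data) + b                      # linear model
        loss = tf.reduce_mean(tf.square(y_pred - y_target))    # mean squared error
    grads = tape.gradient(loss, [W, b])
    optimizer.apply_gradients(zip(grads, [W, b]))
    if step % 20 == 0:
        print(step, W.numpy(), b.numpy())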
The result is shown below:

>>> import testTensorflow
0 [[ 0.61865866 0.54848659]] [-0.35512698]
20 [[ 0.27226886 0.31722128]] [ 0.14856057]
40 [[ 0.15020847 0.23340638]] [ 0.25623968]
60 [[ 0.11460328 0.20955895]] [ 0.28735051]
80 [[ 0.10424114 0.20274341]] [ 0.29634258]
100 [[ 0.10123044 0.20078911]] [ 0.2989423]
120 [[ 0.10035671 0.20022736]] [ 0.29969406]
140 [[ 0.10010336 0.20006558]] [ 0.2999115]
160 [[ 0.10002995 0.20001893]] [ 0.29997438]
180 [[ 0.10000868 0.20000547]] [ 0.29999259]
200 [[ 0.10000249 0.20000155]] [ 0.29999787]
>>>
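W converges toward the true weights [0.1, 0.2] and b toward the true bias 0.3. As a final check (a small sketch that reuses the session and variables from the script above), the fitted parameters reproduce y_data almost exactly:

W_fit, b_fit = sess.run(W), sess.run(b)
y_fit = np.dot(W_fit, x_data) + b_fit         # predictions of the fitted model
print(np.max(np.abs(y_fit - y_data)))         # maximum absolute error, close to 0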