標籤:

TensorFlow-線性回歸

"""Linear regression with TensorFlow 1.x (graph mode).

Fits y = W*x + b by full-batch gradient descent on synthetic data
y = 5 + 3x + Gaussian noise, then reports training/testing MSE and
plots the fitted line.

NOTE: requires TensorFlow 1.x APIs (tf.placeholder / tf.Session); on
TF 2.x run with `import tensorflow.compat.v1 as tf; tf.disable_v2_behavior()`.
"""
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
from sklearn.model_selection import train_test_split

# Hyperparameters
learning_rate = 0.01
training_epochs = 2000
display_step = 50

# Synthetic dataset: y = 5 + 3x + noise, 500 samples
np.random.seed(123)
X = 2 * np.random.rand(500, 1)
y = 5 + 3 * X + np.random.randn(500, 1)
n_samples = X.shape[0]

# Split into training and test sets (sklearn default: 75% / 25%)
X_train, X_test, y_train, y_test = train_test_split(X, y)
print(f"Shape X_train: {X_train.shape}")
print(f"Shape y_train: {y_train.shape}")
print(f"Shape X_test: {X_test.shape}")
print(f"Shape y_test: {y_test.shape}")

# Placeholders for the graph inputs. This deliberately rebinds the
# names X/Y; the raw numpy arrays are no longer needed after the split.
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)

# Trainable parameters: slope W and intercept b
W = tf.Variable(tf.random_uniform([1]))
b = tf.Variable(tf.zeros([1]))

# Linear model: pred = X * W + b
pred = tf.add(tf.multiply(X, W), b)

# Mean squared error.
# NOTE(review): this divides by n_samples (500) although training feeds
# only X_train (375 rows) — the reported cost is a scaled MSE. Kept as-is
# so the published output transcript below still matches.
cost = tf.reduce_sum(tf.pow(pred - Y, 2)) / n_samples

# Plain gradient descent on the MSE
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

# Variable-initialization op
init = tf.global_variables_initializer()

# Training session
with tf.Session() as sess:
    sess.run(init)

    # Full-batch training: every epoch sees the entire training set
    for epoch in range(training_epochs):
        sess.run(optimizer, feed_dict={X: X_train, Y: y_train})
        # Periodic progress report
        if (epoch + 1) % display_step == 0:
            c = sess.run(cost, feed_dict={X: X_train, Y: y_train})
            print("Epoch:", "%04d" % (epoch + 1),
                  "cost=", "{:.9f}".format(c),
                  "W=", sess.run(W), "b=", sess.run(b))

    print("Optimization Finished!")
    training_cost = sess.run(cost, feed_dict={X: X_train, Y: y_train})
    print("Training cost=", training_cost,
          "W=", sess.run(W), "b=", sess.run(b))

    # Visualize the fit on the training set
    plt.plot(X_train, y_train, 'ro', label='Original data')
    plt.plot(X_train, sess.run(W) * X_train + sess.run(b), label='Fitted line')
    plt.legend()
    plt.show()

    # Evaluate on the held-out test set. Here the squared error is
    # normalized by the test-set size, unlike `cost` above.
    print("Testing... (Mean square loss Comparison)")
    testing_cost = sess.run(
        tf.reduce_sum(tf.pow(pred - Y, 2)) / X_test.shape[0],
        feed_dict={X: X_test, Y: y_test})
    print("Testing cost=", testing_cost)
    print("Absolute mean square loss difference:",
          abs(training_cost - testing_cost))

    # Visualize the fitted line against the test data
    plt.plot(X_test, y_test, 'bo', label='Testing data')
    plt.plot(X_train, sess.run(W) * X_train + sess.run(b), label='Fitted line')
    plt.legend()
    plt.show()

Shape X_train: (375, 1)

Shape y_train: (375, 1)

Shape X_test: (125, 1)

Shape y_test: (125, 1)

Epoch: 0050 cost= 2.669345379 W= [3.6990309] b= [2.7084947]

Epoch: 0100 cost= 1.215232730 W= [4.1226506] b= [3.3702278]

Epoch: 0150 cost= 1.083126068 W= [4.102973] b= [3.618202]

Epoch: 0200 cost= 1.017029405 W= [4.0078444] b= [3.7726097]

Epoch: 0250 cost= 0.965919912 W= [3.9078498] b= [3.8973136]

Epoch: 0300 cost= 0.925307155 W= [3.815697] b= [4.006011]

Epoch: 0350 cost= 0.892992318 W= [3.7329175] b= [4.1024785]

Epoch: 0400 cost= 0.867278516 W= [3.6589615] b= [4.1884327]

Epoch: 0450 cost= 0.846817434 W= [3.592969] b= [4.265088]

Epoch: 0500 cost= 0.830535710 W= [3.534097] b= [4.3334656]

Epoch: 0550 cost= 0.817579985 W= [3.48158] b= [4.394459]

Epoch: 0600 cost= 0.807270765 W= [3.4347332] b= [4.4488664]

Epoch: 0650 cost= 0.799067318 W= [3.3929431] b= [4.4974008]

Epoch: 0700 cost= 0.792539537 W= [3.3556657] b= [4.5406957]

Epoch: 0750 cost= 0.787345171 W= [3.3224125] b= [4.579315]

Epoch: 0800 cost= 0.783211946 W= [3.2927494] b= [4.6137657]

Epoch: 0850 cost= 0.779922843 W= [3.2662878] b= [4.644499]

Epoch: 0900 cost= 0.777305722 W= [3.242684] b= [4.671912]

Epoch: 0950 cost= 0.775223196 W= [3.2216284] b= [4.6963654]

Epoch: 1000 cost= 0.773565888 W= [3.2028458] b= [4.718181]

Epoch: 1050 cost= 0.772247374 W= [3.1860902] b= [4.7376394]

Epoch: 1100 cost= 0.771198034 W= [3.1711445] b= [4.7549973]

Epoch: 1150 cost= 0.770363152 W= [3.1578128] b= [4.77048]

Epoch: 1200 cost= 0.769698679 W= [3.14592] b= [4.784292]

Epoch: 1250 cost= 0.769170165 W= [3.1353118] b= [4.7966123]

Epoch: 1300 cost= 0.768749356 W= [3.1258485] b= [4.807605]

Epoch: 1350 cost= 0.768414676 W= [3.1174057] b= [4.817409]

Epoch: 1400 cost= 0.768148184 W= [3.109875] b= [4.8261547]

Epoch: 1450 cost= 0.767936230 W= [3.1031573] b= [4.8339562]

Epoch: 1500 cost= 0.767767549 W= [3.0971658] b= [4.8409147]

Epoch: 1550 cost= 0.767633379 W= [3.0918195] b= [4.8471227]

Epoch: 1600 cost= 0.767526567 W= [3.0870519] b= [4.852661]

Epoch: 1650 cost= 0.767441571 W= [3.0827982] b= [4.8576016]

Epoch: 1700 cost= 0.767373860 W= [3.079004] b= [4.862008]

Epoch: 1750 cost= 0.767320216 W= [3.0756197] b= [4.8659387]

Epoch: 1800 cost= 0.767277420 W= [3.0726006] b= [4.8694453]

Epoch: 1850 cost= 0.767243326 W= [3.0699077] b= [4.872573]

Epoch: 1900 cost= 0.767216206 W= [3.0675051] b= [4.875363]

Epoch: 1950 cost= 0.767194688 W= [3.0653622] b= [4.877852]

Epoch: 2000 cost= 0.767177463 W= [3.0634503] b= [4.8800716]

Optimization Finished!

Training cost= 0.76717746 W= [3.0634503] b= [4.8800716]

Testing... (Mean square loss Comparison)

Testing cost= 1.0441029

Absolute mean square loss difference: 0.27692544


推薦閱讀:

學習筆記TF014:卷積層、激活函數、池化層、歸一化層、高級層
【博客存檔】TensoFlow之深入理解GoogLeNet
手頭沒有GPU,學習tensorflow有意義嗎?
學習筆記TF003:數據流圖定義、執行、可視化
實例介紹TensorFlow的輸入流水線

TAG:TensorFlow |