- TensorFlow linear regression
1 Simple
Simple (one-variable) linear regression with tf.keras: a single Dense unit learns y = wx + b from data that follows y = -x + 1.
Python
import numpy as np
import tensorflow as tf

# Training data follows y = -x + 1
x_train = [1, 2, 3, 4]
y_train = [0, -1, -2, -3]

# A single Dense unit implements the linear model y = wx + b
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(units=1, input_dim=1))

# 'lr' is deprecated in tf.keras optimizers; use 'learning_rate'
sgd = tf.keras.optimizers.SGD(learning_rate=0.1)
model.compile(loss='mse', optimizer=sgd)
model.summary()

model.fit(x_train, y_train, epochs=200)

# Predict y for x = 5 and x = 4 (expected: -4 and -3)
y_predict = model.predict(np.array([5, 4]))
print(y_predict)
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense (Dense)                (None, 1)                 2
=================================================================
Total params: 2
Trainable params: 2
Non-trainable params: 0
_________________________________________________________________
Epoch 1/200
1/1 [==============================] - 1s 535ms/step - loss: 3.7597
Epoch 2/200
1/1 [==============================] - 0s 4ms/step - loss: 1.7389
Epoch 3/200
1/1 [==============================] - 0s 4ms/step - loss: 0.8286
... (epochs 4-198 omitted; the loss decreases steadily) ...
Epoch 199/200
1/1 [==============================] - 0s 3ms/step - loss: 6.1233e-07
Epoch 200/200
1/1 [==============================] - 0s 3ms/step - loss: 5.7627e-07
[[-3.9987378]
 [-2.9993508]]
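The data lies exactly on the line y = -x + 1, so the loss converges toward zero and the predictions for x = 5 and x = 4 approach -4 and -3. As a quick follow-up check (a sketch added here, not part of the original page), the learned weight and bias can be read back from the Dense layer:
Python
# Inspect the fitted parameters; get_weights() returns [kernel, bias]
w, b = model.layers[0].get_weights()
print(w, b)  # expect w close to [[-1.]] and b close to [1.]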
2 Multiple
Multiple (two-variable) linear regression in the TensorFlow 1.x graph/session style: the model learns y = x·W + b for 2-dimensional inputs.
Python
# TensorFlow 1.x graph/session code; on TensorFlow 2 it runs via the compat.v1 API
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()

x_data = [[1, 1], [2, 2], [3, 3]]
y_data = [[1], [2], [3]]

X = tf.placeholder(tf.float32, shape=[None, 2])
Y = tf.placeholder(tf.float32, shape=[None, 1])
W = tf.Variable(tf.random_normal([2, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

hypothesis = tf.matmul(X, W) + b                  # y = XW + b
cost = tf.reduce_mean(tf.square(hypothesis - Y))  # mean squared error
train = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(2001):
        cost_val, W_val, b_val, _ = sess.run(
            [cost, W, b, train], feed_dict={X: x_data, Y: y_data})
        if step % 200 == 0:
            print(step, cost_val, W_val, b_val)
Output
text
0 0.630719 [[ 1.96448588]
[-1.46632338]] [ 0.50560945]
200 0.0159201 [[ 2.1430881 ]
[-1.28772187]] [ 0.33275333]
400 0.00558863 [[ 2.17255759]
[-1.25825167]] [ 0.19715267]
600 0.00196186 [[ 2.19001746]
[-1.24079025]] [ 0.11681108]
800 0.000688694 [[ 2.20036244]
[-1.23044467]] [ 0.06920907]
1000 0.000241762 [[ 2.20649195]
[-1.22431529]] [ 0.04100555]
1200 8.48685e-05 [[ 2.21012259]
[-1.22068286]] [ 0.02429538]
1400 2.97948e-05 [[ 2.21227384]
[-1.21853077]] [ 0.01439506]
1600 1.04602e-05 [[ 2.21354699]
[-1.21725464]] [ 0.00852949]
1800 3.67325e-06 [[ 2.2142992 ]
[-1.21649647]] [ 0.00505449]
2000 1.29013e-06 [[ 2.21474481]
[-1.21604705]] [ 0.0029954]
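Because the two input features are identical in every sample, only the sum of the weights is identifiable: in the run above w1 + w2 ≈ 2.2147 - 1.2160 ≈ 0.999 and b ≈ 0.003, which matches y ≈ x. For comparison, here is the same regression in the tf.keras style of section 1 (an equivalent sketch, not part of the original page):
Python
import numpy as np
import tensorflow as tf

x_data = np.array([[1, 1], [2, 2], [3, 3]], dtype=np.float32)
y_data = np.array([[1], [2], [3]], dtype=np.float32)

# One Dense unit with two inputs computes y = x.W + b
model = tf.keras.Sequential([tf.keras.layers.Dense(units=1, input_dim=2)])
model.compile(loss='mse', optimizer=tf.keras.optimizers.SGD(learning_rate=0.01))
model.fit(x_data, y_data, epochs=2000, verbose=0)

print(model.predict(np.array([[4, 4]], dtype=np.float32)))  # expect a value close to 4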
3 See also