>>> import tensorflow.compat.v1 as tf
>>> tf.disable_v2_behavior()

 

 

>>> x_train = [1,2,3]
>>> y_train = [1,2,3]
>>>
>>> W = tf.Variable(tf.random_normal([1]), name = 'weight')
>>> b = tf.Variable(tf.random_normal([1]), name = 'bias')

W and b are defined as Variable nodes.

A Variable is a value that TensorFlow itself manages: when the graph runs, TensorFlow updates it internally, so these are the trainable parameters.

The arguments specify the shape (here a random value of shape [1]) and a name.

 

 

>>> hypothesis = x_train * W + b
>>> cost = tf.reduce_mean(tf.square(hypothesis - y_train))

tf.reduce_mean averages over the tensor, so cost is the mean squared error.
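Once the Session below exists, you can check directly that tf.reduce_mean just averages the elements of a tensor (an illustrative snippet, not from the original notes):

>>> sess.run(tf.reduce_mean([1., 2., 3., 4.]))   # (1 + 2 + 3 + 4) / 4
2.5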

 

>>> optimizer = tf.train.GradientDescentOptimizer(learning_rate = 0.01)
>>> train = optimizer.minimize(cost)

This defines the optimizer; minimize(cost) builds a train op that adjusts W and b on its own to reduce the cost.
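What minimize builds is one gradient-descent step per sess.run(train) call. A hand-rolled equivalent for this particular cost (an illustrative sketch, not part of the original lecture) would be:

>>> # d(cost)/dW = mean(2*(W*x + b - y)*x),  d(cost)/db = mean(2*(W*x + b - y))
>>> grad_W = tf.reduce_mean(2 * (W * x_train + b - y_train) * x_train)
>>> grad_b = tf.reduce_mean(2 * (W * x_train + b - y_train))
>>> # one update step: W := W - learning_rate * grad_W, and likewise for b
>>> manual_train = [W.assign(W - 0.01 * grad_W), b.assign(b - 0.01 * grad_b)]

Running sess.run(manual_train) once would then perform the same kind of update that sess.run(train) does.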

 

At this point the whole graph is built.

 

 

 

To actually run it, you need a Session.

>>> sess = tf.Session()
>>> sess.run(tf.global_variables_initializer())
>>>
>>> for step in range(2001):
...     sess.run(train)
...     if step % 20 == 0:
...             print(step, sess.run(cost), sess.run(W), sess.run(b))

Before running any Variable, you must call tf.global_variables_initializer().
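If you skip this step, evaluating a Variable fails with an error along these lines (abridged):

>>> sess.run(W)   # before the initializer has run
FailedPreconditionError: Attempting to use uninitialized value weight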

 

 

Output:

0 0.004482827 [0.9443451] [0.06213798]
20 0.0007200699 [0.9673146] [0.06817147]
40 0.0006235919 [0.97078884] [0.06582024]
60 0.00056608004 [0.9723461] [0.0628081]
80 0.0005141205 [0.9736634] [0.05986408]
100 0.00046693053 [0.97490275] [0.05705139]
120 0.00042407462 [0.9760824] [0.05437026]
140 0.00038515366 [0.9772064] [0.05181508]
160 0.0003498014 [0.9782776] [0.04937999]
180 0.00031769645 [0.9792984] [0.04705936]
200 0.0002885371 [0.9802714] [0.04484779]
220 0.00026205392 [0.98119855] [0.04274009]
240 0.00023800116 [0.9820821] [0.04073146]
260 0.00021615803 [0.98292416] [0.03881725]
280 0.00019631743 [0.98372674] [0.036993]
300 0.00017829782 [0.9844915] [0.03525446]
320 0.00016193358 [0.9852203] [0.03359763]
340 0.00014707215 [0.9859149] [0.03201869]
360 0.00013357187 [0.9865769] [0.03051394]
380 0.00012131266 [0.98720765] [0.0290799]
400 0.000110177694 [0.9878088] [0.02771328]
420 0.00010006648 [0.9883818] [0.02641088]
440 9.0881986e-05 [0.98892784] [0.02516968]
460 8.253934e-05 [0.9894482] [0.02398681]
480 7.496376e-05 [0.9899441] [0.02285952]
500 6.808351e-05 [0.9904167] [0.02178513]
520 6.1833765e-05 [0.99086714] [0.02076128]
540 5.6157976e-05 [0.99129635] [0.01978554]
560 5.100429e-05 [0.99170536] [0.01885567]
580 4.6322017e-05 [0.99209523] [0.0179695]
600 4.2071086e-05 [0.9924667] [0.01712498]
620 3.8209277e-05 [0.9928207] [0.01632018]
640 3.4702516e-05 [0.9931581] [0.01555322]
660 3.151784e-05 [0.99347967] [0.01482228]
680 2.86249e-05 [0.99378604] [0.0141257]
700 2.5997879e-05 [0.9940781] [0.01346186]
720 2.3611e-05 [0.9943564] [0.0128292]
740 2.1444072e-05 [0.99462163] [0.01222627]
760 1.9476416e-05 [0.99487436] [0.01165172]
780 1.7688893e-05 [0.9951152] [0.01110416]
800 1.606514e-05 [0.9953449] [0.0105823]
820 1.4590369e-05 [0.9955636] [0.01008495]
840 1.3251399e-05 [0.9957721] [0.00961099]
860 1.2034613e-05 [0.9959708] [0.00915931]
880 1.0930289e-05 [0.99616015] [0.00872885]
900 9.926851e-06 [0.9963406] [0.00831865]
920 9.016116e-06 [0.99651253] [0.00792773]
940 8.188387e-06 [0.99667645] [0.00755517]
960 7.436943e-06 [0.99683267] [0.00720011]
980 6.7545097e-06 [0.9969815] [0.00686174]
1000 6.134145e-06 [0.99712336] [0.00653927]
1020 5.571436e-06 [0.99725854] [0.00623195]
1040 5.060231e-06 [0.9973874] [0.00593907]
1060 4.595509e-06 [0.9975102] [0.00565995]
1080 4.17375e-06 [0.9976272] [0.00539395]
1100 3.7907303e-06 [0.9977387] [0.00514045]
1120 3.442783e-06 [0.997845] [0.00489886]
1140 3.1268198e-06 [0.9979462] [0.00466865]
1160 2.8398774e-06 [0.99804276] [0.00444925]
1180 2.5791542e-06 [0.99813473] [0.00424015]
1200 2.3423493e-06 [0.9982224] [0.00404088]
1220 2.127582e-06 [0.998306] [0.00385098]
1240 1.932089e-06 [0.99838555] [0.00367]
1260 1.7548537e-06 [0.99846137] [0.00349755]
1280 1.593911e-06 [0.99853367] [0.00333321]
1300 1.4475062e-06 [0.9986026] [0.00317659]
1320 1.314707e-06 [0.99866825] [0.00302731]
1340 1.1940488e-06 [0.99873084] [0.00288505]
1360 1.0845918e-06 [0.99879044] [0.00274948]
1380 9.850884e-07 [0.99884737] [0.00262029]
1400 8.9452607e-07 [0.9989015] [0.00249717]
1420 8.1248487e-07 [0.99895304] [0.00237982]
1440 7.3786003e-07 [0.9990023] [0.00226801]
1460 6.701368e-07 [0.9990492] [0.00216144]
1480 6.086809e-07 [0.9990939] [0.00205987]
1500 5.528352e-07 [0.9991364] [0.00196308]
1520 5.0212356e-07 [0.999177] [0.00187084]
1540 4.560474e-07 [0.99921566] [0.00178294]
1560 4.1417343e-07 [0.99925244] [0.00169918]
1580 3.7618702e-07 [0.9992876] [0.00161937]
1600 3.4163455e-07 [0.9993211] [0.00154327]
1620 3.1034298e-07 [0.99935293] [0.00147077]
1640 2.818894e-07 [0.99938333] [0.00140169]
1660 2.560083e-07 [0.9994123] [0.00133585]
1680 2.3252926e-07 [0.99943995] [0.00127311]
1700 2.1121163e-07 [0.9994663] [0.00121331]
1720 1.9178516e-07 [0.9994914] [0.00115631]
1740 1.7425428e-07 [0.99951524] [0.00110202]
1760 1.5823713e-07 [0.99953794] [0.00105029]
1780 1.4376748e-07 [0.9995596] [0.00100099]
1800 1.3057314e-07 [0.9995802] [0.000954]
1820 1.1861422e-07 [0.99959993] [0.00090921]
1840 1.0773581e-07 [0.99961877] [0.00086652]
1860 9.7852535e-08 [0.99963665] [0.00082589]
1880 8.8909566e-08 [0.9996536] [0.00078714]
1900 8.074753e-08 [0.9996699] [0.00075019]
1920 7.332803e-08 [0.9996854] [0.00071503]
1940 6.665119e-08 [0.99970007] [0.00068147]
1960 6.0536365e-08 [0.9997143] [0.00064951]
1980 5.4979093e-08 [0.99972755] [0.00061905]
2000 4.9936585e-08 [0.9997405] [0.00058999]

As training proceeds, the cost keeps shrinking, and you can see W converging to 1 and b to 0.

 

 

 

 

When you want to feed values in at run time, use placeholders.

Define them first:

>>> X = tf.placeholder(tf.float32)
>>> Y = tf.placeholder(tf.float32)
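For the training loop below to use these placeholders, the model nodes have to be rebuilt on top of X and Y instead of the fixed x_train/y_train; this step is implied but not shown in the snippet (same pattern as above):

>>> hypothesis = X * W + b
>>> cost = tf.reduce_mean(tf.square(hypothesis - Y))
>>> train = optimizer.minimize(cost)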

 

 

>>> for step in range(2001):
...     cost_val, W_val, b_val, _ = \
...             sess.run([cost, W, b, train],
...                     feed_dict = {X: [1,2,3], Y:[1,2,3]})
...     if step % 20 == 0:
...             print(step, cost_val, W_val, b_val)

The data is handed over through feed_dict.

When calling sess.run, you can also bundle several nodes into a list and run them all at once, passing in whatever data they need at the same time.

 

>>> X = tf.placeholder(tf.float32, shape = [None])
>>> Y = tf.placeholder(tf.float32, shape = [None])

You can also give placeholders a shape; [None] means any number of elements, so when feeding you can pass, say,

    feed_dict = {X: [1,2,3,4,5], Y: [2.1, 3.1, 4.1, 5.1, ...]}

that is, value lists of any length.

 

 

 

 

 

>>> # the prediction at X = 5 is now easy to get
>>> print(sess.run(hypothesis, feed_dict = {X: [5]}))
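Because X was declared with shape = [None], several inputs can also be fed at once (illustrative values; with W ≈ 1 and b ≈ 0 the predictions come out close to the inputs themselves):

>>> print(sess.run(hypothesis, feed_dict = {X: [1.5, 3.5]}))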

 

https://www.youtube.com/watch?v=mQGwjrStQgg&feature=youtu.be

 
