Shared: 구봄과 함께하는 softmax classify.sagews — Open in CoCalc
Author: 희동 윤
Views : 5
import tensorflow as tf

tf.set_random_seed(777)  # for reproducibility

# x_data: six per-student scores; y_data: one-hot encoded grade (5 classes)
x_data = [[9.6, 10, 4,   4.2, 9,    7.36],
          [2.4, 2,  0,   2.4, 0,    1.36],
          [6,   2,  7,   5.4, 6,    5.28],
          [7.8, 10, 8.5, 1.8, 2,    6.02],
          [4,   10, 8.5, 4.8, 9.5,  7.36],
          [9.6, 10, 7,   6,   10.5, 8.62],
          [6,   8,  4.5, 1.8, 6.5,  5.36]]
y_data = [[0, 1, 0, 0, 0],
          [0, 0, 0, 1, 0],
          [0, 0, 1, 0, 0],
          [0, 0, 1, 0, 0],
          [0, 1, 0, 0, 0],
          [1, 0, 0, 0, 0],
          [0, 0, 1, 0, 0]]

X = tf.placeholder("float", [None, 6])
Y = tf.placeholder("float", [None, 5])
nb_classes = 5

W = tf.Variable(tf.random_normal([6, nb_classes]), name='weight')
b = tf.Variable(tf.random_normal([nb_classes]), name='bias')

# Keep the raw logits around: the numerically stable loss below needs them.
logits = tf.matmul(X, W) + b
# tf.nn.softmax computes softmax activations:
# softmax = exp(logits) / reduce_sum(exp(logits), dim)
hypothesis = tf.nn.softmax(logits)

# BUG FIX: the original loss -reduce_sum(Y * log(hypothesis)) printed nan at
# every step (see the captured output below).  With these unnormalized inputs
# the softmax saturates to exactly 0 for some classes, so log(0) = -inf and
# the cost/gradients become nan.  softmax_cross_entropy_with_logits_v2
# computes the same cross-entropy directly from the logits in a numerically
# stable way.
cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=Y, logits=logits))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

# Launch graph
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    for step in range(2401):
        sess.run(optimizer, feed_dict={X: x_data, Y: y_data})
        if step % 200 == 0:
            print(step, sess.run(cost, feed_dict={X: x_data, Y: y_data}))

    print('--------------')
    # Report the predicted class for every training example.
    # FIX: tf.arg_max is deprecated (warning in the captured output);
    # tf.argmax is the supported replacement with identical semantics.
    L = []
    for i in range(len(x_data)):
        L.append(sess.run(hypothesis, feed_dict={X: [x_data[i]]}))
        print(L[i], sess.run(tf.argmax(L[i], 1)))

    # Predict the grade for one unseen score vector.
    a = sess.run(hypothesis,
                 feed_dict={X: [[6.8, 10, 3.5, 4.8, 10, 7.02]]})
    print(a, sess.run(tf.argmax(a, 1)))
(0, nan) (200, nan) (400, nan) (600, nan) (800, nan) (1000, nan) (1200, nan) (1400, nan) (1600, nan) (1800, nan) (2000, nan) (2200, nan) (2400, nan)
WARNING:tensorflow:From :11: arg_max (from tensorflow.python.ops.gen_math_ops) is deprecated and will be removed in a future version. Instructions for updating: Use `argmax` instead
-------------- (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0])) (array([[nan, nan, nan, nan, nan]], dtype=float32), array([0]))