Basic neural network in TensorFlow

I am trying to implement a basic neural network in TensorFlow. The inputs are just random ones/zeros in the form (x, y, z), but I want my network to output 1 when x = 1 and to output 0 otherwise.

This is my network code:

import tensorflow as tf
import numpy as np

x_data = np.array([[0,0,1],
         [0,1,1],
         [1,0,0],
         [0,1,0],
         [1,1,1],
         [0,1,1],
         [1,1,1]])

x_test = np.array([[1,1,1], [0,1,0], [0,0,0]])
y_data = np.array([0,0,1,0,1,0,1])


iters = 1000
learning_rate = 0.1
weights = {
    'w1': tf.Variable(tf.random_normal([3, 5])),
    'w2': tf.Variable(tf.random_normal([5, 1])),
}
bias = {
    'b1': tf.Variable(tf.random_normal([5])),
    'b2': tf.Variable(tf.random_normal([1])),
}

def predict(x, weights, bias):
    l1 = tf.add(tf.matmul(x, weights['w1']), bias['b1'])
    l1 = tf.nn.sigmoid(l1)
    out = tf.add(tf.matmul(l1, weights['w2']), bias['b2'])
    return out


x = tf.placeholder(tf.float32, shape=(None,3))
y = tf.placeholder(tf.float32, shape=(None))

pred = predict(x, weights, bias)

cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=pred, labels=y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)

init = tf.global_variables_initializer()

# graph
with tf.Session() as sess:
    sess.run(init)

    for i in range(0, iters):
        _, c = sess.run([optimizer, cost], feed_dict={x: x_data, y: y_data})
        if i % 100 == 0:
            print("cost: " + str(c))

    print(sess.run(weights['w1']))
    print(sess.run(pred, feed_dict={x: x_test}))


This outputs:

[-0.37119362]
[-0.23264697]
[-0.14701667]


However, my test data should output [1, 0, 0], and I'm really not sure what is wrong here. I've tried playing with the hyperparameters and searching Stack Overflow. I also tried using softmax_cross_entropy as the cost function, but it gives me an error saying that the logits do not match the labels.

Does anyone know why this doesn't output what I expect?



1 answer


Your output first needs to go through an activation function (i.e. tf.nn.sigmoid) before it can be read as a probability. Note that tf.nn.sigmoid_cross_entropy_with_logits accepts the logits (the values before the sigmoid activation), so the cost should still be computed on the raw output.
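In other words, the pattern looks like this (a minimal sketch reusing your predict function; the variable names are illustrative):

logits = predict(x, weights, bias)   # raw pre-activation output
probs = tf.nn.sigmoid(logits)        # probabilities, used only when predicting
cost = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=y))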

Also, you had a shape problem with your input y_data, which was (7,) instead of (7, 1), so the labels did not line up with the (7, 1) logits.
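If you would rather keep your flat label array, reshaping it should work as well (an untested sketch):

y_data = np.array([0,0,1,0,1,0,1]).reshape(-1, 1)  # shape (7, 1) to match the logits

(and give the y placeholder shape=(None, 1), as in the code below).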

Here's a working version of your code:



import tensorflow as tf
import numpy as np

x_data = np.array([[0,0,1],
         [0,1,1],
         [1,0,0],
         [0,1,0],
         [1,1,1],
         [0,1,1],
         [1,1,1]])

x_test = np.array([[1,1,1], [0,1,0], [0,0,0]])
y_data = np.array([[0],[0],[1],[0],[1],[0],[1]])


iters = 1000
learning_rate = 0.1
weights = {
    'w1': tf.Variable(tf.random_normal([3, 5])),
    'w2': tf.Variable(tf.random_normal([5, 1])),
}
bias = {
    'b1': tf.Variable(tf.random_normal([5])),
    'b2': tf.Variable(tf.random_normal([1])),
}

def predict(x, weights, bias):
    l1 = tf.add(tf.matmul(x, weights['w1']), bias['b1'])
    l1 = tf.nn.sigmoid(l1)    
    out = tf.add(tf.matmul(l1, weights['w2']), bias['b2'])
    return out


x = tf.placeholder(tf.float32, shape=(None,3))
y = tf.placeholder(tf.float32, shape=(None,1))

pred = predict(x, weights, bias)
pred_postactivation = tf.nn.sigmoid(pred)

cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=pred, labels=y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)

init = tf.global_variables_initializer()

# graph
with tf.Session() as sess:
    sess.run(init)

    for i in range(0, iters):
        _, c = sess.run([optimizer, cost], feed_dict={x: x_data, y: y_data})
        if i % 100 == 0:
            print("cost: " + str(c))

    print(sess.run(weights['w1']))
    print(sess.run(pred_postactivation, feed_dict={x: x_test}))


This outputs:

cost: 1.23954
cost: 0.583582
cost: 0.455403
cost: 0.327644
cost: 0.230051
cost: 0.165296
cost: 0.123712
cost: 0.0962315
cost: 0.0772587
cost: 0.0636141
[[ 0.94488049  0.78105074  0.81608331  1.75763154 -4.47565413]
 [-2.61545444  0.26020721  0.151407    1.33066297  1.00578034]
 [-1.2027328   0.05413296 -0.13530347 -0.39841765  0.16014417]]
[[ 0.92521071]
 [ 0.05481482]
 [ 0.07227208]]
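If you want hard 0/1 class labels instead of probabilities, you can round the sigmoid output, e.g. with this illustrative extra line inside the session:

print(sess.run(tf.round(pred_postactivation), feed_dict={x: x_test}))  # thresholds at 0.5: yields 1., 0., 0. for the probabilities above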

