
TensorFlow - Logistic Regression

1. Sigmoid (without ReLU)

#!/usr/bin/python3
# -*- coding:utf-8 -*-
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

"""
Logistic regression
"""

# Data: two 1-D Gaussian clusters, one per class
x1 = np.random.normal(-3, 1, 1000)[:, np.newaxis]  # 1000x1, class 0
x2 = np.random.normal(3, 1, 1000)[:, np.newaxis]   # 1000x1, class 1
train_x = np.vstack((x1, x2))  # 2000x1
train_y = np.asarray([0] * len(x1) + [1] * len(x2))[:, np.newaxis]  # 2000x1

plt.scatter(train_x, train_y)
# plt.show()
x = tf.placeholder(tf.float32, [None, 1], 'x')
y_ = tf.placeholder(tf.float32, [None, 1], 'y_')

with tf.variable_scope('wb'):
    w = tf.get_variable('w', (1, 1), dtype=tf.float32, initializer=tf.random_uniform_initializer)
    b = tf.Variable(tf.zeros([1, 1]) + 0.1)
with tf.variable_scope('wb2') as scope:
    # scope.reuse_variables()
    w2 = tf.get_variable('w2', (1, 1), dtype=tf.float32, initializer=tf.random_uniform_initializer)
    b2 = tf.Variable(tf.zeros([1, 1]) + 0.1)


# keep the pre-sigmoid logits so the loss can be computed on them
logits = tf.add(tf.matmul(x, w), b)
y = tf.nn.sigmoid(logits)
# y = tf.nn.relu(tf.add(tf.matmul(x, w), b))
# y = tf.nn.sigmoid(tf.add(tf.matmul(y, w2), b2))


# loss function
# loss = tf.reduce_mean(tf.reduce_sum(tf.square(y - y_), reduction_indices=[1]))
# For a single-unit binary output, use sigmoid cross-entropy on the raw logits;
# softmax over one logit is always 1, so softmax cross-entropy would be constantly 0.
loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y_, logits=logits))

train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

sess=tf.InteractiveSession(graph=tf.get_default_graph())

tf.global_variables_initializer().run()


for step in range(1000):
    sess.run(train_op, feed_dict={x: train_x, y_: train_y})

all_xs = np.linspace(-7, 7, 100)[:, np.newaxis]
prediction_value = sess.run(y, feed_dict={x: all_xs})
lines = plt.plot(all_xs, prediction_value, 'r-', lw=2)
plt.show()
sess.close()

Result:

(figure: the learned sigmoid curve plotted in red over the scattered training points)
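The red curve is just the logistic function sigmoid(z) = 1 / (1 + e^(-z)) applied to w*x + b, which maps any real score into (0, 1). A quick standalone check, using illustrative parameter values rather than the fitted ones:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

# With illustrative parameters w=1.5, b=0, the curve crosses 0.5 at x=0,
# which would be the decision boundary between the two clusters.
for xv in (-4.0, 0.0, 4.0):
    print(xv, sigmoid(1.5 * xv))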

2. Sigmoid (with a ReLU hidden layer)

#!/usr/bin/python3
# -*- coding:utf-8 -*-
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

"""
Logistic regression
"""

# Data: two 1-D Gaussian clusters, one per class
x1 = np.random.normal(-3, 1, 1000)[:, np.newaxis]  # 1000x1, class 0
x2 = np.random.normal(3, 1, 1000)[:, np.newaxis]   # 1000x1, class 1
train_x = np.vstack((x1, x2))  # 2000x1
train_y = np.asarray([0] * len(x1) + [1] * len(x2))[:, np.newaxis]  # 2000x1

plt.scatter(train_x, train_y)
# plt.show()
x = tf.placeholder(tf.float32, [None, 1], 'x')
y_ = tf.placeholder(tf.float32, [None, 1], 'y_')

with tf.variable_scope('wb'):
    w = tf.get_variable('w', (1, 1), dtype=tf.float32, initializer=tf.random_uniform_initializer)
    b = tf.Variable(tf.zeros([1, 1]) + 0.1)
with tf.variable_scope('wb2') as scope:
    # scope.reuse_variables()
    w2 = tf.get_variable('w2', (1, 1), dtype=tf.float32, initializer=tf.random_uniform_initializer)
    b2 = tf.Variable(tf.zeros([1, 1]) + 0.1)


# y = tf.nn.sigmoid(tf.add(tf.matmul(x, w), b))
h = tf.nn.relu(tf.add(tf.matmul(x, w), b))  # hidden ReLU layer
logits = tf.add(tf.matmul(h, w2), b2)
y = tf.nn.sigmoid(logits)


# loss function
# loss = tf.reduce_mean(tf.reduce_sum(tf.square(y - y_), reduction_indices=[1]))
# As in the first script, the binary cross-entropy is computed on the raw logits.
loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y_, logits=logits))

train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

sess=tf.InteractiveSession(graph=tf.get_default_graph())

tf.global_variables_initializer().run()


for step in range(1000):
    sess.run(train_op, feed_dict={x: train_x, y_: train_y})

all_xs = np.linspace(-7, 7, 100)[:, np.newaxis]
prediction_value = sess.run(y, feed_dict={x: all_xs})
lines = plt.plot(all_xs, prediction_value, 'r-', lw=2)
plt.show()
sess.close()

Result:

(figure: the fitted curve, now with a ReLU hidden layer, plotted over the training points)

3. Softmax (multi-class)

A sigmoid output only handles binary classification; for multi-class problems, softmax is used instead, as the short sketch below illustrates.
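To make the contrast concrete, here is a minimal NumPy sketch (not part of the original script) of what softmax computes: it turns a whole vector of class scores into a probability distribution that sums to 1, whereas sigmoid squashes each score independently.

import numpy as np

def softmax(z):
    # Subtract the max for numerical stability; the result sums to 1.
    e = np.exp(z - np.max(z))
    return e / e.sum()

scores = np.array([2.0, 1.0, 0.1])  # illustrative logits for 3 classes
print(softmax(scores))  # -> roughly [0.66 0.24 0.10], a distribution over the classes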

#!/usr/bin/python3
# -*- coding:utf-8 -*-
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

"""
Logistic regression
"""

from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

# MNIST images are 28x28 pixels, labeled with 10 classes (digits 0-9)
x = tf.placeholder(tf.float32, [None, 28 * 28])
y_ = tf.placeholder(tf.float32, [None, 10])

with tf.variable_scope('wb'):
    w = tf.get_variable('w', [28 * 28, 10], initializer=tf.random_uniform_initializer)
    b = tf.Variable(tf.zeros([10]) + 0.1, dtype=tf.float32)

logits = tf.add(tf.matmul(x, w), b)
y = tf.nn.softmax(logits)

# softmax_cross_entropy_with_logits applies softmax internally, so it must be
# fed the raw logits; feeding it the softmax output would apply softmax twice.
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))

train_op=tf.train.AdamOptimizer().minimize(loss)

correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
# Calculate accuracy
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

sess=tf.InteractiveSession(graph=tf.get_default_graph())

tf.global_variables_initializer().run()

for step in range(10000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    train_op.run({x: batch_xs, y_: batch_ys})
    if step % 100 == 0:
        print("step", step, 'acc', accuracy.eval({x: batch_xs, y_: batch_ys}),
              'loss', loss.eval({x: batch_xs, y_: batch_ys}))

# test acc
print('test acc', accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))

sess.close()

Result:

(figure: the per-step accuracy/loss log and the final test accuracy)

Notes:

No hidden layer is used, so the accuracy is not very high.

Next, DNN, CNN, and RNN models will be used to improve the accuracy!
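As a preview, here is a minimal sketch of how a single hidden layer could be inserted into the MNIST script above. The hidden size of 256 and the use of tf.layers.dense are illustrative assumptions, not taken from this post:

# Hypothetical sketch: replace the single linear layer with one hidden ReLU layer.
# 256 hidden units is an illustrative choice, not a tuned value.
h = tf.layers.dense(x, 256, activation=tf.nn.relu)  # hidden layer
logits = tf.layers.dense(h, 10)                     # one logit per digit class
y = tf.nn.softmax(logits)
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))

Everything else in the script (placeholders, optimizer, accuracy, training loop) would stay the same; only the model definition changes.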