@yhtgrace
Last active February 21, 2017 18:04
TensorFlow implementation of logistic regression with an L1 regularizer
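The snippet assumes a feature matrix x, binary labels y, and their numpy views x_ and y_ already exist. The short setup below is a hypothetical sketch (make_classification and the names features/labels are illustrative, not from the original gist) showing one way those inputs, plus the imports the snippet needs, might be prepared so it runs end to end:

import numpy as np
import pandas as pd
import tensorflow as tf
from sklearn.datasets import make_classification
from sklearn.metrics import classification_report

# hypothetical stand-in data; the original gist presumably loads its own x and y
features, labels = make_classification(n_samples=10000, n_features=20, random_state=0)
x = pd.DataFrame(features)                       # feature matrix, shape (n_samples, n_features)
y = pd.Series(labels)                            # binary labels in {0, 1}
x_ = x.values.astype(np.float32)                 # dense array fed to the graph
y_ = y.values.reshape(-1, 1).astype(np.float32)  # column vector matching y_target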
# placeholders for a mini-batch of features and binary targets
x_data = tf.placeholder(shape = [None, x.shape[1]], dtype = tf.float32)
y_target = tf.placeholder(shape = [None, 1], dtype = tf.float32)
# prediction: linear model passed through a sigmoid, rounded to hard 0/1 labels
W = tf.Variable(tf.random_normal(shape = [x.shape[1], 1]))
b = tf.Variable(tf.random_normal(shape = [1]))
y_logits = tf.matmul(x_data, W) + b
y_hat = tf.round(tf.sigmoid(y_logits))
# loss function: summed sigmoid cross-entropy plus an L1 penalty on the weights
loss = tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(
    logits = y_logits,
    labels = y_target))
regularizer = tf.reduce_sum(tf.abs(W))  # L1 norm of W (the bias is not penalized)
loss = loss + 1. * regularizer          # regularization strength lambda = 1.0
# training: Adam with learning rate 0.1, then initialize all variables
train_step = tf.train.AdamOptimizer(0.1).minimize(loss)
sess = tf.InteractiveSession()
init = tf.global_variables_initializer()
sess.run(init)
# training loop: mini-batches taken over the rows (samples) of x_
n_epochs = 10
batch_size = 1024
for _ in range(n_epochs):
    for i in range(0, x_.shape[0], batch_size):
        xs = x_[i:i+batch_size, :]
        ys = y_[i:i+batch_size]
        sess.run(train_step, feed_dict = {x_data: xs, y_target: ys})
# evaluate on the full data set
y_pred = sess.run(y_hat, feed_dict = {x_data: x_, y_target: y_})
print(classification_report(y.values.astype(int), y_pred.ravel().astype(int)))
sess.close()