# Oct 25 In-Class Exercise Thread
import tensorflow as tf
import numpy as np
def perceptron(weights, inputs, biases, activation):
    """Single dense layer: activation(inputs @ weights + biases).

    Uses the ``@`` matmul operator instead of ``tf.matmul`` so the same
    code works with tf tensors and plain numpy arrays; for tf inputs the
    behavior is identical (``Tensor.__matmul__`` dispatches to matmul).

    Args:
        weights: rank-2 weight matrix, shape (in_dim, out_dim).
        inputs: rank-2 batch of inputs, shape (batch, in_dim).
        biases: bias vector broadcast over the batch dimension.
        activation: callable applied elementwise to the affine output.

    Returns:
        activation(inputs @ weights + biases).
    """
    nodes = inputs @ weights + biases
    return activation(nodes)
def step(nodes):
    """Step activation: elementwise 1.0 where nodes > 0, else 0.0.

    clip_by_value squashes values into [0, 1]; ceil then maps every
    strictly positive clipped value to 1 and non-positives to 0.
    """
    return tf.ceil(tf.clip_by_value(nodes, 0, 1))
# One example with 3 features. matmul requires rank-2 operands, so the
# placeholder is (1, 3) rather than the original rank-1 (3,), which
# would fail inside perceptron.
x = tf.placeholder(tf.float32, shape=(1, 3))
uniform_init = tf.random_uniform_initializer(0, 1)

# Layer 1 encodes three half-space indicators (step fires when the
# affine expression is positive):
# G1: x+y+z >= 1/2  ->  -x-y-z+0.5 <= 0
# G2: x+y+z <= 3/2  ->   x+y+z-1.5 <= 0
# G3: x+y+z >= 2.5  ->  -x-y-z+2.5 <= 0
# Columns of weights_1 are the per-constraint weight vectors.
weights_1 = [[-1, 1, -1], [-1, 1, -1], [-1, 1, -1]]
w_1_init = tf.constant_initializer(weights_1)
bias_1 = [0.5, -1.5, 2.5]
b_1_init = tf.constant_initializer(bias_1)
W1 = tf.get_variable("W1", shape=[3, 3], initializer=w_1_init)
b1 = tf.get_variable("b1", shape=[3], initializer=b_1_init)
my_layer1 = perceptron(W1, x, b1, step)

# Layer 2 combines the three indicators:
# G4: out_G1 + out_G2 + 2*out_G3 >= 2
#  -> -out_G1 - out_G2 - 2*out_G3 + 2 <= 0
weights_2 = [[-1], [-1], [-2]]
w_2_init = tf.constant_initializer(weights_2)
bias_2 = [2]
b_2_init = tf.constant_initializer(bias_2)
W2 = tf.get_variable("W2", shape=[3, 1], initializer=w_2_init)
b2 = tf.get_variable("b2", shape=[1, 1], initializer=b_2_init)
my_layer2 = perceptron(W2, my_layer1, b2, step)

session = tf.Session()
init = tf.global_variables_initializer()
session.run(init)
# Feed the single example (1, 1, 1); the original feed-dict literal
# was not valid Python.
out = session.run(my_layer1, {x: [[1, 1, 1]]})
print(out)
import tensorflow as tf
import numpy as np
def perceptron(weights, inputs, biases, activation):
    """Single dense layer: activation(inputs @ weights + biases).

    Uses the ``@`` matmul operator instead of ``tf.matmul`` so the same
    code works with tf tensors and plain numpy arrays; for tf inputs the
    behavior is identical (``Tensor.__matmul__`` dispatches to matmul).

    Args:
        weights: rank-2 weight matrix, shape (in_dim, out_dim).
        inputs: rank-2 batch of inputs, shape (batch, in_dim).
        biases: bias vector broadcast over the batch dimension.
        activation: callable applied elementwise to the affine output.

    Returns:
        activation(inputs @ weights + biases).
    """
    nodes = inputs @ weights + biases
    return activation(nodes)
def step(nodes):
    """Step activation: elementwise 1.0 where nodes > 0, else 0.0.

    clip_by_value squashes values into [0, 1]; ceil then maps every
    strictly positive clipped value to 1 and non-positives to 0.
    """
    return tf.ceil(tf.clip_by_value(nodes, 0, 1))
# NOTE(review): this section re-runs the same graph construction that
# appears earlier in the file. tf.get_variable("W1") etc. will raise
# ValueError on a second call in the same graph -- delete one copy, or
# build the duplicate inside tf.variable_scope(..., reuse=tf.AUTO_REUSE).

# One example with 3 features. matmul requires rank-2 operands, so the
# placeholder is (1, 3) rather than the original rank-1 (3,), which
# would fail inside perceptron.
x = tf.placeholder(tf.float32, shape=(1, 3))
uniform_init = tf.random_uniform_initializer(0, 1)

# Layer 1 encodes three half-space indicators (step fires when the
# affine expression is positive):
# G1: x+y+z >= 1/2  ->  -x-y-z+0.5 <= 0
# G2: x+y+z <= 3/2  ->   x+y+z-1.5 <= 0
# G3: x+y+z >= 2.5  ->  -x-y-z+2.5 <= 0
# Columns of weights_1 are the per-constraint weight vectors.
weights_1 = [[-1, 1, -1], [-1, 1, -1], [-1, 1, -1]]
w_1_init = tf.constant_initializer(weights_1)
bias_1 = [0.5, -1.5, 2.5]
b_1_init = tf.constant_initializer(bias_1)
W1 = tf.get_variable("W1", shape=[3, 3], initializer=w_1_init)
b1 = tf.get_variable("b1", shape=[3], initializer=b_1_init)
my_layer1 = perceptron(W1, x, b1, step)

# Layer 2 combines the three indicators:
# G4: out_G1 + out_G2 + 2*out_G3 >= 2
#  -> -out_G1 - out_G2 - 2*out_G3 + 2 <= 0
weights_2 = [[-1], [-1], [-2]]
w_2_init = tf.constant_initializer(weights_2)
bias_2 = [2]
b_2_init = tf.constant_initializer(bias_2)
W2 = tf.get_variable("W2", shape=[3, 1], initializer=w_2_init)
b2 = tf.get_variable("b2", shape=[1, 1], initializer=b_2_init)
my_layer2 = perceptron(W2, my_layer1, b2, step)

session = tf.Session()
init = tf.global_variables_initializer()
session.run(init)
# Feed the single example (1, 1, 1); the original feed-dict literal
# {x:[[1]],[1],[1]} was not valid Python.
out = session.run(my_layer1, {x: [[1, 1, 1]]})
print(out)