Regression example using tf eager


#1

Hi @josh and @nathan,

Could you provide an example of regression using tf eager? Like the examples here: https://pennylane.readthedocs.io/en/latest/code/interfaces/tfe.html

Thanks!

Wei

#2

Hi @cubicgate. You can have a look at some of the TensorFlow examples here: https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/eager/python/examples/linear_regression


#3

Thanks @josh for that example. I wrote a simple program based on it, but it does not work as expected. Could you take a look at it? Thanks!


#5
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from pennylane import numpy as np
tf.enable_eager_execution()


import pennylane as qml

# Two-wire ideal qubit simulator; the QNode below runs on this device.
dev = qml.device('default.qubit', wires=2)

@qml.qnode(dev, interface='tfe')
def circuit(phi, theta, x):
	"""Variational circuit evaluated as a TF-eager QNode.

	Encodes the data point ``x`` as an RX rotation on wire 0, applies
	trainable rotations ``phi[0]``/``phi[1]`` on wires 0/1, entangles the
	wires with a CNOT, phase-shifts wire 0 by ``theta``, and returns the
	expectation value of PauliZ on wire 0 (a scalar in [-1, 1]).

	Gate order matters here — do not reorder these calls.
	"""
	qml.RX(x, wires=0)
	qml.RX(phi[0], wires=0)
	qml.RY(phi[1], wires=1)
	qml.CNOT(wires=[0, 1])
	qml.PhaseShift(theta, wires=0)
	return qml.expval(qml.PauliZ(0))

# Trainable circuit parameters (float64 to match the qubit device's precision).
phi = tfe.Variable([0.5, 0.1], dtype=tf.float64)
theta = tfe.Variable(0.2, dtype=tf.float64)

# Toy regression dataset: inputs X and targets Y.
X = np.array([0.1, 0.2, 0.3])
Y = np.array([0.2, 0.3, 0.4])

def loss(phi, theta, X, Y):
	"""Mean squared error between circuit predictions and targets.

	Args:
		phi, theta: trainable circuit parameters (tfe.Variables).
		X: 1-D array of input data points.
		Y: 1-D array of target values, same length as X.

	Returns:
		A scalar TF tensor: mean of squared prediction errors.

	Note: the original applied ``tf.reduce_mean`` to an already-scalar
	sum, which is a no-op, so it returned the *sum* of squared errors
	despite the mean-style structure. Dividing by the sample count makes
	this a true mean squared error (this only rescales the gradient).
	"""
	total = 0.0
	for x, y in zip(X, Y):
		total = total + tf.square(circuit(phi, theta, x) - y)
	return total / len(X)

# Adam with default hyperparameters (TF1-era optimizer API).
opt = tf.train.AdamOptimizer()
steps = 200

# NOTE(review): tfe.implicit_gradients differentiates `loss` w.r.t. all
# trainable tfe.Variables used inside it (phi and theta here) and returns
# (gradient, variable) pairs suitable for apply_gradients — confirm this
# against the TF 1.x contrib.eager docs; this API was removed in TF 2.
grads = tfe.implicit_gradients(loss)
for i in range(steps):
	opt.apply_gradients(grads(phi, theta, X, Y))
	
# Print the optimized parameter values.
print(phi)
print(theta)