Hey team.
I’m not sure what the current state of KerasLayer development for Keras 3 is, but I have a temporary workaround that mostly works with the Keras 3 generic backend prototype.
class DataReuploader(Layer):
    """Keras 3 layer that wraps a PennyLane qnode in the backend-specific
    qml.qnn wrapper (``KerasLayer`` for TensorFlow, ``TorchLayer`` for torch)
    and delegates training-variable access and the forward pass to it.

    Args:
        qnode: The PennyLane qnode to wrap.
        qubits: Number of qubits; also the wrapper's output dimension and
            the first dimension of the trainable weight tensor.
        name: Layer name, forwarded to the Keras ``Layer`` base.
        sum: Flag stored on the layer (kept as-is for caller compatibility
            even though it shadows the builtin; serialized as "sum_units").
        weight_specs: Optional per-weight specs dict; defaults to ``{}``.
        **kwargs: Forwarded to ``Layer.__init__``.
    """

    def __init__(self, qnode, qubits, name, sum=False, weight_specs=None, **kwargs):
        # BUG FIX: was `super().__init__(name=name, *kwargs)` — the single
        # star unpacks the kwargs dict's KEYS as positional arguments.
        # Also call super() first so the Layer base is initialized before
        # sub-layers are attached.
        super().__init__(name=name, **kwargs)
        self.qubits = qubits
        self.sum = sum
        self.weight_specs = weight_specs if weight_specs is not None else {}
        weight_shapes = {"weights": (qubits, 2)}
        # BUG FIX: the original wrapped a module-level `circuit` and silently
        # ignored the `qnode` argument; wrap the qnode that was passed in.
        backend = k.backend.backend()
        if backend == "tensorflow":
            self.circ = qml.qnn.KerasLayer(qnode, weight_shapes, output_dim=qubits)
        elif backend == "torch":
            self.circ = qml.qnn.TorchLayer(qnode, weight_shapes)
        else:
            # Fail loudly instead of leaving self.circ undefined and
            # crashing later with an opaque AttributeError in call().
            raise ValueError(f"Unsupported Keras backend for DataReuploader: {backend!r}")
        # Removed `self.name = name`: Layer.name is a read-only property in
        # Keras 3 and is already set via super().__init__(name=name).

    @property
    def trainable_weights(self):
        # Expose the wrapped quantum layer's weights so optimizers see them.
        return self.circ.trainable_weights

    @property
    def trainable_variables(self):
        return self.circ.trainable_variables

    def get_config(self):
        # NOTE(review): TorchLayer may not implement get_config — this path
        # presumably only runs under the TensorFlow backend; verify.
        config = super().get_config()
        config.update(self.circ.get_config())
        config.update({"name": self.name, "sum_units": self.sum})
        return config

    def compute_output_shape(self, input_shape):
        return self.circ.compute_output_shape(input_shape)

    def call(self, x):
        # Forward pass delegates entirely to the wrapped quantum layer.
        return self.circ(x)
Essentially a hacky way of leveraging the existing TF Keras and Torch wrappers for qnodes to extend the functionality to Keras 3.
Currently testing just replacing all the tf.* calls in the source code with keras.ops.* to see if that also works with Keras 3.