The parameters in qnn.KerasLayer cannot be trained

I have a hybrid neural network with the following code:

import pennylane as qml
from pennylane import numpy as np
import tensorflow as tf

n_qubits = 8
layers = 10
data_dimension = 8
dev = qml.device("default.qubit", wires=n_qubits)

@qml.qnode(dev)
def qnode(con_pars, weights_0, weights_1, weights_2, weights_3, weights_4, inputs=None):
    qml.templates.AmplitudeEmbedding(features=inputs, wires=range(n_qubits), normalize=True)

    qml.broadcast(unitary=qml.RY, pattern="single", wires=range(n_qubits), parameters=con_pars[0])
    qml.templates.StronglyEntanglingLayers(weights_0, wires=range(n_qubits))
    print(con_pars[0])

    qml.broadcast(unitary=qml.RY, pattern="single", wires=range(n_qubits), parameters=con_pars[1])
    qml.templates.StronglyEntanglingLayers(weights_1, wires=range(n_qubits))

    qml.broadcast(unitary=qml.RY, pattern="single", wires=range(n_qubits), parameters=con_pars[2])
    qml.templates.StronglyEntanglingLayers(weights_2, wires=range(n_qubits))

    qml.broadcast(unitary=qml.RY, pattern="single", wires=range(n_qubits), parameters=con_pars[3])
    qml.templates.StronglyEntanglingLayers(weights_3, wires=range(n_qubits))

    qml.broadcast(unitary=qml.RY, pattern="single", wires=range(n_qubits), parameters=con_pars[4])
    qml.templates.StronglyEntanglingLayers(weights_4, wires=range(n_qubits))

    return [qml.expval(qml.PauliZ(i)) for i in range(n_qubits)]

con_pars = np.ones((5, 8), requires_grad=True) * 0.2

weight_shapes = {"con_pars": con_pars,
                 "weights_0": (1, n_qubits, 3),
                 "weights_1": (1, n_qubits, 3),
                 "weights_2": (1, n_qubits, 3),
                 "weights_3": (1, n_qubits, 3),
                 "weights_4": (1, n_qubits, 3)}

class RSEModel(tf.keras.Model):
    def __init__(self, init_sigma=0.2, other_sigma=0.15):
        super(RSEModel, self).__init__()

        self.flatten = tf.keras.layers.Flatten(input_shape=[16, 16])
        self.quantum = qml.qnn.KerasLayer(qnode, weight_shapes, output_dim=n_qubits)
        self.fc = tf.keras.layers.Dense(2, activation='softmax')
        
        self.model_layers = [
            self.flatten,
            self.quantum,
            self.fc
        ]
        self.model = tf.keras.Sequential(self.model_layers)

    def call(self, inputs):
        return self.model(inputs)

rsemodel = RSEModel()
opt = tf.keras.optimizers.Adam(learning_rate=0.003)
rsemodel.compile(loss='categorical_crossentropy', optimizer=opt, metrics=["accuracy"])
history = rsemodel.fit(X_train, y_train, epochs=40, batch_size=8)

My goal is to use the entries of con_pars as trainable parameters for the qml.broadcast calls, but I get the following error:

TypeError: Exception encountered when calling layer "sequential_6" (type Sequential).

only integer scalar arrays can be converted to a scalar index

Call arguments received:
• inputs=tf.Tensor(shape=(8, 16, 16), dtype=float32)
• training=True
• mask=None

The code doesn’t report an error when I don’t use con_pars.

I noticed that weight_shapes only accepts shapes (see the sketch below), but I want to supply fixed parameter values at initialization time. What should I do?
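
For reference, a minimal sketch of the shape-only dictionary that qml.qnn.KerasLayer expects; the (5, n_qubits) shape for con_pars is assumed from the np.ones((5, 8)) initialisation above:

# Hypothetical shape-only version: every entry is a shape, not an array of values
weight_shapes = {"con_pars": (5, n_qubits),
                 "weights_0": (1, n_qubits, 3),
                 "weights_1": (1, n_qubits, 3),
                 "weights_2": (1, n_qubits, 3),
                 "weights_3": (1, n_qubits, 3),
                 "weights_4": (1, n_qubits, 3)}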

I have solved my problem: the weight_specs argument of qml.qnn.KerasLayer does what I need.
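
Roughly, the idea is to keep only shapes in weight_shapes (as in the sketch above) and pass the fixed initial values through weight_specs, whose entries are forwarded to the layer's weight creation. A minimal sketch, assuming a constant initializer with the original value 0.2 and reusing qnode, n_qubits and weight_shapes from the code above:

# Hypothetical sketch: initial values via weight_specs, shapes via weight_shapes
weight_specs = {"con_pars": {"initializer": tf.keras.initializers.Constant(0.2)}}

quantum = qml.qnn.KerasLayer(qnode, weight_shapes, output_dim=n_qubits,
                             weight_specs=weight_specs)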

Hi @Cc1, I'm glad you solved your problem! If you can add more details about how you solved it, it can help others in the future too :slight_smile: