Hey @kamzam! Welcome to the forum
Great question! TorchLayer behaves exactly like a native PyTorch layer, so you can use model.parameters() as usual (or model.named_parameters(), which also tells you which layer each parameter belongs to). Here’s an example:
import pennylane as qml
import torch

n_qubits = 2
dev = qml.device("default.qubit", wires=n_qubits)

# QNode that the TorchLayer will wrap
@qml.qnode(dev)
def qnode(inputs, weights):
    qml.AngleEmbedding(inputs, wires=range(n_qubits))
    qml.BasicEntanglerLayers(weights, wires=range(n_qubits))
    return [qml.expval(qml.PauliZ(wires=i)) for i in range(n_qubits)]

n_layers = 2
weight_shapes = {"weights": (n_layers, n_qubits)}

# convert the QNode into a torch.nn.Module
qlayer = qml.qnn.TorchLayer(qnode, weight_shapes)

clayer_1 = torch.nn.Linear(2, 2)
clayer_2 = torch.nn.Linear(2, 2)
softmax = torch.nn.Softmax(dim=1)
layers = [clayer_1, qlayer, clayer_2, softmax]
model = torch.nn.Sequential(*layers)

for name, param in model.named_parameters():
    print(name, param)

This prints every registered parameter together with the name of the submodule it lives in:
0.weight Parameter containing:
tensor([[ 0.0943, -0.2240],
        [ 0.4012, -0.4323]], requires_grad=True)
0.bias Parameter containing:
tensor([0.5016, 0.2354], requires_grad=True)
1.weights Parameter containing:
tensor([[6.2790, 2.8270],
        [2.4543, 5.2021]], requires_grad=True)
2.weight Parameter containing:
tensor([[ 0.4883, 0.1023],
        [-0.3336, -0.3181]], requires_grad=True)

(clayer_2’s bias prints as well; I’ve left it out here. Each Linear layer contributes a weight and a bias, the TorchLayer contributes the "weights" tensor you declared in weight_shapes, and Softmax has no trainable parameters.)
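Because these are ordinary torch.nn.Parameters, the standard PyTorch training workflow works unchanged. Here’s a minimal sketch of one optimization step; the optimizer, loss, data, and learning rate below are just placeholder choices for illustration:

opt = torch.optim.SGD(model.parameters(), lr=0.2)
loss_fn = torch.nn.L1Loss()

X = torch.rand(4, 2)  # dummy batch: 4 samples, 2 features
y = torch.rand(4, 2)  # dummy targets matching the model's output shape

opt.zero_grad()
loss = loss_fn(model(X), y)
loss.backward()  # gradients flow through the quantum layer like any other
opt.step()

And if you ever want to treat the quantum weights differently (say, give them their own learning rate), you can address the layer by its position in the Sequential container, e.g. model[1].parameters().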
Hope this helps!