Skip to content

Commit

Permalink
Last corrections by Andrea
Browse files Browse the repository at this point in the history
  • Loading branch information
Simone-Bordoni committed Oct 9, 2024
1 parent 0be1e8c commit c01cd33
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 8 deletions.
5 changes: 1 addition & 4 deletions src/qibo/models/circuit.py
Original file line number Diff line number Diff line change
Expand Up @@ -1177,10 +1177,7 @@ def to_qasm(self):

qubits = ",".join(f"q[{i}]" for i in gate.qubits)
if isinstance(gate, gates.ParametrizedGate):
if any(x.__class__.__name__ == "Tensor" for x in gate.parameters):
params = (str(x.detach().item()) for x in gate.parameters)
else:
params = (str(x) for x in gate.parameters)
params = (str(float(x)) for x in gate.parameters)
name = f"{gate.qasm_label}({', '.join(params)})"
else:
name = gate.qasm_label
Expand Down
5 changes: 1 addition & 4 deletions src/qibo/models/error_mitigation.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,6 +330,7 @@ def _curve_fit(
if backend.name == "pytorch":
# pytorch has some problems with the `scipy.optim.curve_fit` function
# thus we use a `torch.optim` optimizer
params.requires_grad = True
loss = lambda pred, target: backend.np.mean((pred - target) ** 2)
optimizer = backend.np.optim.LBFGS(
[params], lr=lr, max_iter=max_iter, tolerance_grad=tolerance_grad
Expand Down Expand Up @@ -431,8 +432,6 @@ def CDR(
len(signature(model).parameters) - 1
) # first arg is the input and the *params afterwards
params = backend.cast(local_state.random(nparams), backend.precision)
if backend.name == "pytorch":
params.requires_grad = True
optimal_params = _curve_fit(
backend,
model,
Expand Down Expand Up @@ -554,8 +553,6 @@ def vnCDR(
-1, len(noise_levels)
)
params = backend.cast(local_state.random(len(noise_levels)), backend.precision)
if backend.name == "pytorch":
params.requires_grad = True
optimal_params = _curve_fit(
backend,
model,
Expand Down

0 comments on commit c01cd33

Please sign in to comment.