
Commit

Merge branch 'main' into ifg/fix_ckpt_match
inafergra authored Jul 3, 2024
2 parents 8723693 + bfd4c4c commit 336a892
Showing 1 changed file with 162 additions and 2 deletions.
164 changes: 162 additions & 2 deletions qadence/model.py
@@ -37,6 +37,36 @@ class QuantumModel(nn.Module):
This class should be used as the base class for any new quantum model supported in the qadence
framework. For information on the implementation of custom models see
[here](../tutorials/advanced_tutorials/custom-models.md).
Example:
```python exec="on" source="material-block" result="json"
import torch
from qadence import QuantumModel, QuantumCircuit, RX, RY, Z, PI, chain, kron
from qadence import FeatureParameter, VariationalParameter
theta = VariationalParameter("theta")
phi = FeatureParameter("phi")
block = chain(
    kron(RX(0, theta), RY(1, theta)),
    kron(RX(0, phi), RY(1, phi)),
)
circuit = QuantumCircuit(2, block)
observable = Z(0) + Z(1)
model = QuantumModel(circuit, observable)
values = {"phi": torch.tensor([PI, PI/2]), "theta": torch.tensor([PI, PI/2])}
wf = model.run(values)
xs = model.sample(values, n_shots=100)
ex = model.expectation(values)
print(wf)
print(xs)
print(ex)
```
"""

backend: Backend | DifferentiableBackend
@@ -120,6 +150,7 @@ def _is_feature_param(p: Parameter) -> bool:

@property
def vparams(self) -> OrderedDict:
"""Variational parameters."""
return OrderedDict({k: v.data for k, v in self._params.items() if v.requires_grad})

@property
@@ -145,9 +176,26 @@ def num_vparams(self) -> int:
return len(self.vals_vparams)

def circuit(self, circuit: QuantumCircuit) -> ConvertedCircuit:
"""Get backend-converted circuit.
Args:
circuit: QuantumCircuit instance.
Returns:
Backend circuit.
"""
return self.backend.circuit(circuit)

def observable(self, observable: AbstractBlock, n_qubits: int) -> Any:
"""Get backend observable.
Args:
observable: Observable block.
n_qubits: Number of qubits.
Returns:
Backend observable.
"""
return self.backend.observable(observable, n_qubits)
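As a minimal sketch of these two conversion helpers (the concrete types returned are backend-dependent; the small 2-qubit model below is a stand-in for the class docstring example above):

```python
from qadence import QuantumModel, QuantumCircuit, RX, Z, FeatureParameter

# Minimal 2-qubit model, mirroring the class docstring example above.
phi = FeatureParameter("phi")
circuit = QuantumCircuit(2, RX(0, phi))
model = QuantumModel(circuit, observable=Z(0))

# Convert the abstract circuit and observable into backend-native objects.
conv_circuit = model.circuit(circuit)
conv_obs = model.observable(Z(0) + Z(1), n_qubits=2)
print(type(conv_circuit), type(conv_obs))
```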

def reset_vparams(self, values: Sequence) -> None:
@@ -161,6 +209,11 @@ def reset_vparams(self, values: Sequence) -> None:
current_vparams[k].data = torch.tensor([values[i]])
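A short sketch of inspecting and resetting variational parameters; the single-parameter model below is illustrative only, and values are assigned positionally to the variational parameters:

```python
from qadence import QuantumModel, QuantumCircuit, RX, Z, VariationalParameter

theta = VariationalParameter("theta")
circuit = QuantumCircuit(1, RX(0, theta))
model = QuantumModel(circuit, observable=Z(0))

print(model.vparams)        # OrderedDict of variational parameter tensors
model.reset_vparams([0.5])  # values are assigned positionally to the vparams
print(model.vparams)
```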

def forward(self, *args: Any, **kwargs: Any) -> Tensor:
"""Calls run method with arguments.
Returns:
Tensor: A torch.Tensor representing output.
"""
return self.run(*args, **kwargs)

def run(
@@ -169,6 +222,21 @@ def run(
state: Tensor | None = None,
endianness: Endianness = Endianness.BIG,
) -> Tensor:
r"""Run model.
Given an input state $| \psi_0 \rangle$,
a set of variational parameters $\vec{\theta}$,
and the unitary representation of the model $U(\vec{\theta})$,
we return $U(\vec{\theta}) | \psi_0 \rangle$.
Arguments:
values: Values dict which contains values for the parameters.
state: Optional input state to apply model on.
endianness: Storage convention for binary information.
Returns:
A torch.Tensor representing output.
"""
if values is None:
values = {}
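A hedged sketch of the optional `state` argument; `product_state` is assumed here to be the qadence helper that builds a product-state vector, and omitting `state` starts from the all-zero state:

```python
import torch
from qadence import PI, QuantumModel, QuantumCircuit, RX, Z, FeatureParameter, product_state

phi = FeatureParameter("phi")
circuit = QuantumCircuit(2, RX(0, phi))
model = QuantumModel(circuit, observable=Z(0))
values = {"phi": torch.tensor([PI / 2])}

wf_default = model.run(values)  # starts from the all-zero state |00>
# `product_state` is assumed here to build the |10> statevector.
wf_custom = model.run(values, state=product_state("10"))
print(wf_default.shape, wf_custom.shape)
```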

Expand All @@ -185,6 +253,19 @@ def sample(
mitigation: Mitigations | None = None,
endianness: Endianness = Endianness.BIG,
) -> list[Counter]:
"""Obtain samples from model.
Arguments:
values: Values dict which contains values for the parameters.
n_shots: Number of shots to sample.
state: Optional input state to apply model on.
noise: A noise model to use.
mitigation: A mitigation protocol to use.
endianness: Storage convention for binary information.
Returns:
A list of Counter instances with the sample results.
"""
params = self.embedding_fn(self._params, values)
if noise is None:
noise = self._noise
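A small sketch of turning the returned `Counter` objects into empirical probabilities; the model below is a minimal stand-in for the class docstring example:

```python
import torch
from qadence import PI, QuantumModel, QuantumCircuit, RX, Z, FeatureParameter

phi = FeatureParameter("phi")
circuit = QuantumCircuit(2, RX(0, phi))
model = QuantumModel(circuit, observable=Z(0))
values = {"phi": torch.tensor([PI / 2])}

n_shots = 500
counters = model.sample(values, n_shots=n_shots)  # one Counter per batch entry
probabilities = {bits: count / n_shots for bits, count in counters[0].items()}
print(probabilities)
```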
@@ -210,7 +291,27 @@ def expectation(
mitigation: Mitigations | None = None,
endianness: Endianness = Endianness.BIG,
) -> Tensor:
"""Compute expectation using the given backend.
r"""Compute expectation using the given backend.
Given an input state $|\psi_0 \rangle$,
a set of variational parameters $\vec{\theta}$,
the unitary representation of the model $U(\vec{\theta})$,
and an observable $\hat{O}$,
we return $\langle \psi_0 | U^\dagger(\vec{\theta}) \, \hat{O} \, U(\vec{\theta}) | \psi_0 \rangle$.
Arguments:
values: Values dict which contains values for the parameters.
observable: Observable part of the expectation.
state: Optional input state.
measurement: Optional measurement protocol. If None, use
exact expectation value with a statevector simulator.
noise: A noise model to use.
mitigation: A mitigation protocol to use.
endianness: Storage convention for binary information.
Raises:
ValueError: when no observable is set.
Returns:
A torch.Tensor of shape n_batches x n_obs
@@ -245,9 +346,22 @@ def expectation(
)
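A brief sketch of the `n_batches x n_obs` output shape described above; passing a list of two observables to the constructor is an assumption based on the list handling visible in `_to_dict` further down:

```python
import torch
from qadence import PI, QuantumModel, QuantumCircuit, RX, Z, FeatureParameter

phi = FeatureParameter("phi")
circuit = QuantumCircuit(2, RX(0, phi))
# Passing a list of observables is assumed to give n_obs = 2 in the output.
model = QuantumModel(circuit, observable=[Z(0), Z(1)])
values = {"phi": torch.tensor([PI, PI / 2])}  # batch of two parameter values

ex = model.expectation(values)
print(ex.shape)  # expected: (n_batches, n_obs), here (2, 2)
```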

def overlap(self) -> Tensor:
"""Overlap of model.
Raises:
NotImplementedError: The overlap method is not implemented for this model.
"""
raise NotImplementedError("The overlap method is not implemented for this model.")

def _to_dict(self, save_params: bool = False) -> dict[str, Any]:
"""Convert QuantumModel to a dictionary for serialization.
Arguments:
save_params: Optionally save parameters. Defaults to False.
Returns:
The dictionary representation of the model.
"""
d = dict()
try:
if isinstance(self._observable, list):
@@ -277,6 +391,15 @@ def _to_dict(self, save_params: bool = False) -> dict[str, Any]:

@classmethod
def _from_dict(cls, d: dict, as_torch: bool = False) -> QuantumModel:
"""Initialize instance of QuantumModel from dictionary.
Args:
d: Dictionary.
as_torch: Load parameters as torch tensors. Defaults to False.
Returns:
The deserialized QuantumModel instance.
"""
from qadence.serialization import deserialize

qm: QuantumModel
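A hedged sketch of the dictionary round-trip these private helpers provide (`load` delegates to `_from_dict`, as the return statement further down shows):

```python
from qadence import QuantumModel, QuantumCircuit, RX, Z, VariationalParameter

theta = VariationalParameter("theta")
circuit = QuantumCircuit(1, RX(0, theta))
model = QuantumModel(circuit, observable=Z(0))

# Serialize to a plain dict (including parameters), then rebuild the model.
d = model._to_dict(save_params=True)
restored = QuantumModel._from_dict(d, as_torch=True)
print(restored.vparams)
```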
@@ -312,6 +435,16 @@ def _from_dict(cls, d: dict, as_torch: bool = False) -> QuantumModel:
def save(
self, folder: str | Path, file_name: str = "quantum_model.pt", save_params: bool = True
) -> None:
"""Save model.
Arguments:
folder: Folder where model is saved.
file_name: File name for saving model. Defaults to "quantum_model.pt".
save_params: Save parameters if True. Defaults to True.
Raises:
FileNotFoundError: If folder is not a directory.
"""
if not os.path.isdir(folder):
raise FileNotFoundError
try:
@@ -323,6 +456,16 @@ def save(
def load(
cls, file_path: str | Path, as_torch: bool = False, map_location: str | torch.device = "cpu"
) -> QuantumModel:
"""Load QuantumModel.
Arguments:
file_path: File path to load model from.
as_torch: Load parameters as torch tensor. Defaults to False.
map_location: Device location for loading. Defaults to "cpu".
Returns:
QuantumModel from file_path.
"""
qm_pt = {}
if isinstance(file_path, str):
file_path = Path(file_path)
@@ -338,11 +481,23 @@ def load(
return cls._from_dict(qm_pt, as_torch)
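A minimal save/load round-trip sketch, assuming write access to a temporary directory:

```python
import tempfile
from pathlib import Path

from qadence import QuantumModel, QuantumCircuit, RX, Z, VariationalParameter

theta = VariationalParameter("theta")
circuit = QuantumCircuit(1, RX(0, theta))
model = QuantumModel(circuit, observable=Z(0))

with tempfile.TemporaryDirectory() as folder:
    model.save(folder, file_name="quantum_model.pt", save_params=True)
    restored = QuantumModel.load(
        Path(folder) / "quantum_model.pt", as_torch=True, map_location="cpu"
    )
print(restored.vparams)
```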

def assign_parameters(self, values: dict[str, Tensor]) -> Any:
"""Return the final, assigned circuit that is used in e.g. `backend.run`."""
"""Return the final, assigned circuit that is used in e.g. `backend.run`.
Arguments:
values: Values dict which contains values for the parameters.
Returns:
Final, assigned circuit that is used in e.g. `backend.run`.
"""
params = self.embedding_fn(self._params, values)
return self.backend.assign_parameters(self._circuit, params)
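A short sketch of retrieving the fully assigned, backend-native circuit; what the returned object looks like depends on the chosen backend:

```python
import torch
from qadence import PI, QuantumModel, QuantumCircuit, RX, Z, FeatureParameter

phi = FeatureParameter("phi")
circuit = QuantumCircuit(1, RX(0, phi))
model = QuantumModel(circuit, observable=Z(0))

assigned = model.assign_parameters({"phi": torch.tensor([PI / 4])})
print(type(assigned))  # backend-specific circuit with parameters bound
```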

def to(self, *args: Any, **kwargs: Any) -> QuantumModel:
"""Conversion method for device or types.
Returns:
QuantumModel with conversions.
"""
from pyqtorch import QuantumCircuit as PyQCircuit

try:
@@ -371,6 +526,11 @@ def to(self, *args: Any, **kwargs: Any) -> QuantumModel:

@property
def device(self) -> torch.device:
"""Get device.
Returns:
torch.device
"""
return (
self._circuit.native.device
if self.backend.backend.name == "pyqtorch" # type: ignore[union-attr]
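A hedged sketch of moving the model and reading back its device via the `device` property documented above; passing a device string, as with `torch.nn.Module.to`, is assumed here:

```python
from qadence import QuantumModel, QuantumCircuit, RX, Z, VariationalParameter

theta = VariationalParameter("theta")
circuit = QuantumCircuit(1, RX(0, theta))
model = QuantumModel(circuit, observable=Z(0))

# Move to CPU explicitly; a CUDA device could be used instead when available.
model = model.to("cpu")
print(model.device)  # torch.device reported by the `device` property
```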
