From ab76c5fd1d616a147837fa8a54bca9f147027542 Mon Sep 17 00:00:00 2001
From: acse-b99192e1
Date: Wed, 12 Jul 2023 17:42:28 +0200
Subject: [PATCH 01/32] finite & spsr derivatives done

---
 src/qibo/derivative.py     | 639 +++++++++++++++++++++++++++++++++++++
 src/qibo/models/circuit.py | 157 ++++++++-
 tests/test_derivative.py   | 149 ++++++++-
 3 files changed, 933 insertions(+), 12 deletions(-)

diff --git a/src/qibo/derivative.py b/src/qibo/derivative.py
index b13c271867..3f22139443 100644
--- a/src/qibo/derivative.py
+++ b/src/qibo/derivative.py
@@ -1,7 +1,126 @@
+import copy
+import random
+
 import numpy as np
+
+from qibo import gates
+from qibo.backends import GlobalBackend
 from qibo.config import raise_error
 from qibo.hamiltonians.abstract import AbstractHamiltonian
+from qibo.hamiltonians.hamiltonians import SymbolicHamiltonian
+from qibo.models import Circuit
+from qibo.symbols import Z
+
+
+class Parameter:
+    def __init__(self, func, trainablep, featurep=None):
+        self._trainablep = trainablep
+        self._featurep = featurep
+        self.nparams = len(trainablep)
+        self.lambdaf = func
+
+    def _apply_func(self, fixed_params=None):
+        params = []
+        if self._featurep is not None:
+            params.append(self._featurep)
+        if fixed_params:
+            params.extend(fixed_params)
+        else:
+            params.extend(self._trainablep)
+        return self.lambdaf(*params)
+
+    def _update_params(self, trainablep=None, feature=None):
+        if trainablep:
+            self._trainablep = trainablep
+        if feature:
+            self._featurep = feature
+
+    def get_params(self, trainablep=None, feature=None):
+        self._update_params(trainablep=trainablep, feature=feature)
+        return self._apply_func()
+
+    def get_indices(self, start_index):
+        return [start_index + i for i in range(self.nparams)]
+
+    def get_fixed_part(self, trainablep_idx):
+        params = [0] * self.nparams
+        params[trainablep_idx] = self._trainablep[trainablep_idx]
+        return self._apply_func(fixed_params=params)
+
+    def get_scaling_factor(self, trainablep_idx):
+        params = [0] * self.nparams
+        params[trainablep_idx] = 1.0
+        return self._apply_func(fixed_params=params)
+
+
+def calculate_gradients(optimizer, feature):
+    """
+    Full implementation of the parameter-shift rule.
+
+    Args:
+        optimizer: optimizer object holding the target circuit, the number of
+            variational parameters and the gradient options.
+        feature: input feature used to update the circuit parameters.
+
+    Returns:
+        np.array of the observable's gradients with respect to the
+        variational parameters.
+    """
+
+    obs_gradients = np.zeros(optimizer.nparams, dtype=np.float64)
+
+    ham = SymbolicHamiltonian(
+        np.prod([Z(i) for i in range(1)]), backend=GlobalBackend()
+    )
+
+    # parameter shift
+    if optimizer.options["shift_rule"] == "psr":
+        if isinstance(optimizer.paramInputs, np.ndarray):
+            for ipar in range(optimizer.nparams):
+                obs_gradients[ipar] = parameter_shift(
+                    optimizer._circuit,
+                    ham,
+                    ipar,
+                    initial_state=None,
+                    scale_factor=1,
+                    nshots=None,
+                )
+        else:
+            count = 0
+            for ipar, Param in enumerate(optimizer.paramInputs):
+                for nparam in range(Param.nparams):
+                    scaling = Param.get_scaling_factor(nparam)
+
+                    obs_gradients[count] = parameter_shift(
+                        optimizer._circuit,
+                        ham,
+                        ipar,
+                        initial_state=None,
+                        scale_factor=scaling,
+                        nshots=None,
+                    )
+                    count += 1
+
+    # stochastic parameter shift
+    elif optimizer.options["shift_rule"] == "spsr":
+        for ipar, Param in enumerate(optimizer.parameters):
+            ntrainable_params = Param.nparams
+            obs_gradients[ipar : ipar + ntrainable_params] = stochastic_parameter_shift(
+                optimizer._circuit,
+                ham,
+                ipar,
+                parameter=Param,
+                initial_state=None,
+                nshots=None,
+            )
+
+    # finite differences (central difference)
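+    # d<H>/dtheta_i is approximated by [<H>(theta_i + h) - <H>(theta_i - h)] / (2h),
+    # with h the ``step_size`` argument of ``finite_differences`` below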
+    else:
+        for ipar in range(optimizer.nparams):
+            obs_gradients[ipar] = finite_differences(
+                optimizer._circuit,
+                ham,
+                ipar,
+                initial_state=None,
+                nshots=None,
+            )
+
+    return obs_gradients
 
 
 def parameter_shift(
@@ -156,5 +275,525 @@ def circuit(nqubits = 1):
 
     # float() necessary to not return a 0-dim ndarray
     result = float(generator_eigenval * (forward - backward) * scale_factor)
+    return result
+
+
+def generate_new_stochastic_params(Param, ipar):
+    """Generates the three gate parameters needed for the stochastic parameter-shift rule."""
+
+    sampling = random.random()
+    trainable_param = Param._trainablep[ipar]
+    F = Param.get_fixed_part(ipar)
+    scaling = Param.get_scaling_factor(ipar)
+
+    return [sampling * F, trainable_param, (1 - sampling) * F], scaling
+
+
+def stochastic_parameter_shift(
+    circuit,
+    hamiltonian,
+    parameter_index,
+    parameter: Parameter,
+    initial_state=None,
+    nshots=None,
+):
+    """Implements the stochastic parameter-shift rule (SPSR).
+    Given a circuit U and an observable H, the SPSR allows one to calculate the
+    derivative of the expected value of H on the final state with respect to a
+    variational parameter of the circuit. The SPSR can calculate gradient
+    approximations for a larger family of gates than the standard PSR.
+    The scaling factor extracted from ``parameter`` is useful when a circuit's
+    parameter is obtained by combining a variational parameter with an external
+    object, such as a training variable in a Quantum Machine Learning problem.
+    For example, when performing a re-uploading strategy to embed some data
+    into a circuit, we apply rotations to the quantum state whose angles are of
+    the form theta' = theta * x, where theta is a variational parameter and x
+    an input variable. The PSR allows one to calculate the derivative with
+    respect to theta', but if we want to optimize a system with respect to its
+    variational parameters we need to free this procedure from the dependency
+    on x.
+    If the SPSR needs to be executed on a real quantum device, it is important
+    to set `nshots` to some integer value. This enables the execution on the
+    hardware by calling the proper methods.
+
+    Args:
+        circuit (:class:`qibo.models.circuit.Circuit`): custom quantum circuit.
+        hamiltonian (:class:`qibo.hamiltonians.Hamiltonian`): target observable.
+            if you want to execute on hardware, a symbolic hamiltonian must be
+            provided as follows (example with Pauli Z and ``nqubits=1``):
+            ``SymbolicHamiltonian(np.prod([ Z(i) for i in range(1) ]))``.
+        parameter_index (int): the index which identifies the target parameter
+            in the ``circuit.get_parameters()`` list.
+        parameter (:class:`qibo.derivative.Parameter`): object combining the
+            trainable parameters and possible features into the target circuit
+            parameter.
+        initial_state (ndarray, optional): initial state on which the circuit
+            acts. Default is ``None``.
+        nshots (int, optional): number of shots if derivative is evaluated on
+            hardware. If ``None``, the simulation mode is executed.
+            Default is ``None``.
+
+    Returns:
+        (np.ndarray): Values of the derivative of the expectation value of the
+        hamiltonian with respect to each trainable parameter entering the
+        target circuit parameter.
+
+    Example:
+
+        .. testcode::
+
+            import qibo
+            import numpy as np
+            from qibo import hamiltonians, gates
+            from qibo.models import Circuit
+            from qibo.derivative import Parameter, stochastic_parameter_shift
+
+            # defining an observable
+            def hamiltonian(nqubits = 1):
+                m0 = (1/nqubits)*hamiltonians.Z(nqubits).matrix
+                ham = hamiltonians.Hamiltonian(nqubits, m0)
+
+                return ham
+
+            # defining a dummy circuit
+            def circuit(nqubits = 1):
+                c = Circuit(nqubits = 1)
+                c.add(gates.RY(q = 0, theta = 0))
+                c.add(gates.RX(q = 0, theta = 0))
+                c.add(gates.M(0))
+
+                return c
+
+            # initializing the circuit
+            c = circuit(nqubits = 1)
+
+            # some parameters
+            test_params = np.random.randn(2)
+            c.set_parameters(test_params)
+
+            test_hamiltonian = hamiltonian()
+
+            # running the spsr with respect to the two parameters
+            grad_0 = stochastic_parameter_shift(circuit=c, hamiltonian=test_hamiltonian, parameter_index=0, parameter=Parameter(lambda th: th, [test_params[0]]))
+            grad_1 = stochastic_parameter_shift(circuit=c, hamiltonian=test_hamiltonian, parameter_index=1, parameter=Parameter(lambda th: th, [test_params[1]]))
+
+    """
+
+    # input validation
+    if parameter_index >= len(circuit.get_parameters()):
+        raise_error(ValueError, f"Parameter index {parameter_index} is out of bounds.")
+
+    if not isinstance(hamiltonian, AbstractHamiltonian):
+        raise_error(
+            TypeError,
+            "hamiltonian must be a qibo.hamiltonians.Hamiltonian or qibo.hamiltonians.SymbolicHamiltonian object",
+        )
+
+    # inheriting hamiltonian's backend
+    backend = hamiltonian.backend
+
+    # getting the gate which hosts the target parameter
+    gate = circuit.associate_gates_with_parameters()[parameter_index]
+
+    # getting the generator eigenvalue
+    generator_eigenval = gate.generator_eigenvalue()
+
+    # defining the shift according to the psr
+    s = np.pi / (4 * generator_eigenval)
+
+    # saving original parameters and making a copy
+    original = np.asarray(circuit.get_parameters()).copy()
+    shifted = original.copy()
+
+    # adding two copies of the target gate, as required by the SPSR decomposition
+    ancilla_gate = copy.deepcopy(gate)
+    ancilla_gate2 = copy.deepcopy(gate)
+    circuit.add(ancilla_gate, position=parameter_index)
+    circuit.add(ancilla_gate2, position=parameter_index)
+
+    # gradients
+    grads = np.zeros(parameter.nparams)
+
+    # forward shift
+    shifted = np.insert(shifted, parameter_index, [0, 0])
+
+    for iparam in range(parameter.nparams):
+        new_params, scaling = generate_new_stochastic_params(parameter, iparam)
+        new_params[1] += s
+        shifted[parameter_index : parameter_index + 3] = new_params
+
+        circuit.set_parameters(shifted)
+
+        if nshots is None:
+            # forward evaluation
+            forward = hamiltonian.expectation(
+                backend.execute_circuit(
+                    circuit=circuit, initial_state=initial_state
+                ).state()
+            )
+
+            # backward shift and evaluation
+            shifted[parameter_index + 1] -= s * 2
+            circuit.set_parameters(shifted)
+
+            backward = hamiltonian.expectation(
+                backend.execute_circuit(
+                    circuit=circuit, initial_state=initial_state
+                ).state()
+            )
+
+        # same but using expectation from samples
+        else:
+            forward = backend.execute_circuit(
+                circuit=circuit, initial_state=initial_state, nshots=nshots
+            ).expectation_from_samples(hamiltonian)
+
+            shifted[parameter_index + 1] -= s * 2
+            circuit.set_parameters(shifted)
+
+            backward = backend.execute_circuit(
+                circuit=circuit, initial_state=initial_state, nshots=nshots
+            ).expectation_from_samples(hamiltonian)
+
+        # float() necessary to not return a 0-dim ndarray
+        result = float(generator_eigenval * (forward - backward) * scaling)
+        grads[iparam] = result
+
+    # cleanup
+    circuit.remove(ancilla_gate)
+    circuit.remove(ancilla_gate2)
+    circuit.set_parameters(original)
+
+    return grads
+
+
+def finite_differences(
+    circuit,
+    hamiltonian,
+    parameter_index,
+    initial_state=None,
+    step_size=1e-1,
+    nshots=None,
+):
+    """
+    Calculates the derivative of the expectation value of `hamiltonian` on the
+    final state obtained by executing `circuit` on `initial_state`, with
+    respect to the variational parameter identified by `parameter_index` in
+    the circuit's parameters list, using a central finite-difference
+    approximation.
+
+    Args:
+        circuit (:class:`qibo.models.circuit.Circuit`): custom quantum circuit.
+        hamiltonian (:class:`qibo.hamiltonians.Hamiltonian`): target observable.
+            if you want to execute on hardware, a symbolic hamiltonian must be
+            provided as follows (example with Pauli Z and ``nqubits=1``):
+            ``SymbolicHamiltonian(np.prod([ Z(i) for i in range(1) ]))``.
+        parameter_index (int): the index which identifies the target parameter
+            in the ``circuit.get_parameters()`` list.
+        initial_state (ndarray, optional): initial state on which the circuit
+            acts. Default is ``None``.
+        step_size (float): step size used to evaluate the finite difference.
+            Default is ``1e-1``.
+        nshots (int, optional): number of shots if the derivative is evaluated
+            on hardware. If ``None``, the expectation values are computed
+            exactly in simulation mode. Default is ``None``.
+
+    Returns:
+        (float): Value of the derivative of the expectation value of the hamiltonian
+        with respect to the target variational parameter.
+    """
+
+    if parameter_index >= len(circuit.get_parameters()):
+        raise_error(ValueError, f"Index {parameter_index} is out of bounds.")
+
+    if not isinstance(hamiltonian, AbstractHamiltonian):
+        raise_error(
+            TypeError,
+            "hamiltonian must be a qibo.hamiltonians.Hamiltonian or qibo.hamiltonians.SymbolicHamiltonian object",
+        )
+
+    backend = hamiltonian.backend
+
+    # parameters copies
+    parameters = np.asarray(circuit.get_parameters()).copy()
+    shifted = parameters.copy()
+
+    # shift the parameter_index element
+    shifted[parameter_index] += step_size
+    circuit.set_parameters(shifted)
+
+    if nshots is None:
+        # forward evaluation
+        forward = hamiltonian.expectation(
+            backend.execute_circuit(
+                circuit=circuit, initial_state=initial_state
+            ).state()
+        )
+
+        # backward shift and evaluation
+        shifted[parameter_index] -= 2 * step_size
+        circuit.set_parameters(shifted)
+
+        backward = hamiltonian.expectation(
+            backend.execute_circuit(
+                circuit=circuit, initial_state=initial_state
+            ).state()
+        )
+
+    # same but using expectation from samples
+    else:
+        forward = backend.execute_circuit(
+            circuit=circuit, initial_state=initial_state, nshots=nshots
+        ).expectation_from_samples(hamiltonian)
+
+        shifted[parameter_index] -= 2 * step_size
+        circuit.set_parameters(shifted)
+
+        backward = backend.execute_circuit(
+            circuit=circuit, initial_state=initial_state, nshots=nshots
+        ).expectation_from_samples(hamiltonian)
+
+    circuit.set_parameters(parameters)
+
+    result = (forward - backward) / (2 * step_size)
+
+    return result
+
+
+##################################################################################################
+### Natural Gradient
+##################################################################################################
+
+
+class Node:
+    """Parent class to create gate nodes"""
+
+    def __init__(self, gate, trainable_params, gate_params):
+        self.gate = gate
+        self.trainable_params = trainable_params  # indices of optimisable parameters
+        self.gate_params = gate_params  # gate parameters
+        self.prev = None
+        self.next = None
+
+
+class ConvergeNode(Node):
+    """Node for two-qubit gates"""
+
+    def __init__(self, gate, trainable_params, gate_params):
+        super().__init__(gate, trainable_params, gate_params)
+        self.prev_target = None
+        self.next_target = None
+        self.waiting = None
+
+
+class Graph:
+    """Creates a graph representation of a circuit"""
+
+    def __init__(self, nqubits, gates,
trainable_params, gate_params): + self.gates = gates + self.trainable_params = trainable_params + self.gate_params = gate_params + self.nqubits = nqubits + + def build_graph(self): + """ + Builds graph based on the circuit gates and associates parameters to each gate. + """ + + # setup + start = [None] * self.nqubits + ends = [None] * self.nqubits + depth = [0] * self.nqubits + nodes = list() + + count = 0 + # run through each gate in circuit queue + for i, gate in enumerate(self.gates): + n = len(gate.init_args) - 1 + + # store parameters for ParametrizedGate + if isinstance(gate, gates.ParametrizedGate): + trainp = self.trainable_params[count] + gatep = self.gate_params[count] + count += 1 + else: + trainp = None + gatep = None + + # two-qubit gates + if n == 1: + node = ConvergeNode(gate, trainp, gatep) + control = gate.init_args[0] + target = gate.init_args[1] + + # control qubit + # start of graph + if start[control] is None: + start[control] = i + ends[control] = i + # link to existing graph node + else: + nodes[ends[control]].next = i + node.prev = ends[control] + ends[control] = i + + # target qubit + # start of graph + if start[target] is None: + start[target] = i + ends[target] = i + # link to existing graph node + else: + nodes[ends[target]].next = i + node.prev = ends[target] + ends[target] = i + + depth[control] += 1 + depth[target] += 1 + + # one-qubit gate + else: + node = Node(gate, trainp, gatep) + qubit = gate.init_args[0] + + # start of graph + if start[qubit] is None: + start[qubit] = i + ends[qubit] = i + # link to existing graph node + else: + nodes[ends[qubit]].next = i + node.prev = ends[qubit] + ends[qubit] = i + + depth[qubit] += 1 + + # add node to list + nodes.append(node) + + self.start = start + self.end = ends + self.nodes = nodes + self.depth = max(depth) + + def _determine_basis(self, gate): + gname = gate.name + + if gname == "rx": + return gates.X + elif gname == "ry": + return gates.Y + else: + return gates.Z + + def run_layer(self, layer): + """Runs through one layer of the circuit parameters + Args: + layer: int, layer number N + Returns: + c: circuit up to nth layer + trainable_qubits: qubits on which we find trainable gates + affected_params: which trainable parameters are linked to the trainable gates + """ + + # empty circuit + c = Circuit(self.nqubits, density_matrix=True) + + current = self.start[:] + + trainable_qubits = [] + affected_params = [] + + # run through layer up to N + for iter in range(layer + 1): + # run through all qubits + for q in range(self.nqubits): + node = self.nodes[current[q]] + + # wait for both qubits to reach two-qubit node + if isinstance(node, ConvergeNode): + # first arrived + if node.waiting is None: + node.waiting = q + # second arrived + elif node.waiting != q: + c.add(node.gate) + control = node.gate.init_args[0] + target = node.gate.init_args[1] + current[control] = node.next + current[target] = node.next_target + node.waiting = None + + # replace last layer by M gate + elif iter == layer and isinstance(node.gate, gates.ParametrizedGate): + c.add(gates.M(q, basis=self._determine_basis(node.gate))) + trainable_qubits.append(q) + affected_params.append(node.trainable_params) + + # simple one-qubit node + else: + c.add(node.gate) + if node.next: + current[q] = node.next + + return c, trainable_qubits, affected_params + + +def generate_fubini(optimizer, feature, method="variance"): + """Generate the Fubini-Study metric tensor""" + + fubini = np.zeros((optimizer.nparams, optimizer.nparams)) + original = 
optimizer.params.copy() + + if method == "hessian": + shifted = optimizer.params.copy() + + phi = optimizer.retrieve_state(feature) + + for i in range(optimizer.nparams): + if i % 2 == 0: + factor = feature + else: + factor = 1 + shifted[i] = optimizer.forward_diff( + original=original[i], factor=factor, param=np.pi + ) + optimizer.set_parameters(shifted) + phi_prime = optimizer.retrieve_state(feature) + + optimizer.set_parameters(original) + fubini[i, i] = 1 / 4 * (1 - (np.abs(np.dot(phi, phi_prime))) ** 2) + + elif method == "variance": + # trainable and gate parameters + gate_params = optimizer._circuit.associate_gates_with_parameters() + + if isinstance(optimizer.paramInputs, list): + trainable_params = [] + count = 0 + for Param in optimizer.paramInputs: + indices = Param.get_indices(count) + count += len(indices) + trainable_params.append(indices) + else: + trainable_params = [[i] for i in range(optimizer.nparams)] + # build graph from circuit gates + graph = Graph( + optimizer.nqubits, optimizer._circuit.queue, trainable_params, gate_params + ) + graph.build_graph() + + # run through layers + for i in range(graph.depth): + c, qubits, affected_param = graph.run_layer(i) + if len(qubits) == 0: + continue + + state = c().state() + + # run through parametrized gate + for qubit, params in zip(qubits, affected_param): + hamiltonian = optimizer.create_hamiltonian(qubit, optimizer.nqubits) + + result = hamiltonian.expectation(state) + + for p in params: + # update Fubini-Study matrix + fubini[p, p] = result - result**2 + + return fubini diff --git a/src/qibo/models/circuit.py b/src/qibo/models/circuit.py index 94b915d9ec..75a41e1e68 100644 --- a/src/qibo/models/circuit.py +++ b/src/qibo/models/circuit.py @@ -28,6 +28,16 @@ def append(self, gate): self.set.add(gate) self.nparams += gate.nparams + def insert(self, pos, gate): + super().insert(pos, gate) + self.set.add(gate) + self.nparams += gate.nparams + + def remove(self, gate): + super().remove(gate) + self.set.remove(gate) + self.nparams -= gate.nparams + class _Queue(list): """List that holds the queue of gates of a circuit. @@ -99,6 +109,44 @@ def append(self, gate: gates.Gate): self.moments[idx][q] = gate self.moment_index[q] = idx + 1 + def insert(self, pos, gate: gates.Gate): + super().insert(pos, gate) + if gate.qubits: + qubits = gate.qubits + else: # special gate acting on all qubits + qubits = tuple(range(self.nqubits)) + + if isinstance(gate, gates.M): + self.nmeasurements += 1 + + # calculate moment index for this gate + idx = max(self.moment_index[q] for q in qubits) + for q in qubits: + if idx >= len(self.moments): + # Add a moment + self.moments.insert(pos, len(self.moments[-1]) * [None]) + self.moments[pos][q] = gate + self.moment_index[q] = idx + 1 + + def remove(self, gate): + pos = super().index(gate) + super().remove(gate) + + if isinstance(gate, gates.M): + self.nmeasurements -= 1 + + if gate.qubits: + qubits = gate.qubits + else: # special gate acting on all qubits + qubits = tuple(range(self.nqubits)) + + # calculate moment index for this gate + for q in qubits: + self.moment_index[q] -= 1 + del self.moments[pos] + + return pos + class Circuit: """Circuit object which holds a list of gates. @@ -438,6 +486,26 @@ def decompose(self, *free: int): decomp_circuit.add(gate.decompose(*free)) return decomp_circuit + def to_clifford(self): + """Translate a circuit into an equivalent one composed of only Clifford gates. + + In `Qibo` we refers to [``X``, ``CNOT``, ``RX(pi/2)``, ``RZ(theta)``] as + Clifford gates. 
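+        Gates that are already Clifford are kept unchanged; any other gate is
+        replaced by the decomposition returned by its ``decompose_into_clifford``
+        method.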
+ + Returns: + Circuit object containing only Clifford gates. + """ + + clifford_circuit = self.__class__(**self.init_kwargs) + # cycle on gates replacing non-clifford with clifford + for gate in self.queue: + if gate.is_clifford(): + clifford_circuit.add(gate) + else: + clifford_circuit.add(gate.decompose_into_clifford()) + + return clifford_circuit + def with_pauli_noise(self, noise_map: NoiseMapType): """Creates a copy of the circuit with Pauli noise gates after each gate. @@ -512,7 +580,7 @@ def with_pauli_noise(self, noise_map: NoiseMapType): noisy_circuit.add(noise_gate) return noisy_circuit - def add(self, gate): + def add(self, gate, position=-1): """Add a gate to a given queue. Args: @@ -528,7 +596,11 @@ def add(self, gate): """ if isinstance(gate, collections.abc.Iterable): for g in gate: - self.add(g) + if position >= 0: + self.add(g, position) + position += 1 + else: + self.add(g) else: if self.accelerators: # pragma: no cover @@ -549,12 +621,18 @@ def add(self, gate): if not isinstance(gate, gates.Gate): raise_error(TypeError, f"Unknown gate type {type(gate)}.") - if self._final_state is not None: + if self._final_state is not None and position == -1: raise_error( RuntimeError, "Cannot add gates to a circuit after it is executed.", ) + if isinstance(gate, gates.M) and position >= 0: + raise_error( + RuntimeError, + "Cannot add Measurement gate at a specific location.", + ) + for q in gate.target_qubits: if q >= self.nqubits: raise_error( @@ -594,19 +672,78 @@ def add(self, gate): return gate.result else: - self.queue.append(gate) + if position >= 0: + self.queue.insert(position, gate) + else: + self.queue.append(gate) + + for measurement in list(self.measurements): + if set(measurement.qubits) & set(gate.qubits): + measurement.collapse = False + self.repeated_execution = False + self.measurements.remove(measurement) + + if isinstance(gate, gates.UnitaryChannel): + self.repeated_execution = not self.density_matrix + if isinstance(gate, gates.ParametrizedGate): + if position >= 0: + param_loc = 0 + trainable_loc = 0 + for g in self.queue[:position]: + if isinstance(g, gates.ParametrizedGate): + param_loc += 1 + if g.trainable: + trainable_loc += 1 + + self.parametrized_gates.insert(param_loc, gate) + if gate.trainable: + self.trainable_gates.insert(trainable_loc, gate) + + else: + self.parametrized_gates.append(gate) + if gate.trainable: + self.trainable_gates.append(gate) + + def remove(self, gate, replacement_gates=[]): + if isinstance(gate, list): + for g in gate: + self.remove(g) + + else: + if isinstance(gate, gates.M): + # The following loop is useful when two circuits are added together: + # all the gates in the basis of the measure gates should not + # be added to the new circuit, otherwise once the measure gate is added in the circuit + # there will be two of the same. 
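+                # For the same reason, the basis gates are removed here
+                # together with their measurement gate.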
+ + for base in gate.basis: + if base not in self.queue: + self.remove(base) + + pos = self.queue.remove(gate) + + if gate.collapse: + self.repeated_execution = False + else: + self.measurements.remove(gate) + + else: + pos = self.queue.remove(gate) for measurement in list(self.measurements): - if set(measurement.qubits) & set(gate.qubits): - measurement.collapse = True - self.repeated_execution = True + if not bool(set(measurement.qubits) & set(gate.qubits)): + measurement.collapse = False + self.repeated_execution = False self.measurements.remove(measurement) if isinstance(gate, gates.UnitaryChannel): - self.repeated_execution = not self.density_matrix + self.repeated_execution = self.density_matrix if isinstance(gate, gates.ParametrizedGate): - self.parametrized_gates.append(gate) + self.parametrized_gates.remove(gate) if gate.trainable: - self.trainable_gates.append(gate) + self.trainable_gates.remove(gate) + + for rep_gate in replacement_gates: + self.add(rep_gate, pos) @property def measurement_tuples(self): diff --git a/tests/test_derivative.py b/tests/test_derivative.py index 2a6a1a20d9..dd1515260c 100644 --- a/tests/test_derivative.py +++ b/tests/test_derivative.py @@ -1,14 +1,24 @@ import numpy as np import pytest +import tensorflow as tf +import qibo from qibo import gates, hamiltonians -from qibo.derivative import parameter_shift +from qibo.backends import GlobalBackend +from qibo.derivative import ( + Parameter, + finite_differences, + parameter_shift, + stochastic_parameter_shift, +) from qibo.models import Circuit from qibo.symbols import Z +qibo.set_backend("tensorflow") + # defining an observable -def hamiltonian(nqubits, backend): +def hamiltonian(nqubits, backend=GlobalBackend()): return hamiltonians.hamiltonians.SymbolicHamiltonian( np.prod([Z(i) for i in range(nqubits)]), backend=backend ) @@ -83,3 +93,138 @@ def test_derivative(backend, nshots, atol, scale_factor, grads): backend.assert_allclose(grad_0, grads[0], atol=atol) backend.assert_allclose(grad_1, grads[1], atol=atol) backend.assert_allclose(grad_2, grads[2], atol=atol) + + +def hamiltonian2(nqubits): + return hamiltonians.hamiltonians.SymbolicHamiltonian( + np.prod([Z(i) for i in range(nqubits)]), backend=GlobalBackend() + ) + + +def gradient_exact(): + backend = GlobalBackend() + + test_params = tf.Variable(np.linspace(0.1, 1, 3)) + + with tf.GradientTape() as tape: + c = circuit(nqubits=1) + c.set_parameters(test_params) + + ham = hamiltonian2(1) + results = ham.expectation( + backend.execute_circuit(circuit=c, initial_state=None).state() + ) + + gradients = tape.gradient(results, test_params) + + return gradients + + +@pytest.mark.parametrize("nshots, atol", [(None, 1e-1), (100000, 1e-1)]) +def test_finite_differences(backend, nshots, atol): + # exact gradients + grads = gradient_exact() + + # initializing the circuit + c = circuit(nqubits=1) + + # some parameters + # we know the derivative's values with these params + test_params = np.linspace(0.1, 1, 3) + c.set_parameters(test_params) + + test_hamiltonian = hamiltonian(nqubits=1) + + # testing parameter out of bounds + with pytest.raises(ValueError): + grad_0 = finite_differences( + circuit=c, hamiltonian=test_hamiltonian, parameter_index=5 + ) + + # testing hamiltonian type + with pytest.raises(TypeError): + grad_0 = finite_differences( + circuit=c, hamiltonian=c, parameter_index=0, nshots=nshots + ) + + # executing all the procedure + grad_0 = finite_differences( + circuit=c, + hamiltonian=test_hamiltonian, + parameter_index=0, + nshots=nshots, + ) + 
grad_1 = finite_differences( + circuit=c, + hamiltonian=test_hamiltonian, + parameter_index=1, + nshots=nshots, + ) + grad_2 = finite_differences( + circuit=c, + hamiltonian=test_hamiltonian, + parameter_index=2, + nshots=nshots, + ) + + # check of known values + # calculated using tf.GradientTape + backend.assert_allclose(grad_0, grads[0], atol=atol) + backend.assert_allclose(grad_1, grads[1], atol=atol) + backend.assert_allclose(grad_2, grads[2], atol=atol) + + +@pytest.mark.parametrize("nshots, atol", [(None, 1e-1), (1024, 1e-1)]) +def test_spsr(backend, nshots, atol): + # exact gradients + grads = gradient_exact() + + # initializing the circuit + c = circuit(nqubits=1) + + # some parameters + # we know the derivative's values with these params + test_params = [] + param_values = np.linspace(0.1, 1, 3) * 0.5 + + for i in range(3): + test_params.append( + Parameter(lambda th1, th2: th1 + th2, [param_values[i], param_values[i]]) + ) + + parameter_values = [param.get_params() for param in test_params] + print(parameter_values) + c.set_parameters(parameter_values) + + test_hamiltonian = hamiltonian(nqubits=1, backend=GlobalBackend()) + + # testing parameter out of bounds + with pytest.raises(ValueError): + grad_0 = stochastic_parameter_shift( + circuit=c, + hamiltonian=test_hamiltonian, + parameter_index=5, + parameter=Parameter(lambda th: 2 * th, [0.1]), + ) + + # testing hamiltonian type + with pytest.raises(TypeError): + grad_0 = stochastic_parameter_shift( + circuit=c, + hamiltonian=c, + parameter_index=0, + parameter=Parameter(lambda th: 2 * th, [0.1]), + nshots=nshots, + ) + + # executing all the procedure + grads_0 = stochastic_parameter_shift(c, test_hamiltonian, 0, test_params[0])[0] + grads_1 = stochastic_parameter_shift(c, test_hamiltonian, 1, test_params[1])[0] + grads_2 = stochastic_parameter_shift(c, test_hamiltonian, 2, test_params[2])[0] + + print(grads_0, grads_1, grads_2, grads) + + # check of known values + backend.assert_allclose(grads_0, grads[0], atol=atol) + backend.assert_allclose(grads_1, grads[1], atol=atol) + backend.assert_allclose(grads_2, grads[2], atol=atol) From b491e6da008d84a34069753c6f6c1e7201bcccbd Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Thu, 13 Jul 2023 10:20:49 +0200 Subject: [PATCH 02/32] removed superfluous functions for PR --- src/qibo/derivative.py | 244 ----------------------------------------- 1 file changed, 244 deletions(-) diff --git a/src/qibo/derivative.py b/src/qibo/derivative.py index 3f22139443..0ddccf91b9 100644 --- a/src/qibo/derivative.py +++ b/src/qibo/derivative.py @@ -553,247 +553,3 @@ def finite_differences( result = (forward - backward) / (2 * step_size) return result - - -################################################################################################## -### Natural Gradient -################################################################################################## - - -class Node: - """Parent class to create gate nodes""" - - def __init__(self, gate, trainable_params, gate_params): - self.gate = gate - self.trainable_params = trainable_params # index of optimisable parameters - self.gate_params = gate_params # gate parameters - self.prev = None - self.next = None - - -class ConvergeNode(Node): - """Node for two-qubit gates""" - - def __init__(self, gate, trainable_params, gate_params): - super().__init__(gate, trainable_params, gate_params) - self.prev_target = None - self.next_target = None - self.waiting = None - - -class Graph: - """Creates a graph representation of a circuit""" - - def 
__init__(self, nqubits, gates, trainable_params, gate_params): - self.gates = gates - self.trainable_params = trainable_params - self.gate_params = gate_params - self.nqubits = nqubits - - def build_graph(self): - """ - Builds graph based on the circuit gates and associates parameters to each gate. - """ - - # setup - start = [None] * self.nqubits - ends = [None] * self.nqubits - depth = [0] * self.nqubits - nodes = list() - - count = 0 - # run through each gate in circuit queue - for i, gate in enumerate(self.gates): - n = len(gate.init_args) - 1 - - # store parameters for ParametrizedGate - if isinstance(gate, gates.ParametrizedGate): - trainp = self.trainable_params[count] - gatep = self.gate_params[count] - count += 1 - else: - trainp = None - gatep = None - - # two-qubit gates - if n == 1: - node = ConvergeNode(gate, trainp, gatep) - control = gate.init_args[0] - target = gate.init_args[1] - - # control qubit - # start of graph - if start[control] is None: - start[control] = i - ends[control] = i - # link to existing graph node - else: - nodes[ends[control]].next = i - node.prev = ends[control] - ends[control] = i - - # target qubit - # start of graph - if start[target] is None: - start[target] = i - ends[target] = i - # link to existing graph node - else: - nodes[ends[target]].next = i - node.prev = ends[target] - ends[target] = i - - depth[control] += 1 - depth[target] += 1 - - # one-qubit gate - else: - node = Node(gate, trainp, gatep) - qubit = gate.init_args[0] - - # start of graph - if start[qubit] is None: - start[qubit] = i - ends[qubit] = i - # link to existing graph node - else: - nodes[ends[qubit]].next = i - node.prev = ends[qubit] - ends[qubit] = i - - depth[qubit] += 1 - - # add node to list - nodes.append(node) - - self.start = start - self.end = ends - self.nodes = nodes - self.depth = max(depth) - - def _determine_basis(self, gate): - gname = gate.name - - if gname == "rx": - return gates.X - elif gname == "ry": - return gates.Y - else: - return gates.Z - - def run_layer(self, layer): - """Runs through one layer of the circuit parameters - Args: - layer: int, layer number N - Returns: - c: circuit up to nth layer - trainable_qubits: qubits on which we find trainable gates - affected_params: which trainable parameters are linked to the trainable gates - """ - - # empty circuit - c = Circuit(self.nqubits, density_matrix=True) - - current = self.start[:] - - trainable_qubits = [] - affected_params = [] - - # run through layer up to N - for iter in range(layer + 1): - # run through all qubits - for q in range(self.nqubits): - node = self.nodes[current[q]] - - # wait for both qubits to reach two-qubit node - if isinstance(node, ConvergeNode): - # first arrived - if node.waiting is None: - node.waiting = q - # second arrived - elif node.waiting != q: - c.add(node.gate) - control = node.gate.init_args[0] - target = node.gate.init_args[1] - current[control] = node.next - current[target] = node.next_target - node.waiting = None - - # replace last layer by M gate - elif iter == layer and isinstance(node.gate, gates.ParametrizedGate): - c.add(gates.M(q, basis=self._determine_basis(node.gate))) - trainable_qubits.append(q) - affected_params.append(node.trainable_params) - - # simple one-qubit node - else: - c.add(node.gate) - if node.next: - current[q] = node.next - - return c, trainable_qubits, affected_params - - -def generate_fubini(optimizer, feature, method="variance"): - """Generate the Fubini-Study metric tensor""" - - fubini = np.zeros((optimizer.nparams, 
optimizer.nparams)) - original = optimizer.params.copy() - - if method == "hessian": - shifted = optimizer.params.copy() - - phi = optimizer.retrieve_state(feature) - - for i in range(optimizer.nparams): - if i % 2 == 0: - factor = feature - else: - factor = 1 - shifted[i] = optimizer.forward_diff( - original=original[i], factor=factor, param=np.pi - ) - optimizer.set_parameters(shifted) - phi_prime = optimizer.retrieve_state(feature) - - optimizer.set_parameters(original) - fubini[i, i] = 1 / 4 * (1 - (np.abs(np.dot(phi, phi_prime))) ** 2) - - elif method == "variance": - # trainable and gate parameters - gate_params = optimizer._circuit.associate_gates_with_parameters() - - if isinstance(optimizer.paramInputs, list): - trainable_params = [] - count = 0 - for Param in optimizer.paramInputs: - indices = Param.get_indices(count) - count += len(indices) - trainable_params.append(indices) - else: - trainable_params = [[i] for i in range(optimizer.nparams)] - # build graph from circuit gates - graph = Graph( - optimizer.nqubits, optimizer._circuit.queue, trainable_params, gate_params - ) - graph.build_graph() - - # run through layers - for i in range(graph.depth): - c, qubits, affected_param = graph.run_layer(i) - if len(qubits) == 0: - continue - - state = c().state() - - # run through parametrized gate - for qubit, params in zip(qubits, affected_param): - hamiltonian = optimizer.create_hamiltonian(qubit, optimizer.nqubits) - - result = hamiltonian.expectation(state) - - for p in params: - # update Fubini-Study matrix - fubini[p, p] = result - result**2 - - return fubini From 1b51e48f3f846914c3046f956456e869837a3127 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Thu, 13 Jul 2023 10:24:26 +0200 Subject: [PATCH 03/32] removed functions --- tests/test_derivative.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/tests/test_derivative.py b/tests/test_derivative.py index dd1515260c..0d40b33f7b 100644 --- a/tests/test_derivative.py +++ b/tests/test_derivative.py @@ -95,12 +95,6 @@ def test_derivative(backend, nshots, atol, scale_factor, grads): backend.assert_allclose(grad_2, grads[2], atol=atol) -def hamiltonian2(nqubits): - return hamiltonians.hamiltonians.SymbolicHamiltonian( - np.prod([Z(i) for i in range(nqubits)]), backend=GlobalBackend() - ) - - def gradient_exact(): backend = GlobalBackend() @@ -110,7 +104,7 @@ def gradient_exact(): c = circuit(nqubits=1) c.set_parameters(test_params) - ham = hamiltonian2(1) + ham = hamiltonian(1) results = ham.expectation( backend.execute_circuit(circuit=c, initial_state=None).state() ) From 44201c9e4c07de2784dc1de43c493e65483fba7c Mon Sep 17 00:00:00 2001 From: MatteoRobbiati Date: Mon, 31 Jul 2023 16:54:43 +0200 Subject: [PATCH 04/32] rm parameter class from derivative module --- src/qibo/derivative.py | 46 ++---------------------------------------- 1 file changed, 2 insertions(+), 44 deletions(-) diff --git a/src/qibo/derivative.py b/src/qibo/derivative.py index 0ddccf91b9..c2bb226149 100644 --- a/src/qibo/derivative.py +++ b/src/qibo/derivative.py @@ -3,56 +3,14 @@ import numpy as np -from qibo import gates from qibo.backends import GlobalBackend from qibo.config import raise_error from qibo.hamiltonians.abstract import AbstractHamiltonian from qibo.hamiltonians.hamiltonians import SymbolicHamiltonian -from qibo.models import Circuit +from qibo.models.parameter import Parameter from qibo.symbols import Z -class Parameter: - def __init__(self, func, trainablep, featurep=None): - self._trainablep = trainablep - self._featurep 
= featurep - self.nparams = len(trainablep) - self.lambdaf = func - - def _apply_func(self, fixed_params=None): - params = [] - if self._featurep is not None: - params.append(self._featurep) - if fixed_params: - params.extend(fixed_params) - else: - params.extend(self._trainablep) - return self.lambdaf(*params) - - def _update_params(self, trainablep=None, feature=None): - if trainablep: - self._trainablep = trainablep - if feature: - self._featurep = feature - - def get_params(self, trainablep=None, feature=None): - self._update_params(trainablep=trainablep, feature=feature) - return self._apply_func() - - def get_indices(self, start_index): - return [start_index + i for i in range(self.nparams)] - - def get_fixed_part(self, trainablep_idx): - params = [0] * self.nparams - params[trainablep_idx] = self._trainablep[trainablep_idx] - return self._apply_func(fixed_params=params) - - def get_scaling_factor(self, trainablep_idx): - params = [0] * self.nparams - params[trainablep_idx] = 1.0 - return self._apply_func(fixed_params=params) - - def calculate_gradients(optimizer, feature): """ Full parameter-shift rule's implementation @@ -282,7 +240,7 @@ def generate_new_stochastic_params(Param, ipar): """Generates the three-gate parameters needed for the stochastic parameter-shift rule""" sampling = random.random() - trainable_param = Param._trainablep[ipar] + trainable_param = Param._variational_parameters[ipar] F = Param.get_fixed_part(ipar) scaling = Param.get_scaling_factor(ipar) From fdc93da931ad2bf153eed13efb4bee3ba4342c15 Mon Sep 17 00:00:00 2001 From: MatteoRobbiati Date: Mon, 31 Jul 2023 16:55:09 +0200 Subject: [PATCH 05/32] fix tests with new params location --- tests/test_derivative.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_derivative.py b/tests/test_derivative.py index 0d40b33f7b..f7285c1cb4 100644 --- a/tests/test_derivative.py +++ b/tests/test_derivative.py @@ -6,12 +6,12 @@ from qibo import gates, hamiltonians from qibo.backends import GlobalBackend from qibo.derivative import ( - Parameter, finite_differences, parameter_shift, stochastic_parameter_shift, ) from qibo.models import Circuit +from qibo.models.parameter import Parameter from qibo.symbols import Z qibo.set_backend("tensorflow") From 1abd488ec02e3a7c29d7e35c3630b8d3b630ea31 Mon Sep 17 00:00:00 2001 From: MatteoRobbiati Date: Mon, 31 Jul 2023 16:55:39 +0200 Subject: [PATCH 06/32] create parameter model --- src/qibo/models/parameter.py | 42 ++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 src/qibo/models/parameter.py diff --git a/src/qibo/models/parameter.py b/src/qibo/models/parameter.py new file mode 100644 index 0000000000..7b3c4a92d9 --- /dev/null +++ b/src/qibo/models/parameter.py @@ -0,0 +1,42 @@ +"""Model for combining trainable parameters and possible features into circuit parameters.""" + + +class Parameter: + def __init__(self, func, variational_parameters, features=None): + self._variational_parameters = variational_parameters + self._featurep = features + self.nparams = len(variational_parameters) + self.lambdaf = func + + def _apply_func(self, fixed_params=None): + params = [] + if self._featurep is not None: + params.append(self._featurep) + if fixed_params: + params.extend(fixed_params) + else: + params.extend(self._variational_parameters) + return self.lambdaf(*params) + + def _update_params(self, trainablep=None, feature=None): + if trainablep: + self._variational_parameters = trainablep + if feature: + self._featurep = feature + 
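+    # Example with illustrative values: for
+    #     p = Parameter(lambda x, th: x * th, [0.3], features=0.7)
+    # p.get_params() returns 0.7 * 0.3 = 0.21, while p.get_scaling_factor(0)
+    # returns 0.7, i.e. the derivative of the gate parameter w.r.t. th.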
+ def get_params(self, trainablep=None, feature=None): + self._update_params(trainablep=trainablep, feature=feature) + return self._apply_func() + + def get_indices(self, start_index): + return [start_index + i for i in range(self.nparams)] + + def get_fixed_part(self, trainablep_idx): + params = [0] * self.nparams + params[trainablep_idx] = self._variational_parameters[trainablep_idx] + return self._apply_func(fixed_params=params) + + def get_scaling_factor(self, trainablep_idx): + params = [0] * self.nparams + params[trainablep_idx] = 1.0 + return self._apply_func(fixed_params=params) From e5816fb7504f06662ab341b21a685a2298c5e7f1 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 16 Aug 2023 16:30:26 +0200 Subject: [PATCH 07/32] ready for PR --- src/qibo/gates/gates.py | 88 +++++++++++++++++++++++++++++++++++++--- tests/test_derivative.py | 38 +++++++++++++++++ 2 files changed, 121 insertions(+), 5 deletions(-) diff --git a/src/qibo/gates/gates.py b/src/qibo/gates/gates.py index abe215defa..bfbffa1530 100644 --- a/src/qibo/gates/gates.py +++ b/src/qibo/gates/gates.py @@ -2,11 +2,89 @@ from typing import List import numpy as np +import sympy as sp from qibo.config import PRECISION_TOL, raise_error from qibo.gates.abstract import Gate, ParametrizedGate +class Parameter: + """Object which allows complex gate parameters. Several trainable parameter + and possibly features are linked through a lambda function which returns the + final gate parameter""" + + def __init__(self, func, trainablep, featurep=None): + self._trainablep = trainablep + self._featurep = featurep + self.nparams = len(trainablep) + + if isinstance(featurep, list): + self.nfeat = len(featurep) + else: + self.nfeat = 0 + self.lambdaf = func + self.derivatives = self.calculate_derivatives() + + def _apply_func(self, function, fixed_params=None): + """Applies lambda function and returns final gate parameter""" + params = [] + if self._featurep is not None: + if isinstance(self._featurep, list): + params.extend(self._featurep) + else: + params.append(self._featurep) + if fixed_params: + params.extend(fixed_params) + else: + params.extend(self._trainablep) + return float(function(*params)) + + def _update_params(self, trainablep=None, feature=None): + """Update gate trainable parameter and feature values""" + if trainablep is not None: + self._trainablep = trainablep + if feature and self._featurep: + self._featurep = feature + + def get_params(self, trainablep=None, feature=None): + """Update values with trainable parameter and calculate current gate parameter""" + self._update_params(trainablep=trainablep, feature=feature) + return self._apply_func(self.lambdaf) + + def get_indices(self, start_index): + """Return list of respective indices of trainable parameters within + the optimizer's trainable parameter list""" + return [start_index + i for i in range(self.nparams)] + + def get_fixed_part(self, trainablep_idx): + """Retrieve parameter constant unaffected by a specific trainable parameter""" + params = self._trainablep.copy() + params[trainablep_idx] = 0.0 + return self._apply_func(self.lambdaf, fixed_params=params) + + def calculate_derivatives(self): + """Calculates derivatives w.r.t to all trainable parameters""" + vars = [] + for i in range(self.nfeat): + vars.append(sp.Symbol(f"x{i}")) + for i in range(self.nparams): + vars.append(sp.Symbol(f"th{i}")) + + expr = sp.sympify(self.lambdaf(*vars)) + + derivatives = [] + for i in range(self.nfeat, len(vars)): + derivative_expr = sp.diff(expr, vars[i]) + 
derivatives.append(sp.lambdify(vars, derivative_expr)) + + return derivatives + + def get_scaling_factor(self, trainablep_idx): + """Get derivative w.r.t a trainable parameter""" + deriv = self.derivatives[trainablep_idx] + return self._apply_func(deriv) + + class H(Gate): """The Hadamard gate. @@ -495,12 +573,12 @@ def __init__(self, q, theta, trainable=True): self.name = None self._controlled_gate = None self.target_qubits = (q,) - self.unitary = True - - if isinstance(theta, (float, int)) and (theta % (np.pi / 2)).is_integer(): - self.clifford = True - self.parameters = theta + self.initparams = theta + if isinstance(theta, Parameter): + self.parameters = theta.get_params() + else: + self.parameters = theta self.init_args = [q] self.init_kwargs = {"theta": theta, "trainable": trainable} diff --git a/tests/test_derivative.py b/tests/test_derivative.py index ec7a501720..26af07735b 100644 --- a/tests/test_derivative.py +++ b/tests/test_derivative.py @@ -3,6 +3,7 @@ from qibo import Circuit, gates, hamiltonians from qibo.derivative import finite_differences, parameter_shift +from qibo.gates.gates import Parameter from qibo.symbols import Z @@ -123,3 +124,40 @@ def test_finite_differences(backend, step_size): backend.assert_allclose(grad_0, grads[0], atol=atol) backend.assert_allclose(grad_1, grads[1], atol=atol) backend.assert_allclose(grad_2, grads[2], atol=atol) + + +def test_parameter(): + # single feature + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + featurep=[7.0], + ) + + indices = param.get_indices(10) + assert indices == [10, 11, 12] + + fixed = param.get_fixed_part(1) + assert fixed == 73.5 + + factor = param.get_scaling_factor(2) + assert factor == 12.0 + + gate_value = param.get_params(trainablep=[15.0, 10.0, 7.0], feature=[5.0]) + assert gate_value == 865 + + # multiple features + param = Parameter( + lambda x1, x2, th1, th2, th3: x1**2 * th1 + x2 * th2 * th3, + [1.5, 2.0, 3.0], + featurep=[7.0, 4.0], + ) + + fixed = param.get_fixed_part(1) + assert fixed == 73.5 + + factor = param.get_scaling_factor(2) + assert factor == 8.0 + + gate_value = param.get_params(trainablep=[15.0, 10.0, 7.0], feature=[5.0, 3.0]) + assert gate_value == 585 From c41998856bc5bb50ef0dfe073548edab681220ca Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 23 Aug 2023 01:46:48 +0200 Subject: [PATCH 08/32] implemented stavros comments --- src/qibo/gates/gates.py | 65 +++++++++++++++++++++--------------- src/qibo/models/circuit.py | 68 ++++++++++++++++++++++++++++++++++++++ tests/test_derivative.py | 44 +++++++++++++++++++++--- 3 files changed, 147 insertions(+), 30 deletions(-) diff --git a/src/qibo/gates/gates.py b/src/qibo/gates/gates.py index bfbffa1530..251d2bc065 100644 --- a/src/qibo/gates/gates.py +++ b/src/qibo/gates/gates.py @@ -9,17 +9,23 @@ class Parameter: - """Object which allows complex gate parameters. Several trainable parameter + """Object which allows for variational gate parameters. 
Several trainable parameter and possibly features are linked through a lambda function which returns the - final gate parameter""" + final gate parameter - def __init__(self, func, trainablep, featurep=None): - self._trainablep = trainablep - self._featurep = featurep - self.nparams = len(trainablep) + Args: + func (function): lambda function describing the gate parameter + trainable (list or np.ndarray): array with initial trainable parameters theta + feature (list or np.ndarray): array containing possible input features x + """ - if isinstance(featurep, list): - self.nfeat = len(featurep) + def __init__(self, func, trainable, feature=None): + self._trainable = trainable + self._feature = feature + self.nparams = len(trainable) + + if isinstance(feature, list): + self.nfeat = len(feature) else: self.nfeat = 0 self.lambdaf = func @@ -28,27 +34,34 @@ def __init__(self, func, trainablep, featurep=None): def _apply_func(self, function, fixed_params=None): """Applies lambda function and returns final gate parameter""" params = [] - if self._featurep is not None: - if isinstance(self._featurep, list): - params.extend(self._featurep) + if self._feature is not None: + if isinstance(self._feature, list): + params.extend(self._feature) else: - params.append(self._featurep) + params.append(self._feature) if fixed_params: params.extend(fixed_params) else: - params.extend(self._trainablep) + params.extend(self._trainable) return float(function(*params)) - def _update_params(self, trainablep=None, feature=None): + def update_parameters(self, trainable=None, feature=None): """Update gate trainable parameter and feature values""" - if trainablep is not None: - self._trainablep = trainablep - if feature and self._featurep: - self._featurep = feature + if not isinstance(trainable, (list, np.ndarray)): + raise_error( + ValueError, "Trainable parameters must be given as list or numpy array" + ) + + if not isinstance(trainable, (list, np.ndarray)): + raise_error(ValueError, "Features must be given as list or numpy array") + + if trainable is not None: + self._trainable = trainable + if feature and self._feature: + self._feature = feature - def get_params(self, trainablep=None, feature=None): + def get_gate_parameters(self): """Update values with trainable parameter and calculate current gate parameter""" - self._update_params(trainablep=trainablep, feature=feature) return self._apply_func(self.lambdaf) def get_indices(self, start_index): @@ -56,10 +69,10 @@ def get_indices(self, start_index): the optimizer's trainable parameter list""" return [start_index + i for i in range(self.nparams)] - def get_fixed_part(self, trainablep_idx): + def get_fixed_part(self, trainable_idx): """Retrieve parameter constant unaffected by a specific trainable parameter""" - params = self._trainablep.copy() - params[trainablep_idx] = 0.0 + params = self._trainable.copy() + params[trainable_idx] = 0.0 return self._apply_func(self.lambdaf, fixed_params=params) def calculate_derivatives(self): @@ -79,9 +92,9 @@ def calculate_derivatives(self): return derivatives - def get_scaling_factor(self, trainablep_idx): + def get_scaling_factor(self, trainable_idx): """Get derivative w.r.t a trainable parameter""" - deriv = self.derivatives[trainablep_idx] + deriv = self.derivatives[trainable_idx] return self._apply_func(deriv) @@ -576,7 +589,7 @@ def __init__(self, q, theta, trainable=True): self.initparams = theta if isinstance(theta, Parameter): - self.parameters = theta.get_params() + self.parameters = theta.get_gate_parameters() else: 
self.parameters = theta self.init_args = [q] diff --git a/src/qibo/models/circuit.py b/src/qibo/models/circuit.py index d0ee240ef0..10ab2d88c9 100644 --- a/src/qibo/models/circuit.py +++ b/src/qibo/models/circuit.py @@ -1423,3 +1423,71 @@ def chunkstring(string, length): output += table return output.rstrip("\n") + + +class VariationalCircuit(Circuit): + def __init__(self, nqubits, accelerators=None, density_matrix=False): + super().__init__(nqubits, accelerators, density_matrix) + + def _get_initparams(self): + """Retrieve parameter values or objects directly from gates""" + + params = [] + for gate in self.queue: + if isinstance(gate, (gates.ParametrizedGate)): + try: + params.append(gate.initparams) + except Exception as e: + params.append(gate.parameters) + + if isinstance(params[0], (float, int, tuple)): + params = self.get_parameters() + if isinstance(params[0], tuple): + params = np.array([val for t in params for val in t]) + + return params + + def _get_train_params(self): + # for array + if isinstance(self.initparams, np.ndarray): + return self.initparams + + # for Parameter objects + else: + params = [] + for Param in self.initparams: + # update trainable params and retrieve gate param + params.extend(Param._trainable) + + return params + + def set_variational_parameters(self, input_params, feature=None): + """Retrieve gate parameters based on initial parameter values given to gates + Args: + feature (int or list): input feature if embedded in Parameter lambda function + + Returns: + (list or np.ndarray) gate parameters + """ + + # for array + if isinstance(self.initparams, np.ndarray): + gate_params = self.initparams + + # for Parameter objects + else: + gate_params = [] + count = 0 + for Param in self.initparams: + trainable = input_params[count : count + Param.nparams] + count += Param.nparams + # update trainable params and retrieve gate param + Param.update_parameters(trainable, feature) + gate_params.append(Param.get_gate_parameters()) + + self.set_parameters(gate_params) + + def add(self, gate): + super().add(gate) + if isinstance(gate, gates.ParametrizedGate): + self.initparams = self._get_initparams() diff --git a/tests/test_derivative.py b/tests/test_derivative.py index 26af07735b..6aae176977 100644 --- a/tests/test_derivative.py +++ b/tests/test_derivative.py @@ -4,6 +4,7 @@ from qibo import Circuit, gates, hamiltonians from qibo.derivative import finite_differences, parameter_shift from qibo.gates.gates import Parameter +from qibo.models.circuit import VariationalCircuit from qibo.symbols import Z @@ -131,7 +132,7 @@ def test_parameter(): param = Parameter( lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, [1.5, 2.0, 3.0], - featurep=[7.0], + feature=[7.0], ) indices = param.get_indices(10) @@ -143,14 +144,15 @@ def test_parameter(): factor = param.get_scaling_factor(2) assert factor == 12.0 - gate_value = param.get_params(trainablep=[15.0, 10.0, 7.0], feature=[5.0]) + param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) + gate_value = param.get_gate_parameters() assert gate_value == 865 # multiple features param = Parameter( lambda x1, x2, th1, th2, th3: x1**2 * th1 + x2 * th2 * th3, [1.5, 2.0, 3.0], - featurep=[7.0, 4.0], + feature=[7.0, 4.0], ) fixed = param.get_fixed_part(1) @@ -159,5 +161,39 @@ def test_parameter(): factor = param.get_scaling_factor(2) assert factor == 8.0 - gate_value = param.get_params(trainablep=[15.0, 10.0, 7.0], feature=[5.0, 3.0]) + param.update_parameters(trainable=np.array([15.0, 10.0, 7.0]), feature=[5.0, 3.0]) + 
gate_value = param.get_gate_parameters() assert gate_value == 585 + + +def test_variational_circuit(): + c = VariationalCircuit(1) + c.add(gates.RX(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.1, 0.1]))) + c.add(gates.RY(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.4, 0.1]))) + c.add(gates.RZ(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.3, 0.1]))) + c.add(gates.M(0)) + + # _get_initparams + true = np.array([0.1, 0.1, 0.4, 0.1, 0.3, 0.1]) + Params = c._get_initparams() + check = [] + for Param in Params: + check.extend(Param._trainable) + + assert np.allclose(check, true) + + # _get_train_params + train_params = c._get_train_params() + + assert np.allclose(check, train_params) + + # set_variational_parameters + true = [(120.0,), (10010.0,), (420.0,)] + c.set_variational_parameters([10.0, 20, 100, 10, 20, 20]) + circuit_params = c.get_parameters() + + assert circuit_params == true + + +if __name__ == "__main__": + test_variational_circuit() From ba1dc89e38f3158b01cda6892b70d3b0ccde5c08 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Thu, 24 Aug 2023 22:15:21 +0200 Subject: [PATCH 09/32] some Stavros and Matteo comments implemented --- src/qibo/gates/gates.py | 94 +------------------------------------- src/qibo/models/circuit.py | 16 +++---- tests/test_derivative.py | 74 ------------------------------ 3 files changed, 10 insertions(+), 174 deletions(-) diff --git a/src/qibo/gates/gates.py b/src/qibo/gates/gates.py index 251d2bc065..9a45c99e9c 100644 --- a/src/qibo/gates/gates.py +++ b/src/qibo/gates/gates.py @@ -2,100 +2,10 @@ from typing import List import numpy as np -import sympy as sp from qibo.config import PRECISION_TOL, raise_error from qibo.gates.abstract import Gate, ParametrizedGate - - -class Parameter: - """Object which allows for variational gate parameters. 
Several trainable parameter - and possibly features are linked through a lambda function which returns the - final gate parameter - - Args: - func (function): lambda function describing the gate parameter - trainable (list or np.ndarray): array with initial trainable parameters theta - feature (list or np.ndarray): array containing possible input features x - """ - - def __init__(self, func, trainable, feature=None): - self._trainable = trainable - self._feature = feature - self.nparams = len(trainable) - - if isinstance(feature, list): - self.nfeat = len(feature) - else: - self.nfeat = 0 - self.lambdaf = func - self.derivatives = self.calculate_derivatives() - - def _apply_func(self, function, fixed_params=None): - """Applies lambda function and returns final gate parameter""" - params = [] - if self._feature is not None: - if isinstance(self._feature, list): - params.extend(self._feature) - else: - params.append(self._feature) - if fixed_params: - params.extend(fixed_params) - else: - params.extend(self._trainable) - return float(function(*params)) - - def update_parameters(self, trainable=None, feature=None): - """Update gate trainable parameter and feature values""" - if not isinstance(trainable, (list, np.ndarray)): - raise_error( - ValueError, "Trainable parameters must be given as list or numpy array" - ) - - if not isinstance(trainable, (list, np.ndarray)): - raise_error(ValueError, "Features must be given as list or numpy array") - - if trainable is not None: - self._trainable = trainable - if feature and self._feature: - self._feature = feature - - def get_gate_parameters(self): - """Update values with trainable parameter and calculate current gate parameter""" - return self._apply_func(self.lambdaf) - - def get_indices(self, start_index): - """Return list of respective indices of trainable parameters within - the optimizer's trainable parameter list""" - return [start_index + i for i in range(self.nparams)] - - def get_fixed_part(self, trainable_idx): - """Retrieve parameter constant unaffected by a specific trainable parameter""" - params = self._trainable.copy() - params[trainable_idx] = 0.0 - return self._apply_func(self.lambdaf, fixed_params=params) - - def calculate_derivatives(self): - """Calculates derivatives w.r.t to all trainable parameters""" - vars = [] - for i in range(self.nfeat): - vars.append(sp.Symbol(f"x{i}")) - for i in range(self.nparams): - vars.append(sp.Symbol(f"th{i}")) - - expr = sp.sympify(self.lambdaf(*vars)) - - derivatives = [] - for i in range(self.nfeat, len(vars)): - derivative_expr = sp.diff(expr, vars[i]) - derivatives.append(sp.lambdify(vars, derivative_expr)) - - return derivatives - - def get_scaling_factor(self, trainable_idx): - """Get derivative w.r.t a trainable parameter""" - deriv = self.derivatives[trainable_idx] - return self._apply_func(deriv) +from qibo.parameter import Parameter class H(Gate): @@ -589,7 +499,7 @@ def __init__(self, q, theta, trainable=True): self.initparams = theta if isinstance(theta, Parameter): - self.parameters = theta.get_gate_parameters() + self.parameters = theta() else: self.parameters = theta self.init_args = [q] diff --git a/src/qibo/models/circuit.py b/src/qibo/models/circuit.py index 10ab2d88c9..be118eb1cd 100644 --- a/src/qibo/models/circuit.py +++ b/src/qibo/models/circuit.py @@ -1437,7 +1437,7 @@ def _get_initparams(self): if isinstance(gate, (gates.ParametrizedGate)): try: params.append(gate.initparams) - except Exception as e: + except AttributeError: params.append(gate.parameters) if 
isinstance(params[0], (float, int, tuple)): @@ -1455,9 +1455,9 @@ def _get_train_params(self): # for Parameter objects else: params = [] - for Param in self.initparams: + for param_object in self.initparams: # update trainable params and retrieve gate param - params.extend(Param._trainable) + params.extend(param_object._trainable) return params @@ -1478,12 +1478,12 @@ def set_variational_parameters(self, input_params, feature=None): else: gate_params = [] count = 0 - for Param in self.initparams: - trainable = input_params[count : count + Param.nparams] - count += Param.nparams + for param_object in self.initparams: + trainable = input_params[count : count + param_object.nparams] + count += param_object.nparams # update trainable params and retrieve gate param - Param.update_parameters(trainable, feature) - gate_params.append(Param.get_gate_parameters()) + param_object.update_parameters(trainable, feature) + gate_params.append(param_object()) self.set_parameters(gate_params) diff --git a/tests/test_derivative.py b/tests/test_derivative.py index 6aae176977..ec7a501720 100644 --- a/tests/test_derivative.py +++ b/tests/test_derivative.py @@ -3,8 +3,6 @@ from qibo import Circuit, gates, hamiltonians from qibo.derivative import finite_differences, parameter_shift -from qibo.gates.gates import Parameter -from qibo.models.circuit import VariationalCircuit from qibo.symbols import Z @@ -125,75 +123,3 @@ def test_finite_differences(backend, step_size): backend.assert_allclose(grad_0, grads[0], atol=atol) backend.assert_allclose(grad_1, grads[1], atol=atol) backend.assert_allclose(grad_2, grads[2], atol=atol) - - -def test_parameter(): - # single feature - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - - indices = param.get_indices(10) - assert indices == [10, 11, 12] - - fixed = param.get_fixed_part(1) - assert fixed == 73.5 - - factor = param.get_scaling_factor(2) - assert factor == 12.0 - - param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) - gate_value = param.get_gate_parameters() - assert gate_value == 865 - - # multiple features - param = Parameter( - lambda x1, x2, th1, th2, th3: x1**2 * th1 + x2 * th2 * th3, - [1.5, 2.0, 3.0], - feature=[7.0, 4.0], - ) - - fixed = param.get_fixed_part(1) - assert fixed == 73.5 - - factor = param.get_scaling_factor(2) - assert factor == 8.0 - - param.update_parameters(trainable=np.array([15.0, 10.0, 7.0]), feature=[5.0, 3.0]) - gate_value = param.get_gate_parameters() - assert gate_value == 585 - - -def test_variational_circuit(): - c = VariationalCircuit(1) - c.add(gates.RX(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.1, 0.1]))) - c.add(gates.RY(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.4, 0.1]))) - c.add(gates.RZ(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.3, 0.1]))) - c.add(gates.M(0)) - - # _get_initparams - true = np.array([0.1, 0.1, 0.4, 0.1, 0.3, 0.1]) - Params = c._get_initparams() - check = [] - for Param in Params: - check.extend(Param._trainable) - - assert np.allclose(check, true) - - # _get_train_params - train_params = c._get_train_params() - - assert np.allclose(check, train_params) - - # set_variational_parameters - true = [(120.0,), (10010.0,), (420.0,)] - c.set_variational_parameters([10.0, 20, 100, 10, 20, 20]) - circuit_params = c.get_parameters() - - assert circuit_params == true - - -if __name__ == "__main__": - test_variational_circuit() From 0a9b4a8a231bef9bf655b862619652e506656529 Mon Sep 17 00:00:00 2001 From: 
acse-b99192e1 Date: Fri, 25 Aug 2023 14:53:33 +0200 Subject: [PATCH 10/32] Parameter input checks, moved VariationalCircuit to separate file --- src/qibo/__init__.py | 11 +++- src/qibo/models/circuit.py | 68 ---------------------- src/qibo/models/variational.py | 101 +++++++++++++++++++++++++++++++++ 3 files changed, 111 insertions(+), 69 deletions(-) diff --git a/src/qibo/__init__.py b/src/qibo/__init__.py index 9b7f26e7c8..f3634db0f1 100644 --- a/src/qibo/__init__.py +++ b/src/qibo/__init__.py @@ -2,7 +2,16 @@ __version__ = im.version(__package__) -from qibo import callbacks, gates, hamiltonians, models, optimizers, parallel, solvers +from qibo import ( + callbacks, + gates, + hamiltonians, + models, + optimizers, + parallel, + parameter, + solvers, +) from qibo.backends import ( get_backend, get_device, diff --git a/src/qibo/models/circuit.py b/src/qibo/models/circuit.py index be118eb1cd..d0ee240ef0 100644 --- a/src/qibo/models/circuit.py +++ b/src/qibo/models/circuit.py @@ -1423,71 +1423,3 @@ def chunkstring(string, length): output += table return output.rstrip("\n") - - -class VariationalCircuit(Circuit): - def __init__(self, nqubits, accelerators=None, density_matrix=False): - super().__init__(nqubits, accelerators, density_matrix) - - def _get_initparams(self): - """Retrieve parameter values or objects directly from gates""" - - params = [] - for gate in self.queue: - if isinstance(gate, (gates.ParametrizedGate)): - try: - params.append(gate.initparams) - except AttributeError: - params.append(gate.parameters) - - if isinstance(params[0], (float, int, tuple)): - params = self.get_parameters() - if isinstance(params[0], tuple): - params = np.array([val for t in params for val in t]) - - return params - - def _get_train_params(self): - # for array - if isinstance(self.initparams, np.ndarray): - return self.initparams - - # for Parameter objects - else: - params = [] - for param_object in self.initparams: - # update trainable params and retrieve gate param - params.extend(param_object._trainable) - - return params - - def set_variational_parameters(self, input_params, feature=None): - """Retrieve gate parameters based on initial parameter values given to gates - Args: - feature (int or list): input feature if embedded in Parameter lambda function - - Returns: - (list or np.ndarray) gate parameters - """ - - # for array - if isinstance(self.initparams, np.ndarray): - gate_params = self.initparams - - # for Parameter objects - else: - gate_params = [] - count = 0 - for param_object in self.initparams: - trainable = input_params[count : count + param_object.nparams] - count += param_object.nparams - # update trainable params and retrieve gate param - param_object.update_parameters(trainable, feature) - gate_params.append(param_object()) - - self.set_parameters(gate_params) - - def add(self, gate): - super().add(gate) - if isinstance(gate, gates.ParametrizedGate): - self.initparams = self._get_initparams() diff --git a/src/qibo/models/variational.py b/src/qibo/models/variational.py index 7172bf07d8..03e81a1f47 100644 --- a/src/qibo/models/variational.py +++ b/src/qibo/models/variational.py @@ -1,4 +1,8 @@ +import numpy as np + from qibo.config import raise_error +from qibo.gates import gates +from qibo.models import Circuit from qibo.models.evolution import StateEvolution @@ -659,3 +663,100 @@ def _loss(params, falqon, hamiltonian): final_loss = _loss(parameters, self, self.hamiltonian) extra = {"energies": energy, "callbacks": callback_result} return final_loss, parameters, extra + + 
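A minimal usage sketch for the class added below, distilled from the
``test_variational_circuit`` test earlier in this series (assumes the import
paths at this stage of the branch; ``Parameter`` is still the lambda-based
gate-parameter helper living in ``qibo.gates.gates``):

    import numpy as np
    from qibo import gates
    from qibo.models.variational import VariationalCircuit
    from qibo.gates.gates import Parameter

    # one RX gate whose angle is th1**2 + th2, built from two trainables
    c = VariationalCircuit(1)
    c.add(gates.RX(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.1, 0.1])))
    c.add(gates.M(0))

    # feed a flat list of trainables; each Parameter consumes its own slice
    c.set_variational_parameters([10.0, 20.0])  # RX angle becomes 10.0**2 + 20.0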
+class VariationalCircuit(Circuit):
+    """Circuit configuration for use cases with variational
+    circuit parameters of the Parameter class.
+
+    This circuit is symbolic and cannot perform calculations.
+    A specific backend has to be used for performing calculations.
+
+    Args:
+        nqubits (int): Total number of qubits in the circuit.
+        density_matrix (bool): If `True`, the circuit would evolve density matrices.
+            Defaults to ``False``.
+        accelerators (dict): Dictionary that maps device names to the number of times each
+            device will be used. Defaults to ``None``.
+    """
+
+    def __init__(self, nqubits, accelerators=None, density_matrix=False):
+        super().__init__(nqubits, accelerators, density_matrix)
+
+    def _get_initparams(self):
+        """Retrieve parameter values or objects directly from gates"""
+
+        params = []
+        for gate in self.queue:
+            if isinstance(gate, (gates.ParametrizedGate)):
+                try:
+                    params.append(gate.initparams)
+                except AttributeError:
+                    params.append(gate.parameters)
+
+        if isinstance(params[0], (float, int, tuple)):
+            params = self.get_parameters()
+            if isinstance(params[0], tuple):
+                params = np.array([val for t in params for val in t])
+
+        return params
+
+    def _get_train_params(self):
+        # for array
+        if isinstance(self.initparams, np.ndarray):
+            return self.initparams
+
+        # for Parameter objects
+        else:
+            params = []
+            for param_object in self.initparams:
+                # update trainable params and retrieve gate param
+                params.extend(param_object._trainable)
+
+            return params
+
+    def set_variational_parameters(self, input_params, feature=None):
+        """Retrieve gate parameters based on initial parameter values given to gates
+        Args:
+            feature (int or list): input feature if embedded in Parameter lambda function
+
+        Returns:
+            (list or np.ndarray) gate parameters
+        """
+
+        # for array
+        if isinstance(self.initparams, np.ndarray):
+            gate_params = self.initparams
+
+        # for Parameter objects
+        else:
+            gate_params = []
+            count = 0
+            for param_object in self.initparams:
+                trainable = input_params[count : count + param_object.nparams]
+                count += param_object.nparams
+                # update trainable params and retrieve gate param
+                print(trainable, feature)
+                param_object.update_parameters(trainable, feature)
+                gate_params.append(param_object())
+
+        self.set_parameters(gate_params)
+
+    def add(self, gate):
+        """Add a gate to a given queue.
+
+        Args:
+            gate (:class:`qibo.gates.Gate`): the gate object to add.
+                See :ref:`Gates` for a list of available gates.
+                `gate` can also be an iterable or generator of gates.
+                In this case all gates in the iterable will be added in the
+                circuit.
+
+        Returns:
+            If the circuit contains measurement gates with ``collapse=True``
+            a ``sympy.Symbol`` that parametrizes the corresponding outcome.
+ """ + res = super().add(gate) + if isinstance(gate, gates.ParametrizedGate): + self.initparams = self._get_initparams() + return res From 7f42798b7cc4ec7f971414606a675b37ed5b7da2 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Fri, 25 Aug 2023 14:54:19 +0200 Subject: [PATCH 11/32] Parameter input checks, moved VariationalCircuit to separate file --- src/qibo/parameter.py | 140 ++++++++++++++++++++++++++++++++++++++ tests/test_parameter.py | 98 ++++++++++++++++++++++++++ tests/test_variational.py | 34 +++++++++ 3 files changed, 272 insertions(+) create mode 100644 src/qibo/parameter.py create mode 100644 tests/test_parameter.py create mode 100644 tests/test_variational.py diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py new file mode 100644 index 0000000000..6b7731067a --- /dev/null +++ b/src/qibo/parameter.py @@ -0,0 +1,140 @@ +import inspect + +import numpy as np +import sympy as sp + +from qibo.config import raise_error + + +class Parameter: + """Object which allows for variational gate parameters. Several trainable parameter + and possibly features are linked through a lambda function which returns the + final gate parameter. All possible analytical derivatives of the lambda function are + calculated at the object initialisation using Sympy. + + Args: + func (function): lambda function which builds the gate parameter. If both features and trainable parameters + compose the function, it must be passed by first providing the features and then the parameters, as + described in the code example above. + trainable (list or np.ndarray): array with initial trainable parameters theta + feature (list or np.ndarray): array containing possible input features x + """ + + def __init__(self, func, trainable, feature=None): + self._trainable = trainable + self._feature = feature + self.nparams = len(trainable) + + if isinstance(feature, list): + self.nfeat = len(feature) + else: + self.nfeat = 0 + + # lambda function + self.lambdaf = func + self._check_inputs(func) + + self.derivatives = self._calculate_derivatives() + + def __call__(self): + """Update values with trainable parameter and calculate current gate parameter""" + return self._apply_func(self.lambdaf) + + def _check_inputs(self, func): + """Verifies that the inputs are correct""" + parameters = inspect.signature(func).parameters + + if (self.nfeat + self.nparams) != len(parameters): + raise_error( + ValueError, + f"The lambda function has {len(parameters)} parameters, the input has {self.nfeat+self.nparams}.", + ) + + iterator = iter(parameters.items()) + + for i in range(self.nfeat): + x = next(iterator) + if x[0][0] != "x": + raise_error( + ValueError, + f"Parameter #{i} in the lambda function should be a feature starting with `x`", + ) + + for i in range(self.nparams): + x = next(iterator) + if x[0][:2] != "th": + raise_error( + ValueError, + f"Parameter #{self.nfeat+i} in the lambda function should be a trainable parameter starting with `th`", + ) + + def _apply_func(self, function, fixed_params=None): + """Applies lambda function and returns final gate parameter""" + params = [] + if self._feature is not None: + if isinstance(self._feature, list): + params.extend(self._feature) + else: + params.append(self._feature) + if fixed_params: + params.extend(fixed_params) + else: + params.extend(self._trainable) + return float(function(*params)) + + def _calculate_derivatives(self): + """Calculates derivatives w.r.t to all trainable parameters""" + vars = [] + for i in range(self.nfeat): + vars.append(sp.Symbol(f"x{i}")) + for i 
in range(self.nparams): + vars.append(sp.Symbol(f"th{i}")) + + expr = sp.sympify(self.lambdaf(*vars)) + + derivatives = [] + for i in range(len(vars)): + derivative_expr = sp.diff(expr, vars[i]) + derivatives.append(sp.lambdify(vars, derivative_expr)) + + return derivatives + + def update_parameters(self, trainable=None, feature=None): + """Update gate trainable parameter and feature values""" + if not isinstance(trainable, (list, np.ndarray)): + raise_error( + ValueError, "Trainable parameters must be given as list or numpy array" + ) + + if self.nparams != len(trainable): + raise_error( + ValueError, + f"{len(trainable)} trainable parameters given, need {self.nparams}", + ) + + if not isinstance(feature, (list, np.ndarray)) and self._feature != feature: + raise_error(ValueError, "Features must be given as list or numpy array") + + if self._feature is not None and self.nfeat != len(feature): + raise_error(ValueError, f"{len(feature)} features given, need {self.nfeat}") + + if trainable is not None: + self._trainable = trainable + if feature and self._feature: + self._feature = feature + + def get_indices(self, start_index): + """Return list of respective indices of trainable parameters within + a larger trainable parameter list""" + return [start_index + i for i in range(self.nparams)] + + def get_fixed_part(self, trainable_idx): + """Retrieve parameter constant unaffected by a specific trainable parameter""" + params = self._trainable.copy() + params[trainable_idx] = 0.0 + return self._apply_func(self.lambdaf, fixed_params=params) + + def get_partial_derivative(self, trainable_idx): + """Get derivative w.r.t a trainable parameter""" + deriv = self.derivatives[trainable_idx] + return self._apply_func(deriv) diff --git a/tests/test_parameter.py b/tests/test_parameter.py new file mode 100644 index 0000000000..0046c87b0a --- /dev/null +++ b/tests/test_parameter.py @@ -0,0 +1,98 @@ +import numpy as np +import pytest + +from qibo.parameter import Parameter + + +def test_parameter(): + # single feature + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + + indices = param.get_indices(10) + assert indices == [10, 11, 12] + + fixed = param.get_fixed_part(1) + assert fixed == 73.5 + + factor = param.get_partial_derivative(3) + assert factor == 12.0 + + param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) + gate_value = param() + assert gate_value == 865 + + # multiple features + param = Parameter( + lambda x1, x2, th1, th2, th3: x1**2 * th1 + x2 * th2 * th3, + [1.5, 2.0, 3.0], + feature=[7.0, 4.0], + ) + + fixed = param.get_fixed_part(1) + assert fixed == 73.5 + + factor = param.get_partial_derivative(4) + assert factor == 8.0 + + param.update_parameters(trainable=np.array([15.0, 10.0, 7.0]), feature=[5.0, 3.0]) + gate_value = param() + assert gate_value == 585 + + +def test_parameter_errors(): + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, t1, th2, th3: x**2 * t1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0], + feature=[3.0, 7.0], + ) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda j, th1, th2, th3: j**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + 
) + param.update_parameters((1, 1, 1), [1]) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + param.update_parameters([1, 1, 1], (1)) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + param.update_parameters([1, 1], [1]) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + param.update_parameters([1, 1, 1], [1, 1]) diff --git a/tests/test_variational.py b/tests/test_variational.py new file mode 100644 index 0000000000..fe5874d09a --- /dev/null +++ b/tests/test_variational.py @@ -0,0 +1,34 @@ +import numpy as np + +from qibo import gates +from qibo.models.variational import VariationalCircuit +from qibo.parameter import Parameter + + +def test_variational_circuit(): + c = VariationalCircuit(1) + c.add(gates.RX(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.1, 0.1]))) + c.add(gates.RY(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.4, 0.1]))) + c.add(gates.RZ(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.3, 0.1]))) + c.add(gates.M(0)) + + # _get_initparams + true = np.array([0.1, 0.1, 0.4, 0.1, 0.3, 0.1]) + Params = c._get_initparams() + check = [] + for Param in Params: + check.extend(Param._trainable) + + assert np.allclose(check, true) + + # _get_train_params + train_params = c._get_train_params() + + assert np.allclose(check, train_params) + + # set_variational_parameters + true = [(120.0,), (10010.0,), (420.0,)] + c.set_variational_parameters([10.0, 20, 100, 10, 20, 20]) + circuit_params = c.get_parameters() + + assert circuit_params == true From 012d74f0528225ca161e6cfa42ceb02f77b34dc5 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 6 Sep 2023 02:00:06 +0200 Subject: [PATCH 12/32] parameter class alone --- src/qibo/gates/gates.py | 8 +++ src/qibo/models/variational.py | 97 ---------------------------------- src/qibo/parameter.py | 30 ++++++++--- tests/test_variational.py | 34 ------------ 4 files changed, 32 insertions(+), 137 deletions(-) delete mode 100644 tests/test_variational.py diff --git a/src/qibo/gates/gates.py b/src/qibo/gates/gates.py index 9a45c99e9c..50c8515e2f 100644 --- a/src/qibo/gates/gates.py +++ b/src/qibo/gates/gates.py @@ -496,12 +496,20 @@ def __init__(self, q, theta, trainable=True): self.name = None self._controlled_gate = None self.target_qubits = (q,) + self.unitary = True self.initparams = theta if isinstance(theta, Parameter): self.parameters = theta() else: self.parameters = theta + + if ( + isinstance(self.parameters, (float, int)) + and (theta % (np.pi / 2)).is_integer() + ): + self.clifford = True + self.init_args = [q] self.init_kwargs = {"theta": theta, "trainable": trainable} diff --git a/src/qibo/models/variational.py b/src/qibo/models/variational.py index 03e81a1f47..b9a4c1a59c 100644 --- a/src/qibo/models/variational.py +++ b/src/qibo/models/variational.py @@ -663,100 +663,3 @@ def _loss(params, falqon, hamiltonian): final_loss = _loss(parameters, self, self.hamiltonian) extra = {"energies": energy, "callbacks": callback_result} return final_loss, parameters, extra - - -class VariationalCircuit(Circuit): - """Circuit configuration for use cases with variational - circuit parameters of the Parameter class. - - This circuit is symbolic and cannot perform calculations. 
- A specific backend has to be used for performing calculations. - - Args: - nqubits (int): Total number of qubits in the circuit. - density_matrix (bool): If `True`, the circuit would evolve density matrices. - Defaults to ``False``. - accelerators (dict): Dictionary that maps device names to the number of times each - device will be used. Defaults to ``None``. - """ - - def __init__(self, nqubits, accelerators=None, density_matrix=False): - super().__init__(nqubits, accelerators, density_matrix) - - def _get_initparams(self): - """Retrieve parameter values or objects directly from gates""" - - params = [] - for gate in self.queue: - if isinstance(gate, (gates.ParametrizedGate)): - try: - params.append(gate.initparams) - except AttributeError: - params.append(gate.parameters) - - if isinstance(params[0], (float, int, tuple)): - params = self.get_parameters() - if isinstance(params[0], tuple): - params = np.array([val for t in params for val in t]) - - return params - - def _get_train_params(self): - # for array - if isinstance(self.initparams, np.ndarray): - return self.initparams - - # for Parameter objects - else: - params = [] - for param_object in self.initparams: - # update trainable params and retrieve gate param - params.extend(param_object._trainable) - - return params - - def set_variational_parameters(self, input_params, feature=None): - """Retrieve gate parameters based on initial parameter values given to gates - Args: - feature (int or list): input feature if embedded in Parameter lambda function - - Returns: - (list or np.ndarray) gate parameters - """ - - # for array - if isinstance(self.initparams, np.ndarray): - gate_params = self.initparams - - # for Parameter objects - else: - gate_params = [] - count = 0 - for param_object in self.initparams: - trainable = input_params[count : count + param_object.nparams] - count += param_object.nparams - # update trainable params and retrieve gate param - print(trainable, feature) - param_object.update_parameters(trainable, feature) - gate_params.append(param_object()) - - self.set_parameters(gate_params) - - def add(self, gate): - """Add a gate to a given queue. - - Args: - gate (:class:`qibo.gates.Gate`): the gate object to add. - See :ref:`Gates` for a list of available gates. - `gate` can also be an iterable or generator of gates. - In this case all gates in the iterable will be added in the - circuit. - - Returns: - If the circuit contains measurement gates with ``collapse=True`` - a ``sympy.Symbol`` that parametrizes the corresponding outcome. - """ - res = super().add(gate) - if isinstance(gate, gates.ParametrizedGate): - self.initparams = self._get_initparams() - return res diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index 6b7731067a..5346b66100 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -12,6 +12,22 @@ class Parameter: final gate parameter. All possible analytical derivatives of the lambda function are calculated at the object initialisation using Sympy. + Example: + .. code-block:: python + + from qibo.parameter import Parameter + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + + partial_derivative = param.get_partial_derivative(3) + + param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) + gate_value = param() + + Args: func (function): lambda function which builds the gate parameter. 
If both features and trainable parameters compose the function, it must be passed by first providing the features and then the parameters, as @@ -23,12 +39,6 @@ class Parameter: def __init__(self, func, trainable, feature=None): self._trainable = trainable self._feature = feature - self.nparams = len(trainable) - - if isinstance(feature, list): - self.nfeat = len(feature) - else: - self.nfeat = 0 # lambda function self.lambdaf = func @@ -40,6 +50,14 @@ def __call__(self): """Update values with trainable parameter and calculate current gate parameter""" return self._apply_func(self.lambdaf) + @property + def nparams(self): + return len(self._trainable) + + @property + def nfeat(self): + return len(self._feature) if isinstance(self._feature, list) else 0 + def _check_inputs(self, func): """Verifies that the inputs are correct""" parameters = inspect.signature(func).parameters diff --git a/tests/test_variational.py b/tests/test_variational.py deleted file mode 100644 index fe5874d09a..0000000000 --- a/tests/test_variational.py +++ /dev/null @@ -1,34 +0,0 @@ -import numpy as np - -from qibo import gates -from qibo.models.variational import VariationalCircuit -from qibo.parameter import Parameter - - -def test_variational_circuit(): - c = VariationalCircuit(1) - c.add(gates.RX(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.1, 0.1]))) - c.add(gates.RY(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.4, 0.1]))) - c.add(gates.RZ(q=0, theta=Parameter(lambda th1, th2: th1**2 + th2, [0.3, 0.1]))) - c.add(gates.M(0)) - - # _get_initparams - true = np.array([0.1, 0.1, 0.4, 0.1, 0.3, 0.1]) - Params = c._get_initparams() - check = [] - for Param in Params: - check.extend(Param._trainable) - - assert np.allclose(check, true) - - # _get_train_params - train_params = c._get_train_params() - - assert np.allclose(check, train_params) - - # set_variational_parameters - true = [(120.0,), (10010.0,), (420.0,)] - c.set_variational_parameters([10.0, 20, 100, 10, 20, 20]) - circuit_params = c.get_parameters() - - assert circuit_params == true From f9bccb4ac120f5d1c8441dc350aa2ecae98b46d1 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 6 Sep 2023 02:02:57 +0200 Subject: [PATCH 13/32] removed imports --- src/qibo/models/variational.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/qibo/models/variational.py b/src/qibo/models/variational.py index b9a4c1a59c..7172bf07d8 100644 --- a/src/qibo/models/variational.py +++ b/src/qibo/models/variational.py @@ -1,8 +1,4 @@ -import numpy as np - from qibo.config import raise_error -from qibo.gates import gates -from qibo.models import Circuit from qibo.models.evolution import StateEvolution From d974986d8d82b34cece71938afa34168bfa76ccf Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 6 Sep 2023 03:04:57 +0200 Subject: [PATCH 14/32] fixed bug --- src/qibo/gates/gates.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/qibo/gates/gates.py b/src/qibo/gates/gates.py index 50c8515e2f..ecda9aafcf 100644 --- a/src/qibo/gates/gates.py +++ b/src/qibo/gates/gates.py @@ -500,16 +500,12 @@ def __init__(self, q, theta, trainable=True): self.initparams = theta if isinstance(theta, Parameter): - self.parameters = theta() - else: - self.parameters = theta + theta = theta() - if ( - isinstance(self.parameters, (float, int)) - and (theta % (np.pi / 2)).is_integer() - ): + if isinstance(theta, (float, int)) and (theta % (np.pi / 2)).is_integer(): self.clifford = True + self.parameters = theta self.init_args = [q] 
self.init_kwargs = {"theta": theta, "trainable": trainable} From 96d1b4b471183b52e8fbcbffdab0f47e1d413cee Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Fri, 8 Sep 2023 02:33:52 +0200 Subject: [PATCH 15/32] codecov OK --- tests/test_gates_gates.py | 9 +++++++++ tests/test_parameter.py | 21 +++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/tests/test_gates_gates.py b/tests/test_gates_gates.py index 3bb906c60d..73b3cd5769 100644 --- a/tests/test_gates_gates.py +++ b/tests/test_gates_gates.py @@ -3,6 +3,7 @@ import pytest from qibo import gates +from qibo.parameter import Parameter from qibo.quantum_info import random_hermitian, random_statevector, random_unitary @@ -253,6 +254,14 @@ def test_rx(backend, theta): else: assert not gates.RX(0, theta=theta).clifford + # test Parameter + assert ( + gates.RX( + 0, theta=Parameter(lambda x, th1: 10 * th1 + x, [0.2], feature=[40]) + ).init_kwargs["theta"] + == 42 + ) + @pytest.mark.parametrize("theta", [np.random.rand(), np.pi / 2, -np.pi / 2, np.pi]) def test_ry(backend, theta): diff --git a/tests/test_parameter.py b/tests/test_parameter.py index 0046c87b0a..d201bd475f 100644 --- a/tests/test_parameter.py +++ b/tests/test_parameter.py @@ -25,6 +25,16 @@ def test_parameter(): gate_value = param() assert gate_value == 865 + # single feature, no list + param2 = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) + + gate_value2 = param2() + assert gate_value2 == 91.5 + # multiple features param = Parameter( lambda x1, x2, th1, th2, th3: x1**2 * th1 + x2 * th2 * th3, @@ -96,3 +106,14 @@ def test_parameter_errors(): feature=[7.0], ) param.update_parameters([1, 1, 1], [1, 1]) + + with pytest.raises(ValueError) as e_info: + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0], + feature=[7.0], + ) + + +if __name__ == "__main__": + test_parameter() From 602d8c84bc5b34dd74bc2bae99cd61580ff78460 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Mon, 11 Sep 2023 16:21:45 +0200 Subject: [PATCH 16/32] ale comments implemented --- src/qibo/parameter.py | 83 +++++++++------------------------------- tests/test_parameter.py | 85 ++++++++++++++--------------------------- 2 files changed, 45 insertions(+), 123 deletions(-) diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index 5346b66100..e3e511967b 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -1,5 +1,3 @@ -import inspect - import numpy as np import sympy as sp @@ -12,20 +10,19 @@ class Parameter: final gate parameter. All possible analytical derivatives of the lambda function are calculated at the object initialisation using Sympy. - Example: - .. 
code-block:: python + Example:: - from qibo.parameter import Parameter - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) + from qibo.parameter import Parameter + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) - partial_derivative = param.get_partial_derivative(3) + partial_derivative = param.get_partial_derivative(3) - param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) - gate_value = param() + param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) + gate_value = param() Args: @@ -42,7 +39,6 @@ def __init__(self, func, trainable, feature=None): # lambda function self.lambdaf = func - self._check_inputs(func) self.derivatives = self._calculate_derivatives() @@ -58,46 +54,17 @@ def nparams(self): def nfeat(self): return len(self._feature) if isinstance(self._feature, list) else 0 - def _check_inputs(self, func): - """Verifies that the inputs are correct""" - parameters = inspect.signature(func).parameters - - if (self.nfeat + self.nparams) != len(parameters): - raise_error( - ValueError, - f"The lambda function has {len(parameters)} parameters, the input has {self.nfeat+self.nparams}.", - ) - - iterator = iter(parameters.items()) - - for i in range(self.nfeat): - x = next(iterator) - if x[0][0] != "x": - raise_error( - ValueError, - f"Parameter #{i} in the lambda function should be a feature starting with `x`", - ) - - for i in range(self.nparams): - x = next(iterator) - if x[0][:2] != "th": - raise_error( - ValueError, - f"Parameter #{self.nfeat+i} in the lambda function should be a trainable parameter starting with `th`", - ) - def _apply_func(self, function, fixed_params=None): """Applies lambda function and returns final gate parameter""" params = [] if self._feature is not None: - if isinstance(self._feature, list): - params.extend(self._feature) - else: - params.append(self._feature) + params.extend(self._feature) if fixed_params: params.extend(fixed_params) else: params.extend(self._trainable) + + # run function return float(function(*params)) def _calculate_derivatives(self): @@ -119,35 +86,19 @@ def _calculate_derivatives(self): def update_parameters(self, trainable=None, feature=None): """Update gate trainable parameter and feature values""" - if not isinstance(trainable, (list, np.ndarray)): - raise_error( - ValueError, "Trainable parameters must be given as list or numpy array" - ) - - if self.nparams != len(trainable): - raise_error( - ValueError, - f"{len(trainable)} trainable parameters given, need {self.nparams}", - ) - - if not isinstance(feature, (list, np.ndarray)) and self._feature != feature: - raise_error(ValueError, "Features must be given as list or numpy array") - - if self._feature is not None and self.nfeat != len(feature): - raise_error(ValueError, f"{len(feature)} features given, need {self.nfeat}") - if trainable is not None: self._trainable = trainable - if feature and self._feature: + + if feature is not None and self._feature is not None: self._feature = feature def get_indices(self, start_index): """Return list of respective indices of trainable parameters within a larger trainable parameter list""" - return [start_index + i for i in range(self.nparams)] + return (np.arange(self.nparams) + start_index).tolist() def get_fixed_part(self, trainable_idx): - """Retrieve parameter constant unaffected by a specific trainable parameter""" + """Retrieve constant term of lambda function with regard to a 
specific trainable parameter""" params = self._trainable.copy() params[trainable_idx] = 0.0 return self._apply_func(self.lambdaf, fixed_params=params) diff --git a/tests/test_parameter.py b/tests/test_parameter.py index d201bd475f..f83a1f294a 100644 --- a/tests/test_parameter.py +++ b/tests/test_parameter.py @@ -54,66 +54,37 @@ def test_parameter(): def test_parameter_errors(): - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda x, t1, th2, th3: x**2 * t1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0], - feature=[3.0, 7.0], - ) - - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda j, th1, th2, th3: j**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - param.update_parameters((1, 1, 1), [1]) - - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - param.update_parameters([1, 1, 1], (1)) - - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - param.update_parameters([1, 1], [1]) - - with pytest.raises(ValueError) as e_info: - param = Parameter( - lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], - ) - param.update_parameters([1, 1, 1], [1, 1]) + param = Parameter( + lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, + [1.5, 2.0, 3.0], + feature=[7.0], + ) - with pytest.raises(ValueError) as e_info: + param.update_parameters([1, 1, 1], 1) + + try: + param() + assert False + except Exception as e: + assert True + + param.update_parameters([1, 1], [1]) + try: + param() + assert False + except Exception as e: + assert True + + param.update_parameters([1, 1, 1], [1, 1]) + try: + param() + assert False + except Exception as e: + assert True + + with pytest.raises(TypeError) as e_info: param = Parameter( lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, [1.5, 2.0], feature=[7.0], ) - - -if __name__ == "__main__": - test_parameter() From 3bfba6ef9a9c94eafe766d7517fe8ac1a51e3e52 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 13 Sep 2023 17:49:43 +0200 Subject: [PATCH 17/32] implemented Ale and Matteo comments --- src/qibo/parameter.py | 57 +++++++++++++++++++++++++---------------- tests/test_parameter.py | 55 +++++++++++++++++++++------------------ 2 files changed, 65 insertions(+), 47 deletions(-) diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index e3e511967b..0731d99285 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -5,7 +5,7 @@ class Parameter: - """Object which allows for variational gate parameters. Several trainable parameter + """Object which allows for variational gate parameters. Several trainable parameters and possibly features are linked through a lambda function which returns the final gate parameter. All possible analytical derivatives of the lambda function are calculated at the object initialisation using Sympy. 
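The Sympy bookkeeping referenced in the docstring above can be reproduced in
isolation; a minimal standalone sketch (only ``sympy`` assumed, with an
illustrative two-argument lambda):

    import sympy as sp

    # mirror _calculate_derivatives for f(x0, th0) = x0**2 * th0
    func = lambda x0, th0: x0**2 * th0
    symbols = [sp.Symbol("x0"), sp.Symbol("th0")]
    expr = sp.sympify(func(*symbols))

    # one lambdified partial derivative per lambda argument
    derivatives = [sp.lambdify(symbols, sp.diff(expr, s)) for s in symbols]
    assert derivatives[1](2.0, 3.0) == 4.0  # d f / d th0 = x0**2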
@@ -33,9 +33,10 @@ class Parameter: feature (list or np.ndarray): array containing possible input features x """ - def __init__(self, func, trainable, feature=None): + def __init__(self, func, features=None, trainable=None, nofeatures=False): self._trainable = trainable - self._feature = feature + self._features = features + self._nofeatures = nofeatures # lambda function self.lambdaf = func @@ -48,18 +49,23 @@ def __call__(self): @property def nparams(self): - return len(self._trainable) + """Returns the number of trainable parameters""" + try: + return len(self._trainable) + except TypeError: + return 0 @property def nfeat(self): - return len(self._feature) if isinstance(self._feature, list) else 0 + """Returns the number of features""" + return len(self._features) if isinstance(self._features, list) else 0 def _apply_func(self, function, fixed_params=None): """Applies lambda function and returns final gate parameter""" params = [] - if self._feature is not None: - params.extend(self._feature) - if fixed_params: + if self._features is not None: + params.extend(self._features) + if fixed_params is not None: params.extend(fixed_params) else: params.extend(self._trainable) @@ -70,10 +76,8 @@ def _apply_func(self, function, fixed_params=None): def _calculate_derivatives(self): """Calculates derivatives w.r.t to all trainable parameters""" vars = [] - for i in range(self.nfeat): - vars.append(sp.Symbol(f"x{i}")) - for i in range(self.nparams): - vars.append(sp.Symbol(f"th{i}")) + for i in range(self.lambdaf.__code__.co_argcount): + vars.append(sp.Symbol(f"p{i}")) expr = sp.sympify(self.lambdaf(*vars)) @@ -84,26 +88,35 @@ def _calculate_derivatives(self): return derivatives - def update_parameters(self, trainable=None, feature=None): - """Update gate trainable parameter and feature values""" - if trainable is not None: - self._trainable = trainable + def gettrainable(self): + return self._trainable - if feature is not None and self._feature is not None: - self._feature = feature + def settrainable(self, value): + self._trainable = value - def get_indices(self, start_index): + def getfeatures(self): + return self._features + + def setfeatures(self, value): + self._features = value if not self._nofeatures else None + + trainable = property( + gettrainable, settrainable, doc="I'm the trainable parameters property." 
+ ) + features = property(getfeatures, setfeatures, doc="I'm the features property.") + + def trainable_parameter_indices(self, start_index): """Return list of respective indices of trainable parameters within - a larger trainable parameter list""" + the larger trainable parameter list of a circuit for example""" return (np.arange(self.nparams) + start_index).tolist() - def get_fixed_part(self, trainable_idx): + def unaffected_by(self, trainable_idx): """Retrieve constant term of lambda function with regard to a specific trainable parameter""" params = self._trainable.copy() params[trainable_idx] = 0.0 return self._apply_func(self.lambdaf, fixed_params=params) - def get_partial_derivative(self, trainable_idx): + def partial_derivative(self, trainable_idx): """Get derivative w.r.t a trainable parameter""" deriv = self.derivatives[trainable_idx] return self._apply_func(deriv) diff --git a/tests/test_parameter.py b/tests/test_parameter.py index f83a1f294a..5ec447802c 100644 --- a/tests/test_parameter.py +++ b/tests/test_parameter.py @@ -8,28 +8,29 @@ def test_parameter(): # single feature param = Parameter( lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], + features=[7.0], + trainable=[1.5, 2.0, 3.0], ) - indices = param.get_indices(10) + indices = param.trainable_parameter_indices(10) assert indices == [10, 11, 12] - fixed = param.get_fixed_part(1) + fixed = param.unaffected_by(1) assert fixed == 73.5 - factor = param.get_partial_derivative(3) + factor = param.partial_derivative(3) assert factor == 12.0 - param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) + param.trainable = [15.0, 10.0, 7.0] + param.features = [5.0] gate_value = param() assert gate_value == 865 # single feature, no list param2 = Parameter( lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], + features=[7.0], + trainable=[1.5, 2.0, 3.0], ) gate_value2 = param2() @@ -38,29 +39,38 @@ def test_parameter(): # multiple features param = Parameter( lambda x1, x2, th1, th2, th3: x1**2 * th1 + x2 * th2 * th3, - [1.5, 2.0, 3.0], - feature=[7.0, 4.0], + features=[7.0, 4.0], + trainable=[1.5, 2.0, 3.0], ) - fixed = param.get_fixed_part(1) + fixed = param.unaffected_by(1) assert fixed == 73.5 - factor = param.get_partial_derivative(4) + factor = param.partial_derivative(4) assert factor == 8.0 - param.update_parameters(trainable=np.array([15.0, 10.0, 7.0]), feature=[5.0, 3.0]) + param.trainable = np.array([15.0, 10.0, 7.0]) + param.features = [5.0, 3.0] gate_value = param() assert gate_value == 585 + param = Parameter(lambda th1, th2, th3: th1 + th2 * th3, nofeatures=True) + param.trainable = [1.0, 2.0, 4.0] + param.features = [22.0] + + assert param() == 9.0 + assert param.features == None + def test_parameter_errors(): param = Parameter( lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], + features=[7.0], + trainable=[1.5, 2.0, 3.0], ) - param.update_parameters([1, 1, 1], 1) + param.trainable = [1, 1, 1] + param.features = 1 try: param() @@ -68,23 +78,18 @@ def test_parameter_errors(): except Exception as e: assert True - param.update_parameters([1, 1], [1]) + param.trainable = [1, 1] + param.features = [1] try: param() assert False except Exception as e: assert True - param.update_parameters([1, 1, 1], [1, 1]) + param.trainable = [1, 1, 1] + param.features = [1, 1] try: param() assert False except Exception as e: assert True - - with pytest.raises(TypeError) as e_info: - param = Parameter( - lambda x, th1, 
th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0], - feature=[7.0], - ) From 86d0647b7b9dae80865eabdbc145c6e75d8e7d46 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Wed, 13 Sep 2023 17:54:01 +0200 Subject: [PATCH 18/32] implemented Ale and Matteo comments --- src/qibo/parameter.py | 19 ++++++++++--------- tests/test_parameter.py | 4 ++++ 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index 0731d99285..4b5555c7ed 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -88,23 +88,24 @@ def _calculate_derivatives(self): return derivatives - def gettrainable(self): + @property + def trainable(self): + """I'm the trainable parameters property.""" return self._trainable - def settrainable(self, value): + @trainable.setter + def trainable(self, value): self._trainable = value - def getfeatures(self): + @property + def features(self): + """I'm the features property.""" return self._features - def setfeatures(self, value): + @features.setter + def features(self, value): self._features = value if not self._nofeatures else None - trainable = property( - gettrainable, settrainable, doc="I'm the trainable parameters property." - ) - features = property(getfeatures, setfeatures, doc="I'm the features property.") - def trainable_parameter_indices(self, start_index): """Return list of respective indices of trainable parameters within the larger trainable parameter list of a circuit for example""" diff --git a/tests/test_parameter.py b/tests/test_parameter.py index 5ec447802c..ae79f7b3d6 100644 --- a/tests/test_parameter.py +++ b/tests/test_parameter.py @@ -93,3 +93,7 @@ def test_parameter_errors(): assert False except Exception as e: assert True + + +if __name__ == "__main__": + test_parameter() From 7cd7cdf522bac480ac62fef44a0771e55fea4b20 Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Thu, 14 Sep 2023 08:21:01 +0200 Subject: [PATCH 19/32] new empty initialisation possible --- src/qibo/parameter.py | 23 +++++++++++++++-------- tests/test_gates_gates.py | 5 ++++- tests/test_parameter.py | 4 ++++ 3 files changed, 23 insertions(+), 9 deletions(-) diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index 4b5555c7ed..66e3a9dfa4 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -30,7 +30,8 @@ class Parameter: compose the function, it must be passed by first providing the features and then the parameters, as described in the code example above. trainable (list or np.ndarray): array with initial trainable parameters theta - feature (list or np.ndarray): array containing possible input features x + features (list or np.ndarray): array containing possible input features x + nofeatures (bool): flag to explicitly ban the updating of the features. This simplifies the task of updating Parameter objects simultaneously when some have embedded features and some do not. 
""" def __init__(self, func, features=None, trainable=None, nofeatures=False): @@ -63,12 +64,18 @@ def nfeat(self): def _apply_func(self, function, fixed_params=None): """Applies lambda function and returns final gate parameter""" params = [] - if self._features is not None: - params.extend(self._features) - if fixed_params is not None: - params.extend(fixed_params) + + if self._trainable is None: + parameter_count = function.__code__.co_argcount + params = [0.0] * parameter_count + else: - params.extend(self._trainable) + if self._features is not None: + params.extend(self._features) + if fixed_params is not None: + params.extend(fixed_params) + else: + params.extend(self._trainable) # run function return float(function(*params)) @@ -90,7 +97,7 @@ def _calculate_derivatives(self): @property def trainable(self): - """I'm the trainable parameters property.""" + """Trainable parameters property.""" return self._trainable @trainable.setter @@ -99,7 +106,7 @@ def trainable(self, value): @property def features(self): - """I'm the features property.""" + """Features property.""" return self._features @features.setter diff --git a/tests/test_gates_gates.py b/tests/test_gates_gates.py index 73b3cd5769..60db57a4a1 100644 --- a/tests/test_gates_gates.py +++ b/tests/test_gates_gates.py @@ -257,7 +257,10 @@ def test_rx(backend, theta): # test Parameter assert ( gates.RX( - 0, theta=Parameter(lambda x, th1: 10 * th1 + x, [0.2], feature=[40]) + 0, + theta=Parameter( + lambda x, th1: 10 * th1 + x, trainable=[0.2], features=[40] + ), ).init_kwargs["theta"] == 42 ) diff --git a/tests/test_parameter.py b/tests/test_parameter.py index ae79f7b3d6..41a25112c7 100644 --- a/tests/test_parameter.py +++ b/tests/test_parameter.py @@ -61,6 +61,10 @@ def test_parameter(): assert param() == 9.0 assert param.features == None + param = Parameter(lambda th1, th2, th3: 3 + th1 + th2 * th3) + + assert param() == 3.0 + def test_parameter_errors(): param = Parameter( From 1eb9810b29606221bfa9ded1759586157492da6f Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Thu, 14 Sep 2023 11:14:07 +0200 Subject: [PATCH 20/32] codecov issues solved --- tests/test_parameter.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/test_parameter.py b/tests/test_parameter.py index 41a25112c7..3feeae03ac 100644 --- a/tests/test_parameter.py +++ b/tests/test_parameter.py @@ -45,6 +45,8 @@ def test_parameter(): fixed = param.unaffected_by(1) assert fixed == 73.5 + assert param.nparams == 3 + assert param.nfeat == 2 factor = param.partial_derivative(4) assert factor == 8.0 @@ -55,11 +57,18 @@ def test_parameter(): assert gate_value == 585 param = Parameter(lambda th1, th2, th3: th1 + th2 * th3, nofeatures=True) + + assert param.nparams == 0 + assert param.nfeat == 0 + param.trainable = [1.0, 2.0, 4.0] param.features = [22.0] + assert param.nparams == 3 + assert param.nfeat == 0 assert param() == 9.0 assert param.features == None + assert param.trainable == [1.0, 2.0, 4.0] param = Parameter(lambda th1, th2, th3: 3 + th1 + th2 * th3) @@ -97,7 +106,3 @@ def test_parameter_errors(): assert False except Exception as e: assert True - - -if __name__ == "__main__": - test_parameter() From 424b687d3cbe53ea740b6f7429a1d1275f91feec Mon Sep 17 00:00:00 2001 From: acse-b99192e1 Date: Thu, 14 Sep 2023 11:26:57 +0200 Subject: [PATCH 21/32] codecov issues solved --- src/qibo/parameter.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index 
66e3a9dfa4..b52c72b342 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -15,14 +15,14 @@ class Parameter: from qibo.parameter import Parameter param = Parameter( lambda x, th1, th2, th3: x**2 * th1 + th2 * th3**2, - [1.5, 2.0, 3.0], - feature=[7.0], + features=[7.0], + trainable=[1.5, 2.0, 3.0], ) partial_derivative = param.get_partial_derivative(3) param.update_parameters(trainable=[15.0, 10.0, 7.0], feature=[5.0]) - gate_value = param() + param_value = param() Args: From 6f4f8441da6bef72d077af8c45ae72a1bf9df2f3 Mon Sep 17 00:00:00 2001 From: MatteoRobbiati Date: Tue, 19 Sep 2023 14:41:18 +0200 Subject: [PATCH 22/32] raise error if ncomponents is not correct --- src/qibo/parameter.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py index b52c72b342..eb6579677d 100644 --- a/src/qibo/parameter.py +++ b/src/qibo/parameter.py @@ -34,14 +34,20 @@ class Parameter: nofeatures (bool): flag to explicitly ban the updating of the features. This simplifies the task of updating Parameter objects simultaneously when some have embedded features and some do not. """ - def __init__(self, func, features=None, trainable=None, nofeatures=False): + def __init__(self, func, trainable=None, features=None): self._trainable = trainable self._features = features - self._nofeatures = nofeatures + if self.nfeat + self.nparams != func.__code__.co_argcount: + raise_error( + TypeError, + f"{self.nfeat + self.nparams} parameters are provided, but {func.__code__.co_argcount} are required, please initialize features and trainable according to the defined function.", + ) # lambda function self.lambdaf = func + # calculate derivatives + # maybe here use JAX ? self.derivatives = self._calculate_derivatives() def __call__(self): @@ -61,6 +67,11 @@ def nfeat(self): """Returns the number of features""" return len(self._features) if isinstance(self._features, list) else 0 + @property + def ncomponents(self): + """Returns the number of elements which compose the Parameter""" + return self.nparams + self.nfeat + def _apply_func(self, function, fixed_params=None): """Applies lambda function and returns final gate parameter""" params = [] @@ -111,7 +122,7 @@ def features(self): @features.setter def features(self, value): - self._features = value if not self._nofeatures else None + self._features = value def trainable_parameter_indices(self, start_index): """Return list of respective indices of trainable parameters within From 7dd425108aa4ed1754829edc1045871371f12304 Mon Sep 17 00:00:00 2001 From: MatteoRobbiati Date: Wed, 20 Sep 2023 11:29:02 +0200 Subject: [PATCH 23/32] restoring derivative and circuit files --- src/qibo/derivative.py | 353 ------------------------------------- src/qibo/models/circuit.py | 157 ++--------------- tests/test_derivative.py | 54 ++---- 3 files changed, 20 insertions(+), 544 deletions(-) diff --git a/src/qibo/derivative.py b/src/qibo/derivative.py index 0898322ce1..65d4ef87b0 100644 --- a/src/qibo/derivative.py +++ b/src/qibo/derivative.py @@ -1,84 +1,7 @@ -import copy -import random - import numpy as np -from qibo.backends import GlobalBackend from qibo.config import raise_error from qibo.hamiltonians.abstract import AbstractHamiltonian -from qibo.hamiltonians.hamiltonians import SymbolicHamiltonian -from qibo.models.parameter import Parameter -from qibo.symbols import Z - - -def calculate_gradients(optimizer, feature): - """ - Full parameter-shift rule's implementation - Args: - this_feature: np.array 
From 7dd425108aa4ed1754829edc1045871371f12304 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Wed, 20 Sep 2023 11:29:02 +0200
Subject: [PATCH 23/32] restoring derivative and circuit files

---
 src/qibo/derivative.py     | 353 -------------------------------------
 src/qibo/models/circuit.py | 157 ++---------------
 tests/test_derivative.py   |  54 ++----
 3 files changed, 20 insertions(+), 544 deletions(-)

diff --git a/src/qibo/derivative.py b/src/qibo/derivative.py
index 0898322ce1..65d4ef87b0 100644
--- a/src/qibo/derivative.py
+++ b/src/qibo/derivative.py
@@ -1,84 +1,7 @@
-import copy
-import random
-
 import numpy as np

-from qibo.backends import GlobalBackend
 from qibo.config import raise_error
 from qibo.hamiltonians.abstract import AbstractHamiltonian
-from qibo.hamiltonians.hamiltonians import SymbolicHamiltonian
-from qibo.models.parameter import Parameter
-from qibo.symbols import Z
-
-
-def calculate_gradients(optimizer, feature):
-    """
-    Full parameter-shift rule's implementation
-    Args:
-        this_feature: np.array 2**nqubits-long containing the state vector assciated to a data
-    Returns: np.array of the observable's gradients with respect to the variational parameters
-    """
-
-    obs_gradients = np.zeros(optimizer.nparams, dtype=np.float64)
-
-    ham = SymbolicHamiltonian(
-        np.prod([Z(i) for i in range(1)]), backend=GlobalBackend()
-    )
-
-    # parameter shift
-    if optimizer.options["shift_rule"] == "psr":
-        if isinstance(optimizer.paramInputs, np.ndarray):
-            for ipar in range(optimizer.nparams):
-                obs_gradients[ipar] = parameter_shift(
-                    optimizer._circuit,
-                    ham,
-                    ipar,
-                    initial_state=None,
-                    scale_factor=1,
-                    nshots=None,
-                )
-        else:
-            count = 0
-            for ipar, Param in enumerate(optimizer.paramInputs):
-                for nparam in range(Param.nparams):
-                    scaling = Param.get_scaling_factor(nparam, feature)
-
-                    obs_gradients[count] = parameter_shift(
-                        optimizer._circuit,
-                        ham,
-                        ipar,
-                        initial_state=None,
-                        scale_factor=scaling,
-                        nshots=None,
-                    )
-                    count += 1
-
-    # stochastic parameter shift
-    elif optimizer.options["shift_rule"] == "spsr":
-        for ipar, Param in enumerate(optimizer.parameters):
-            ntrainable_params = Param.nparams
-            obs_gradients[ipar : ipar + ntrainable_params] = stochastic_parameter_shift(
-                optimizer._circuit,
-                ham,
-                ipar,
-                initial_state=None,
-                scale_factor=1,
-                nshots=None,
-            )
-
-    # finite differences (central difference)
-    else:
-        for ipar in range(optimizer.nparams):
-            obs_gradients[ipar] = finite_differences(
-                optimizer._circuit,
-                ham,
-                ipar,
-                initial_state=None,
-                scale_factor=1,
-                nshots=None,
-            )
-
-    return obs_gradients


 def parameter_shift(
@@ -232,282 +155,6 @@ def circuit(nqubits = 1):
     # float() necessary to not return a 0-dim ndarray
     result = float(generator_eigenval * (forward - backward) * scale_factor)

-    return result
-
-
-def generate_new_stochastic_params(Param, ipar):
-    """Generates the three-gate parameters needed for the stochastic parameter-shift rule"""
-
-    sampling = random.random()
-    trainable_param = Param._variational_parameters[ipar]
-    F = Param.get_fixed_part(ipar)
-    scaling = Param.get_scaling_factor(ipar)
-
-    return [sampling * F, trainable_param, (1 - sampling) * F], scaling
-
-
-def stochastic_parameter_shift(
-    circuit,
-    hamiltonian,
-    parameter_index,
-    parameter: Parameter,
-    initial_state=None,
-    nshots=None,
-):
-    """In this method the stochastic parameter shift rule (SPSR) is implemented.
-    Given a circuit U and an observable H, the SPSR allows to calculate the derivative
-    of the expected value of H on the final state with respect to a variational
-    parameter of the circuit. The SPSR can calculate gradient approximations on
-    a larger family of gates than the standard PSR.
-    There is also the possibility of setting a scale factor. It is useful when a
-    circuit's parameter is obtained by combination of a variational
-    parameter and an external object, such as a training variable in a Quantum
-    Machine Learning problem. For example, performing a re-uploading strategy
-    to embed some data into a circuit, we apply to the quantum state rotations
-    whose angles are in the form: theta' = theta * x, where theta is a variational
-    parameter and x an input variable. The PSR allows to calculate the derivative
-    with respect of theta' but, if we want to optimize a system with respect its
-    variational parameters we need to "free" this procedure from the x depencency.
-    If the `scale_factor` is not provided, it is set equal to one and doesn't
-    affect the calculation.
-    If the PSR is needed to be executed on a real quantum device, it is important
-    to set `nshots` to some integer value. This enables the execution on the
-    hardware by calling the proper methods.
-
-    Args:
-        circuit (:class:`qibo.models.circuit.Circuit`): custom quantum circuit.
-        hamiltonian (:class:`qibo.hamiltonians.Hamiltonian`): target observable.
-            if you want to execute on hardware, a symbolic hamiltonian must be
-            provided as follows (example with Pauli Z and ``nqubits=1``):
-            ``SymbolicHamiltonian(np.prod([ Z(i) for i in range(1) ]))``.
-        parameter_index (int): the index which identifies the target parameter
-            in the ``circuit.get_parameters()`` list.
-        initial_state (ndarray, optional): initial state on which the circuit
-            acts. Default is ``None``.
-        scale_factor (float, optional): parameter scale factor. Default is ``1``.
-        nshots (int, optional): number of shots if derivative is evaluated on
-            hardware. If ``None``, the simulation mode is executed.
-            Default is ``None``.
-
-    Returns:
-        (float): Value of the derivative of the expectation value of the hamiltonian
-            with respect to the target variational parameter.
-
-    Example:
-
-        .. testcode::
-
-            import qibo
-            import numpy as np
-            from qibo import hamiltonians, gates
-            from qibo.models import Circuit
-            from qibo.derivative import parameter_shift
-
-            # defining an observable
-            def hamiltonian(nqubits = 1):
-                m0 = (1/nqubits)*hamiltonians.Z(nqubits).matrix
-                ham = hamiltonians.Hamiltonian(nqubits, m0)
-
-                return ham
-
-            # defining a dummy circuit
-            def circuit(nqubits = 1):
-                c = Circuit(nqubits = 1)
-                c.add(gates.RY(q = 0, theta = 0))
-                c.add(gates.RX(q = 0, theta = 0))
-                c.add(gates.M(0))
-
-                return c
-
-            # initializing the circuit
-            c = circuit(nqubits = 1)
-
-            # some parameters
-            test_params = np.random.randn(2)
-            c.set_parameters(test_params)
-
-            test_hamiltonian = hamiltonian()
-
-            # running the psr with respect to the two parameters
-            grad_0 = stochastic_parameter_shift(circuit=c, hamiltonian=test_hamiltonian, parameter_index=0)
-            grad_1 = stochastic_parameter_shift(circuit=c, hamiltonian=test_hamiltonian, parameter_index=1)
-
-    """
-
-    # some raise_error
-    if parameter_index > len(circuit.get_parameters()):
-        raise_error(ValueError, """This index is out of bounds.""")
-
-    if not isinstance(hamiltonian, AbstractHamiltonian):
-        raise_error(
-            TypeError,
-            "hamiltonian must be a qibo.hamiltonians.Hamiltonian or qibo.hamiltonians.SymbolicHamiltonian object",
-        )
-
-    # inheriting hamiltonian's backend
-    backend = hamiltonian.backend
-
-    # getting the gate's type
-    gate = circuit.associate_gates_with_parameters()[parameter_index]
-
-    # getting the generator_eigenvalue
-    generator_eigenval = gate.generator_eigenvalue()
-
-    # defining the shift according to the psr
-    s = np.pi / (4 * generator_eigenval)
-
-    # saving original parameters and making a copy
-    original = np.asarray(circuit.get_parameters()).copy()
-    shifted = original.copy()
-
-    # new circuit
-    ancilla_gate = copy.deepcopy(gate)
-    ancilla_gate2 = copy.deepcopy(gate)
-    circuit.add(ancilla_gate, position=parameter_index)
-    circuit.add(ancilla_gate2, position=parameter_index)
-
-    # gradients
-    grads = np.zeros(parameter.nparams)
-
-    # forward shift
-    shifted = np.insert(shifted, parameter_index, [0, 0])
-
-    for iparam in range(parameter.nparams):
-        new_params, scaling = generate_new_stochastic_params(parameter, iparam)
-        new_params[1] += s
-        shifted[parameter_index : parameter_index + 3] = new_params
-
-        circuit.set_parameters(shifted)
-
-        if nshots is None:
-            # forward evaluation
-            forward = hamiltonian.expectation(
-                backend.execute_circuit(
-                    circuit=circuit, initial_state=initial_state
-                ).state()
-            )
-
-            # backward shift and evaluation
-            shifted[parameter_index + 1] -= s * 2
-            circuit.set_parameters(shifted)
-
-            backward = hamiltonian.expectation(
-                backend.execute_circuit(
-                    circuit=circuit, initial_state=initial_state
-                ).state()
-            )
-
-        # same but using expectation from samples
-        else:
-            forward = backend.execute_circuit(
-                circuit=circuit, initial_state=initial_state, nshots=nshots
-            )
-
-            shifted[parameter_index + 1] -= s * 2
-            circuit.set_parameters(shifted)
-
-            backward = backend.execute_circuit(
-                circuit=circuit, initial_state=initial_state, nshots=nshots
-            ).expectation_from_samples(hamiltonian)
-
-        # float() necessary to not return a 0-dim ndarray
-        result = float(generator_eigenval * (forward - backward) * scaling)
-        grads[iparam] = result
-
-    # cleanup
-    circuit.remove(ancilla_gate)
-    circuit.remove(ancilla_gate2)
-    circuit.set_parameters(original)
-
-    return grads
-
-
-def finite_differences(
-    circuit,
-    hamiltonian,
-    parameter_index,
-    initial_state=None,
-    step_size=1e-1,
-    nshots=None,
-):
-    """
-    Calculate derivative of the expectation value of `hamiltonian` on the
-    final state obtained by executing `circuit` on `initial_state` with
-    respect to the variational parameter identified by `parameter_index`
-    in the circuit's parameters list. This method can be used only in
-    exact simulation mode.
-
-    Args:
-        circuit (:class:`qibo.models.circuit.Circuit`): custom quantum circuit.
-        hamiltonian (:class:`qibo.hamiltonians.Hamiltonian`): target observable.
-            if you want to execute on hardware, a symbolic hamiltonian must be
-            provided as follows (example with Pauli Z and ``nqubits=1``):
-            ``SymbolicHamiltonian(np.prod([ Z(i) for i in range(1) ]))``.
-        parameter_index (int): the index which identifies the target parameter
-            in the ``circuit.get_parameters()`` list.
-        initial_state (ndarray, optional): initial state on which the circuit
-            acts. Default is ``None``.
-        step_size (float): step size used to evaluate the finite difference
-            (default 1e-7).
-
-    Returns:
-        (float): Value of the derivative of the expectation value of the hamiltonian
-            with respect to the target variational parameter.
-
-    """
-
-    if parameter_index > len(circuit.get_parameters()):
-        raise_error(ValueError, f"""Index {parameter_index} is out of bounds.""")
-
-    if not isinstance(hamiltonian, AbstractHamiltonian):
-        raise_error(
-            TypeError,
-            "hamiltonian must be a qibo.hamiltonians.Hamiltonian or qibo.hamiltonians.SymbolicHamiltonian object",
-        )
-
-    backend = hamiltonian.backend
-
-    # parameters copies
-    parameters = np.asarray(circuit.get_parameters()).copy()
-    shifted = parameters.copy()
-
-    # shift the parameter_index element
-    shifted[parameter_index] += step_size
-    circuit.set_parameters(shifted)
-
-    if nshots is None:
-        # forward evaluation
-        forward = hamiltonian.expectation(
-            backend.execute_circuit(
-                circuit=circuit, initial_state=initial_state
-            ).state()
-        )
-
-        # backward shift and evaluation
-        shifted[parameter_index] -= 2 * step_size
-        circuit.set_parameters(shifted)
-
-        backward = hamiltonian.expectation(
-            backend.execute_circuit(
-                circuit=circuit, initial_state=initial_state
-            ).state()
-        )
-
-    # same but using expectation from samples
-    else:
-        forward = backend.execute_circuit(
-            circuit=circuit, initial_state=initial_state, nshots=nshots
-        ).expectation_from_samples(hamiltonian)
-
-        shifted[parameter_index] -= 2 * step_size
-        circuit.set_parameters(shifted)
-
-        backward = backend.execute_circuit(
-            circuit=circuit, initial_state=initial_state, nshots=nshots
-        ).expectation_from_samples(hamiltonian)
-
-    circuit.set_parameters(parameters)
-
-    result = (forward - backward) / (2 * step_size)

     return result

diff --git a/src/qibo/models/circuit.py b/src/qibo/models/circuit.py
index 949d21463e..94a8f8ab28 100644
--- a/src/qibo/models/circuit.py
+++ b/src/qibo/models/circuit.py
@@ -28,16 +28,6 @@ def append(self, gate):
         self.set.add(gate)
         self.nparams += gate.nparams

-    def insert(self, pos, gate):
-        super().insert(pos, gate)
-        self.set.add(gate)
-        self.nparams += gate.nparams
-
-    def remove(self, gate):
-        super().remove(gate)
-        self.set.remove(gate)
-        self.nparams -= gate.nparams
-

 class _Queue(list):
     """List that holds the queue of gates of a circuit.
@@ -109,44 +99,6 @@ def append(self, gate: gates.Gate):
             self.moments[idx][q] = gate
         self.moment_index[q] = idx + 1

-    def insert(self, pos, gate: gates.Gate):
-        super().insert(pos, gate)
-        if gate.qubits:
-            qubits = gate.qubits
-        else:  # special gate acting on all qubits
-            qubits = tuple(range(self.nqubits))
-
-        if isinstance(gate, gates.M):
-            self.nmeasurements += 1
-
-        # calculate moment index for this gate
-        idx = max(self.moment_index[q] for q in qubits)
-        for q in qubits:
-            if idx >= len(self.moments):
-                # Add a moment
-                self.moments.insert(pos, len(self.moments[-1]) * [None])
-            self.moments[pos][q] = gate
-            self.moment_index[q] = idx + 1
-
-    def remove(self, gate):
-        pos = super().index(gate)
-        super().remove(gate)
-
-        if isinstance(gate, gates.M):
-            self.nmeasurements -= 1
-
-        if gate.qubits:
-            qubits = gate.qubits
-        else:  # special gate acting on all qubits
-            qubits = tuple(range(self.nqubits))
-
-        # calculate moment index for this gate
-        for q in qubits:
-            self.moment_index[q] -= 1
-        del self.moments[pos]
-
-        return pos
-

 class Circuit:
     """Circuit object which holds a list of gates.
@@ -490,26 +442,6 @@ def decompose(self, *free: int):
             decomp_circuit.add(gate.decompose(*free))
         return decomp_circuit

-    def to_clifford(self):
-        """Translate a circuit into an equivalent one composed of only Clifford gates.
-
-        In `Qibo` we refers to [``X``, ``CNOT``, ``RX(pi/2)``, ``RZ(theta)``] as
-        Clifford gates.
-
-        Returns:
-            Circuit object containing only Clifford gates.
-        """
-
-        clifford_circuit = self.__class__(**self.init_kwargs)
-        # cycle on gates replacing non-clifford with clifford
-        for gate in self.queue:
-            if gate.is_clifford():
-                clifford_circuit.add(gate)
-            else:
-                clifford_circuit.add(gate.decompose_into_clifford())
-
-        return clifford_circuit
-
     def with_pauli_noise(self, noise_map: NoiseMapType):
         """Creates a copy of the circuit with Pauli noise gates after each gate.
@@ -583,7 +515,7 @@ def with_pauli_noise(self, noise_map: NoiseMapType):
                     noisy_circuit.add(noise_gate)
         return noisy_circuit

-    def add(self, gate, position=-1):
+    def add(self, gate):
         """Add a gate to a given queue.

         Args:
@@ -599,11 +531,7 @@ def add(self, gate, position=-1):
         """
         if isinstance(gate, collections.abc.Iterable):
             for g in gate:
-                if position >= 0:
-                    self.add(g, position)
-                    position += 1
-                else:
-                    self.add(g)
+                self.add(g)

         else:
             if self.accelerators:  # pragma: no cover
@@ -624,18 +552,12 @@ def add(self, gate, position=-1):
             if not isinstance(gate, gates.Gate):
                 raise_error(TypeError, f"Unknown gate type {type(gate)}.")

-            if self._final_state is not None and position == -1:
+            if self._final_state is not None:
                 raise_error(
                     RuntimeError,
                     "Cannot add gates to a circuit after it is executed.",
                 )

-            if isinstance(gate, gates.M) and position >= 0:
-                raise_error(
-                    RuntimeError,
-                    "Cannot add Measurement gate at a specific location.",
-                )
-
             for q in gate.target_qubits:
                 if q >= self.nqubits:
                     raise_error(
@@ -675,78 +597,19 @@ def add(self, gate, position=-1):
                 return gate.result

             else:
-                if position >= 0:
-                    self.queue.insert(position, gate)
-                else:
-                    self.queue.append(gate)
-
-                for measurement in list(self.measurements):
-                    if set(measurement.qubits) & set(gate.qubits):
-                        measurement.collapse = False
-                        self.repeated_execution = False
-                        self.measurements.remove(measurement)
-
-                if isinstance(gate, gates.UnitaryChannel):
-                    self.repeated_execution = not self.density_matrix
-                if isinstance(gate, gates.ParametrizedGate):
-                    if position >= 0:
-                        param_loc = 0
-                        trainable_loc = 0
-                        for g in self.queue[:position]:
-                            if isinstance(g, gates.ParametrizedGate):
-                                param_loc += 1
-                                if g.trainable:
-                                    trainable_loc += 1
-
-                        self.parametrized_gates.insert(param_loc, gate)
-                        if gate.trainable:
-                            self.trainable_gates.insert(trainable_loc, gate)
-
-                    else:
-                        self.parametrized_gates.append(gate)
-                        if gate.trainable:
-                            self.trainable_gates.append(gate)
-
-    def remove(self, gate, replacement_gates=[]):
-        if isinstance(gate, list):
-            for g in gate:
-                self.remove(g)
-
-        else:
-            if isinstance(gate, gates.M):
-                # The following loop is useful when two circuits are added together:
-                # all the gates in the basis of the measure gates should not
-                # be added to the new circuit, otherwise once the measure gate is added in the circuit
-                # there will be two of the same.
-
-                for base in gate.basis:
-                    if base not in self.queue:
-                        self.remove(base)
-
-                pos = self.queue.remove(gate)
-
-                if gate.collapse:
-                    self.repeated_execution = False
-                else:
-                    self.measurements.remove(gate)
-
-            else:
-                pos = self.queue.remove(gate)
+                self.queue.append(gate)

                 for measurement in list(self.measurements):
-                    if not bool(set(measurement.qubits) & set(gate.qubits)):
-                        measurement.collapse = False
-                        self.repeated_execution = False
+                    if set(measurement.qubits) & set(gate.qubits):
+                        measurement.collapse = True
+                        self.repeated_execution = True
                         self.measurements.remove(measurement)

                 if isinstance(gate, gates.UnitaryChannel):
-                    self.repeated_execution = self.density_matrix
+                    self.repeated_execution = not self.density_matrix
                 if isinstance(gate, gates.ParametrizedGate):
-                    self.parametrized_gates.remove(gate)
+                    self.parametrized_gates.append(gate)
                     if gate.trainable:
-                        self.trainable_gates.remove(gate)
-
-            for rep_gate in replacement_gates:
-                self.add(rep_gate, pos)
+                        self.trainable_gates.append(gate)

     @property
     def measurement_tuples(self):

diff --git a/tests/test_derivative.py b/tests/test_derivative.py
index c74a669888..ec7a501720 100644
--- a/tests/test_derivative.py
+++ b/tests/test_derivative.py
@@ -5,11 +5,9 @@
 from qibo.derivative import finite_differences, parameter_shift
 from qibo.symbols import Z

-qibo.set_backend("tensorflow")
-

 # defining an observable
-def hamiltonian(nqubits, backend=GlobalBackend()):
+def hamiltonian(nqubits, backend):
     return hamiltonians.hamiltonians.SymbolicHamiltonian(
         np.prod([Z(i) for i in range(nqubits)]), backend=backend
     )
@@ -86,39 +84,18 @@ def test_standard_parameter_shift(backend, nshots, atol, scale_factor, grads):
     backend.assert_allclose(grad_2, grads[2], atol=atol)


-def gradient_exact():
-    backend = GlobalBackend()
-
-    test_params = tf.Variable(np.linspace(0.1, 1, 3))
-
-    with tf.GradientTape() as tape:
-        c = circuit(nqubits=1)
-        c.set_parameters(test_params)
-
-        ham = hamiltonian(1)
-        results = ham.expectation(
-            backend.execute_circuit(circuit=c, initial_state=None).state()
-        )
-
-    gradients = tape.gradient(results, test_params)
-
-    return gradients
-
-
-@pytest.mark.parametrize("nshots, atol", [(None, 1e-1), (100000, 1e-1)])
-def test_finite_differences(backend, nshots, atol):
-    # exact gradients
-    grads = gradient_exact()
-
+@pytest.mark.parametrize("step_size", [10**-i for i in range(5, 10, 1)])
+def test_finite_differences(backend, step_size):
     # initializing the circuit
     c = circuit(nqubits=1)

     # some parameters
-    # we know the derivative's values with these params
     test_params = np.linspace(0.1, 1, 3)
+    grads = [-8.51104358e-02, -5.20075970e-01, 0]
+    atol = 1e-6
     c.set_parameters(test_params)

-    test_hamiltonian = hamiltonian(nqubits=1)
+    test_hamiltonian = hamiltonian(nqubits=1, backend=backend)

     # testing parameter out of bounds
     with pytest.raises(ValueError):
@@ -128,28 +105,17 @@

     # testing hamiltonian type
     with pytest.raises(TypeError):
-        grad_0 = finite_differences(
-            circuit=c, hamiltonian=c, parameter_index=0, nshots=nshots
-        )
+        grad_0 = finite_differences(circuit=c, hamiltonian=c, parameter_index=0)

     # executing all the procedure
     grad_0 = finite_differences(
-        circuit=c,
-        hamiltonian=test_hamiltonian,
-        parameter_index=0,
-        nshots=nshots,
+        circuit=c, hamiltonian=test_hamiltonian, parameter_index=0, step_size=step_size
     )
     grad_1 = finite_differences(
-        circuit=c,
-        hamiltonian=test_hamiltonian,
-        parameter_index=1,
-        nshots=nshots,
+        circuit=c, hamiltonian=test_hamiltonian, parameter_index=1, step_size=step_size
     )
     grad_2 = finite_differences(
-        circuit=c,
-        hamiltonian=test_hamiltonian,
-        parameter_index=2,
-        nshots=nshots,
+        circuit=c, hamiltonian=test_hamiltonian, parameter_index=2, step_size=step_size
     )

     # check of known values
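To see the restored public API in action, a short sketch mirroring the updated tests (illustrative only; the gate choice and angles are assumptions, and the gradient values depend on them):

    import numpy as np
    from qibo import gates, hamiltonians
    from qibo.derivative import finite_differences, parameter_shift
    from qibo.models import Circuit
    from qibo.symbols import Z

    # single-qubit circuit with two trainable rotation angles
    c = Circuit(1)
    c.add(gates.RY(q=0, theta=0.0))
    c.add(gates.RX(q=0, theta=0.0))
    c.add(gates.M(0))
    c.set_parameters([0.1, 0.7])

    ham = hamiltonians.SymbolicHamiltonian(Z(0))

    # exact derivative of <Z> w.r.t. the first angle via the parameter-shift rule
    psr_grad = parameter_shift(circuit=c, hamiltonian=ham, parameter_index=0)

    # central finite-difference approximation of the same derivative
    fd_grad = finite_differences(
        circuit=c, hamiltonian=ham, parameter_index=0, step_size=1e-7
    )

    # the two estimates should agree to roughly O(step_size**2)
    assert abs(psr_grad - fd_grad) < 1e-6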
From 9f13a640c10644f14e9ed54fa32fa8173fd1e5c6 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Fri, 22 Sep 2023 15:42:23 +0200
Subject: [PATCH 24/32] remove second parameter file

---
 src/qibo/models/parameter.py | 42 ------------------------------------
 1 file changed, 42 deletions(-)
 delete mode 100644 src/qibo/models/parameter.py

diff --git a/src/qibo/models/parameter.py b/src/qibo/models/parameter.py
deleted file mode 100644
index 7b3c4a92d9..0000000000
--- a/src/qibo/models/parameter.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""Model for combining trainable parameters and possible features into circuit parameters."""
-
-
-class Parameter:
-    def __init__(self, func, variational_parameters, features=None):
-        self._variational_parameters = variational_parameters
-        self._featurep = features
-        self.nparams = len(variational_parameters)
-        self.lambdaf = func
-
-    def _apply_func(self, fixed_params=None):
-        params = []
-        if self._featurep is not None:
-            params.append(self._featurep)
-        if fixed_params:
-            params.extend(fixed_params)
-        else:
-            params.extend(self._variational_parameters)
-        return self.lambdaf(*params)
-
-    def _update_params(self, trainablep=None, feature=None):
-        if trainablep:
-            self._variational_parameters = trainablep
-        if feature:
-            self._featurep = feature
-
-    def get_params(self, trainablep=None, feature=None):
-        self._update_params(trainablep=trainablep, feature=feature)
-        return self._apply_func()
-
-    def get_indices(self, start_index):
-        return [start_index + i for i in range(self.nparams)]
-
-    def get_fixed_part(self, trainablep_idx):
-        params = [0] * self.nparams
-        params[trainablep_idx] = self._variational_parameters[trainablep_idx]
-        return self._apply_func(fixed_params=params)
-
-    def get_scaling_factor(self, trainablep_idx):
-        params = [0] * self.nparams
-        params[trainablep_idx] = 1.0
-        return self._apply_func(fixed_params=params)

From 6db3884406932f97aed477ad83cafef2151a31b3 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Mon, 25 Sep 2023 09:58:31 +0200
Subject: [PATCH 25/32] features and trainable as public attributes

---
 src/qibo/parameter.py | 36 +++++++++---------------------------
 1 file changed, 9 insertions(+), 27 deletions(-)

diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py
index eb6579677d..936507f088 100644
--- a/src/qibo/parameter.py
+++ b/src/qibo/parameter.py
@@ -35,8 +35,8 @@ class Parameter:
     """

     def __init__(self, func, trainable=None, features=None):
-        self._trainable = trainable
-        self._features = features
+        self.trainable = trainable
+        self.features = features

         if self.nfeat + self.nparams != func.__code__.co_argcount:
             raise_error(
@@ -58,14 +58,14 @@ def nparams(self):
         """Returns the number of trainable parameters"""
         try:
-            return len(self._trainable)
+            return len(self.trainable)
         except TypeError:
             return 0

     @property
     def nfeat(self):
         """Returns the number of features"""
-        return len(self._features) if isinstance(self._features, list) else 0
+        return len(self.features) if isinstance(self.features, list) else 0

     @property
     def ncomponents(self):
@@ -76,17 +76,17 @@ def _apply_func(self, function, fixed_params=None):
         """Applies lambda function and returns final gate parameter"""
         params = []

-        if self._trainable is None:
+        if self.trainable is None:
             parameter_count = function.__code__.co_argcount
             params = [0.0] * parameter_count

         else:
-            if self._features is not None:
-                params.extend(self._features)
+            if self.features is not None:
+                params.extend(self.features)
             if fixed_params is not None:
                 params.extend(fixed_params)
             else:
-                params.extend(self._trainable)
+                params.extend(self.trainable)

         # run function
         return float(function(*params))
@@ -106,24 +106,6 @@ def _calculate_derivatives(self):

         return derivatives

-    @property
-    def trainable(self):
-        """Trainable parameters property."""
-        return self._trainable
-
-    @trainable.setter
-    def trainable(self, value):
-        self._trainable = value
-
-    @property
-    def features(self):
-        """Features property."""
-        return self._features
-
-    @features.setter
-    def features(self, value):
-        self._features = value
-
     def trainable_parameter_indices(self, start_index):
         """Return list of respective indices of trainable parameters within
         the larger trainable parameter list of a circuit for example"""
@@ -131,7 +113,7 @@ def trainable_parameter_indices(self, start_index):

     def unaffected_by(self, trainable_idx):
         """Retrieve constant term of lambda function with regard to a specific trainable parameter"""
-        params = self._trainable.copy()
+        params = self.trainable.copy()
         params[trainable_idx] = 0.0
         return self._apply_func(self.lambdaf, fixed_params=params)

From 65574a67491f907ac4764d9c98badc2fc0afd584 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Mon, 25 Sep 2023 14:48:58 +0200
Subject: [PATCH 26/32] remove apply_function

---
 src/qibo/parameter.py | 97 ++++++++++++++++++++++++-------------------
 1 file changed, 54 insertions(+), 43 deletions(-)

diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py
index 936507f088..197e2e94b7 100644
--- a/src/qibo/parameter.py
+++ b/src/qibo/parameter.py
@@ -4,6 +4,22 @@
 from qibo.config import raise_error


+def calculate_derivatives(func):
+    """Calculates derivatives w.r.t. all parameters of a target function `func`."""
+    vars = []
+    for i in range(func.__code__.co_argcount):
+        vars.append(sp.Symbol(f"p{i}"))
+
+    expr = sp.sympify(func(*vars))
+
+    derivatives = []
+    for i in range(len(vars)):
+        derivative_expr = sp.diff(expr, vars[i])
+        derivatives.append(sp.lambdify(vars, derivative_expr))
+
+    return derivatives
+
+
 class Parameter:
     """Object which allows for variational gate parameters. Several trainable parameters
     and possibly features are linked through a lambda function which returns the
@@ -29,12 +45,12 @@ class Parameter:
         func (function): lambda function which builds the gate parameter. If both features and trainable parameters
            compose the function, it must be passed by first providing the features and then the parameters, as
            described in the code example above.
-        trainable (list or np.ndarray): array with initial trainable parameters theta
-        features (list or np.ndarray): array containing possible input features x
+        features (list or np.ndarray): array containing possible input features x.
+        trainable (list or np.ndarray): array with initial trainable parameters theta.
         nofeatures (bool): flag to explicitly ban the updating of the features. This simplifies the task of updating Parameter objects simultaneously when some have embedded features and some do not.
     """

-    def __init__(self, func, trainable=None, features=None):
+    def __init__(self, func, trainable=[], features=[]):
         self.trainable = trainable
         self.features = features

@@ -48,11 +64,35 @@ def __init__(self, func, trainable=[], features=[]):

         # calculate derivatives
         # maybe here use JAX ?
-        self.derivatives = self._calculate_derivatives()
+        self.derivatives = calculate_derivatives(func=self.lambdaf)
+
+    def __call__(self, features=None, trainable=None):
+        """Return parameter value with given features and/or trainable."""
+
+        params = []

-    def __call__(self):
-        """Update values with trainable parameter and calculate current gate parameter"""
-        return self._apply_func(self.lambdaf)
+        if features is None:
+            params.extend(self.features)
+        else:
+            if len(features) != self.nfeat:
+                raise_error(
+                    TypeError,
+                    f"The number of features provided is not compatible with the problem's dimensionality, which is {self.nfeat}.",
+                )
+            else:
+                params.extend(features)
+        if trainable is None:
+            params.extend(self.trainable)
+        else:
+            if len(trainable) != self.nparams:
+                raise_error(
+                    TypeError,
+                    f"The number of trainable parameters provided is different from the number required, which is {self.nparams}.",
+                )
+            else:
+                params.extend(trainable)
+
+        return self.lambdaf(*params)

     @property
     def nparams(self):
@@ -72,40 +112,6 @@ def ncomponents(self):
         """Returns the number of elements which compose the Parameter"""
         return self.nparams + self.nfeat

-    def _apply_func(self, function, fixed_params=None):
-        """Applies lambda function and returns final gate parameter"""
-        params = []
-
-        if self.trainable is None:
-            parameter_count = function.__code__.co_argcount
-            params = [0.0] * parameter_count
-
-        else:
-            if self.features is not None:
-                params.extend(self.features)
-            if fixed_params is not None:
-                params.extend(fixed_params)
-            else:
-                params.extend(self.trainable)
-
-        # run function
-        return float(function(*params))
-
-    def _calculate_derivatives(self):
-        """Calculates derivatives w.r.t to all trainable parameters"""
-        vars = []
-        for i in range(self.lambdaf.__code__.co_argcount):
-            vars.append(sp.Symbol(f"p{i}"))
-
-        expr = sp.sympify(self.lambdaf(*vars))
-
-        derivatives = []
-        for i in range(len(vars)):
-            derivative_expr = sp.diff(expr, vars[i])
-            derivatives.append(sp.lambdify(vars, derivative_expr))
-
-        return derivatives
-
     def trainable_parameter_indices(self, start_index):
         """Return list of respective indices of trainable parameters within
         the larger trainable parameter list of a circuit for example"""
@@ -115,9 +121,14 @@ def unaffected_by(self, trainable_idx):
         """Retrieve constant term of lambda function with regard to a specific trainable parameter"""
         params = self.trainable.copy()
         params[trainable_idx] = 0.0
-        return self._apply_func(self.lambdaf, fixed_params=params)
+        return self.__call__(trainable=params)

     def partial_derivative(self, trainable_idx):
         """Get derivative w.r.t a trainable parameter"""
         deriv = self.derivatives[trainable_idx]
-        return self._apply_func(deriv)
+
+        params = []
+        params.extend(self.features)
+        params.extend(self.trainable)
+
+        return deriv(*params)
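At this point the Parameter interface is essentially final; a small usage sketch with hand-checked values (illustrative, not part of the patches):

    from qibo.parameter import Parameter

    # one feature (x) followed by two trainables (th1, th2)
    param = Parameter(
        lambda x, th1, th2: th1 * x + th2,
        features=[1.2],
        trainable=[0.2, 9.1],
    )

    print(param())  # stored values: 0.2 * 1.2 + 9.1 = 9.34

    # call-time overrides are validated against nfeat/nparams
    print(param(features=[0.5], trainable=[2.0, 1.0]))  # 2.0 * 0.5 + 1.0 = 2.0

    # derivatives are indexed over all lambda arguments (feature first),
    # so index 2 is d/d(th2) = 1 regardless of the input values
    print(param.partial_derivative(2))

    # constant part w.r.t. th1, i.e. th1 zeroed: 0.0 * 1.2 + 9.1 = 9.1
    print(param.unaffected_by(0))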
From 9e40953615872f05240e09d4b778890cd4a54057 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Mon, 25 Sep 2023 14:49:15 +0200
Subject: [PATCH 27/32] fix test

---
 tests/test_parameter.py | 22 ++++------------------
 1 file changed, 4 insertions(+), 18 deletions(-)

diff --git a/tests/test_parameter.py b/tests/test_parameter.py
index 3feeae03ac..e85d701ac4 100644
--- a/tests/test_parameter.py
+++ b/tests/test_parameter.py
@@ -56,24 +56,6 @@ def test_parameter():
     gate_value = param()
     assert gate_value == 585

-    param = Parameter(lambda th1, th2, th3: th1 + th2 * th3, nofeatures=True)
-
-    assert param.nparams == 0
-    assert param.nfeat == 0
-
-    param.trainable = [1.0, 2.0, 4.0]
-    param.features = [22.0]
-
-    assert param.nparams == 3
-    assert param.nfeat == 0
-    assert param() == 9.0
-    assert param.features == None
-    assert param.trainable == [1.0, 2.0, 4.0]
-
-    param = Parameter(lambda th1, th2, th3: 3 + th1 + th2 * th3)
-
-    assert param() == 3.0
-

 def test_parameter_errors():
     param = Parameter(
@@ -106,3 +88,7 @@ def test_parameter_errors():
         assert False
     except Exception as e:
         assert True
+
+    # test type error due to wrong initialization
+    with pytest.raises(TypeError):
+        param = Parameter(func=lambda x, y: x + y**2)

From a17d55ca8517361978f09bfbec5c6de0eac26c43 Mon Sep 17 00:00:00 2001
From: Matteo Robbiati <62071516+MatteoRobbiati@users.noreply.github.com>
Date: Wed, 27 Sep 2023 11:38:52 +0200
Subject: [PATCH 28/32] Apply suggestions from code review

Co-authored-by: Alessandro Candido
---
 src/qibo/parameter.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py
index 197e2e94b7..6eecba0b34 100644
--- a/src/qibo/parameter.py
+++ b/src/qibo/parameter.py
@@ -50,9 +50,9 @@ class Parameter:
         nofeatures (bool): flag to explicitly ban the updating of the features. This simplifies the task of updating Parameter objects simultaneously when some have embedded features and some do not.
     """

-    def __init__(self, func, trainable=[], features=[]):
-        self.trainable = trainable
-        self.features = features
+    def __init__(self, func, trainable=None, features=None):
+        self.trainable = trainable if trainable is not None else []
+        self.features = features if features is not None else []

         if self.nfeat + self.nparams != func.__code__.co_argcount:
             raise_error(
@@ -105,7 +105,10 @@ def nparams(self):
     @property
     def nfeat(self):
         """Returns the number of features"""
-        return len(self.features) if isinstance(self.features, list) else 0
+        try:
+            return len(self.features)
+        except TypeError:
+            return 0

     @property
     def ncomponents(self):
@@ -121,7 +124,7 @@ def unaffected_by(self, trainable_idx):
         """Retrieve constant term of lambda function with regard to a specific trainable parameter"""
         params = self.trainable.copy()
         params[trainable_idx] = 0.0
-        return self.__call__(trainable=params)
+        return self(trainable=params)

     def partial_derivative(self, trainable_idx):
         """Get derivative w.r.t a trainable parameter"""

From 4b5b255d6a30e7b18000916fece3f7174ff77e90 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Wed, 27 Sep 2023 11:51:29 +0200
Subject: [PATCH 29/32] add tests

---
 tests/test_parameter.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/tests/test_parameter.py b/tests/test_parameter.py
index e85d701ac4..2996e060c0 100644
--- a/tests/test_parameter.py
+++ b/tests/test_parameter.py
@@ -56,6 +56,10 @@ def test_parameter():
     gate_value = param()
     assert gate_value == 585

+    # testing call with new values
+    executed = param(features=[0.5, 2.0], trainable=[2.0, 0.1, 4.0])
+    assert executed == 1.3
+

 def test_parameter_errors():
     param = Parameter(
@@ -92,3 +96,15 @@ def test_parameter_errors():
     # test type error due to wrong initialization
     with pytest.raises(TypeError):
         param = Parameter(func=lambda x, y: x + y**2)
+
+    # test call function with wrong features and trainable dimensionality
+    param = Parameter(
+        func=lambda x, th1, th2: th1 * x + th2, features=[1.2], trainable=[0.2, 9.1]
+    )
+
+    # wrong features length
+    with pytest.raises(TypeError):
+        param(features=[2.3, 9.2], trainable=[0.4, 9.3])
+    # wrong trainable length
+    with pytest.raises(TypeError):
+        param(features=[0.4], trainable=[3.4, 0.1, 5.6])

From 14f1ab3f61088884c63ecf0c65e0940dac37d176 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Wed, 27 Sep 2023 14:25:29 +0200
Subject: [PATCH 30/32] testing nparams and nfeat

---
 tests/test_parameter.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/tests/test_parameter.py b/tests/test_parameter.py
index 2996e060c0..b213ee0c0f 100644
--- a/tests/test_parameter.py
+++ b/tests/test_parameter.py
@@ -60,6 +60,20 @@ def test_parameter():
     executed = param(features=[0.5, 2.0], trainable=[2.0, 0.1, 4.0])
     assert executed == 1.3

+    # injecting only trainable
+    param = Parameter(lambda x: x, trainable=[0.8])
+    nparams = param.nparams
+    nfeat = param.nfeat
+    assert nparams == 1
+    assert nfeat == 0
+
+    # injecting only features
+    param = Parameter(lambda x: x, features=[0.8])
+    nparams = param.nparams
+    nfeat = param.nfeat
+    assert nparams == 0
+    assert nfeat == 1
+

 def test_parameter_errors():

From e19e6839ae5bbb9812794eb43cbdade87bb74b44 Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Thu, 28 Sep 2023 11:05:49 +0200
Subject: [PATCH 31/32] testing ncomponents

---
 tests/test_parameter.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/test_parameter.py b/tests/test_parameter.py
index b213ee0c0f..4c934699c6 100644
--- a/tests/test_parameter.py
+++ b/tests/test_parameter.py
@@ -64,8 +64,10 @@ def test_parameter():
     param = Parameter(lambda x: x, trainable=[0.8])
     nparams = param.nparams
     nfeat = param.nfeat
+    ncomponents = param.ncomponents
     assert nparams == 1
     assert nfeat == 0
+    assert ncomponents == 1

     # injecting only features
     param = Parameter(lambda x: x, features=[0.8])

From 45502d7ddaa207f1c3363c5440f33606756545ab Mon Sep 17 00:00:00 2001
From: MatteoRobbiati
Date: Thu, 28 Sep 2023 14:13:23 +0200
Subject: [PATCH 32/32] fix ndata and nparams attributes

---
 src/qibo/parameter.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/src/qibo/parameter.py b/src/qibo/parameter.py
index 6eecba0b34..92f2821dbd 100644
--- a/src/qibo/parameter.py
+++ b/src/qibo/parameter.py
@@ -97,18 +97,12 @@ def __call__(self, features=None, trainable=None):
     @property
     def nparams(self):
         """Returns the number of trainable parameters"""
-        try:
-            return len(self.trainable)
-        except TypeError:
-            return 0
+        return len(self.trainable)

     @property
     def nfeat(self):
         """Returns the number of features"""
-        try:
-            return len(self.features)
-        except TypeError:
-            return 0
+        return len(self.features)

     @property
     def ncomponents(self):
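Finally, the sympy machinery behind ``calculate_derivatives`` in standalone form (a sketch equivalent to the helper added in PATCH 26/32; it can be tried without qibo installed):

    import sympy as sp

    def calculate_derivatives(func):
        # one symbol per positional argument of func, in order
        vars = [sp.Symbol(f"p{i}") for i in range(func.__code__.co_argcount)]
        expr = sp.sympify(func(*vars))
        # lambdified partial derivative w.r.t. every argument
        return [sp.lambdify(vars, sp.diff(expr, v)) for v in vars]

    # derivatives of x**2 * th1 + th2 with respect to (x, th1, th2)
    derivs = calculate_derivatives(lambda x, th1, th2: x**2 * th1 + th2)

    print(derivs[0](2.0, 3.0, 1.0))  # d/dx   = 2*x*th1 -> 12.0
    print(derivs[1](2.0, 3.0, 1.0))  # d/dth1 = x**2    -> 4.0
    print(derivs[2](2.0, 3.0, 1.0))  # d/dth2 = 1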