Add VJP support to PL-Lightning #181

Merged
merged 33 commits on Dec 6, 2021
Commits
b83d920  Add vecMatrixProd (maliasadi, Nov 24, 2021)
9d6c7da  Merge branch 'master' of https://github.com/PennyLaneAI/pennylane-lig… (maliasadi, Nov 24, 2021)
c3c7525  Add vjp to lightning_qubit.py and tests (maliasadi, Nov 25, 2021)
77fc34a  Update formatting (maliasadi, Nov 25, 2021)
ac39aaa  Add class VectorJacobianProduct (maliasadi, Nov 25, 2021)
1210aac  Update formatting (maliasadi, Nov 25, 2021)
90ea7ed  Update C++ class (maliasadi, Nov 25, 2021)
94ced72  Update clang-tidy (maliasadi, Nov 25, 2021)
b219561  Update VectorJacobianProduct public methods (maliasadi, Nov 25, 2021)
9c163b8  Add Test_VJP.cpp (maliasadi, Nov 25, 2021)
516d306  Update cpp tests (maliasadi, Nov 26, 2021)
d833c44  Update cpp tests (maliasadi, Nov 26, 2021)
74aeeac  Complete adding cpp tests (maliasadi, Nov 26, 2021)
a3896b5  Merge branch 'master' of github.com:PennyLaneAI/pennylane-lightning i… (maliasadi, Nov 26, 2021)
e2f8bd6  Update formatting w/ clang-tidy-12 (maliasadi, Nov 26, 2021)
b30d626  Merge branch 'master' into lightning-add-vjp (maliasadi, Nov 26, 2021)
d85f20b  Merge branch 'lightning-add-vjp' of github.com:PennyLaneAI/pennylane-… (maliasadi, Nov 26, 2021)
6d0b610  Add Bindings (maliasadi, Nov 26, 2021)
d249d5e  Update Bindings (maliasadi, Nov 26, 2021)
09cbfcd  Fix issue with vjp pybinds and Add more tests (maliasadi, Nov 26, 2021)
ed76b80  Update #181 (maliasadi, Nov 26, 2021)
99e7928  Apply codecov suggestions (maliasadi, Nov 26, 2021)
205f874  Apply code review suggestions (maliasadi, Nov 30, 2021)
25b93e8  Fix rendering math formulas in docs (maliasadi, Nov 30, 2021)
b9416e6  Apply code factor suggestions (maliasadi, Nov 30, 2021)
30534c0  Update python tests (maliasadi, Nov 30, 2021)
20b1e12  Update vector_jacobian_product method (maliasadi, Nov 30, 2021)
c0f4202  Add adjoint_diff_support_check method (maliasadi, Nov 30, 2021)
3d0bf52  Add more tests for batch_vjp (maliasadi, Nov 30, 2021)
6bca830  Update VJP Python bindings (maliasadi, Dec 1, 2021)
0742302  Update tests (maliasadi, Dec 1, 2021)
8cfb4ab  Merge branch 'master' into lightning-add-vjp (mlxd, Dec 3, 2021)
9df0c3a  Merge branch 'master' into lightning-add-vjp (mlxd, Dec 6, 2021)
3 changes: 3 additions & 0 deletions .github/CHANGELOG.md
@@ -2,6 +2,9 @@

### New features since last release

* Add VJP support to PL-Lightning.
[(#181)](https://github.com/PennyLaneAI/pennylane-lightning/pull/181)

### Breaking changes

### Improvements
200 changes: 191 additions & 9 deletions pennylane_lightning/lightning_qubit.py
@@ -20,6 +20,7 @@

import numpy as np
from pennylane import (
math,
BasisState,
DeviceError,
QuantumFunctionError,
@@ -42,13 +43,15 @@
StateVectorC64,
StateVectorC128,
AdjointJacobianC128,
VectorJacobianProductC128,
)
else:
from .lightning_qubit_ops import (
apply,
StateVectorC64,
StateVectorC128,
AdjointJacobianC128,
VectorJacobianProductC128,
)
from ._serialize import _serialize_obs, _serialize_ops

@@ -171,17 +174,15 @@ def apply_lightning(self, state, operations):

return np.reshape(state_vector, state.shape)

def adjoint_jacobian(self, tape, starting_state=None, use_device_state=False):
if self.shots is not None:
warn(
"Requested adjoint differentiation to be computed with finite shots."
" The derivative is always exact when using the adjoint differentiation method.",
UserWarning,
)

if len(tape.trainable_params) == 0:
return np.array(0)

def adjoint_diff_support_check(self, tape):
"""Check Lightning adjoint differentiation method support for a tape.

Raise a ``QuantumFunctionError`` if ``tape`` contains measurements, observables,
or operations not supported by the Lightning adjoint differentiation method.

Args:
tape (.QuantumTape): quantum tape to differentiate
"""
for m in tape.measurements:
if m.return_type is not Expectation:
raise QuantumFunctionError(
@@ -216,6 +217,20 @@ def adjoint_jacobian(self, tape, starting_state=None, use_device_state=False):
'the "adjoint" differentiation method'
)

def adjoint_jacobian(self, tape, starting_state=None, use_device_state=False):
if self.shots is not None:
warn(
"Requested adjoint differentiation to be computed with finite shots."
" The derivative is always exact when using the adjoint differentiation method.",
UserWarning,
)

if len(tape.trainable_params) == 0:
return np.array(0)

# Check adjoint diff support
self.adjoint_diff_support_check(tape)

# Initialization of state
if starting_state is not None:
ket = np.ravel(starting_state)
@@ -248,6 +263,173 @@ def adjoint_jacobian(self, tape, starting_state=None, use_device_state=False):
)
return jac

def vector_jacobian_product(self, tape, dy, starting_state=None, use_device_state=False):
"""Generate the the vector-Jacobian products of a tape.

Args:
tape (.QuantumTape): quantum tape to differentiate
dy (tensor_like): Gradient-output vector. Must have shape
matching the output shape of the corresponding tape.

Keyword Args:
starting_state (tensor_like): post-forward pass state to start execution with. It should be
complex-valued. Takes precedence over ``use_device_state``.
use_device_state (bool): use current device state to initialize. A forward pass of the same
circuit should be the last thing the device has executed. If a ``starting_state`` is
provided, that takes precedence.

Returns:
tuple[array or None, tensor_like or None]: A tuple of the adjoint Jacobian and the
vector-Jacobian product. Returns ``None`` if the tape has no trainable parameters.
"""
if self.shots is not None:
warn(
"Requested adjoint differentiation to be computed with finite shots."
" The derivative is always exact when using the adjoint differentiation method.",
UserWarning,
)

num_params = len(tape.trainable_params)

if num_params == 0:
return None, None

if math.allclose(dy, 0):
return None, math.convert_like(np.zeros([num_params]), dy)

# Check adjoint diff support
self.adjoint_diff_support_check(tape)

# Initialization of state
if starting_state is not None:
ket = np.ravel(starting_state)
else:
if not use_device_state:
self.reset()
self.execute(tape)
ket = np.ravel(self._pre_rotated_state)

VJP = VectorJacobianProductC128()

obs_serialized = _serialize_obs(tape, self.wire_map)
ops_serialized, use_sp = _serialize_ops(tape, self.wire_map)

ops_serialized = VJP.create_ops_list(*ops_serialized)

trainable_params = sorted(tape.trainable_params)
first_elem = 1 if trainable_params[0] == 0 else 0

tp_shift = (
trainable_params if not use_sp else [i - 1 for i in trainable_params[first_elem:]]
) # exclude first index if explicitly setting sv

jac, vjp = VJP.vjp(
math.reshape(dy, [-1]),
StateVectorC128(ket),
obs_serialized,
ops_serialized,
tp_shift,
tape.num_params,
)
return jac, vjp
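
As a usage illustration (not part of the diff): a minimal sketch of calling the new method, assuming a standard ``lightning.qubit`` device and the tape API of this PennyLane release; the circuit and values are made up.

import numpy as np
import pennylane as qml

dev = qml.device("lightning.qubit", wires=2)

with qml.tape.QuantumTape() as tape:
    qml.RX(0.4, wires=0)
    qml.CNOT(wires=[0, 1])
    qml.RY(0.6, wires=1)
    qml.expval(qml.PauliZ(0))
    qml.expval(qml.PauliZ(1))

tape.trainable_params = {0, 1}  # the two rotation angles

# dy must match the tape's output shape: two expectation values here.
dy = np.array([1.0, -1.0])

jac, vjp = dev.vector_jacobian_product(tape, dy)
# vjp should equal dy @ jac, with one entry per trainable parameter.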

def compute_vjp(self, dy, jac, num=None):
"""Convenience function to compute the vector-Jacobian product for a given
vector of gradient outputs and a Jacobian.

Args:
dy (tensor_like): vector of gradient outputs
jac (tensor_like): Jacobian matrix. For an n-dimensional ``dy``
vector, the first n-dimensions of ``jac`` should match
the shape of ``dy``.

Keyword Args:
num (int): The length of the flattened ``dy`` argument. This is an
optional argument, but can be useful to provide if ``dy`` potentially
has no shape (for example, due to tracing or just-in-time compilation).

Returns:
tensor_like: the vector-Jacobian product
"""
if jac is None:
return None

dy_row = math.reshape(dy, [-1])

if num is None:
num = math.shape(dy_row)[0]

if not isinstance(dy_row, np.ndarray):
jac = math.convert_like(jac, dy_row)

jac = math.reshape(jac, [num, -1])
num_params = jac.shape[1]

if math.allclose(dy, 0):
return math.convert_like(np.zeros([num_params]), dy)

VJP = VectorJacobianProductC128()

vjp_tensor = VJP.compute_vjp_from_jac(
math.reshape(jac, [-1]),
dy_row,
num,
num_params,
)
return vjp_tensor
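
To make the contraction concrete, a small numeric sketch of ``compute_vjp`` with an illustrative precomputed Jacobian (``dev`` as in the sketch above):

import numpy as np

dy = np.array([1.0, -1.0])          # one entry per tape output
jac = np.array([[0.2, 0.5, -0.1],
                [0.3, 0.0, 0.7]])   # shape (num outputs, num trainable params)

vjp = dev.compute_vjp(dy, jac)
# Expected to match dy @ jac, i.e. [-0.1, 0.5, -0.8].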

def batch_vjp(
self, tapes, dys, reduction="append", starting_state=None, use_device_state=False
):
"""Generate the the vector-Jacobian products of a batch of tapes.

Args:
tapes (Sequence[.QuantumTape]): sequence of quantum tapes to differentiate
dys (Sequence[tensor_like]): Sequence of gradient-output vectors ``dy``. Must be the
same length as ``tapes``. Each ``dy`` tensor should have shape
matching the output shape of the corresponding tape.

Keyword Args:
reduction (str): Determines how the vector-Jacobian products are returned.
If ``append``, then the output of the function will be of the form
``List[tensor_like]``, with each element corresponding to the VJP of each
input tape. If ``extend``, then the output VJPs will be concatenated.
starting_state (tensor_like): post-forward pass state to start execution with. It should be
complex-valued. Takes precedence over ``use_device_state``.
use_device_state (bool): use current device state to initialize. A forward pass of the same
circuit should be the last thing the device has executed. If a ``starting_state`` is
provided, that takes precedence.

Returns:
tuple[List[array or None], List[tensor_like or None]]: A tuple containing a list
of adjoint Jacobians and a list of vector-Jacobian products. ``None`` elements
correspond to tapes with no trainable parameters.
"""
vjps = []
jacs = []

# Loop through the tapes and the corresponding dy vectors
for tape, dy in zip(tapes, dys):
jac, vjp = self.vector_jacobian_product(
tape,
dy,
starting_state=starting_state,
use_device_state=use_device_state,
)
if vjp is None:
if reduction == "append":
vjps.append(None)
jacs.append(jac)
continue
if isinstance(reduction, str):
getattr(vjps, reduction)(vjp)
getattr(jacs, reduction)(jac)
elif callable(reduction):
reduction(vjps, vjp)
reduction(jacs, jac)

return jacs, vjps
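
A batching sketch, assuming ``tape1`` and ``tape2`` are built like the tape in the earlier example:

dys = [np.array([1.0, -1.0]), np.array([0.5, 0.5])]

jacs, vjps = dev.batch_vjp([tape1, tape2], dys, reduction="append")
# len(vjps) == 2, one VJP per tape; with reduction="extend" the
# individual VJPs are concatenated into a single flat list instead.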


if not CPP_BINARY_AVAILABLE:

2 changes: 1 addition & 1 deletion pennylane_lightning/src/algorithms/AdjointDiff.hpp
@@ -762,7 +762,7 @@ template <class T = double> class AdjointJacobian {
}
}
}
};
}; // class AdjointJacobian

} // namespace Algorithms
} // namespace Pennylane
2 changes: 1 addition & 1 deletion pennylane_lightning/src/algorithms/CMakeLists.txt
@@ -1,7 +1,7 @@
project(lightning_algorithms LANGUAGES CXX)
set(CMAKE_CXX_STANDARD 17)

set(ALGORITHM_FILES AdjointDiff.hpp AdjointDiff.cpp CACHE INTERNAL "" FORCE)
set(ALGORITHM_FILES AdjointDiff.hpp AdjointDiff.cpp JacobianProd.hpp JacobianProd.cpp CACHE INTERNAL "" FORCE)
add_library(lightning_algorithms STATIC ${ALGORITHM_FILES})

target_link_libraries(lightning_algorithms PRIVATE pennylane_lightning_compile_options
19 changes: 19 additions & 0 deletions pennylane_lightning/src/algorithms/JacobianProd.cpp
@@ -0,0 +1,19 @@
// Copyright 2021 Xanadu Quantum Technologies Inc.

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at

// http://www.apache.org/licenses/LICENSE-2.0

// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "JacobianProd.hpp"

// explicit instantiation
template class Pennylane::Algorithms::VectorJacobianProduct<float>;
template class Pennylane::Algorithms::VectorJacobianProduct<double>;