Skip to content

Commit

Permalink
Merge branch 'master' into parameter-broadcasting-1
Browse files Browse the repository at this point in the history
  • Loading branch information
dwierichs authored Jun 3, 2022
2 parents 17044c9 + 3344c77 commit 2259ce0
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 7 deletions.
6 changes: 4 additions & 2 deletions doc/releases/changelog-dev.md
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,9 @@
instead of the controlled version of the diagonal unitary.
[(#2525)](https://github.com/PennyLaneAI/pennylane/pull/2525)

* Updated the gradients fix [(#2485)](https://github.com/PennyLaneAI/pennylane/pull/2485) so that it applies only to the `strawberryfields.gbs` device,
  since the original logic was breaking some devices. [(#2595)](https://github.com/PennyLaneAI/pennylane/pull/2595)

<h3>Deprecations</h3>

<h3>Documentation</h3>
Expand All @@ -328,5 +331,4 @@
This release contains contributions from (in alphabetical order):

Amintor Dusko, Chae-Yeun Park, Christian Gogolin, Christina Lee, David Wierichs, Edward Jiang, Guillermo Alonso-Linaje,
Jay Soni, Juan Miguel Arrazola, Maria Schuld, Mikhail Andrenkov, Soran Jahangiri, Utkarsh Azad

Jay Soni, Juan Miguel Arrazola, Maria Schuld, Mikhail Andrenkov, Samuel Banning, Soran Jahangiri, Utkarsh Azad
9 changes: 4 additions & 5 deletions pennylane/interfaces/autograd.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,11 +234,10 @@ def grad_fn(dy):
return_vjps = [
qml.math.to_numpy(v, max_depth=_n) if isinstance(v, ArrayBox) else v for v in vjps
]
if device.capabilities().get("provides_jacobian", False):
# in the case where the device provides the jacobian,
# the output of grad_fn must be wrapped in a tuple in
# order to match the input parameters to _execute.
return (return_vjps,)
if device.short_name == "strawberryfields.gbs": # pragma: no cover
# TODO: remove this exceptional case once the source of this issue
# https://github.com/PennyLaneAI/pennylane-sf/issues/89 is determined
return (return_vjps,) # pragma: no cover
return return_vjps

return grad_fn
Expand Down
34 changes: 34 additions & 0 deletions tests/interfaces/test_autograd.py
Original file line number Diff line number Diff line change
Expand Up @@ -1141,3 +1141,37 @@ def circuit(v):

d_out = d_circuit(params)
assert np.allclose(d_out, np.array([1.0, 2.0, 3.0, 4.0]))

def test_custom_jacobians_2(self):
    """Check that ``qml.jacobian`` with the parameter-shift rule works on a
    device that advertises ``provides_jacobian`` — the device's own
    ``jacobian`` method must never be invoked."""

    class JacobianQubit(DefaultQubit):
        # Advertise a device-provided jacobian without actually supplying one.
        @classmethod
        def capabilities(cls):
            caps = super().capabilities().copy()
            caps["provides_jacobian"] = True
            return caps

        def jacobian(self, *args, **kwargs):
            # Parameter-shift differentiation must not fall back to this.
            raise NotImplementedError()

    device = JacobianQubit(wires=2)

    @qml.qnode(device, diff_method="parameter-shift", mode="backward")
    def circuit(a, b):
        qml.RY(a, wires=0)
        qml.RX(b, wires=1)
        qml.CNOT(wires=[0, 1])
        return [qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliY(1))]

    a = np.array(0.1, requires_grad=True)
    b = np.array(0.2, requires_grad=True)

    jac = qml.jacobian(circuit)(a, b)
    expected = (
        [-np.sin(a), np.sin(a) * np.sin(b)],
        [0, -np.cos(a) * np.cos(b)],
    )

    assert np.allclose(jac[0], expected[0])
    assert np.allclose(jac[1], expected[1])

0 comments on commit 2259ce0

Please sign in to comment.