bump torch version (#4650)
Torch is now tested against the latest release (2.1.0). Due to a PyTorch bug (https://github.com/pytorch/pytorch/issues/94397), Jacobian computation no longer works with backward-mode AD when the function has a complex return type; affected code paths now use forward-mode AD or are marked xfail.
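For context, a minimal sketch of the workaround (not part of this commit): on torch >= 2.1, reverse-mode Jacobians of complex-output functions can fail, while forward-mode works. The toy function f and the sample input below are illustrative assumptions, not code from the repository.

import torch

def f(z):
    # stand-in for a circuit that returns a complex state
    return torch.exp(1j * z)

z = torch.tensor(0.325 + 0j)

# forward-mode AD handles the complex output; vectorize=True is required
# whenever strategy="forward-mode" is used
jac = torch.autograd.functional.jacobian(f, z, strategy="forward-mode", vectorize=True)
print(jac)  # d/dz exp(1j * z) = 1j * exp(1j * z)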

Co-authored-by: Matthew Silverman <[email protected]>
albi3ro and timmysilv authored Oct 30, 2023
1 parent 781092f commit 0717473
Showing 5 changed files with 10 additions and 5 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/interface-unit-tests.yml
@@ -20,7 +20,7 @@ on:
         description: The version of PyTorch to install for any job that requires PyTorch
         required: false
         type: string
-        default: 2.0.0
+        default: 2.1.0
       pytest_coverage_flags:
         description: PyTest Coverage flags to pass to all jobs
         required: false
7 changes: 3 additions & 4 deletions tests/devices/qubit/test_apply_operation.py
@@ -653,10 +653,9 @@ def f(phi):
         phi = torch.tensor(0.325, requires_grad=True)

         new_state = f(phi)
-        g = torch.autograd.functional.jacobian(f, phi + 0j)
-
-        # torch takes gradient with respect to conj(z), so we need to conj the gradient
-        g = torch.conj(g).resolve_conj()
+        # forward-mode needed with complex results.
+        # See bug: https://github.com/pytorch/pytorch/issues/94397
+        g = torch.autograd.functional.jacobian(f, phi + 0j, strategy="forward-mode", vectorize=True)

         self.compare_expected_result(
             phi.detach().numpy(),
1 change: 1 addition & 0 deletions tests/devices/test_default_mixed_torch.py
@@ -415,6 +415,7 @@ def circuit(a):
         expected = torch.tensor(exp_fn(a), dtype=torch.float64)
         assert torch.allclose(a.grad, expected, atol=tol, rtol=0)

+    @pytest.mark.xfail(reason="see pytorch/pytorch/issues/94397")
     def test_state_vector_differentiability(self, tol):
         """Test that the device state vector can be differentiated directly"""
         dev = qml.device("default.mixed", wires=1)
4 changes: 4 additions & 0 deletions tests/docs/test_supported_confs.py
@@ -436,6 +436,8 @@ def test_all_paramshift_state(self, interface, return_type, shots, wire_specs):
             with pytest.raises(qml.DeviceError, match="not accepted with finite shots"):
                 compute_gradient(x, interface, circuit, return_type, complex=complex)
         else:
+            if interface == "torch" and return_type == "StateVector":
+                pytest.xfail(reason="see pytorch/pytorch/issues/94397")
             with pytest.raises(ValueError, match=msg):
                 compute_gradient(x, interface, circuit, return_type, complex=complex)

@@ -526,6 +528,8 @@ def test_all_state_backprop(self, interface, wire_specs):
         """Test gradient of state directly succeeds for non-autograd interfaces"""
         circuit = get_qnode(interface, "backprop", "StateVector", None, wire_specs)
         x = get_variable(interface, wire_specs, complex=True)
+        if interface == "torch":
+            pytest.xfail(reason="see pytorch/pytorch/issues/94397")
         compute_gradient(x, interface, circuit, "StateVector", complex=True)

 wire_specs_list = [
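As an aside, a hedged sketch of the two pytest idioms this commit relies on (the test names and parametrization below are illustrative, not from the repository):

import pytest

# decorator form: the entire test is expected to fail
@pytest.mark.xfail(reason="see pytorch/pytorch/issues/94397")
def test_whole_case_expected_to_fail():
    ...

# imperative form: only specific parametrized cases are expected to fail
@pytest.mark.parametrize("interface", ["autograd", "torch"])
def test_some_cases_expected_to_fail(interface):
    if interface == "torch":
        pytest.xfail(reason="see pytorch/pytorch/issues/94397")
    assert True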
1 change: 1 addition & 0 deletions tests/shadow/test_shadow_transforms.py
@@ -299,6 +299,7 @@ def test_backward_tf(self):
         assert qml.math.allclose(act, expected, atol=1e-1)

     @pytest.mark.torch
+    @pytest.mark.xfail(reason="see pytorch/pytorch/issues/94397")
     def test_backward_torch(self):
         """Test the gradient of the state for the torch interface"""
         import torch
