
Commit

most working
apchytr committed Dec 16, 2024
1 parent 5c5c868 commit 7e36f83
Showing 3 changed files with 45 additions and 29 deletions.
mrmustard/lab_dev/states/number.py: 1 addition & 3 deletions
@@ -74,9 +74,7 @@ def __init__(
         self._add_parameter(make_parameter(False, cs, "cutoffs", (None, None)))
         self._representation = self.from_ansatz(
             modes=modes,
-            ansatz=ArrayAnsatz.from_function(
-                fock_state, n=self.n.value, cutoffs=self.cutoffs.value
-            ),
+            ansatz=ArrayAnsatz.from_function(fock_state, n=self.n, cutoffs=self.cutoffs),
         ).representation
         self.short_name = [str(int(n)) for n in self.n.value]
         for i, cutoff in enumerate(self.cutoffs.value):
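The substance of this change is that ArrayAnsatz.from_function now receives the parameter objects self.n and self.cutoffs themselves rather than their unwrapped .value snapshots, so the stored generator keeps a live reference to each parameter. A minimal self-contained sketch of that deferred-evaluation idea (the Parameter class and from_function here are simplified stand-ins, not the library's API):

# Sketch under assumed semantics: from_function stores the callable and its
# raw kwargs without calling it; unwrapping .value is deferred to generation.
class Parameter:
    def __init__(self, value):
        self.value = value

def from_function(fn, **kwargs):
    # keep the kwargs (possibly Parameter objects); do NOT call fn yet
    return {"fn": fn, "kwargs": kwargs}

n = Parameter(2)
ansatz = from_function(lambda n: [0.0] * (n + 1), n=n)
n.value = 3  # updating the parameter after construction...
fn, kw = ansatz["fn"], ansatz["kwargs"]
print(fn(kw["n"].value))  # ...is visible at generation time: [0.0, 0.0, 0.0, 0.0]

With the old .value call, the stored kwargs were a frozen numeric snapshot, so a later parameter update could never reach the generator.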
mrmustard/physics/ansatz/array_ansatz.py: 15 additions & 2 deletions
@@ -27,6 +27,7 @@
 from IPython.display import display
 
 from mrmustard import math, widgets
+from mrmustard.math.parameters import Variable
 from mrmustard.utils.typing import Batch, Scalar, Tensor, Vector
 
 from .base import Ansatz
@@ -203,8 +204,20 @@ def trace(self, idx_z: tuple[int, ...], idx_zconj: tuple[int, ...]) -> ArrayAnsatz:
         return ArrayAnsatz([trace] if trace.shape == () else trace, batched=True)
 
     def _generate_ansatz(self):
-        if self._array is None:
-            self.array = [self._fn(**self._kwargs)]
+        names = list(self._kwargs.keys())
+        vars = list(self._kwargs.values())
+
+        params = {}
+        param_types = []
+        for name, param in zip(names, vars):
+            try:
+                params[name] = param.value
+                param_types.append(type(param))
+            except AttributeError:
+                params[name] = param
+
+        if self._array is None or Variable in param_types:
+            self.array = [self._fn(**params)]
 
     def _ipython_display_(self):
         if widgets.IN_INTERACTIVE_SHELL or (w := widgets.fock(self)) is None:
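The new _generate_ansatz logic can be exercised in isolation. A runnable sketch of the same control flow (assumed semantics; FakeVariable and generate are stand-ins for mrmustard.math.parameters.Variable and the method above):

# Unwrap .value where a kwarg is a parameter object, record its type, and
# recompute whenever the cache is empty or any kwarg is a trainable variable.
class FakeVariable:
    def __init__(self, value):
        self.value = value

def generate(fn, kwargs, cached=None):
    params, param_types = {}, []
    for name, param in kwargs.items():
        try:
            params[name] = param.value      # parameter object: unwrap
            param_types.append(type(param))
        except AttributeError:
            params[name] = param            # plain value: pass through
    if cached is None or FakeVariable in param_types:
        cached = fn(**params)               # (re)generate the array
    return cached

print(generate(lambda n: list(range(n)), {"n": FakeVariable(3)}))  # [0, 1, 2]

The Variable in param_types branch means the cached array is rebuilt whenever any kwarg is trainable, which is presumably what allows the Fock-representation gradient tests below to be re-enabled.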
tests/test_training/test_opt.py: 29 additions & 24 deletions
@@ -531,38 +531,43 @@ def cost_fn():
         assert np.allclose(bsgate.theta.value, 0.1, atol=0.01)
         assert np.allclose(bsgate.phi.value, 0.2, atol=0.01)
 
-    # def test_squeezing_grad_from_fock(self):
-    #     """Test that the gradient of a squeezing gate is computed from the fock representation."""
-    #     skip_np()
+    def test_squeezing_grad_from_fock(self):
+        """Test that the gradient of a squeezing gate is computed from the fock representation."""
+        skip_np()
 
-    #     squeezing = Sgate((0,), r=1, r_trainable=True)
+        squeezing = Sgate((0,), r=1.0, r_trainable=True)
 
-    #     def cost_fn():
-    #         return -(Number((0,), 2) >> squeezing >> Vacuum((0,)).dual)
+        def cost_fn():
+            return -(Number((0,), 2) >> squeezing >> Vacuum((0,)).dual)
 
-    #     opt = Optimizer(euclidean_lr=0.05)
-    #     opt.minimize(cost_fn, by_optimizing=[squeezing], max_steps=100)
+        opt = Optimizer(euclidean_lr=0.05)
+        opt.minimize(cost_fn, by_optimizing=[squeezing], max_steps=100)
 
-    # def test_displacement_grad_from_fock(self):
-    #     """Test that the gradient of a displacement gate is computed from the fock representation."""
-    #     skip_np()
+    def test_displacement_grad_from_fock(self):
+        """Test that the gradient of a displacement gate is computed from the fock representation."""
+        skip_np()
 
-    #     disp = Dgate(x=1.0, y=1.0, x_trainable=True, y_trainable=True)
+        disp = Dgate((0,), x=1.0, y=1.0, x_trainable=True, y_trainable=True)
 
-    #     def cost_fn():
-    #         return -(Fock(2) >> disp << Vacuum(1))
+        def cost_fn():
+            return -(Number((0,), 2) >> disp >> Vacuum((0,)).dual)
 
-    #     opt = Optimizer(euclidean_lr=0.05)
-    #     opt.minimize(cost_fn, by_optimizing=[disp], max_steps=100)
+        opt = Optimizer(euclidean_lr=0.05)
+        opt.minimize(cost_fn, by_optimizing=[disp], max_steps=100)
 
-    # def test_bsgate_grad_from_fock(self):
-    #     """Test that the gradient of a beamsplitter gate is computed from the fock representation."""
-    #     skip_np()
+    def test_bsgate_grad_from_fock(self):
+        """Test that the gradient of a beamsplitter gate is computed from the fock representation."""
+        skip_np()
 
-    #     sq = SqueezedVacuum(r=1.0, r_trainable=True)
+        sq = SqueezedVacuum((0,), r=1.0, r_trainable=True)
 
-    #     def cost_fn():
-    #         return -((sq & Fock(1)) >> BSgate(0.5) << (Vacuum(1) & Fock(1)))
+        def cost_fn():
+            return -(
+                sq
+                >> Number((1,), 1)
+                >> BSgate((0, 1), 0.5)
+                >> (Vacuum((0,)) >> Number((1,), 1)).dual
+            )
 
-    #     opt = Optimizer(euclidean_lr=0.05)
-    #     opt.minimize(cost_fn, by_optimizing=[sq], max_steps=100)
+        opt = Optimizer(euclidean_lr=0.05)
+        opt.minimize(cost_fn, by_optimizing=[sq], max_steps=100)
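All three re-enabled tests share one skeleton: build a gate with a trainable parameter, write a cost that contracts a Fock-space input through the gate and against a dual output state (minus a transition amplitude), and let the optimizer run. A hedged sketch of that pattern, standalone (the import paths are assumptions inferred from the identifiers used above):

# Assumed imports; the test module above already has these names in scope.
from mrmustard.lab_dev import Number, Sgate, Vacuum
from mrmustard.training import Optimizer

squeezing = Sgate((0,), r=1.0, r_trainable=True)  # trainable squeezing gate

def cost_fn():
    # minus the overlap of S(r)|2> with the vacuum; per the test docstrings,
    # the Fock input state makes the gradient flow through the Fock
    # (array) representation regenerated by _generate_ansatz
    return -(Number((0,), 2) >> squeezing >> Vacuum((0,)).dual)

opt = Optimizer(euclidean_lr=0.05)
opt.minimize(cost_fn, by_optimizing=[squeezing], max_steps=100)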
