Fix failing tutorials, change MNLE default for log_transform to False (#1367)

* fix fmpe typo, add nle seed

* fix: change default log_transform to False.

It was set to True to accommodate positive reaction times, but that assumption does not hold in general (see the sketch after the mnle.py diff below).

* small nb fixes
janfb authored Jan 20, 2025
1 parent 76c1e1b commit 80740b2
Showing 5 changed files with 16 additions and 14 deletions.
2 changes: 1 addition & 1 deletion sbi/neural_nets/net_builders/mnle.py
@@ -64,7 +64,7 @@ def build_mnle(
     hidden_features: int = 50,
     hidden_layers: int = 2,
     tail_bound: float = 10.0,
-    log_transform_x: bool = True,
+    log_transform_x: bool = False,
     **kwargs,
 ):
     """Returns a density estimator for mixed data types.
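For strictly positive continuous data such as reaction times, the log-transform can still be switched on explicitly, as the updated tests in this commit do. A minimal sketch, assuming sbi's public API around this release; the Gamma/Beta prior mirrors the test setup below, and the reaction-time simulator is hypothetical, not part of the changed files:

import torch
from sbi.inference import MNLE
from sbi.neural_nets import likelihood_nn
from sbi.utils import MultipleIndependent
from torch.distributions import Beta, Gamma

# Mixed prior over a shape-like and a probability-like parameter,
# mirroring the prior used in tests/mnle_test.py.
prior = MultipleIndependent(
    [
        Gamma(torch.tensor([1.0]), torch.tensor([0.5])),
        Beta(torch.tensor([2.0]), torch.tensor([2.0])),
    ],
    validate_args=False,
)
theta = prior.sample((1000,))

# Hypothetical mixed simulator: positive reaction times plus a binary choice.
rts = Gamma(theta[:, :1], torch.tensor([1.0])).sample()
choices = torch.bernoulli(theta[:, 1:])
x = torch.cat((rts, choices), dim=1)

# Reaction times are strictly positive, so we opt back in to the
# log-transform; after this commit it is no longer enabled by default.
estimator_builder = likelihood_nn(model="mnle", log_transform_x=True)
trainer = MNLE(prior, estimator_builder)
estimator = trainer.append_simulations(theta, x).train()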
10 changes: 5 additions & 5 deletions tests/mnle_test.py
@@ -162,7 +162,9 @@ def test_mnle_accuracy_with_different_samplers_and_trials(
     x = mixed_simulator(theta, stimulus_condition=1.0)

     # MNLE
-    density_estimator = likelihood_nn(model="mnle", flow_model=flow_model)
+    density_estimator = likelihood_nn(
+        model="mnle", flow_model=flow_model, log_transform_x=True
+    )
     trainer = MNLE(prior, density_estimator=density_estimator)
     trainer.append_simulations(theta, x).train(training_batch_size=200)
     posterior = trainer.build_posterior()
@@ -294,7 +296,7 @@ def test_mnle_with_experimental_conditions(mcmc_params_accurate: dict):
     )

     # MNLE
-    estimator_fun = likelihood_nn(model="mnle", z_score_x=None)
+    estimator_fun = likelihood_nn(model="mnle", log_transform_x=True)
     trainer = MNLE(proposal, estimator_fun)
     estimator = trainer.append_simulations(theta, x).train()

@@ -362,9 +364,7 @@ def test_log_likelihood_over_local_iid_theta(
     """

     # train mnle on mixed data
-    trainer = MNLE(
-        density_estimator=likelihood_nn(model="mnle", z_score_x=None),
-    )
+    trainer = MNLE()
     proposal = MultipleIndependent(
         [
             Gamma(torch.tensor([1.0]), torch.tensor([0.5])),
2 changes: 1 addition & 1 deletion tests/tutorials_test.py
@@ -22,7 +22,7 @@ def test_tutorials(notebook_path):
     """Test that all notebooks in the tutorials directory can be executed."""
     with open(notebook_path) as f:
         nb = nbformat.read(f, as_version=4)
-    ep = ExecutePreprocessor(timeout=1200, kernel_name='python3')
+    ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
     print(f"Executing notebook {notebook_path}")
     try:
         ep.preprocess(nb, {'metadata': {'path': os.path.dirname(notebook_path)}})
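A notebook-execution test like this is typically parametrized over every tutorial notebook. A sketch of one way to collect them, assuming a pytest setup; the glob pattern and test ids are guesses, not taken from this commit:

import os
from pathlib import Path

import nbformat
import pytest
from nbconvert.preprocessors import ExecutePreprocessor

# Assumed collection: every notebook under tutorials/ (the pattern is a guess).
NOTEBOOKS = sorted(Path("tutorials").glob("*.ipynb"))

@pytest.mark.parametrize("notebook_path", NOTEBOOKS, ids=lambda p: p.name)
def test_tutorials(notebook_path):
    """Test that all notebooks in the tutorials directory can be executed."""
    with open(notebook_path) as f:
        nb = nbformat.read(f, as_version=4)
    # 600 s per notebook, matching the timeout set in this commit.
    ep = ExecutePreprocessor(timeout=600, kernel_name="python3")
    ep.preprocess(nb, {"metadata": {"path": os.path.dirname(notebook_path)}})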
5 changes: 3 additions & 2 deletions tutorials/16_implemented_methods.ipynb
@@ -187,7 +187,7 @@
 "from sbi.inference import FMPE\n",
 "\n",
 "inference = FMPE(prior)\n",
-"# FMPE does support multiple rounds of inference\n",
+"# FMPE does not support multiple rounds of inference\n",
 "theta = prior.sample((num_sims,))\n",
 "x = simulator(theta)\n",
 "inference.append_simulations(theta, x).train()\n",
@@ -310,7 +310,8 @@
 "\n",
 "inference = MNLE(prior)\n",
 "theta = prior.sample((num_sims,))\n",
-"x = simulator(theta)\n",
+"# add a column of discrete data to x.\n",
+"x = torch.cat((simulator(theta), torch.bernoulli(theta[:, :1])), dim=1)\n",
 "_ = inference.append_simulations(theta, x).train()\n",
 "posterior = inference.build_posterior().set_default_x(x_o)"
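The appended Bernoulli column is what makes this a mixed-data problem: MNLE models continuous columns alongside discrete ones (here appended as the last column). Read as a plain script, the amended cell looks roughly like this; prior, simulator, num_sims, and x_o are assumed from earlier notebook cells, and the first parameter is assumed to lie in [0, 1] so it can serve as a Bernoulli probability:

import torch
from sbi.inference import MNLE

inference = MNLE(prior)
theta = prior.sample((num_sims,))
# Continuous simulator output plus one discrete (binary) column on the
# right, so that x contains the mixed data types MNLE expects.
x = torch.cat((simulator(theta), torch.bernoulli(theta[:, :1])), dim=1)
_ = inference.append_simulations(theta, x).train()
posterior = inference.build_posterior().set_default_x(x_o)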
11 changes: 6 additions & 5 deletions tutorials/Example_01_DecisionMakingModel.ipynb
@@ -129,7 +129,8 @@
 "        Beta(torch.tensor([2.0]), torch.tensor([2.0])),\n",
 "    ],\n",
 "    validate_args=False,\n",
-")"
+")\n",
+"prior_transform = mcmc_transform(prior)"
 ]
 },
 {
@@ -184,7 +185,7 @@
 "true_posterior = MCMCPosterior(\n",
 "    potential_fn=BinomialGammaPotential(prior, x_o),\n",
 "    proposal=prior,\n",
-"    theta_transform=mcmc_transform(prior, enable_transform=True),\n",
+"    theta_transform=prior_transform,\n",
 "    **mcmc_kwargs,\n",
 ")\n",
 "true_samples = true_posterior.sample((num_samples,))"
@@ -228,7 +229,8 @@
 "x = mixed_simulator(theta)\n",
 "\n",
 "# Train MNLE and obtain MCMC-based posterior.\n",
-"trainer = MNLE()\n",
+"estimator_builder = likelihood_nn(model=\"mnle\", log_transform_x=True)\n",
+"trainer = MNLE(proposal, estimator_builder)\n",
 "estimator = trainer.append_simulations(theta, x).train()"
 ]
 },
@@ -610,7 +612,7 @@
 }
 ],
 "source": [
-"estimator_builder = likelihood_nn(model=\"mnle\", z_score_x=None) # we don't want to z-score the binary data.\n",
+"estimator_builder = likelihood_nn(model=\"mnle\", log_transform_x=True)\n",
 "trainer = MNLE(proposal, estimator_builder)\n",
 "estimator = trainer.append_simulations(theta, x).train()"
@@ -847,7 +849,6 @@
 "\n",
 "fig, ax = pairplot(\n",
 "    [prior.sample((1000,))] + posterior_samples,\n",
-"    # points=theta_o,\n",
 "    diag=\"kde\",\n",
 "    upper=\"contour\",\n",
 "    diag_kwargs=dict(bins=100),\n",
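Taken together, the amended cells leave the tutorial's MNLE pipeline looking roughly like this. A sketch only: proposal, theta, x, and x_o come from earlier notebook cells, and the sample count is illustrative:

from sbi.inference import MNLE
from sbi.neural_nets import likelihood_nn

# Reaction times are strictly positive, so the tutorial opts back in to
# the log-transform that this commit turns off by default.
estimator_builder = likelihood_nn(model="mnle", log_transform_x=True)
trainer = MNLE(proposal, estimator_builder)
estimator = trainer.append_simulations(theta, x).train()

# MCMC-based posterior over the mixed model's parameters.
posterior = trainer.build_posterior()
posterior_samples = posterior.sample((1000,), x=x_o)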
