Commit b4c1d07
remove use_theorycovmat also from non-python files
RoyStegeman committed Nov 18, 2024
1 parent e77e54d commit b4c1d07
Showing 3 changed files with 16 additions and 18 deletions.
31 changes: 15 additions & 16 deletions validphys2/examples/API_extension_Pineappl.ipynb
@@ -58,7 +58,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 12,
+"execution_count": null,
 "id": "62ef3d68-89ed-4014-8719-4de3fbc1ecc6",
 "metadata": {},
 "outputs": [],
@@ -83,7 +83,7 @@
 " pdf = pdf.load()\n",
 " ret = []\n",
 " bin_norm = self._grid.bin_normalizations().reshape(-1, 1)\n",
-" \n",
+"\n",
 " for i, member in enumerate(pdf.members):\n",
 " tmp = self._grid.convolve_with_one(2212, member.xfxQ2, member.alphasQ2, xi=all_scales).reshape(-1, len(all_scales))\n",
 "\n",
@@ -92,7 +92,7 @@
 "\n",
 " # Apply shifts (if any) usually 0:\n",
 " tmp = np.concatenate([np.zeros((self._shift, len(all_scales))), tmp])\n",
-" \n",
+"\n",
 " ret.append(tmp)\n",
 "\n",
 " return np.array(ret)*self._factor\n",
@@ -111,7 +111,7 @@
 " self._pine_objects = pine_objects\n",
 "\n",
 " @functools.lru_cache\n",
-" def predictions(self, pdf): \n",
+" def predictions(self, pdf):\n",
 " operators = []\n",
 " for pine_operator in self._pine_objects:\n",
 " tmp = []\n",
@@ -124,7 +124,7 @@
 "\n",
 " # Loop over scales to get the result for all members for every scale\n",
 " return self._operation(*operators) # (nmembers, ndata, nscales)\n",
-" \n",
+"\n",
 " def __str__(self):\n",
 " return f\"PineContainer({self._name})\"\n",
 "\n",
@@ -134,7 +134,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 13,
+"execution_count": null,
 "id": "09b0bc28-e540-414f-a789-ed8fb5bac5a6",
 "metadata": {},
 "outputs": [],
@@ -161,14 +161,14 @@
 "\n",
 " if theory_meta.shifts is not None:\n",
 " shift = theory_meta.shifts.get(i, 0)\n",
-" \n",
+"\n",
 " pine_path = grid_path / f\"{i}.{EXT}\"\n",
 " tmp.append(PineObject(pine_path, factor, shift=shift))\n",
 " pinegrids.append(tmp)\n",
 "\n",
 " operation = OP[theory_meta.operation]\n",
 " return PineContainer(pinegrids, dsname=dataset.name, operation=operation)\n",
-" \n",
+"\n",
 "def _pine_predictions(dataset, pdf, central_only=False):\n",
 " \"\"\"Given a dataset and a PDF, produces predictions with pineappl\n",
 " The output shape is a list of DataFrames with the right shape for ThPredictions\"\"\"\n",
@@ -233,7 +233,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 14,
+"execution_count": null,
 "id": "0d88977d-5763-4929-8766-3919c8f70817",
 "metadata": {
 "scrolled": true
@@ -255,7 +255,7 @@
 "pdf_name = \"NNPDF40_nnlo_as_01180\"\n",
 "dname = \"LHCB_WPWM_8TEV_MUON_Y\"\n",
 "kwargs = {\"dataset_input\": {\"dataset\": dname}, \"theoryid\" : tid, \"use_cuts\":\"internal\", \"pdf\": pdf_name}\n",
-"theory_opt = {\"point_prescription\": \"3 point\", \"theoryids\": {\"from_\": \"scale_variation_theories\"}, \"use_theorycovmat\": True}\n",
+"theory_opt = {\"point_prescription\": \"3 point\", \"theoryids\": {\"from_\": \"scale_variation_theories\"}}\n",
 "\n",
 "base_chi2 = API.abs_chi2_data(**kwargs)\n",
 "print(f\"Exp chi2: {base_chi2.central_result / base_chi2.ndata:.4}\")\n",
@@ -320,11 +320,11 @@
 "source": [
 "def create_data_pickle(dname, pdf, theoryid, force=False, output_folder=Path(\".\")):\n",
 " data_path = Path(output_folder) / f\"results_{dname}_{theoryid}_{pdf}.pkl\"\n",
-" \n",
+"\n",
 " if data_path.exists() and not force:\n",
 " print(f\"Skipping {dname} ({pdf}) since {data_path} already exists. If you want to overwrite it use `force=True`\")\n",
 " return\n",
-" \n",
+"\n",
 " print(f\"Computing predictions for {dname} with {pdf} (tid={theoryid})\")\n",
 " generic = {\n",
 " \"dataset_input\": {\"dataset\": dname},\n",
@@ -342,7 +342,6 @@
 " **generic,\n",
 " point_prescription=\"9 point\",\n",
 " theoryids={\"from_\": \"scale_variation_theories\"},\n",
-" use_theorycovmat=True,\n",
 " )\n",
 " ret = {\n",
 " \"exp_data\": res_data.central_value,\n",
@@ -352,7 +351,7 @@
 " \"theory_uncert\": np.sqrt(np.diag(theory_covmat)),\n",
 " }\n",
 " ds = API.dataset(**generic)\n",
-" # Read the cuts \n",
+" # Read the cuts\n",
 " cuts = ds.cuts.load()+1 # kinematics start at 1\n",
 " kin_df = ds.commondata.metadata.load_kinematics().loc[cuts]\n",
 " results_df = pd.DataFrame(ret, index=kin_df.index)\n",
@@ -379,7 +378,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python 3 (ipykernel)",
+"display_name": "nnpdf",
 "language": "python",
 "name": "python3"
 },
@@ -393,7 +392,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.12.3"
+"version": "3.12.7"
 }
 },
 "nbformat": 4,
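For orientation, a minimal sketch (not part of the commit) of how the notebook requests scale-variation predictions after this change. The inputs are copied from the hunks above, except the theory id: the notebook defines `tid` in a cell outside this diff, and 717 is assumed here from the yaml runcard below.

from validphys.api import API

kwargs = {
    "dataset_input": {"dataset": "LHCB_WPWM_8TEV_MUON_Y"},
    "theoryid": 717,  # assumption: the diff only shows `tid`, defined elsewhere
    "use_cuts": "internal",
    "pdf": "NNPDF40_nnlo_as_01180",
}
# After this commit the theory options no longer carry use_theorycovmat; the
# point prescription plus the scale_variation_theories rule are sufficient.
theory_opt = {
    "point_prescription": "3 point",
    "theoryids": {"from_": "scale_variation_theories"},
}

base_chi2 = API.abs_chi2_data(**kwargs)
print(f"Exp chi2: {base_chi2.central_result / base_chi2.ndata:.4}")
# theory_opt is consumed together with kwargs in later notebook cells not shown in this diff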
1 change: 0 additions & 1 deletion validphys2/examples/data_theory_comparison_w_sv.yaml
@@ -7,7 +7,6 @@ use_cuts: "internal"
 
 theoryid: 717 # define the central theory
 point_prescription: "3 point"
-use_theorycovmat: true
 
 use_pdferr: true
 
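Runcard keys map one-to-one onto validphys API keyword arguments, so the effect of the deleted key can also be exercised from Python. A hedged sketch, assuming a dataset_input, a pdf, and an explicit theoryids rule that a complete runcard or API call would need (none of these appear in this diff):

from validphys.api import API

inp = {
    "dataset_input": {"dataset": "LHCB_WPWM_8TEV_MUON_Y"},  # assumption, not in the diff
    "pdf": "NNPDF40_nnlo_as_01180",  # assumption, not in the diff
    "use_cuts": "internal",
    "theoryid": 717,
    "point_prescription": "3 point",
    "theoryids": {"from_": "scale_variation_theories"},  # assumption, as in the notebook above
    "use_pdferr": True,
}
# results_with_theory_covmat (see results.py below) now picks up the theory
# covmat from these options alone, with no use_theorycovmat flag.
res = API.results_with_theory_covmat(**inp)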
2 changes: 1 addition & 1 deletion validphys2/src/validphys/results.py
@@ -578,7 +578,7 @@ def results_with_theory_covmat(dataset, results, theory_covmat_dataset):
     data theory covariance in which the experimental uncertainties are not stained by the thcovmat
     """
     # TODO: in principle this function could be removed, and `results` could automagically include
-    # the theory covmat when a key, let's call it `use_theorycovmat: true`, by changing the nodes in
+    # the theory covmat when a certain key (to be introduced) is set to True by changing the nodes in
     # `config.py` however at the moment config.py _loads_ theory covmats and we need to compute it
     # on the fly
     from .covmats import sqrt_covmat
