From b4c1d071be06772e7122099d5b166e812156f52d Mon Sep 17 00:00:00 2001 From: Roy Stegeman Date: Mon, 18 Nov 2024 16:04:08 +0000 Subject: [PATCH] remove use_theorycovmat also from non-python files --- .../examples/API_extension_Pineappl.ipynb | 31 +++++++++---------- .../examples/data_theory_comparison_w_sv.yaml | 1 - validphys2/src/validphys/results.py | 2 +- 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/validphys2/examples/API_extension_Pineappl.ipynb b/validphys2/examples/API_extension_Pineappl.ipynb index 90d6afdc73..54ee785a0c 100644 --- a/validphys2/examples/API_extension_Pineappl.ipynb +++ b/validphys2/examples/API_extension_Pineappl.ipynb @@ -58,7 +58,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "62ef3d68-89ed-4014-8719-4de3fbc1ecc6", "metadata": {}, "outputs": [], @@ -83,7 +83,7 @@ " pdf = pdf.load()\n", " ret = []\n", " bin_norm = self._grid.bin_normalizations().reshape(-1, 1)\n", - " \n", + "\n", " for i, member in enumerate(pdf.members):\n", " tmp = self._grid.convolve_with_one(2212, member.xfxQ2, member.alphasQ2, xi=all_scales).reshape(-1, len(all_scales))\n", "\n", @@ -92,7 +92,7 @@ "\n", " # Apply shifts (if any) usually 0:\n", " tmp = np.concatenate([np.zeros((self._shift, len(all_scales))), tmp])\n", - " \n", + "\n", " ret.append(tmp)\n", "\n", " return np.array(ret)*self._factor\n", @@ -111,7 +111,7 @@ " self._pine_objects = pine_objects\n", "\n", " @functools.lru_cache\n", - " def predictions(self, pdf): \n", + " def predictions(self, pdf):\n", " operators = []\n", " for pine_operator in self._pine_objects:\n", " tmp = []\n", @@ -124,7 +124,7 @@ "\n", " # Loop over scales to get the result for all members for every scale\n", " return self._operation(*operators) # (nmembers, ndata, nscales)\n", - " \n", + "\n", " def __str__(self):\n", " return f\"PineContainer({self._name})\"\n", "\n", @@ -134,7 +134,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "id": "09b0bc28-e540-414f-a789-ed8fb5bac5a6", "metadata": {}, "outputs": [], @@ -161,14 +161,14 @@ "\n", " if theory_meta.shifts is not None:\n", " shift = theory_meta.shifts.get(i, 0)\n", - " \n", + "\n", " pine_path = grid_path / f\"{i}.{EXT}\"\n", " tmp.append(PineObject(pine_path, factor, shift=shift))\n", " pinegrids.append(tmp)\n", "\n", " operation = OP[theory_meta.operation]\n", " return PineContainer(pinegrids, dsname=dataset.name, operation=operation)\n", - " \n", + "\n", "def _pine_predictions(dataset, pdf, central_only=False):\n", " \"\"\"Given a dataset and a PDF, produces predictions with pineappl\n", " The output shape is a list of DataFrames with the right shape for ThPredictions\"\"\"\n", @@ -233,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "id": "0d88977d-5763-4929-8766-3919c8f70817", "metadata": { "scrolled": true @@ -255,7 +255,7 @@ "pdf_name = \"NNPDF40_nnlo_as_01180\"\n", "dname = \"LHCB_WPWM_8TEV_MUON_Y\"\n", "kwargs = {\"dataset_input\": {\"dataset\": dname}, \"theoryid\" : tid, \"use_cuts\":\"internal\", \"pdf\": pdf_name}\n", - "theory_opt = {\"point_prescription\": \"3 point\", \"theoryids\": {\"from_\": \"scale_variation_theories\"}, \"use_theorycovmat\": True}\n", + "theory_opt = {\"point_prescription\": \"3 point\", \"theoryids\": {\"from_\": \"scale_variation_theories\"}}\n", "\n", "base_chi2 = API.abs_chi2_data(**kwargs)\n", "print(f\"Exp chi2: {base_chi2.central_result / base_chi2.ndata:.4}\")\n", @@ -320,11 +320,11 @@ "source": [ "def 
create_data_pickle(dname, pdf, theoryid, force=False, output_folder=Path(\".\")):\n", " data_path = Path(output_folder) / f\"results_{dname}_{theoryid}_{pdf}.pkl\"\n", - " \n", + "\n", " if data_path.exists() and not force:\n", " print(f\"Skipping {dname} ({pdf}) since {data_path} already exists. If you want to overwrite it use `force=True`\")\n", " return\n", - " \n", + "\n", " print(f\"Computing predictions for {dname} with {pdf} (tid={theoryid})\")\n", " generic = {\n", " \"dataset_input\": {\"dataset\": dname},\n", @@ -342,7 +342,6 @@ " **generic,\n", " point_prescription=\"9 point\",\n", " theoryids={\"from_\": \"scale_variation_theories\"},\n", - " use_theorycovmat=True,\n", " )\n", " ret = {\n", " \"exp_data\": res_data.central_value,\n", @@ -352,7 +351,7 @@ " \"theory_uncert\": np.sqrt(np.diag(theory_covmat)),\n", " }\n", " ds = API.dataset(**generic)\n", - " # Read the cuts \n", + " # Read the cuts\n", " cuts = ds.cuts.load()+1 # kinematics start at 1\n", " kin_df = ds.commondata.metadata.load_kinematics().loc[cuts]\n", " results_df = pd.DataFrame(ret, index=kin_df.index)\n", @@ -379,7 +378,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "nnpdf", "language": "python", "name": "python3" }, @@ -393,7 +392,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/validphys2/examples/data_theory_comparison_w_sv.yaml b/validphys2/examples/data_theory_comparison_w_sv.yaml index ef86328b82..cef33ffed2 100644 --- a/validphys2/examples/data_theory_comparison_w_sv.yaml +++ b/validphys2/examples/data_theory_comparison_w_sv.yaml @@ -7,7 +7,6 @@ use_cuts: "internal" theoryid: 717 # define the central theory point_prescription: "3 point" -use_theorycovmat: true use_pdferr: true diff --git a/validphys2/src/validphys/results.py b/validphys2/src/validphys/results.py index 74529dcdf9..99e7b73c08 100644 --- a/validphys2/src/validphys/results.py +++ b/validphys2/src/validphys/results.py @@ -578,7 +578,7 @@ def results_with_theory_covmat(dataset, results, theory_covmat_dataset): data theory covariance in which the experimental uncertainties are not stained by the thcovmat """ # TODO: in principle this function could be removed, and `results` could automagically include - # the theory covmat when a key, let's call it `use_theorycovmat: true`, by changing the nodes in + # the theory covmat when a certain key (to be introduced) is set to True by changing the nodes in # `config.py` however at the moment config.py _loads_ theory covmats and we need to compute it # on the fly from .covmats import sqrt_covmat
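
With `use_theorycovmat` removed everywhere above, the theory covariance is requested through the scale-variation settings alone: `point_prescription` together with `theoryids` taken from `scale_variation_theories`. Below is a minimal sketch of that call pattern against the validphys API, reusing only names that appear in this patch; that `results_with_theory_covmat` is reachable through `API` like the other providers used in the notebook, and that it returns a (data, theory) pair analogous to the plain `results` provider, are assumptions rather than confirmed API.

from validphys.api import API

params = {
    "dataset_input": {"dataset": "LHCB_WPWM_8TEV_MUON_Y"},
    "theoryid": 717,
    "use_cuts": "internal",
    "pdf": "NNPDF40_nnlo_as_01180",
    # These two settings alone now drive the theory covmat; the removed
    # `use_theorycovmat: true` flag is no longer passed anywhere.
    "point_prescription": "3 point",
    "theoryids": {"from_": "scale_variation_theories"},
}

# chi2 against the experimental covariance only, exactly as in the notebook
base_chi2 = API.abs_chi2_data(
    **{k: params[k] for k in ("dataset_input", "theoryid", "use_cuts", "pdf")}
)
print(f"Exp chi2: {base_chi2.central_result / base_chi2.ndata:.4}")

# Results whose data covariance additionally carries the theory covmat,
# computed on the fly from the scale-varied theories (assumed to mirror
# the (data, theory) result pair returned by `results`).
data_result, theory_result = API.results_with_theory_covmat(**params)

The updated data_theory_comparison_w_sv.yaml runcard follows the same logic: once `use_theorycovmat: true` is dropped, the remaining `point_prescription: "3 point"` is what enables the scale-variation covariance for the comparison.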