
Commit d36bcce: resolve conflicts

Radonirinaunimi committed Jul 25, 2024
2 parents 0e0adb4 + 4d72844

Showing 23 changed files with 1,370 additions and 1,356 deletions.
2 changes: 1 addition & 1 deletion extra_tests/regression_checks.py
@@ -27,7 +27,7 @@
"no_lagrange": 27,
"no_csr": 613,
"polarized_evol": 34,
"single_dense": 316,
"multi_dense": 316,
}


1,160 changes: 580 additions & 580 deletions extra_tests/regression_fits/flavour_29.exportgrid

Large diffs are not rendered by default.

30 changes: 15 additions & 15 deletions extra_tests/regression_fits/flavour_29.json
@@ -49,25 +49,25 @@
"trainable": false
}
],
"stop_epoch": 181,
"best_epoch": 69,
"erf_tr": 4.977453231811523,
"erf_vl": 5.5623860359191895,
"chi2": 3.8969779014587402,
"stop_epoch": 141,
"best_epoch": 29,
"erf_tr": 4.978306293487549,
"erf_vl": 5.562335014343262,
"chi2": 3.8961379528045654,
"pos_state": "POS_PASS",
"arc_lengths": [
- 1.1047281768455546,
- 1.2121895138128305,
- 0.9906464943638001,
- 0.9904916210719534,
- 0.9907080933270742
+ 1.1046950323223816,
+ 1.2122357294523425,
+ 0.9906464880057816,
+ 0.9904916575857023,
+ 0.9907080733680326
],
"integrability": [
- 0.46764490008354204,
- 0.4676449000835418,
- 0.08091166429221643,
- 0.10038149356842034,
- 0.5176680088043213
+ 0.46764521300792666,
+ 0.46764521300792683,
+ 0.0809073969721795,
+ 0.10018515586853038,
+ 0.5176685005426407
],
"timing": {
"walltime": {
Binary file not shown.
@@ -34,8 +34,8 @@ nnseed: 2
mcseed: 1

# checks both that can be loaded and saved
save: "single_dense.weights.h5"
load: "single_dense.weights.h5"
save: "multi_dense.weights.h5"
load: "multi_dense.weights.h5"

parameters: # This defines the parameter dictionary that is passed to the Model Trainer
nodes_per_layer: [15, 10, 8]
@@ -50,7 +50,7 @@ parameters: # This defines the parameter dictionary that is passed to the Model
multiplier: 1.05
initial: 1.5
stopping_patience: 0.10 # percentage of the number of epochs
- layer_type: 'single_dense'
+ layer_type: 'multidense'
dropout: 0.0
threshold_chi2: 10.0

633 changes: 633 additions & 0 deletions extra_tests/regression_fits/multi_dense_316.exportgrid

Large diffs are not rendered by default.

95 changes: 95 additions & 0 deletions extra_tests/regression_fits/multi_dense_316.json
@@ -0,0 +1,95 @@
{
"preprocessing": [
{
"fl": "sng",
"smallx": 1.1168484687805176,
"largex": 1.4932247400283813,
"trainable": false
},
{
"fl": "g",
"smallx": 0.9923287034034729,
"largex": 3.7714877128601074,
"trainable": false
},
{
"fl": "v",
"smallx": 0.4898352324962616,
"largex": 1.672149658203125,
"trainable": false
},
{
"fl": "v3",
"smallx": 0.28436920046806335,
"largex": 3.4038245677948,
"trainable": false
},
{
"fl": "v8",
"smallx": 0.635401725769043,
"largex": 1.9356087446212769,
"trainable": false
},
{
"fl": "t3",
"smallx": 0.5063296556472778,
"largex": 2.4878852367401123,
"trainable": false
},
{
"fl": "t8",
"smallx": 0.8517391681671143,
"largex": 2.7349023818969727,
"trainable": false
},
{
"fl": "t15",
"smallx": 1.1255686283111572,
"largex": 1.68263578414917,
"trainable": false
}
],
"stop_epoch": 1100,
"best_epoch": 1099,
"erf_tr": 2.4744553565979004,
"erf_vl": 2.9059481620788574,
"chi2": 2.118800401687622,
"pos_state": "POS_VETO",
"arc_lengths": [
1.3125837500708606,
1.488952866412567,
1.1794333655766738,
1.000696970195011,
1.2462031714666137
],
"integrability": [
0.0008942512758964849,
0.0008942512758955967,
5.0144157625053865e-05,
0.46068396419286617,
0.007144219125619022
],
"timing": {
"walltime": {
"Total": 10.184973239898682,
"start": 0.0,
"replica_set": 0.22072839736938477,
"replica_fitted": 10.184893608093262,
"replica_set_to_replica_fitted": 9.964165210723877
},
"cputime": {
"Total": 11.599816435000001,
"start": 0.0,
"replica_set": 0.21877608700000017,
"replica_fitted": 11.599735304,
"replica_set_to_replica_fitted": 11.380959217
}
},
"version": {
"keras": "3.4.1",
"tensorflow": "2.17.0, mkl=False",
"numpy": "1.26.4",
"nnpdf": "4.0.9.post1058.dev0+0a1b331ae",
"validphys": "4.0.9.post1058.dev0+0a1b331ae"
}
}
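
For context, a minimal sketch of how a regression .json like the one above might be compared against a stored reference (illustrative only; the function name, file paths and the rtol tolerance are assumptions, not the actual extra_tests code):

# Illustrative sketch, not the actual regression test: load a fit's .json output
# and compare a few of its scalar results against a stored reference file.
import json

import numpy as np

def compare_fit_json(produced_path, reference_path, rtol=1e-5):
    """Compare stopping info, chi2 and arc lengths between two fit .json files."""
    with open(produced_path) as f:
        produced = json.load(f)
    with open(reference_path) as f:
        reference = json.load(f)

    assert produced["stop_epoch"] == reference["stop_epoch"]
    assert produced["pos_state"] == reference["pos_state"]
    np.testing.assert_allclose(produced["chi2"], reference["chi2"], rtol=rtol)
    np.testing.assert_allclose(produced["arc_lengths"], reference["arc_lengths"], rtol=rtol)

# e.g. compare_fit_json("multi_dense_316.json", "extra_tests/regression_fits/multi_dense_316.json")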
633 changes: 0 additions & 633 deletions extra_tests/regression_fits/single_dense_316.exportgrid

This file was deleted.

95 changes: 0 additions & 95 deletions extra_tests/regression_fits/single_dense_316.json

This file was deleted.

Binary file modified extra_tests/regression_fits/weights.weights.h5
Binary file modified extra_tests/regression_fits/weights_feature.weights.h5
Binary file modified extra_tests/regression_fits/weights_flavour.weights.h5
Binary file modified extra_tests/regression_fits/weights_polevol.weights.h5
6 changes: 3 additions & 3 deletions n3fit/runcards/examples/nnpdf40-like.yml
@@ -1,10 +1,10 @@
#
# Configuration file for n3fit
#
- ################################################################################
- description: NNLO baseline fit, NNPDF4.0 dataset
+ ######################################################################################
+ description: NNLO baseline fit, NNPDF4.0 dataset, compare to NNPDF40_nnlo_as_01180_qcd

- ################################################################################
+ ######################################################################################
dataset_inputs:
- {dataset: NMC_NC_NOTFIXED_DW_EM-F2, frac: 0.75, variant: legacy}
- {dataset: NMC_NC_NOTFIXED_P_EM-SIGMARED, frac: 0.75, variant: legacy}
6 changes: 3 additions & 3 deletions n3fit/src/n3fit/backends/keras_backend/base_layers.py
@@ -102,7 +102,7 @@ def dense_per_flavour(basis_size=8, kernel_initializer="glorot_normal", **dense_

# Need to generate a list of dense layers
dense_basis = [
base_layer_selector("single_dense", kernel_initializer=initializer, **dense_kwargs)
base_layer_selector("dense", kernel_initializer=initializer, **dense_kwargs)
for initializer in kernel_initializer
]

@@ -132,7 +132,7 @@ def apply_dense(xinput):


layers = {
"dense": (
"multidense": (
MultiDense,
{
"replica_seeds": None,
@@ -144,7 +144,7 @@
"is_first_layer": False,
},
),
"single_dense": (
"dense": (
Dense,
{
"kernel_initializer": "glorot_normal",
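
For context, the net effect of this hunk is that the registry key "dense" now points to the standard Keras Dense layer, while the replica-stacked MultiDense layer is registered under "multidense". A minimal sketch of how such a name-keyed registry can be resolved (illustrative only; the real base_layer_selector in n3fit may differ, and MultiDense is omitted since it is an n3fit-internal layer):

# Illustrative sketch: a registry mapping a layer_type string to
# (layer class, default kwargs), resolved by name with optional overrides.
from tensorflow.keras.layers import Dense

LAYER_REGISTRY = {
    "dense": (Dense, {"kernel_initializer": "glorot_normal", "units": 5, "activation": "sigmoid"}),
    # "multidense": (MultiDense, {...})  # replica-aware layer defined inside n3fit
}

def select_layer(layer_name, **overrides):
    """Instantiate the layer registered under ``layer_name``."""
    try:
        layer_class, defaults = LAYER_REGISTRY[layer_name]
    except KeyError as err:
        raise ValueError(f"Unknown layer type: {layer_name}") from err
    return layer_class(**{**defaults, **overrides})

# e.g. select_layer("dense", units=8, activation="tanh") builds a Dense(8, activation="tanh")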
4 changes: 2 additions & 2 deletions n3fit/src/n3fit/checks.py
@@ -135,7 +135,7 @@ def check_initializer(initializer):
def check_layer_type_implemented(parameters):
"""Checks whether the layer_type is implemented"""
layer_type = parameters.get("layer_type")
- implemented_types = ["dense", "dense_per_flavour", "single_dense"]
+ implemented_types = ["dense", "dense_per_flavour", "multidense"]
if layer_type not in implemented_types:
raise CheckError(
f"Layer type {layer_type} not implemented, must be one of {implemented_types}"
@@ -423,7 +423,7 @@ def check_consistent_parallel(parameters, parallel_models):
"""
if not parallel_models:
return
- if parameters.get("layer_type") != "dense":
+ if parameters.get("layer_type") not in ("dense"):
raise CheckError("Parallelization has only been tested with layer_type=='dense'")


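
A minimal sketch of what these two checks enforce after the renaming (illustrative only; CheckError is simplified here, and note that a one-element tuple needs a trailing comma, ("dense",), for a true membership test):

# Illustrative sketch of the two validations above, with a simplified CheckError.
class CheckError(Exception):
    pass

IMPLEMENTED_LAYER_TYPES = ["dense", "dense_per_flavour", "multidense"]

def check_layer_type_implemented(parameters):
    """Reject runcards whose layer_type is not one of the implemented layers."""
    layer_type = parameters.get("layer_type")
    if layer_type not in IMPLEMENTED_LAYER_TYPES:
        raise CheckError(
            f"Layer type {layer_type} not implemented, must be one of {IMPLEMENTED_LAYER_TYPES}"
        )

def check_consistent_parallel(parameters, parallel_models):
    """Only layer_type 'dense' is accepted when fitting replicas in parallel."""
    if not parallel_models:
        return
    # ("dense",) with the trailing comma is a tuple; ("dense") is just the string "dense".
    if parameters.get("layer_type") not in ("dense",):
        raise CheckError("Parallelization has only been tested with layer_type=='dense'")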
38 changes: 20 additions & 18 deletions n3fit/src/n3fit/model_gen.py
@@ -742,7 +742,7 @@ def layer_generator(i_layer, nodes_out, activation):
"""Generate the ``i_layer``-th dense_per_flavour layer for all replicas."""
layers = []
for replica_seed in replica_seeds:
- seed = replica_seed + i_layer * basis_size
+ seed = int(replica_seed + i_layer * basis_size)
initializers = [
MetaLayer.select_initializer(initializer_name, seed=seed + b)
for b in range(basis_size)
@@ -758,25 +758,27 @@

return layers

- elif layer_type == "single_dense":
+ elif layer_type == "dense":

- # The checks should've triggered, but better safe than sorry
- if len(replica_seeds) > 1:
- raise ValueError("`single_dense` only valid with one replica")
- seed = replica_seeds[0]
+ def initializer_generator(seed, i_layer):
+ seed += i_layer
+ return MetaLayer.select_initializer(initializer_name, seed=int(seed))

def layer_generator(i_layer, nodes_out, activation):
- return base_layer_selector(
- layer_type,
- kernel_initializer=MetaLayer.select_initializer(
- initializer_name, seed=seed + i_layer
- ),
- units=nodes_out,
- activation=activation,
- regularizer=reg,
- )
+ layers = []
+ for replica_seed in replica_seeds:
+ layers.append(
+ base_layer_selector(
+ layer_type,
+ kernel_initializer=initializer_generator(replica_seed, i_layer),
+ units=nodes_out,
+ activation=activation,
+ regularizer=reg,
+ )
+ )
+ return layers

- elif layer_type == "dense":
+ elif layer_type == "multidense":

def layer_generator(i_layer, nodes_out, activation):
"""Generate the ``i_layer``-th MetaLayer.MultiDense layer for all replicas."""
@@ -813,8 +815,8 @@ def layer_generator(i_layer, nodes_out, activation):
concat = base_layer_selector("concatenate")
list_of_pdf_layers[-1] = [lambda x: concat(layer(x)) for layer in list_of_pdf_layers[-1]]

- # Apply all layers to the input to create the models
- if layer_type in ("dense", "single_dense"):
+ # In the `layer_type` multidense we have a `MultiDense` layer and we can get out here
+ if layer_type == "multidense":
pdfs = x_input
for layer in list_of_pdf_layers:
pdfs = layer(pdfs)
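
The last hunk points at the structural difference between the two families: with layer_type "multidense" each entry of list_of_pdf_layers is a single replica-aware layer, so the input tensor can be threaded straight through, whereas with "dense" (or "dense_per_flavour") each entry is a list holding one layer per replica. A rough sketch of the two application patterns (names and shapes are assumptions, not the actual model_gen code):

# Illustrative sketch: applying a stack of layers for the two layer_type families.
def apply_multidense(x_input, list_of_pdf_layers):
    """Each entry is one replica-aware layer: feed the tensor straight through."""
    pdfs = x_input
    for layer in list_of_pdf_layers:
        pdfs = layer(pdfs)
    return pdfs

def apply_per_replica(x_input, list_of_pdf_layers):
    """Each entry is a list of layers, one per replica: thread each replica separately."""
    n_replicas = len(list_of_pdf_layers[0])
    pdfs_per_replica = [x_input] * n_replicas
    for layer_list in list_of_pdf_layers:
        pdfs_per_replica = [layer(pdf) for layer, pdf in zip(layer_list, pdfs_per_replica)]
    return pdfs_per_replica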
Binary file modified n3fit/src/n3fit/tests/regressions/weights_1.weights.h5
Binary file modified n3fit/src/n3fit/tests/regressions/weights_3.weights.h5
Binary file modified n3fit/src/n3fit/tests/regressions/weights_pol_1.weights.h5
Binary file modified n3fit/src/n3fit/tests/regressions/weights_pol_3.weights.h5
(remaining changed files not shown)
