Skip to content

Commit 7641039

Browse files
Lara813 and mafrahm committed
Update hbw/tasks/inference.py
Co-authored-by: Mathis Frahm <49306645+mafrahm@users.noreply.github.com>
1 parent caccecd commit 7641039

File tree

3 files changed

+10
-20
lines changed

3 files changed

+10
-20
lines changed

hbw/ml/base.py

+9-18
Original file line numberDiff line numberDiff line change
@@ -335,23 +335,14 @@ def open_model(self, target: law.LocalDirectoryTarget) -> dict[str, Any]:
335335
models["parameters"] = yaml.load(f_in, Loader=yaml.Loader)
336336

337337
# custom loss needed due to output layer changes for negative weights
338-
from hbw.ml.tf_util import cumulated_crossentropy, categorical_crossentropy
338+
from hbw.ml.tf_util import cumulated_crossentropy
339339

340-
# Check for negative weight handling and assign loss function accordingly.
341-
if self.negative_weights == "ignore":
342-
models["model"] = tf.keras.models.load_model(
343-
target["mlmodel"].path, custom_objects={categorical_crossentropy.__name__: categorical_crossentropy},
344-
)
345-
models["best_model"] = tf.keras.models.load_model(
346-
target["checkpoint"].path, custom_objects={categorical_crossentropy.__name__: categorical_crossentropy},
347-
)
348-
else:
349-
models["model"] = tf.keras.models.load_model(
350-
target["mlmodel"].path, custom_objects={cumulated_crossentropy.__name__: cumulated_crossentropy},
351-
)
352-
models["best_model"] = tf.keras.models.load_model(
353-
target["checkpoint"].path, custom_objects={cumulated_crossentropy.__name__: cumulated_crossentropy},
354-
)
340+
models["model"] = tf.keras.models.load_model(
341+
target["mlmodel"].path, custom_objects={cumulated_crossentropy.__name__: cumulated_crossentropy},
342+
)
343+
models["best_model"] = tf.keras.models.load_model(
344+
target["checkpoint"].path, custom_objects={cumulated_crossentropy.__name__: cumulated_crossentropy},
345+
)
355346

356347
return models
357348

@@ -553,7 +544,7 @@ def prepare_ml_model(
553544

554545
from keras.models import Sequential
555546
from keras.layers import Dense, BatchNormalization
556-
from hbw.ml.tf_util import cumulated_crossentropy, categorical_crossentropy
547+
from hbw.ml.tf_util import cumulated_crossentropy
557548

558549
n_inputs = len(set(self.input_features))
559550
n_outputs = len(self.processes)
@@ -576,7 +567,7 @@ def prepare_ml_model(
576567
optimizer = keras.optimizers.Adam(learning_rate=0.00050)
577568
if self.negative_weights == "ignore":
578569
model.compile(
579-
loss=categorical_crossentropy,
570+
loss="categorical_crossentropy",
580571
optimizer=optimizer,
581572
weighted_metrics=["categorical_accuracy"],
582573
)

hbw/ml/mixins.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def prepare_ml_model(
5555
import tensorflow.keras as keras
5656
from keras.models import Sequential
5757
from keras.layers import Dense, BatchNormalization
58-
from hbw.ml.tf_util import cumulated_crossentropy # , categorical_crossentropy
58+
from hbw.ml.tf_util import cumulated_crossentropy
5959

6060
n_inputs = len(set(self.input_features))
6161
n_outputs = len(self.processes)

hbw/tasks/inference.py

-1
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@
3232

3333
# Function copied from Mathis Hist hook commit
3434
# TODO: define once at central place (hist_util.py)
35-
# TODO: define once at central place (hist_util.py)
3635
def apply_rebinning_edges(h: hist.Histogram, axis_name: str, edges: list):
3736
"""
3837
Generalized rebinning of a single axis from a hist.Histogram, using predefined edges.

0 commit comments

Comments
 (0)