Fix SequentialPreOutputLoss build when loss layer missing
qmarcou committed Feb 14, 2024
1 parent 5311ba9 commit 8dab1ed
Showing 2 changed files with 2 additions and 1 deletion.
2 changes: 1 addition & 1 deletion keras_utils/models.py
@@ -75,7 +75,7 @@ def build(self, input_shape=None):
         super(SequentialPreOutputLoss, self).build(input_shape=input_shape)
         # In case no Input or batch_shape had been provided output tensors
         # had not been created
-        if self.loss_layer_name is not None and \
+        if self.loss_layer_name is not None and self._loss_layer_added and\
                 self._loss_tensor_output is None:
             self.output_layer_name = self.layers[-1].name
             self._loss_tensor_output = self._loss_layer.output
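
The extra self._loss_layer_added condition only helps if the class records, while layers are registered, whether a layer matching loss_layer_name was actually found. A minimal, framework-free sketch of that bookkeeping follows; the class and method names here are invented for illustration and are not the repository's code:

# Hypothetical bookkeeping sketch; the real class lives in keras_utils/models.py
# and subclasses a Keras model, so its actual wiring differs.
class LossLayerBookkeepingSketch:
    def __init__(self, layers, loss_layer_name=None):
        self.loss_layer_name = loss_layer_name
        self._loss_layer = None
        self._loss_layer_added = False
        self._loss_tensor_output = None
        for layer in layers:
            self._register(layer)

    def _register(self, layer):
        # The flag flips only when a layer named loss_layer_name is seen, so
        # build() can safely skip the output wiring when no such layer exists.
        if self.loss_layer_name is not None and \
                layer.name == self.loss_layer_name:
            self._loss_layer = layer
            self._loss_layer_added = True

With a flag like this in place, the guarded branch in build() no longer touches self._loss_layer.output for a loss layer that was never added; the misnamed-layer case is exercised by the test hunk below, which expects compile() to raise a ValueError.
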
1 change: 1 addition & 0 deletions keras_utils/tests/test_models.py
@@ -69,6 +69,7 @@ def test_compile(self):
         m.compile()
         m.compile(loss=keras.losses.BinaryCrossentropy(),
                   metrics=keras.metrics.AUC())
+        # Non existing loss layer name
         m = SequentialPreOutputLoss(layers=[inputL, hiddenL, outputL],
                                     loss_layer_name="ZZZ")
         self.assertRaises(ValueError, m.compile)
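
Taken together, the two hunks suggest usage along the lines of the sketch below. Only the constructor keywords (layers, loss_layer_name), the compile() calls, and the ValueError expectation come from the test above; the layer definitions, shapes, names, and the import path are assumptions for illustration.

# Illustrative sketch mirroring the test context; layer definitions, sizes and
# the import path are assumptions, not taken verbatim from the repository.
import keras
from keras_utils.models import SequentialPreOutputLoss

inputL = keras.Input(shape=(4,))
hiddenL = keras.layers.Dense(8, activation="relu", name="hidden")
outputL = keras.layers.Dense(1, activation="sigmoid", name="output")

# Existing loss layer name: compiles like a plain Sequential model.
m = SequentialPreOutputLoss(layers=[inputL, hiddenL, outputL],
                            loss_layer_name="hidden")
m.compile(loss=keras.losses.BinaryCrossentropy(), metrics=keras.metrics.AUC())

# Non-existing loss layer name ("ZZZ"): the test asserts that compile() raises
# a ValueError; the added _loss_layer_added guard keeps build() from
# dereferencing a loss layer that was never added when no Input or batch_shape
# is provided.
m = SequentialPreOutputLoss(layers=[keras.layers.Dense(8, name="hidden2"),
                                    keras.layers.Dense(1, name="output2")],
                            loss_layer_name="ZZZ")
try:
    m.compile()
except ValueError as err:
    print("compile() raised ValueError:", err)
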
