Skip to content
Draft
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Add `trainable` parameter to ELU and ReLU layer savers
  • Loading branch information
kokorins committed Jun 21, 2021
commit 3b36a5165d8f1215e4ff9225da7f4e453dd18dc1
Original file line number Diff line number Diff line change
/**
 * Converts a [ReLU] activation layer into its serializable Keras form.
 *
 * Maps the layer's clipping/threshold parameters and its trainability flag
 * into a [LayerConfig]; `max_value` stays `null` when the layer has no upper
 * clip (hence the safe call on [ReLU.maxValue]).
 *
 * @param layer the ReLU layer to serialize.
 * @return a [KerasLayer] with `class_name` [LAYER_RELU] and the populated config.
 */
private fun createKerasReLULayer(layer: ReLU): KerasLayer {
    val configX = LayerConfig(
        dtype = DATATYPE_FLOAT32,
        max_value = layer.maxValue?.toDouble(),
        negative_slope = layer.negativeSlope.toDouble(),
        threshold = layer.threshold.toDouble(),
        trainable = layer.isTrainable,
    )
    return KerasLayer(class_name = LAYER_RELU, config = configX)
}

/**
 * Converts an [ELU] activation layer into its serializable Keras form.
 *
 * Carries over the layer's `alpha` coefficient and its trainability flag
 * into a [LayerConfig].
 *
 * @param layer the ELU layer to serialize.
 * @return a [KerasLayer] with `class_name` [LAYER_ELU] and the populated config.
 */
private fun createKerasELULayer(layer: ELU): KerasLayer {
    val configX = LayerConfig(
        dtype = DATATYPE_FLOAT32,
        alpha = layer.alpha.toDouble(),
        trainable = layer.isTrainable,
    )
    return KerasLayer(class_name = LAYER_ELU, config = configX)
}
Expand Down