Skip to content

Commit

Permalink
fix hard_sigmoid_activation
Browse files Browse the repository at this point in the history
  • Loading branch information
Dobiasd committed Apr 10, 2024
1 parent e023db1 commit 18ba1b0
Showing 1 changed file with 8 additions and 1 deletion.
9 changes: 8 additions & 1 deletion include/fdeep/recurrent_ops.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,14 @@ namespace internal {

inline float_type hard_sigmoid_activation(float_type x)
{
    // Hard sigmoid matching Keras semantics: clip(x / 6 + 0.5, 0, 1),
    // i.e. 0 for x < -3, 1 for x > 3, and a linear ramp in between.
    // (The previous formula used slope 0.2 / cutoffs at +-2.5, which
    // disagrees with Keras; this piecewise form is the reference one.)
    // https://github.com/keras-team/keras/blob/f7bc67e6c105c116a2ba7f5412137acf78174b1a/keras/ops/nn.py#L316C6-L316C74
    if (x < static_cast<float_type>(-3)) {
        return static_cast<float_type>(0);
    }
    if (x > static_cast<float_type>(3)) {
        return static_cast<float_type>(1);
    }
    // Linear region: x / 6 + 0.5. Literals are cast so the whole
    // expression stays in float_type (no silent float->double promotion).
    return (x / static_cast<float_type>(6)) + static_cast<float_type>(0.5);
}

inline float_type relu_activation(float_type x)
Expand Down

0 comments on commit 18ba1b0

Please sign in to comment.