diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index 9bb4212743d8ef..f5ca261669c9c6 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -310,8 +310,6 @@ namespace ops = paddle::operators;
                             ops::ActivationOpGrad,          \
                             ops::ActivationGradOpInplaceInferer);
 
-FOR_EACH_ACTIVATION_OP(REGISTER_ACTIVATION_OP);
-
 REGISTER_ACTIVATION_OP(mish, Mish, MishFunctor, MishGradFunctor);
 
 /* ========================== register checkpoint ===========================*/
diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index 229c4e22685d93..6dba3284710f42 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -61,45 +61,5 @@ struct BaseActivationFunctor {
 
 USE_PHI_FUNCTOR(Mish)
 
-template <typename T>
-struct SoftReluFunctor : public BaseActivationFunctor<T> {
-  float threshold;
-  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
-    return {{"threshold", &threshold}};
-  }
-
-  template <typename Device, typename X, typename Out>
-  void operator()(Device d, X x, Out out) const {
-    auto tmp = static_cast<T>(threshold);
-    auto temp = x.cwiseMax(-tmp).cwiseMin(tmp);
-    out.device(d) = (static_cast<T>(1) + temp.exp()).log();
-  }
-};
-
-template <typename T>
-struct SoftReluGradFunctor : public BaseActivationFunctor<T> {
-  float threshold;
-  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
-    return {{"threshold", &threshold}};
-  }
-  template <typename Device,
-            typename X,
-            typename Out,
-            typename dOut,
-            typename dX>
-  void operator()(Device d, X x UNUSED, Out out, dOut dout, dX dx) const {
-    auto tmp = static_cast<T>(threshold);
-    auto temp = ((out > -tmp) * (out < tmp)).template cast<T>();
-    dx.device(d) = dout * (static_cast<T>(1) - (-out).exp()) * temp;
-  }
-
-  static constexpr ActBwdOpFwdDeps FwdDeps() {
-    return ActBwdOpFwdDeps::kDepOut;
-  }
-};
-
 }  // namespace operators
 }  // namespace paddle
-
-#define FOR_EACH_ACTIVATION_OP(__macro)                                      \
-  __macro(soft_relu, SoftRelu, SoftReluFunctor, SoftReluGradFunctor);
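
For reference, the removed SoftReluFunctor computed out = log(1 + exp(clip(x, -threshold, threshold))) elementwise, and SoftReluGradFunctor computed dx = dout * (1 - exp(-out)), masked to zero where out lies outside (-threshold, threshold). Below is a minimal standalone sketch of that math in plain scalar C++; it is not part of this patch and the function names are illustrative only.

// Minimal standalone sketch (plain C++, no Eigen/Paddle types) of the math
// implemented by the removed SoftReluFunctor / SoftReluGradFunctor.
#include <algorithm>
#include <cmath>

// Forward (SoftReluFunctor): out = log(1 + exp(clip(x, -threshold, threshold)))
inline float soft_relu(float x, float threshold) {
  float t = std::min(std::max(x, -threshold), threshold);
  return std::log(1.0f + std::exp(t));
}

// Backward (SoftReluGradFunctor): dx = dout * (1 - exp(-out)),
// zeroed wherever out falls outside the open interval (-threshold, threshold).
inline float soft_relu_grad(float out, float dout, float threshold) {
  bool in_range = (out > -threshold) && (out < threshold);
  return in_range ? dout * (1.0f - std::exp(-out)) : 0.0f;
}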