Skip to content

Commit 632bc1f

Browse files
authored
[PHI] Migrate relu6 and abs kernels (#45397)
* abs relu6 fwd
* abs bwd
* gaussian_random_kernel and mkldnn-onednn renaming
* scale kernel
* whitespace
* whitespace
* revert scale migration
* whitespaces
* revert changes to gaussian kernel
* whitespaces
1 parent 923594d commit 632bc1f

File tree

8 files changed

+44
-67
lines changed

8 files changed

+44
-67
lines changed

paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc

Lines changed: 19 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -144,27 +144,13 @@ void eltwise_grad_use_out(const framework::ExecutionContext &ctx,
144144
dx->set_mem_desc(diff_src_memory_p->get_desc());
145145
}
146146

147-
template <typename T, dnnl::algorithm algorithm>
148-
struct MKLDNNActivationFunc : public BaseActivationFunctor<T> {
149-
void operator()(const framework::ExecutionContext &ctx) const {
150-
eltwise_forward<T>(ctx, algorithm);
151-
}
152-
};
153-
154147
template <typename T, dnnl::algorithm algorithm>
155148
struct MKLDNNActivationGradFunc : public BaseActivationFunctor<T> {
156149
void operator()(const framework::ExecutionContext &ctx) const {
157150
eltwise_grad<T>(ctx, algorithm);
158151
}
159152
};
160153

161-
template <typename T, dnnl::algorithm algorithm>
162-
struct MKLDNNActivationGradUseOutFunc : public BaseActivationFunctor<T> {
163-
void operator()(const framework::ExecutionContext &ctx) const {
164-
eltwise_grad_use_out<T>(ctx, algorithm);
165-
}
166-
};
167-
168154
template <typename T>
169155
struct GeluMKLDNNFunctor : public BaseActivationFunctor<T> {
170156
void operator()(const framework::ExecutionContext &ctx) const {
@@ -196,59 +182,33 @@ struct SoftplusMKLDNNFunctor : public BaseActivationFunctor<T> {
196182
}
197183
};
198184

199-
template <typename T>
200-
using Relu6MKLDNNFunctor =
201-
MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_bounded_relu>;
202-
203-
template <typename T>
204-
using AbsMKLDNNFunctor = MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_abs>;
205-
206185
template <typename T>
207186
using Relu6MKLDNNGradFunctor =
208187
MKLDNNActivationGradFunc<T, dnnl::algorithm::eltwise_bounded_relu>;
209188

210-
template <typename T>
211-
using AbsMKLDNNGradFunctor =
212-
MKLDNNActivationGradFunc<T, dnnl::algorithm::eltwise_abs>;
213-
214189
} // namespace operators
215190
} // namespace paddle
216191

217192
namespace ops = paddle::operators;
218193

219-
#define REGISTER_ACTIVATION_MKLDNN_KERNEL(act_type, functor, grad_functor) \
220-
REGISTER_OP_KERNEL( \
221-
act_type, \
222-
MKLDNN, \
223-
::paddle::platform::CPUPlace, \
224-
ops::MKLDNNActivationKernel<ops::functor<float>>, \
225-
ops::MKLDNNActivationKernel<ops::functor<paddle::platform::bfloat16>>); \
226-
REGISTER_OP_KERNEL( \
227-
act_type##_grad, \
228-
MKLDNN, \
229-
::paddle::platform::CPUPlace, \
230-
ops::MKLDNNActivationGradKernel<ops::grad_functor<float>>, \
231-
ops::MKLDNNActivationGradKernel< \
194+
#define REGISTER_FWD_ACTIVATION_MKLDNN_KERNEL(act_type, functor) \
195+
REGISTER_OP_KERNEL( \
196+
act_type, \
197+
MKLDNN, \
198+
::paddle::platform::CPUPlace, \
199+
ops::MKLDNNActivationKernel<ops::functor<float>>, \
200+
ops::MKLDNNActivationKernel<ops::functor<paddle::platform::bfloat16>>);
201+
202+
#define REGISTER_GRAD_ACTIVATION_MKLDNN_KERNEL(act_type, grad_functor) \
203+
REGISTER_OP_KERNEL( \
204+
act_type##_grad, \
205+
MKLDNN, \
206+
::paddle::platform::CPUPlace, \
207+
ops::MKLDNNActivationGradKernel<ops::grad_functor<float>>, \
208+
ops::MKLDNNActivationGradKernel< \
232209
ops::grad_functor<paddle::platform::bfloat16>>);
233210

234-
#define REGISTER_ACTIVATION_MKLDNN_KERNEL_FWD_ONLY(act_type, functor) \
235-
REGISTER_OP_KERNEL(act_type, \
236-
MKLDNN, \
237-
::paddle::platform::CPUPlace, \
238-
ops::MKLDNNActivationKernel<ops::functor<float>>);
239-
240-
#define FOR_EACH_MKLDNN_KERNEL_FUNCTOR(__macro) \
241-
__macro(abs, AbsMKLDNNFunctor, AbsMKLDNNGradFunctor); \
242-
__macro(gelu, GeluMKLDNNFunctor, GeluMKLDNNGradFunctor); \
243-
__macro(relu6, Relu6MKLDNNFunctor, Relu6MKLDNNGradFunctor);
244-
245-
FOR_EACH_MKLDNN_KERNEL_FUNCTOR(REGISTER_ACTIVATION_MKLDNN_KERNEL);
246-
247-
namespace ops = paddle::operators;
248-
REGISTER_OP_KERNEL(
249-
softplus,
250-
MKLDNN,
251-
paddle::platform::CPUPlace,
252-
ops::MKLDNNActivationKernel<ops::SoftplusMKLDNNFunctor<float>>,
253-
ops::MKLDNNActivationKernel<
254-
ops::SoftplusMKLDNNFunctor<paddle::platform::bfloat16>>);
211+
REGISTER_FWD_ACTIVATION_MKLDNN_KERNEL(softplus, SoftplusMKLDNNFunctor);
212+
REGISTER_FWD_ACTIVATION_MKLDNN_KERNEL(gelu, GeluMKLDNNFunctor);
213+
REGISTER_GRAD_ACTIVATION_MKLDNN_KERNEL(gelu, GeluMKLDNNGradFunctor);
214+
REGISTER_GRAD_ACTIVATION_MKLDNN_KERNEL(relu6, Relu6MKLDNNGradFunctor);

paddle/fluid/platform/mkldnn_reuse.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ limitations under the License. */
2525
#include "paddle/fluid/operators/pool_op.h"
2626
#include "paddle/fluid/platform/mkldnn_helper.h"
2727
#include "paddle/fluid/platform/place.h"
28-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_reuse.h"
28+
#include "paddle/phi/kernels/funcs/onednn/onednn_reuse.h"
2929

3030
namespace paddle {
3131
namespace platform {

paddle/phi/kernels/funcs/data_layout_transform.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@
2525
#include "paddle/phi/core/dense_tensor.h"
2626

2727
#ifdef PADDLE_WITH_MKLDNN
28-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_helper.h"
29-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_reuse.h"
28+
#include "paddle/phi/kernels/funcs/onednn/onednn_helper.h"
29+
#include "paddle/phi/kernels/funcs/onednn/onednn_reuse.h"
3030
#endif
3131

3232
namespace phi {

paddle/phi/kernels/funcs/onednn/mkldnn_reuse.h renamed to paddle/phi/kernels/funcs/onednn/onednn_reuse.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ limitations under the License. */
2525
#include "paddle/phi/common/data_type.h"
2626
#include "paddle/phi/common/place.h"
2727
#include "paddle/phi/core/dense_tensor.h"
28-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_helper.h"
28+
#include "paddle/phi/kernels/funcs/onednn/onednn_helper.h"
2929

3030
namespace phi {
3131
namespace funcs {

paddle/phi/kernels/onednn/activation_grad_kernel.cc

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
#include "paddle/phi/common/place.h"
2020
#include "paddle/phi/core/kernel_registry.h"
2121
#include "paddle/phi/kernels/funcs/activation_functor.h"
22-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_reuse.h"
22+
#include "paddle/phi/kernels/funcs/onednn/onednn_reuse.h"
2323

2424
namespace phi {
2525

@@ -147,6 +147,10 @@ struct MKLDNNActivationGradUseOutFunc : public funcs::BaseActivationFunctor<T> {
147147
}
148148
};
149149

150+
template <typename T>
151+
using AbsMKLDNNGradFunctor =
152+
MKLDNNActivationGradFunc<T, dnnl::algorithm::eltwise_abs>;
153+
150154
template <typename T>
151155
using ReluMKLDNNGradFunctor =
152156
MKLDNNActivationGradFunc<T, dnnl::algorithm::eltwise_relu>;
@@ -193,6 +197,7 @@ DEFINE_ONEDNN_ACTIVATION_GRAD_KERNEL_DEPOUT(Sqrt, SqrtMKLDNNGradUseOutFunctor);
193197
DEFINE_ONEDNN_ACTIVATION_GRAD_KERNEL_DEPOUT(Sigmoid,
194198
SigmoidMKLDNNGradUseOutFunctor);
195199
DEFINE_ONEDNN_ACTIVATION_GRAD_KERNEL_DEPOUT(Exp, ExpMKLDNNGradUseOutFunctor);
200+
DEFINE_ONEDNN_ACTIVATION_GRAD_KERNEL_DEPOUT(Abs, AbsMKLDNNGradFunctor);
196201
DEFINE_ONEDNN_ACTIVATION_GRAD_KERNEL_DEPOUT(Relu, ReluMKLDNNGradFunctor);
197202

198203
DEFINE_ONEDNN_ACT_GRAD_KERNEL_WITH_ONE_ATTRS_DEPX(LeakyRelu,
@@ -240,6 +245,7 @@ PD_REGISTER_KERNEL(relu_grad,
240245
PD_REGISTER_KERNEL( \
241246
name, OneDNN, ALL_LAYOUT, phi::func, float, phi::dtype::bfloat16) {}
242247

248+
PD_REGISTER_ACTIVATION_GRAD_KERNEL(abs_grad, AbsGradKernel)
243249
PD_REGISTER_ACTIVATION_GRAD_KERNEL(elu_grad, EluGradKernel)
244250
PD_REGISTER_ACTIVATION_GRAD_KERNEL(exp_grad, ExpGradKernel)
245251
PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_swish_grad, HardSwishGradKernel)

paddle/phi/kernels/onednn/activation_kernel.cc

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
#include "paddle/phi/common/place.h"
2020
#include "paddle/phi/core/kernel_registry.h"
2121
#include "paddle/phi/kernels/funcs/activation_functor.h"
22-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_reuse.h"
22+
#include "paddle/phi/kernels/funcs/onednn/onednn_reuse.h"
2323

2424
namespace phi {
2525

@@ -88,10 +88,17 @@ struct MKLDNNActivationFunc : public funcs::BaseActivationFunctor<T> {
8888
}
8989
};
9090

91+
template <typename T>
92+
using AbsMKLDNNFunctor = MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_abs>;
93+
9194
template <typename T>
9295
using ReluMKLDNNFunctor =
9396
MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_relu>;
9497

98+
template <typename T>
99+
using Relu6MKLDNNFunctor =
100+
MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_bounded_relu>;
101+
95102
template <typename T>
96103
using SwishMKLDNNFunctor =
97104
MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_swish>;
@@ -126,6 +133,7 @@ template <typename T>
126133
using RoundMKLDNNFunctor =
127134
MKLDNNActivationFunc<T, dnnl::algorithm::eltwise_round>;
128135

136+
DEFINE_ONEDNN_ACTIVATION_KERNEL(Abs, AbsMKLDNNFunctor)
129137
DEFINE_ONEDNN_ACTIVATION_KERNEL(Relu, ReluMKLDNNFunctor)
130138
DEFINE_ONEDNN_ACTIVATION_KERNEL(Tanh, TanhMKLDNNFunctor)
131139
DEFINE_ONEDNN_ACTIVATION_KERNEL(Exp, ExpMKLDNNFunctor)
@@ -137,6 +145,7 @@ DEFINE_ONEDNN_ACTIVATION_KERNEL(Round, RoundMKLDNNFunctor)
137145
DEFINE_ONEDNN_ACT_KERNEL_WITH_ONE_ATTRS(LeakyRelu, ReluMKLDNNFunctor, alpha)
138146
DEFINE_ONEDNN_ACT_KERNEL_WITH_ONE_ATTRS(Mish, MishMKLDNNFunctor, threshold)
139147
DEFINE_ONEDNN_ACT_KERNEL_WITH_ONE_ATTRS(Elu, EluMKLDNNFunctor, alpha)
148+
DEFINE_ONEDNN_ACT_KERNEL_WITH_ONE_ATTRS(Relu6, Relu6MKLDNNFunctor, threshold)
140149
DEFINE_ONEDNN_ACT_KERNEL_WITH_ONE_ATTRS(Swish, SwishMKLDNNFunctor, beta)
141150

142151
template <typename T, typename Context>
@@ -158,13 +167,15 @@ PD_REGISTER_KERNEL(round, OneDNN, ALL_LAYOUT, phi::RoundKernel, float) {}
158167
PD_REGISTER_KERNEL( \
159168
name, OneDNN, ALL_LAYOUT, phi::func, float, phi::dtype::bfloat16) {}
160169

170+
PD_REGISTER_ACTIVATION_KERNEL(abs, AbsKernel)
161171
PD_REGISTER_ACTIVATION_KERNEL(elu, EluKernel)
162172
PD_REGISTER_ACTIVATION_KERNEL(exp, ExpKernel)
163173
PD_REGISTER_ACTIVATION_KERNEL(hard_swish, HardSwishKernel)
164174
PD_REGISTER_ACTIVATION_KERNEL(leaky_relu, LeakyReluKernel)
165175
PD_REGISTER_ACTIVATION_KERNEL(mish, MishKernel)
176+
PD_REGISTER_ACTIVATION_KERNEL(relu, ReluKernel)
177+
PD_REGISTER_ACTIVATION_KERNEL(relu6, Relu6Kernel)
166178
PD_REGISTER_ACTIVATION_KERNEL(sigmoid, SigmoidKernel)
167179
PD_REGISTER_ACTIVATION_KERNEL(sqrt, SqrtKernel)
168180
PD_REGISTER_ACTIVATION_KERNEL(swish, SwishKernel)
169181
PD_REGISTER_ACTIVATION_KERNEL(tanh, TanhKernel)
170-
PD_REGISTER_ACTIVATION_KERNEL(relu, ReluKernel)

paddle/phi/kernels/transfer_layout_kernel.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ limitations under the License. */
2323
#include "paddle/phi/kernels/funcs/data_layout_transform.h"
2424
#include "paddle/phi/kernels/funcs/math_function.h"
2525
#ifdef PADDLE_WITH_MKLDNN
26-
#include "paddle/phi/kernels/funcs/onednn/mkldnn_helper.h"
26+
#include "paddle/phi/kernels/funcs/onednn/onednn_helper.h"
2727
#endif
2828
namespace phi {
2929

0 commit comments

Comments (0)