Skip to content

Commit a8e1854

Browse files
committed
Fix the clang format.
1 parent d760b6a commit a8e1854

File tree

6 files changed

+14
-21
lines changed

6 files changed

+14
-21
lines changed

paddle/operators/lstm_op.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,10 @@ limitations under the License. */
1414

1515
#pragma once
1616
#include "paddle/framework/op_registry.h"
17+
#include "paddle/operators/math/detail/activation_functions.h"
1718
#include "paddle/operators/math/lstm_compute.h"
1819
#include "paddle/operators/math/math_function.h"
1920
#include "paddle/operators/math/sequence2batch.h"
20-
#include "paddle/operators/math/detail/activation_functions.h"
2121

2222
namespace paddle {
2323
namespace operators {

paddle/operators/math/detail/activation_functions.h

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ limitations under the License. */
1414

1515
#pragma once
1616
#include <math.h>
17-
#include "paddle/platform/hostdevice.h"
1817
#include "paddle/platform/enforce.h"
18+
#include "paddle/platform/hostdevice.h"
1919

2020
#ifdef __AVX__
2121
#include <immintrin.h>
@@ -37,20 +37,19 @@ enum ActivationType {
3737
kIdentity,
3838
};
3939

40-
inline ActivationType GetActivationType (const std::string &type) {
40+
inline ActivationType GetActivationType(const std::string &type) {
4141
if (type == "sigmoid") {
4242
return ActivationType::kSigmoid;
4343
} else if (type == "relu") {
4444
return ActivationType::kReLU;
4545
} else if (type == "tanh") {
4646
return ActivationType::kTanh;
47-
} else if (type == "identity") {
47+
} else if (type == "identity" || type == "") {
4848
return ActivationType::kIdentity;
4949
}
5050
PADDLE_THROW("Not support type %s.", type);
5151
}
5252

53-
5453
namespace forward {
5554

5655
template <typename T>

paddle/operators/math/detail/lstm_cpu_kernel.h

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,7 @@ namespace detail {
2626

2727
template <class T, class Op>
2828
void naive_lstm_forward_one_sequence(Op op, LstmMetaValue<T> value,
29-
int frame_size,
30-
ActivationType active_node,
29+
int frame_size, ActivationType active_node,
3130
ActivationType active_gate,
3231
ActivationType active_state) {
3332
T r_value_in;
@@ -149,8 +148,7 @@ void naive_lstm_backward_one_sequence(Op op, LstmMetaValue<T> value,
149148

150149
template <class T, class Op>
151150
void avx_lstm_forward_one_sequence(Op op, LstmMetaValue<T> value,
152-
int frame_size,
153-
ActivationType active_node,
151+
int frame_size, ActivationType active_node,
154152
ActivationType active_gate,
155153
ActivationType active_state) {
156154
#ifdef __AVX__
@@ -281,8 +279,7 @@ void avx_lstm_backward_one_sequence(Op op, LstmMetaValue<T> value,
281279

282280
template <class T, class Op>
283281
void cpu_lstm_forward(Op op, LstmMetaValue<T> value, int frame_size,
284-
ActivationType active_node,
285-
ActivationType active_gate,
282+
ActivationType active_node, ActivationType active_gate,
286283
ActivationType active_state) {
287284
if (Op::avx && !(frame_size & (8 - 1)) && (std::is_same<T, float>::value)) {
288285
avx_lstm_forward_one_sequence<T>(op, value, frame_size, active_node,

paddle/operators/math/detail/lstm_gpu_kernel.h

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -185,8 +185,7 @@ __global__ void KeLstmBackward(Op op, LstmMetaValue<T> value,
185185
template <class T, class Op>
186186
void gpu_lstm_forward(const platform::DeviceContext& context, Op op,
187187
LstmMetaValue<T> value, int frame_size, int batch_size,
188-
ActivationType active_node,
189-
ActivationType active_gate,
188+
ActivationType active_node, ActivationType active_gate,
190189
ActivationType active_state) {
191190
dim3 threads;
192191
dim3 grid;
@@ -220,8 +219,7 @@ template <class T, class Op>
220219
void gpu_lstm_backward(const platform::DeviceContext& context, Op op,
221220
LstmMetaValue<T> value, LstmMetaGrad<T> grad,
222221
int frame_size, int batch_size,
223-
ActivationType active_node,
224-
ActivationType active_gate,
222+
ActivationType active_node, ActivationType active_gate,
225223
ActivationType active_state) {
226224
dim3 threads;
227225
dim3 grid;

paddle/operators/math/lstm_compute.cu

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,8 @@ struct LstmUnitFunctor<platform::CUDADeviceContext, T> {
2828
const detail::ActivationType& cell_act,
2929
const detail::ActivationType& cand_act) {
3030
detail::gpu_lstm_forward<T>(context, detail::forward::lstm<T>(), value,
31-
frame_size, batch_size, cand_act,
32-
gate_act, cell_act);
31+
frame_size, batch_size, cand_act, gate_act,
32+
cell_act);
3333
}
3434
};
3535

@@ -42,8 +42,8 @@ struct LstmUnitGradFunctor<platform::CUDADeviceContext, T> {
4242
const detail::ActivationType& cell_act,
4343
const detail::ActivationType& cand_act) {
4444
detail::gpu_lstm_backward(context, detail::backward::lstm<T>(), value, grad,
45-
frame_size, batch_size, cand_act,
46-
gate_act, cell_act);
45+
frame_size, batch_size, cand_act, gate_act,
46+
cell_act);
4747
}
4848
};
4949

paddle/operators/math/lstm_compute.h

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@ limitations under the License. */
1414

1515
#pragma once
1616

17+
#include "paddle/operators/math/detail/activation_functions.h"
1718
#include "paddle/platform/device_context.h"
1819
#include "paddle/platform/enforce.h"
19-
#include "paddle/operators/math/detail/activation_functions.h"
2020

2121
namespace paddle {
2222
namespace operators {
@@ -30,7 +30,6 @@ typedef enum {
3030
HL_ACTIVATION_END
3131
} activation_mode_t;
3232

33-
3433
template <class T>
3534
struct LstmMetaValue {
3635
T *gate_value;

0 commit comments

Comments (0)