
Commit d2e30a2

Merge pull request #5771 from lcy-seso/softsign
add the softsign activation.
2 parents: 9db4d01 + fafd3e0

3 files changed: +51 -5 lines

paddle/gserver/activations/ActivationFunction.cpp

Lines changed: 31 additions & 0 deletions
@@ -212,6 +212,37 @@ Error __must_check backward(Argument& act) {
 }
 END_DEFINE_ACTIVATION(sequence_softmax)
 
+/*
+ * @brief SoftSign Activation.
+ * \f[
+ *    f(z) = \frac{z}{1 + |z|}
+ * \f]
+ */
+BEGIN_DEFINE_ACTIVATION(softsign)
+private:
+MatrixPtr denominator_;
+
+Error __must_check forward(Argument& act) {
+  size_t height = act.value->getHeight();
+  size_t width = act.value->getWidth();
+  Matrix::resizeOrCreate(
+      denominator_, height, width, false, useGpu(act.deviceId));
+  denominator_->assign(*act.value);
+  denominator_->abs2();
+  denominator_->add(1.);
+
+  act.value->dotDiv(*act.value, *denominator_);
+  return Error();
+}
+
+Error __must_check backward(Argument& act) {
+  denominator_->square2();
+  denominator_->scalarDiv(*denominator_, 1.);
+  act.grad->dotMul(*act.grad, *denominator_);
+  return Error();
+}
+END_DEFINE_ACTIVATION(softsign)
+
 /**
  * @brief Relu Activation.
  * forward. y = max(0, z)
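
For reference, the backward pass above follows directly from f(z) = z / (1 + |z|): the derivative is f'(z) = 1 / (1 + |z|)^2, so the code reuses denominator_ (which still holds 1 + |z| from the forward pass), squares it, takes its reciprocal via scalarDiv, and multiplies the incoming gradient element-wise. A minimal NumPy sketch of the same computation; the function names here are illustrative and not part of the Paddle API:

import numpy as np

def softsign_forward(z):
    # denominator = 1 + |z|, mirroring denominator_ in the C++ code
    denom = 1.0 + np.abs(z)
    return z / denom, denom

def softsign_backward(grad_out, denom):
    # f'(z) = 1 / (1 + |z|)^2, applied element-wise to the incoming gradient
    return grad_out / np.square(denom)

z = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
y, denom = softsign_forward(z)
grad = softsign_backward(np.ones_like(z), denom)

# Finite-difference check of the analytic gradient.
eps = 1e-6
numeric = (softsign_forward(z + eps)[0] - softsign_forward(z - eps)[0]) / (2 * eps)
assert np.allclose(grad, numeric, atol=1e-5)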

python/paddle/trainer_config_helpers/activations.py

Lines changed: 15 additions & 2 deletions
@@ -17,7 +17,8 @@
     "IdentityActivation", "LinearActivation", 'SequenceSoftmaxActivation',
     'ExpActivation', "ReluActivation", "BReluActivation", "SoftReluActivation",
     "STanhActivation", "AbsActivation", "SquareActivation", "BaseActivation",
-    "LogActivation", "SqrtActivation", "ReciprocalActivation"
+    "LogActivation", "SqrtActivation", "ReciprocalActivation",
+    "SoftSignActivation"
 ]
 
 
@@ -243,8 +244,20 @@ class ReciprocalActivation(BaseActivation):
     Reciprocal Activation.
 
     .. math::
-       f(z) = 1/z
+       f(z)=\\frac{1}{z}
     """
 
     def __init__(self):
         BaseActivation.__init__(self, 'reciprocal', False)
+
+
+class SoftSignActivation(BaseActivation):
+    """
+    SoftSign Activation.
+
+    .. math::
+       f(z)=\\frac{z}{1 + |z|}
+    """
+
+    def __init__(self):
+        BaseActivation.__init__(self, 'softsign', False)
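
The Python class only registers the activation under its C++ name ('softsign'); the actual computation runs in the ActivationFunction.cpp code above. A hedged usage sketch for a v1 trainer config, where the layer names and sizes are placeholders for illustration rather than part of this commit:

from paddle.trainer_config_helpers import *

# Attach the new activation to a fully connected layer.
data = data_layer(name='input', size=100)
hidden = fc_layer(input=data, size=128, act=SoftSignActivation())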

python/paddle/v2/fluid/tests/test_sequence_slice_op.py

File mode changed from 100755 to 100644
Lines changed: 5 additions & 3 deletions
@@ -3,6 +3,7 @@
 import sys
 from op_test import OpTest
 
+
 class TestSequenceSliceOp(OpTest):
     def set_data(self):
         self.init_test_case()
@@ -13,12 +14,12 @@ def set_data(self):
         length = np.array(self.length).astype("int64")
 
         self.inputs = {'X': (x, lod), 'Offset': offset, 'Length': length}
-        outs = [] #np.zeros((100, 3, 2)).astype('float32')
+        outs = []  #np.zeros((100, 3, 2)).astype('float32')
         out_lod = [[0]]
         out_lod_offset = 0
         for i in range(len(offset)):
-            sub_x = x[lod[0][i] + offset[i, 0]: lod[0]
-                      [i] + offset[i, 0] + length[i, 0], :]
+            sub_x = x[lod[0][i] + offset[i, 0]:lod[0][i] + offset[i, 0] +
+                      length[i, 0], :]
             out_lod_offset = out_lod_offset + len(sub_x)
             outs.append(sub_x)
             out_lod[0].append(out_lod_offset)
@@ -41,5 +42,6 @@ def test_check_output(self):
     def test_check_grad(self):
         self.check_grad(['X'], 'Out')
 
+
 if __name__ == '__main__':
     unittest.main()
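
The reformatted slice in set_data keeps the original semantics: for each sequence i, lod[0][i] is the row where that sequence starts, so the test takes length[i, 0] rows beginning offset[i, 0] rows into the sequence. A standalone NumPy sketch of that indexing, using illustrative values rather than the test's own data:

import numpy as np

x = np.arange(20, dtype='float32').reshape(10, 2)  # 10 rows holding 2 flattened sequences
lod = [[0, 4, 10]]                                 # sequence 0 = rows [0, 4), sequence 1 = rows [4, 10)
offset = np.array([[1], [2]], dtype='int64')
length = np.array([[2], [3]], dtype='int64')

outs = []
for i in range(len(offset)):
    start = lod[0][i] + offset[i, 0]
    outs.append(x[start:start + length[i, 0], :])  # same slice the test builds

# outs[0] is rows 1-2 (from sequence 0), outs[1] is rows 6-8 (from sequence 1)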
