Commit 63ee729

remove the tmp buffer
1 parent 67bd4cd commit 63ee729

File tree

2 files changed (+2, -21 lines)

paddle/gserver/layers/MKLDNNLayer.cpp (+2 -16)
@@ -294,22 +294,8 @@ void MKLDNNLayer::resetMergeGrad(MKLDNNMatrixPtr& out) {
     srcs.push_back(*src);
   }
 
-  // TODO(TJ): remove me when mkldnn sum support different formats
-  for (size_t i = 1; i < srcPDs.size(); ++i) {
-    CHECK(srcPDs[0] == srcPDs[i]);
-  }
-  tmpOutGrad_ = out;
-  tmpCvt_ = nullptr;
-  if (out->getPrimitiveDesc() != srcPDs[0]) {
-    tmpOutGrad_ = MKLDNNMatrix::create(srcPDs[0]);
-    tmpCvt_ = MKLDNNMatrix::createReorder(tmpOutGrad_, out);
-    CHECK(tmpCvt_);
-    pipelineMergeGrad_.push_back(*tmpCvt_);
-  }
-
-  auto sumPD =
-      sum::primitive_desc(tmpOutGrad_->getMemoryDesc(), scales, srcPDs);
-  mergeGrad_.reset(new sum(sumPD, srcs, *tmpOutGrad_));
+  auto sumPD = sum::primitive_desc(out->getMemoryDesc(), scales, srcPDs);
+  mergeGrad_.reset(new sum(sumPD, srcs, *out));
   pipelineMergeGrad_.insert(pipelineMergeGrad_.begin(), *mergeGrad_);
 }
 
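For reference, the simplified path works because mkldnn::sum builds its primitive descriptor straight from the destination's memory descriptor and writes into it, so the temporary output buffer and manual reorder are not needed once sum handles the source formats (the condition the removed TODO was waiting for). Below is a minimal standalone sketch of that usage, assuming the MKL-DNN 0.x C++ API of this era; the shapes and the names grad0/grad1/out are illustrative, not the Paddle code.

// Sketch: merge two gradient buffers with mkldnn::sum, writing directly
// into the destination memory, with no intermediate buffer or reorder.
// Assumes the MKL-DNN 0.x C++ API (mkldnn.hpp).
#include <vector>
#include "mkldnn.hpp"

using namespace mkldnn;

int main() {
  engine eng(engine::cpu, 0);

  // Two sources and one destination: 2x3x4x5 fp32 tensors in nchw layout.
  memory::desc md({2, 3, 4, 5}, memory::data_type::f32, memory::format::nchw);
  memory::primitive_desc mpd(md, eng);
  memory grad0(mpd), grad1(mpd), out(mpd);

  // Fill the sources so the sum is observable.
  float* p0 = static_cast<float*>(grad0.get_data_handle());
  float* p1 = static_cast<float*>(grad1.get_data_handle());
  for (int i = 0; i < 2 * 3 * 4 * 5; ++i) { p0[i] = 1.f; p1[i] = 2.f; }

  // Build the sum primitive descriptor from the output's memory descriptor,
  // mirroring the simplified resetMergeGrad path above.
  std::vector<float> scales = {1.0f, 1.0f};
  std::vector<memory::primitive_desc> srcPDs = {grad0.get_primitive_desc(),
                                                grad1.get_primitive_desc()};
  std::vector<primitive::at> srcs = {grad0, grad1};
  auto sumPD = sum::primitive_desc(out.get_primitive_desc().desc(), scales, srcPDs);

  // Run: out = 1.0 * grad0 + 1.0 * grad1.
  std::vector<primitive> pipeline;
  pipeline.push_back(sum(sumPD, srcs, out));
  stream(stream::kind::eager).submit(pipeline).wait();
  return 0;
}

In the actual layer, resetMergeGrad does the same with its real source gradients and scales, then pushes the resulting sum primitive to the front of pipelineMergeGrad_.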

paddle/gserver/layers/MKLDNNLayer.h (-5)
@@ -94,11 +94,6 @@ class MKLDNNLayer : public Layer {
   std::vector<mkldnn::primitive> pipelineMergeGrad_;
   // tmp input argument to save input grad, only used to merge grad
   Argument tmpInArg_;
-  // since mkldnn sum do not support different formats:
-  // can refer to https://github.com/01org/mkl-dnn/issues/134
-  // so need create reorder manually and save tmp MKLDNNMatrix
-  MKLDNNMatrixPtr tmpOutGrad_;
-  std::shared_ptr<mkldnn::primitive> tmpCvt_;
 
  public:
   explicit MKLDNNLayer(const LayerConfig& config)
