
Commit a5f4865

Pei Yang authored and Shixiaowei02 committed
clean inference logs when config.DisableGlogInfo is triggered (#36356)
1 parent 287ca7d commit a5f4865

11 files changed (+42 −33 lines)
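
Across the ten pass files below, the edit is the same three-line guard: the fusion-summary log is printed only when the "disable_logs" pass attribute is absent or false. A condensed sketch of the pattern (the pass name SomeFusePass and counter found_count are illustrative placeholders; Has/Get are the pass-attribute accessors seen in the hunks):

void SomeFusePass::ApplyImpl(ir::Graph* graph) const {
  int found_count = 0;
  // ... match and rewrite subgraphs, incrementing found_count ...
  AddStatis(found_count);
  // Log the summary only when logging has not been disabled. The Has()
  // check protects passes that never received the attribute, since
  // Get() on a missing attribute would fail.
  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
    string::PrettyLogDetail("--- fused %d ... patterns", found_count);
}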

paddle/fluid/framework/ir/fc_gru_fuse_pass.cc

+3 −3

@@ -335,9 +335,9 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
       graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-
-  string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
+                            fusion_count);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc

+3 −3

@@ -349,9 +349,9 @@ void FCLstmFusePass::ApplyImpl(ir::Graph* graph) const {
       BuildFusion(graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-
-  string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
+                            fusion_count);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/layer_norm_fuse_pass.cc

+3 −2

@@ -351,8 +351,9 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
 
   gpd(graph, handler);
   AddStatis(found_layer_norm_count);
-  PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
-                  found_layer_norm_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
+                    found_layer_norm_count);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc

+3 −2

@@ -150,8 +150,9 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
 
   gpd(graph, handler);
   AddStatis(found_bn_act_count);
-  PrettyLogDetail("--- fused %d batch norm with relu activation",
-                  found_bn_act_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d batch norm with relu activation",
+                    found_bn_act_count);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc

+5 −4

@@ -68,9 +68,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
       bool approximate = BOOST_GET_CONST(bool, act_op->GetAttr("approximate"));
       std::string type = approximate ? "_tanh" : "_erf";
       fc_op->SetAttr("activation_type", act_type + type);
-    } else
+    } else {
       fc_op->SetAttr("activation_type", act_type);
-
+    }
     fc_op->SetAttr("use_mkldnn", true);
 
     fc_op->SetOutput("Out", {act_out->Name()});
@@ -82,8 +82,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
 
   gpd(graph, handler);
   AddStatis(found_fc_act_count);
-  PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
-                  act_type);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
+                    act_type);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc

+6 −4

@@ -149,10 +149,12 @@ void MatmulTransposeReshapeMKLDNNPass::ApplyImpl(ir::Graph *graph) const {
 
   gpd(graph, handler);
   AddStatis(found_matmul_transpose_reshape_count);
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
-         << " MatmulTransposeReshape patterns";
-  paddle::string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
+           << " MatmulTransposeReshape patterns";
+    paddle::string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 }  // namespace ir
 }  // namespace framework

paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc

+3 −3

@@ -111,9 +111,9 @@ void MultiGRUFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(fused_count);
-
-  PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
-                  fused_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
+                    fused_count);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc

+3 −3

@@ -126,9 +126,9 @@ void MultiGruSeqFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(fused_count);
-
-  PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
-                  fused_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
+                    fused_count);
 }
 
 }  // namespace ir

paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc

+8 −7

@@ -148,13 +148,14 @@ void ReshapeTransposeMatmulMkldnnFusePass::Fuse(
 
   gpd(graph, handler);
   AddStatis(found_reshape_transpose_matmul_count);
-
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
-         << " ReshapeTransposeMatmulMkldnn patterns";
-  if (with_reshape_xshape) msg_ss << " with reshape's xshape";
-  if (with_transpose_xshape) msg_ss << " with transpose's xshape";
-  string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
+           << " ReshapeTransposeMatmulMkldnn patterns";
+    if (with_reshape_xshape) msg_ss << " with reshape's xshape";
+    if (with_transpose_xshape) msg_ss << " with transpose's xshape";
+    string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 
 void ReshapeTransposeMatmulMkldnnFusePass::ApplyImpl(ir::Graph *graph) const {

paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc

+3 −2

@@ -129,8 +129,9 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(found_scale_matmul_fuse_count);
-  PrettyLogDetail("--- fused %d scale with matmul",
-                  found_scale_matmul_fuse_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d scale with matmul",
+                    found_scale_matmul_fuse_count);
 }
 
 }  // namespace ir

paddle/fluid/inference/analysis/ir_pass_manager.cc

+2 −0

@@ -246,6 +246,8 @@ void IRPassManager::CreatePasses(Argument *argument,
       pass->Set("use_fc_padding", new bool(use_fc_padding));
     }
 
+    pass->Set("disable_logs", new bool(disable_logs_));
+
    pre_pass = pass_name;
 
    passes_.emplace_back(std::move(pass));
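
This last hunk is what ties the passes back to the commit title: the flag set by config.DisableGlogInfo() is held by IRPassManager as disable_logs_ and is now forwarded to every pass as the "disable_logs" attribute. A minimal usage sketch, assuming the standard Paddle inference C++ API and a placeholder model path:

#include "paddle_inference_api.h"

paddle_infer::Config config;
config.SetModel("model_dir");  // placeholder model path
// Suppress glog INFO output; with this change the pass fusion
// summaries ("--- fused %d ...") are silenced as well.
config.DisableGlogInfo();
auto predictor = paddle_infer::CreatePredictor(config);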
