Skip to content

Commit

Permalink
log only if > 0 (#47181)
Browse files Browse the repository at this point in the history
  • Loading branch information
sfraczek authored Oct 20, 2022
1 parent acf56fb commit d6208aa
Show file tree
Hide file tree
Showing 10 changed files with 21 additions and 11 deletions.
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -350,7 +350,7 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
graph, name_scope_, param_scope(), true /*with_fc_bias*/);

AddStatis(fusion_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) && fusion_count > 0)
string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
fusion_count);
}
Expand Down
3 changes: 2 additions & 1 deletion paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -422,7 +422,8 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {

gpd(graph, handler);
AddStatis(found_layer_norm_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_layer_norm_count > 0)
PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
found_layer_norm_count);
}
Expand Down
3 changes: 2 additions & 1 deletion paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,8 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(

gpd(graph, handler);
AddStatis(found_bn_act_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_bn_act_count > 0)
PrettyLogDetail("--- fused %d batch norm with relu activation",
found_bn_act_count);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseConv(
};

gpd(graph_with_stats.first, handler);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_conv_count > 0) {
std::stringstream msg_ss;
std::string fusionMode = as_x ? "x" : "y";
msg_ss << "--- Fused " << found_conv_count << " conv (as " << fusionMode
Expand Down Expand Up @@ -228,7 +229,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseProjectionConv(
};

gpd(graph_with_stats.first, handler);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_projection_conv_count > 0) {
std::stringstream msg_ss;
msg_ss << "--- Fused " << found_projection_conv_count
<< " projection conv (as y) + elementwise_add patterns";
Expand Down
3 changes: 2 additions & 1 deletion paddle/fluid/framework/ir/mkldnn/elt_act_mkldnn_fuse_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,8 @@ void ElementwiseActivationOneDNNPass::FuseElementwiseAct(

gpd(graph, handler);
AddStatis(found_elementwise_activation_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
    found_elementwise_activation_count > 0)
PrettyLogDetail("--- fused %d %s with %s activation",
found_elementwise_activation_count,
elt_type,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,8 @@ GraphWithStats FCResidualConnectionMKLDNNFusePass::FuseFC(
};

gpd(graph_with_stats.first, handler);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_fc_count > 0) {
std::stringstream msg_ss;
std::string fusionMode = fc_as_x ? "x" : "y";
msg_ss << "--- Fused " << found_fc_count << " fc (as " << fusionMode
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,8 @@ void MatmulActivationMkldnnFusePass::FuseMatmulAct(

gpd(graph, handler);
AddStatis(found_matmul_activation_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_matmul_activation_count > 0) {
PrettyLogDetail("--- fused %d %s with %s activation",
found_matmul_activation_count,
matmul_type,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,8 @@ void MatmulElementwiseAddMKLDNNFusePass::FuseMatmulElementwiseAdd(

gpd(graph, handler);
AddStatis(found_matmul_elementwise_add_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_matmul_elementwise_add_count > 0) {
PrettyLogDetail("--- fused %d %s (as %s) with elementwise_add",
found_matmul_elementwise_add_count,
matmul_type,
Expand Down
3 changes: 2 additions & 1 deletion paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,8 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
};
gpd(graph, handler);
AddStatis(found_scale_matmul_fuse_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_scale_matmul_fuse_count > 0)
PrettyLogDetail("--- fused %d scale with matmul",
found_scale_matmul_fuse_count);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,8 @@ void SoftplusActivationOneDNNPass::FuseSoftplusActivation(

gpd(graph, handler);
AddStatis(found_softplus_activation_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_softplus_activation_count > 0)
PrettyLogDetail("--- fused %d softplus with %s activation",
found_softplus_activation_count,
act_type);
Expand Down

0 comments on commit d6208aa

Please sign in to comment.