Skip to content

Commit dd18745

Browse files
Committed: remove ExecutionStrategy
1 parent (6dd6447) · commit dd18745

File tree

6 files changed

+3
-65
lines changed

6 files changed

+3
-65
lines changed

paddle/fluid/framework/details/execution_strategy.h

Lines changed: 0 additions & 51 deletions
This file was deleted.

paddle/fluid/framework/details/op_handle_base.h

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
#include <unordered_set>
2020
#include <vector>
2121

22-
#include "paddle/fluid/framework/details/execution_strategy.h"
2322
#include "paddle/fluid/framework/details/var_handle.h"
2423
#include "paddle/fluid/framework/ir/node.h"
2524
#include "paddle/fluid/platform/device_context.h"

paddle/fluid/framework/distributed_strategy.proto

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -183,13 +183,6 @@ message BuildStrategy {
183183
optional bool fuse_resunit = 21 [ default = false ];
184184
}
185185

186-
message ExecutionStrategy {
187-
optional int32 num_threads = 1 [ default = 1 ];
188-
optional int32 num_iteration_per_drop_scope = 2 [ default = 10 ];
189-
optional int32 num_iteration_per_run = 3 [ default = 1 ];
190-
optional bool use_thread_barrier = 4 [ default = false ];
191-
}
192-
193186
message GradientScaleConfig {
194187
// Optional value ['avg', 'sum', 'customized']
195188
// If avg, loss@grad will be divided by the number of devices,
@@ -426,8 +419,7 @@ message DistributedStrategy {
426419
optional QatConfig qat_configs = 117;
427420

428421
optional BuildStrategy build_strategy = 201;
429-
optional ExecutionStrategy execution_strategy = 202;
430-
optional GradientScaleConfig gradient_scale_configs = 203;
422+
optional GradientScaleConfig gradient_scale_configs = 202;
431423
}
432424

433425
message DistributedJobInfo {

paddle/fluid/framework/ir/memory_optimize_pass/memory_reuse_pass.cc

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -238,7 +238,6 @@ bool MemoryReusePass::IsInVarReusable(const details::VarHandle &in_var) const {
238238
* - it is the first version var. Otherwise, the var may be overwritten
239239
* in the second batch, which results in wrong calculation result.
240240
* It is critical especially when
241-
* ExecutionStrategy::num_iteration_per_drop_scope_ > 1.
242241
* - it has not reused other var's memory. It is not necessary to do memory
243242
* reuse twice for the same var.
244243
* - it is not a persistable var.

paddle/fluid/operators/cinn/cinn_launch_context.cc

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@
3030
#include "paddle/common/flags.h"
3131
#include "paddle/fluid/framework/convert_utils.h"
3232
#include "paddle/fluid/framework/details/build_strategy.h"
33-
#include "paddle/fluid/framework/details/execution_strategy.h"
3433
#include "paddle/fluid/framework/io/save_paddle2cinn_varmap.h"
3534
#include "paddle/fluid/framework/ir/graph.h"
3635
#include "paddle/fluid/framework/lod_tensor.h"

python/paddle/distributed/fleet/base/distributed_strategy.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -185,8 +185,8 @@ def __init__(self):
185185
186186
DistributedStrategy can be serialized into protobuf file or deserialized from protobuf file
187187
188-
Users who run local training usually configure BuildStrategy and ExecutionStrategy, and
189-
DistributedStrategy supports configurations from BuildStrategy and ExecutionStrategy
188+
Users who run local training usually configure BuildStrategy, and
189+
DistributedStrategy supports configurations from BuildStrategy.
190190
191191
"""
192192
self.strategy = distributed_strategy_pb2.DistributedStrategy()

0 commit comments

Comments (0)