
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
| 14 | + |
| 15 | +#include <gtest/gtest.h> |
| 16 | + |
| 17 | +#include "paddle/fluid/framework/new_executor/standalone_executor.h" |
| 18 | +#include "paddle/fluid/ir/dialect/pd_dialect.h" |
| 19 | +#include "paddle/fluid/ir/dialect/pd_op.h" |
| 20 | +#include "paddle/fluid/ir/dialect/pd_type.h" |
| 21 | +#include "paddle/fluid/ir/dialect/utils.h" |
| 22 | +#include "paddle/fluid/ir/interface/op_yaml_info.h" |
| 23 | +#include "paddle/fluid/platform/init_phi.h" |
| 24 | +#include "paddle/ir/core/block.h" |
| 25 | +#include "paddle/ir/core/builtin_attribute.h" |
| 26 | +#include "paddle/ir/core/builtin_dialect.h" |
| 27 | +#include "paddle/ir/core/builtin_op.h" |
| 28 | +#include "paddle/ir/core/ir_context.h" |
| 29 | +#include "paddle/ir/core/program.h" |
| 30 | +#include "paddle/ir/core/utils.h" |
| 31 | +#include "paddle/phi/core/meta_tensor.h" |
| 32 | +#include "paddle/phi/infermeta/binary.h" |
| 33 | + |
| 34 | +DECLARE_FILE_SYMBOLS(kernel_dialect); |
| 35 | + |
| 36 | +PD_DECLARE_KERNEL(full, CPU, ALL_LAYOUT); |
| 37 | +PD_DECLARE_KERNEL(tanh, CPU, ALL_LAYOUT); |
| 38 | +PD_DECLARE_KERNEL(tanh_grad, CPU, ALL_LAYOUT); |
| 39 | + |
| 40 | +TEST(VJP, TanhBackwardTest) { |
| 41 | + ir::IrContext* ctx = ir::IrContext::Instance(); |
| 42 | + ir::Program program((ctx)); |
| 43 | + ctx->GetOrRegisterDialect<paddle::dialect::PaddleDialect>(); |
| 44 | + |
| 45 | + ir::Builder builder = ir::Builder(ctx, program.block()); |
| 46 | + |
| 47 | + paddle::dialect::FullOp op1 = builder.Build<paddle::dialect::FullOp>( |
| 48 | + std::vector<int64_t>{1}, 1.0, phi::DataType::FLOAT32, phi::CPUPlace()); |
| 49 | + |
| 50 | + paddle::dialect::TanhOp op2 = |
| 51 | + builder.Build<paddle::dialect::TanhOp>(op1.out()); |
| 52 | + |
| 53 | + paddle::dialect::FullOp op3 = builder.Build<paddle::dialect::FullOp>( |
| 54 | + std::vector<int64_t>{1}, 2.0, phi::DataType::FLOAT32, phi::CPUPlace()); |
| 55 | + |
| 56 | + paddle::dialect::VjpInterface tanh_vjp_interface = |
| 57 | + op2->dyn_cast<paddle::dialect::VjpInterface>(); |
| 58 | + |
| 59 | + std::vector<int> stop_gradients{0}; |
| 60 | + std::vector<ir::OpResult> out_grads{op3.out()}; |
| 61 | + std::vector<ir::OpResult> grad_res = |
| 62 | + tanh_vjp_interface.Vjp(op2.operation(), out_grads, stop_gradients); |
| 63 | +} |
0 commit comments