Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

【Hackathon 7th No.46】 添加对返回常量的 IfElse 算子的支持 #1383

Merged
merged 9 commits into from
Sep 20, 2024
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 44 additions & 4 deletions paddle2onnx/mapper/exporter.cc
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

#include <array>

#include "onnx_helper.h"
Asthestarsfalll marked this conversation as resolved.
Show resolved Hide resolved
#include "onnxoptimizer/optimize.h"
#include "paddle2onnx/optimizer/convert_fp32_to_fp16.h"
#include "paddle2onnx/optimizer/eliminate_non_transpose.h"
Expand Down Expand Up @@ -323,6 +324,30 @@ namespace paddle2onnx
return std::move(ExportBlock(parser, sub_block_idx, temp_parameters, temp_inputs, temp_outputs));
}

ONNX_NAMESPACE::GraphProto ModelExporter::ExportFillConstant(const PaddleParser &parser,
                                                             OnnxHelper *temp_helper,
                                                             int32_t block_id,
                                                             int32_t op_id,
                                                             const std::string &output_names)
{
  // Build a minimal sub-graph wrapping the single, already-exported
  // fill_constant node so it can serve as a then/else branch of an ONNX If
  // node (an IfElse branch that returns a constant).
  //
  // parser       : provides op metadata for (block_id, op_id), a fill_constant op.
  // temp_helper  : holds the ONNX nodes exported so far for the current block.
  // output_names : name of the fill_constant output tensor to look up.
  // Returns the branch GraphProto with exactly one output and (at most) one node.
  ONNX_NAMESPACE::GraphProto graph;
  graph.set_name("PaddlePaddle fill_constant Graph " + std::to_string(op_id));

  auto out_info = parser.GetOpOutput(block_id, op_id, "Out");

  // The sub-graph exposes the fill_constant result as its only output.
  *(graph.add_output()) = (*MakeValueInfo(out_info[0]));

  // Copy the exported node whose first output matches `output_names` into the
  // sub-graph; output names are unique, so stop at the first hit.
  for (const auto &item : temp_helper->nodes) {
    if (item->output(0) == output_names) {
      *(graph.add_node()) = (*item.get());
      break;
    }
  }

  // Return by value: NRVO/implicit move applies; `std::move` here would only
  // block copy elision (C++ Core Guidelines F.48).
  return graph;
}

ONNX_NAMESPACE::GraphProto ModelExporter::ExportBlock(const PaddleParser &parser,
int32_t block_id,
std::vector<std::shared_ptr<ONNX_NAMESPACE::NodeProto>> &parameters,
Expand Down Expand Up @@ -366,16 +391,27 @@ namespace paddle2onnx
// 构建 else 分支图
auto else_node_name = input_info[0].name;
auto conditional_block_cood_it = sub_block_map_.find(else_node_name);
Assert(conditional_block_cood_it != sub_block_map_.end(), "Don't find select_input else_input node.");
Assert(conditional_block_cood_it != sub_block_map_.end(), "Can't find select_input else_input node.");
auto conditional_block_cood = conditional_block_cood_it->second;
auto else_graph = ExportConditionalBlock(parser, conditional_block_cood.first, conditional_block_cood.second, else_node_name);
ONNX_NAMESPACE::GraphProto else_graph, then_graph;
auto else_node = parser.GetOpDesc(conditional_block_cood.first, conditional_block_cood.second);
if (else_node.type().find("conditional_block") != std::string::npos) {
else_graph = ExportConditionalBlock(parser, conditional_block_cood.first, conditional_block_cood.second, else_node_name);
} else {
else_graph = ExportFillConstant(parser, &temp_helper, conditional_block_cood.first, conditional_block_cood.second, else_node_name);
}

// 构建 then 分支图
auto then_node_name = input_info[1].name;
conditional_block_cood_it = sub_block_map_.find(then_node_name);
Assert(conditional_block_cood_it != sub_block_map_.end(), "Don't find select_input then_input node.");
Assert(conditional_block_cood_it != sub_block_map_.end(), "Can't find select_input then_input node.");
conditional_block_cood = conditional_block_cood_it->second;
auto then_graph = ExportConditionalBlock(parser, conditional_block_cood.first, conditional_block_cood.second, then_node_name);
auto then_node = parser.GetOpDesc(conditional_block_cood.first, conditional_block_cood.second);
if (then_node.type().find("conditional_block") != std::string::npos) {
then_graph = ExportConditionalBlock(parser, conditional_block_cood.first, conditional_block_cood.second, then_node_name);
} else {
then_graph = ExportFillConstant(parser, &temp_helper, conditional_block_cood.first, conditional_block_cood.second, then_node_name);
}

auto cond_info = parser.GetOpInput(block_id, op_id, "Mask");
auto output_info = parser.GetOpOutput(block_id, op_id, "Out");
Expand All @@ -384,6 +420,10 @@ namespace paddle2onnx
AddAttribute(node, "then_branch", then_graph);
AddAttribute(node, "else_branch", else_graph);
continue;
} else if (op.type() == "fill_constant")
{
auto out_info = parser.GetOpOutput(block_id, op_id, "Out");
sub_block_map_[out_info[0].name] = {block_id, op_id};
}
ExportOp(parser, &temp_helper, opset_version_, block_id, op_id, verbose_);
}
Expand Down
5 changes: 5 additions & 0 deletions paddle2onnx/mapper/exporter.h
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,11 @@ namespace paddle2onnx
int32_t block_id,
int32_t op_id,
const std::string &output_names);
ONNX_NAMESPACE::GraphProto ExportFillConstant(const PaddleParser &parser,
OnnxHelper *temp_helper,
int32_t block_id,
int32_t op_id,
const std::string &output_names);
ONNX_NAMESPACE::GraphProto ExportBlock(const PaddleParser &parser,
int32_t block_id,
std::vector<std::shared_ptr<ONNX_NAMESPACE::NodeProto>> &parameters,
Expand Down
6 changes: 5 additions & 1 deletion tests/onnxbase.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,11 @@ def compare(result, expect, delta=1e-10, rtol=1e-10):
# Convert Paddle Tensor to Numpy array
if type(expect) == list:
expect = expect[0]
expect = expect.numpy()

if isinstance(expect, paddle.Tensor):
expect = expect.numpy()
else:
expect = np.array(expect)

# For result_shape is (1) and expect_shape shape is ()
expect = expect.squeeze()
Expand Down
54 changes: 53 additions & 1 deletion tests/test_ifelse.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,60 @@ def test_ifelse_2_false():
obj.set_input_data("input_data", paddle.to_tensor(2), paddle.to_tensor(1))
obj.run()

class BaseNet3(paddle.nn.Layer):
    # Minimal network whose forward returns a bare Python constant from BOTH
    # branches; exercises the fill_constant-based If export path added in this PR.
    def __init__(self):
        super(BaseNet3, self).__init__()

    def forward(self, inputs):
        # NOTE(review): the explicit if/else statement (not a ternary) is
        # presumably required so dygraph-to-static lowers it to
        # conditional_block/select_input ops — confirm before restructuring.
        if inputs == 1:
            return 1
        else:
            return 2

def test_ifelse_3_true():
    # Export BaseNet3 and compare Paddle vs ONNX when the condition holds
    # (both branches return constants; input selects the `then` branch).
    net = BaseNet3()
    net.eval()
    checker = APIOnnx(net, 'ifelse', [11])
    checker.set_input_data("input_data", paddle.to_tensor(1))
    checker.run()

def test_ifelse_3_false():
    # Export BaseNet3 and compare Paddle vs ONNX when the condition fails
    # (input selects the `else` branch, which also returns a constant).
    net = BaseNet3()
    net.eval()
    checker = APIOnnx(net, 'ifelse', [11])
    checker.set_input_data("input_data", paddle.to_tensor(2))
    checker.run()

class BaseNet4(paddle.nn.Layer):
    # Mixed-branch network: `then` returns a tensor expression, `else` returns a
    # bare Python constant — exercises a conditional_block branch paired with a
    # fill_constant branch in the exported If node.
    def __init__(self):
        super(BaseNet4, self).__init__()

    def forward(self, inputs):
        # NOTE(review): the explicit if/else statement is presumably required
        # for dygraph-to-static control-flow capture — confirm before changing.
        if inputs == 1:
            return inputs + 1
        else:
            return 2

def test_ifelse_4_true():
    # Export BaseNet4 and compare Paddle vs ONNX for the tensor-returning
    # `then` branch (input makes the condition true).
    net = BaseNet4()
    net.eval()
    checker = APIOnnx(net, 'ifelse', [11])
    checker.set_input_data("input_data", paddle.to_tensor(1))
    checker.run()

def test_ifelse_4_false():
    # Export BaseNet4 and compare Paddle vs ONNX for the constant-returning
    # `else` branch (input makes the condition false).
    # BUG FIX: this test previously instantiated BaseNet3 (copy-paste error),
    # so BaseNet4's else branch was never exercised.
    op = BaseNet4()
    op.eval()
    obj = APIOnnx(op, 'ifelse', [11])
    obj.set_input_data("input_data", paddle.to_tensor(2))
    obj.run()

Asthestarsfalll marked this conversation as resolved.
Show resolved Hide resolved
if __name__ == "__main__":
    # Run the full if/else export suite when executed directly.
    # BUG FIX: test_ifelse_2_false() was listed twice; the duplicate call is removed.
    test_ifelse_1_true()
    test_ifelse_1_false()
    test_ifelse_2_true()
    test_ifelse_2_false()
    test_ifelse_3_true()
    test_ifelse_3_false()
    test_ifelse_4_true()
    test_ifelse_4_false()