Extended list of accepted types and some debug messages
gkrivor committed Oct 21, 2024
1 parent 160fd23 commit ec8e6cf
Showing 2 changed files with 80 additions and 4 deletions.
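
For context: this change lets a caller hand an in-memory ONNX ModelProto to the frontend in any of three forms - a shared_ptr<ModelProto>, a raw ModelProto*, or the pointer value packed into a uint64_t. A minimal caller-side sketch of the three variants, mirroring the tests added below (the include paths and the helper name are illustrative assumptions, not part of this commit):

#include <cstdint>
#include <memory>
#include <onnx/onnx_pb.h>                  // ONNX_NAMESPACE::ModelProto (assumed include path)
#include "openvino/frontend/manager.hpp"   // ov::frontend::FrontEndManager (assumed include path)

// Hypothetical helper: expects a ModelProto that was already parsed from a valid ONNX model.
void load_three_ways(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto) {
    ov::frontend::FrontEndManager fem;

    // 1. shared_ptr<ModelProto> - handed to the InputModel as-is.
    auto fe = fem.load_by_model(model_proto);
    auto input_model = fe->load(model_proto);

    // 2. raw ModelProto* - the frontend copies the message into its own shared_ptr.
    fe = fem.load_by_model(model_proto.get());
    input_model = fe->load(model_proto.get());

    // 3. pointer address carried in a uint64_t - reinterpreted back to ModelProto* inside load_impl.
    uint64_t address = reinterpret_cast<uint64_t>(model_proto.get());
    fe = fem.load_by_model(address);
    input_model = fe->load(address);
}
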
23 changes: 23 additions & 0 deletions src/frontends/onnx/frontend/src/frontend.cpp
@@ -86,8 +86,22 @@ InputModel::Ptr FrontEnd::load_impl(const std::vector<ov::Any>& variants) const
return std::make_shared<InputModel>(*stream, enable_mmap, m_extensions);
}
if (variants[0].is<ModelProtoPtr>()) {
std::cerr << "shared_ptr<ModelProto> has been received\n";
return std::make_shared<InputModel>(variants[0].as<ModelProtoPtr>(), m_extensions);
}
if (variants[0].is<ONNX_NAMESPACE::ModelProto*>()) {
std::cerr << "ModelProto* has been received\n";
return std::make_shared<InputModel>(
std::make_shared<ONNX_NAMESPACE::ModelProto>(*variants[0].as<ONNX_NAMESPACE::ModelProto*>()),
m_extensions);
}
if (variants[0].is<uint64_t>()) {
std::cerr << "uint64_t as a ModelProto* has been received\n";
void* model_proto_ptr = reinterpret_cast<void*>(variants[0].as<uint64_t>());
return std::make_shared<InputModel>(
std::make_shared<ONNX_NAMESPACE::ModelProto>(*static_cast<ONNX_NAMESPACE::ModelProto*>(model_proto_ptr)),
m_extensions);
}
return nullptr;
}

@@ -219,6 +233,15 @@ bool FrontEnd::supported_impl(const std::vector<ov::Any>& variants) const {
return is_valid_model(*stream);
}
if (variants[0].is<ModelProtoPtr>()) {
std::cerr << "shared_ptr<ModelProto> is supported\n";
return true;
}
if (variants[0].is<ONNX_NAMESPACE::ModelProto*>()) {
std::cerr << "ModelProto* is supported\n";
return true;
}
if (variants[0].is<uint64_t>()) {
std::cerr << "uint64_t as a ModelProto* is supported\n";
return true;
}
return false;
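
Both functions follow the same dispatch pattern over ov::Any: each candidate type is probed with is<T>() and unwrapped with as<T>(), and supported_impl has to advertise exactly the set of types that load_impl can turn into an InputModel. A stripped-down sketch of that pattern, assuming only the shared_ptr and uint64_t cases and a placeholder payload type (not the real frontend code):

#include <cstdint>
#include <memory>
#include <vector>
#include "openvino/core/any.hpp"   // ov::Any (assumed include path)

struct Payload {};  // stand-in for ONNX_NAMESPACE::ModelProto

// Returns an owned payload if the first variant carries one in a recognized form.
std::shared_ptr<Payload> extract_payload(const std::vector<ov::Any>& variants) {
    if (variants.empty())
        return nullptr;
    if (variants[0].is<std::shared_ptr<Payload>>())         // probe the stored type...
        return variants[0].as<std::shared_ptr<Payload>>();  // ...then unwrap it
    if (variants[0].is<uint64_t>()) {
        // The integer is treated as the address of an existing object and copied into owned storage.
        auto* raw = reinterpret_cast<Payload*>(variants[0].as<uint64_t>());
        return std::make_shared<Payload>(*raw);
    }
    return nullptr;  // mirrors supported_impl() returning false for unknown types
}
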
61 changes: 57 additions & 4 deletions src/frontends/onnx/tests/load_from.cpp
@@ -63,7 +63,7 @@ INSTANTIATE_TEST_SUITE_P(ONNXLoadTest,
::testing::Values(getTestData()),
FrontEndLoadFromTest::getTestCaseName);

TEST_P(FrontEndLoadFromTest, testLoadFromModelProto) {
TEST_P(FrontEndLoadFromTest, testLoadFromModelProtoSharedPtr) {
const auto path =
ov::util::path_join({ov::test::utils::getExecutableDirectory(), TEST_ONNX_MODELS_DIRNAME, "abs.onnx"});
std::ifstream ifs(path, std::ios::in | std::ios::binary);
@@ -76,15 +76,68 @@ TEST_P(FrontEndLoadFromTest, testLoadFromModelProto) {
ASSERT_TRUE(model_proto->ParseFromIstream(&ifs)) << "Could not parse ModelProto from file: " << path;

ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(model_proto))
<< "Could not create the ONNX FE using a ModelProto object";
<< "Could not create the ONNX FE using a shared_ptr on a ModelProto object";
ASSERT_NE(m_frontEnd, nullptr);
ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(model_proto)) << "Could not load the model";
ASSERT_NE(m_inputModel, nullptr);
}

std::shared_ptr<ov::Model> model;
ASSERT_NO_THROW(model = m_frontEnd->convert(m_inputModel))
<< "Could not convert the model to OV representation";
ASSERT_NO_THROW(model = m_frontEnd->convert(m_inputModel)) << "Could not convert the model to OV representation";
ASSERT_NE(model, nullptr);

ASSERT_TRUE(model->get_ordered_ops().size() > 0);
}

TEST_P(FrontEndLoadFromTest, testLoadFromModelProtoPtr) {
const auto path =
ov::util::path_join({ov::test::utils::getExecutableDirectory(), TEST_ONNX_MODELS_DIRNAME, "abs.onnx"});
std::ifstream ifs(path, std::ios::in | std::ios::binary);
ASSERT_TRUE(ifs.is_open()) << "Could not open an ifstream for the model path: " << path;
std::vector<std::string> frontends;
FrontEnd::Ptr fe;

{
auto model_proto = std::make_shared<ONNX_NAMESPACE::ModelProto>();
ASSERT_TRUE(model_proto->ParseFromIstream(&ifs)) << "Could not parse ModelProto from file: " << path;

ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(model_proto.get()))
<< "Could not create the ONNX FE using a pointer on ModelProto object";
ASSERT_NE(m_frontEnd, nullptr);
ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(model_proto.get())) << "Could not load the model";
ASSERT_NE(m_inputModel, nullptr);
}

std::shared_ptr<ov::Model> model;
ASSERT_NO_THROW(model = m_frontEnd->convert(m_inputModel)) << "Could not convert the model to OV representation";
ASSERT_NE(model, nullptr);

ASSERT_TRUE(model->get_ordered_ops().size() > 0);
}

TEST_P(FrontEndLoadFromTest, testLoadFromModelProtoUint64) {
const auto path =
ov::util::path_join({ov::test::utils::getExecutableDirectory(), TEST_ONNX_MODELS_DIRNAME, "abs.onnx"});
std::ifstream ifs(path, std::ios::in | std::ios::binary);
ASSERT_TRUE(ifs.is_open()) << "Could not open an ifstream for the model path: " << path;
std::vector<std::string> frontends;
FrontEnd::Ptr fe;

{
auto model_proto = std::make_shared<ONNX_NAMESPACE::ModelProto>();
ASSERT_TRUE(model_proto->ParseFromIstream(&ifs)) << "Could not parse ModelProto from file: " << path;

uint64_t model_proto_ptr = reinterpret_cast<uint64_t>(model_proto.get());

ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(model_proto_ptr))
<< "Could not create the ONNX FE using a pointer on ModelProto object as uint64_t";
ASSERT_NE(m_frontEnd, nullptr);
ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(model_proto_ptr)) << "Could not load the model";
ASSERT_NE(m_inputModel, nullptr);
}

std::shared_ptr<ov::Model> model;
ASSERT_NO_THROW(model = m_frontEnd->convert(m_inputModel)) << "Could not convert the model to OV representation";
ASSERT_NE(model, nullptr);

ASSERT_TRUE(model->get_ordered_ops().size() > 0);
