Optimize the onnxruntime code (PaddlePaddle#41044)
heliqi authored Mar 30, 2022
1 parent b1ee9d5 commit f12b526
Showing 3 changed files with 3 additions and 6 deletions.
paddle/fluid/inference/api/details/zero_copy_tensor.cc (3 changes: 2 additions & 1 deletion)

@@ -23,7 +23,8 @@
 #include "paddle/fluid/platform/float16.h"
 #include "paddle/phi/core/allocator.h"
 #ifdef PADDLE_WITH_ONNXRUNTIME
-#include "paddle/fluid/inference/api/onnxruntime_predictor.h"
+#include "onnxruntime_c_api.h"    // NOLINT
+#include "onnxruntime_cxx_api.h"  // NOLINT
 #endif

 namespace paddle_infer {
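The replacement headers pull in ONNX Runtime's C and C++ APIs directly, so zero_copy_tensor.cc no longer depends on the full onnxruntime_predictor.h. As context, here is a minimal sketch, not taken from the commit, of the kind of zero-copy usage these headers enable: wrapping an existing host buffer in an Ort::Value without copying (WrapBuffer is a hypothetical helper).

// Hypothetical helper, not from the commit: wrap an existing CPU buffer
// in an Ort::Value without copying, which is the zero-copy pattern these
// headers make available.
#include <cstddef>
#include <cstdint>
#include <vector>

#include "onnxruntime_cxx_api.h"  // NOLINT

Ort::Value WrapBuffer(float *data, size_t count,
                      const std::vector<int64_t> &shape) {
  // MemoryInfo only describes where `data` lives; ORT allocates nothing here.
  auto memory_info =
      Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
  return Ort::Value::CreateTensor<float>(memory_info, data, count,
                                         shape.data(), shape.size());
}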
paddle/fluid/inference/api/onnxruntime_predictor.cc (2 changes: 1 addition & 1 deletion)

@@ -81,7 +81,7 @@ bool CheckConvertToONNX(const AnalysisConfig &config) {
 bool ONNXRuntimePredictor::Init() {
   VLOG(3) << "ONNXRuntime Predictor::init()";

-  // Now ONNXRuntime only suuport CPU
+  // Now ONNXRuntime only support CPU
   const char *device_name = config_.use_gpu() ? "Cuda" : "Cpu";
   if (config_.use_gpu()) {
     place_ = paddle::platform::CUDAPlace(config_.gpu_device_id());
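The corrected comment sits next to the device-name selection. For context, a minimal sketch of how a name like "Cuda" or "Cpu" typically feeds into an Ort::MemoryInfo; this is an assumption about common ONNX Runtime usage, not the predictor's actual code, and MakeMemoryInfo is a hypothetical helper.

// Sketch under the assumption that the device name is used to build an
// Ort::MemoryInfo; MakeMemoryInfo is a hypothetical helper.
#include "onnxruntime_cxx_api.h"  // NOLINT

Ort::MemoryInfo MakeMemoryInfo(bool use_gpu, int device_id) {
  const char *device_name = use_gpu ? "Cuda" : "Cpu";
  // ONNX Runtime identifies an allocator by (name, allocator type,
  // device id, memory type).
  return Ort::MemoryInfo(device_name, OrtDeviceAllocator, device_id,
                         OrtMemTypeDefault);
}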
paddle/fluid/inference/api/onnxruntime_predictor_tester.cc (4 changes: 0 additions & 4 deletions)

@@ -49,10 +49,6 @@ TEST(ONNXRuntimePredictor, onnxruntime_on) {

   ASSERT_TRUE(predictor);
   ASSERT_TRUE(!predictor->Clone());
-  ASSERT_TRUE(predictor->scope_);
-  ASSERT_TRUE(predictor->sub_scope_);
-  ASSERT_EQ(predictor->scope_->parent(), nullptr);
-  ASSERT_EQ(predictor->sub_scope_->parent(), predictor->scope_.get());
   // Dummy Input Data
   std::vector<int64_t> input_shape = {-1, 3, 224, 224};
   std::vector<float> input_data(1 * 3 * 224 * 224, 1.0);
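The deleted assertions reached into the predictor's private scope_ and sub_scope_ members. A minimal sketch, assuming the standard public paddle_infer API, of driving the same dummy input through the public surface instead; RunDummyInput is a hypothetical helper, not the tester's code.

// Hypothetical helper: feed the dummy input through the public
// paddle_infer API rather than asserting on predictor internals.
#include <vector>

#include "paddle/fluid/inference/api/paddle_inference_api.h"

void RunDummyInput(paddle_infer::Predictor *predictor) {
  std::vector<float> input_data(1 * 3 * 224 * 224, 1.0f);
  auto input_names = predictor->GetInputNames();
  auto input = predictor->GetInputHandle(input_names[0]);
  input->Reshape({1, 3, 224, 224});  // a concrete batch replaces the -1
  input->CopyFromCpu(input_data.data());
  predictor->Run();
}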
