Skip to content

Commit

Permalink
Merge pull request #93 from RidgeRun/dev-0.8
Browse files Browse the repository at this point in the history
Dev 0.8
  • Loading branch information
jafet-chaves authored Jul 2, 2020
2 parents 966ac2a + c8980ac commit 18ab50e
Show file tree
Hide file tree
Showing 48 changed files with 3,307 additions and 9 deletions.
10 changes: 8 additions & 2 deletions configure.ac
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (C) 2018 RidgeRun, LLC (http://www.ridgerun.com)
# Copyright (C) 2018-2020 RidgeRun, LLC (http://www.ridgerun.com)
# All Rights Reserved.
#
# The contents of this software are proprietary and confidential to RidgeRun,
Expand All @@ -10,7 +10,7 @@

# Initialize autoconf.
AC_PREREQ([2.69])
AC_INIT([RidgeRun inference library],[0.7.0],[https://github.com/RidgeRun/r2inference/issues],[r2inference])
AC_INIT([RidgeRun inference library],[0.8.0],[https://github.com/RidgeRun/r2inference/issues],[r2inference])

# Initialize our build utils
RR_INIT
Expand Down Expand Up @@ -63,6 +63,9 @@ AM_COND_IF([HAVE_EDGETPU], [AM_COND_IF([HAVE_TFLITE], [], [AC_MSG_ERROR(The Edge

RR_CHECK_FEATURE_LIB(TENSORRT, TensorRT Installation,
nvinfer, createInferBuilder_INTERNAL, NvInfer.h, no)

RR_CHECK_FEATURE_LIB(ONNXRT, ONNX Runtime Installation,
onnxruntime, OrtGetApiBase, onnxruntime/core/session/onnxruntime_cxx_api.h, no)
AC_LANG_POP([C++])

AM_CONDITIONAL([PLATFORM_IS_GPU], [false])
Expand All @@ -81,12 +84,14 @@ examples/Makefile
examples/r2i/Makefile
examples/r2i/edgetpu/Makefile
examples/r2i/ncsdk/Makefile
examples/r2i/onnxrt/Makefile
examples/r2i/tensorflow/Makefile
examples/r2i/tensorrt/Makefile
examples/r2i/tflite/Makefile
r2i/Makefile
r2i/edgetpu/Makefile
r2i/ncsdk/Makefile
r2i/onnxrt/Makefile
r2i/tensorflow/Makefile
r2i/tensorrt/Makefile
r2i/tflite/Makefile
Expand All @@ -96,6 +101,7 @@ tests/acceptance/Makefile
tests/unit/Makefile
tests/unit/r2i/Makefile
tests/unit/r2i/ncsdk/Makefile
tests/unit/r2i/onnxrt/Makefile
tests/unit/r2i/tensorflow/Makefile
tests/unit/r2i/tensorrt/Makefile
tests/unit/r2i/tflite/Makefile
Expand Down
1 change: 1 addition & 0 deletions examples/external/list_backends.cc
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ void PrintFramework (r2i::FrameworkMeta &meta) {
std::cout << "Name : " << meta.name << std::endl;
std::cout << "Description : " << meta.description << std::endl;
std::cout << "Version : " << meta.version << std::endl;
std::cout << "Label : " << meta.label << std::endl;
std::cout << "---" << std::endl;
}

Expand Down
4 changes: 4 additions & 0 deletions examples/r2i/Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@ if HAVE_NCSDK
SUBDIRS += ncsdk
endif

if HAVE_ONNXRT
SUBDIRS += onnxrt
endif

if HAVE_TENSORFLOW
SUBDIRS += tensorflow
endif
Expand Down
4 changes: 4 additions & 0 deletions examples/r2i/meson.build
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ if get_option('enable-edgetpu')
subdir('edgetpu')
endif

if get_option('enable-onnxrt')
subdir('onnxrt')
endif

if get_option('enable-tensorflow')
subdir('tensorflow')
endif
Expand Down
36 changes: 36 additions & 0 deletions examples/r2i/onnxrt/Makefile.am
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Copyright (C) 2020 RidgeRun, LLC (http://www.ridgerun.com)
# All Rights Reserved.
#
# The contents of this software are proprietary and confidential to RidgeRun,
# LLC. No part of this program may be photocopied, reproduced or translated
# into another programming language without prior written consent of
# RidgeRun, LLC. The user is free to modify the source code after obtaining
# a software license from RidgeRun. All source code changes must be provided
# back to RidgeRun without any encumbrance.

# Each example program listed below is built from a single source file
# named <program>.cc.
AM_DEFAULT_SOURCE_EXT = .cc

if ENABLE_EXAMPLES

# Example binaries are built but never installed.
noinst_PROGRAMS = \
inception

# C++ flags: project defaults, coverage instrumentation, and the shared
# example helpers in ../common/ (presumably the stb image headers used by
# the examples — TODO confirm).
AM_CXXFLAGS = \
$(RR_CXXFLAGS) \
$(CODE_COVERAGE_CXXFLAGS) \
-I../common/

AM_CFLAGS = \
$(RR_CFLAGS) \
$(CODE_COVERAGE_CFLAGS)

AM_CPPFLAGS = \
$(RR_CPPFLAGS) \
$(CODE_COVERAGE_CPPFLAGS)

# Link the examples against the in-tree r2inference library.
AM_LDFLAGS = \
$(RR_LIBS) \
$(CODE_COVERAGE_LIBS) \
$(top_builddir)/r2i/libr2inference-@RR_PACKAGE_VERSION@.la

endif # ENABLE_EXAMPLES
215 changes: 215 additions & 0 deletions examples/r2i/onnxrt/inception.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,215 @@
/* Copyright (C) 2018-2020 RidgeRun, LLC (http://www.ridgerun.com)
* All Rights Reserved.
*
* The contents of this software are proprietary and confidential to RidgeRun,
* LLC. No part of this program may be photocopied, reproduced or translated
* into another programming language without prior written consent of
* RidgeRun, LLC. The user is free to modify the source code after obtaining
* a software license from RidgeRun. All source code changes must be provided
* back to RidgeRun without any encumbrance.
*/

#include <r2i/r2i.h>

#include <getopt.h>
#include <iostream>
#include <memory>
#include <string>

#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"

#define STB_IMAGE_RESIZE_IMPLEMENTATION
#include "stb_image_resize.h"

/* Prints the index and score of the highest scored label in the
 * prediction. Reports an error and returns early if the prediction is
 * empty or if reading any element fails. */
void PrintTopPrediction (std::shared_ptr<r2i::IPrediction> prediction) {
  r2i::RuntimeError error;
  int index = 0;
  double max = -1;
  int num_labels = prediction->GetResultSize();

  if (0 == num_labels) {
    std::cerr << "The prediction is empty" << std::endl;
    return;
  }

  /* Linear scan for the highest scored label */
  for (int i = 0; i < num_labels; ++i) {
    double current = prediction->At(i, error);
    /* Fixed: the error from At() was silently discarded */
    if (error.IsError ()) {
      std::cerr << "Prediction read error: " << error << std::endl;
      return;
    }
    if (current > max) {
      max = current;
      index = i;
    }
  }

  std::cout << "Highest probability is label "
            << index << " (" << max << ")" << std::endl;
}

/* Prints the command line usage to stderr. The required -I/-O node names
 * are now shown in the example, and the sample model is an ONNX file
 * (the previous example named a TensorFlow .pb graph, which this ONNX
 * Runtime backend does not load). */
void PrintUsage() {
  std::cerr << "Required arguments: "
            << "-i [JPG input_image] "
            << "-m [Inception ONNX Model] "
            << "-s [Model Input Size] "
            << "-I [Input Node] "
            << "-O [Output Node] \n"
            << " Example: "
            << " ./inception -i cat.jpg -m inceptionv2.onnx "
            << "-s 224 -I input -O output"
            << std::endl;
}

/* Scales the input RGB image to reqwidth x reqheight and normalizes every
 * channel value to the [-1, 1) range expected by the model. Returns a
 * newly allocated float buffer of size 3 * reqwidth * reqheight. */
std::unique_ptr<float[]> PreProcessImage (const unsigned char *input,
    int width, int height, int reqwidth, int reqheight) {

  const int num_channels = 3;
  const int total_values = num_channels * reqwidth * reqheight;

  std::unique_ptr<unsigned char[]> resized (new unsigned char[total_values]);
  std::unique_ptr<float[]> normalized (new float[total_values]);

  /* Resize to the dimensions required by the model */
  stbir_resize_uint8(input, width, height, 0, resized.get(), reqwidth,
                     reqheight, 0, num_channels);

  /* RGB = (RGB - Mean)*StdDev, applied uniformly to every channel value */
  for (int i = 0; i < total_values; ++i) {
    normalized[i] = (static_cast<float>(resized[i]) - 128) / 128.0;
  }

  return normalized;
}

std::unique_ptr<float[]> LoadImage(const std::string &path, int reqwidth,
int reqheight) {
int channels = 3;
int width, height, cp;

unsigned char *img = stbi_load(path.c_str(), &width, &height, &cp, channels);
if (!img) {
std::cerr << "The picture " << path << " could not be loaded";
return nullptr;
}

auto ret = PreProcessImage(img, width, height, reqwidth, reqheight);
free (img);

return ret;
}

/* Parses the command line options into the output parameters. Returns
 * false on any unrecognized option, true otherwise. Options:
 *   -i image path, -m model path, -p prediction index, -s model input
 *   size, -I input node name, -O output node name. */
bool ParseArgs (int &argc, char *argv[], std::string &image_path,
                std::string &model_path, int &index, int &size,
                std::string &in_node, std::string &out_node) {
  int opt = 0;

  while ((opt = getopt (argc, argv, "i:m:p:s:I:O:")) != -1) {
    if ('i' == opt) {
      image_path = optarg;
    } else if ('m' == opt) {
      model_path = optarg;
    } else if ('p' == opt) {
      index = std::stoi (optarg);
    } else if ('s' == opt) {
      size = std::stoi (optarg);
    } else if ('I' == opt) {
      in_node = optarg;
    } else if ('O' == opt) {
      out_node = optarg;
    } else {
      return false;
    }
  }

  return true;
}

/* Entry point: parses arguments, loads the ONNX model into an ONNXRT
 * engine, preprocesses the input image, runs one prediction and prints
 * the top scored label. Exits with EXIT_FAILURE on any error. */
int main (int argc, char *argv[]) {
  r2i::RuntimeError error;
  std::string model_path;
  std::string image_path;
  std::string in_node;
  std::string out_node;
  int Index = 0;
  int size = 0;

  if (false == ParseArgs (argc, argv, image_path, model_path, Index,
                          size, in_node, out_node)) {
    PrintUsage ();
    exit (EXIT_FAILURE);
  }

  if (image_path.empty() || model_path.empty ()) {
    PrintUsage ();
    exit (EXIT_FAILURE);
  }

  auto factory = r2i::IFrameworkFactory::MakeFactory(
                   r2i::FrameworkCode::ONNXRT,
                   error);

  if (nullptr == factory) {
    /* Fixed: the message said "TensorFlow backend" in the ONNXRT example */
    std::cerr << "ONNXRT backend is not built: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Loading Model: " << model_path << std::endl;
  auto loader = factory->MakeLoader (error);
  std::shared_ptr<r2i::IModel> model = loader->Load (model_path, error);
  if (error.IsError ()) {
    std::cerr << "Loader error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Setting model to engine" << std::endl;
  std::shared_ptr<r2i::IEngine> engine = factory->MakeEngine (error);
  error = engine->SetModel (model);
  /* Fixed: SetModel errors were previously ignored */
  if (error.IsError ()) {
    std::cerr << "Engine error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Configuring ONNXRT session parameters" << std::endl;
  auto params = factory->MakeParameters (error);
  error = params->Configure(engine, model);
  /* Set OrtLoggingLevel::ORT_LOGGING_LEVEL_WARNING */
  error = params->Set ("logging-level", 2);
  error = params->Set ("log-id", "onnxrt_example");
  error = params->Set ("intra-num-threads", 1);
  /* Set GraphOptimizationLevel::ORT_ENABLE_EXTENDED */
  error = params->Set ("graph-optimization-level", 2);

  if (error.IsError ()) {
    std::cerr << "Parameters error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Loading image: " << image_path << std::endl;
  std::unique_ptr<float[]> image_data = LoadImage (image_path, size,
                                        size);
  /* Fixed: a failed image load was previously passed on as a null buffer */
  if (nullptr == image_data) {
    exit(EXIT_FAILURE);
  }

  std::cout << "Configuring frame" << std::endl;
  std::shared_ptr<r2i::IFrame> frame = factory->MakeFrame (error);

  error = frame->Configure (image_data.get(), size, size,
                            r2i::ImageFormat::Id::RGB);
  /* Fixed: frame configuration errors were previously ignored */
  if (error.IsError ()) {
    std::cerr << "Frame error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Starting engine" << std::endl;
  error = engine->Start ();
  if (error.IsError ()) {
    std::cerr << "Engine start error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Predicting..." << std::endl;
  auto prediction = engine->Predict (frame, error);
  if (error.IsError ()) {
    std::cerr << "Engine prediction error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  PrintTopPrediction (prediction);

  std::cout << "Stopping engine" << std::endl;
  error = engine->Stop ();
  if (error.IsError ()) {
    std::cerr << "Engine stop error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  return EXIT_SUCCESS;
}
11 changes: 11 additions & 0 deletions examples/r2i/onnxrt/meson.build
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Compile examples
# Each entry below is built as a stand-alone binary from '<name>.cc'.
app_examples = [
'inception',
]

# Example binaries link against the r2inference library and are never
# installed.
foreach app : app_examples
executable(app, '@0@.cc'.format(app),
include_directories: [configinc, common_inc_dir],
dependencies : [r2inference_lib_dep],
install: false)
endforeach
13 changes: 11 additions & 2 deletions meson.build
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
project('r2inference', ['cpp'], default_options : ['cpp_std=c++11'],
version : '0.7.0',
version : '0.8.0',
meson_version : '>= 0.50',)

# Set project information
Expand Down Expand Up @@ -88,8 +88,17 @@ if get_option('enable-tensorrt')
cdata.set('HAVE_TENSORRT', 1)
endif

# Define library dependencies for ONNX support
if get_option('enable-onnxrt')
onnxrt = cpp.find_library('onnxruntime', required: true)
onnxrt_dep = declare_dependency(dependencies: onnxrt)
lib_onnxrt_dep = [onnxrt_dep]
cdata.set('HAVE_ONNXRT', 1)
endif

# Check if at least one backend is enabled
if not (cdata.has('HAVE_TENSORFLOW') or cdata.has('HAVE_TFLITE') or cdata.has('HAVE_TENSORRT'))
if not (cdata.has('HAVE_TENSORFLOW') or cdata.has('HAVE_TFLITE') or
cdata.has('HAVE_TENSORRT') or cdata.has('HAVE_ONNXRT'))
error ('No backend selected, you must choose at least one')
endif

Expand Down
2 changes: 2 additions & 0 deletions meson_options.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,5 @@ option('enable-tflite', type : 'boolean', value: false,
description : 'Enable Tensorflow Lite backend support')
option('enable-tensorrt', type : 'boolean', value: false,
description : 'Enable TensorRT backend support')
option('enable-onnxrt', type : 'boolean', value: false,
description : 'Enable ONNX Runtime backend support')
Loading

0 comments on commit 18ab50e

Please sign in to comment.