This is my code:
#include <iostream>
#include <cstdio>
#include <iomanip>
#include "src/VideoProcessing.h"
#include <opencv2/opencv.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <interpreter.h>
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/model_builder.h"
#include "tensorflow/lite/interpreter_builder.h"
#include "tensorflow/lite/optional_debug_tools.h"
#include "tensorflow/lite/tools/gen_op_registration.h"
typedef cv::Point3_<float> Pixel;
void normalize(Pixel &pixel) {...}
int main() {
...
auto model = tflite::FlatBufferModel::BuildFromFile("/home/me/tensorflow_src/tensorflow/lite/examples/model-verification/pose_landmark_full.tflite");
if (!model) {
    printf("Failed to mmap model\n");
    exit(0);
}
tflite::ops::builtin::BuiltinOpResolver resolver;
std::unique_ptr<tflite::Interpreter> interpreter;
...
The last line, std::unique_ptr<tflite::Interpreter> interpreter;, is throwing an error suggesting that Interpreter and its associated classes are undefined. This is the error:
/usr/bin/ld: tensorflow-lite/libtensorflow-lite.a(interpreter.cc.o): in function `tflite::Interpreter::SetProfilerImpl(std::unique_ptr<tflite::Profiler, std::default_delete<tflite::Profiler> >)':
interpreter.cc:(.text+0x2a66): undefined reference to `tflite::profiling::RootProfiler::RemoveChildProfilers()'
/usr/bin/ld: interpreter.cc:(.text+0x2a75): undefined reference to `tflite::profiling::RootProfiler::AddProfiler(std::unique_ptr<tflite::Profiler, std::default_delete<tflite::Profiler> >&&)'
/usr/bin/ld: interpreter.cc:(.text+0x2ab2): undefined reference to `vtable for tflite::profiling::RootProfiler'
/usr/bin/ld: interpreter.cc:(.text+0x2b19): undefined reference to `vtable for tflite::profiling::RootProfiler'
/usr/bin/ld: tensorflow-lite/libtensorflow-lite.a(interpreter.cc.o): in function `tflite::Interpreter::~Interpreter()':
interpreter.cc:(.text+0x307e): undefined reference to `vtable for tflite::profiling::RootProfiler'
/usr/bin/ld: tensorflow-lite/libtensorflow-lite.a(interpreter.cc.o): in function `tflite::profiling::RootProfiler::~RootProfiler()':
interpreter.cc:(.text._ZN6tflite9profiling12RootProfilerD0Ev[_ZN6tflite9profiling12RootProfilerD5Ev]+0x7): undefined reference to `vtable for tflite::profiling::RootProfiler'
/usr/bin/ld: tensorflow-lite/libtensorflow-lite.a(interpreter.cc.o): in function `tflite::profiling::RootProfiler::~RootProfiler()':
interpreter.cc:(.text._ZN6tflite9profiling12RootProfilerD2Ev[_ZN6tflite9profiling12RootProfilerD5Ev]+0x7): undefined reference to `vtable for tflite::profiling::RootProfiler'
collect2: error: ld returned 1 exit status
make[2]: *** [CMakeFiles/model-verification.dir/build.make:247: model-verification] Error 1
make[1]: *** [CMakeFiles/Makefile2:1374: CMakeFiles/model-verification.dir/all] Error 2
make: *** [Makefile:149: all] Error 2
I only get this error when I use tflite::Interpreter, despite having the correct interpreter.h header file.
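For reference, the parts I elided with "..." after that declaration follow the standard TFLite C++ example flow, roughly like this (just a sketch that reuses the model, resolver, and interpreter variables already declared above):

// Build the interpreter from the model with the builtin op resolver.
tflite::InterpreterBuilder builder(*model, resolver);
if (builder(&interpreter) != kTfLiteOk || !interpreter) {
    printf("Failed to build interpreter\n");
    exit(0);
}
// Allocate tensor buffers before copying input and calling Invoke().
if (interpreter->AllocateTensors() != kTfLiteOk) {
    printf("Failed to allocate tensors\n");
    exit(0);
}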
This is how I compile:
cmake ../tensorflow/lite/examples/model-verification/
make
./model-verification
This is my CMake output:
cmake ../tensorflow/lite/examples/model-verification/
-- Setting build type to Release, for debug builds use'-DCMAKE_BUILD_TYPE=Debug'.
CMake Warning at /home/me/tensorflow_src/build/abseil-cpp/CMakeLists.txt:74 (message):
A future Abseil release will default ABSL_PROPAGATE_CXX_STD to ON for CMake
3.8 and up. We recommend enabling this option to ensure your project still
builds correctly.
-- Standard libraries to link to explicitly: none
-- The Fortran compiler identification is GNU 11.2.0
-- Could NOT find CLANG_FORMAT: Found unsuitable version "0.0", but required is exact version "9" (found CLANG_FORMAT_EXECUTABLE-NOTFOUND)
--
-- Configured Eigen 3.4.90
--
-- Proceeding with version: 2.0.6.v2.0.6
-- CMAKE_CXX_FLAGS: -std=c++0x -Wall -pedantic -Werror -Wextra -Werror=shadow -faligned-new -Werror=implicit-fallthrough=2 -Wunused-result -Werror=unused-result -Wunused-parameter -Werror=unused-parameter -fsigned-char
-- Configuring done
-- Generating done
-- Build files have been written to: /home/onur/tensorflow_src/build