diff --git a/libedgetpu/direct/aarch64/libedgetpu.so b/libedgetpu/direct/aarch64/libedgetpu.so new file mode 120000 index 0000000..d407741 --- /dev/null +++ b/libedgetpu/direct/aarch64/libedgetpu.so @@ -0,0 +1 @@ +libedgetpu.so.1 \ No newline at end of file diff --git a/src/cpp/examples/Makefile b/src/cpp/examples/Makefile index f84d299..e08198c 100644 --- a/src/cpp/examples/Makefile +++ b/src/cpp/examples/Makefile @@ -1,4 +1,5 @@ # This is a Makefile to cross-compile minimal.cc example. +# To compile on the Dev Board with Mendel, you must install the libbenchmark-dev package # 1. Download TensorFlow to Linux machine: # $ git clone https://github.com/tensorflow/tensorflow.git # 2. Download external dependencies for TensorFlow Lite: @@ -16,15 +17,27 @@ MAKEFILE_DIR := $(realpath $(dir $(lastword $(MAKEFILE_LIST)))) TENSORFLOW_DIR ?= +UNAME_S := $(shell uname -s) +ifeq ($(UNAME_S),Linux) + TARGET := linux +else + TARGET := unknown +endif + +TARGET_ARCH := $(shell uname -m) +TARGET_OUT_DIR ?= $(TARGET)_$(TARGET_ARCH) + minimal: minimal.cc model_utils.cc aarch64-linux-gnu-g++ -std=c++11 -o minimal minimal.cc model_utils.cc \ -I$(MAKEFILE_DIR)/../../../ \ -I$(MAKEFILE_DIR)/../../../libedgetpu/ \ -I$(TENSORFLOW_DIR) \ + -I$(TENSORFLOW_DIR)/tensorflow/lite/tools \ -I$(TENSORFLOW_DIR)/tensorflow/lite/tools/make/downloads/flatbuffers/include \ - -L$(TENSORFLOW_DIR)/tensorflow/lite/tools/make/gen/generic-aarch64_armv8-a/lib \ - -L$(MAKEFILE_DIR)/../../../libedgetpu/direct/aarch64/ \ - -ltensorflow-lite -l:libedgetpu.so.1.0 -lpthread -lm -ldl + -I$(TENSORFLOW_DIR)/tensorflow/lite/tools/make/downloads/absl \ + -L$(TENSORFLOW_DIR)/tensorflow/lite/tools/make/gen/$(TARGET_OUT_DIR)/lib \ + -L$(MAKEFILE_DIR)/../../../libedgetpu/direct/$(TARGET_ARCH) \ + -lbenchmark -ltensorflow-lite -ledgetpu -lpthread -lm -ldl clean: rm -f minimal diff --git a/src/cpp/examples/minimal.cc b/src/cpp/examples/minimal.cc index 6259e2d..6a18f3e 100644 --- a/src/cpp/examples/minimal.cc +++ 
b/src/cpp/examples/minimal.cc @@ -14,6 +14,20 @@ #include "tensorflow/lite/interpreter.h" #include "tensorflow/lite/model.h" + + +namespace coral { + + std::string GetTempPrefix() { + const char* env_temp = getenv("TEMP"); + if (env_temp) { + return env_temp; + } else { + return "/tmp"; + } + } +} + std::vector decode_bmp(const uint8_t* input, int row_size, int width, int height, int channels, bool top_down) { std::vector output(height * width * channels);