-
Notifications
You must be signed in to change notification settings - Fork 2
/
CMakeLists.txt
204 lines (162 loc) · 8.42 KB
/
CMakeLists.txt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
cmake_minimum_required(VERSION 3.15)

# ==============================================================================
# Define the options for the anira library
# ==============================================================================
# Shall the library be built as a shared library?
option(BUILD_SHARED_LIBS "Build the library as a shared library" ON)
option(ANIRA_WITH_BENCHMARK "Build the library with benchmarking capabilities" OFF)
# Fixed typo in the help strings below: "plattform(s)" -> "platform(s)".
option(ANIRA_WITH_EXAMPLES "Add Example Targets for desktop platforms (juce plugin, benchmarks, minimal inference and model examples)" OFF)
option(ANIRA_WITH_BELA_EXAMPLE "Add Example Targets for bela platform (includes a model)" OFF)
option(ANIRA_WITH_INSTALL "Add install targets" OFF)

# Select the backends for the inference engine, multiple backends can be selected
option(ANIRA_WITH_LIBTORCH "Build with LibTorch backend" ON)
option(ANIRA_WITH_ONNXRUNTIME "Build with ONNX Runtime backend" ON)
option(ANIRA_WITH_TFLITE "Build with TensorFlow Lite backend" ON)

# Shall semaphores be used for synchronization instead of atomic variables?
option(ANIRA_WITH_SEMAPHORE "Use semaphores for synchronization instead of atomic variables" OFF)
# ==============================================================================
# Setup the project
# ==============================================================================
set(PROJECT_NAME anira)
# LANGUAGES CXX: all sources in this file are C++, so skip probing for a C compiler.
project(${PROJECT_NAME} VERSION 0.1.3 LANGUAGES CXX)

# Sets the minimum macOS version, c++20 is only available from macOS 11.0
if(APPLE)
    # NOTE(review): FORCE overwrites any user-provided deployment target; kept
    # for backward compatibility, but consider only defaulting it when unset.
    set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0" CACHE STRING "Minimum version of the target platform" FORCE)
    # The cache entry is unconditionally set above, so report it directly
    # (the previous if(CMAKE_OSX_DEPLOYMENT_TARGET) guard was always true).
    message(STATUS "The minimum macOS version is set to $CACHE{CMAKE_OSX_DEPLOYMENT_TARGET}.")
    # Universal binaries are rejected because the inference engines do not
    # support them; only a single architecture (or an empty value) is allowed.
    if(NOT (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64" OR CMAKE_OSX_ARCHITECTURES STREQUAL ""))
        message(FATAL_ERROR "The macOS architectures must be set to either 'arm64', 'x86_64', or an empty string. Universal binaries are not supported, because the inference engines do not support them. Please set the CMAKE_OSX_ARCHITECTURES variable to either 'arm64', 'x86_64', or leave it empty.")
    endif()
endif()

# Sets the cpp language minimum, 20 because of new semaphore types
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED True)
# ==============================================================================
# Download and install the selected inference engines
# ==============================================================================
# At least one inference backend must be enabled.
if(NOT (ANIRA_WITH_LIBTORCH OR ANIRA_WITH_ONNXRUNTIME OR ANIRA_WITH_TFLITE))
    message(FATAL_ERROR "No backend selected. Please select at least one backend by setting one of the following options to ON: ANIRA_WITH_LIBTORCH, ANIRA_WITH_ONNXRUNTIME, or ANIRA_WITH_TFLITE. For example, add '-DANIRA_WITH_LIBTORCH=ON' to your CMake command line.")
endif()

message(STATUS "Selected backends:")

# The Setup*.cmake scripts included below populate these lists with the
# sources, header directories and library directories of each enabled backend.
set(BACKEND_SOURCES)
set(BACKEND_BUILD_HEADER_DIRS)
set(BACKEND_BUILD_LIBRARY_DIRS)

# One triple per backend: enable option ; setup script ; processor source.
foreach(_backend_spec
        "ANIRA_WITH_LIBTORCH;SetupLibTorch.cmake;LibTorchProcessor.cpp"
        "ANIRA_WITH_ONNXRUNTIME;SetupOnnxRuntime.cmake;OnnxRuntimeProcessor.cpp"
        "ANIRA_WITH_TFLITE;SetupTensorflowLite.cmake;TFLiteProcessor.cpp")
    list(GET _backend_spec 0 _backend_option)
    list(GET _backend_spec 1 _backend_setup)
    list(GET _backend_spec 2 _backend_source)
    if(${_backend_option})
        include(cmake/${_backend_setup})
        list(APPEND BACKEND_SOURCES src/backends/${_backend_source})
    endif()
endforeach()

# Report which thread-synchronization primitive was selected.
if(ANIRA_WITH_SEMAPHORE)
    set(_sync_primitive "semaphores")
else()
    set(_sync_primitive "atomic variables")
endif()
message(STATUS "Using ${_sync_primitive} for thread synchronization.")
# ==============================================================================
# Build the library
# ==============================================================================
# BUILD_SHARED_LIBS decides whether the target is built shared or static.
# Sources listed here are PRIVATE, exactly as with target_sources(PRIVATE).
add_library(${PROJECT_NAME}
    # Backend
    src/backends/BackendBase.cpp
    ${BACKEND_SOURCES}
    # Scheduler
    src/scheduler/InferenceManager.cpp
    src/scheduler/InferenceThread.cpp
    src/scheduler/InferenceThreadPool.cpp
    src/scheduler/SessionElement.cpp
    # Utils
    src/utils/AudioBuffer.cpp
    src/utils/RingBuffer.cpp
    # Interface
    src/InferenceHandler.cpp
    src/PrePostProcessor.cpp
    # System
    src/system/RealtimeThread.cpp
)

# Position independent code is required so that a static anira library can
# still be linked into a shared library.
set_target_properties(${PROJECT_NAME} PROPERTIES POSITION_INDEPENDENT_CODE ON)

# Namespaced alias so the project can be consumed via add_subdirectory.
add_library(${PROJECT_NAME}::${PROJECT_NAME} ALIAS ${PROJECT_NAME})
# The anira headers include the backend headers, so each backend's include
# directories must be propagated PUBLIC to build-tree consumers as well.
# SYSTEM suppresses warnings coming from the engine headers.
foreach(_backend_include_dir IN LISTS BACKEND_BUILD_HEADER_DIRS)
    target_include_directories(${PROJECT_NAME} SYSTEM PUBLIC
        $<BUILD_INTERFACE:${_backend_include_dir}>
    )
endforeach()

# Public headers of the anira library itself (top-level include directory).
target_include_directories(${PROJECT_NAME}
    PUBLIC
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
)

# Link directories of the prebuilt inference engines (build tree only).
foreach(_backend_library_dir IN LISTS BACKEND_BUILD_LIBRARY_DIRS)
    target_link_directories(${PROJECT_NAME} PUBLIC
        $<BUILD_INTERFACE:${_backend_library_dir}>
    )
endforeach()

# Tell anira's public headers which backends were compiled in and which
# synchronization primitive (semaphore vs. atomic) is in use.
target_compile_definitions(${PROJECT_NAME}
    PUBLIC
        $<$<BOOL:${ANIRA_WITH_LIBTORCH}>:USE_LIBTORCH>
        $<$<BOOL:${ANIRA_WITH_ONNXRUNTIME}>:USE_ONNXRUNTIME>
        $<$<BOOL:${ANIRA_WITH_TFLITE}>:USE_TFLITE>
        $<$<BOOL:${ANIRA_WITH_SEMAPHORE}>:USE_SEMAPHORE>
)
if(ANIRA_WITH_LIBTORCH)
    # find_package(Torch) puts libraries such as libc10.so and libkineto.a into
    # ${TORCH_LIBRARIES} as absolute paths. Linking those PUBLIC would bake the
    # paths into the installed anira target and break consumers on other
    # systems, so only the "torch" targets are linked PUBLIC (needed for the
    # installed library, otherwise symbols are not found) and everything else
    # is linked PRIVATE.
    # Until CMake 3.26 there is a bug where torch_cpu is not found when linking
    # publicly while anira is added as a subdirectory of another project, see
    # https://gitlab.kitware.com/cmake/cmake/-/issues/24163 — in that case all
    # torch libraries are linked PRIVATE instead.
    if(CMAKE_VERSION VERSION_LESS "3.26.0" AND NOT (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR))
        target_link_libraries(${PROJECT_NAME} PRIVATE ${TORCH_LIBRARIES})
        # NOTE(review): presumably read by the install scripts to know that no
        # torch target is public — verify against cmake/install.cmake.
        set(TORCH_LIBRARIES_ALL_PRIVATE TRUE)
    else()
        foreach(_torch_lib IN LISTS TORCH_LIBRARIES)
            # Exact-match the two public torch targets; all other entries
            # (absolute library paths) stay private.
            if(_torch_lib MATCHES "^(torch|torch_library)$")
                target_link_libraries(${PROJECT_NAME} PUBLIC ${_torch_lib})
            else()
                target_link_libraries(${PROJECT_NAME} PRIVATE ${_torch_lib})
            endif()
        endforeach()
    endif()
endif()

# The onnxruntime library requires PUBLIC linking because otherwise the
# "_OrtGetApiBase" symbol is not found.
if(ANIRA_WITH_ONNXRUNTIME)
    target_link_libraries(${PROJECT_NAME} PUBLIC onnxruntime)
endif()

if(ANIRA_WITH_TFLITE)
    target_link_libraries(${PROJECT_NAME} PRIVATE tensorflowlite_c)
endif()

if(ANIRA_WITH_BENCHMARK)
    include(cmake/benchmark-src.cmake)
endif()
# ==============================================================================
# Add install targets for the library
# ==============================================================================
# install.cmake presumably defines the install()/export rules for the anira
# target — verify against that file.
if(ANIRA_WITH_INSTALL)
include(cmake/install.cmake)
endif()
# ==============================================================================
# Add support for MSVC
# ==============================================================================
# Toolchain-specific setup for the Microsoft compiler.
# NOTE(review): if(MSVC) is also true for clang-cl — confirm that is intended.
if (MSVC)
include(cmake/msvc-support.cmake)
endif()
# ==============================================================================
# Build example targets and add extras (clone example model repos)
# ==============================================================================
# First we import the extras folder since we need the compile definitions (model paths) for the examples
if(ANIRA_WITH_EXAMPLES OR ANIRA_WITH_BELA_EXAMPLE)
add_subdirectory(extras)
add_subdirectory(examples)
endif()