diff --git a/.clang-format b/.clang-format
deleted file mode 100644
index 4ae554ea..00000000
--- a/.clang-format
+++ /dev/null
@@ -1,68 +0,0 @@
-# Generated from CLion C/C++ Code Style settings
-BasedOnStyle: LLVM
-Language: Cpp
-Standard: c++20
-AccessModifierOffset: -4
-AlignAfterOpenBracket: Align
-AlignConsecutiveAssignments: false
-AlignOperands: true
-AllowAllArgumentsOnNextLine: false
-AllowAllConstructorInitializersOnNextLine: false
-AllowAllParametersOfDeclarationOnNextLine: false
-AllowShortBlocksOnASingleLine: Always
-AllowShortCaseLabelsOnASingleLine: false
-AllowShortFunctionsOnASingleLine: All
-AllowShortIfStatementsOnASingleLine: Always
-AllowShortLambdasOnASingleLine: All
-AllowShortLoopsOnASingleLine: true
-AlwaysBreakAfterReturnType: None
-AlwaysBreakTemplateDeclarations: Yes
-BreakBeforeBraces: Custom
-BraceWrapping:
- AfterCaseLabel: false
- AfterClass: false
- AfterControlStatement: Never
- AfterEnum: false
- AfterFunction: false
- AfterNamespace: false
- AfterUnion: false
- BeforeCatch: false
- BeforeElse: false
- IndentBraces: false
- SplitEmptyFunction: false
- SplitEmptyRecord: true
-BreakBeforeBinaryOperators: None
-BreakBeforeTernaryOperators: true
-BreakConstructorInitializers: BeforeColon
-BreakInheritanceList: BeforeColon
-ColumnLimit: 0
-CompactNamespaces: false
-ContinuationIndentWidth: 8
-IndentCaseLabels: true
-IndentPPDirectives: None
-IndentWidth: 4
-KeepEmptyLinesAtTheStartOfBlocks: true
-MaxEmptyLinesToKeep: 2
-NamespaceIndentation: All
-ObjCSpaceAfterProperty: false
-ObjCSpaceBeforeProtocolList: true
-PointerAlignment: Right
-ReflowComments: false
-SpaceAfterCStyleCast: true
-SpaceAfterLogicalNot: false
-SpaceAfterTemplateKeyword: false
-SpaceBeforeAssignmentOperators: true
-SpaceBeforeCpp11BracedList: false
-SpaceBeforeCtorInitializerColon: true
-SpaceBeforeInheritanceColon: true
-SpaceBeforeParens: ControlStatements
-SpaceBeforeRangeBasedForLoopColon: true
-SpaceInEmptyParentheses: false
-SpacesBeforeTrailingComments: 0
-SpacesInAngles: false
-SpacesInCStyleCastParentheses: false
-SpacesInContainerLiterals: false
-SpacesInParentheses: false
-SpacesInSquareBrackets: false
-TabWidth: 4
-UseTab: ForContinuationAndIndentation
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
new file mode 100644
index 00000000..65bd4949
--- /dev/null
+++ b/.github/workflows/build.yaml
@@ -0,0 +1,32 @@
+name: Build Dockerfile
+on: push
+
+jobs:
+ build_dockerfile:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: docker/setup-qemu-action@v3
+ - uses: docker/setup-buildx-action@v3
+ - uses: actions/checkout@v4
+
+ - name: Build Dockerfile
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ load: true
+ tags: "tentris:${{ github.sha }}"
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
+
+ - uses: shrink/actions-docker-extract@v3
+ id: extract
+ name: Extracting executables from docker image
+ with:
+ image: "tentris:${{ github.sha }}"
+ path: /.
+
+ - uses: actions/upload-artifact@v4
+ name: Uploading executables as artifacts
+ with:
+ name: tentris-frontend
+ path: ${{ steps.extract.outputs.destination }}/tentris_*
diff --git a/.gitignore b/.gitignore
index deb919bc..2baa1414 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,10 +4,9 @@
# log files
tentris.log
-# Created by https://www.toptal.com/developers/gitignore/api/c++,conan,jetbrains+all,cmake
-# Edit at https://www.toptal.com/developers/gitignore?templates=c++,conan,jetbrains+all,cmake
+.clang-format
+.clang-tidy
-### C++ ###
# Prerequisites
*.d
@@ -21,12 +20,6 @@ tentris.log
*.gch
*.pch
-# Linker files
-*.ilk
-
-# Debugger Files
-*.pdb
-
# Compiled Dynamic libraries
*.so
*.dylib
@@ -47,121 +40,16 @@ tentris.log
*.out
*.app
-### CMake ###
-CMakeLists.txt.user
-CMakeCache.txt
-CMakeFiles
-CMakeScripts
-Testing
-Makefile
-cmake_install.cmake
-install_manifest.txt
-compile_commands.json
-CTestTestfile.cmake
-_deps
-CMakeUserPresets.json
-
-### CMake Patch ###
-# External projects
-*-prefix/
-
-### Conan ###
-# Conan build information
-conan.lock
-conanbuildinfo.*
-conaninfo.txt
-graph_info.json
-
-### JetBrains+all ###
-# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
-# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
-
-# User-specific stuff
-.idea/**/workspace.xml
-.idea/**/tasks.xml
-.idea/**/usage.statistics.xml
-.idea/**/dictionaries
-.idea/**/shelf
-
-# Generated files
-.idea/**/contentModel.xml
-
-# Sensitive or high-churn files
-.idea/**/dataSources/
-.idea/**/dataSources.ids
-.idea/**/dataSources.local.xml
-.idea/**/sqlDataSources.xml
-.idea/**/dynamic.xml
-.idea/**/uiDesigner.xml
-.idea/**/dbnavigator.xml
-
-# Gradle
-.idea/**/gradle.xml
-.idea/**/libraries
-
-# Gradle and Maven with auto-import
-# When using Gradle or Maven with auto-import, you should exclude module files,
-# since they will be recreated, and may cause churn. Uncomment if using
-# auto-import.
-# .idea/artifacts
-# .idea/compiler.xml
-# .idea/jarRepositories.xml
-# .idea/modules.xml
-# .idea/*.iml
-# .idea/modules
-# *.iml
-# *.ipr
-
-# CMake
-cmake-build-*/
-
-# Mongo Explorer plugin
-.idea/**/mongoSettings.xml
-
-# File-based project format
-*.iws
-
-# IntelliJ
-out/
-
-# mpeltonen/sbt-idea plugin
-.idea_modules/
-
-# JIRA plugin
-atlassian-ide-plugin.xml
-
-# Cursive Clojure plugin
-.idea/replstate.xml
-
-# Crashlytics plugin (for Android Studio and IntelliJ)
-com_crashlytics_export_strings.xml
-crashlytics.properties
-crashlytics-build.properties
-fabric.properties
-
-# Editor-based Rest Client
-.idea/httpRequests
-
-# Android studio 3.1+ serialized cache file
-.idea/caches/build_file_checksums.ser
-
-### JetBrains+all Patch ###
-# Ignores the whole .idea folder and all .iml files
-# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
+# project folders
+cmake-build*/
+# intellij
.idea/
+venv/
+/build/
-# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
-
-*.iml
-modules.xml
-.idea/misc.xml
-*.ipr
-
-# Sonarlint plugin
-.idea/sonarlint
-
-# End of https://www.toptal.com/developers/gitignore/api/c++,conan,jetbrains+all,cmake
-# docu folder
-/docu/
\ No newline at end of file
+test_package/build/
+test_package/CMakeUserPresets.json
+/CMakeUserPresets.json
+/conan_provider.cmake
\ No newline at end of file
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9cf68038..0e3c213d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,23 +1,33 @@
-cmake_minimum_required(VERSION 3.18)
-project(tentris
- VERSION 1.3.1
- DESCRIPTION "tensor-based triplestore")
+cmake_minimum_required(VERSION 3.24)
+project(tentris VERSION 1.4.0
+ DESCRIPTION "Tentris - A tensor-based Triplestore.")
include(cmake/boilerplate_init.cmake)
boilerplate_init()
-option(CONAN_CMAKE "If this should use conan cmake to fetch dependencies" On)
-if (IS_TOP_LEVEL AND CONAN_CMAKE)
- include(cmake/conan_cmake.cmake)
- install_packages_via_conan("${CMAKE_CURRENT_SOURCE_DIR}/conanfile.py" "")
-endif ()
+if (PROJECT_IS_TOP_LEVEL)
+ set(CONAN_INSTALL_ARGS "${CONAN_INSTALL_ARGS};-o=boost/*:header_only=True")
-if (NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt)
- if (NOT CMAKE_BUILD_TYPE)
- set(CMAKE_BUILD_TYPE "Release" CACHE STRING "" FORCE)
+ if (BUILD_TESTING)
+ set(CONAN_INSTALL_ARGS "${CONAN_INSTALL_ARGS};-o=&:with_test_deps=True")
endif ()
endif ()
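+# fetch the shared .clang-format/.clang-tidy configs from the tentris-cpp-coding-guidelines repository at configure time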
+set(style_files
+ .clang-format
+ .clang-tidy
+)
+foreach(style_file ${style_files})
+ file(DOWNLOAD "https://raw.githubusercontent.com/dice-group/tentris-cpp-coding-guidelines/main/${style_file}"
+ "${CMAKE_SOURCE_DIR}/${style_file}"
+ TLS_VERIFY ON)
+endforeach()
+
+if (PROJECT_IS_TOP_LEVEL AND USE_CLANG_TIDY)
+ include(cmake/ClangTidy.cmake)
+endif ()
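+# expected-lite: select nonstd::expected as the expected implementation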
+add_compile_definitions(nsel_CONFIG_SELECT_EXPECTED=nsel_EXPECTED_NONSTD)
add_subdirectory(libs)
+add_subdirectory(execs)
diff --git a/Dockerfile b/Dockerfile
index 27e54024..322f5b22 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,24 +1,29 @@
-FROM alpine:3.17 AS builder
+FROM alpine:3.20 AS builder
ARG MARCH="x86-64-v3"
RUN apk update && \
apk add \
make cmake autoconf automake pkgconfig \
gcc g++ gdb \
- clang15 clang15-dev clang15-libs clang15-extra-tools clang15-static lldb llvm15 llvm15-dev lld \
+ clang17 clang17-dev clang17-libs clang17-extra-tools clang17-static lldb llvm17 llvm17-dev lld \
openjdk11-jdk \
python3 py3-pip \
- bash git libtool util-linux-dev linux-headers
+ bash git libtool util-linux-dev linux-headers patch pipx
-ARG CC="clang"
-ARG CXX="clang++"
+
+ENV CC="/usr/bin/clang-17"
+ENV CXX="/usr/bin/clang++-17"
ENV CXXFLAGS="${CXXFLAGS} -march=${MARCH}"
-RUN rm /usr/bin/ld && ln -s /usr/bin/lld /usr/bin/ld # use lld as default linker
+ENV CONAN_DISABLE_STRICT_MODE=1
+ENV PIPX_BIN_DIR="/usr/local/bin"
+# use lld as default linker
+RUN rm /usr/bin/ld && ln -s /usr/bin/lld /usr/bin/ld
-# Compile more recent tcmalloc-minimal with clang-14 + -march
-RUN git clone --quiet --branch gperftools-2.9.1 --depth 1 https://github.com/gperftools/gperftools
-WORKDIR /gperftools
+# Compile more recent tcmalloc-minimal
+WORKDIR /usr/local/src
+RUN git clone --quiet --branch gperftools-2.15 --depth 1 https://github.com/gperftools/gperftools
+WORKDIR /usr/local/src/gperftools
RUN ./autogen.sh
RUN ./configure \
--enable-minimal \
@@ -27,54 +32,46 @@ RUN ./configure \
--enable-dynamic-sized-delete-support && \
make -j$(nproc) && \
make install
-WORKDIR /
-
-ENV CONAN_DISABLE_STRICT_MODE=1
# install and configure conan
-RUN pip3 install conan==1.62.0 && \
- conan user && \
- conan profile new --detect default && \
- conan profile update settings.compiler=clang default && \
- conan profile update settings.compiler.libcxx=libstdc++11 default && \
- conan profile update settings.compiler.cppstd=20 default && \
- conan profile update env.CXXFLAGS="${CXXFLAGS}" default && \
- conan profile update env.CXX="${CXX}" default && \
- conan profile update env.CC="${CC}" default && \
- conan profile update options.boost:extra_b2_flags="cxxflags=\\\"${CXXFLAGS}\\\"" default && \
- conan profile update options.boost:header_only=True default && \
- conan profile update options.restinio:asio=boost default
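+# write a minimal Conan 2 default profile by hand (clang 17, C++20, Release)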
+RUN pipx install conan==2.9.3 && \
+ conan profile detect && \
+ echo '[settings]' > ~/.conan2/profiles/default && \
+ echo 'os={{ detect_api.detect_os() }}' >> ~/.conan2/profiles/default && \
+ echo 'arch={{ detect_api.detect_arch() }}' >> ~/.conan2/profiles/default && \
+ echo 'build_type=Release' >> ~/.conan2/profiles/default && \
+ echo 'compiler=clang' >> ~/.conan2/profiles/default && \
+ echo 'compiler.version=17' >> ~/.conan2/profiles/default && \
+ echo 'compiler.cppstd=20' >> ~/.conan2/profiles/default && \
+ echo 'compiler.libcxx=libstdc++11' >> ~/.conan2/profiles/default
# add conan repositories
RUN conan remote add dice-group https://conan.dice-research.org/artifactory/api/conan/tentris
-# build and cache dependencies via conan
-WORKDIR /conan_cache
-COPY conanfile.py .
-COPY CMakeLists.txt .
-RUN conan install . --build=* --profile default
# import project files
-WORKDIR /tentris
+WORKDIR /usr/local/src/tentris
COPY libs libs
COPY execs execs
COPY cmake cmake
COPY CMakeLists.txt .
COPY conanfile.py .
+RUN wget https://github.com/conan-io/cmake-conan/raw/develop2/conan_provider.cmake -O conan_provider.cmake
-##build
-WORKDIR /tentris/execs/build
+# build
+WORKDIR /usr/local/src/tentris/build
RUN cmake \
-DCMAKE_BUILD_TYPE=Release \
-DWITH_TCMALLOC=true \
-DSTATIC=true \
-DMARCH=${MARCH} \
+ -DCMAKE_PROJECT_TOP_LEVEL_INCLUDES="conan_provider.cmake" \
..
RUN make -j $(nproc)
-FROM scratch
-COPY --from=builder /tentris/execs/build/tentris-server/tentris_server /tentris_server
-COPY --from=builder /tentris/execs/build/tentris-loader/tentris_loader /tentris_loader
-COPY --from=builder /tentris/execs/build/tools/deduplicated-nt/deduplicated_nt /deduplicated_nt
-COPY --from=builder /tentris/execs/build/tools/rdf2ids/rdf2ids /rdf2ids
+FROM scratch AS binaries
+COPY --from=builder /usr/local/src/tentris/build/execs/tentris-server/tentris_server /tentris_server
+COPY --from=builder /usr/local/src/tentris/build/execs/tentris-loader/tentris_loader /tentris_loader
+COPY --from=builder /usr/local/src/tentris/build/execs/tools/deduplicated-nt/deduplicated_nt /deduplicated_nt
+COPY --from=builder /usr/local/src/tentris/build/execs/tools/rdf2ids/rdf2ids /rdf2ids
COPY README.MD README.MD
-ENTRYPOINT ["/tentris_server"]
+ENTRYPOINT ["/tentris_server"]
\ No newline at end of file
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
index 261eeb9e..f49a4e16 100644
--- a/LICENSE-APACHE
+++ b/LICENSE-APACHE
@@ -198,4 +198,4 @@
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
- limitations under the License.
+ limitations under the License.
\ No newline at end of file
diff --git a/LICENSE-MIT b/LICENSE-MIT
index 5539ba2c..bf4ec8f3 100644
--- a/LICENSE-MIT
+++ b/LICENSE-MIT
@@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+SOFTWARE.
\ No newline at end of file
diff --git a/README.MD b/README.MD
index b3661719..d52a9984 100644
--- a/README.MD
+++ b/README.MD
@@ -80,7 +80,7 @@ Use the [Dockerfile](./Dockerfile) to build tentris.
Tᴇɴᴛʀɪs is known to build on Ubuntu 22.04 and newer.
-Building was tested with Clang 15. As standard library, only libstdc++11 (v12) was tested. For details
+Building was tested with Clang 17 and 19. As the standard library, only libstdc++11 (v13) was tested. For details
refer to the [Dockerfile](./Dockerfile) or github actions.
diff --git a/cmake/ClangTidy.cmake b/cmake/ClangTidy.cmake
new file mode 100644
index 00000000..194d9918
--- /dev/null
+++ b/cmake/ClangTidy.cmake
@@ -0,0 +1,9 @@
+find_program(CLANG_TIDY_FOUND "clang-tidy")
+if(NOT CLANG_TIDY_FOUND)
+ message(FATAL_ERROR "clang-tidy not found")
+endif()
+
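+# run clang-tidy on every C++ target; ignore warning flags that the clang behind clang-tidy does not know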
+set(CMAKE_CXX_CLANG_TIDY
+ ${CLANG_TIDY_FOUND};
+ -extra-arg=-Wno-unknown-warning-option;
+)
\ No newline at end of file
diff --git a/cmake/boilerplate_init.cmake b/cmake/boilerplate_init.cmake
index a70a8beb..d784d280 100644
--- a/cmake/boilerplate_init.cmake
+++ b/cmake/boilerplate_init.cmake
@@ -1,14 +1,24 @@
macro(boilerplate_init)
## enforce standard compliance
- set(CMAKE_CXX_STANDARD_REQUIRED True)
+ set(CMAKE_CXX_STANDARD 20)
+ set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
+ if (PROJECT_IS_TOP_LEVEL AND BUILD_TESTING)
+ set(CMAKE_POSITION_INDEPENDENT_CODE ON) # need fPIC to build tests
+ endif ()
+
## C++ compiler flags
if (MSVC)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Wall")
else ()
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Wold-style-cast -Wcast-qual")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -g -O0")
+
+ if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+ # -Wchanges-meaning is not useful
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-changes-meaning")
+ endif ()
endif ()
## C++ language visibility configuration
@@ -24,6 +34,4 @@ macro(boilerplate_init)
set(CMAKE_BUILD_TYPE "Release" CACHE STRING "" FORCE)
endif ()
endif ()
-
- string(COMPARE EQUAL "${CMAKE_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}" IS_TOP_LEVEL)
endmacro()
\ No newline at end of file
diff --git a/cmake/component-config.cmake.in b/cmake/component-config.cmake.in
deleted file mode 100644
index d8c788f1..00000000
--- a/cmake/component-config.cmake.in
+++ /dev/null
@@ -1,5 +0,0 @@
-# Dummy config file
-# When a dependency is added with add_subdirectory, but searched with find_package
-
-# Redirect to the directory added with add_subdirectory
-add_subdirectory(@PROJECT_SOURCE_DIR@ @PROJECT_BINARY_DIR@)
\ No newline at end of file
diff --git a/cmake/conan_cmake.cmake b/cmake/conan_cmake.cmake
deleted file mode 100644
index fcc6a1eb..00000000
--- a/cmake/conan_cmake.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-macro(install_packages_via_conan conanfile conan_options)
-
- list(APPEND CMAKE_MODULE_PATH ${CMAKE_BINARY_DIR})
- list(APPEND CMAKE_PREFIX_PATH ${CMAKE_BINARY_DIR})
-
- if (NOT EXISTS "${CMAKE_BINARY_DIR}/conan.cmake")
- message(STATUS "Downloading conan.cmake from https://github.com/conan-io/cmake-conan")
- file(DOWNLOAD "https://raw.githubusercontent.com/conan-io/cmake-conan/0.18.1/conan.cmake"
- "${CMAKE_BINARY_DIR}/conan.cmake"
- TLS_VERIFY ON)
- endif ()
- include(${CMAKE_BINARY_DIR}/conan.cmake)
-
- conan_cmake_autodetect(settings)
-
- if (IS_TOP_LEVEL AND BUILD_TESTING)
- set(CONAN_HYPERTRIE_WITH_TEST_DEPS "True")
- else()
- set(CONAN_HYPERTRIE_WITH_TEST_DEPS "False")
- endif()
- conan_cmake_install(PATH_OR_REFERENCE ${conanfile}
- BUILD missing
- SETTINGS ${settings}
- OPTIONS "${conan_options}"
- GENERATOR "CMakeDeps")
-endmacro()
\ No newline at end of file
diff --git a/cmake/dummy-config.cmake.in b/cmake/dummy-config.cmake.in
deleted file mode 100644
index d8c788f1..00000000
--- a/cmake/dummy-config.cmake.in
+++ /dev/null
@@ -1,5 +0,0 @@
-# Dummy config file
-# When a dependency is added with add_subdirectory, but searched with find_package
-
-# Redirect to the directory added with add_subdirectory
-add_subdirectory(@PROJECT_SOURCE_DIR@ @PROJECT_BINARY_DIR@)
\ No newline at end of file
diff --git a/cmake/install_components.cmake b/cmake/install_components.cmake
new file mode 100644
index 00000000..ad00017c
--- /dev/null
+++ b/cmake/install_components.cmake
@@ -0,0 +1,65 @@
+include(GNUInstallDirs)
+include(CMakePackageConfigHelpers)
+
+function(install_component TYPE COMPONENT_NAME INCLUDE_PATH)
+ set(lib_name "${PROJECT_NAME}-${COMPONENT_NAME}")
+
+ set(possible_types INTERFACE PUBLIC)
+ if(NOT TYPE IN_LIST possible_types)
+ message(FATAL_ERROR "Argument TYPE=${TYPE} of function install_component is not allowed. Allowed values are ${possible_types}")
+ endif()
+
+ if("${TYPE}" STREQUAL "INTERFACE")
+ target_include_directories(
+ ${lib_name} INTERFACE $/${PROJECT_NAME}/${COMPONENT_NAME})
+
+ install(TARGETS ${lib_name}
+ EXPORT ${lib_name}-config
+ INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
+ )
+ else()
+ set_target_properties(${lib_name} PROPERTIES
+ VERSION ${PROJECT_VERSION}
+ SOVERSION ${PROJECT_VERSION_MAJOR}
+ CXX_STANDARD 23
+ CXX_EXTENSIONS OFF
+ CXX_STANDARD_REQUIRED ON)
+ target_include_directories(
+ ${lib_name} PUBLIC $/${PROJECT_NAME}/${COMPONENT_NAME})
+
+ install(TARGETS ${lib_name}
+ EXPORT ${lib_name}-config
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
+ INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
+ )
+ endif()
+
+ install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${INCLUDE_PATH}/
+ DESTINATION include/${PROJECT_NAME}/${COMPONENT_NAME}/
+ FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h")
+
+ install(
+ EXPORT ${lib_name}-config
+ FILE ${lib_name}-config.cmake
+ NAMESPACE ${PROJECT_NAME}::
+ DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/${COMPONENT_NAME}/)
+
+endfunction()
+
+function(install_package)
+
+ write_basic_package_version_file("${CMAKE_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake"
+ VERSION ${PROJECT_VERSION}
+ COMPATIBILITY SameMajorVersion)
+
+ configure_package_config_file(
+ "${PROJECT_SOURCE_DIR}/cmake/main-component-config.cmake.in"
+ "${CMAKE_BINARY_DIR}/${PROJECT_NAME}-config.cmake"
+ INSTALL_DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/)
+
+
+ install(FILES "${CMAKE_BINARY_DIR}/${PROJECT_NAME}-config.cmake"
+ "${PROJECT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake"
+ DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/)
+endfunction()
\ No newline at end of file
diff --git a/cmake/install_library.cmake b/cmake/install_library.cmake
deleted file mode 100644
index 577cdd18..00000000
--- a/cmake/install_library.cmake
+++ /dev/null
@@ -1,79 +0,0 @@
-include(GNUInstallDirs)
-include(CMakePackageConfigHelpers)
-
-function(install_component COMPONENT_NAME INCLUDE_PATH)
-
- target_include_directories(
- ${COMPONENT_NAME} PUBLIC $/${PROJECT_NAME}/${COMPONENT_NAME})
-
- install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${INCLUDE_PATH}/
- DESTINATION include/${PROJECT_NAME}/${COMPONENT_NAME}/
- FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h")
-
- install(TARGETS ${COMPONENT_NAME}
- EXPORT ${COMPONENT_NAME}-config
- ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
- LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
- RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
- INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
- )
-
- install(
- EXPORT ${COMPONENT_NAME}-config
- FILE ${COMPONENT_NAME}-config.cmake
- NAMESPACE ${PROJECT_NAME}::
- DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/${COMPONENT_NAME}/)
-
- write_basic_package_version_file(${COMPONENT_NAME}-config-version.cmake
- VERSION ${PROJECT_VERSION}
- COMPATIBILITY SameMinorVersion)
- install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${COMPONENT_NAME}-config-version.cmake DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/)
-endfunction()
-
-function(install_interface_component COMPONENT_NAME INCLUDE_PATH)
-
- target_include_directories(
- ${COMPONENT_NAME} INTERFACE $/${PROJECT_NAME}/${COMPONENT_NAME})
-
- install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${INCLUDE_PATH}/
- DESTINATION include/${PROJECT_NAME}/${COMPONENT_NAME}/
- FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h")
-
- install(TARGETS ${COMPONENT_NAME}
- EXPORT ${COMPONENT_NAME}-config
- INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/
- )
-
- install(
- EXPORT ${COMPONENT_NAME}-config
- FILE ${COMPONENT_NAME}-config.cmake
- NAMESPACE ${PROJECT_NAME}::
- DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/${COMPONENT_NAME}/)
-
- write_basic_package_version_file(${COMPONENT_NAME}-config-version.cmake
- VERSION ${PROJECT_VERSION}
- COMPATIBILITY SameMinorVersion)
- install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${COMPONENT_NAME}-config-version.cmake DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${COMPONENT_NAME}/)
-endfunction()
-
-function(install_package)
-
- write_basic_package_version_file("${CMAKE_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake"
- VERSION ${PROJECT_VERSION}
- COMPATIBILITY SameMajorVersion)
-
- configure_package_config_file(
- "${PROJECT_SOURCE_DIR}/cmake/main-component-config.cmake.in"
- "${CMAKE_BINARY_DIR}/${PROJECT_NAME}-config.cmake"
- INSTALL_DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/)
-
- write_basic_package_version_file(
- "${PROJECT_NAME}-config-version.cmake"
- VERSION ${PROJECT_VERSION}
- COMPATIBILITY SameMajorVersion)
-
- install(FILES "${CMAKE_BINARY_DIR}/${PROJECT_NAME}-config.cmake"
- "${PROJECT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake"
- DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}/)
-endfunction()
-
diff --git a/cmake/main-component-config.cmake.in b/cmake/main-component-config.cmake.in
index 1de60f21..020415cd 100644
--- a/cmake/main-component-config.cmake.in
+++ b/cmake/main-component-config.cmake.in
@@ -3,10 +3,14 @@
# each component's config should be in a equally named subdirectory, i.e.: ${CMAKE_INSTALL_DATAROOTDIR}/cmake/@PROJECT_NAME@/${component}/${component}-config.cmake
-file(GLOB @PROJECT_NAME@_available_components LIST_DIRECTORIES true ${CMAKE_CURRENT_LIST_DIR}/*)
+file(GLOB query_available_components LIST_DIRECTORIES true ${CMAKE_CURRENT_LIST_DIR}/*)
+list(FILTER query_available_components EXCLUDE REGEX ".*\\..*")
+# todo: test with fetch_content
+message("actual: ${query_available_components}")
# available components are listed here
-set(@PROJECT_NAME@_available_components endpoint node-store rdf-tensor sparql2tensor tentris triple-store)
+set(@PROJECT_NAME@_available_components query sparql node-wrapper)
+message("expected: ${@PROJECT_NAME@_available_components}")
# check if the user provided components are actually available
foreach(component ${@PROJECT_NAME@_FIND_COMPONENTS})
@@ -15,26 +19,13 @@ foreach(component ${@PROJECT_NAME@_FIND_COMPONENTS})
endif()
endforeach()
-# default component @PROJECT_NAME@::@PROJECT_NAME@ is always included
-include(${CMAKE_CURRENT_LIST_DIR}/@PROJECT_NAME@/@PROJECT_NAME@-config.cmake)
+# set(@PROJECT_NAME@_default_component @PROJECT_NAME@)
+set(@PROJECT_NAME@_default_component endpoint)
-# add transitive dependencies among components
-set(transitive_components "")
-foreach(component ${@PROJECT_NAME@_FIND_COMPONENTS})
- if(${component} STREQUAL "tentris")
- list(APPEND transtive_components ${@PROJECT_NAME@_available_components})
- elseif(${component} STREQUAL "endpoint")
- list(APPEND transtive_components node-store rdf-tensor sparql2tensor triple-store)
- elseif(${component} STREQUAL "triple-store")
- list(APPEND transtive_components node-store rdf-tensor sparql2tensor triple-store)
- elseif(${component} STREQUAL "node-store" OR ${component} STREQUAL "sparql2tensor")
- list(APPEND transtive_components rdf-tensor)
- endif()
-endforeach()
-list(APPEND @PROJECT_NAME@_FIND_COMPONENTS ${transitive_components})
-list(REMOVE_DUPLICATES @PROJECT_NAME@_FIND_COMPONENTS)
+# default component @PROJECT_NAME@::${@PROJECT_NAME@_default_component} is always included
+include(${CMAKE_CURRENT_LIST_DIR}/@PROJECT_NAME@/${@PROJECT_NAME@_default_component}-config.cmake)
# include all listed components
foreach(component ${@PROJECT_NAME@_FIND_COMPONENTS})
- include(${CMAKE_CURRENT_LIST_DIR}/${component}/${component}-config.cmake)
-endforeach()
+ include(${CMAKE_CURRENT_LIST_DIR}/@PROJECT_NAME@/${component}-config.cmake)
+endforeach()
\ No newline at end of file
diff --git a/conanfile.py b/conanfile.py
index 7c1adc1d..f28a9e49 100644
--- a/conanfile.py
+++ b/conanfile.py
@@ -2,69 +2,48 @@
import re
from conan import ConanFile
-from conan.tools.cmake import CMake
-from conan.tools.files import rmdir, load
+from conan.tools.cmake import cmake_layout, CMake
+from conan.tools.files import load, rmdir, copy
class Recipe(ConanFile):
url = "https://tentris.dice-research.org"
- topics = ("triplestore", "sparql", "rdf", "sematic-web", "tensor")
+ topics = "triplestore", "sparql", "rdf", "sematic-web", "tensor"
settings = "os", "compiler", "build_type", "arch"
options = {
"shared": [True, False],
"fPIC": [True, False],
- "with_exec_deps": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
- "with_exec_deps": False,
"restinio/*:asio": "boost",
}
- def requirements(self):
- public_reqs = [
- "boost/1.84.0",
- "fmt/8.1.1",
- "restinio/0.6.17",
- "expected-lite/0.6.3", # overrides restinio dependency
- "hypertrie/0.9.4",
- "metall/0.21",
- "rdf4cpp/0.0.8.1",
- "dice-hash/0.4.0",
- "robin-hood-hashing/3.11.5",
- "cxxopts/2.2.1",
- "sparql-parser-base/0.3.0",
- "taskflow/3.4.0",
- "cppitertools/2.1",
- "spdlog/1.10.0",
- "rapidjson/cci.20220822",
- ]
-
- private_reqs = [
- ]
-
- exec_reqs = [
- "nlohmann_json/3.11.2",
- "vincentlaucsb-csv-parser/2.1.3",
- ]
- for req in public_reqs:
- self.requires(req)
- for req in private_reqs:
- self.requires(req, private=True)
-
- if self.options.get_safe("with_exec_deps"):
- for req in exec_reqs:
- self.requires(req)
-
- generators = ("cmake_find_package",)
-
- # Sources are located in the same place as this recipe, copy them to the recipe
exports_sources = "libs/*", "CMakeLists.txt", "cmake/*"
+ generators = "CMakeDeps", "CMakeToolchain"
- def config_options(self):
- if self.settings.os == "Windows":
- del self.options.fPIC
+ def requirements(self):
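+ # force=True overrides conflicting transitive versions; transitive_headers=True re-exports headers to consumers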
+ self.requires("hypertrie/0.9.6", transitive_headers=True)
+ self.requires("rdf4cpp/0.0.27.1", transitive_headers=True)
+ self.requires("sparql-parser-base/0.3.6")
+ self.requires("unordered_dense/4.4.0", transitive_headers=True, force=True)
+ self.requires("cxxopts/2.2.1")
+ self.requires("fmt/8.1.1", transitive_headers=True, force=True)
+ self.requires("restinio/0.7.2")
+ self.requires("expected-lite/0.8.0", override=True)
+ self.requires("taskflow/3.4.0")
+ self.requires("cppitertools/2.1")
+ self.requires("spdlog/1.14.1")
+ self.requires("rapidjson/cci.20220822")
+ self.requires("metall/0.23.1")
+ self.requires("nlohmann_json/3.11.2")
+ self.requires("vincentlaucsb-csv-parser/2.1.3")
+ self.requires("robin-hood-hashing/3.11.5", transitive_headers=True)
+ self.requires("dice-hash/0.4.6", transitive_headers=True, force=True)
+ self.requires("dice-sparse-map/0.2.5", transitive_headers=True)
+ self.requires("dice-template-library/1.9.1", transitive_headers=True)
+ self.requires("boost/1.84.0", transitive_headers=True, libs=False, force=True)
def set_name(self):
if not hasattr(self, 'name') or self.version is None:
@@ -79,33 +58,34 @@ def set_version(self):
cmake_file = load(self, os.path.join(self.recipe_folder, "CMakeLists.txt"))
self.description = re.search(r"project\([^)]*DESCRIPTION\s+\"([^\"]+)\"[^)]*\)", cmake_file).group(1)
+ def layout(self):
+ cmake_layout(self)
+
_cmake = None
def _configure_cmake(self):
- if self._cmake:
- return self._cmake
- self._cmake = CMake(self)
- self._cmake.definitions['CONAN_CMAKE'] = False
- self._cmake.configure()
+ if self._cmake is None:
+ self._cmake = CMake(self)
+ self._cmake.configure()
return self._cmake
def build(self):
- cmake = self._configure_cmake()
- cmake.build()
+ self._configure_cmake().build()
def package(self):
- cmake = self._configure_cmake()
- cmake.install()
- for dir in ("res", "share"):
- rmdir(os.path.join(self.package_folder, dir))
+ self._configure_cmake().install()
+ for dir in ("res", "share", "cmake"):
+ rmdir(self, os.path.join(self.package_folder, dir))
+ copy(self, "LICENSE", src=self.folders.base_source, dst="licenses")
def package_info(self):
- self.cpp_info.components["global"].set_property("cmake_target_name", "tentris::tentris")
- self.cpp_info.components["global"].names["cmake_find_package_multi"] = "tentris"
- self.cpp_info.components["global"].names["cmake_find_package"] = "tentris"
- self.cpp_info.components["global"].includedirs = [f"include/tentris/tentris"]
- self.cpp_info.components["global"].libdirs = []
- self.cpp_info.set_property("cmake_file_name", "tentris")
+ main_component = self.name
+ self.cpp_info.set_property("cmake_target_name", f"{self.name}")
+ self.cpp_info.components["global"].set_property("cmake_target_name", f"{self.name}::{main_component}")
+ self.cpp_info.components["global"].names["cmake_find_package_multi"] = f"{self.name}"
+ self.cpp_info.components["global"].names["cmake_find_package"] = f"{self.name}"
+ self.cpp_info.set_property("cmake_file_name", f"{self.name}")
+ self.cpp_info.components["global"].includedirs = [f"include/{self.name}/{main_component}/"]
self.cpp_info.components["global"].requires = [
"node-store", "rdf-tensor", "sparql2tensor", "triple-store", "endpoint",
"boost::boost",
@@ -125,14 +105,10 @@ def package_info(self):
"spdlog::spdlog",
]
- for component in ["node-store", "rdf-tensor", "sparql2tensor", "triple-store", "endpoint"]:
- self.cpp_info.components[f"{component}"].names["cmake_find_package_multi"] = f"{component}"
- self.cpp_info.components[f"{component}"].names["cmake_find_package"] = f"{component}"
- self.cpp_info.components[f"{component}"].includedirs = [f"include/tentris/{component}"]
-
- for component in ["node-store", "sparql2tensor", "triple-store", "endpoint"]:
- self.cpp_info.components[f"{component}"].libdirs = [f"lib/tentris/{component}"]
- self.cpp_info.components[f"{component}"].libs = [f"{component}"]
+ self.cpp_info.components["global"].requires += [
+ "vincentlaucsb-csv-parser::vincentlaucsb-csv-parser",
+ "nlohmann_json::nlohmann_json"]
self.cpp_info.components["rdf-tensor"].requires = [
"rdf4cpp::rdf4cpp",
@@ -143,6 +119,7 @@ def package_info(self):
self.cpp_info.components["node-store"].requires = [
"rdf-tensor",
+ "rdf4cpp::rdf4cpp"
]
self.cpp_info.components["sparql2tensor"].requires = [
@@ -163,7 +140,12 @@ def package_info(self):
"spdlog::spdlog",
"rapidjson::rapidjson",
]
- if self.options.get_safe("with_exec_deps"):
- self.cpp_info.components["global"].requires += [
- "vincentlaucsb-csv-parser::vincentlaucsb-csv-parser",
- "nlohmann_json::nlohmann_json"]
+
+ for component in ("node-store", "rdf-tensor", "sparql2tensor", "triple-store", "endpoint"):
+ self.cpp_info.components[f"{component}"].includedirs = [f"include/{self.name}/{component}"]
+ self.cpp_info.components[f"{component}"].names["cmake_find_package_multi"] = f"{component}"
+ self.cpp_info.components[f"{component}"].names["cmake_find_package"] = f"{component}"
+
+ for component in ("node-store", "sparql2tensor", "triple-store", "endpoint"):
+ self.cpp_info.components[f"{component}"].libdirs = [f"lib/{self.name}/{component}"]
+ self.cpp_info.components[f"{component}"].libs = [f"{self.name}-{component}"]
diff --git a/execs/CMakeLists.txt b/execs/CMakeLists.txt
index f27f482d..b9b9bde3 100644
--- a/execs/CMakeLists.txt
+++ b/execs/CMakeLists.txt
@@ -1,22 +1,6 @@
cmake_minimum_required(VERSION 3.18)
project(tentris-binaries)
-include(${CMAKE_SOURCE_DIR}/../cmake/boilerplate_init.cmake)
-
-boilerplate_init()
-if (NOT IS_TOP_LEVEL)
- message(FATAL_ERROR "Must only be used as CMake top-level project.")
-endif ()
-
-set(CMAKE_CXX_STANDARD 20)
-set(CMAKE_CXX_STANDARD_REQUIRED ON)
-set(CMAKE_CXX_EXTENSIONS OFF)
-
-if (NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt)
- if (NOT CMAKE_BUILD_TYPE)
- set(CMAKE_BUILD_TYPE "Release" CACHE STRING "" FORCE)
- endif ()
-endif ()
if (DEFINED ${MARCH})
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -march=${MARCH} -mtune=${MARCH}")
@@ -48,13 +32,6 @@ if (WITH_TCMALLOC)
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${TCMALLOCMINIMAL}")
endif ()
-
-# set library options
-include(${CMAKE_SOURCE_DIR}/../cmake/conan_cmake.cmake)
-install_packages_via_conan("${CMAKE_SOURCE_DIR}/../conanfile.py" "with_exec_deps=True ")
-
-add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/.. ${CMAKE_CURRENT_BINARY_DIR}/libtentris)
-
add_subdirectory(tentris-server)
add_subdirectory(tentris-loader)
add_subdirectory(tools)
diff --git a/execs/tentris-loader/src/dice/tentris-loader/TentrisLoader.cpp b/execs/tentris-loader/src/dice/tentris-loader/TentrisLoader.cpp
index 98318eae..94c65bb9 100644
--- a/execs/tentris-loader/src/dice/tentris-loader/TentrisLoader.cpp
+++ b/execs/tentris-loader/src/dice/tentris-loader/TentrisLoader.cpp
@@ -87,65 +87,70 @@ int main(int argc, char *argv[]) {
spdlog::info(version);
spdlog::flush_every(std::chrono::seconds{5});
- // init storage
- {
+ fs::path ttl_file(parsed_args["file"].as());
+
+ { // init storage
metall_manager{metall::create_only, storage_path.c_str()};
}
- metall_manager storage_manager{metall::open_only, storage_path.c_str()};
- // set up node store
- {
- using namespace rdf4cpp::rdf::storage::node;
- using namespace dice::node_store;
- auto *nodestore_backend = storage_manager.find_or_construct("node-store")(storage_manager.get_allocator());
- NodeStorage::default_instance(
- NodeStorage::new_instance(nodestore_backend));
- }
- // setup triple store
- auto &ht_context = *storage_manager.find_or_construct("hypertrie-context")(storage_manager.get_allocator());
- auto &rdf_tensor = *storage_manager.find_or_construct("rdf-tensor")(3, rdf_tensor::HypertrieContext_ptr{&ht_context});
- triple_store::TripleStore triplestore{rdf_tensor};
- fs::path ttl_file(parsed_args["file"].as());
- {// load data
- spdlog::info("Loading triples from file {}.", fs::absolute(ttl_file).string());
- spdlog::stopwatch loading_time;
- spdlog::stopwatch batch_loading_time;
- size_t total_processed_entries = 0;
- size_t total_inserted_entries = 0;
- size_t final_hypertrie_size_after = 0;
+ { // load
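+ // keep the read-write metall manager open only while loading; it is closed when this scope ends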
+ metall_manager storage_manager{metall::open_only, storage_path.c_str()};
+ // set up node store
+ {
+ using namespace rdf4cpp::rdf::storage::node;
+ using namespace dice::node_store;
+ auto *nodestore_backend = storage_manager.find_or_construct("node-store")(storage_manager.get_allocator());
+ NodeStorage::set_default_instance(
+ NodeStorage::new_instance(nodestore_backend));
+ }
+ // setup triple store
+ auto &ht_context = *storage_manager.find_or_construct("hypertrie-context")(storage_manager.get_allocator());
+ auto &rdf_tensor = *storage_manager.find_or_construct("rdf-tensor")(3, rdf_tensor::HypertrieContext_ptr{&ht_context});
+ triple_store::TripleStore triplestore{rdf_tensor};
+
+ {// load data
+ spdlog::info("Loading triples from file {}.", fs::absolute(ttl_file).string());
+ spdlog::stopwatch loading_time;
+ spdlog::stopwatch batch_loading_time;
+ size_t total_processed_entries = 0;
+ size_t total_inserted_entries = 0;
+ size_t final_hypertrie_size_after = 0;
- triplestore.load_ttl(
- parsed_args["file"].as(),
- parsed_args["bulksize"].as(),
- [&](size_t processed_entries,
- size_t inserted_entries,
- size_t hypertrie_size_after) noexcept {
- std::chrono::duration batch_duration = batch_loading_time.elapsed();
- spdlog::info("batch: {:>10.3} mio triples processed, {:>10.3} mio triples added, {} elapsed, {:>10.3} mio triples in storage.",
- (double(processed_entries) / 1'000'000),
- (double(inserted_entries) / 1'000'000),
- (batch_duration.count()),
- (double(hypertrie_size_after) / 1'000'000));
- total_processed_entries = processed_entries;
- total_inserted_entries = inserted_entries;
- final_hypertrie_size_after = hypertrie_size_after;
- batch_loading_time.reset();
- },
- [](rdf_tensor::parser::ParsingError const &error) noexcept {
- std::ostringstream oss;
- oss << error;
- spdlog::warn(oss.str());// spdlog does not want to use the ostream operator for ParsingError
- });
- spdlog::info("loading finished: {} triples processed, {} triples added, {} elapsed, {} triples in storage.",
- total_processed_entries, total_inserted_entries, std::chrono::duration(loading_time.elapsed()).count(), final_hypertrie_size_after);
- const auto cards = triplestore.get_hypertrie().get_cards({0, 1, 2});
- spdlog::info("Storage stats: {} triples ({} distinct subjects, {} distinct predicates, {} distinct objects)",
- triplestore.size(), cards[0], cards[1], cards[2]);
+ triplestore.load_ttl(
+ parsed_args["file"].as(),
+ parsed_args["bulksize"].as(),
+ [&](size_t processed_entries,
+ size_t inserted_entries,
+ size_t hypertrie_size_after) noexcept {
+ std::chrono::duration batch_duration = batch_loading_time.elapsed();
+ spdlog::info("batch: {:>10.3} mio triples processed, {:>10.3} mio triples added, {} elapsed, {:>10.3} mio triples in storage.",
+ (double(processed_entries) / 1'000'000),
+ (double(inserted_entries) / 1'000'000),
+ (batch_duration.count()),
+ (double(hypertrie_size_after) / 1'000'000));
+ total_processed_entries = processed_entries;
+ total_inserted_entries = inserted_entries;
+ final_hypertrie_size_after = hypertrie_size_after;
+ batch_loading_time.reset();
+ },
+ [](rdf_tensor::parser::ParsingError const &error) noexcept {
+ std::ostringstream oss;
+ oss << error;
+ spdlog::warn(oss.str());// spdlog does not want to use the ostream operator for ParsingError
+ });
+ spdlog::info("loading finished: {} triples processed, {} triples added, {} elapsed, {} triples in storage.",
+ total_processed_entries, total_inserted_entries, std::chrono::duration(loading_time.elapsed()).count(), final_hypertrie_size_after);
+ const auto cards = triplestore.get_hypertrie().get_cards({0, 1, 2});
+ spdlog::info("Storage stats: {} triples ({} distinct subjects, {} distinct predicates, {} distinct objects)",
+ triplestore.size(), cards[0], cards[1], cards[2]);
+ }
}
- // create snapshot
- spdlog::info("Creating snapshot: {}_snapshot", storage_path.string());
- auto snapshot_path = fs::absolute(storage_path.string().append("_snapshot"));
- storage_manager.snapshot(snapshot_path.c_str());
- spdlog::info("Finished loading: {}.", ttl_file.string());
+ { // create snapshot
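+ // reopen the datastore read-only to take the snapshot, after the read-write manager has been closed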
+ metall_manager storage_manager{metall::open_read_only, storage_path.c_str()};
+ spdlog::info("Creating snapshot: {}_snapshot", storage_path.string());
+ auto snapshot_path = fs::absolute(storage_path.string().append("_snapshot"));
+ storage_manager.snapshot(snapshot_path.c_str());
+ spdlog::info("Finished loading: {}.", ttl_file.string());
+ }
}
\ No newline at end of file
diff --git a/execs/tentris-server/src/dice/tentris-server/TentrisServer.cpp b/execs/tentris-server/src/dice/tentris-server/TentrisServer.cpp
index 3f391afa..bde98a04 100644
--- a/execs/tentris-server/src/dice/tentris-server/TentrisServer.cpp
+++ b/execs/tentris-server/src/dice/tentris-server/TentrisServer.cpp
@@ -1,3 +1,5 @@
+#define _LARGEFILE64_SOURCE
+
#include
#include
@@ -93,7 +95,12 @@ int main(int argc, char *argv[]) {
const endpoint::EndpointCfg endpoint_cfg{
.port = parsed_args["port"].as(),
.threads = parsed_args["threads"].as(),
- .timeout_duration = std::chrono::seconds{parsed_args["timeout"].as()}};
+ .opt_timeout_duration = [&parsed_args]() -> std::optional {
+ auto const arg = parsed_args["timeout"].as();
+ if (arg == 0U)
+ return std::nullopt;
+ return std::chrono::seconds{arg};
+ }()};
using metall_manager = rdf_tensor::metall_manager;
@@ -122,7 +129,7 @@ int main(int argc, char *argv[]) {
using namespace rdf4cpp::rdf::storage::node;
using namespace dice::node_store;
auto *nodestore_backend = storage_manager.find_or_construct("node-store")(storage_manager.get_allocator());
- NodeStorage::default_instance(
+ NodeStorage::set_default_instance(
NodeStorage::new_instance(nodestore_backend));
}
@@ -145,7 +152,7 @@ int main(int argc, char *argv[]) {
spdlog::info("Storage stats: {} triples ({} distinct subjects, {} distinct predicates, {} distinct objects)",
triplestore.size(), cards[0], cards[1], cards[2]);
spdlog::info("SPARQL endpoint serving sparkling linked data treasures on {} threads at http://0.0.0.0:{}/ with {} request timeout.",
- endpoint_cfg.threads, endpoint_cfg.port, endpoint_cfg.timeout_duration);
+ endpoint_cfg.threads, endpoint_cfg.port, endpoint_cfg.opt_timeout_duration.value());
// start http server
http_server();
@@ -154,4 +161,4 @@ int main(int argc, char *argv[]) {
// warping up node storage
spdlog::info("Shutdown successful.");
return EXIT_SUCCESS;
-}
+}
\ No newline at end of file
diff --git a/execs/tools/deduplicated-nt/src/dice/tools/deduplicated_nt/DeduplicatedNT.cpp b/execs/tools/deduplicated-nt/src/dice/tools/deduplicated_nt/DeduplicatedNT.cpp
index 07221436..802e4b8d 100644
--- a/execs/tools/deduplicated-nt/src/dice/tools/deduplicated_nt/DeduplicatedNT.cpp
+++ b/execs/tools/deduplicated-nt/src/dice/tools/deduplicated_nt/DeduplicatedNT.cpp
@@ -82,7 +82,7 @@ int main(int argc, char *argv[]) {
}
dice::sparse_map::sparse_set deduplication;
- for (rdf4cpp::rdf::parser::IStreamQuadIterator qit{ifs}; qit != rdf4cpp::rdf::parser::IStreamQuadIterator{}; ++qit) {
+ for (rdf4cpp::rdf::parser::IStreamQuadIterator qit{ifs}; qit != std::default_sentinel; ++qit) {
if (qit->has_value()) {
auto const &quad = qit->value();
auto const hash = hash::dice_hash_templates::dice_hash(std::array{
diff --git a/execs/tools/rdf2ids/src/dice/tools/rdf2ids/RDF2IDs.cpp b/execs/tools/rdf2ids/src/dice/tools/rdf2ids/RDF2IDs.cpp
index bf669856..c8843b08 100644
--- a/execs/tools/rdf2ids/src/dice/tools/rdf2ids/RDF2IDs.cpp
+++ b/execs/tools/rdf2ids/src/dice/tools/rdf2ids/RDF2IDs.cpp
@@ -85,7 +85,7 @@ int main(int argc, char *argv[]) {
dice::sparse_map::sparse_set deduplication;
bool const deduplicate = parsed_args["distinct"].as();
- for (rdf4cpp::rdf::parser::IStreamQuadIterator qit{ifs}; qit != rdf4cpp::rdf::parser::IStreamQuadIterator{}; ++qit) {
+ for (rdf4cpp::rdf::parser::IStreamQuadIterator qit{ifs}; qit != std::default_sentinel; ++qit) {
if (qit->has_value()) {
auto const &quad = qit->value();
std::array const id_triple{
diff --git a/libs/CMakeLists.txt b/libs/CMakeLists.txt
index 35f7663c..d8dc8919 100644
--- a/libs/CMakeLists.txt
+++ b/libs/CMakeLists.txt
@@ -1,7 +1,10 @@
+cmake_minimum_required(VERSION 3.21)
add_subdirectory(rdf-tensor)
add_subdirectory(node-store)
add_subdirectory(sparql2tensor)
add_subdirectory(triple-store)
add_subdirectory(endpoint)
add_subdirectory(tentris)
-install_package()
\ No newline at end of file
+
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_package()
diff --git a/libs/endpoint/CMakeLists.txt b/libs/endpoint/CMakeLists.txt
index f27e60cf..ae06c468 100644
--- a/libs/endpoint/CMakeLists.txt
+++ b/libs/endpoint/CMakeLists.txt
@@ -1,42 +1,40 @@
+cmake_minimum_required(VERSION 3.21)
+set(lib_suffix "endpoint")
+set(lib "${PROJECT_NAME}-${lib_suffix}")
+
find_package(Taskflow REQUIRED)
find_package(restinio REQUIRED)
find_package(spdlog REQUIRED)
find_package(cppitertools REQUIRED)
find_package(RapidJSON REQUIRED)
-add_library(endpoint
+add_library(${lib}
src/dice/endpoint/HTTPServer.cpp
src/dice/endpoint/SparqlEndpoint.cpp
src/dice/endpoint/CountEndpoint.cpp
src/dice/endpoint/SparqlStreamingEndpoint.cpp
src/dice/endpoint/SparqlQueryCache.cpp
+ src/dice/endpoint/Endpoint.cpp
)
-add_library(tentris::endpoint ALIAS endpoint)
+add_library(${PROJECT_NAME}::${lib_suffix} ALIAS ${lib})
-target_include_directories(endpoint PUBLIC
+target_include_directories(${lib}
+ PUBLIC
$
PRIVATE
${CMAKE_CURRENT_SOURCE_DIR}/private-include
)
-set_target_properties(endpoint PROPERTIES
- VERSION ${PROJECT_VERSION}
- SOVERSION ${PROJECT_VERSION_MAJOR}
- CXX_STANDARD 20
- CXX_STANDARD_REQUIRED YES
- CXX_EXTENSIONS NO
- )
-
-target_link_libraries(endpoint PUBLIC
- tentris::triple-store
- tentris::node-store
+target_link_libraries(${lib} PUBLIC
+ ${PROJECT_NAME}::triple-store
+ ${PROJECT_NAME}::node-store
restinio::restinio
Taskflow::Taskflow
PRIVATE
spdlog::spdlog
cppitertools::cppitertools
- RapidJSON::RapidJSON
+ rapidjson
)
-include(${PROJECT_SOURCE_DIR}/cmake/install_library.cmake)
-install_component(endpoint src)
\ No newline at end of file
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_component(PUBLIC ${lib_suffix} src)
diff --git a/libs/endpoint/private-include/dice/endpoint/CountEndpoint.hpp b/libs/endpoint/private-include/dice/endpoint/CountEndpoint.hpp
new file mode 100644
index 00000000..f3b1cf63
--- /dev/null
+++ b/libs/endpoint/private-include/dice/endpoint/CountEndpoint.hpp
@@ -0,0 +1,16 @@
+#ifndef TENTRIS_COUNTENDPOINT_HPP
+#define TENTRIS_COUNTENDPOINT_HPP
+
+#include <dice/endpoint/Endpoint.hpp>
+
+namespace dice::endpoint {
+
+ class CountEndpoint final : public Endpoint {
+ public:
+ CountEndpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, EndpointCfg const &endpoint_cfg);
+
+ protected:
+ void handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) override;
+ };
+}// namespace dice::endpoint
+#endif//TENTRIS_COUNTENDPOINT_HPP
\ No newline at end of file
diff --git a/libs/endpoint/private-include/dice/endpoint/Endpoint.hpp b/libs/endpoint/private-include/dice/endpoint/Endpoint.hpp
new file mode 100644
index 00000000..4a6441aa
--- /dev/null
+++ b/libs/endpoint/private-include/dice/endpoint/Endpoint.hpp
@@ -0,0 +1,39 @@
+#ifndef ENDPOINT_HPP
+#define ENDPOINT_HPP
+
+#ifndef _LARGEFILE64_SOURCE
+#define _LARGEFILE64_SOURCE
+#endif
+
+#define nsel_CONFIG_SELECT_EXPECTED nsel_EXPECTED_NONSTD
+#include
+#include
+
+#include
+
+#include
+#include
+
+namespace dice::endpoint {
+
+ class Endpoint {
+ protected:
+ tf::Executor &executor_;
+
+ triple_store::TripleStore &triplestore_;
+
+ SparqlQueryCache &sparql_query_cache_;
+
+ EndpointCfg const cfg_;
+
+ protected:
+ virtual void handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) = 0;
+
+ public:
+ Endpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, EndpointCfg const &endpoint_cfg);
+ virtual ~Endpoint() = default;
+ virtual restinio::request_handling_status_t operator()(restinio::request_handle_t req, restinio::router::route_params_t params) final;
+ };
+}// namespace dice::endpoint
+
+#endif//ENDPOINT_HPP
\ No newline at end of file
diff --git a/libs/endpoint/private-include/dice/endpoint/ParseSPARQLUpdateParam.hpp b/libs/endpoint/private-include/dice/endpoint/ParseSPARQLUpdateParam.hpp
deleted file mode 100644
index 52bdf5c4..00000000
--- a/libs/endpoint/private-include/dice/endpoint/ParseSPARQLUpdateParam.hpp
+++ /dev/null
@@ -1,41 +0,0 @@
-#ifndef TENTRIS_PARSESPARQLUPDATEPARAM_HPP
-#define TENTRIS_PARSESPARQLUPDATEPARAM_HPP
-
-#include
-
-#include
-#include
-#include
-
-#include
-
-
-namespace dice::endpoint {
-
- inline sparql2tensor::UPDATEDATAQueryData parse_sparql_update_param(restinio::request_handle_t &req) {
- using namespace dice::sparql2tensor;
- using namespace restinio;
- auto content_type = req->header().opt_value_of(http_field::content_type);
- auto content_type_value = http_field_parsers::content_type_value_t::try_parse(*content_type);
- if (not content_type_value.has_value() or
- content_type_value.value().media_type.type != "application" or
- content_type_value.value().media_type.subtype != "sparql-update") {
- throw std::runtime_error("Expected content-type: application/sparql-update");
- }
- std::string sparql_update_str{req->body()};
- try {
- auto update_query = UPDATEDATAQueryData::parse(sparql_update_str);
- return update_query;
- } catch (std::exception &ex) {
- static constexpr auto message = "Value of parameter 'update' is not parsable: ";
- throw std::runtime_error{std::string{message} + ex.what()};
- } catch (...) {
- static constexpr auto message = "Unknown error";
- throw std::runtime_error{message};
- }
- }
-
-}// namespace dice::endpoint
-
-
-#endif//TENTRIS_PARSESPARQLUPDATEPARAM_HPP
diff --git a/libs/endpoint/private-include/dice/endpoint/SparqlEndpoint.hpp b/libs/endpoint/private-include/dice/endpoint/SparqlEndpoint.hpp
new file mode 100644
index 00000000..ab6c7cab
--- /dev/null
+++ b/libs/endpoint/private-include/dice/endpoint/SparqlEndpoint.hpp
@@ -0,0 +1,17 @@
+#ifndef TENTRIS_SPARQLENDPOINT_HPP
+#define TENTRIS_SPARQLENDPOINT_HPP
+
+#include <dice/endpoint/Endpoint.hpp>
+
+namespace dice::endpoint {
+
+ class SPARQLEndpoint final : public Endpoint {
+ public:
+ SPARQLEndpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, EndpointCfg const &endpoint_cfg);
+
+ protected:
+ void handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) override;
+ };
+
+}// namespace dice::endpoint
+#endif//TENTRIS_SPARQLENDPOINT_HPP
\ No newline at end of file
diff --git a/libs/endpoint/private-include/dice/endpoint/SparqlJsonResultSAXWriter.hpp b/libs/endpoint/private-include/dice/endpoint/SparqlJsonResultSAXWriter.hpp
index a02c81a3..3a78b1f4 100644
--- a/libs/endpoint/private-include/dice/endpoint/SparqlJsonResultSAXWriter.hpp
+++ b/libs/endpoint/private-include/dice/endpoint/SparqlJsonResultSAXWriter.hpp
@@ -84,12 +84,12 @@ namespace dice::endpoint {
if (term.is_iri()) {
writer.String("uri");
writer.Key("value");
- auto const &identifier = ((IRI) term).identifier();
+ auto const &identifier = term.as_iri().identifier();
writer.String(identifier.data(), identifier.size());
} else if (term.is_literal()) {
writer.String("literal");
- auto literal = (Literal) term;
+ auto literal = term.as_literal();
static const IRI xsd_str{"http://www.w3.org/2001/XMLSchema#string"};
auto datatype = literal.datatype();
@@ -109,7 +109,7 @@ namespace dice::endpoint {
} else if (term.is_blank_node()) {
writer.String("bnode");
writer.Key("value");
- auto const &identifier = ((BlankNode) term).identifier();
+ auto const &identifier = term.as_blank_node().identifier();
writer.String(identifier.data(), identifier.size());
} else {
throw std::runtime_error("Node with incorrect type (none of Literal, BNode, URI) detected.");
diff --git a/libs/endpoint/private-include/dice/endpoint/SparqlStreamingEndpoint.hpp b/libs/endpoint/private-include/dice/endpoint/SparqlStreamingEndpoint.hpp
new file mode 100644
index 00000000..6cda1bac
--- /dev/null
+++ b/libs/endpoint/private-include/dice/endpoint/SparqlStreamingEndpoint.hpp
@@ -0,0 +1,18 @@
+#ifndef TENTRIS_SPARQLSTREAMINGENDPOINT_HPP
+#define TENTRIS_SPARQLSTREAMINGENDPOINT_HPP
+
+#include <dice/endpoint/Endpoint.hpp>
+
+namespace dice::endpoint {
+
+ class SPARQLStreamingEndpoint final : public Endpoint {
+
+ public:
+ SPARQLStreamingEndpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, EndpointCfg const &endpoint_cfg);
+
+ protected:
+ void handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) override;
+ };
+}// namespace dice::endpoint
+
+#endif//TENTRIS_SPARQLSTREAMINGENDPOINT_HPP
\ No newline at end of file
diff --git a/libs/endpoint/private-include/dice/endpoint/TimeoutCheck.hpp b/libs/endpoint/private-include/dice/endpoint/TimeoutCheck.hpp
new file mode 100644
index 00000000..ee62bd8b
--- /dev/null
+++ b/libs/endpoint/private-include/dice/endpoint/TimeoutCheck.hpp
@@ -0,0 +1,13 @@
+#ifndef TIMEOUTCHECK_HPP
+#define TIMEOUTCHECK_HPP
+
+#include <chrono>
+#include <stdexcept>
+
+namespace dice::endpoint {
+ inline void check_timeout(std::chrono::steady_clock::time_point timeout) {
+ if (timeout <= std::chrono::steady_clock::now())
+ throw std::runtime_error{"timeout reached"};
+ }
+}// namespace dice::endpoint
+#endif//TIMEOUTCHECK_HPP
\ No newline at end of file
diff --git a/libs/endpoint/src/dice/endpoint/CountEndpoint.cpp b/libs/endpoint/src/dice/endpoint/CountEndpoint.cpp
index ef0078cb..2097faae 100644
--- a/libs/endpoint/src/dice/endpoint/CountEndpoint.cpp
+++ b/libs/endpoint/src/dice/endpoint/CountEndpoint.cpp
@@ -1,51 +1,29 @@
-#include "CountEndpoint.hpp"
-
+#include "dice/endpoint/CountEndpoint.hpp"
#include
-#include "dice/endpoint/ParseSPARQLQueryParam.hpp"
-#include "dice/endpoint/SparqlJsonResultSAXWriter.hpp"
+#include
namespace dice::endpoint {
- CountEndpoint::CountEndpoint(tf::Executor &executor,
- triple_store::TripleStore &triplestore,
- SparqlQueryCache &sparql_query_cache,
- std::chrono::seconds timeoutDuration)
- : executor_(executor),
- triplestore_(triplestore),
- sparql_query_cache_(sparql_query_cache),
- timeout_duration_(timeoutDuration) {}
- restinio::request_handling_status_t CountEndpoint::operator()(
- restinio::request_handle_t req,
- [[maybe_unused]] restinio::router::route_params_t params) {
- auto timeout = (timeout_duration_.count()) ? std::chrono::steady_clock::now() + this->timeout_duration_ : std::chrono::steady_clock::time_point::max();
- if (executor_.num_topologies() < executor_.num_workers()) {
- executor_.silent_async([this, timeout](restinio::request_handle_t req) {
- using namespace dice::sparql2tensor;
- using namespace restinio;
+ CountEndpoint::CountEndpoint(tf::Executor &executor,
+ triple_store::TripleStore &triplestore,
+ SparqlQueryCache &sparql_query_cache,
+ EndpointCfg const &endpoint_cfg)
+ : Endpoint(executor, triplestore, sparql_query_cache, endpoint_cfg) {}
+
+ void CountEndpoint::handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) {
+ using namespace dice::sparql2tensor;
+ using namespace restinio;
- std::shared_ptr sparql_query = parse_sparql_query_param(req, this->sparql_query_cache_);
- if (not sparql_query)
- return;
+ auto sparql_query = parse_sparql_query_param(req, this->sparql_query_cache_);
+ if (not sparql_query)
+ return;
- try {
- size_t count = this->triplestore_.count(*sparql_query, timeout);
+ auto const count = this->triplestore_.count(*sparql_query, timeout);
- req->create_response(status_ok())
- .set_body(fmt::format("{}", count))
- .done();
- spdlog::info("HTTP response {}: counted {} results", status_ok(), count);
- } catch (std::runtime_error const &timeout_exception) {
- const auto timeout_message = fmt::format("Request processing timed out after {}.", this->timeout_duration_);
- spdlog::warn("HTTP response {}: {}", status_gateway_time_out(), timeout_message);
- req->create_response(status_gateway_time_out()).set_body(timeout_message).done();
- }
- },
- std::move(req));
- return restinio::request_accepted();
- } else {
- spdlog::warn("Handling request was rejected. All workers are busy.");
- return restinio::request_rejected();
- }
- }
+ req->create_response(status_ok())
+ .set_body(fmt::format("{}", count))
+ .done();
+ spdlog::info("HTTP response {}: counted {} results", status_ok(), count);
+ }
}// namespace dice::endpoint
\ No newline at end of file
diff --git a/libs/endpoint/src/dice/endpoint/CountEndpoint.hpp b/libs/endpoint/src/dice/endpoint/CountEndpoint.hpp
deleted file mode 100644
index 914f416b..00000000
--- a/libs/endpoint/src/dice/endpoint/CountEndpoint.hpp
+++ /dev/null
@@ -1,33 +0,0 @@
-#ifndef TENTRIS_COUNTENDPOINT_HPP
-#define TENTRIS_COUNTENDPOINT_HPP
-
-#include
-#include
-
-#include
-#include
-
-#include
-
-namespace dice::endpoint {
-
- class CountEndpoint {
-
- tf::Executor &executor_;
-
- triple_store::TripleStore &triplestore_;
-
- SparqlQueryCache &sparql_query_cache_;
-
- std::chrono::seconds timeout_duration_;
-
- public:
- CountEndpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, std::chrono::seconds timeoutDuration);
-
- restinio::request_handling_status_t operator()(
- restinio::request_handle_t req,
- restinio::router::route_params_t params);
- };
-
-}// namespace dice::endpoint
-#endif//TENTRIS_COUNTENDPOINT_HPP
diff --git a/libs/endpoint/src/dice/endpoint/Endpoint.cpp b/libs/endpoint/src/dice/endpoint/Endpoint.cpp
new file mode 100644
index 00000000..72f27f0b
--- /dev/null
+++ b/libs/endpoint/src/dice/endpoint/Endpoint.cpp
@@ -0,0 +1,42 @@
+#include "dice/endpoint/Endpoint.hpp"
+
+namespace dice::endpoint {
+ Endpoint::Endpoint(tf::Executor &executor,
+ triple_store::TripleStore &triplestore,
+ SparqlQueryCache &sparql_query_cache,
+ EndpointCfg const &endpoint_cfg)
+ : executor_{executor},
+ triplestore_{triplestore},
+ sparql_query_cache_{sparql_query_cache},
+ cfg_{endpoint_cfg} {}// endpoint
+
+
+ restinio::request_handling_status_t Endpoint::operator()(
+ restinio::request_handle_t req,
+ [[maybe_unused]] restinio::router::route_params_t params) {
+ auto const timeout = (cfg_.opt_timeout_duration)
+ ? std::chrono::steady_clock::now() + cfg_.opt_timeout_duration.value()
+ : std::chrono::steady_clock::time_point::max();
+ if (executor_.num_topologies() < executor_.num_workers()) {
+ executor_.silent_async([req = std::move(req), this, timeout]() mutable {
+ try {
+ this->handle_query(std::move(req), timeout);
+ } catch (std::runtime_error const &) {
+ const auto timeout_message = fmt::format("Request processing timed out after {}.",
+ this->cfg_.opt_timeout_duration.value());
+ spdlog::warn("HTTP response {}: {}", restinio::status_gateway_time_out(), timeout_message);
+ req->create_response(restinio::status_gateway_time_out())
+ .connection_close()
+ .set_body(timeout_message)
+ .done();
+ }
+ });
+ spdlog::debug("Request was accepted.");
+ return restinio::request_accepted();
+ } else {
+ spdlog::warn("Handling request was rejected. All workers are busy.");
+ return restinio::request_rejected();
+ }
+ }
+
+}// namespace dice::endpoint
\ No newline at end of file
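The refactoring above is essentially the template-method pattern: the new `Endpoint` base class owns deadline computation, worker dispatch, and the timeout-to-504 translation, while the concrete endpoints only implement `handle_query`. A simplified, std-only sketch of that shape (restinio and taskflow types are replaced with placeholders; this is not the actual class layout):

```cpp
#include <chrono>
#include <iostream>
#include <optional>
#include <stdexcept>

struct Request {};  // stand-in for restinio::request_handle_t

class Endpoint {
    std::optional<std::chrono::seconds> opt_timeout_duration_;

public:
    explicit Endpoint(std::optional<std::chrono::seconds> timeout) : opt_timeout_duration_{timeout} {}
    virtual ~Endpoint() = default;

    // shared skeleton: compute deadline, run the query, translate timeouts
    void operator()(Request req) {
        auto const timeout = opt_timeout_duration_
                                     ? std::chrono::steady_clock::now() + *opt_timeout_duration_
                                     : std::chrono::steady_clock::time_point::max();
        try {
            handle_query(std::move(req), timeout);
        } catch (std::runtime_error const &) {
            std::cout << "504 Gateway Timeout\n";  // the real code builds a restinio response here
        }
    }

protected:
    // each concrete endpoint (SPARQL, streaming, count) supplies only this part
    virtual void handle_query(Request req, std::chrono::steady_clock::time_point timeout) = 0;
};

struct CountLikeEndpoint : Endpoint {
    using Endpoint::Endpoint;
    void handle_query(Request, std::chrono::steady_clock::time_point) override {
        std::cout << "200 OK: 42\n";  // placeholder result
    }
};

int main() {
    CountLikeEndpoint ep{std::chrono::seconds{180}};
    ep(Request{});
}
```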
diff --git a/libs/endpoint/src/dice/endpoint/EndpointCfg.hpp b/libs/endpoint/src/dice/endpoint/EndpointCfg.hpp
new file mode 100644
index 00000000..d28c1264
--- /dev/null
+++ b/libs/endpoint/src/dice/endpoint/EndpointCfg.hpp
@@ -0,0 +1,16 @@
+#ifndef ENDOINTCFG_HPP
+#define ENDOINTCFG_HPP
+
+#include
+#include
+#include
+
+namespace dice::endpoint {
+
+ struct EndpointCfg {
+ uint16_t port;
+ uint16_t threads;
+ std::optional<std::chrono::seconds> opt_timeout_duration;
+ };
+}// namespace dice::endpoint
+#endif//ENDOINTCFG_HPP
\ No newline at end of file
diff --git a/libs/endpoint/src/dice/endpoint/HTTPServer.cpp b/libs/endpoint/src/dice/endpoint/HTTPServer.cpp
index 596a7ab8..1077fddc 100644
--- a/libs/endpoint/src/dice/endpoint/HTTPServer.cpp
+++ b/libs/endpoint/src/dice/endpoint/HTTPServer.cpp
@@ -1,16 +1,46 @@
#include "HTTPServer.hpp"
-#include "dice/endpoint/CountEndpoint.hpp"
-#include "dice/endpoint/SparqlEndpoint.hpp"
-#include "dice/endpoint/SparqlStreamingEndpoint.hpp"
+#include
+#include
+#include
+
+#include
+#include
+#include
#include
+volatile sig_atomic_t signalReceived = 0;// Flag to indicate a signal was received
+
namespace dice::endpoint {
+ class restinio_spd_logger_t {
+ public:
+ template<typename Msg_Builder>
+ void trace(Msg_Builder &&mb) {
+ spdlog::trace(mb());
+ }
+
+ template<typename Msg_Builder>
+ void info(Msg_Builder &&mb) {
+ spdlog::info(mb());
+ }
+
+ template<typename Msg_Builder>
+ void warn(Msg_Builder &&mb) {
+ spdlog::warn(mb());
+ }
+
+
+ template<typename Msg_Builder>
+ void error(Msg_Builder &&mb) {
+ spdlog::error(mb());
+ }
+ };
+
struct tentris_restinio_traits : public restinio::traits_t<
- restinio::null_timer_manager_t,
- restinio::null_logger_t,
+ restinio::asio_timer_manager_t,
+ restinio_spd_logger_t,
restinio::router::express_router_t<>> {
static constexpr bool use_connection_count_limiter = true;
};
@@ -25,15 +55,15 @@ namespace dice::endpoint {
void HTTPServer::operator()() {
spdlog::info("Available endpoints:");
router_->http_get(R"(/sparql)",
- SPARQLEndpoint{executor_, triplestore_, sparql_query_cache_, cfg_.timeout_duration});
+ SPARQLEndpoint{executor_, triplestore_, sparql_query_cache_, cfg_});
spdlog::info(" GET /sparql?query= for normal queries");
router_->http_get(R"(/stream)",
- SPARQLStreamingEndpoint{executor_, triplestore_, sparql_query_cache_, cfg_.timeout_duration});
+ SPARQLStreamingEndpoint{executor_, triplestore_, sparql_query_cache_, cfg_});
spdlog::info(" GET /stream?query= for queries with huge results");
router_->http_get(R"(/count)",
- CountEndpoint{executor_, triplestore_, sparql_query_cache_, cfg_.timeout_duration});
+ CountEndpoint{executor_, triplestore_, sparql_query_cache_, cfg_});
spdlog::info(" GET /count?query= as a workaround for count");
@@ -43,11 +73,48 @@ namespace dice::endpoint {
});
spdlog::info("Use Ctrl+C on the terminal or SIGINT to shut down tentris gracefully. If tentris is killed or crashes, the index files will be corrupted.");
- restinio::run(
- restinio::on_thread_pool(cfg_.threads)
- .max_parallel_connections(cfg_.threads)
- .address("0.0.0.0")
- .port(cfg_.port)
- .request_handler(std::move(router_)));
+ using namespace std::chrono;
+ auto const time_limit = (cfg_.opt_timeout_duration)
+ ? duration_cast<steady_clock::duration>(cfg_.opt_timeout_duration.value() * 0.95)
+ : steady_clock::duration::max();
+ auto const pool_size = std::max(4, cfg_.threads / 4);
+ auto server = restinio::run_async(restinio::own_io_context(),
+
+ restinio::server_settings_t<tentris_restinio_traits>{}
+ .max_parallel_connections(cfg_.threads)
+ .handle_request_timeout(time_limit)
+ .read_next_http_message_timelimit(seconds{1})
+ .write_http_response_timelimit(time_limit)
+ .max_pipelined_requests(1)
+ .address("0.0.0.0")
+ .port(cfg_.port)
+ .request_handler(std::move(router_))
+ .cleanup_func([this]() { this->executor_.wait_for_all(); }),
+ pool_size);
+
+
+ auto signal_handler = [](int signum) {
+ spdlog::info("Interrupt signal ({}) received.\n", signum);
+ signalReceived = signum;// Set the signal received flag
+ // Cleanup and close the program
+ // wake
+ };
+
+ // Define the signal handler structure
+ struct sigaction sa;
+ memset(&sa, 0, sizeof(sa));
+ sa.sa_handler = signal_handler;
+ sigemptyset(&sa.sa_mask);
+
+
+ // Register the signal handler for SIGINT (CTRL+C) and SIGHUP (terminal closure)
+ sigaction(SIGINT, &sa, nullptr);
+ sigaction(SIGHUP, &sa, nullptr);
+
+ while (!signalReceived) {
+ pause();
+ }
+ server->stop();
+ server->wait();
}
}// namespace dice::endpoint
\ No newline at end of file
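The new shutdown path replaces restinio's blocking `run` with `run_async` plus a `sigaction`-based wait loop: the main thread parks in `pause()` until SIGINT or SIGHUP sets a `volatile sig_atomic_t` flag, then stops and joins the server. A stripped-down POSIX sketch of just that loop (no restinio involved; handler and flag names mirror the diff but the rest is illustrative):

```cpp
#include <csignal>
#include <cstdio>
#include <cstring>
#include <unistd.h>

volatile sig_atomic_t signalReceived = 0;

extern "C" void on_signal(int signum) {
    signalReceived = signum;  // only async-signal-safe work inside the handler
}

int main() {
    struct sigaction sa;
    std::memset(&sa, 0, sizeof(sa));
    sa.sa_handler = on_signal;
    sigemptyset(&sa.sa_mask);

    sigaction(SIGINT, &sa, nullptr);   // Ctrl+C
    sigaction(SIGHUP, &sa, nullptr);   // terminal closed

    while (!signalReceived) {
        pause();                       // returns after any handled signal
    }
    std::printf("received signal %d, shutting down\n", (int) signalReceived);
    // the real code calls server->stop() and server->wait() here
}
```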
diff --git a/libs/endpoint/src/dice/endpoint/HTTPServer.hpp b/libs/endpoint/src/dice/endpoint/HTTPServer.hpp
index 5b5c756e..78cb3ab0 100644
--- a/libs/endpoint/src/dice/endpoint/HTTPServer.hpp
+++ b/libs/endpoint/src/dice/endpoint/HTTPServer.hpp
@@ -1,22 +1,21 @@
#ifndef TENTRIS_HTTPSERVER_HPP
#define TENTRIS_HTTPSERVER_HPP
+#ifndef _LARGEFILE64_SOURCE
+#define _LARGEFILE64_SOURCE
+#endif
+#define nsel_CONFIG_SELECT_EXPECTED nsel_EXPECTED_NONSTD
#include
#include
-#include
#include
+#include
#include
namespace dice::endpoint {
- struct EndpointCfg {
- uint16_t port;
- uint16_t threads;
- std::chrono::seconds timeout_duration;
- };
class HTTPServer {
tf::Executor &executor_;
@@ -28,7 +27,7 @@ namespace dice::endpoint {
public:
HTTPServer(tf::Executor &executor, triple_store::TripleStore &triplestore, EndpointCfg const &cfg);
- restinio::router::express_router_t<> &router(){
+ restinio::router::express_router_t<> &router() {
return *router_;
}
@@ -36,4 +35,4 @@ namespace dice::endpoint {
};
}// namespace dice::endpoint
-#endif//TENTRIS_HTTPSERVER_HPP
+#endif//TENTRIS_HTTPSERVER_HPP
\ No newline at end of file
diff --git a/libs/endpoint/src/dice/endpoint/SparqlEndpoint.cpp b/libs/endpoint/src/dice/endpoint/SparqlEndpoint.cpp
index ee49a731..01cb2e54 100644
--- a/libs/endpoint/src/dice/endpoint/SparqlEndpoint.cpp
+++ b/libs/endpoint/src/dice/endpoint/SparqlEndpoint.cpp
@@ -1,72 +1,54 @@
-#include "SparqlEndpoint.hpp"
+#include "dice/endpoint/SparqlEndpoint.hpp"
+
+#include
#include
-#include "dice/endpoint/ParseSPARQLQueryParam.hpp"
-#include "dice/endpoint/SparqlJsonResultSAXWriter.hpp"
+#include
+#include
namespace dice::endpoint {
- SPARQLEndpoint::SPARQLEndpoint(tf::Executor &executor,
- triple_store::TripleStore &triplestore,
- SparqlQueryCache &sparql_query_cache,
- std::chrono::seconds timeoutDuration)
- : executor_(executor),
- triplestore_(triplestore),
- sparql_query_cache_(sparql_query_cache),
- timeout_duration_(timeoutDuration) {}
-
- restinio::request_handling_status_t SPARQLEndpoint::operator()(
- restinio::request_handle_t req,
- [[maybe_unused]] restinio::router::route_params_t params) {
- auto timeout = (timeout_duration_.count()) ? std::chrono::steady_clock::now() + this->timeout_duration_ : std::chrono::steady_clock::time_point::max();
- if (executor_.num_topologies() < executor_.num_workers()) {
- executor_.silent_async([this, timeout](restinio::request_handle_t req) {
- using namespace dice::sparql2tensor;
- using namespace restinio;
-
- std::shared_ptr sparql_query = parse_sparql_query_param(req, this->sparql_query_cache_);
- if (not sparql_query)
- return;
-
- try {
- if (sparql_query->ask_) {
- bool ask_res = this->triplestore_.eval_ask(*sparql_query, timeout);
- std::string res = ask_res ? "true" : "false";
- req->create_response(status_ok())
- .append_header(http_field::content_type, "application/sparql-results+json")
- .set_body(R"({ "head" : {}, "boolean" : )" + res + " }")
- .done();
- } else {
- endpoint::SparqlJsonResultSAXWriter json_writer{sparql_query->projected_variables_, 100'000};
-
- for (auto const &entry : this->triplestore_.eval_select(*sparql_query, timeout)) {
- json_writer.add(entry);
- }
- json_writer.close();
-
- req->create_response(status_ok())
- .append_header(http_field::content_type, "application/sparql-results+json")
- .set_body(std::string{json_writer.string_view()})
- .done();
- spdlog::info("HTTP response {}: {} variables, {} solutions, {} bindings",
- status_ok(),
- sparql_query->projected_variables_.size(),
- json_writer.number_of_written_solutions(),
- json_writer.number_of_written_bindings());
- }
- } catch (std::runtime_error const &timeout_exception) {
- const auto timeout_message = fmt::format("Request processing timed out after {}.", this->timeout_duration_);
- spdlog::warn("HTTP response {}: {}", status_gateway_time_out(), timeout_message);
- req->create_response(status_gateway_time_out()).set_body(timeout_message).done();
- }
- },
- std::move(req));
- return restinio::request_accepted();
- } else {
- spdlog::warn("Handling request was rejected. All workers are busy.");
- return restinio::request_rejected();
- }
- }
+ SPARQLEndpoint::SPARQLEndpoint(tf::Executor &executor,
+ triple_store::TripleStore &triplestore,
+ SparqlQueryCache &sparql_query_cache,
+ EndpointCfg const &endpoint_cfg)
+ : Endpoint(executor, triplestore, sparql_query_cache, endpoint_cfg) {}
+
+ void SPARQLEndpoint::handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) {
+ using namespace dice::sparql2tensor;
+ using namespace restinio;
+
+ auto sparql_query = parse_sparql_query_param(req, this->sparql_query_cache_);
+ if (not sparql_query)
+ return;
+
+ if (sparql_query->ask_) {
+ bool ask_res = this->triplestore_.eval_ask(*sparql_query, timeout);
+ std::string res = ask_res ? "true" : "false";
+ req->create_response(status_ok())
+ .append_header(http_field::content_type, "application/sparql-results+json")
+ .set_body(R"({ "head" : {}, "boolean" : )" + res + " }")
+ .done();
+ } else {
+ SparqlJsonResultSAXWriter json_writer{sparql_query->projected_variables_, 100'000};
+
+ for (auto const &entry : this->triplestore_.eval_select(*sparql_query, timeout)) {
+ json_writer.add(entry);
+ }
+ json_writer.close();
+ check_timeout(timeout);
+
+ req->create_response(status_ok())
+ .append_header(http_field::content_type, "application/sparql-results+json")
+ .set_body(std::string{json_writer.string_view()})
+ .done();
+ spdlog::info("HTTP response {}: {} variables, {} solutions, {} bindings",
+ status_ok(),
+ sparql_query->projected_variables_.size(),
+ json_writer.number_of_written_solutions(),
+ json_writer.number_of_written_bindings());
+ }
+ }
}// namespace dice::endpoint
\ No newline at end of file
diff --git a/libs/endpoint/src/dice/endpoint/SparqlEndpoint.hpp b/libs/endpoint/src/dice/endpoint/SparqlEndpoint.hpp
deleted file mode 100644
index 37a180fa..00000000
--- a/libs/endpoint/src/dice/endpoint/SparqlEndpoint.hpp
+++ /dev/null
@@ -1,33 +0,0 @@
-#ifndef TENTRIS_SPARQLENDPOINT_HPP
-#define TENTRIS_SPARQLENDPOINT_HPP
-
-#include
-#include
-
-#include
-#include
-
-#include
-
-namespace dice::endpoint {
-
- class SPARQLEndpoint {
-
- tf::Executor &executor_;
-
- triple_store::TripleStore &triplestore_;
-
- SparqlQueryCache &sparql_query_cache_;
-
- std::chrono::seconds timeout_duration_;
-
- public:
- SPARQLEndpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, std::chrono::seconds timeoutDuration);
-
- restinio::request_handling_status_t operator()(
- restinio::request_handle_t req,
- restinio::router::route_params_t params);
- };
-
-}// namespace dice::endpoint
-#endif//TENTRIS_SPARQLENDPOINT_HPP
diff --git a/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.cpp b/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.cpp
index 8d121160..4227bf69 100644
--- a/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.cpp
+++ b/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.cpp
@@ -1,74 +1,53 @@
-#include "SparqlStreamingEndpoint.hpp"
+#include "dice/endpoint/SparqlStreamingEndpoint.hpp"
#include
-#include "dice/endpoint/ParseSPARQLQueryParam.hpp"
-#include "dice/endpoint/SparqlJsonResultSAXWriter.hpp"
+#include
+#include
namespace dice::endpoint {
- SPARQLStreamingEndpoint::SPARQLStreamingEndpoint(tf::Executor &executor,
- triple_store::TripleStore &triplestore,
- SparqlQueryCache &sparql_query_cache,
- std::chrono::seconds timeoutDuration)
- : executor_(executor),
- triplestore_(triplestore),
- sparql_query_cache_(sparql_query_cache),
- timeout_duration_(timeoutDuration) {
- }
- restinio::request_handling_status_t SPARQLStreamingEndpoint::operator()(
- restinio::request_handle_t req,
- [[maybe_unused]] restinio::router::route_params_t params) {
- auto timeout = (timeout_duration_.count()) ? std::chrono::steady_clock::now() + this->timeout_duration_ : std::chrono::steady_clock::time_point::max();
- if (executor_.num_topologies() < executor_.num_workers()) {
- executor_.silent_async([this, timeout](restinio::request_handle_t req) {
- using namespace dice::sparql2tensor;
- using namespace restinio;
-
- std::shared_ptr sparql_query = parse_sparql_query_param(req, this->sparql_query_cache_);
- if (not sparql_query)
- return;
-
- bool asio_write_failed = false;
-
- endpoint::SparqlJsonResultSAXWriter json_writer{sparql_query->projected_variables_, 100'000};
-
- response_builder_t resp = req->template create_response();
- resp.append_header(http_field::content_type, "application/sparql-results+json");
-
- try {
- for (auto const &entry : this->triplestore_.eval_select(*sparql_query, timeout)) {
- json_writer.add(entry);
- if (json_writer.full()) {
- resp.append_chunk(std::string{json_writer.string_view()});
- resp.flush([&](auto const &status) { asio_write_failed = status.failed(); });
- if (asio_write_failed) {
- spdlog::warn("Writing chunked HTTP response failed.");
- return;
- }
- json_writer.clear();
- }
- }
- json_writer.close();
- resp.append_chunk(std::string{json_writer.string_view()});
- resp.done();
- spdlog::info("HTTP response {}: {} variables, {} solutions, {} bindings",
- status_ok(),
- sparql_query->projected_variables_.size(),
- json_writer.number_of_written_solutions(),
- json_writer.number_of_written_bindings());
- } catch (std::runtime_error const &timeout_exception) {
- const auto timeout_message = fmt::format("Request processing timed out after {}.", this->timeout_duration_);
- spdlog::warn("HTTP response {}: {}", status_gateway_time_out(), timeout_message);
- req->create_response(status_gateway_time_out()).set_body(timeout_message).done();
- }
- },
- std::move(req));
- return restinio::request_accepted();
- } else {
- spdlog::warn("Handling request was rejected. All workers are busy.");
- return restinio::request_rejected();
- }
- }
+ SPARQLStreamingEndpoint::SPARQLStreamingEndpoint(tf::Executor &executor,
+ triple_store::TripleStore &triplestore,
+ SparqlQueryCache &sparql_query_cache,
+ EndpointCfg const &endpoint_cfg)
+ : Endpoint(executor, triplestore, sparql_query_cache, endpoint_cfg) {}
+
+ void SPARQLStreamingEndpoint::handle_query(restinio::request_handle_t req, std::chrono::steady_clock::time_point timeout) {
+ using namespace dice::sparql2tensor;
+ using namespace restinio;
+
+ std::shared_ptr sparql_query = parse_sparql_query_param(req, this->sparql_query_cache_);
+ if (not sparql_query)
+ return;
+
+ bool asio_write_failed = false;
+
+ SparqlJsonResultSAXWriter json_writer{sparql_query->projected_variables_, 100'000};
+
+ response_builder_t<chunked_output_t> resp = req->template create_response<chunked_output_t>();
+ resp.append_header(http_field::content_type, "application/sparql-results+json");
+
+ for (auto const &entry : this->triplestore_.eval_select(*sparql_query, timeout)) {
+ json_writer.add(entry);
+ if (json_writer.full()) {
+ resp.append_chunk(std::string{json_writer.string_view()});
+ resp.flush([&](auto const &status) { asio_write_failed = status.failed(); });
+ if (asio_write_failed) {
+ spdlog::warn("Writing chunked HTTP response failed.");
+ return;
+ }
+ json_writer.clear();
+ }
+ }
+ json_writer.close();
+ resp.append_chunk(std::string{json_writer.string_view()});
+ resp.done();
+ spdlog::info("HTTP response {}: {} variables, {} solutions, {} bindings",
+ status_ok(),
+ sparql_query->projected_variables_.size(),
+ json_writer.number_of_written_solutions(),
+ json_writer.number_of_written_bindings());
+ }
}// namespace dice::endpoint
\ No newline at end of file
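The streaming endpoint keeps the existing buffer-and-flush pattern: results are appended to the SAX writer until it reports `full()`, the buffered chunk is written out, and the buffer is cleared. A small std-only sketch of that pattern using a toy buffer (only `add`/`full`/`string_view`/`clear` mirror the real writer's interface; everything else is made up for illustration):

```cpp
#include <iostream>
#include <string>
#include <string_view>
#include <vector>

// toy stand-in for SparqlJsonResultSAXWriter: buffers entries, reports when full
class ChunkBuffer {
    std::string buf_;
    std::size_t soft_limit_;

public:
    explicit ChunkBuffer(std::size_t soft_limit) : soft_limit_{soft_limit} {}
    void add(std::string_view entry) { buf_.append(entry).push_back('\n'); }
    [[nodiscard]] bool full() const noexcept { return buf_.size() >= soft_limit_; }
    [[nodiscard]] std::string_view string_view() const noexcept { return buf_; }
    void clear() { buf_.clear(); }
};

int main() {
    std::vector<std::string> results(10, "binding");
    ChunkBuffer writer{32};  // the real endpoint uses 100'000 solutions as its threshold

    for (auto const &entry : results) {
        writer.add(entry);
        if (writer.full()) {                        // flush one chunk to the client
            std::cout << "chunk:\n" << writer.string_view();
            writer.clear();
        }
    }
    std::cout << "final chunk:\n" << writer.string_view();  // remainder after the loop
}
```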
diff --git a/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.hpp b/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.hpp
deleted file mode 100644
index 0fc11bf4..00000000
--- a/libs/endpoint/src/dice/endpoint/SparqlStreamingEndpoint.hpp
+++ /dev/null
@@ -1,32 +0,0 @@
-#ifndef TENTRIS_SPARQLSTREAMINGENDPOINT_HPP
-#define TENTRIS_SPARQLSTREAMINGENDPOINT_HPP
-
-#include
-#include
-
-#include
-#include
-
-#include
-
-namespace dice::endpoint {
-
- class SPARQLStreamingEndpoint {
- tf::Executor &executor_;
-
- triple_store::TripleStore &triplestore_;
-
- SparqlQueryCache &sparql_query_cache_;
-
- std::chrono::seconds timeout_duration_;
-
- public:
- SPARQLStreamingEndpoint(tf::Executor &executor, triple_store::TripleStore &triplestore, SparqlQueryCache &sparql_query_cache, std::chrono::seconds timeoutDuration);
-
- restinio::request_handling_status_t operator()(
- restinio::request_handle_t req,
- restinio::router::route_params_t params);
- };
-}// namespace dice::endpoint
-
-#endif//TENTRIS_SPARQLSTREAMINGENDPOINT_HPP
diff --git a/libs/node-store/CMakeLists.txt b/libs/node-store/CMakeLists.txt
index 5a09b3ba..b9d6c71e 100644
--- a/libs/node-store/CMakeLists.txt
+++ b/libs/node-store/CMakeLists.txt
@@ -1,12 +1,16 @@
+cmake_minimum_required(VERSION 3.21)
+set(lib_suffix "node-store")
+set(lib "${PROJECT_NAME}-${lib_suffix}")
# Find cmake packages
find_package(hypertrie REQUIRED)
find_package(robin_hood REQUIRED)
find_package(dice-hash REQUIRED)
find_package(Boost REQUIRED)
+find_package(rdf4cpp REQUIRED)
# Define the library
-add_library(node-store
+add_library(${lib}
src/dice/node-store/PersistentNodeStorageBackendImpl.cpp
src/dice/node-store/PersistentNodeStorageBackend.cpp
src/dice/node-store/MetallBNodeBackend.cpp
@@ -14,23 +18,16 @@ add_library(node-store
src/dice/node-store/MetallLiteralBackend.cpp
src/dice/node-store/MetallVariableBackend.cpp
)
-add_library(tentris::node-store ALIAS node-store)
+add_library(${PROJECT_NAME}::${lib_suffix} ALIAS ${lib})
-target_link_libraries(node-store PUBLIC
- tentris::rdf-tensor
+target_link_libraries(${lib} PUBLIC
+ ${PROJECT_NAME}::rdf-tensor
+ rdf4cpp::rdf4cpp
)
-target_include_directories(node-store PUBLIC
+target_include_directories(${lib} PUBLIC
$
)
-set_target_properties(node-store PROPERTIES
- VERSION ${PROJECT_VERSION}
- SOVERSION ${PROJECT_VERSION_MAJOR}
- CXX_STANDARD 20
- CXX_STANDARD_REQUIRED YES
- CXX_EXTENSIONS NO
- )
-
-include(${PROJECT_SOURCE_DIR}/cmake/install_library.cmake)
-install_component(node-store src)
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_component(PUBLIC ${lib_suffix} src)
diff --git a/libs/node-store/src/dice/node-store/MetallBNodeBackend.cpp b/libs/node-store/src/dice/node-store/MetallBNodeBackend.cpp
index 17d31f93..1bcdf7aa 100644
--- a/libs/node-store/src/dice/node-store/MetallBNodeBackend.cpp
+++ b/libs/node-store/src/dice/node-store/MetallBNodeBackend.cpp
@@ -12,6 +12,6 @@ namespace dice::node_store {
return identifier_;
}
MetallBNodeBackend::operator View() const noexcept {
- return {.identifier = identifier()};
+ return {.identifier = identifier(), .scope = nullptr};
}
}// namespace dice::node_store
\ No newline at end of file
diff --git a/libs/node-store/src/dice/node-store/MetallLiteralBackend.cpp b/libs/node-store/src/dice/node-store/MetallLiteralBackend.cpp
index 67d9b65e..e448b2f2 100644
--- a/libs/node-store/src/dice/node-store/MetallLiteralBackend.cpp
+++ b/libs/node-store/src/dice/node-store/MetallLiteralBackend.cpp
@@ -2,17 +2,12 @@
#include
#include
namespace dice::node_store {
-
- MetallLiteralBackend::MetallLiteralBackend(std::string_view lexical, const rdf4cpp::rdf::storage::node::identifier::NodeID &datatype_id, std::string_view lang_tag, metall_manager::allocator_type const &allocator) noexcept
- : datatype_id_(datatype_id),
- lexical(lexical, allocator),
- lang_tag(lang_tag, allocator),
- hash_(View(*this).hash()) {}
- MetallLiteralBackend::MetallLiteralBackend(rdf4cpp::rdf::storage::node::view::LiteralBackendView view, metall_manager::allocator_type const &allocator) noexcept
- : datatype_id_(view.datatype_id),
+ MetallLiteralBackend::MetallLiteralBackend(rdf4cpp::rdf::storage::node::view::LexicalFormLiteralBackendView view, metall_manager::allocator_type const &allocator) noexcept
+ : hash_(view.hash()),
+ datatype_id_(view.datatype_id),
lexical(view.lexical_form, allocator),
lang_tag(view.language_tag, allocator),
- hash_(View(*this).hash()) {}
+ needs_escape_(view.needs_escape) {}
std::string_view MetallLiteralBackend::language_tag() const noexcept {
return lang_tag;
}
@@ -22,10 +17,14 @@ namespace dice::node_store {
std::string_view MetallLiteralBackend::lexical_form() const noexcept {
return lexical;
}
- MetallLiteralBackend::operator rdf4cpp::rdf::storage::node::view::LiteralBackendView() const noexcept {
+ bool MetallLiteralBackend::needs_escape() const noexcept {
+ return needs_escape_;
+ }
+ MetallLiteralBackend::operator rdf4cpp::rdf::storage::node::view::LexicalFormLiteralBackendView() const noexcept {
return {.datatype_id = datatype_id(),
.lexical_form = lexical_form(),
- .language_tag = language_tag()};
+ .language_tag = language_tag(),
+ .needs_escape = needs_escape()};
}
}// namespace dice::node_store
diff --git a/libs/node-store/src/dice/node-store/MetallLiteralBackend.hpp b/libs/node-store/src/dice/node-store/MetallLiteralBackend.hpp
index e991198e..75f36a0e 100644
--- a/libs/node-store/src/dice/node-store/MetallLiteralBackend.hpp
+++ b/libs/node-store/src/dice/node-store/MetallLiteralBackend.hpp
@@ -8,16 +8,16 @@
namespace dice::node_store {
class MetallLiteralBackend {
+ size_t hash_;
rdf4cpp::rdf::storage::node::identifier::NodeID datatype_id_;
metall_string lexical;
metall_string lang_tag;
- size_t hash_;
+ bool needs_escape_;
public:
- using View = rdf4cpp::rdf::storage::node::view::LiteralBackendView;
+ using View = rdf4cpp::rdf::storage::node::view::LexicalFormLiteralBackendView;
- MetallLiteralBackend(std::string_view lexical, const rdf4cpp::rdf::storage::node::identifier::NodeID &datatype_id, std::string_view lang_tag, metall_manager::allocator_type const &allocator) noexcept;
- MetallLiteralBackend(rdf4cpp::rdf::storage::node::view::LiteralBackendView view, metall_manager::allocator_type const &allocator) noexcept;
+ MetallLiteralBackend(rdf4cpp::rdf::storage::node::view::LexicalFormLiteralBackendView view, metall_manager::allocator_type const &allocator) noexcept;
[[nodiscard]] std::string_view lexical_form() const noexcept;
@@ -25,9 +25,11 @@ namespace dice::node_store {
[[nodiscard]] std::string_view language_tag() const noexcept;
+ [[nodiscard]] bool needs_escape() const noexcept;
+
[[nodiscard]] size_t hash() const noexcept { return hash_; }
- explicit operator rdf4cpp::rdf::storage::node::view::LiteralBackendView() const noexcept;
+ explicit operator rdf4cpp::rdf::storage::node::view::LexicalFormLiteralBackendView() const noexcept;
};
}// namespace dice::node_store
diff --git a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.cpp b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.cpp
index 502b106e..2cc21213 100644
--- a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.cpp
+++ b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.cpp
@@ -3,6 +3,12 @@ namespace dice::node_store {
PersistentNodeStorageBackend::PersistentNodeStorageBackend(PersistentNodeStorageBackendImpl *impl)
: INodeStorageBackend(), impl_(impl) {}
+ size_t PersistentNodeStorageBackend::size() const noexcept {
+ return impl_->size();
+ }
+ bool PersistentNodeStorageBackend::has_specialized_storage_for(rdf4cpp::rdf::storage::node::identifier::LiteralType datatype) const noexcept {
+ return impl_->has_specialized_storage_for(datatype);
+ }
rdf4cpp::rdf::storage::node::identifier::NodeID PersistentNodeStorageBackend::find_or_make_id(const rdf4cpp::rdf::storage::node::view::BNodeBackendView &view) noexcept {
return impl_->find_or_make_id(view);
}
@@ -39,16 +45,16 @@ namespace dice::node_store {
rdf4cpp::rdf::storage::node::view::VariableBackendView PersistentNodeStorageBackend::find_variable_backend_view(rdf4cpp::rdf::storage::node::identifier::NodeID id) const {
return impl_->find_variable_backend_view(id);
}
- bool PersistentNodeStorageBackend::erase_iri([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) const {
+ bool PersistentNodeStorageBackend::erase_iri([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) {
throw std::runtime_error{"Not implemented."};
}
- bool PersistentNodeStorageBackend::erase_literal([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) const {
+ bool PersistentNodeStorageBackend::erase_literal([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) {
throw std::runtime_error{"Not implemented."};
}
- bool PersistentNodeStorageBackend::erase_bnode([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) const {
+ bool PersistentNodeStorageBackend::erase_bnode([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) {
throw std::runtime_error{"Not implemented."};
}
- bool PersistentNodeStorageBackend::erase_variable([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) const {
+ bool PersistentNodeStorageBackend::erase_variable([[maybe_unused]] rdf4cpp::rdf::storage::node::identifier::NodeID id) {
throw std::runtime_error{"Not implemented."};
}
}// namespace dice::node_store
\ No newline at end of file
diff --git a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.hpp b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.hpp
index cde59549..52aee426 100644
--- a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.hpp
+++ b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackend.hpp
@@ -13,6 +13,9 @@ namespace dice::node_store {
~PersistentNodeStorageBackend() override = default;
+ [[nodiscard]] size_t size() const noexcept override;
+ [[nodiscard]] bool has_specialized_storage_for(rdf4cpp::rdf::storage::node::identifier::LiteralType datatype) const noexcept override;
+
rdf4cpp::rdf::storage::node::identifier::NodeID find_or_make_id(const rdf4cpp::rdf::storage::node::view::BNodeBackendView &view) noexcept override;
rdf4cpp::rdf::storage::node::identifier::NodeID find_or_make_id(const rdf4cpp::rdf::storage::node::view::IRIBackendView &view) noexcept override;
rdf4cpp::rdf::storage::node::identifier::NodeID find_or_make_id(const rdf4cpp::rdf::storage::node::view::LiteralBackendView &view) noexcept override;
@@ -25,10 +28,10 @@ namespace dice::node_store {
rdf4cpp::rdf::storage::node::view::LiteralBackendView find_literal_backend_view(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
rdf4cpp::rdf::storage::node::view::BNodeBackendView find_bnode_backend_view(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
rdf4cpp::rdf::storage::node::view::VariableBackendView find_variable_backend_view(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
- bool erase_iri(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
- bool erase_literal(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
- bool erase_bnode(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
- bool erase_variable(rdf4cpp::rdf::storage::node::identifier::NodeID id) const override;
+ bool erase_iri(rdf4cpp::rdf::storage::node::identifier::NodeID id) override;
+ bool erase_literal(rdf4cpp::rdf::storage::node::identifier::NodeID id) override;
+ bool erase_bnode(rdf4cpp::rdf::storage::node::identifier::NodeID id) override;
+ bool erase_variable(rdf4cpp::rdf::storage::node::identifier::NodeID id) override;
};
}// namespace dice::node_store
diff --git a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.cpp b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.cpp
index d0f9ec20..1df7efc8 100644
--- a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.cpp
+++ b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.cpp
@@ -23,16 +23,30 @@ namespace dice::node_store {
}
}
+ template<typename Storage>
+ static size_t lookup_size(Storage &storage) {
+ std::shared_lock l{storage.mutex};
+ return storage.id2data.size();
+ }
+
+ size_t PersistentNodeStorageBackendImpl::size() const noexcept {
+ return lookup_size(bnode_storage_) + lookup_size(iri_storage_) + lookup_size(literal_storage_) + lookup_size(variable_storage_);
+ }
+
+ bool PersistentNodeStorageBackendImpl::has_specialized_storage_for([[maybe_unused]] identifier::LiteralType type) {
+ return false;
+ }
+
/**
- * Synchronized lookup (and creation) of IDs by a provided view of a Node Backend.
- * @tparam Backend_t the Backend type. One of BNodeBackend, IRIBackend, LiteralBackend or VariableBackend
- * @tparam create_if_not_present enables code for creating non-existing Node Backends
- * @tparam NextIDFromView_func type of a function to generate the next ID which is assigned in case a new Node Backend is created
- * @param view contains the data of the requested Node Backend
- * @param storage the storage where the Node Backend is looked up
- * @param next_id_func function to generate the next ID which is assigned in case a new Node Backend is created
- * @return the NodeID for the looked up Node Backend. Result is null() if there was no matching Node Backend.
- */
+ * Synchronized lookup (and creation) of IDs by a provided view of a Node Backend.
+ * @tparam Backend_t the Backend type. One of BNodeBackend, IRIBackend, LiteralBackend or VariableBackend
+ * @tparam create_if_not_present enables code for creating non-existing Node Backends
+ * @tparam NextIDFromView_func type of a function to generate the next ID which is assigned in case a new Node Backend is created
+ * @param view contains the data of the requested Node Backend
+ * @param storage the storage where the Node Backend is looked up
+ * @param next_id_func function to generate the next ID which is assigned in case a new Node Backend is created
+ * @return the NodeID for the looked up Node Backend. Result is null() if there was no matching Node Backend.
+ */
template
inline identifier::NodeID lookup_or_insert_impl(typename Backend_t::View const &view,
auto &storage,
@@ -68,8 +82,8 @@ namespace dice::node_store {
identifier::NodeID PersistentNodeStorageBackendImpl::find_or_make_id(view::LiteralBackendView const &view) noexcept {
return lookup_or_insert_impl(
- view, literal_storage_,
- [this]([[maybe_unused]] view::LiteralBackendView const &literal_view) {
+ view.get_lexical(), literal_storage_,
+ [this](view::LexicalFormLiteralBackendView const &literal_view) {
return identifier::NodeID{next_literal_id++,
identifier::iri_node_id_to_literal_type(literal_view.datatype_id)};
});
@@ -108,7 +122,7 @@ namespace dice::node_store {
}
identifier::NodeID PersistentNodeStorageBackendImpl::find_id(const view::LiteralBackendView &view) const noexcept {
return lookup_or_insert_impl(
- view, literal_storage_);
+ view.get_lexical(), literal_storage_);
}
identifier::NodeID PersistentNodeStorageBackendImpl::find_id(const view::VariableBackendView &view) const noexcept {
return lookup_or_insert_impl(
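The new `size()` aggregates the four node storages, and `lookup_size` takes a `std::shared_lock` so concurrent readers are not blocked while counting. A self-contained sketch of that aggregation (the storage type is simplified to a map plus its mutex; only the locking pattern mirrors the diff):

```cpp
#include <cstddef>
#include <iostream>
#include <map>
#include <shared_mutex>
#include <string>

// simplified stand-in for one node storage: id -> backend data, guarded by a shared mutex
struct NodeStorage {
    mutable std::shared_mutex mutex;
    std::map<unsigned, std::string> id2data;
};

template<typename Storage>
static std::size_t lookup_size(Storage &storage) {
    std::shared_lock l{storage.mutex};   // reader lock: size() does not block other readers
    return storage.id2data.size();
}

int main() {
    NodeStorage bnode_storage, iri_storage, literal_storage, variable_storage;
    iri_storage.id2data[1] = "<http://example.org/s>";
    literal_storage.id2data[2] = "\"42\"";

    std::size_t const total = lookup_size(bnode_storage) + lookup_size(iri_storage) +
                              lookup_size(literal_storage) + lookup_size(variable_storage);
    std::cout << "stored nodes: " << total << '\n';  // prints 2
}
```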
diff --git a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.hpp b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.hpp
index 777fac64..8c216ed8 100644
--- a/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.hpp
+++ b/libs/node-store/src/dice/node-store/PersistentNodeStorageBackendImpl.hpp
@@ -48,6 +48,8 @@ namespace dice::node_store {
public:
explicit PersistentNodeStorageBackendImpl(metall_manager::allocator_type const &allocator);
+ size_t size() const noexcept;
+ bool has_specialized_storage_for(rdf4cpp::rdf::storage::node::identifier::LiteralType type);
[[nodiscard]] NodeID find_or_make_id(BNodeBackendView const &) noexcept;
[[nodiscard]] NodeID find_or_make_id(IRIBackendView const &) noexcept;
diff --git a/libs/rdf-tensor/CMakeLists.txt b/libs/rdf-tensor/CMakeLists.txt
index b8fa2fce..4fb22ec9 100644
--- a/libs/rdf-tensor/CMakeLists.txt
+++ b/libs/rdf-tensor/CMakeLists.txt
@@ -1,3 +1,7 @@
+cmake_minimum_required(VERSION 3.21)
+set(lib_suffix "rdf-tensor")
+set(lib "${PROJECT_NAME}-${lib_suffix}")
+
# Find cmake packages
find_package(Boost REQUIRED)
find_package(hypertrie REQUIRED)
@@ -6,20 +10,21 @@ find_package(rdf4cpp REQUIRED)
find_package(dice-hash REQUIRED)
# Define the library
-add_library(rdf-tensor INTERFACE)
-add_library(tentris::rdf-tensor ALIAS rdf-tensor)
-target_link_libraries(rdf-tensor INTERFACE
+add_library(${lib} INTERFACE)
+add_library(${PROJECT_NAME}::${lib_suffix} ALIAS ${lib})
+target_link_libraries(${lib} INTERFACE
rdf4cpp::rdf4cpp
hypertrie::hypertrie
+ hypertrie::query
Metall::Metall
Boost::headers
dice-hash::dice-hash
)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/version.hpp.in ${CMAKE_CURRENT_SOURCE_DIR}/src/dice/tentris/tentris_version.hpp)
-target_include_directories(rdf-tensor INTERFACE
+target_include_directories(${lib} INTERFACE
$
)
-include(${PROJECT_SOURCE_DIR}/cmake/install_library.cmake)
-install_interface_component(rdf-tensor src)
\ No newline at end of file
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_component(INTERFACE ${lib_suffix} src)
diff --git a/libs/rdf-tensor/src/dice/rdf-tensor/NodeWrapper.hpp b/libs/rdf-tensor/src/dice/rdf-tensor/NodeWrapper.hpp
index 50d33dc8..515528eb 100644
--- a/libs/rdf-tensor/src/dice/rdf-tensor/NodeWrapper.hpp
+++ b/libs/rdf-tensor/src/dice/rdf-tensor/NodeWrapper.hpp
@@ -30,7 +30,7 @@ namespace dice::rdf_tensor {
};
operator std::optional<Node>() const noexcept {
- return (Node) * this;
+ return static_cast<Node>(*this);
};
};
};// namespace dice::rdf-tensor
diff --git a/libs/rdf-tensor/src/dice/rdf-tensor/RDFNodeHashes.hpp b/libs/rdf-tensor/src/dice/rdf-tensor/RDFNodeHashes.hpp
index d7c346bd..1fcd2b3e 100644
--- a/libs/rdf-tensor/src/dice/rdf-tensor/RDFNodeHashes.hpp
+++ b/libs/rdf-tensor/src/dice/rdf-tensor/RDFNodeHashes.hpp
@@ -5,13 +5,6 @@
#include
namespace dice::hash {
- template
- struct dice_hash_overload {
- inline static std::size_t dice_hash(rdf4cpp::rdf::Node const &x) noexcept {
- return Policy::hash_fundamental(x.backend_handle().raw());
- }
- };
-
template
struct dice_hash_overload {
inline static std::size_t dice_hash(rdf4cpp::rdf::query::Variable const &x) noexcept {
diff --git a/libs/rdf-tensor/src/dice/rdf-tensor/metall_manager.hpp b/libs/rdf-tensor/src/dice/rdf-tensor/metall_manager.hpp
index 99d032f9..b7e96684 100644
--- a/libs/rdf-tensor/src/dice/rdf-tensor/metall_manager.hpp
+++ b/libs/rdf-tensor/src/dice/rdf-tensor/metall_manager.hpp
@@ -7,7 +7,7 @@
#pragma GCC diagnostic pop
namespace dice::rdf_tensor {
- using metall_manager = metall::basic_manager;
+ using metall_manager = metall::basic_manager<>;
using allocator_type = metall_manager::allocator_type;
}// namespace dice::rdf-tensor
diff --git a/libs/sparql2tensor/CMakeLists.txt b/libs/sparql2tensor/CMakeLists.txt
index 6ade03a2..7b2389ce 100644
--- a/libs/sparql2tensor/CMakeLists.txt
+++ b/libs/sparql2tensor/CMakeLists.txt
@@ -1,40 +1,35 @@
+cmake_minimum_required(VERSION 3.21)
+set(lib_suffix "sparql2tensor")
+set(lib "${PROJECT_NAME}-${lib_suffix}")
+
# Find cmake packages
find_package(sparql-parser-base REQUIRED)
find_package(robin_hood REQUIRED)
find_package(dice-hash REQUIRED)
# Define the library
-add_library(sparql2tensor
+add_library(${lib}
src/dice/sparql2tensor/parser/exception/SPARQLErrorListener.cpp
src/dice/sparql2tensor/parser/visitors/PrologueVisitor.cpp
src/dice/sparql2tensor/parser/visitors/SelectAskQueryVisitor.cpp
src/dice/sparql2tensor/SPARQLQuery.cpp
- src/dice/sparql2tensor/UPDATEQuery.cpp
- )
+)
-add_library(tentris::sparql2tensor ALIAS sparql2tensor)
+add_library(${PROJECT_NAME}::${lib_suffix} ALIAS ${lib})
-target_include_directories(sparql2tensor PUBLIC
+target_include_directories(${lib} PUBLIC
$
PRIVATE
- private-include
+ $
)
-target_link_libraries(sparql2tensor PUBLIC
- tentris::rdf-tensor
+target_link_libraries(${lib} PUBLIC
+ ${PROJECT_NAME}::rdf-tensor
robin_hood::robin_hood
PRIVATE
sparql-parser-base::sparql-parser-base
)
-set_target_properties(sparql2tensor PROPERTIES
- VERSION ${PROJECT_VERSION}
- SOVERSION ${PROJECT_VERSION_MAJOR}
- CXX_STANDARD 20
- CXX_STANDARD_REQUIRED YES
- CXX_EXTENSIONS NO
- )
-
-include(${PROJECT_SOURCE_DIR}/cmake/install_library.cmake)
-install_component(sparql2tensor src)
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_component(PUBLIC ${lib_suffix} src)
diff --git a/libs/sparql2tensor/private-include/dice/sparql2tensor/parser/visitors/PrologueVisitor.hpp b/libs/sparql2tensor/private-include/dice/sparql2tensor/parser/visitors/PrologueVisitor.hpp
index ac85657d..26309b58 100644
--- a/libs/sparql2tensor/private-include/dice/sparql2tensor/parser/visitors/PrologueVisitor.hpp
+++ b/libs/sparql2tensor/private-include/dice/sparql2tensor/parser/visitors/PrologueVisitor.hpp
@@ -13,9 +13,11 @@ namespace dice::sparql2tensor::parser::visitors {
using namespace dice::sparql_parser::base;
class PrologueVisitor : public SparqlParserBaseVisitor {
- rdf4cpp::rdf::parser::IStreamQuadIterator::prefix_storage_type prefixes_;
+ rdf4cpp::rdf::IRIFactory *prefixes_;
public:
+ PrologueVisitor(rdf4cpp::rdf::IRIFactory &prefixes) noexcept;
+
std::any visitPrologue(SparqlParser::PrologueContext *) override;
std::any visitBaseDecl(SparqlParser::BaseDeclContext *) override;
diff --git a/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.cpp b/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.cpp
index aa3f42b4..d2a78ebc 100644
--- a/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.cpp
+++ b/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.cpp
@@ -23,8 +23,8 @@ namespace dice::sparql2tensor {
SPARQLQuery p_sparql{};
if (q_ctx->prologue()) {
- parser::visitors::PrologueVisitor p_visitor{};
- p_sparql.prefixes_ = std::any_cast(p_visitor.visitPrologue(q_ctx->prologue()));
+ parser::visitors::PrologueVisitor p_visitor{p_sparql.prefixes_};
+ p_visitor.visitPrologue(q_ctx->prologue());
}
parser::visitors::SelectAskQueryVisitor visitor{&p_sparql};
diff --git a/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.hpp b/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.hpp
index 52c0a1de..bc5cc25e 100644
--- a/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.hpp
+++ b/libs/sparql2tensor/src/dice/sparql2tensor/SPARQLQuery.hpp
@@ -21,7 +21,7 @@ namespace dice::sparql2tensor {
std::vector triple_patterns_;
- rdf4cpp::rdf::parser::IStreamQuadIterator::prefix_storage_type prefixes_;
+ rdf4cpp::rdf::IRIFactory prefixes_;
bool distinct_ = false;
diff --git a/libs/sparql2tensor/src/dice/sparql2tensor/UPDATEQuery.cpp b/libs/sparql2tensor/src/dice/sparql2tensor/UPDATEQuery.cpp
deleted file mode 100644
index c7e16a20..00000000
--- a/libs/sparql2tensor/src/dice/sparql2tensor/UPDATEQuery.cpp
+++ /dev/null
@@ -1,191 +0,0 @@
-#include "UPDATEQuery.hpp"
-
-#include
-#include
-
-#include
-#include
-
-#include "dice/sparql2tensor/parser/visitors/PrologueVisitor.hpp"
-#include "dice/sparql2tensor/parser/exception/SPARQLErrorListener.hpp"
-
-namespace dice::sparql2tensor {
-
- static bool is_alpha(char const ch) noexcept {
- return std::isalpha(ch);
- }
-
- static bool is_ws(char const ch) noexcept {
- return std::isspace(ch);
- }
-
- /**
- * @brief reads a single word (determined by matcher) from the start of s
- * @param s input string; will be modified to not include the word after extraction
- * @param matcher determines the charset the word is made of
- * @return the extracted word
- */
- template requires std::is_nothrow_invocable_r_v
- static std::string_view read_word(std::string_view &s, CharMatcher &&matcher) noexcept {
- auto const first_word_begin = std::find_if_not(s.begin(), s.end(), is_ws);
- auto const first_word_end = std::find_if_not(first_word_begin, s.end(), std::forward(matcher));
-
- auto word = s.substr(std::distance(s.begin(), first_word_begin), std::distance(first_word_begin, first_word_end));
- s.remove_prefix(std::distance(s.begin(), first_word_end));
-
- return word;
- }
-
- /**
- * @brief reads a single word (determined by matcher) from the end of s
- * @param s input string; will be modified to not include the word after extraction
- * @param matcher determines the charset the word is made of
- * @return the extracted word
- */
- template requires std::is_nothrow_invocable_r_v
- static std::string_view read_word_rev(std::string_view &s, CharMatcher &&matcher) noexcept {
- auto const first_word_rbegin = std::find_if_not(s.rbegin(), s.rend(), is_ws);
- auto const first_word_rend = std::find_if_not(first_word_rbegin, s.rend(), std::forward(matcher));
-
- auto word = s.substr(std::distance(first_word_rend, s.rend()), std::distance(first_word_rbegin, first_word_rend));
- s.remove_suffix(std::distance(s.rbegin(), first_word_rend));
-
- return word;
- }
-
- /**
- * @brief extracts the prologue from an update query
- * @param s the whole query, will be modified to not include the extracted prologue afterwards
- * @return the extracted prologue
- */
- static std::string_view read_prologue(std::string_view &s) noexcept {
- auto const query_body_begin = s.find_first_of('{');
- if (query_body_begin == std::string_view::npos) {
- // body begin not found, error will be handled by calling function
- return "";
- }
-
- auto const prologue_last_char = s.substr(0, query_body_begin).find_last_of('>');
- if (prologue_last_char == std::string_view::npos) {
- // no prologue found
- return "";
- }
-
- auto const prologue = s.substr(0, prologue_last_char + 1);
- s.remove_prefix(prologue_last_char + 1);
-
- return prologue;
- }
-
- enum struct QueryType {
- INSERT_DATA,
- DELETE_DATA,
- UNKNOWN,
- };
-
- /**
- * @brief reads the beginning of the actual query (after prologue) and tries to recognize the query type
- * @param s the whole query without the prologue, will be modified to not include the query type
- * @return the extracted query type
- *
- * @example
- * @code
- * std::string_view s = "DELETE DATA { ... }";
- * QueryType const query_type = read_query_type(s);
- *
- * assert(query_type == QueryType::DELETE_DATA);
- * assert(s == " { ... }");
- * @endcode
- */
- static QueryType read_query_type(std::string_view &s) noexcept {
- auto const first_word = read_word(s, is_alpha);
- auto const second_word = read_word(s, is_alpha);
-
- if (second_word != "DATA") {
- return QueryType::UNKNOWN;
- }
-
- if (first_word == "DELETE") {
- return QueryType::DELETE_DATA;
- } else if (first_word == "INSERT") {
- return QueryType::INSERT_DATA;
- }
-
- return QueryType::UNKNOWN;
- }
-
- UPDATEDATAQueryData UPDATEDATAQueryData::parse(std::string_view const sparql_update_str) {
- std::string_view rest_mut = sparql_update_str;
- auto const prologue = read_prologue(rest_mut);
-
- UPDATEDATAQueryData update_query;
-
- // expected structure for fast path: 'prologue... (DELETE|INSERT) DATA { triples... }'
- auto const query_type = read_query_type(rest_mut);
- auto const third_word = read_word(rest_mut, [](char const ch) noexcept { return ch == '{'; });
-
- if (query_type != QueryType::UNKNOWN) {
- // fast path for DELETE DATA / INSERT DATA
-
- if (third_word != "{") {
- // missing (or too many) '{' after '(DELETE|INSERT) DATA'
- std::ostringstream err;
- err << "syntax error: expected '{' after " << (query_type == QueryType::DELETE_DATA ? "DELETE DATA" : "INSERT DATA");
- throw std::runtime_error{err.str()};
- }
-
- auto const last_word = read_word_rev(rest_mut, [](char const ch) noexcept { return ch == '}'; });
-
- if (last_word != "}") {
- // closing brace is missing from query
- throw std::runtime_error{"syntax error: expected '}' at end of query"};
- }
-
- using namespace rdf_tensor::parser;
-
- { // parse only prologue using antlr
- parser::exception::SPARQLErrorListener error_listener{};
- antlr4::ANTLRInputStream input{prologue};
- dice::sparql_parser::base::SparqlLexer lexer{&input};
- antlr4::CommonTokenStream tokens{&lexer};
- dice::sparql_parser::base::SparqlParser parser{&tokens};
- parser.removeErrorListeners();
- parser.addErrorListener(&error_listener);
-
- auto update_ctx = parser.updateCommand();
-
- { // visit prologue and store prefixes
- parser::visitors::PrologueVisitor p_visitor{};
- for (auto prefix_ctx : update_ctx->prologue()) {
- auto cur_prefixes = std::any_cast(p_visitor.visitPrologue(prefix_ctx));
- update_query.prefixes.insert(cur_prefixes.begin(), cur_prefixes.end());
- }
- }
- }
-
- std::vector entries;
-
- { // try to parse all triples between '{' and '}' with rdf4cpp and then store them in 'entries'
- std::istringstream iss{std::string{rest_mut}};
- for (IStreamQuadIterator qit{iss, ParsingFlag::NoParsePrefix, update_query.prefixes}; qit != IStreamQuadIterator{}; ++qit) {
- if (qit->has_value()) {
- auto const &quad = **qit;
- entries.push_back(rdf_tensor::NonZeroEntry{{quad.subject(), quad.predicate(), quad.object()}});
- } else {
- std::ostringstream oss;
- oss << qit->error();
- throw std::runtime_error{oss.str()};
- }
- }
- }
-
- update_query.is_delete = query_type == QueryType::DELETE_DATA;
- update_query.entries = std::move(entries);
- } else {
- throw std::runtime_error{"Currently only DELETE DATA and INSERT DATA updates are supported"};
- }
-
- return update_query;
- }
-
-}// namespace dice::sparql2tensor
\ No newline at end of file
diff --git a/libs/sparql2tensor/src/dice/sparql2tensor/UPDATEQuery.hpp b/libs/sparql2tensor/src/dice/sparql2tensor/UPDATEQuery.hpp
deleted file mode 100644
index 16b36d5f..00000000
--- a/libs/sparql2tensor/src/dice/sparql2tensor/UPDATEQuery.hpp
+++ /dev/null
@@ -1,18 +0,0 @@
-#ifndef DICE_SPARQL_UPDATEQUERY_HPP
-#define DICE_SPARQL_UPDATEQUERY_HPP
-
-#include
-
-
-namespace dice::sparql2tensor {
-
- struct UPDATEDATAQueryData {
- rdf_tensor::parser::IStreamQuadIterator::prefix_storage_type prefixes;
- bool is_delete; // is this query DELETE DATA? (otherwise is INSERT DATA)
- std::vector entries;
-
- static UPDATEDATAQueryData parse(std::string_view sparql_update_str);
- };
-}
-
-#endif//DICE_SPARQL_UPDATEQUERY_HPP
diff --git a/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/PrologueVisitor.cpp b/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/PrologueVisitor.cpp
index e47ad8a6..fe96643c 100644
--- a/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/PrologueVisitor.cpp
+++ b/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/PrologueVisitor.cpp
@@ -2,13 +2,15 @@
namespace dice::sparql2tensor::parser::visitors {
+ PrologueVisitor::PrologueVisitor(rdf4cpp::rdf::IRIFactory &prefixes) noexcept : prefixes_{&prefixes} {
+ }
+
std::any PrologueVisitor::visitPrologue(SparqlParser::PrologueContext *ctx) {
- prefixes_.clear();
for (auto pref_ctx : ctx->prefixDecl())
visitPrefixDecl(pref_ctx);
for ([[maybe_unused]] auto base_ctx : ctx->baseDecl())
throw std::runtime_error("Base Declarations not supported yet.");
- return prefixes_;
+ return nullptr;
}
std::any PrologueVisitor::visitBaseDecl([[maybe_unused]] SparqlParser::BaseDeclContext *ctx) {
@@ -20,7 +22,7 @@ namespace dice::sparql2tensor::parser::visitors {
if (ctx->PNAME_NS())
prefix = ctx->PNAME_NS()->getText();
auto ns = ctx->IRIREF()->getText();
- prefixes_[prefix.substr(0, prefix.size() - 1)] = ns.substr(1, ns.size() - 2);
+ prefixes_->assign_prefix(prefix.substr(0, prefix.size() - 1), ns.substr(1, ns.size() - 2));
return nullptr;
}
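The prologue handling changes from returning a prefix map to writing directly into an `rdf4cpp::rdf::IRIFactory` via `assign_prefix`, which `visitIri` later resolves with `from_prefix`. A minimal std-only model of that registry semantics (this is an illustration of the lookup behaviour, not rdf4cpp's implementation):

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <string_view>

// toy model of the prefix registry the visitor now writes into
class PrefixRegistry {
    std::map<std::string, std::string, std::less<>> prefixes_;

public:
    // PREFIX foaf: <http://xmlns.com/foaf/0.1/>  ->  assign_prefix("foaf", "http://xmlns.com/foaf/0.1/")
    void assign_prefix(std::string_view prefix, std::string_view ns) {
        prefixes_[std::string{prefix}] = std::string{ns};
    }

    // foaf:name -> "http://xmlns.com/foaf/0.1/name", nullopt if the prefix is unknown
    std::optional<std::string> from_prefix(std::string_view prefix, std::string_view local) const {
        auto const it = prefixes_.find(prefix);
        if (it == prefixes_.end())
            return std::nullopt;
        return it->second + std::string{local};
    }
};

int main() {
    PrefixRegistry prefixes;
    prefixes.assign_prefix("foaf", "http://xmlns.com/foaf/0.1/");

    if (auto const iri = prefixes.from_prefix("foaf", "name"))
        std::cout << *iri << '\n';

    if (!prefixes.from_prefix("ex", "thing"))
        std::cout << "Invalid prefixed IRI\n";   // mirrors the error path in visitIri
}
```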
diff --git a/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/SelectAskQueryVisitor.cpp b/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/SelectAskQueryVisitor.cpp
index e407ec9d..6dc953db 100644
--- a/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/SelectAskQueryVisitor.cpp
+++ b/libs/sparql2tensor/src/dice/sparql2tensor/parser/visitors/SelectAskQueryVisitor.cpp
@@ -36,7 +36,7 @@ namespace dice::sparql2tensor::parser::visitors {
for (auto const &tp : query->triple_patterns_) {
for (auto const &node : tp) {
if (node.is_variable()) {
- auto var = (rdf4cpp::rdf::query::Variable) node;
+ auto var = node.as_variable();
if (not var.is_anonymous()) {
auto [_, was_new] = seen_vars.insert(var);
if (was_new)
@@ -183,7 +183,7 @@ namespace dice::sparql2tensor::parser::visitors {
if (ctx->varOrTerm() and ctx->propertyListPathNotEmpty()) {
active_subject = std::any_cast(visitVarOrTerm(ctx->varOrTerm()));
if (active_subject.is_variable())
- register_var(rdf4cpp::rdf::query::Variable(active_subject));
+ register_var(active_subject.as_variable());
visitPropertyListPathNotEmpty(ctx->propertyListPathNotEmpty());
} else if (ctx->triplesNodePath() and ctx->propertyListPath()) {
return nullptr;
@@ -247,11 +247,12 @@ namespace dice::sparql2tensor::parser::visitors {
}
std::string predicate = ctx->prefixedName()->PNAME_LN()->getText();
std::size_t split = predicate.find(':');
- try {
- return rdf4cpp::rdf::IRI(query->prefixes_.at(predicate.substr(0, split)) + predicate.substr(split + 1));
- } catch (...) {
- throw std::out_of_range("Prefix " + predicate.substr(0, split) + " not declared.");
- }
+ auto maybe_iri = query->prefixes_.from_prefix(predicate.substr(0, split), predicate.substr(split + 1));
+ if (maybe_iri.has_value()) {
+ return *maybe_iri;
+ } else {
+ throw std::runtime_error("Invalid prefixed IRI");
+ }
}
std::any SelectAskQueryVisitor::visitBlankNode(SparqlParser::BlankNodeContext *ctx) {
@@ -281,7 +282,7 @@ namespace dice::sparql2tensor::parser::visitors {
if (auto var_or_term_ctx = ctx->graphNodePath()->varOrTerm(); var_or_term_ctx) {
auto obj = std::any_cast(visitVarOrTerm(var_or_term_ctx));
if (obj.is_variable())
- register_var(rdf4cpp::rdf::query::Variable(obj));
+ register_var(obj.as_variable());
query->triple_patterns_.emplace_back(active_subject, active_predicate, obj);
add_tp(query->triple_patterns_.back());
} else {
@@ -294,7 +295,7 @@ namespace dice::sparql2tensor::parser::visitors {
if (auto var_or_term_ctx = ctx->graphNode()->varOrTerm(); var_or_term_ctx) {
auto obj = std::any_cast(visitVarOrTerm(var_or_term_ctx));
if (obj.is_variable())
- register_var(rdf4cpp::rdf::query::Variable(obj));
+ register_var(obj.as_variable());
query->triple_patterns_.emplace_back(active_subject, active_predicate, obj);
add_tp(query->triple_patterns_.back());
} else {
@@ -343,45 +344,45 @@ namespace dice::sparql2tensor::parser::visitors {
std::any SelectAskQueryVisitor::visitRdfLiteral(SparqlParser::RdfLiteralContext *ctx) {
auto value = std::any_cast(visitString(ctx->string()));
if (auto iri_ctx = ctx->iri(); iri_ctx)
- return rdf4cpp::rdf::Literal(value, std::any_cast(visitIri(iri_ctx)));
+ return rdf4cpp::rdf::Literal::make_typed(value, std::any_cast(visitIri(iri_ctx)));
else if (auto langtag_ctx = ctx->LANGTAG(); langtag_ctx)
- return rdf4cpp::rdf::Literal(value, langtag_ctx->getText().substr(1));
+ return rdf4cpp::rdf::Literal::make_lang_tagged(value, langtag_ctx->getText().substr(1));
else
- return rdf4cpp::rdf::Literal(value);
+ return rdf4cpp::rdf::Literal::make_simple(value);
}
std::any SelectAskQueryVisitor::visitNumericLiteral(SparqlParser::NumericLiteralContext *ctx) {
auto number = ctx->getText();
if (auto pos_literal_ctx = ctx->numericLiteralPositive(); pos_literal_ctx) {
if (pos_literal_ctx->DECIMAL_POSITIVE())
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#decimal"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#decimal"));
else if (pos_literal_ctx->DOUBLE_POSITIVE())
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#double"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#double"));
else
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#integer"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#integer"));
} else if (auto neg_literal_ctx = ctx->numericLiteralNegative(); neg_literal_ctx) {
if (neg_literal_ctx->DECIMAL_NEGATIVE())
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#decimal"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#decimal"));
else if (neg_literal_ctx->DOUBLE_NEGATIVE())
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#double"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#double"));
else
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#integer"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#integer"));
} else {
auto unsigned_literal_ctx = ctx->numericLiteralUnsigned();
if (unsigned_literal_ctx->DECIMAL())
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#decimal"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#decimal"));
else if (unsigned_literal_ctx->DOUBLE())
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#double"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#double"));
else
- return rdf4cpp::rdf::Literal(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#integer"));
+ return rdf4cpp::rdf::Literal::make_typed(number, rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#integer"));
}
}
std::any SelectAskQueryVisitor::visitBooleanLiteral(SparqlParser::BooleanLiteralContext *ctx) {
if (ctx->TRUE())
- return rdf4cpp::rdf::Literal("true", rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#boolean"));
+ return rdf4cpp::rdf::Literal::make_boolean(true);
else
- return rdf4cpp::rdf::Literal("false", rdf4cpp::rdf::IRI("http://www.w3.org/2001/XMLSchema#boolean"));
+ return rdf4cpp::rdf::Literal::make_boolean(false);
}
std::any SelectAskQueryVisitor::visitString(SparqlParser::StringContext *ctx) {
@@ -404,7 +405,7 @@ namespace dice::sparql2tensor::parser::visitors {
for (auto const &node : tp) {
if (not node.is_variable())
continue;
- var_ids.push_back(query->var_to_id_[rdf4cpp::rdf::query::Variable(node)]);
+ var_ids.push_back(query->var_to_id_[node.as_variable()]);
}
// create new node in the operand dependency graph
auto v_id = query->odg_.add_operand(var_ids);
diff --git a/libs/tentris/CMakeLists.txt b/libs/tentris/CMakeLists.txt
index 281d13e3..c6601123 100644
--- a/libs/tentris/CMakeLists.txt
+++ b/libs/tentris/CMakeLists.txt
@@ -1,15 +1,17 @@
-# Find cmake packages
+cmake_minimum_required(VERSION 3.21)
+set(lib_suffix "tentris")
+set(lib "${PROJECT_NAME}-${lib_suffix}")
# Define the library
-add_library(tentris INTERFACE)
-add_library(tentris::tentris ALIAS tentris)
-target_link_libraries(tentris INTERFACE
- tentris::endpoint
+add_library(${lib} INTERFACE)
+add_library(${PROJECT_NAME}::${lib_suffix} ALIAS ${lib})
+target_link_libraries(${lib} INTERFACE
+ ${PROJECT_NAME}::endpoint
)
-target_include_directories(tentris INTERFACE
+target_include_directories(${lib} INTERFACE
$
)
-include(${PROJECT_SOURCE_DIR}/cmake/install_library.cmake)
-install_interface_component(tentris src)
\ No newline at end of file
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_component(INTERFACE ${lib_suffix} src)
diff --git a/libs/triple-store/CMakeLists.txt b/libs/triple-store/CMakeLists.txt
index 0201fc3b..5cdec2cc 100644
--- a/libs/triple-store/CMakeLists.txt
+++ b/libs/triple-store/CMakeLists.txt
@@ -1,31 +1,25 @@
+cmake_minimum_required(VERSION 3.21)
+set(lib_suffix "triple-store")
+set(lib "${PROJECT_NAME}-${lib_suffix}")
+
# Find cmake packages
find_package(dice-hash REQUIRED)
# Define the library
-add_library(triple-store
+add_library(${lib}
src/dice/triple-store/TripleStore.cpp
)
-add_library(tentris::triple-store ALIAS triple-store)
+add_library(${PROJECT_NAME}::${lib_suffix} ALIAS ${lib})
-target_include_directories(triple-store PUBLIC
+target_include_directories(${lib} PUBLIC
$
)
-target_link_libraries(triple-store PUBLIC
- tentris::sparql2tensor
- tentris::rdf-tensor
- )
-
-## Packaging and exporting the target
-
-set_target_properties(triple-store PROPERTIES
- VERSION ${PROJECT_VERSION}
- SOVERSION ${PROJECT_VERSION_MAJOR}
- CXX_STANDARD 20
- CXX_STANDARD_REQUIRED YES
- CXX_EXTENSIONS NO
+target_link_libraries(${lib} PUBLIC
+ ${PROJECT_NAME}::sparql2tensor
+ ${PROJECT_NAME}::rdf-tensor
)
-include(${PROJECT_SOURCE_DIR}/cmake/install_library.cmake)
-install_component(triple-store src)
+include(${CMAKE_SOURCE_DIR}/cmake/install_components.cmake)
+install_component(PUBLIC ${lib_suffix} src)
\ No newline at end of file
diff --git a/libs/triple-store/src/dice/triple-store/TripleStore.cpp b/libs/triple-store/src/dice/triple-store/TripleStore.cpp
index 3e041248..3702672c 100644
--- a/libs/triple-store/src/dice/triple-store/TripleStore.cpp
+++ b/libs/triple-store/src/dice/triple-store/TripleStore.cpp
@@ -17,7 +17,7 @@ namespace dice::triple_store {
}
HypertrieBulkInserter bulk_inserter{hypertrie_, bulk_size, call_back};
- for (rdf4cpp::rdf::parser::IStreamQuadIterator qit{ifs}; qit != rdf4cpp::rdf::parser::IStreamQuadIterator{}; ++qit) {
+ for (rdf4cpp::rdf::parser::IStreamQuadIterator qit{ifs}; qit != std::default_sentinel; ++qit) {
if (qit->has_value()) {
auto const &quad = qit->value();
bulk_inserter.add(
@@ -138,7 +138,7 @@ namespace dice::triple_store {
if (query.triple_patterns_.size() == 1) {// O(1)
auto slice_key = query.get_slice_keys()[0];
if (slice_key.get_fixed_depth() == 3)
- return (size_t) std::get(get_hypertrie()[slice_key]);
+					return static_cast<size_t>(std::get(get_hypertrie()[slice_key]));
else
return std::get(get_hypertrie()[slice_key]).size();
} else {
diff --git a/package.sh b/package.sh
new file mode 100755
index 00000000..9f11b235
--- /dev/null
+++ b/package.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
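+# Build the tentris binaries in a container (podman or docker) and copy them
+# into a versioned ./tentris_<version>[_<branch>]_<commit> directory.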
+
+set -euo pipefail
+
+VERSION_REGEX='project\([^)]*VERSION\s+(\d+\.\d+\.\d+)[^)]*\)'
+TENTRIS_VER=$(grep -Poz "$VERSION_REGEX" CMakeLists.txt | grep -Poz '[0-9]+\.[0-9]+\.[0-9]+')
+CUR_BRANCH=$(git branch --show-current | sed 's|/|_|g')
+# get current commit hash
+COMMIT_HASH=$(git rev-parse --short HEAD)
+if [[ -n $CUR_BRANCH ]]; then
+ TENTRIS_NAME="tentris_${TENTRIS_VER}_${CUR_BRANCH}_${COMMIT_HASH}"
+else
+ TENTRIS_NAME="tentris_${TENTRIS_VER}_${COMMIT_HASH}"
+fi
+
+if podman --version > /dev/null; then
+ echo "Using podman"
+ BUILDER="podman"
+elif docker --version > /dev/null; then
+ echo "Using docker"
+ BUILDER="docker"
+else
+ echo "Error: Neither podman nor docker found" >&2
+ exit 1
+fi
+
+# create a temporary directory for the container build output
+TEMP_DIR=$(mktemp -d)
+
+# make sure TEMP_DIR is removed again on exit, including on failure
+trap 'rm -rf "$TEMP_DIR"' EXIT
+
+mkdir -p "${TENTRIS_NAME}"
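+# build the Dockerfile's "binaries" stage and export its contents into TEMP_DIR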
+${BUILDER} build . --output="${TEMP_DIR}" --target=binaries
+echo "${TEMP_DIR}"
+ls -lah "${TEMP_DIR}"
+cp -r "${TEMP_DIR}/." "./${TENTRIS_NAME}/"
\ No newline at end of file
diff --git a/test_fetch_content/CMakeLists.txt b/test_fetch_content/CMakeLists.txt
deleted file mode 100644
index 49cb5745..00000000
--- a/test_fetch_content/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-cmake_minimum_required(VERSION 3.15)
-project(PackageTest CXX)
-
-# find out the current tag of the tentris-private repos in the folder above
-execute_process(COMMAND git rev-parse HEAD
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../
- OUTPUT_VARIABLE parent_git_tag
- OUTPUT_STRIP_TRAILING_WHITESPACE
- )
-# fetch the tentris-private repo from the local copy
-include(FetchContent)
-FetchContent_Declare(
- tentris
- GIT_REPOSITORY "${CMAKE_CURRENT_SOURCE_DIR}/../"
- GIT_TAG ${parent_git_tag}
- GIT_SHALLOW TRUE
-)
-FetchContent_MakeAvailable(tentris)
-add_executable(example example.cpp)
-# With FetchContent_Declare we don't need find_package
-target_link_libraries(example PRIVATE tentris::node-store)
diff --git a/test_fetch_content/example.cpp b/test_fetch_content/example.cpp
deleted file mode 100644
index e4db1f0d..00000000
--- a/test_fetch_content/example.cpp
+++ /dev/null
@@ -1,6 +0,0 @@
-#include
-
-int main() {
- dice::rdf_tensor::metall_manager x{metall::create_only, "abc"};
- dice::node_store::PersistentNodeStorageBackendImpl y{x.get_allocator()};
-}
diff --git a/test_package/conanfile.py b/test_package/conanfile.py
index 47570667..37860b1e 100644
--- a/test_package/conanfile.py
+++ b/test_package/conanfile.py
@@ -1,17 +1,16 @@
import os
from conan import ConanFile
-from conan.tools.cmake import CMake, CMakeToolchain
-from conan.tools.layout import cmake_layout
+from conan.tools.build import can_run
+from conan.tools.cmake import CMake, cmake_layout
required_conan_version = ">=1.43.0"
class TestPackageConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
- generators = "CMakeDeps"
+ generators = "CMakeDeps", "CMakeToolchain"
- def generate(self):
- tc = CMakeToolchain(self)
- tc.generate()
+ def requirements(self):
+ self.requires(self.tested_reference_str)
def layout(self):
cmake_layout(self)
@@ -22,4 +21,6 @@ def build(self):
cmake.build()
def test(self):
- self.run(os.path.join(self.cpp.build.bindirs[0], "example"), run_environment=True)
\ No newline at end of file
+ if can_run(self):
+ cmd = os.path.join(self.cpp.build.bindir, "example")
+ self.run(cmd, env="conanrun")
\ No newline at end of file