forked from alpaka-group/llama
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathCMakeLists.txt
130 lines (115 loc) · 4.71 KB
/
CMakeLists.txt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
cmake_minimum_required (VERSION 3.16)
project (llama CXX)

# llama — header-only (INTERFACE) library target
find_package(Boost 1.70.0 REQUIRED)
find_package(fmt CONFIG QUIET)
# GNUInstallDirs must be included BEFORE ${CMAKE_INSTALL_INCLUDEDIR} is read below,
# otherwise the INSTALL_INTERFACE include directory expands to an empty string.
include(GNUInstallDirs)
add_library(${PROJECT_NAME} INTERFACE)
target_include_directories(${PROJECT_NAME} INTERFACE
	$<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/include>
	$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>)
target_compile_features(${PROJECT_NAME} INTERFACE cxx_std_17)
target_link_libraries(${PROJECT_NAME} INTERFACE Boost::headers)
if (fmt_FOUND)
	target_link_libraries(${PROJECT_NAME} INTERFACE fmt::fmt)
else()
	# fmt is optional: only llama/DumpMapping.hpp needs it
	message(STATUS "The fmt library was not found. You cannot use llama/DumpMapping.hpp")
endif()

# llama::llama to make subdirectory projects work
add_library(${PROJECT_NAME}::${PROJECT_NAME} ALIAS ${PROJECT_NAME})

# llama IDE target to make source browsable/editable in IDEs
# CONFIGURE_DEPENDS re-runs the glob at build time so newly added headers show up
file(GLOB_RECURSE llamaSources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/include/**")
add_custom_target("llamaIde" SOURCES ${llamaSources})
source_group(TREE "${CMAKE_CURRENT_LIST_DIR}/include/llama" FILES ${llamaSources})
# tests
include(CTest)
if (BUILD_TESTING)
	find_package(Catch2 2.6.1 CONFIG QUIET)
	# Test the _FOUND variable by name; the old `NOT ${Catch2_FOUND}` form
	# double-dereferences and errors out if the variable is unset or empty.
	if (NOT Catch2_FOUND)
		message(FATAL_ERROR "Catch2 is required for building the tests. Either install Catch2 or disable tests by passing -DBUILD_TESTING=OFF to CMake.")
	endif()
	# CONFIGURE_DEPENDS re-runs the glob at build time so new test files are picked up
	file(GLOB_RECURSE testSources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/tests/**")
	add_executable(tests ${testSources})
	include(Catch)
	catch_discover_tests(tests)
	source_group(TREE "${CMAKE_CURRENT_LIST_DIR}/tests" FILES ${testSources})
	if (MSVC)
		target_compile_options(tests PRIVATE /std:c++latest) # C++20 does not include ranges yet in MSVC, because the ABI is not finalized
		target_compile_options(tests PRIVATE /permissive- /constexpr:steps10000000 /diagnostics:caret)
	else()
		target_compile_features(tests PRIVATE cxx_std_20)
		target_compile_options(tests PRIVATE -Wall -Wextra -Wno-missing-braces)
	endif()
	if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM")
		target_compile_options(tests PRIVATE -fconstexpr-steps=10000000)
	endif()
	target_link_libraries(tests PRIVATE Catch2::Catch2 llama::llama)

	# sanitizer support is opt-in; never hardcode sanitizer flags unconditionally
	option(ASAN_FOR_TESTS "Enables address sanitizer for tests" OFF)
	if (ASAN_FOR_TESTS)
		if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
			target_compile_options(tests PRIVATE -fsanitize=address -fno-omit-frame-pointer)
			target_link_options   (tests PRIVATE -fsanitize=address -fno-omit-frame-pointer)
		elseif(MSVC)
			target_compile_options(tests PRIVATE /fsanitize=address)
			target_link_options   (tests PRIVATE /wholearchive:clang_rt.asan_dynamic-x86_64.lib /wholearchive:clang_rt.asan_dynamic_runtime_thunk-x86_64.lib)
		endif()
	endif()
endif()
# examples
option(LLAMA_BUILD_EXAMPLES "Building (and installing) the examples" ON)
if (LLAMA_BUILD_EXAMPLES)
	# general examples
	add_subdirectory("examples/simpletest")
	add_subdirectory("examples/vectoradd")
	add_subdirectory("examples/nbody")
	add_subdirectory("examples/nbody_benchmark")
	add_subdirectory("examples/heatequation")
	add_subdirectory("examples/viewcopy")
	add_subdirectory("examples/bufferguard")
	add_subdirectory("examples/raycast")
	add_subdirectory("examples/bitpack")
	add_subdirectory("examples/bytesplit")
	add_subdirectory("examples/floatpack")

	# alpaka examples — optional, only built when alpaka is found.
	# find_package always defines alpaka_FOUND; the previous check used the
	# internal variable _ALPAKA_FOUND, which is not part of the public contract.
	find_package(alpaka 0.7.0 QUIET)
	if (alpaka_FOUND)
		add_subdirectory("examples/alpaka/nbody")
		add_subdirectory("examples/alpaka/vectoradd")
		add_subdirectory("examples/alpaka/asyncblur")
		add_subdirectory("examples/alpaka/pic")
	else()
		# was `elseif()` with an empty condition, so this warning could never fire
		message(WARNING "Could not find alpaka. Alpaka examples are disabled.")
	endif()

	# ROOT examples — optional
	find_package(ROOT QUIET)
	if (ROOT_FOUND)
		add_subdirectory("examples/hep_rntuple")
	endif()

	# CUDA examples — optional, enabled only when a CUDA compiler is available
	include(CheckLanguage)
	check_language(CUDA)
	if (CMAKE_CUDA_COMPILER)
		enable_language(CUDA)
		add_subdirectory("examples/cuda/nbody")
	else()
		# was `elseif()` with an empty condition, so this warning could never fire
		message(WARNING "Could not find CUDA. Try setting CMAKE_CUDA_COMPILER. CUDA examples are disabled.")
	endif()
endif()
# install
include(CMakePackageConfigHelpers)
include(GNUInstallDirs)
set(_llama_INSTALL_CMAKEDIR "${CMAKE_INSTALL_LIBDIR}/cmake/llama")
configure_package_config_file(
	"${PROJECT_SOURCE_DIR}/cmake/llama-config.cmake.in"
	"${PROJECT_BINARY_DIR}/cmake/llama-config.cmake"
	INSTALL_DESTINATION "${_llama_INSTALL_CMAKEDIR}")
# @ONLY so stray ${} in the template is not expanded
configure_file(
	"${PROJECT_SOURCE_DIR}/cmake/llama-config-version.cmake.in"
	"${PROJECT_BINARY_DIR}/cmake/llama-config-version.cmake"
	@ONLY
)
# Install headers to ${CMAKE_INSTALL_INCLUDEDIR} (not a hardcoded "include") so
# the destination matches the INSTALL_INTERFACE include path of the llama target
# and honors a user-overridden CMAKE_INSTALL_INCLUDEDIR.
install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/llama" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
install(
	FILES
		"${PROJECT_BINARY_DIR}/cmake/llama-config.cmake"
		"${PROJECT_BINARY_DIR}/cmake/llama-config-version.cmake"
	DESTINATION
		"${_llama_INSTALL_CMAKEDIR}"
)