diff --git a/CMakeLists.txt b/CMakeLists.txt
index 891094e..1d577b7 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -109,7 +109,51 @@ if(${Boost_FOUND})
   set(COBS_LINK_LIBRARIES ${Boost_LIBRARIES} ${COBS_LINK_LIBRARIES})
 endif()
 
-set(COBS_LINK_LIBRARIES stdc++fs ${COBS_LINK_LIBRARIES})
+include(CheckCXXSourceRuns)
+set(CMAKE_REQUIRED_FLAGS " -std=c++17")
+check_cxx_source_runs("
+    #include <iostream>
+    #include <experimental/filesystem>
+    int main() {
+        std::cout << std::experimental::filesystem::temp_directory_path();
+        return 0;
+    }
+" CPPNOFS)
+if(NOT CPPNOFS)
+    set(CMAKE_REQUIRED_FLAGS " -std=c++17")
+    set(CMAKE_REQUIRED_LIBRARIES "c++fs")
+    check_cxx_source_runs("
+        #include <iostream>
+        #include <experimental/filesystem>
+        int main() {
+            std::cout << std::experimental::filesystem::temp_directory_path();
+            return 0;
+        }
+    " CPPFS)
+    unset(CMAKE_REQUIRED_FLAGS)
+    unset(CMAKE_REQUIRED_LIBRARIES)
+    if(CPPFS)
+        set(COBS_LINK_LIBRARIES c++fs ${COBS_LINK_LIBRARIES})
+    else()
+        set(CMAKE_REQUIRED_FLAGS " -std=c++17")
+        set(CMAKE_REQUIRED_LIBRARIES "stdc++fs")
+        check_cxx_source_runs("
+            #include <iostream>
+            #include <experimental/filesystem>
+            int main() {
+                std::cout << std::experimental::filesystem::temp_directory_path();
+                return 0;
+            }
+        " STDCPPFS)
+        unset(CMAKE_REQUIRED_FLAGS)
+        unset(CMAKE_REQUIRED_LIBRARIES)
+        if(STDCPPFS)
+            set(COBS_LINK_LIBRARIES stdc++fs ${COBS_LINK_LIBRARIES})
+        else()
+            message(FATAL_ERROR "std::experimental::filesystem not found")
+        endif()
+    endif()
+endif()
 
 ### use TLX ###
 
diff --git a/cobs/query/classic_search.cpp b/cobs/query/classic_search.cpp
index 390c4cb..f60908f 100644
--- a/cobs/query/classic_search.cpp
+++ b/cobs/query/classic_search.cpp
@@ -111,12 +111,12 @@ void ClassicSearch::compute_counts(
     const uint8_t* rows, size_t size, size_t buffer_size)
 {
 #if __SSE2__
-    auto expansion_128 = reinterpret_cast<const __m128i_u*>(s_expansion_128);
+    auto expansion_128 = reinterpret_cast<const __m128i*>(s_expansion_128);
 #endif
 
     uint64_t num_hashes = index_file_.num_hashes();
 #if __SSE2__
-    auto counts_128 = reinterpret_cast<__m128i_u*>(scores);
+    auto counts_128 = reinterpret_cast<__m128i*>(scores);
 #else
     auto counts_64 = reinterpret_cast<uint64_t*>(scores);
 #endif
@@ -264,7 +264,7 @@ const uint64_t ClassicSearch::s_expansion[] = {
     281479271677952, 281479271677953, 281479271743488, 281479271743489
 };
 
-const uint16_t ClassicSearch::s_expansion_128[] = {
+alignas(16) const uint16_t ClassicSearch::s_expansion_128[] = {
     0, 0, 0, 0, 0, 0, 0, 0,
     1, 0, 0, 0, 0, 0, 0, 0,
     0, 1, 0, 0, 0, 0, 0, 0,
diff --git a/src/main_performance.cpp b/src/main_performance.cpp
index 6e25c0a..9866323 100644
--- a/src/main_performance.cpp
+++ b/src/main_performance.cpp
@@ -289,7 +289,7 @@ const uint16_t expansion[] = {
     1, 1, 1, 1, 1, 1, 1, 1
 };
 
-const auto expansion_128 = reinterpret_cast<const __m128i_u*>(expansion);
+const auto expansion_128 = reinterpret_cast<const __m128i*>(expansion);
 
 void compute_counts(size_t hashes_size, std::vector<uint16_t>& counts, const uint8_t* rows, size_t row_size) {
     #pragma omp declare reduction (merge : std::vector<uint16_t> : \
@@ -298,7 +298,7 @@ void compute_counts(size_t hashes_size, std::vector<uint16_t>& counts, const uin
     #pragma omp parallel reduction(merge: counts)
     {
         // auto counts_64 = reinterpret_cast<uint64_t*>(counts.data());
-        auto counts_128 = reinterpret_cast<__m128i_u*>(counts.data());
+        auto counts_128 = reinterpret_cast<__m128i*>(counts.data());
         #pragma omp for
         for (uint64_t i = 0; i < hashes_size; i += 1) {
             auto rows_8 = rows + i * row_size;
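
Not part of the patch, but for illustration: a minimal, self-contained sketch of the idea behind the __m128i_u -> __m128i change, namely that once the source table is declared alignas(16), the aligned vector type can be dereferenced instead of the unaligned one. The two-row table expansion_rows, the scores buffer, and row_byte below are hypothetical stand-ins for s_expansion_128, scores, and an index row byte; they are not COBS code. Assumes an x86 target with SSE2 (e.g. g++ -std=c++17 on x86-64).

#include <emmintrin.h>  // SSE2 intrinsics: __m128i, _mm_add_epi16
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for s_expansion_128: one row of eight uint16_t per
// byte value, each entry holding the corresponding bit of that byte.
// alignas(16) guarantees 16-byte alignment, so the aligned __m128i type
// (rather than __m128i_u) may be used to read the rows.
alignas(16) static const uint16_t expansion_rows[2][8] = {
    { 0, 0, 0, 0, 0, 0, 0, 0 },  // byte value 0: no bits set
    { 1, 0, 0, 0, 0, 0, 0, 0 },  // byte value 1: bit 0 set
};

int main() {
    alignas(16) uint16_t scores[8] = { 0 };  // per-document hit counters

    auto expansion_128 = reinterpret_cast<const __m128i*>(expansion_rows);
    auto counts_128 = reinterpret_cast<__m128i*>(scores);

    // Expand one (hypothetical) index row byte into eight counters at once.
    uint8_t row_byte = 1;
    *counts_128 = _mm_add_epi16(*counts_128, expansion_128[row_byte]);

    for (int i = 0; i < 8; ++i)
        std::printf("%u ", static_cast<unsigned>(scores[i]));
    std::printf("\n");  // prints: 1 0 0 0 0 0 0 0
    return 0;
}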