Upgrade to alpaka 1.0
bernhardmgruber committed Aug 31, 2023
1 parent 871b730 commit 6b1b12b
Showing 15 changed files with 46 additions and 49 deletions.
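Every C++ hunk below applies the same alpaka 1.0 migration: the alpaka::Pltf alias and the template form alpaka::getDevByIdx<Pltf>(idx) are gone, and a platform is now a value object that is constructed once and passed explicitly to device queries. A minimal sketch of the new pattern, assuming an alpaka 1.0 installation (alpaka::Platform, alpaka::PlatformCpu, getDevByIdx, and ExampleDefaultAcc are taken from the hunks below; the example-accelerator include path is an assumption):

#include <alpaka/alpaka.hpp>
#include <alpaka/example/ExampleDefaultAcc.hpp> // assumed header for ExampleDefaultAcc

auto main() -> int
{
    using Dim = alpaka::DimInt<1>;
    using Acc = alpaka::ExampleDefaultAcc<Dim, int>; // any enabled accelerator works here

    // alpaka 1.0: platforms are stateful value objects, created once ...
    const auto platformAcc = alpaka::Platform<Acc>{};
    const auto platformHost = alpaka::PlatformCpu{};

    // ... and passed to device queries; alpaka 0.9 wrote
    // alpaka::getDevByIdx<alpaka::Pltf<alpaka::Dev<Acc>>>(0u) instead.
    const auto devAcc = alpaka::getDevByIdx(platformAcc, 0);
    const auto devHost = alpaka::getDevByIdx(platformHost, 0);

    // Queues are still created from the device, unchanged from 0.9.
    auto queue = alpaka::Queue<Acc, alpaka::Blocking>{devAcc};
    return 0;
}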
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -167,7 +167,7 @@ if (LLAMA_BUILD_EXAMPLES)
add_subdirectory("examples/stream")

# alpaka examples
-find_package(alpaka 0.9.0)
+find_package(alpaka 1.0)
if (_alpaka_FOUND)
add_subdirectory("examples/alpaka/nbody")
add_subdirectory("examples/alpaka/vectoradd")
2 changes: 1 addition & 1 deletion docs/pages/install.rst
@@ -41,7 +41,7 @@ Examples
To build all examples of LLAMA, the following additional libraries are needed:

- libfmt 6.2.1 or higher
-- `Alpaka <https://github.com/alpaka-group/alpaka>`_ 0.9.0 or higher
+- `Alpaka <https://github.com/alpaka-group/alpaka>`_ 1.0 or higher
- `xsimd <https://github.com/xtensor-stack/xsimd>`_ 9.0.1 or higher
- `ROOT <https://root.cern/>`_
- `tinyobjloader <https://github.com/tinyobjloader/tinyobjloader>`_ 2.0.0-rc9 or higher
2 changes: 1 addition & 1 deletion examples/alpaka/asyncblur/CMakeLists.txt
@@ -7,7 +7,7 @@ project(llama-alpaka-asyncblur CXX)
if (NOT TARGET llama::llama)
find_package(llama REQUIRED)
endif()
-find_package(alpaka 0.9.0 REQUIRED)
+find_package(alpaka 1.0 REQUIRED)
alpaka_add_executable(${PROJECT_NAME} asyncblur.cpp ../../common/alpakaHelpers.hpp ../../common/Stopwatch.hpp)
target_include_directories(${PROJECT_NAME} SYSTEM PRIVATE ../../../thirdparty/stb/include)
target_link_libraries(${PROJECT_NAME} PRIVATE llama::llama alpaka::alpaka)
17 changes: 6 additions & 11 deletions examples/alpaka/asyncblur/asyncblur.cpp
@@ -136,20 +136,15 @@ struct BlurKernel
auto main(int argc, char** argv) -> int
try
{
// ALPAKA
using Dim = alpaka::DimInt<2>;

using Acc = alpaka::ExampleDefaultAcc<Dim, int>;
// using Acc = alpaka::AccGpuCudaRt<Dim, Size>;
// using Acc = alpaka::AccCpuSerial<Dim, Size>;

using Queue = alpaka::Queue<Acc, std::conditional_t<async, alpaka::NonBlocking, alpaka::Blocking>>;
-using DevHost = alpaka::DevCpu;
-using DevAcc = alpaka::Dev<Acc>;
-using PltfHost = alpaka::Pltf<DevHost>;
-using PltfAcc = alpaka::Pltf<DevAcc>;
-const DevAcc devAcc = alpaka::getDevByIdx<PltfAcc>(0);
-const DevHost devHost = alpaka::getDevByIdx<PltfHost>(0);

+const auto platformAcc = alpaka::Platform<Acc>{};
+const auto platformHost = alpaka::PlatformCpu{};
+const auto devAcc = alpaka::getDevByIdx(platformAcc, 0);
+const auto devHost = alpaka::getDevByIdx(platformHost, 0);

std::vector<Queue> queue;
queue.reserve(chunkCount);
for(std::size_t i = 0; i < chunkCount; ++i)
10 changes: 6 additions & 4 deletions examples/alpaka/babelstream/AlpakaStream.cpp
@@ -61,8 +61,8 @@ template<typename T>
AlpakaStream<T>::AlpakaStream(Idx arraySize, Idx deviceIndex)
: mapping({arraySize})
, arraySize(arraySize)
-, devHost(alpaka::getDevByIdx<DevHost>(0u))
-, devAcc(alpaka::getDevByIdx<Acc>(deviceIndex))
+, devHost(alpaka::getDevByIdx(platformHost, 0))
+, devAcc(alpaka::getDevByIdx(platformAcc, deviceIndex))
, sums(alpaka::allocBuf<T, Idx>(devHost, dotBlockSize))
, d_a(alpaka::allocBuf<T, Idx>(devAcc, arraySize))
, d_b(alpaka::allocBuf<T, Idx>(devAcc, arraySize))
@@ -305,15 +305,17 @@ auto AlpakaStream<T>::dot() -> T

void listDevices()
{
-const auto count = alpaka::getDevCount<Acc>();
+const auto platform = alpaka::Platform<Acc>{};
+const auto count = alpaka::getDevCount(platform);
std::cout << "Devices:" << std::endl;
for(int i = 0; i < count; i++)
std::cout << i << ": " << getDeviceName(i) << std::endl;
}

auto getDeviceName(int deviceIndex) -> std::string
{
-return alpaka::getName(alpaka::getDevByIdx<Acc>(deviceIndex));
+const auto platform = alpaka::Platform<Acc>{};
+return alpaka::getName(alpaka::getDevByIdx(platform, deviceIndex));
}

auto getDeviceDriver([[maybe_unused]] int device) -> std::string
6 changes: 5 additions & 1 deletion examples/alpaka/babelstream/AlpakaStream.h
@@ -39,7 +39,9 @@ struct AlpakaStream : Stream<T>
void init_arrays(T initA, T initB, T initC) override;
void read_arrays(std::vector<T>& a, std::vector<T>& b, std::vector<T>& c) override;

-using DevHost = alpaka::DevCpu;
+using PlatformHost = alpaka::PlatformCpu;
+using DevHost = alpaka::Dev<PlatformHost>;
+using PlatformAcc = alpaka::Platform<Acc>;
using DevAcc = alpaka::Dev<Acc>;
using BufHost = alpaka::Buf<alpaka::DevCpu, T, Dim, Idx>;
using BufAcc = alpaka::Buf<Acc, T, Dim, Idx>;
@@ -50,7 +52,9 @@ struct AlpakaStream : Stream<T>
private:
llama::mapping::AoS<llama::ArrayExtents<Idx, llama::dyn>, T> mapping;
Idx arraySize;
+PlatformHost platformHost;
DevHost devHost;
+PlatformAcc platformAcc;
DevAcc devAcc;
BufHost sums;
BufAcc d_a;
2 changes: 1 addition & 1 deletion examples/alpaka/babelstream/CMakeLists.txt
@@ -7,7 +7,7 @@ project(llama-alpaka-babelstream CXX)
if (NOT TARGET llama::llama)
find_package(llama REQUIRED)
endif()
-find_package(alpaka 0.9.0 REQUIRED)
+find_package(alpaka 1.0 REQUIRED)
alpaka_add_executable(${PROJECT_NAME} main.cpp Stream.h AlpakaStream.cpp AlpakaStream.h)
target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_17)
target_compile_definitions(${PROJECT_NAME} PUBLIC ALPAKA)
2 changes: 1 addition & 1 deletion examples/alpaka/daxpy/CMakeLists.txt
@@ -8,7 +8,7 @@ find_package(OpenMP REQUIRED)
if (NOT TARGET llama::llama)
find_package(llama REQUIRED)
endif()
-find_package(alpaka 0.9.0 REQUIRED)
+find_package(alpaka 1.0 REQUIRED)
alpaka_add_executable(${PROJECT_NAME} daxpy.cpp ../../common/Stopwatch.hpp ../../common/hostname.hpp)
target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_17)
target_link_libraries(${PROJECT_NAME} PRIVATE llama::llama OpenMP::OpenMP_CXX alpaka::alpaka)
10 changes: 5 additions & 5 deletions examples/alpaka/daxpy/daxpy.cpp
@@ -75,11 +75,11 @@ void daxpyAlpakaLlama(std::string mappingName, std::ofstream& plotFile, Mapping
using Dim = alpaka::DimInt<1>;
using Size = std::size_t;
using Acc = alpaka::ExampleDefaultAcc<Dim, Size>;
-using Dev = alpaka::Dev<Acc>;
-using Queue = alpaka::Queue<Dev, alpaka::Blocking>;
-const auto devAcc = alpaka::getDevByIdx<alpaka::Pltf<Dev>>(0u);
-const auto devHost = alpaka::getDevByIdx<alpaka::PltfCpu>(0u);
-auto queue = Queue(devAcc);
+const auto platformAcc = alpaka::Platform<Acc>{};
+const auto platformHost = alpaka::PlatformCpu{};
+const auto devAcc = alpaka::getDevByIdx(platformAcc, 0);
+const auto devHost = alpaka::getDevByIdx(platformHost, 0);
+auto queue = alpaka::Queue<Acc, alpaka::Blocking>(devAcc);

Stopwatch watch;
auto x = llama::allocViewUninitialized(mapping);
2 changes: 1 addition & 1 deletion examples/alpaka/nbody/CMakeLists.txt
@@ -13,7 +13,7 @@ endif()
if (NOT TARGET llama::llama)
find_package(llama REQUIRED)
endif()
-find_package(alpaka 0.9.0 REQUIRED)
+find_package(alpaka 1.0 REQUIRED)
if (alpaka_ACC_GPU_CUDA_ENABLE AND (CMAKE_CUDA_COMPILER_ID STREQUAL "NVIDIA") AND
(CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 11.6))
# nvcc <= 11.5 chokes on `pis(tag::Pos{}, tag::X{})` inside `pPInteraction()` and segfaults
13 changes: 5 additions & 8 deletions examples/alpaka/nbody/nbody.cpp
@@ -218,11 +218,6 @@ void run(std::ostream& plotFile)
using Dim = alpaka::DimInt<1>;
using Size = int;
using Acc = AccTemplate<Dim, Size>;
-using DevHost = alpaka::DevCpu;
-using DevAcc = alpaka::Dev<Acc>;
-using PltfHost = alpaka::Pltf<DevHost>;
-using PltfAcc = alpaka::Pltf<DevAcc>;
-using Queue = alpaka::Queue<DevAcc, alpaka::Blocking>;

auto mappingName = [](int m) -> std::string
{
@@ -239,9 +234,11 @@
const auto title = "GM " + mappingName(MappingGM) + " SM " + mappingName(MappingSM);
std::cout << '\n' << title << '\n';

-const DevAcc devAcc(alpaka::getDevByIdx<PltfAcc>(0u));
-const DevHost devHost(alpaka::getDevByIdx<PltfHost>(0u));
-Queue queue(devAcc);
+const auto platformAcc = alpaka::Platform<Acc>{};
+const auto platformHost = alpaka::PlatformCpu{};
+const auto devAcc = alpaka::getDevByIdx(platformAcc, 0);
+const auto devHost = alpaka::getDevByIdx(platformHost, 0);
+auto queue = alpaka::Queue<Acc, alpaka::Blocking>{devAcc};

auto mapping = []
{
2 changes: 1 addition & 1 deletion examples/alpaka/pic/CMakeLists.txt
@@ -9,7 +9,7 @@ if (NOT TARGET llama::llama)
endif()
find_package(fmt REQUIRED)
find_package(OpenMP REQUIRED)
-find_package(alpaka 0.9.0 REQUIRED)
+find_package(alpaka 1.0 REQUIRED)
alpaka_add_executable(${PROJECT_NAME} pic.cpp)
target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_17)
target_link_libraries(${PROJECT_NAME} PRIVATE llama::llama fmt::fmt alpaka::alpaka OpenMP::OpenMP_CXX)
15 changes: 6 additions & 9 deletions examples/alpaka/pic/pic.cpp
@@ -781,14 +781,11 @@ template<int FieldMapping, int ParticleMapping>
void run(std::ostream& plotFile)
{
using Acc = alpaka::ExampleDefaultAcc<Dim, Size>;
-using DevHost = alpaka::DevCpu;
-using DevAcc = alpaka::Dev<Acc>;
-using PltfHost = alpaka::Pltf<DevHost>;
-using PltfAcc = alpaka::Pltf<DevAcc>;
-using Queue = alpaka::Queue<DevAcc, alpaka::Blocking>;
-const DevAcc devAcc(alpaka::getDevByIdx<PltfAcc>(0u));
-const DevHost devHost(alpaka::getDevByIdx<PltfHost>(0u));
-Queue queue(devAcc);
+const auto platformAcc = alpaka::Platform<Acc>{};
+const auto platformHost = alpaka::PlatformCpu{};
+const auto devAcc = alpaka::getDevByIdx(platformAcc, 0);
+const auto devHost = alpaka::getDevByIdx(platformHost, 0);
+auto queue = alpaka::Queue<Acc, alpaka::Blocking>(devAcc);

fmt::print("Particle mapping: {}\n", particleMappingName(ParticleMapping));
fmt::print("Field mapping: {}\n", fieldMappingName(FieldMapping));
@@ -885,7 +882,7 @@ try
affinity = affinity == nullptr ? "NONE - PLEASE PIN YOUR THREADS!" : affinity;

using Acc = alpaka::ExampleDefaultAcc<Dim, Size>;
-auto accName = alpaka::getName(alpaka::getDevByIdx<alpaka::Pltf<alpaka::Dev<Acc>>>(0u));
+auto accName = alpaka::getName(alpaka::getDevByIdx(alpaka::Platform<Acc>{}, 0u));
while(static_cast<bool>(std::isspace(accName.back())))
accName.pop_back();
fmt::print(
2 changes: 1 addition & 1 deletion examples/alpaka/vectoradd/CMakeLists.txt
@@ -7,6 +7,6 @@ project(llama-alpaka-vectoradd CXX)
if (NOT TARGET llama::llama)
find_package(llama REQUIRED)
endif()
-find_package(alpaka 0.9.0 REQUIRED)
+find_package(alpaka 1.0 REQUIRED)
alpaka_add_executable(${PROJECT_NAME} vectoradd.cpp ../../common/alpakaHelpers.hpp ../../common/Stopwatch.hpp)
target_link_libraries(${PROJECT_NAME} PRIVATE llama::llama alpaka::alpaka)
8 changes: 5 additions & 3 deletions examples/alpaka/vectoradd/vectoradd.cpp
@@ -93,9 +93,11 @@ try
using Dim = alpaka::DimInt<1>;

using Acc = alpaka::ExampleDefaultAcc<Dim, Size>;
-const auto devAcc = alpaka::getDevByIdx<Acc>(0);
-const auto devHost = alpaka::getDevByIdx<alpaka::DevCpu>(0);
-auto queue = alpaka::Queue<Acc, alpaka::Blocking>(devAcc);
+const auto platformAcc = alpaka::Platform<Acc>{};
+const auto platformHost = alpaka::PlatformCpu{};
+const auto devAcc = alpaka::getDevByIdx(platformAcc, 0);
+const auto devHost = alpaka::getDevByIdx(platformHost, 0);
+auto queue = alpaka::Queue<Acc, alpaka::Blocking>{devAcc};

// LLAMA
const auto mapping = [&]
