update trilinos container tags #32

Merged (3 commits) on Dec 13, 2024
7 changes: 3 additions & 4 deletions .github/workflows/ci-trilinos.yml
@@ -22,9 +22,8 @@ jobs:
image:
- ubuntu-gnu-trilinos-11
tag:
-   - ef73d14
-   - 702aac5
-   - trilinos-release-14-4-0
+   - 0dc4553
+   - 5bbda25
build_type:
- Release
- Debug
@@ -67,7 +66,7 @@ jobs:
-D EIGEN_INCLUDE_DIR=/eigen/eigen-${{ env.eigen_version }} \
-D Trilinos_DIR=${{ env.trilinos_dir }}/lib/cmake/Trilinos \
-D CMAKE_INSTALL_PREFIX:PATH=../install \
-  -D CMAKE_CXX_FLAGS='-Wall -Werror'
+  -D CMAKE_CXX_FLAGS='-Wall'

Collaborator (author) commented on this change:

  This is necessary because Trilinos has deprecated Epetra, resulting in a bunch of warnings:

    #warning "The Epetra package is deprecated"

- name: Build
run: cmake --build builddir -j $num_cpus --target install
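
To make the failure concrete: under -Wall -Werror, the deprecation #warning emitted by the Epetra headers is promoted to a hard error, which is what broke CI. A minimal reproduction (illustrative, not from the repo):

// any translation unit including a deprecated Epetra header behaves like
// this one: with -Wall -Werror the #warning becomes a compile error
#warning "The Epetra package is deprecated"

int main() { return 0; }

If one wanted to keep -Werror, an alternative would be to demote only this diagnostic (for GCC, -Wno-error=cpp covers #warning); the PR instead drops -Werror.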
4 changes: 2 additions & 2 deletions include/pressio/type_traits/native_epetra_vector.hpp
@@ -49,7 +49,7 @@
#ifndef PRESSIOOPS_TYPE_TRAITS_NATIVE_EPETRA_VECTOR_HPP_
#define PRESSIOOPS_TYPE_TRAITS_NATIVE_EPETRA_VECTOR_HPP_

-#ifdef PRESSIO_ENABLE_TPL_EPETRA
+#ifdef PRESSIO_ENABLE_EPETRA
Collaborator (author) commented on this change:

  This is the actual fix for the CI failures.

#include "Epetra_Vector.h"
#include "Epetra_MultiVector.h"
#endif
@@ -59,7 +59,7 @@ namespace pressio{
template <typename T, typename enable = void>
struct is_vector_epetra : std::false_type {};

-#ifdef PRESSIO_ENABLE_TPL_EPETRA
+#ifdef PRESSIO_ENABLE_EPETRA
template <typename T>
struct is_vector_epetra<T,
typename
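
For reference, the specialization continues past the context shown above. A minimal sketch of how such a detection trait is typically completed (the enable_if body below is an assumption for illustration, not the repository's exact code):

#ifdef PRESSIO_ENABLE_EPETRA
// hypothetical completion: true only when T is (cv-stripped) Epetra_Vector
template <typename T>
struct is_vector_epetra<
  T,
  typename std::enable_if<
    std::is_same<typename std::remove_cv<T>::type, Epetra_Vector>::value
  >::type
  > : std::true_type {};
#endif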
6 changes: 6 additions & 0 deletions tests/cmake/options.cmake
@@ -5,6 +5,7 @@ add_definitions(-DPRESSIO_ENABLE_CXX17)

option(PRESSIO_ENABLE_TPL_EIGEN "Enable Eigen TPL" OFF)
option(PRESSIO_ENABLE_TPL_TRILINOS "Enable Trilinos TPL" OFF)
+option(PRESSIO_ENABLE_EPETRA "Enable Epetra" OFF)
option(PRESSIO_ENABLE_TPL_KOKKOS "Enable Kokkos TPL" OFF)
option(PRESSIO_ENABLE_TPL_MPI "Enable MPI" OFF)

@@ -33,6 +34,11 @@ if(PRESSIO_ENABLE_TPL_TRILINOS)
message(">> PRESSIO_ENABLE_TPL_TRILINOS=ON ==> enabling also BLAS, LAPACK, MPI, KOKKOS")
add_definitions(-DPRESSIO_ENABLE_TPL_TRILINOS)

+  if(PRESSIO_ENABLE_EPETRA)
+    message("Enabling Epetra since PRESSIO_ENABLE_TPL_TRILINOS=${PRESSIO_ENABLE_TPL_TRILINOS} and PRESSIO_ENABLE_EPETRA=${PRESSIO_ENABLE_EPETRA}.")
+    add_definitions(-DPRESSIO_ENABLE_EPETRA)
+  endif()
+
set(PRESSIO_ENABLE_TPL_KOKKOS ON)
set(PRESSIO_ENABLE_TPL_MPI ON)

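Since the -DPRESSIO_ENABLE_EPETRA compile definition is added only inside the PRESSIO_ENABLE_TPL_TRILINOS branch, the macro exists only when both options are ON. A small illustrative sketch of what that guarantees on the consumer side (not from the repo):

// compiled only when CMake was configured with both
// -DPRESSIO_ENABLE_TPL_TRILINOS=ON and -DPRESSIO_ENABLE_EPETRA=ON
#ifdef PRESSIO_ENABLE_EPETRA
#include "Epetra_Vector.h"   // Trilinos is guaranteed to be enabled here
#endif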
65 changes: 40 additions & 25 deletions tests/ops/CMakeLists.txt
@@ -29,40 +29,55 @@ if(PRESSIO_ENABLE_TPL_KOKKOS)
endif()

if(PRESSIO_ENABLE_TPL_TRILINOS)
-# TPETRA
-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_vector.cc)
-add_utest_mpi(ops_vector_tpetra gTestMain_tpetra 3 "${SRC1}")
+  # TPETRA
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_vector.cc)
+  add_utest_mpi(ops_vector_tpetra gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_multi_vector.cc)
-add_utest_mpi(ops_multi_vector_tpetra gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_multi_vector.cc)
+  add_utest_mpi(ops_multi_vector_tpetra gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_level2.cc)
-add_utest_mpi(ops_level2_tpetra gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_level2.cc)
+  add_utest_mpi(ops_level2_tpetra gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_level3.cc)
-add_utest_mpi(ops_level3_tpetra gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_level3.cc)
+  add_utest_mpi(ops_level3_tpetra gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_column_expr.cc)
-add_utest_mpi(ops_column_expr_tpetra gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_column_expr.cc)
+  add_utest_mpi(ops_column_expr_tpetra gTestMain_tpetra 3 "${SRC1}")


-# TPETRA BLOCK
-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_vector.cc)
-add_utest_mpi(ops_vector_tpetra_block gTestMain_tpetra 3 "${SRC1}")
+  # TPETRA BLOCK
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_vector.cc)
+  add_utest_mpi(ops_vector_tpetra_block gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_column_expr.cc)
-add_utest_mpi(ops_column_expr_tpetra_block gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_column_expr.cc)
+  add_utest_mpi(ops_column_expr_tpetra_block gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_multi_vector.cc)
-add_utest_mpi(ops_multi_vector_tpetra_block gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_multi_vector.cc)
+  add_utest_mpi(ops_multi_vector_tpetra_block gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_level2.cc)
-add_utest_mpi(ops_level2_tpetra_block gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_level2.cc)
+  add_utest_mpi(ops_level2_tpetra_block gTestMain_tpetra 3 "${SRC1}")

-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_level3.cc)
-add_utest_mpi(ops_level3_tpetra_block gTestMain_tpetra 3 "${SRC1}")
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_tpetra_block_level3.cc)
+  add_utest_mpi(ops_level3_tpetra_block gTestMain_tpetra 3 "${SRC1}")

-# TEUCHOS
-set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_teuchos_vector.cc)
-add_serial_utest(ops_vector_teuchos "${SRC1}")
+  # TEUCHOS
+  set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_teuchos_vector.cc)
+  add_serial_utest(ops_vector_teuchos "${SRC1}")

+  # EPETRA
+  if(PRESSIO_ENABLE_EPETRA)
+    set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_epetra_vector.cc)
+    add_utest_mpi(ops_vector_epetra gTestMain_mpi 3 "${SRC1}")
+
+    set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_epetra_multi_vector.cc)
+    add_utest_mpi(ops_multi_vector_epetra gTestMain_mpi 3 "${SRC1}")
+
+    set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_epetra_level2.cc)
+    add_utest_mpi(ops_level2_epetra gTestMain_mpi 3 "${SRC1}")
+
+    set(SRC1 ${CMAKE_CURRENT_SOURCE_DIR}/ops_epetra_level3.cc)
+    add_utest_mpi(ops_level3_epetra gTestMain_mpi 3 "${SRC1}")
+  endif()
endif()
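
The new Epetra tests link against gTestMain_mpi rather than gTestMain_tpetra, since Epetra needs only plain MPI, not a Tpetra/Kokkos environment. A plausible shape for such a test main, assuming the usual pattern (the repo's actual helper may differ):

#include <gtest/gtest.h>
#include <mpi.h>

// initialize MPI before the suite and finalize after, so fixtures can
// build an Epetra_MpiComm on MPI_COMM_WORLD across the 3 launched ranks
int main(int argc, char **argv)
{
  MPI_Init(&argc, &argv);
  ::testing::InitGoogleTest(&argc, argv);
  const int ret = RUN_ALL_TESTS();
  MPI_Finalize();
  return ret;
}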
123 changes: 123 additions & 0 deletions tests/ops/fixtures/epetra_only_fixtures.hpp
@@ -0,0 +1,123 @@

#ifndef CONTAINERS_FIXTURES_EPETRA_ONLY_FIXTURES_HPP_
#define CONTAINERS_FIXTURES_EPETRA_ONLY_FIXTURES_HPP_

#include <gtest/gtest.h>
#include "Epetra_MpiComm.h"
#include "Epetra_Vector.h"
#include "Epetra_MultiVector.h"
#include "Epetra_Import.h"

struct epetraVectorGlobSize15Fixture
: public ::testing::Test{

public:
std::shared_ptr<Epetra_MpiComm> comm_;
int rank_;
int numProc_;
const int localSize_ = 5;
int numGlobalEntries_;
std::shared_ptr<Epetra_Map> contigMap_;
std::shared_ptr<Epetra_Vector> myVector_;

virtual void SetUp(){
MPI_Comm_rank(MPI_COMM_WORLD, &rank_);
comm_ = std::make_shared<Epetra_MpiComm>(MPI_COMM_WORLD);
rank_ = comm_->MyPID();
numProc_ = comm_->NumProc();
EXPECT_EQ(numProc_,3);

numGlobalEntries_ = numProc_ * localSize_;
contigMap_ = std::make_shared<Epetra_Map>(numGlobalEntries_, 0, *comm_);
myVector_ = std::make_shared<Epetra_Vector>(*contigMap_);
}

virtual void TearDown(){}
};
//-----------------------------------------------------------


struct epetraMultiVectorGlobSize15Fixture
: public ::testing::Test{

public:
std::shared_ptr<Epetra_MpiComm> comm_;
int rank_;
int numProc_;
const int numVecs_ = 4;
const int localSize_ = 5;
int numGlobalEntries_;
std::shared_ptr<Epetra_Map> contigMap_;
std::shared_ptr<Epetra_Map> map_to_all_;
std::shared_ptr<Epetra_Import> importer_;
std::shared_ptr<Epetra_MultiVector> myMv_;
std::shared_ptr<Epetra_Vector> x_epetra;
std::shared_ptr<Epetra_Vector> y_epetra;

virtual void SetUp(){
MPI_Comm_rank(MPI_COMM_WORLD, &rank_);
comm_ = std::make_shared<Epetra_MpiComm>(MPI_COMM_WORLD);
rank_ = comm_->MyPID();
numProc_ = comm_->NumProc();
EXPECT_EQ(numProc_,3);

numGlobalEntries_ = numProc_ * localSize_;
contigMap_ = std::make_shared<Epetra_Map>(numGlobalEntries_, 0, *comm_);
// Note: this importer sends whole object to all ranks
map_to_all_ = std::make_shared<Epetra_Map>(numGlobalEntries_, numGlobalEntries_, 0, *comm_);
importer_ = std::make_shared<Epetra_Import>(*map_to_all_, *contigMap_);

myMv_ = std::make_shared<Epetra_MultiVector>(*contigMap_, numVecs_);
myMv_->PutScalar(1.);
for (int i = 0; i < localSize_; ++i) {
for (int j = 0; j < numVecs_; ++j) {
// generate rank-unique int values
(*myMv_)[j][i] = (double)((rank_ * localSize_ + i) * numVecs_ + j + 1.);
}
}
x_epetra = std::make_shared<Epetra_Vector>(*contigMap_);
for (int j = 0; j < localSize_; ++j) {
// generate rank-unique int values
(*x_epetra)[j] = (double)(rank_ * localSize_ + j + 1.);
}
y_epetra = std::make_shared<Epetra_Vector>(*contigMap_);
y_epetra->PutScalar(3.);
}

virtual void TearDown(){}
};
//-----------------------------------------------------------



struct epetraMultiVectorR9C4VecS9Fixture
: public ::testing::Test{

public:
int rank_;
std::shared_ptr<Epetra_MpiComm> comm_;
int numProc_;
const int localSize_ = 3;
const int numVectors_ = 4;
int numGlobalEntries_;
std::shared_ptr<Epetra_Map> dataMap_;
std::shared_ptr<Epetra_MultiVector> myMv_;
std::shared_ptr<Epetra_Vector> myVector_;

virtual void SetUp(){
MPI_Comm_rank(MPI_COMM_WORLD, &rank_);
comm_ = std::make_shared<Epetra_MpiComm>(MPI_COMM_WORLD);
rank_ = comm_->MyPID();
numProc_ = comm_->NumProc();
EXPECT_EQ(numProc_,3);

numGlobalEntries_ = numProc_ * localSize_;
dataMap_ = std::make_shared<Epetra_Map>(numGlobalEntries_, 0, *comm_);
myMv_ = std::make_shared<Epetra_MultiVector>(*dataMap_, numVectors_);
myVector_ = std::make_shared<Epetra_Vector>(*dataMap_);
}

virtual void TearDown(){}
};

#endif /* CONTAINERS_FIXTURES_EPETRA_ONLY_FIXTURES_HPP_ */
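
To show how these fixtures are consumed, here is an illustrative test (not part of the PR) against the first fixture; add_utest_mpi launches it on 3 ranks, which is what the EXPECT_EQ(numProc_, 3) guard assumes:

#include "epetra_only_fixtures.hpp"

// hypothetical example: norm of a constant vector distributed over 3 ranks
TEST_F(epetraVectorGlobSize15Fixture, norm1OfConstantVector)
{
  myVector_->PutScalar(2.0);
  double nrm = 0.0;
  myVector_->Norm1(&nrm);  // global sum of |x_i| over all 15 entries
  EXPECT_DOUBLE_EQ(nrm, 2.0 * numGlobalEntries_);
}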