43 #include "Teuchos_Assert.hpp" 52 #ifdef HAVE_TEUCHOSCORE_KOKKOSCORE 53 # include "Kokkos_Core.hpp" 60 bool GlobalMPISession::haveMPIState_ =
false;
61 bool GlobalMPISession::mpiIsFinalized_ =
false;
62 int GlobalMPISession::rank_ = 0 ;
63 int GlobalMPISession::nProc_ = 1 ;
GlobalMPISession::GlobalMPISession( int* argc, char*** argv, std::ostream *out )
{
  std::ostringstream oss;
  // Collect all startup output in oss first so that parallel output to *out
  // is not jumbled between processors.

#ifdef HAVE_MPI

  int mpierr = 0;

  // Make sure MPI has not already been initialized.
  int mpiHasBeenStarted = 0;
  MPI_Initialized(&mpiHasBeenStarted);
  if (mpiHasBeenStarted) {
    if (out) {
      *out << "GlobalMPISession(): Error, MPI_Initialized() returned true,"
           << " calling std::terminate()!\n" << std::flush;
    }
    std::terminate();
  }

  // Initialize MPI.
  mpierr = ::MPI_Init(argc, (char ***) argv);
  if (mpierr != 0) {
    if (out) {
      *out << "GlobalMPISession(): Error, MPI_Init() returned error code="
           << mpierr << "!=0, calling std::terminate()!\n" << std::flush;
    }
    std::terminate();
  }

  initialize(out); // Get rank_ and nProc_ for MPI_COMM_WORLD.

  int nameLen = 0;
  char procName[MPI_MAX_PROCESSOR_NAME];
  mpierr = ::MPI_Get_processor_name(procName, &nameLen);
  if (mpierr != 0) {
    if (out) {
      *out << "GlobalMPISession(): Error, MPI_Get_processor_name() error code="
           << mpierr << "!=0, calling std::terminate()!\n" << std::flush;
    }
    std::terminate();
  }

  oss << "Teuchos::GlobalMPISession::GlobalMPISession(): started processor with name "
      << procName << " and rank " << rank_ << "!" << std::endl;
#else

  oss << "Teuchos::GlobalMPISession::GlobalMPISession(): started serial run"
      << std::endl;

#endif // HAVE_MPI

#ifndef TEUCHOS_SUPPRESS_PROC_STARTUP_BANNER

  // See if the user wants to suppress the startup banner.
  bool printStartupBanner = true;
  const std::string suppress_option("--teuchos-suppress-startup-banner");
  for ( int opt_i = 0; opt_i < *argc; ++opt_i ) {
    if ( suppress_option == (*argv)[opt_i] ) {
      // Suppress the banner and strip the option from the command line so
      // that later command-line processing never sees it.
      printStartupBanner = false;
      for ( int i = opt_i; i < *argc; ++i )
        (*argv)[i] = (*argv)[i+1];
      --*argc;
    }
  }
  if (out && printStartupBanner) {
    *out << oss.str() << std::flush;
  }

#endif // TEUCHOS_SUPPRESS_PROC_STARTUP_BANNER

}
GlobalMPISession::~GlobalMPISession()
{
#ifdef HAVE_TEUCHOSCORE_KOKKOSCORE
  try {
    Kokkos::finalize_all();
  }
  catch (const std::runtime_error& e) {
    std::cerr << "Kokkos::finalize_all failed:\n" << e.what() << "\n";
  }
#endif
  haveMPIState_ = false;
#ifdef HAVE_MPI
  const int mpierr = ::MPI_Finalize();
  mpiIsFinalized_ = (mpierr == 0);
  if (mpierr != 0)
    std::cerr << "Error code " << mpierr << " returned from MPI_Finalize()\n";
#else
  mpiIsFinalized_ = true;
#endif
}
bool GlobalMPISession::mpiIsInitialized() {
  justInTimeInitialize();
  return haveMPIState_;
}

bool GlobalMPISession::mpiIsFinalized() {
  return mpiIsFinalized_;
}

int GlobalMPISession::getRank() {
  justInTimeInitialize();
  return rank_;
}

int GlobalMPISession::getNProc() {
  justInTimeInitialize();
  return nProc_;
}
void GlobalMPISession::barrier() {
  justInTimeInitialize();
#ifdef HAVE_MPI
  MPI_Barrier(MPI_COMM_WORLD);
#endif
}

int GlobalMPISession::sum(int localVal) {
  justInTimeInitialize();
#ifdef HAVE_MPI
  int globalSum = -1;
  MPI_Allreduce(&localVal, &globalSum, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
  return globalSum;
#else
  return localVal;
#endif
}
void GlobalMPISession::allGather(int localVal, const ArrayView<int> &allVals) {
  justInTimeInitialize();
  TEUCHOS_ASSERT_EQUALITY(allVals.size(), getNProc());
#ifdef HAVE_MPI
  MPI_Allgather( &localVal, 1, MPI_INT, allVals.getRawPtr(), 1, MPI_INT,
    MPI_COMM_WORLD );
#else
  allVals[0] = localVal;
#endif
}
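A minimal usage sketch of the collective helpers above (the function and variable names below are illustrative and not part of this file; it assumes a Teuchos::Array<int> is used to size the gather buffer):

// Sketch only: sums the ranks and gathers every rank on every process.
#include "Teuchos_GlobalMPISession.hpp"
#include "Teuchos_Array.hpp"

void exampleCollectives()  // hypothetical helper, not part of Teuchos
{
  const int myRank = Teuchos::GlobalMPISession::getRank();

  // Global sum over MPI_COMM_WORLD (just returns myRank in a serial build).
  const int rankSum = Teuchos::GlobalMPISession::sum(myRank);

  // allGather() asserts that the output view has exactly getNProc() entries.
  Teuchos::Array<int> allRanks(Teuchos::GlobalMPISession::getNProc());
  Teuchos::GlobalMPISession::allGather(myRank, allRanks());

  Teuchos::GlobalMPISession::barrier();
  (void)rankSum;
}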
// private

void GlobalMPISession::initialize( std::ostream *out )
{
#ifdef HAVE_MPI

  if (mpiIsFinalized_) {
    // MPI has already been finalized, so we are effectively serial again.
    rank_ = 0;
    nProc_ = 1;
    return;
  }

  if (haveMPIState_)
    return; // We already have the state of MPI_COMM_WORLD.

  // The constructor was not called, but MPI may have been initialized some
  // other way; if so, read off the state of MPI_COMM_WORLD here.  Errors are
  // reported to *out rather than thrown, since this code also runs inside
  // catch blocks.
  int mpiHasBeenStarted = 0;
  MPI_Initialized(&mpiHasBeenStarted);

  if (!mpiHasBeenStarted)
    return; // Give up and leave rank_ and nProc_ at their serial defaults.

  int mpierr = 0;

  mpierr = ::MPI_Comm_rank( MPI_COMM_WORLD, &rank_ );
  if (mpierr != 0) {
    *out << "Error code=" << mpierr << " detected in MPI_Comm_rank()"
         << std::endl;
  }

  mpierr = ::MPI_Comm_size( MPI_COMM_WORLD, &nProc_ );
  if (mpierr != 0) {
    *out << "Error code=" << mpierr << " detected in MPI_Comm_size()"
         << std::endl;
  }

  haveMPIState_ = true;
  mpiIsFinalized_ = false;

#endif // HAVE_MPI
}
void GlobalMPISession::justInTimeInitialize()
{
  if (!haveMPIState_)
    initialize(&std::cerr);
}

} // namespace Teuchos
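As a usage sketch (the driver below is illustrative and not part of this file), a program normally creates a single GlobalMPISession at the top of main() so that MPI_Init() and MPI_Finalize() are tied to the object's lifetime, and so that the constructor can strip --teuchos-suppress-startup-banner before any other option parsing runs:

// Hypothetical driver program; names here are for illustration only.
#include "Teuchos_GlobalMPISession.hpp"
#include <iostream>

int main( int argc, char* argv[] )
{
  // MPI_Init() is called here (if MPI is enabled); MPI_Finalize() is called
  // when mpiSession goes out of scope at the end of main().
  Teuchos::GlobalMPISession mpiSession(&argc, &argv, &std::cout);

  std::cout << "Hello from process " << Teuchos::GlobalMPISession::getRank()
            << " of " << Teuchos::GlobalMPISession::getNProc() << "\n";

  return 0;
}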