Commit 3337a287 authored by Jakub Klinkovský

Replaced MPI_Comm with MPI::Comm in distributed data structures

parent 98fcaffd
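For context, the MPI::Comm type introduced by this commit is TNL's C++ wrapper around a raw MPI_Comm handle. The diff only relies on a few operations: construction from a raw handle (e.g. MPI_COMM_NULL), comparison against a raw handle, and the rank() and size() member functions that replace the free functions MPI::GetRank and MPI::GetSize. A minimal sketch of a wrapper with that interface could look as follows; this is an illustrative assumption, not the actual TNL class, which also deals with communicator ownership.

#include <mpi.h>

namespace MPI {

// Hypothetical sketch of a communicator wrapper exposing only the operations
// used in this commit; the real MPI::Comm in TNL is more elaborate.
class Comm
{
public:
   // implicit construction from a raw handle, so `MPI::Comm c = MPI_COMM_NULL;` works
   Comm( MPI_Comm comm = MPI_COMM_NULL ) : comm( comm ) {}

   // implicit conversion back to the raw handle, so `comm != MPI_COMM_NULL` still compiles
   operator MPI_Comm() const
   {
      return comm;
   }

   // replacement for MPI::GetRank( communicator )
   int rank() const
   {
      int r = 0;
      MPI_Comm_rank( comm, &r );
      return r;
   }

   // replacement for MPI::GetSize( communicator )
   int size() const
   {
      int s = 0;
      MPI_Comm_size( comm, &s );
      return s;
   }

private:
   MPI_Comm comm = MPI_COMM_NULL;
};

} // namespace MPI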
+3 −3
@@ -34,7 +34,7 @@ struct DistributedScan
       using ValueType = typename OutputDistributedArray::ValueType;
       using DeviceType = typename OutputDistributedArray::DeviceType;

-      const auto communicator = input.getCommunicator();
+      const auto& communicator = input.getCommunicator();
       if( communicator != MPI_COMM_NULL ) {
          // adjust begin and end for the local range
          const auto localRange = input.getLocalRange();
@@ -49,7 +49,7 @@ struct DistributedScan
          const ValueType local_result = block_results.getElement( block_results.getSize() - 1 );

          // exchange local results between ranks
-         const int nproc = MPI::GetSize( communicator );
+         const int nproc = communicator.size();
          std::unique_ptr< ValueType[] > dataForScatter{ new ValueType[ nproc ] };
          for( int i = 0; i < nproc; i++ )
             dataForScatter[ i ] = local_result;
@@ -62,7 +62,7 @@ struct DistributedScan
             rank_results, rank_results, 0, nproc, 0, reduction, identity );

          // perform the second phase, using the per-block and per-rank results
-         const int rank = MPI::GetRank( communicator );
+         const int rank = communicator.rank();
          Scan< DeviceType, Type, PhaseType >::performSecondPhase(
             inputLocalView, outputLocalView, block_results, begin, end, begin, reduction, identity, rank_results[ rank ] );
       }
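The code around these hunks performs the distributed prefix sum in two phases: a local scan on each rank, an exchange of per-rank totals, and a second phase that shifts the local results. A rough plain-MPI sketch of the same idea is shown below; it is illustrative only (hypothetical function name, double data), whereas the TNL code above is generic and scatters the per-rank results instead of using MPI_Exscan.

#include <mpi.h>
#include <numeric>
#include <vector>

// Sketch: distributed inclusive scan over one block of doubles per rank.
std::vector< double >
distributedInclusiveScan( std::vector< double > local, MPI_Comm comm )
{
   // first phase: inclusive scan of the local block
   std::partial_sum( local.begin(), local.end(), local.begin() );

   // exchange: exclusive scan of the per-rank totals yields this rank's offset
   const double localTotal = local.empty() ? 0.0 : local.back();
   double offset = 0.0;
   MPI_Exscan( &localTotal, &offset, 1, MPI_DOUBLE, MPI_SUM, comm );

   int rank = 0;
   MPI_Comm_rank( comm, &rank );
   if( rank == 0 )
      offset = 0.0;  // MPI_Exscan leaves the receive buffer undefined on rank 0

   // second phase: shift every local element by the preceding ranks' total
   for( double& x : local )
      x += offset;
   return local;
}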
+3 −3
@@ -78,11 +78,11 @@ public:
    DistributedArray( LocalRangeType localRange,
                      Index ghosts,
                      Index globalSize,
-                     MPI_Comm communicator,
+                     const MPI::Comm& communicator,
                      const AllocatorType& allocator = AllocatorType() );

    void
-   setDistribution( LocalRangeType localRange, Index ghosts, Index globalSize, MPI_Comm communicator );
+   setDistribution( LocalRangeType localRange, Index ghosts, Index globalSize, const MPI::Comm& communicator );

    const LocalRangeType&
    getLocalRange() const;
@@ -90,7 +90,7 @@ public:
    IndexType
    getGhosts() const;

-   MPI_Comm
+   const MPI::Comm&
    getCommunicator() const;

    AllocatorType
+3 −3
@@ -45,7 +45,7 @@ template< typename Value, typename Device, typename Index, typename Allocator >
 DistributedArray< Value, Device, Index, Allocator >::DistributedArray( LocalRangeType localRange,
                                                                        IndexType ghosts,
                                                                        IndexType globalSize,
-                                                                       MPI_Comm communicator,
+                                                                       const MPI::Comm& communicator,
                                                                        const Allocator& allocator )
 : localData( allocator )
 {
@@ -57,7 +57,7 @@ void
 DistributedArray< Value, Device, Index, Allocator >::setDistribution( LocalRangeType localRange,
                                                                       IndexType ghosts,
                                                                       IndexType globalSize,
-                                                                      MPI_Comm communicator )
+                                                                      const MPI::Comm& communicator )
 {
    TNL_ASSERT_LE( localRange.getEnd(), globalSize, "end of the local range is outside of the global range" );
    if( communicator != MPI_COMM_NULL )
@@ -80,7 +80,7 @@ DistributedArray< Value, Device, Index, Allocator >::getGhosts() const
 }

 template< typename Value, typename Device, typename Index, typename Allocator >
-MPI_Comm
+const MPI::Comm&
 DistributedArray< Value, Device, Index, Allocator >::getCommunicator() const
 {
    return view.getCommunicator();
+5 −5
@@ -44,9 +44,9 @@ public:
    DistributedArrayView( const LocalRangeType& localRange,
                          IndexType ghosts,
                          IndexType globalSize,
-                         MPI_Comm communicator,
+                         MPI::Comm communicator,
                          LocalViewType localData )
-   : localRange( localRange ), ghosts( ghosts ), globalSize( globalSize ), communicator( communicator ), localData( localData )
+   : localRange( localRange ), ghosts( ghosts ), globalSize( globalSize ), communicator( std::move( communicator ) ), localData( localData )
    {
       TNL_ASSERT_EQ( localData.getSize(),
                      localRange.getSize() + ghosts,
@@ -71,7 +71,7 @@ public:
    bind( const LocalRangeType& localRange,
          IndexType ghosts,
          IndexType globalSize,
-         MPI_Comm communicator,
+         const MPI::Comm& communicator,
          LocalViewType localData );

    // Note that you can also bind directly to DistributedArray and other types implicitly
@@ -91,7 +91,7 @@ public:
    IndexType
    getGhosts() const;

-   MPI_Comm
+   const MPI::Comm&
    getCommunicator() const;

    LocalViewType
@@ -256,7 +256,7 @@ protected:
    LocalRangeType localRange;
    IndexType ghosts = 0;
    IndexType globalSize = 0;
-   MPI_Comm communicator = MPI_COMM_NULL;
+   MPI::Comm communicator = MPI_COMM_NULL;
    LocalViewType localData;

    std::shared_ptr< SynchronizerType > synchronizer = nullptr;
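Note that the DistributedArrayView constructor above takes the communicator by value and moves it into the member, while the other changed signatures take a const reference. A small generic sketch of that sink-parameter idiom follows (hypothetical types, unrelated to TNL):

#include <string>
#include <utility>

// The by-value parameter acts as a sink: an lvalue argument is copied into
// it, an rvalue argument is moved into it, and the member is then
// move-constructed from the parameter in both cases.
struct Holder
{
   Holder( std::string name ) : name( std::move( name ) ) {}

   std::string name;
};

int main()
{
   std::string s = "persistent";
   Holder a( s );                       // copy into the parameter, then move
   Holder b( std::string( "temp" ) );   // move into the parameter, then move
}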
+2 −2
@@ -38,7 +38,7 @@ void
 DistributedArrayView< Value, Device, Index >::bind( const LocalRangeType& localRange,
                                                     IndexType ghosts,
                                                     IndexType globalSize,
-                                                    MPI_Comm communicator,
+                                                    const MPI::Comm& communicator,
                                                     LocalViewType localData )
 {
    TNL_ASSERT_EQ( localData.getSize(),
@@ -93,7 +93,7 @@ DistributedArrayView< Value, Device, Index >::getGhosts() const
 }

 template< typename Value, typename Device, typename Index >
-MPI_Comm
+const MPI::Comm&
 DistributedArrayView< Value, Device, Index >::getCommunicator() const
 {
    return communicator;