Commit 96ab7c2d authored by Vít Hanousek's avatar Vít Hanousek
Browse files

Merge remote-tracking branch 'geraldine/mpi-explosive' into mpi-explosive

Conflicts:
	src/TNL/Meshes/DistributedMeshes/DistributedGrid_1D.h
	src/TNL/Meshes/DistributedMeshes/DistributedGrid_2D.h
	src/TNL/Meshes/DistributedMeshes/DistributedGrid_3D.h
parents 66107cdf 8a1d5730
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
ADD_SUBDIRECTORY( Python )
#ADD_SUBDIRECTORY( Python )
ADD_SUBDIRECTORY( TNL )
ADD_SUBDIRECTORY( Tools )
ADD_SUBDIRECTORY( UnitTests )
+3 −1
Original line number Diff line number Diff line
SET( headers MpiCommunicator.h
             NoDistrCommunicator.h )
             MpiDefs.h             
             NoDistrCommunicator.h 
    )

INSTALL( FILES ${headers} DESTINATION ${TNL_TARGET_INCLUDE_DIRECTORY}/Communicators )
+50 −43
Original line number Diff line number Diff line
@@ -19,7 +19,9 @@

#include <TNL/String.h>
#include <TNL/Logger.h>
#include <TNL/Communicators/MpiDefs.h>
#include <TNL/Config/ConfigDescription.h>
#include <TNL/Exceptions/MPISupportMissing.h>

namespace TNL {
namespace Communicators {
@@ -138,8 +140,7 @@ class MpiCommunicator
        TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
        return MPI::COMM_WORLD.Get_rank();
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
        return 0;
        throw Exceptions::MPISupportMissing();
#endif
      };

@@ -149,8 +150,7 @@ class MpiCommunicator
        TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
        return MPI::COMM_WORLD.Get_size();
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
        return 0;
        throw Exceptions::MPISupportMissing();
#endif
      };

@@ -185,7 +185,7 @@ class MpiCommunicator
            TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
            MPI::COMM_WORLD.Barrier();;
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
            throw Exceptions::MPISupportMissing();
#endif     
        };

@@ -196,8 +196,7 @@ class MpiCommunicator
            TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
            return MPI::COMM_WORLD.Isend((void*) data, count, MPIDataType(data) , dest, 0);
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
        return 0;
            throw Exceptions::MPISupportMissing();
#endif  
        }    

@@ -208,8 +207,7 @@ class MpiCommunicator
            TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
            return MPI::COMM_WORLD.Irecv((void*) data, count, MPIDataType(data) , src, 0);
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
        return 0;
            throw Exceptions::MPISupportMissing();
#endif  
        }

@@ -219,9 +217,8 @@ class MpiCommunicator
            TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
            MPI::Request::Waitall(length, reqs);
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
            throw Exceptions::MPISupportMissing();
#endif

        };

        template< typename T > 
@@ -231,28 +228,38 @@ class MpiCommunicator
        TNL_ASSERT_TRUE(IsInitialized(), "Fatal Error - MPI communicator is not inicialized");
        MPI::COMM_WORLD.Bcast((void*) &data, count,  MPIDataType(data), root);
#else
        TNL_ASSERT_TRUE(false, "Fatal Error - MPI in not compiled");
        throw Exceptions::MPISupportMissing();
#endif  
        }

      /*  template< typename T >
        static void Allreduce( T& data,
                     T& reduced_data,
        template< typename T >
        static void Allreduce( T* data,
                               T* reduced_data,
                               int count,
                               const MPI_Op &op )
        {
                MPI::COMM_WORLD.Allreduce((void*) &data, (void*) &reduced_data,count,MPIDataType(data),op);
#ifdef HAVE_MPI
            MPI::COMM_WORLD.Allreduce( (void*) data, (void*) reduced_data,count,MPIDataType(data),op);
#else
            throw Exceptions::MPISupportMissing();
#endif            
        };


         template< typename T >
        static void Reduce( T& data,
                    T& reduced_data,
         static void Reduce( T* data,
                    T* reduced_data,
                    int count,
                    MPI_Op &op,
                    int root)
         {
             MPI::COMM_WORLD.Reduce((void*) &data, (void*) &reduced_data,count,MPIDataType(data),op,root);
        };*/
#ifdef HAVE_MPI
            MPI::COMM_WORLD.Reduce( (void*) data, (void*) reduced_data,count,MPIDataType(data),op,root);
#else
            throw Exceptions::MPISupportMissing();
#endif
        };


      static void writeProlog( Logger& logger ) 
      {
+15 −0
Original line number Diff line number Diff line
/***************************************************************************
                          MpiDefs.h  -  description
                             -------------------
    begin                : 2005/04/23
    copyright            : (C) 2005 by Tomas Oberhuber
    email                : tomas.oberhuber@fjfi.cvut.cz
 ***************************************************************************/

/* See Copyright Notice in tnl/Copyright */

#pragma once

#ifndef HAVE_MPI
// Fallback declaration for builds without MPI: code such as
// Allreduce()/Reduce() takes a `const MPI_Op&` parameter, so the type must
// exist even when <mpi.h> is unavailable. Only the operations actually used
// by this project (sum and max) are declared.
// NOTE(review): a real MPI_Op is an opaque handle, not an enum — this stub
// must never be mixed with an MPI-enabled translation unit.
enum MPI_Op { MPI_SUM, MPI_MAX };
#endif
 No newline at end of file
+18 −13
Original line number Diff line number Diff line
@@ -11,6 +11,11 @@
#pragma once

#include <TNL/Logger.h>
#include <TNL/Communicators/MpiDefs.h>

#ifdef HAVE_MPI
#include <mpi.h>
#endif

namespace TNL {
namespace Communicators {
@@ -96,24 +101,24 @@ class NoDistrCommunicator
      {
      }

     /* template< typename T >
      static void Allreduce( T& data,
                   T& reduced_data,
      template< typename T >
      static void Allreduce( T* data,
                             T* reduced_data,
                             int count,
                             const MPI_Op &op )
      {
              MPI::COMM_WORLD.Allreduce((void*) &data, (void*) &reduced_data,count,MPIDataType(data),op);
         memcpy( ( void* ) reduced_data, ( void* ) data, count * sizeof( T ) );
      };

      template< typename T >
      static void Reduce( T& data,
                  T& reduced_data,
      static void Reduce( T* data,
                          T* reduced_data,
                          int count,
                          MPI_Op &op,
                          int root )
      {
           MPI::COMM_WORLD.Reduce((void*) &data, (void*) &reduced_data,count,MPIDataType(data),op,root);
      };*/
         memcpy( ( void* ) reduced_data, ( void* ) data, count * sizeof( T ) );
      };

      static void writeProlog( Logger& logger ){};
};
Loading