diff --git a/src/TNL/Communicators/CMakeLists.txt b/src/TNL/Communicators/CMakeLists.txt
index 87feba13eb52023894808f6b75fca3e05eed2708..fdf69b44d3c53057f96eed343dffef5dd1992203 100644
--- a/src/TNL/Communicators/CMakeLists.txt
+++ b/src/TNL/Communicators/CMakeLists.txt
@@ -1,5 +1,6 @@
 SET( headers MpiCommunicator.h
              MpiDefs.h
+             MPIPrint.h
              MPITypeResolver.h
              NoDistrCommunicator.h
              ScopedInitializer.h
diff --git a/src/TNL/Communicators/MpiCommunicator.h b/src/TNL/Communicators/MpiCommunicator.h
index 7d66693a9984b43207acc01f68598b6e4f76367c..0a711a58ac7f88db41af565aefae2929cea8d7a5 100644
--- a/src/TNL/Communicators/MpiCommunicator.h
+++ b/src/TNL/Communicators/MpiCommunicator.h
@@ -541,51 +541,3 @@ bool MpiCommunicator::redirect = true;
 } // namespace Communicators
 } // namespace TNL
 
-#ifdef HAVE_MPI
-#define TNL_MPI_PRINT( message )                                                                                                 \
-if( ! TNL::Communicators::MpiCommunicator::IsInitialized() )                                                                     \
-   std::cerr << message << std::endl;                                                                                            \
-else                                                                                                                             \
-   for( int __tnl_mpi_print_j = 0;                                                                                               \
-        __tnl_mpi_print_j < TNL::Communicators::MpiCommunicator::GetSize( TNL::Communicators::MpiCommunicator::AllGroup );       \
-        __tnl_mpi_print_j++ )                                                                                                    \
-   {                                                                                                                             \
-      if( __tnl_mpi_print_j == TNL::Communicators::MpiCommunicator::GetRank( TNL::Communicators::MpiCommunicator::AllGroup ) )   \
-      {                                                                                                                          \
-         std::cerr << "Node " << __tnl_mpi_print_j << " of "                                                                     \
-                   << TNL::Communicators::MpiCommunicator::GetSize( TNL::Communicators::MpiCommunicator::AllGroup )              \
-                   << " : " << message << std::endl << std::flush;                                                                    \
-      }                                                                                                                          \
-      TNL::Communicators::MpiCommunicator::Barrier( TNL::Communicators::MpiCommunicator::AllGroup );                             \
-   }
-#else
-#define TNL_MPI_PRINT( message )                                                                                                 \
-   std::cerr << message << std::endl;
-#endif
-
-#ifdef HAVE_MPI
-#define TNL_MPI_PRINT_COND( condition, message )                                                                                 \
-if( ! TNL::Communicators::MpiCommunicator::IsInitialized() )                                                                     \
-{                                                                                                                                \
-   if( condition ) std::cerr << message << std::endl;                                                                            \
-}                                                                                                                                \
-else                                                                                                                             \
-{                                                                                                                                \
-   for( int __tnl_mpi_print_j = 0;                                                                                               \
-        __tnl_mpi_print_j < TNL::Communicators::MpiCommunicator::GetSize( TNL::Communicators::MpiCommunicator::AllGroup );       \
-        __tnl_mpi_print_j++ )                                                                                                    \
-   {                                                                                                                             \
-      if( __tnl_mpi_print_j == TNL::Communicators::MpiCommunicator::GetRank( TNL::Communicators::MpiCommunicator::AllGroup ) )   \
-      {                                                                                                                          \
-         if( condition )                                                                                                         \
-            std::cerr << "Node " << __tnl_mpi_print_j << " of "                                                                  \
-                      << TNL::Communicators::MpiCommunicator::GetSize( TNL::Communicators::MpiCommunicator::AllGroup )           \
-                      << " : " << message << std::endl << std::flush;                                                                          \
-      }                                                                                                                          \
-      TNL::Communicators::MpiCommunicator::Barrier( TNL::Communicators::MpiCommunicator::AllGroup );                             \
-   }                                                                                                                             \
-}
-#else
-#define TNL_MPI_PRINT_COND( condition, message )                                                                                 \
-   if( condition ) std::cerr << message << std::endl;
-#endif
diff --git a/src/TNL/Meshes/DistributedMeshes/DistributedGridSynchronizer.h b/src/TNL/Meshes/DistributedMeshes/DistributedGridSynchronizer.h
index b68136e07926ebb6f8bdf34855a8a3db630b6f1c..15d2eae06dcf39415cf0703a7f5196f9f925eb8a 100644
--- a/src/TNL/Meshes/DistributedMeshes/DistributedGridSynchronizer.h
+++ b/src/TNL/Meshes/DistributedMeshes/DistributedGridSynchronizer.h
@@ -14,6 +14,7 @@
 #include <TNL/Containers/Array.h>
 #include <TNL/Meshes/DistributedMeshes/BufferEntitiesHelper.h>
 #include <TNL/Meshes/DistributedMeshes/Directions.h>
+#include <TNL/Communicators/MPIPrint.h>
 
 namespace TNL {
 namespace Functions{
@@ -123,10 +124,9 @@ class DistributedMeshSynchronizer< Functions::MeshFunction< Grid< MeshDimension,
                   recieveBegin[i]=tmp;
                }
             }
-
          }
      }
-        
+
       template< typename CommunicatorType,
                 typename MeshFunctionType,
                 typename PeriodicBoundariesMaskPointer = Pointers::SharedPointer< MeshFunctionType > >
diff --git a/src/TNL/String.h b/src/TNL/String.h
index 25f05065f9e7e75ae455c2cbaf0c8c308efa7152..3da2ffbf4b74d63665140ad980d00640d921855d 100644
--- a/src/TNL/String.h
+++ b/src/TNL/String.h
@@ -15,6 +15,10 @@
 #include <vector>
 #include <string>
 
+#ifdef HAVE_MPI
+#include <mpi.h>
+#endif
+
 namespace TNL {
 
 class String;
@@ -210,8 +214,21 @@ public:
    /// @param separator Character, which separates substrings in given string.
    std::vector< String > split( const char separator = ' ', bool skipEmpty = false ) const;
 
+#ifdef HAVE_MPI
+
+   /****
+    * \brief Sends the string to the target MPI process.
+    */
+   void send( int target, int tag = 0, MPI_Comm mpi_comm = MPI_COMM_WORLD );
+
+   /****
+    * \brief Receives a string from the source MPI process.
+    */
+   void receive( int source, int tag = 0, MPI_Comm mpi_comm = MPI_COMM_WORLD );
+
    //! Broadcast to other nodes in MPI cluster
-//   void MPIBcast( int root, MPI_Comm mpi_comm = MPI_COMM_WORLD );
+   // void MPIBcast( int root, MPI_Comm mpi_comm = MPI_COMM_WORLD );
+#endif
 };
 
 /// \brief Returns concatenation of \e string1 and \e string2.
diff --git a/src/TNL/String_impl.h b/src/TNL/String_impl.h
index 17ba9359d5aa907d2ba0f472cdb077147ee7729c..3c5aa253a083a43c6d7a2936dea09c3474406797 100644
--- a/src/TNL/String_impl.h
+++ b/src/TNL/String_impl.h
@@ -13,9 +13,9 @@
 #include <TNL/String.h>
 #include <TNL/Assert.h>
 #include <TNL/Math.h>
-//#ifdef USE_MPI
-//   #include <mpi.h>
-//#endif
+#ifdef HAVE_MPI
+   #include <mpi.h>
+#endif
 
 namespace TNL {
 
@@ -233,18 +233,32 @@ String::split( const char separator, bool skipEmpty ) const
    return parts;
 }
 
+#ifdef HAVE_MPI
+inline void String::send( int target, int tag, MPI_Comm mpi_comm )
+{
+   int size = this->getSize();
+   MPI_Send( &size, 1, MPI_INT, target, tag, mpi_comm );
+   MPI_Send( this->getString(), size, MPI_CHAR, target, tag, mpi_comm );
+}
+
+inline void String::receive( int source, int tag, MPI_Comm mpi_comm )
+{
+   int size;
+   MPI_Status status;
+   MPI_Recv( &size, 1, MPI_INT, source, tag, mpi_comm, &status );
+   this->setSize( size );
+   MPI_Recv( const_cast< char* >( this->data() ), size, MPI_CHAR, source, tag, mpi_comm, &status );
+}
+
 /*
 inline void String :: MPIBcast( int root, MPI_Comm comm )
 {
 #ifdef USE_MPI
-   dbgFunctionName( "mString", "MPIBcast" );
    int iproc;
    MPI_Comm_rank( MPI_COMM_WORLD, &iproc );
    TNL_ASSERT( string, );
    int len = strlen( string );
    MPI_Bcast( &len, 1, MPI_INT, root, comm );
-   dbgExpr( iproc );
-   dbgExpr( len );
    if( iproc != root )
    {
       if( length < len )
@@ -256,11 +270,10 @@ inline void String :: MPIBcast( int root, MPI_Comm comm )
    }
  
    MPI_Bcast( string, len + 1, MPI_CHAR, root, comm );
-   dbgExpr( iproc );
-   dbgExpr( string );
 #endif
 }
 */
+#endif
 
 inline String operator+( char string1, const String& string2 )
 {