Commit 77a0b527 authored by Jakub Klinkovský's avatar Jakub Klinkovský
Browse files

Added readDistributedMeshFunction and writeDistributedMeshFunction to MeshFunctionIO.h

parent 64f841c0
Loading
Loading
Loading
Loading
+17 −22
Original line number Diff line number Diff line
@@ -13,7 +13,6 @@

#pragma once
#include <TNL/FileName.h>
#include <TNL/Exceptions/NotImplementedError.h>

#include "tnlDirectEikonalProblem.h"

@@ -128,11 +127,11 @@ setInitialCondition( const Config::ParameterContainer& parameters,
   if( CommunicatorType::isDistributed() )
   {
      std::cout<<"Nodes Distribution: " << this->distributedMeshPointer->printProcessDistr() << std::endl;
    throw Exceptions::NotImplementedError( "PVTI reader is not implemented yet." );
//    if(distributedIOType==Meshes::DistributedMeshes::MpiIO)
//      Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::MpiIO> ::load(inputFile, *initialData );
//    if(distributedIOType==Meshes::DistributedMeshes::LocalCopy)
//      Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::LocalCopy> ::load(inputFile, *initialData );
      if( ! Functions::readDistributedMeshFunction( *this->distributedMeshPointer, *this->initialData, "u", inputFile ) )
      {
         std::cerr << "I am not able to load the initial condition from the file " << inputFile << "." << std::endl;
         return false;
      }
      synchronizer.setDistributedGrid( &this->distributedMeshPointer.getData() );
      synchronizer.synchronize( *initialData );
   }
@@ -166,11 +165,7 @@ makeSnapshot( )
   if(CommunicatorType::isDistributed())
   {
      fileName.setExtension( "pvti" );
      throw Exceptions::NotImplementedError( "PVTI writer is not implemented yet." );
//      if(distributedIOType==Meshes::DistributedMeshes::MpiIO)
//        Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::MpiIO> ::save(fileName.getFileName(), *u );
//      if(distributedIOType==Meshes::DistributedMeshes::LocalCopy)
//        Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::LocalCopy> ::save(fileName.getFileName(), *u );
      Functions::writeDistributedMeshFunction( *this->distributedMeshPointer, *this->initialData, "u", fileName.getFileName() );
   }
   else {
      fileName.setExtension( "vti" );
+125 −0
Original line number Diff line number Diff line
@@ -18,6 +18,7 @@
#include <TNL/Meshes/Writers/VTKWriter.h>
#include <TNL/Meshes/Writers/VTUWriter.h>
#include <TNL/Meshes/Writers/VTIWriter.h>
#include <TNL/Meshes/Writers/PVTIWriter.h>
#include <TNL/Functions/MeshFunctionGnuplotWriter.h>

namespace TNL {
@@ -64,6 +65,60 @@ readMeshFunction( MeshFunction& function,
   return true;
}

/**
 * \brief Reads a distributed mesh function from a (parallel) file, e.g. .pvtu or .pvti.
 *
 * \param distributedMesh  Distributed mesh on which the function is defined.
 *                         If it compares equal to a default-constructed
 *                         distributed mesh, the mesh is first loaded from the
 *                         file as well.
 * \param function         Mesh function whose data vector is overwritten with
 *                         the values read from the file.
 * \param functionName     Name of the point/cell data array in the file.
 * \param fileName         Path of the input file.
 * \param fileFormat       Input format; "auto" detects it from the file name.
 * \return \c true on success, \c false when no reader matches the file or the
 *         function's entities dimension cannot be represented in the file.
 * \throws std::runtime_error when the file contains an unknown distributed
 *         mesh type.
 * \throws Exceptions::FileDeserializationError when the array size in the
 *         file does not match the local mesh.
 */
template< typename MeshFunction >
bool
readDistributedMeshFunction( Meshes::DistributedMeshes::DistributedMesh< typename MeshFunction::MeshType >& distributedMesh,
                             MeshFunction& function,
                             const std::string& functionName,
                             const std::string& fileName,
                             const std::string& fileFormat = "auto" )
{
   // select a reader based on the file name/format; nullptr means no match
   std::shared_ptr< Meshes::Readers::MeshReader > reader = Meshes::Readers::getMeshReader( fileName, fileFormat );
   if( reader == nullptr )
      return false;

   reader->detectMesh();

   // load the mesh if it was not loaded yet (i.e. distributedMesh is still default-constructed)
   using MeshType = typename MeshFunction::MeshType;
   using DistributedMeshType = Meshes::DistributedMeshes::DistributedMesh< MeshType >;
   if( distributedMesh == DistributedMeshType {} ) {
      // dispatch on the detected mesh type string to the matching parallel reader
      if( reader->getMeshType() == "Meshes::DistributedMesh" )
         dynamic_cast<Meshes::Readers::PVTUReader&>(*reader).loadMesh( distributedMesh );
      else if( reader->getMeshType() == "Meshes::DistributedGrid" )
         dynamic_cast<Meshes::Readers::PVTIReader&>(*reader).loadMesh( distributedMesh );
      else
         throw std::runtime_error( "Unknown type of a distributed mesh: " + reader->getMeshType() );
   }
   if( function.getMesh() != distributedMesh.getLocalMesh() )
      // FIXME: DistributedMesh does not hold a SharedPointer of the local mesh,
      // so the local mesh must be copied into the function's mesh pointer here
      // (the interface should not force SharedPointer everywhere)
      *function.getMeshPointer() = distributedMesh.getLocalMesh();

   // read the data array matching the function's entities dimension:
   // vertices -> point data, cells -> cell data; other dimensions are not
   // representable in VTK-based formats
   Meshes::Readers::MeshReader::VariantVector data;
   if( function.getEntitiesDimension() == 0 )
      data = reader->readPointData( functionName );
   else if( function.getEntitiesDimension() == function.getMeshDimension() )
      data = reader->readCellData( functionName );
   else {
      std::cerr << "The mesh function with entities dimension " << function.getEntitiesDimension() << " cannot be read from the file " << fileName << std::endl;
      return false;
   }

   // the variant holds one of several scalar-type vectors; copy whichever was
   // read into the function's (possibly device-resident) data vector
   visit( [&](auto&& array) {
            const auto entitiesCount = function.getMesh().template getEntitiesCount< MeshFunction::getEntitiesDimension() >();
            if( array.size() == (std::size_t) entitiesCount )
               Algorithms::MultiDeviceMemoryOperations< typename MeshFunction::VectorType::DeviceType, Devices::Host >
                  ::copy( function.getData().getData(), array.data(), array.size() );
            else
               throw Exceptions::FileDeserializationError( fileName, "mesh function data size does not match the mesh size (expected " + std::to_string(entitiesCount) + ", got " + std::to_string(array.size()) + ")." );
         },
         data
      );

   return true;
}

// specialization for grids
template< typename MeshFunction >
std::enable_if_t< Meshes::isGrid< typename MeshFunction::MeshType >::value, bool >
@@ -156,5 +211,75 @@ writeMeshFunction( const MeshFunction& function,
   return true;
}

/**
 * \brief Writes a distributed mesh function defined on a grid into a .pvti
 * file plus one .vti piece file per MPI rank.
 *
 * \param distributedMesh  Distributed grid on which the function is defined.
 * \param function         Mesh function to write.
 * \param functionName     Name of the data array in the output file.
 * \param fileName         Path of the main .pvti output file.
 * \param fileFormat       Output format; "auto" detects it from the file name.
 * \return \c true on success, \c false for an unknown output format.
 */
// specialization for grids
template< typename MeshFunction >
std::enable_if_t< Meshes::isGrid< typename MeshFunction::MeshType >::value, bool >
writeDistributedMeshFunction( const Meshes::DistributedMeshes::DistributedMesh< typename MeshFunction::MeshType >& distributedMesh,
                              const MeshFunction& function,
                              const std::string& functionName,
                              const std::string& fileName,
                              const std::string& fileFormat = "auto" )
{
   namespace fs = std::experimental::filesystem;

   // resolve "auto" into the file name's extension without the leading dot
   std::string format = fileFormat;
   if( format == "auto" ) {
      const std::string extension = fs::path( fileName ).extension();
      format = extension.empty() ? extension : extension.substr( 1 );
   }

   if( format != "pvti" ) {
      std::cerr << "Unknown output format: " << format << std::endl;
      return false;
   }

   // only the master rank of the communication group opens the main .pvti file
   const MPI_Comm group = distributedMesh.getCommunicationGroup();
   std::ofstream mainFile;
   if( TNL::MPI::GetRank( group ) == 0 )
      mainFile.open( fileName );

   using PVTI = Meshes::Writers::PVTIWriter< typename MeshFunction::MeshType >;
   PVTI pvtiWriter( mainFile );
   // TODO: write metadata: step and time
   pvtiWriter.writeImageData( distributedMesh );
   // TODO
   //if( distributedMesh.getGhostLevels() > 0 ) {
   //   pvtiWriter.template writePPointData< std::uint8_t >( Meshes::VTK::ghostArrayName() );
   //   pvtiWriter.template writePCellData< std::uint8_t >( Meshes::VTK::ghostArrayName() );
   //}
   if( function.getEntitiesDimension() == 0 )
      pvtiWriter.template writePPointData< typename MeshFunction::RealType >( functionName );
   else
      pvtiWriter.template writePCellData< typename MeshFunction::RealType >( functionName );
   const std::string piecePath = pvtiWriter.addPiece( fileName, distributedMesh );

   // every rank writes its local data into its own .vti piece file
   // TODO: write metadata: step and time
   using VTI = Meshes::Writers::VTIWriter< typename MeshFunction::MeshType >;
   std::ofstream pieceFile( piecePath );
   VTI vtiWriter( pieceFile );
   // NOTE: passing the local mesh to writeImageData does not work correctly, just like meshFunction->write(...)
   //       (it does not write the correct extent of the subdomain - globalBegin is only in the distributed grid)
   // NOTE: globalBegin and globalEnd here are without overlaps
   vtiWriter.writeImageData( distributedMesh.getGlobalGrid().getOrigin(),
                             distributedMesh.getGlobalBegin() - distributedMesh.getLowerOverlap(),
                             distributedMesh.getGlobalBegin() + distributedMesh.getLocalSize() + distributedMesh.getUpperOverlap(),
                             distributedMesh.getGlobalGrid().getSpaceSteps() );
   if( function.getEntitiesDimension() == 0 )
      vtiWriter.writePointData( function.getData(), functionName );
   else
      vtiWriter.writeCellData( function.getData(), functionName );
   // TODO
   //if( mesh.getGhostLevels() > 0 ) {
   //   vtiWriter.writePointData( mesh.vtkPointGhostTypes(), Meshes::VTK::ghostArrayName() );
   //   vtiWriter.writeCellData( mesh.vtkCellGhostTypes(), Meshes::VTK::ghostArrayName() );
   //}

   return true;
}

// TODO: specialization of writeDistributedMeshFunction for unstructured mesh

} // namespace Functions
} // namespace TNL
+6 −11
Original line number Diff line number Diff line
@@ -20,7 +20,6 @@
#include <TNL/Matrices/MatrixSetter.h>
#include <TNL/Logger.h>
#include <TNL/Solvers/PDE/BoundaryConditionsSetter.h>
#include <TNL/Exceptions/NotImplementedError.h>

#include "HeatEquationProblem.h"

@@ -142,11 +141,11 @@ setInitialCondition( const Config::ParameterContainer& parameters,
   if(CommunicatorType::isDistributed())
   {
      std::cout<<"Nodes Distribution: " << this->distributedMeshPointer->printProcessDistr() << std::endl;
      throw Exceptions::NotImplementedError( "PVTI reader is not implemented yet." );
//      if(distributedIOType==Meshes::DistributedMeshes::MpiIO)
//         Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::MpiIO> ::load(initialConditionFile, *uPointer );
//      if(distributedIOType==Meshes::DistributedMeshes::LocalCopy)
//         Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::LocalCopy> ::load(initialConditionFile, *uPointer );
      if( ! Functions::readDistributedMeshFunction( *this->distributedMeshPointer, *this->uPointer, "u", initialConditionFile ) )
      {
         std::cerr << "I am not able to load the initial condition from the file " << initialConditionFile << "." << std::endl;
         return false;
      }
      synchronizer.setDistributedGrid( &this->distributedMeshPointer.getData() );
      synchronizer.synchronize( *uPointer );
   }
@@ -208,11 +207,7 @@ makeSnapshot( const RealType& time,
   if(CommunicatorType::isDistributed())
   {
      fileName.setExtension( "pvti" );
      throw Exceptions::NotImplementedError( "PVTI writer is not implemented yet." );
//      if(distributedIOType==Meshes::DistributedMeshes::MpiIO)
//        Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::MpiIO> ::save(fileName.getFileName(), *uPointer );
//      if(distributedIOType==Meshes::DistributedMeshes::LocalCopy)
//        Meshes::DistributedMeshes::DistributedGridIO<MeshFunctionType,Meshes::DistributedMeshes::LocalCopy> ::save(fileName.getFileName(), *uPointer );
      Functions::writeDistributedMeshFunction( *this->distributedMeshPointer, *this->uPointer, "u", fileName.getFileName() );
   }
   else
   {
+1 −38
Original line number Diff line number Diff line
@@ -109,45 +109,8 @@ bool renderFunction( const Config::ParameterContainer& parameters )

      if( TNL::MPI::GetSize() > 1 )
      {
         std::ofstream file;
         if( TNL::MPI::GetRank() == 0 )
            file.open( outputFile );
         using PVTI = Meshes::Writers::PVTIWriter< typename DistributedGridType::GridType >;
         PVTI pvti( file );
         // TODO: write metadata: step and time
         pvti.writeImageData( distributedMesh );
         // TODO
         //if( distributedMesh.getGhostLevels() > 0 ) {
         //   pvti.template writePPointData< std::uint8_t >( Meshes::VTK::ghostArrayName() );
         //   pvti.template writePCellData< std::uint8_t >( Meshes::VTK::ghostArrayName() );
         //}
         if( meshFunction->getEntitiesDimension() == 0 )
            pvti.template writePPointData< typename MeshFunctionType::RealType >( meshFunctionName );
         else
            pvti.template writePCellData< typename MeshFunctionType::RealType >( meshFunctionName );
         const std::string subfilePath = pvti.addPiece( outputFile, distributedMesh );

         // create a .vti file for local data
         // TODO: write metadata: step and time
         using Writer = Meshes::Writers::VTIWriter< typename DistributedGridType::GridType >;
         std::ofstream subfile( subfilePath );
         Writer writer( subfile );
         // NOTE: passing the local mesh to writeImageData does not work correctly, just like meshFunction->write(...)
         //       (it does not write the correct extent of the subdomain - globalBegin is only in the distributed grid)
         // NOTE: globalBegin and globalEnd here are without overlaps
         writer.writeImageData( distributedMesh.getGlobalGrid().getOrigin(),
                                distributedMesh.getGlobalBegin(),
                                distributedMesh.getGlobalBegin() + distributedMesh.getLocalSize(),
                                distributedMesh.getGlobalGrid().getSpaceSteps() );
         if( meshFunction->getEntitiesDimension() == 0 )
            writer.writePointData( meshFunction->getData(), meshFunctionName );
         else
            writer.writeCellData( meshFunction->getData(), meshFunctionName );
         // TODO
         //if( mesh.getGhostLevels() > 0 ) {
         //   writer.writePointData( mesh.vtkPointGhostTypes(), Meshes::VTK::ghostArrayName() );
         //   writer.writeCellData( mesh.vtkCellGhostTypes(), Meshes::VTK::ghostArrayName() );
         //}
         Functions::writeDistributedMeshFunction( distributedMesh, *meshFunction, meshFunctionName, outputFile );
      }
      else
      {
+35 −0
Original line number Diff line number Diff line
@@ -467,6 +467,41 @@ TEST_F(DistributedGridTest_1D, PVTIWriterReader)
      EXPECT_EQ( fs::remove( baseName ), true );
   }
}

// Round-trip test: write a mesh function with writeDistributedMeshFunction,
// read it back with readDistributedMeshFunction and compare the dof vectors.
TEST_F(DistributedGridTest_1D, readDistributedMeshFunction)
{
   // base name WITHOUT the extension — the original appended ".pvti" twice,
   // producing a main file named "...proc.pvti.pvti"; PVTIWriter stores the
   // per-rank .vti pieces in a subdirectory named after this base name
   const std::string baseName = "DistributedGridTest_MeshFunction_1D_" + std::to_string(nproc) + "proc";
   const std::string mainFilePath = baseName + ".pvti";

   // evaluate a function
   dof.setValue( -1 );
   constFunctionEvaluator.evaluateAllEntities( meshFunctionPtr, constFunctionPtr );

   // write the mesh function into a .pvti file
   EXPECT_TRUE( writeDistributedMeshFunction( *distributedGrid, *meshFunctionPtr, "foo", mainFilePath ) );

   // wait for rank 0 to write the main .pvti file
   TNL::MPI::Barrier();

   // load the mesh function from the .pvti file into a fresh dof vector
   // (pre-filled with a different sentinel so a failed read is detected)
   DofType loadedDof;
   loadedDof.setLike( dof );
   loadedDof.setValue( -2 );
   MeshFunctionType loadedMeshFunction;
   loadedMeshFunction.bind( localGrid, loadedDof );
   EXPECT_TRUE( readDistributedMeshFunction( *distributedGrid, loadedMeshFunction, "foo", mainFilePath ) );

   // compare the dofs (MeshFunction and MeshFunctionView do not have operator==)
//   EXPECT_EQ( loadedMeshFunction, *meshFunctionPtr );
   EXPECT_EQ( loadedDof, dof );

   // cleanup: all ranks must be done reading before rank 0 deletes the files
   TNL::MPI::Barrier();
   if( TNL::MPI::GetRank() == 0 ) {
      EXPECT_TRUE( fs::remove( mainFilePath ) );
      EXPECT_GT( fs::remove_all( baseName ), 1 );
   }
}
#endif

#endif
Loading