Commit 4b6c35f0 authored by Jakub Klinkovský

Refactoring read and write methods of File

Splitting with if( std::is_same<...> ) was not ideal: the code for
all devices was always visible to the compiler, and the irrelevant
parts were optimized out only at a much later stage. To fix
compilation for MIC and for custom data types, we need proper
template specializations.
parent 54784be9
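In rough outline, the dispatch pattern introduced by this commit looks like the
following minimal, stand-alone sketch (not the actual TNL code; the Host/Cuda
tag structs and the printf placeholder bodies are illustrative assumptions).
Each device gets its own read_impl overload selected via std::enable_if, and
the extra "typename = void" defaults keep the overloads distinct templates
instead of being redeclarations of one another:

   #include <cstddef>
   #include <cstdio>
   #include <type_traits>

   struct Host {};   // stand-ins for TNL::Devices::Host / Devices::Cuda
   struct Cuda {};

   class File
   {
   public:
      template< typename Type, typename Device = Host >
      bool read( Type* buffer, std::size_t elements )
      {
         // The public method only forwards; overload resolution picks the
         // device-specific implementation at compile time.
         return read_impl< Type, Device >( buffer, elements );
      }

   protected:
      template< typename Type,
                typename Device,
                typename = typename std::enable_if< std::is_same< Device, Host >::value >::type >
      bool read_impl( Type* buffer, std::size_t elements )
      {
         std::printf( "host read of %zu elements\n", elements );
         return true;
      }

      template< typename Type,
                typename Device,
                typename = typename std::enable_if< std::is_same< Device, Cuda >::value >::type,
                typename = void >
      bool read_impl( Type* buffer, std::size_t elements )
      {
         std::printf( "CUDA read of %zu elements\n", elements );
         return true;
      }
   };

   int main()
   {
      File file;
      double data[ 4 ];
      file.read< double, Host >( data, 4 );   // selects the Host overload
      file.read< double, Cuda >( data, 4 );   // selects the Cuda overload
   }

Because only the instantiated overload is compiled, code for devices that are
not enabled (e.g. MIC without HAVE_MIC) never has to type-check for the other
device types.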
+55 −14
@@ -20,7 +20,6 @@
#include <TNL/Devices/Cuda.h>
#include <TNL/Devices/MIC.h>


namespace TNL {

enum class IOMode
@@ -79,7 +78,6 @@ class File
      return this->writtenElements;
   }

	// TODO: this does not work for constant types
   template< typename Type, typename Device = Devices::Host, typename Index = int >
   bool read( Type* buffer,
              const Index& elements );
@@ -97,6 +95,49 @@ class File
   bool close();

   static int verbose;

protected:
   template< typename Type,
             typename Device,
             typename = typename std::enable_if< std::is_same< Device, Devices::Host >::value >::type >
   bool read_impl( Type* buffer,
                   const std::size_t& elements );

   template< typename Type,
             typename Device,
             typename = typename std::enable_if< std::is_same< Device, Devices::Cuda >::value >::type,
             typename = void >
   bool read_impl( Type* buffer,
                   const std::size_t& elements );

   template< typename Type,
             typename Device,
             typename = typename std::enable_if< std::is_same< Device, Devices::MIC >::value >::type,
             typename = void,
             typename = void >
   bool read_impl( Type* buffer,
                   const std::size_t& elements );

   template< typename Type,
             typename Device,
             typename = typename std::enable_if< std::is_same< Device, Devices::Host >::value >::type >
   bool write_impl( const Type* buffer,
                    const std::size_t& elements );

   template< typename Type,
             typename Device,
             typename = typename std::enable_if< std::is_same< Device, Devices::Cuda >::value >::type,
             typename = void >
   bool write_impl( const Type* buffer,
                    const std::size_t& elements );

   template< typename Type,
             typename Device,
             typename = typename std::enable_if< std::is_same< Device, Devices::MIC >::value >::type,
             typename = void,
             typename = void >
   bool write_impl( const Type* buffer,
                    const std::size_t& elements );
};

bool fileExists( const String& fileName );
+219 −180
@@ -14,6 +14,7 @@

#include <TNL/File.h>
#include <TNL/Exceptions/CudaSupportMissing.h>
#include <TNL/Exceptions/MICSupportMissing.h>

namespace TNL {

@@ -22,13 +23,13 @@ template< typename Type, typename Device >
bool File::read( Type* buffer )
{
   return read< Type, Device, int >( buffer, 1 );
};
}

template< typename Type, typename Device >
bool File::write( const Type* buffer )
{
   return write< Type, Device, int >( buffer, 1 );
};
}

template< typename Type, typename Device, typename Index >
bool File::read( Type* buffer,
@@ -53,9 +54,17 @@ bool File :: read( Type* buffer,
      return false;
   }

   this->readElements = 0;
   if( std::is_same< Device, Devices::Host >::value )
   return read_impl< Type, Device >( buffer, elements );
}

// Host
template< typename Type,
          typename Device,
          typename >
bool File::read_impl( Type* buffer,
                      const std::size_t& elements )
{
   this->readElements = 0;
   if( std::fread( buffer,
                   sizeof( Type ),
                   elements,
@@ -68,11 +77,17 @@ bool File :: read( Type* buffer,
   this->readElements = elements;
   return true;
}
   if( std::is_same< Device, Devices::Cuda >::value )

// Cuda
template< typename Type,
          typename Device,
          typename, typename >
bool File::read_impl( Type* buffer,
                      const std::size_t& elements )
{
#ifdef HAVE_CUDA
      const std::size_t host_buffer_size = std::min( tnlFileGPUvsCPUTransferBufferSize / sizeof( Type ),
                                                elements );
   this->readElements = 0;
   const std::size_t host_buffer_size = std::min( tnlFileGPUvsCPUTransferBufferSize / sizeof( Type ), elements );
   using BaseType = typename std::remove_cv< Type >::type;
   BaseType* host_buffer = new BaseType[ host_buffer_size ];

@@ -110,11 +125,15 @@ bool File :: read( Type* buffer,
}

// MIC
   if( std::is_same< Device, Devices::MIC >::value )
template< typename Type,
          typename Device,
          typename, typename, typename >
bool File::read_impl( Type* buffer,
                      const std::size_t& elements )
{
#ifdef HAVE_MIC
        const std::size_t host_buffer_size = std::min( tnlFileGPUvsCPUTransferBufferSize / sizeof( Type ),
                                                elements );
   this->readElements = 0;
   const std::size_t host_buffer_size = std::min( tnlFileGPUvsCPUTransferBufferSize / sizeof( Type ), elements );
   Type * host_buffer = (Type *)malloc( sizeof( Type ) * host_buffer_size );
   readElements = 0;
   if( ! host_buffer )
@@ -150,12 +169,11 @@ bool File :: read( Type* buffer,
   }
   free( host_buffer );
   return true;
#else
   throw Exceptions::MICSupportMissing();
#endif
}

   return true;
};

template< class Type, typename Device, typename Index >
bool File::write( const Type* buffer,
                  const Index _elements )
@@ -179,9 +197,17 @@ bool File :: write( const Type* buffer,
      return false;
   }

   this->writtenElements = 0;
   if( std::is_same< Device, Devices::Host >::value )
   return write_impl< Type, Device >( buffer, elements );
}

// Host
template< typename Type,
          typename Device,
          typename >
bool File::write_impl( const Type* buffer,
                       const std::size_t& elements )
{
   this->writtenElements = 0;
   if( std::fwrite( buffer,
                    sizeof( Type ),
                    elements,
@@ -194,9 +220,16 @@ bool File :: write( const Type* buffer,
   this->writtenElements = elements;
   return true;
}
   if( std::is_same< Device, Devices::Cuda >::value )

// Cuda
template< typename Type,
          typename Device,
          typename, typename >
bool File::write_impl( const Type* buffer,
                       const std::size_t& elements )
{
#ifdef HAVE_CUDA
   this->writtenElements = 0;
   const std::size_t host_buffer_size = std::min( tnlFileGPUvsCPUTransferBufferSize / sizeof( Type ),
                                             elements );
   using BaseType = typename std::remove_cv< Type >::type;
@@ -234,10 +267,16 @@ bool File :: write( const Type* buffer,
   throw Exceptions::CudaSupportMissing();
#endif
}

// MIC
   if( std::is_same< Device, Devices::MIC >::value )
template< typename Type,
          typename Device,
          typename, typename, typename >
bool File::write_impl( const Type* buffer,
                       const std::size_t& elements )
{
#ifdef HAVE_MIC
   this->writtenElements = 0;
   const std::size_t host_buffer_size = std::min( tnlFileGPUvsCPUTransferBufferSize / sizeof( Type ),
                                                  elements );
   Type * host_buffer = (Type *)malloc( sizeof( Type ) * host_buffer_size );
@@ -250,7 +289,7 @@ bool File :: write( const Type* buffer,

   while( this->writtenElements < elements )
   {
            Index transfer = std::min( elements - this->writtenElements, host_buffer_size );
       std::size_t transfer = std::min( elements - this->writtenElements, host_buffer_size );

      Devices::MICHider<const Type> device_buff;
      device_buff.pointer=buffer;
@@ -277,9 +316,9 @@ bool File :: write( const Type* buffer,
   }
   free( host_buffer );
   return true;
#else
   throw Exceptions::MICSupportMissing();
#endif
}
   return true;
};

} // namespace TNL
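For reference, the buffered transfer that the CUDA branches above rely on
follows roughly this pattern (a simplified sketch assuming a plain
cudaMemcpy-based copy; readToDevice, stagingSize and the fixed bound are
made-up names and values, and the real code additionally handles
const-qualified Type via std::remove_cv):

   #ifdef HAVE_CUDA
   #include <algorithm>
   #include <cstddef>
   #include <cstdio>
   #include <cuda_runtime.h>

   template< typename Type >
   bool readToDevice( std::FILE* file, Type* deviceBuffer, std::size_t elements )
   {
      // Bound the host staging buffer so large reads do not allocate huge host arrays.
      const std::size_t stagingSize = std::min< std::size_t >( 1 << 20, elements );
      Type* staging = new Type[ stagingSize ];

      std::size_t readElements = 0;
      while( readElements < elements )
      {
         const std::size_t transfer = std::min( elements - readElements, stagingSize );
         // 1) read a chunk from the file into host memory
         if( std::fread( staging, sizeof( Type ), transfer, file ) != transfer ) {
            delete[] staging;
            return false;
         }
         // 2) copy the chunk to the device buffer
         if( cudaMemcpy( deviceBuffer + readElements,
                         staging,
                         transfer * sizeof( Type ),
                         cudaMemcpyHostToDevice ) != cudaSuccess ) {
            delete[] staging;
            return false;
         }
         readElements += transfer;
      }
      delete[] staging;
      return true;
   }
   #endif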