From a5e4b77797ce3ae6bde818f6ce9b52efd152ae22 Mon Sep 17 00:00:00 2001
From: Kacper Kornet
Date: Sun, 25 Jun 2023 11:12:50 +0100
Subject: [PATCH] Write output netCDF files in parallel

---
 core/src/CMakeLists.txt | 11 +++++++++++
 core/src/RectGridIO.cpp | 35 ++++++++++++++++++++++++++++++++---
 2 files changed, 43 insertions(+), 3 deletions(-)

diff --git a/core/src/CMakeLists.txt b/core/src/CMakeLists.txt
index 0496c92a4..f9faf7dbf 100644
--- a/core/src/CMakeLists.txt
+++ b/core/src/CMakeLists.txt
@@ -27,6 +27,10 @@ set(BaseSources
     "${ModelArrayStructure}/ModelArrayDetails.cpp"
     )
 
+set(ParallelNetCDFSources
+    "ParallelNetcdfFile.cpp"
+    )
+
 list(TRANSFORM BaseSources PREPEND "${CMAKE_CURRENT_SOURCE_DIR}/")
 
 set(ModuleDir "${CMAKE_CURRENT_SOURCE_DIR}/modules")
@@ -62,6 +66,13 @@ set(NextsimSources
     "${ModuleSources}"
     PARENT_SCOPE)
 
+if(ENABLE_MPI)
+    set(NextsimSources
+        "${NextsimSources}"
+        "${ParallelNetCDFSources}"
+        )
+endif()
+
 set(NextsimIncludeDirs
     "${NextsimIncludeDirs}"
     "${ModuleDir}"
diff --git a/core/src/RectGridIO.cpp b/core/src/RectGridIO.cpp
index 702bdc4a9..3ee3e8f5e 100644
--- a/core/src/RectGridIO.cpp
+++ b/core/src/RectGridIO.cpp
@@ -22,6 +22,10 @@
 #include
 #include
 
+#ifdef USE_MPI
+#include
+#endif
+
 #include
 #include
 #include
@@ -171,9 +175,8 @@ void RectGridIO::dumpModelState(const ModelState& state, const ModelMetadata& me
     const std::string& filePath, bool isRestart) const
 {
 #ifdef USE_MPI
-    auto filePathRank = filePath + "_" + std::to_string(metadata.mpiMyRank);
     std::cout << "MPI metadata: " << metadata.mpiMyRank << " / " << metadata.mpiSize << "\n";
-    netCDF::NcFile ncFile(filePathRank, netCDF::NcFile::replace);
+    netCDF::NcFilePar ncFile(filePath, netCDF::NcFile::replace, metadata.mpiComm);
 #else
     netCDF::NcFile ncFile(filePath, netCDF::NcFile::replace);
 #endif
@@ -193,22 +196,48 @@ void RectGridIO::dumpModelState(const ModelState& state, const ModelMetadata& me
 
     // Create the dimension data, since it has to be in the same group as the
     // data or the parent group
+#ifdef USE_MPI
+    netCDF::NcDim xDim = dataGroup.addDim(dimensionNames[0], metadata.globalExtentX);
+    netCDF::NcDim yDim = dataGroup.addDim(dimensionNames[1], metadata.globalExtentY);
+#else
     netCDF::NcDim xDim = dataGroup.addDim(dimensionNames[0], nx);
     netCDF::NcDim yDim = dataGroup.addDim(dimensionNames[1], ny);
-    std::vector<netCDF::NcDim> dims2 = { xDim, yDim };
+#endif
     netCDF::NcDim zDim = dataGroup.addDim(dimensionNames[2], nz);
+    std::vector<netCDF::NcDim> dims2 = { xDim, yDim };
     std::vector<netCDF::NcDim> dims3 = { xDim, yDim, zDim };
 
+//
+#ifdef USE_MPI
+    // Set the origins and extensions for writing 2D data based
+    // on the MPI decomposition
+    std::vector<size_t> start(3);
+    std::vector<size_t> size(3);
+    start[0] = metadata.localCornerX;
+    start[1] = metadata.localCornerY;
+    start[2] = 0;
+    size[0] = metadata.localExtentX;
+    size[1] = metadata.localExtentY;
+    size[2] = nz;
+#endif
     for (const auto entry : state.data) {
         const std::string& name = entry.first;
         if (entry.second.getType() == ModelArray::Type::H) {
             netCDF::NcVar var(dataGroup.addVar(name, netCDF::ncDouble, dims2));
             var.putAtt(mdiName, netCDF::ncDouble, MissingData::value());
+#ifdef USE_MPI
+            var.putVar(start, size, entry.second.getData());
+#else
             var.putVar(entry.second.getData());
+#endif
         } else if (entry.second.getType() == ModelArray::Type::Z) {
             netCDF::NcVar var(dataGroup.addVar(name, netCDF::ncDouble, dims3));
             var.putAtt(mdiName, netCDF::ncDouble, MissingData::value());
+#ifdef USE_MPI
+            var.putVar(start, size, entry.second.getData());
+#else
             var.putVar(entry.second.getData());
+#endif
         }
     }
 
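
Note: ParallelNetcdfFile.cpp, which supplies the netCDF::NcFilePar class used above, is added to the build by this patch but its contents are not part of this diff. The following is only a minimal sketch of how such a wrapper could be written on top of the netCDF-C parallel API (nc_create_par), assuming NcFilePar derives from netCDF::NcFile and reuses the protected myId/nullObject members inherited from NcGroup; the class name and constructor arguments mirror the call site above, everything else is illustrative rather than the actual file.

    // Hypothetical sketch -- NOT the ParallelNetcdfFile.cpp added by this patch.
    // Assumes netCDF-C was built with parallel (MPI) support and that netcdf-cxx4's
    // NcGroup exposes its protected handle members (myId, nullObject) to derived classes.
    #include <mpi.h>
    #include <ncFile.h>      // netCDF::NcFile (netcdf-cxx4)
    #include <netcdf.h>      // NC_NETCDF4, NC_CLOBBER, NC_NOERR, nc_strerror
    #include <netcdf_par.h>  // nc_create_par
    #include <stdexcept>
    #include <string>

    namespace netCDF {
    class NcFilePar : public NcFile {
    public:
        // Create a NetCDF-4 file that every rank in 'comm' writes to through one shared handle.
        NcFilePar(const std::string& path, const FileMode fMode, MPI_Comm comm,
                  MPI_Info info = MPI_INFO_NULL)
        {
            // Only file creation is sketched here; read/write modes would use nc_open_par.
            if (fMode != replace) {
                throw std::runtime_error("NcFilePar sketch only handles FileMode::replace");
            }
            int ncId = -1;
            int status = nc_create_par(path.c_str(), NC_NETCDF4 | NC_CLOBBER, comm, info, &ncId);
            if (status != NC_NOERR) {
                throw std::runtime_error(nc_strerror(status));
            }
            myId = ncId;        // hand the C-level id to the inherited NcGroup machinery
            nullObject = false; // so addGroup/addDim/addVar and ~NcFile()'s close() work as usual
        }
    };
    } // namespace netCDF

Opened this way, the existing addGroup/addDim/addVar calls in dumpModelState need no changes, and each rank writes only its own hyperslab via putVar(start, size, ...). That is why the diff switches the x and y dimensions to metadata.globalExtentX/globalExtentY while start/size are taken from the rank-local corner and extent.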