diff --git a/src/io.cpp b/src/io.cpp
index 4354c227b..cb6c50aad 100644
--- a/src/io.cpp
+++ b/src/io.cpp
@@ -35,6 +35,8 @@
 #include "io.hpp"
 
 #include <hdf5.h>
+#include <sys/stat.h>
+#include <sys/types.h>
 
 #include "M2ulPhyS.hpp"
 #include "utils.hpp"
@@ -268,7 +270,8 @@ void M2ulPhyS::restart_files_hdf5(string mode, string inputFileName) {
   return;
 }
 
-void partitioning_file_hdf5(std::string mode, MPI_Groups *groupsMPI, int nelemGlobal, Array<int> &partitioning) {
+void partitioning_file_hdf5(std::string mode, MPI_Groups *groupsMPI, int nelemGlobal, Array<int> &partitioning,
+                            std::string pathName) {
   MPI_Comm TPSCommWorld = groupsMPI->getTPSCommWorld();
   const bool rank0 = groupsMPI->isWorldRoot();
   const int nprocs = groupsMPI->getTPSWorldSize();
@@ -281,8 +284,10 @@ void partitioning_file_hdf5(std::string mode, MPI_Groups *groupsMPI, int nelemGl
   // hid_t file, dataspace, data_soln;
   hid_t file = -1, dataspace;
   herr_t status;
-  std::string fileName("partition");
-  fileName += "." + std::to_string(nprocs) + "p.h5";
+  std::string fileNameEnd("partition");
+  fileNameEnd += "." + std::to_string(nprocs) + "p.h5";
+  std::string fileName;
+  fileName = pathName + fileNameEnd;
 
   assert((mode == "read") || (mode == "write"));
 
@@ -402,7 +407,7 @@ void read_variable_data_hdf5(hid_t file, string varName, size_t index, double *d
   H5Dclose(data_soln);
 }
 
-IOOptions::IOOptions() : output_dir_("output"), restart_mode_("standard") {}
+IOOptions::IOOptions() : output_dir_("output"), restart_dir_("./"), restart_mode_("standard") {}
 
 void IOOptions::read(TPS::Tps *tps, std::string prefix) {
   std::string basename;
@@ -417,8 +422,12 @@ void IOOptions::read(TPS::Tps *tps, std::string prefix) {
   tps->getInput((basename + "/exitCheckFreq").c_str(), exit_check_frequency_, 500);
   assert(exit_check_frequency_ > 0);
 
+  tps->getInput((basename + "/restartBase").c_str(), restart_dir_, std::string("./"));
   tps->getInput((basename + "/restartMode").c_str(), restart_mode_, std::string("standard"));
   setRestartFlags();
+
+  int status = mkdir(restart_dir_.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
+  if (status == 0) std::cout << "created restartBase directory: " << restart_dir_ << endl;
 }
 
 void IOOptions::setRestartFlags() {
diff --git a/src/io.hpp b/src/io.hpp
index 70c4d449b..4685132ba 100644
--- a/src/io.hpp
+++ b/src/io.hpp
@@ -66,8 +66,8 @@ class IOOptions {
 
   int exit_check_frequency_ = 500;
 
+  std::string restart_dir_;
   std::string restart_mode_;
-
   bool restart_variable_order_ = false;
   bool restart_serial_read_ = false;
   bool restart_serial_write_ = false;
@@ -362,5 +362,6 @@ void write_variable_data_hdf5(hid_t group, std::string varName, hid_t dataspace,
  * @todo Refactor this function to make it more generic and fit better
  * into the IODataOrganizer paradigm.
  */
-void partitioning_file_hdf5(std::string mode, MPI_Groups *groupsMPI, int nelemGlobal, mfem::Array<int> &partitioning);
+void partitioning_file_hdf5(std::string mode, MPI_Groups *groupsMPI, int nelemGlobal, mfem::Array<int> &partitioning,
+                            std::string fileName = "./");
 #endif  // IO_HPP_
diff --git a/src/loMachIO.cpp b/src/loMachIO.cpp
index e2c7997af..4e1701186 100644
--- a/src/loMachIO.cpp
+++ b/src/loMachIO.cpp
@@ -181,7 +181,8 @@ void LoMachSolver::restart_files_hdf5(string mode, string inputFileName) {
     }
     serialName = inputFileName;
   } else {
-    serialName = "restart_";
+    serialName = loMach_opts_.io_opts_.restart_dir_;
+    serialName.append("/restart_");
     serialName.append(loMach_opts_.io_opts_.output_dir_);
     serialName.append(".sol.h5");
   }
diff --git a/src/mesh_base.cpp b/src/mesh_base.cpp
index 09e0477bd..122e45a85 100644
--- a/src/mesh_base.cpp
+++ b/src/mesh_base.cpp
@@ -174,13 +174,17 @@ void MeshBase::initializeMesh() {
     // nelemGlobal_ = mesh->GetNE();
     if (rank0_) grvy_printf(ginfo, "Total # of mesh elements = %i\n", nelemGlobal_);
 
+    string restartDir;
+    restartDir = loMach_opts_->io_opts_.restart_dir_;
+    restartDir += "/";
+
     if (nprocs_ > 1) {
       if (loMach_opts_->io_opts_.restart_serial_read_) {
         assert(serial_mesh_->Conforming());
         partitioning_ = Array<int>(serial_mesh_->GeneratePartitioning(nprocs_, defaultPartMethod), nelemGlobal_);
-        partitioning_file_hdf5("write", groupsMPI, nelemGlobal_, partitioning_);
+        partitioning_file_hdf5("write", groupsMPI, nelemGlobal_, partitioning_, restartDir);
       } else {
-        partitioning_file_hdf5("read", groupsMPI, nelemGlobal_, partitioning_);
+        partitioning_file_hdf5("read", groupsMPI, nelemGlobal_, partitioning_, restartDir);
       }
     }
 
@@ -203,12 +207,16 @@ void MeshBase::initializeMesh() {
       serial_mesh_->UniformRefinement();
    }
 
+    string restartDir;
+    restartDir = loMach_opts_->io_opts_.restart_dir_;
+    restartDir += "/";
+
     // generate partitioning file (we assume conforming meshes)
     nelemGlobal_ = serial_mesh_->GetNE();
     if (nprocs_ > 1) {
       assert(serial_mesh_->Conforming());
       partitioning_ = Array<int>(serial_mesh_->GeneratePartitioning(nprocs_, defaultPartMethod), nelemGlobal_);
-      if (rank0_) partitioning_file_hdf5("write", groupsMPI, nelemGlobal_, partitioning_);
+      if (rank0_) partitioning_file_hdf5("write", groupsMPI, nelemGlobal_, partitioning_, restartDir);
     }
   }
 
diff --git a/src/utils.cpp b/src/utils.cpp
index 6af93458d..8dc7ab759 100644
--- a/src/utils.cpp
+++ b/src/utils.cpp
@@ -1156,6 +1156,13 @@ void Orthogonalize(Vector &v, MPI_Comm comm) {
   v -= global_sum / static_cast<double>(global_size);
 }
 
+bool copyFile(const char *SRC, const char *DEST) {
+  std::ifstream src(SRC, std::ios::binary);
+  std::ofstream dest(DEST, std::ios::binary);
+  dest << src.rdbuf();
+  return src && dest;
+}
+
 namespace mfem {
 GradientVectorGridFunctionCoefficient::GradientVectorGridFunctionCoefficient(const GridFunction *gf)
     : MatrixCoefficient((gf) ? gf->VectorDim() : 0) {
diff --git a/src/utils.hpp b/src/utils.hpp
index 4fb5ef324..2cd4f0d86 100644
--- a/src/utils.hpp
+++ b/src/utils.hpp
@@ -174,6 +174,8 @@ void scalarGrad3D(ParGridFunction &u, ParGridFunction &gu);
 void vectorGrad3DV(FiniteElementSpace *fes, Vector u, Vector *gu, Vector *gv, Vector *gw);
 void scalarGrad3DV(FiniteElementSpace *fes, FiniteElementSpace *vfes, Vector u, Vector *gu);
 
+bool copyFile(const char *SRC, const char *DEST);
+
 /// Eliminate essential BCs in an Operator and apply to RHS.
 /// rename this to something sensible "ApplyEssentialBC" or something
 void EliminateRHS(Operator &A, ConstrainedOperator &constrainedA, const Array<int> &ess_tdof_list, Vector &x, Vector &b,
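For reference, a small standalone sketch (not part of the patch) of how the new `restartBase` directory ends up in the generated file names; the directory name, output name, and process count below are made-up values:

```cpp
#include <iostream>
#include <string>

int main() {
  // Hypothetical inputs: io/restartBase (defaults to "./" in this patch),
  // the configured output directory name, and the MPI world size.
  std::string restartDir = "./restart_files";
  std::string outputDir = "my-run";
  int nprocs = 8;

  // Partition file path, composed as in partitioning_file_hdf5() with
  // pathName = restartDir + "/" (see the mesh_base.cpp hunks).
  std::string partFile = restartDir + "/" + "partition." + std::to_string(nprocs) + "p.h5";

  // Serial restart file path, composed as in LoMachSolver::restart_files_hdf5().
  std::string serialName = restartDir;
  serialName.append("/restart_");
  serialName.append(outputDir);
  serialName.append(".sol.h5");

  std::cout << partFile << "\n" << serialName << std::endl;
  // Prints: ./restart_files/partition.8p.h5
  //         ./restart_files/restart_my-run.sol.h5
  return 0;
}
```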
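Likewise, a minimal usage sketch for the new `copyFile()` helper; the file names are hypothetical and the body is repeated from the utils.cpp hunk only so the snippet compiles on its own:

```cpp
#include <fstream>
#include <iostream>

// Repeated from the utils.cpp hunk above so this sketch is self-contained.
bool copyFile(const char *SRC, const char *DEST) {
  std::ifstream src(SRC, std::ios::binary);
  std::ofstream dest(DEST, std::ios::binary);
  dest << src.rdbuf();
  return src && dest;
}

int main() {
  // Hypothetical paths, e.g. staging an existing restart file into a restart directory.
  if (!copyFile("restart_my-run.sol.h5", "./restart_files/restart_my-run.sol.h5")) {
    std::cerr << "copy failed" << std::endl;
    return 1;
  }
  return 0;
}
```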