diff --git a/FileIO/VtkIO/VtuInterface.cpp b/FileIO/VtkIO/VtuInterface.cpp
index b85dccd787dc1640b55bbe2d970e17d7af0f2f0a..426c22c7f2e3b56b33f8a845e2b9f219fcd4752f 100644
--- a/FileIO/VtkIO/VtuInterface.cpp
+++ b/FileIO/VtkIO/VtuInterface.cpp
@@ -21,6 +21,12 @@
 
 #include <logog/include/logog.hpp>
 
+#include <boost/algorithm/string/erase.hpp>
+
+#ifdef USE_PETSC
+#include <petsc.h>
+#endif
+
 #include "BaseLib/FileTools.h"
 #include "InSituLib/VtkMappedMeshSource.h"
 #include "MeshLib/Mesh.h"
@@ -60,6 +66,38 @@ MeshLib::Mesh* VtuInterface::readVTUFile(std::string const &file_name)
 }
 
 bool VtuInterface::writeToFile(std::string const &file_name)
+{
+#ifdef USE_PETSC
+    // This also works for other DDC approaches.
+    // In such a case, an MPI_Comm argument needs to be added to this member
+    // function, and PETSC_COMM_WORLD should be replaced with that argument.
+    int mpi_rank;
+    MPI_Comm_rank(PETSC_COMM_WORLD, &mpi_rank);
+    const std::string file_name_base = boost::erase_last_copy(file_name, ".vtu");
+
+    const std::string file_name_rank = file_name_base + "_" +
+                                       std::to_string(mpi_rank) + ".vtu";
+    const bool vtu_status_i = writeVTU(file_name_rank);
+    bool vtu_status = false;
+    MPI_Allreduce(&vtu_status_i, &vtu_status, 1, MPI_C_BOOL, MPI_LAND, PETSC_COMM_WORLD);
+
+    int mpi_size;
+    MPI_Comm_size(PETSC_COMM_WORLD, &mpi_size);
+    bool pvtu_status = false;
+    if (mpi_rank == 0)
+    {
+        pvtu_status = writeVTU(file_name_base + ".pvtu", mpi_size);
+    }
+    MPI_Bcast(&pvtu_status, 1, MPI_C_BOOL, 0, PETSC_COMM_WORLD);
+
+    return vtu_status && pvtu_status;
+
+#else
+    return writeVTU(file_name);
+#endif
+}
+
+bool VtuInterface::writeVTU(std::string const &file_name, const int num_partitions)
 {
     if(!_mesh)
     {
@@ -76,6 +114,7 @@ bool VtuInterface::writeToFile(std::string const &file_name)
     vtkSmartPointer<vtkXMLUnstructuredGridWriter> vtuWriter =
         vtkSmartPointer<vtkXMLUnstructuredGridWriter>::New();
+
     vtuWriter->SetInputConnection(vtkSource->GetOutputPort());
 
     if(_use_compressor)
         vtuWriter->SetCompressorTypeToZLib();
@@ -97,6 +136,9 @@ bool VtuInterface::writeToFile(std::string const &file_name)
     }
 
     vtuWriter->SetFileName(file_name.c_str());
+    if (num_partitions > 0)
+        vtuWriter->SetNumberOfPieces(num_partitions);
+
     return (vtuWriter->Write() > 0);
 }
 
diff --git a/FileIO/VtkIO/VtuInterface.h b/FileIO/VtkIO/VtuInterface.h
index 2e4306fd33a64230512178383484b3578c035af6..1e20f19188c4b5ac1eefd044cd509b36f414f2e7 100644
--- a/FileIO/VtkIO/VtuInterface.h
+++ b/FileIO/VtkIO/VtuInterface.h
@@ -38,13 +38,20 @@ public:
     ~VtuInterface();
 
     /// Read an unstructured grid from a VTU file
-    /// \returns The converted mesh or a nullptr if reading failed
+    /// \return The converted mesh or a nullptr if reading failed
     static MeshLib::Mesh* readVTUFile(std::string const &file_name);
 
     /// Writes the given mesh to file.
-    /// \returns True on success, false on error
+    /// Note: for other DDC approaches an MPI_Comm type argument is needed.
+    /// \return True on success, false on error
     bool writeToFile(std::string const &file_name);
 
+    /// Writes the given mesh to a vtu file.
+    /// \param file_name File name.
+    /// \param num_partitions Number of partitions to be merged.
+    /// \return True on success, false on error
+    bool writeVTU(std::string const &file_name, const int num_partitions = 0);
+
 private:
     const MeshLib::Mesh* _mesh;
     int _data_mode;
diff --git a/ProcessLib/GroundwaterFlowProcess.h b/ProcessLib/GroundwaterFlowProcess.h
index 39623c1b88c78a2a994c53e9e861c12345a1f09b..6a29916db9852b5fe2bdf343f59c165e0140a40f 100644
--- a/ProcessLib/GroundwaterFlowProcess.h
+++ b/ProcessLib/GroundwaterFlowProcess.h
@@ -269,9 +269,8 @@ public:
             {
                 MeshLib::Location const l(_mesh.getID(),
                                           MeshLib::MeshItemType::Node, i);
-                auto const global_index =
-                    _local_to_global_index_map->getGlobalIndex(
-                        l, 0); // 0 is the component id.
+                auto const global_index = // 0 is the component id.
+                    std::abs(_local_to_global_index_map->getGlobalIndex(l, 0));
                 _x->set(global_index,
                         variable.getInitialConditionValue(*_mesh.getNode(i)));
             }
@@ -324,26 +323,6 @@ public:
     {
         DBUG("Postprocessing GroundwaterFlowProcess.");
 
-#ifdef USE_PETSC // Note: this is only a test
-
-        std::vector<PetscScalar> u(_x->size());
-        _x->getGlobalVector(&u[0]);
-
-        int rank;
-        MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
-        if(rank == 0)
-        {
-            std::string output_file_name = file_name;
-            boost::erase_last(output_file_name, ".vtu");
-            output_file_name.append(".dat");
-            std::ofstream os(output_file_name);
-            os << "SCALARS HEAD double 1" << endl;
-            os << "LOOKUP_TABLE default" << endl;
-            for (std::size_t i = 0; i < u.size(); ++i)
-                os << u[i] << "\n";
-            os.close();
-        }
-#else
         std::string const property_name = "Result";
 
         // Get or create a property vector for results.
@@ -362,14 +341,32 @@ public:
         }
         assert(result && result->size() == _x->size());
 
-        // Copy result
-        for (std::size_t i = 0; i < _x->size(); ++i)
-            (*result)[i] = (*_x)[i];
+        // In the case of DDC under parallel computation
+        if ((_x->getRangeEnd() - _x->getRangeBegin()) != _x->size())
+        {
+            std::unique_ptr<double[]> u(new double[_x->size()]);
+            _x->getValues(u.get()); // get the global solution
+
+            std::size_t const n = _mesh.getNNodes();
+            result->resize(n);
+            for (std::size_t i = 0; i < n; ++i)
+            {
+                MeshLib::Location const l(_mesh.getID(),
+                                          MeshLib::MeshItemType::Node, i);
+                auto const global_index = std::abs( // 0 is the component id.
+                    _local_to_global_index_map->getGlobalIndex(l, 0));
+                (*result)[i] = u[global_index];
+            }
+        }
+        else // serial computation
+        {
+            // Copy result
+            _x->getValues(&(*result)[0]);
+        }
 
         // Write output file
         FileIO::VtuInterface vtu_interface(&_mesh, vtkXMLWriter::Binary, true);
         vtu_interface.writeToFile(file_name);
-#endif
     }
 
     void postTimestep(std::string const& file_name, const unsigned /*timestep*/) override
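Usage note: with USE_PETSC defined, writeToFile() now writes one <base>_<rank>.vtu piece per MPI rank plus a <base>.pvtu file on rank 0, and all ranks agree on the combined status via the MPI_Allreduce/MPI_Bcast calls above; without PETSc the behaviour is unchanged. A minimal caller sketch, mirroring the writer setup used in GroundwaterFlowProcess::post() (the free function writeSolution and the output name "results.vtu" are placeholders, not part of this patch):

// Sketch only: writeSolution() and "results.vtu" are hypothetical.
#include <vtkXMLWriter.h>

#include "FileIO/VtkIO/VtuInterface.h"
#include "MeshLib/Mesh.h"

bool writeSolution(MeshLib::Mesh const& mesh)
{
    // Binary data mode with zlib compression, as in GroundwaterFlowProcess.
    FileIO::VtuInterface vtu_interface(&mesh, vtkXMLWriter::Binary, true);

    // With USE_PETSC and N ranks this produces results_0.vtu ... results_{N-1}.vtu
    // plus results.pvtu (written by rank 0); a serial build writes results.vtu only.
    return vtu_interface.writeToFile("results.vtu");
}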