From 049da77ed8abef7df24b7f82580f1170cb572b79 Mon Sep 17 00:00:00 2001
From: Wenqing Wang <wenqing.wang@ufz.de>
Date: Thu, 8 Jun 2017 15:15:27 +0200
Subject: [PATCH] [PETSc] Dropped some comments and one unused member in
 PETScVector

---
 MathLib/LinAlg/LinAlg.h                    | 12 ++++++--
 MathLib/LinAlg/PETSc/PETScLinearSolver.cpp |  2 +-
 MathLib/LinAlg/PETSc/PETScVector.cpp       | 11 +++----
 MathLib/LinAlg/PETSc/PETScVector.h         | 35 ++++++++--------------
 NumLib/DOF/MeshComponentMap.cpp            |  3 --
 ProcessLib/Process.cpp                     | 14 ---------
 ProcessLib/TES/TESProcess.cpp              |  5 ----
 7 files changed, 27 insertions(+), 55 deletions(-)

diff --git a/MathLib/LinAlg/LinAlg.h b/MathLib/LinAlg/LinAlg.h
index 0dee45dd135..154d8b45a80 100644
--- a/MathLib/LinAlg/LinAlg.h
+++ b/MathLib/LinAlg/LinAlg.h
@@ -149,7 +149,7 @@ namespace LinAlg
 // Vector
 
 /// Set local accessible vector in order to get entries.
-/// Call this before call operator[] or get(...).
+/// Call this before calling operator[] or get(...) of x.
 void setLocalAccessibleVector(PETScVector const& x);
 
 void set(PETScVector& x, double const a);
@@ -210,8 +210,14 @@ namespace LinAlg
 // Vector
 
-/// Set local accessible vector in order to get entries.
-/// Call this before call operator[] or get(...). Not used for EIGEN
+/**
+   Set local accessible vector in order to get entries. Call this
+   before calling operator[] or get(...) of x.
+   The function performs computation only if DDC is applied, e.g. in
+   parallel computing. Up to now, the Eigen vector is not used for
+   global vectors in parallel computing; therefore this function is
+   currently empty.
+*/
 void setLocalAccessibleVector(EigenVector const& x);
 
 void set(EigenVector& x, double const a);
 
diff --git a/MathLib/LinAlg/PETSc/PETScLinearSolver.cpp b/MathLib/LinAlg/PETSc/PETScLinearSolver.cpp
index e4fa6ee8052..a563e4e47c9 100644
--- a/MathLib/LinAlg/PETSc/PETScLinearSolver.cpp
+++ b/MathLib/LinAlg/PETSc/PETScLinearSolver.cpp
@@ -83,7 +83,7 @@ bool PETScLinearSolver::solve(PETScMatrix& A, PETScVector &b, PETScVector &x)
     KSPSetOperators(_solver, A.getRawMatrix(), A.getRawMatrix(),
                     DIFFERENT_NONZERO_PATTERN);
 #endif
 
-    KSPSolve(_solver, b.getData(), x.getData());
+    KSPSolve(_solver, b.getRawVector(), x.getRawVector());
 
     KSPConvergedReason reason;
     KSPGetConvergedReason(_solver, &reason);
diff --git a/MathLib/LinAlg/PETSc/PETScVector.cpp b/MathLib/LinAlg/PETSc/PETScVector.cpp
index ebd841c61a2..0ef9641355c 100644
--- a/MathLib/LinAlg/PETSc/PETScVector.cpp
+++ b/MathLib/LinAlg/PETSc/PETScVector.cpp
@@ -200,13 +200,10 @@ PetscScalar PETScVector::get(const PetscInt idx) const
     {
         return _entry_array[getLocalIndex(idx)];
     }
-    else
-    {
-        // Ghost entries, and its original index is 0.
-        const PetscInt id_p = (idx == -_size) ? 0 : std::abs(idx);
-        return _entry_array[id_p];
-    }
-    return 0; // avoid warning.
+
+    // Ghost entries, and its original index is 0.
+    const PetscInt id_p = (idx == -_size) ? 0 : std::abs(idx);
+    return _entry_array[id_p];
 }
 
diff --git a/MathLib/LinAlg/PETSc/PETScVector.h b/MathLib/LinAlg/PETSc/PETScVector.h
index 3a936f11ee6..706377cfa4d 100644
--- a/MathLib/LinAlg/PETSc/PETScVector.h
+++ b/MathLib/LinAlg/PETSc/PETScVector.h
@@ -17,7 +17,6 @@
 #pragma once
 
 #include <map>
-#include <memory>
 #include <string>
 #include <vector>
 
@@ -157,6 +156,14 @@ class PETScVector
         VecSetValues(_v, e_idxs.size(), &e_idxs[0], &sub_vec[0],
                      INSERT_VALUES);
     }
 
+    // TODO preliminary
+    void setZero() { VecSet(_v, 0.0); }
+
+    /// Disallow moving.
+    /// \todo This operator should be implemented properly when doing a
+    /// general cleanup of all matrix and vector classes.
+    PETScVector& operator = (PETScVector &&) = delete;
+
     /// Set local accessible vector in order to get entries.
     /// Call this before call operator[] or get(...).
     void setLocalAccessibleVector() const;
@@ -178,12 +185,6 @@ class PETScVector
     */
     void getGlobalVector(std::vector<PetscScalar>& u) const;
 
-    /*!
-       Copy local entries including ghost ones to an array
-       \param u Preallocated vector for the values of local entries.
-    */
-    void copyValues(std::vector<double>& u) const;
-
     /* Get an entry value. This is an expensive operation,
        and it only get local value. Use it for only test purpose
        Get the value of an entry by [] operator.
@@ -191,21 +192,6 @@ class PETScVector
      \param idx Index of the entry
     */
     PetscScalar get(const PetscInt idx) const;
 
-    // TODO eliminate in favour of getRawVector()
-    /// Get PETsc vector. Use it only for test purpose
-    const PETSc_Vec &getData() const
-    {
-        return _v;
-    }
-
-    // TODO preliminary
-    void setZero() { VecSet(_v, 0.0); }
-
-    /// Disallow moving.
-    /// \todo This operator should be implemented properly when doing a
-    /// general cleanup of all matrix and vector classes.
-    PETScVector& operator = (PETScVector &&) = delete;
-
     //! Exposes the underlying PETSc vector.
     PETSc_Vec getRawVector() { return _v; }
 
@@ -217,6 +203,11 @@ class PETScVector
     */
     PETSc_Vec getRawVector() const { return _v; }
 
+    /*!
+       Copy local entries including ghost ones to an array
+       \param u Preallocated vector for the values of local entries.
+    */
+    void copyValues(std::vector<double>& u) const;
     /*!
        View the global vector for test purpose. Do not use it for output a big vector.
        \param file_name File name for output
diff --git a/NumLib/DOF/MeshComponentMap.cpp b/NumLib/DOF/MeshComponentMap.cpp
index 160c30b8b91..c9be0069205 100644
--- a/NumLib/DOF/MeshComponentMap.cpp
+++ b/NumLib/DOF/MeshComponentMap.cpp
@@ -84,9 +84,6 @@ MeshComponentMap::MeshComponentMap(
                 OGS_FATAL("Global index in the system of equations"
                           " can only be numbered by the oder type"
                           " of ComponentOrder::BY_LOCATION");
-                // _num_global_dof is used as the global index offset
-                //global_id = static_cast<GlobalIndexType>(
-                //    _num_global_dof + mesh.getGlobalNodeID(j));
             }
 
             const bool is_ghost = mesh.isGhostNode(mesh.getNode(j)->getID());
diff --git a/ProcessLib/Process.cpp b/ProcessLib/Process.cpp
index f003e2f42e0..25e265394bb 100644
--- a/ProcessLib/Process.cpp
+++ b/ProcessLib/Process.cpp
@@ -122,8 +122,6 @@ void Process::assemble(const double t, GlobalVector const& x, GlobalMatrix& M,
                        GlobalMatrix& K, GlobalVector& b,
                        StaggeredCouplingTerm const& coupling_term)
 {
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
 
     assembleConcreteProcess(t, x, M, K, b, coupling_term);
@@ -138,8 +136,6 @@ void Process::assembleWithJacobian(const double t, GlobalVector const& x,
                                    GlobalVector& b, GlobalMatrix& Jac,
                                    StaggeredCouplingTerm const& coupling_term)
 {
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
     MathLib::LinAlg::setLocalAccessibleVector(xdot);
 
@@ -244,8 +240,6 @@ void Process::computeSparsityPattern()
 void Process::preTimestep(GlobalVector const& x, const double t,
                           const double delta_t)
 {
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
     preTimestepConcreteProcess(x, t, delta_t);
     _boundary_conditions.preTimestep(t);
 }
 
@@ -253,8 +247,6 @@ void Process::postTimestep(GlobalVector const& x)
 {
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
     postTimestepConcreteProcess(x);
 }
 
@@ -263,8 +255,6 @@ void Process::computeSecondaryVariable(const double t, GlobalVector const& x,
                                        StaggeredCouplingTerm const& coupled_term)
 {
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
 
     computeSecondaryVariableConcrete(t, x, coupled_term);
@@ -277,16 +267,12 @@ void Process::preIteration(const unsigned iter, const GlobalVector &x)
         cached_var->expire();
     }
 
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
     preIterationConcreteProcess(iter, x);
 }
 
 NumLib::IterationResult Process::postIteration(const GlobalVector &x)
 {
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(x);
     return postIterationConcreteProcess(x);
 }
diff --git a/ProcessLib/TES/TESProcess.cpp b/ProcessLib/TES/TESProcess.cpp
index ffdeb5dd6be..db778b975b8 100644
--- a/ProcessLib/TES/TESProcess.cpp
+++ b/ProcessLib/TES/TESProcess.cpp
@@ -285,8 +285,6 @@ NumLib::IterationResult TESProcess::postIterationConcreteProcess(
     std::vector<double> local_x_cache;
     std::vector<double> local_x_prev_ts_cache;
 
-    // The function only has computation if DDC is appied,
-    // e.g. Parallel comuting.
     MathLib::LinAlg::setLocalAccessibleVector(*_x_previous_timestep);
 
     auto check_variable_bounds = [&](std::size_t id,
@@ -344,7 +342,6 @@ TESProcess::computeVapourPartialPressure(
         auto const x_nV = Adsorption::AdsorptionReaction::getMolarFraction(
             x_mV, _assembly_params.M_react, _assembly_params.M_inert);
 
-        // TODO Problems with PETSc? (local vs. global index)
         result_cache->set(node_id, p * x_nV);
     }
 
@@ -382,7 +379,6 @@ TESProcess::computeRelativeHumidity(
         auto const p_S =
             Adsorption::AdsorptionReaction::getEquilibriumVapourPressure(T);
 
-        // TODO Problems with PETSc? (local vs. global index)
         result_cache->set(node_id, p * x_nV / p_S);
     }
 
@@ -423,7 +419,6 @@ TESProcess::computeEquilibriumLoading(
                 : _assembly_params.react_sys->getEquilibriumLoading(
                       p_V, T, _assembly_params.M_react);
 
-        // TODO Problems with PETSc? (local vs. global index)
         result_cache->set(node_id, C_eq);
     }
-- 
GitLab
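
Note (not part of the commit): the access pattern that the reworked
comments describe looks roughly as follows. This is a minimal sketch,
assuming an OGS build with PETSc enabled; it uses only names that appear
in the patch above (setLocalAccessibleVector, get, copyValues), while
readEntry and readLocalEntries are hypothetical helpers introduced here
purely for illustration.

    #include <vector>

    #include "MathLib/LinAlg/LinAlg.h"
    #include "MathLib/LinAlg/PETSc/PETScVector.h"

    // Hypothetical helper: read a single entry of a parallel vector.
    double readEntry(MathLib::PETScVector const& x, PetscInt idx)
    {
        // Must be called before operator[] or get(...) of x: it makes
        // the locally owned and ghost entries of x accessible on this
        // rank (a no-op without domain decomposition).
        MathLib::LinAlg::setLocalAccessibleVector(x);

        // Expensive, test-purpose access as documented in PETScVector.h;
        // idx must refer to an entry available on this rank.
        return x.get(idx);
    }

    // Hypothetical helper: copy all local entries, ghosts included.
    void readLocalEntries(MathLib::PETScVector const& x,
                          std::vector<double>& values)
    {
        MathLib::LinAlg::setLocalAccessibleVector(x);
        // values must be preallocated by the caller to the number of
        // local entries (ghost entries included), as copyValues()
        // requires per its doc comment.
        x.copyValues(values);
    }

The former getData() accessor is gone; PETSc calls such as KSPSolve in
PETScLinearSolver.cpp now obtain the underlying Vec via getRawVector().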