changed: refactor setup of sparsity pattern into separate methods

This makes the code more readable.
This commit is contained in:
Arne Morten Kvarving 2022-05-06 14:29:56 +02:00
parent 048e37ffcb
commit 646bf62b0f
2 changed files with 270 additions and 195 deletions

View File

@ -195,9 +195,76 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
MatSetFromOptions(pA);
// Allocate sparsity pattern
if (adm.isParallel() && !adm.dd.isPartitioned()) {
if (adm.isParallel() && !adm.dd.isPartitioned())
this->setupSparsityDD(sam);
else if (adm.dd.isPartitioned())
this->setupSparsityPartitioned(sam);
else
this->setupSparsitySerial(sam);
MatSetUp(pA);
switch (solParams.getLinSysType()) {
case LinAlg::SPD:
MatSetOption(pA, MAT_SPD, PETSC_TRUE);
break;
case LinAlg::SYMMETRIC:
MatSetOption(pA, MAT_SYMMETRIC, PETSC_TRUE);
break;
default:
break;
}
#ifndef SP_DEBUG
// Do not abort program for allocation error in release mode
MatSetOption(pA,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
#endif
} else {
if (adm.isParallel() && !adm.dd.isPartitioned())
this->setupBlockSparsityDD(sam);
else if (adm.dd.isPartitioned())
assert(0);//this->setupSparsityPartitioned(sam);
else
this->setupBlockSparsitySerial(sam);
isvec.resize(adm.dd.getNoBlocks());
// index sets
for (size_t i = 0; i < isvec.size(); ++i) {
std::vector<int> blockEq;
blockEq.reserve(adm.dd.getMaxEq(i+1)-adm.dd.getMinEq(i+1)+1);
for (auto& it : adm.dd.getBlockEqs(i)) {
int eq = adm.dd.getGlobalEq(it);
if (eq >= adm.dd.getMinEq())
blockEq.push_back(eq-1);
}
ISCreateGeneral(*adm.getCommunicator(),blockEq.size(),
blockEq.data(),PETSC_COPY_VALUES,&isvec[i]);
}
MatCreateNest(*adm.getCommunicator(),solParams.getNoBlocks(),isvec.data(),
solParams.getNoBlocks(),isvec.data(),matvec.data(),&pA);
#ifndef SP_DEBUG
// Do not abort program for allocation error in release mode
for (auto& it : matvec)
MatSetOption(it,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
#endif
}
assembled = false;
}
void PETScMatrix::setupSparsityDD (const SAM& sam)
{
// Allocate sparsity pattern
std::vector<IntSet> dofc;
sam.getDofCouplings(dofc);
int ifirst = adm.dd.getMinEq();
int ilast = adm.dd.getMaxEq();
PetscInt neq = ilast - ifirst + 1;
PetscIntVec d_nnz(neq, 0);
IntVec o_nnz_g(adm.dd.getNoGlbEqs(), 0);
for (int i = 0; i < sam.neq; ++i) {
@ -226,8 +293,13 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
MatMPIAIJSetPreallocation(pA,PETSC_DEFAULT,d_nnz.data(),
PETSC_DEFAULT,o_nnz.data());
} else if (adm.dd.isPartitioned()) {
}
void PETScMatrix::setupSparsityPartitioned (const SAM& sam)
{
// Setup sparsity pattern for global matrix
PetscInt neq = adm.dd.getMaxEq() - adm.dd.getMinEq() + 1;
SparseMatrix* lA = new SparseMatrix(neq, sam.neq);
int iMin = adm.dd.getMinEq(0);
int iMax = adm.dd.getMaxEq(0);
@ -256,7 +328,13 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
(*this)(i,j) = 0.0;
}
this->optimiseSLU();
} else {
}
void PETScMatrix::setupSparsitySerial (const SAM& sam)
{
std::vector<IntSet> dofc;
sam.getDofCouplings(dofc);
PetscIntVec Nnz;
for (const auto& it : dofc)
Nnz.push_back(it.size());
@ -271,32 +349,17 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
MatSeqAIJSetColumnIndices(pA,&col[0]);
MatSetOption(pA, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE);
MatSetOption(pA, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE);
}
}
MatSetUp(pA);
switch (solParams.getLinSysType()) {
case LinAlg::SPD:
MatSetOption(pA, MAT_SPD, PETSC_TRUE);
break;
case LinAlg::SYMMETRIC:
MatSetOption(pA, MAT_SYMMETRIC, PETSC_TRUE);
break;
default:
break;
}
#ifndef SP_DEBUG
// Do not abort program for allocation error in release mode
MatSetOption(pA,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
#endif
} else {
const DomainDecomposition& dd = adm.dd;
size_t blocks = solParams.getNoBlocks();
std::vector<std::array<int,2>> PETScMatrix::setupGlb2Blk (const SAM& sam)
{
// map from sparse matrix indices to block matrix indices
glb2Blk.resize(A.size());
size_t blocks = solParams.getNoBlocks();
const DomainDecomposition& dd = adm.dd;
std::vector<std::array<int,2>> eq2b(sam.neq, {{-1, 0}}); // cache
for (size_t j = 0; j < cols(); ++j) {
for (int i = IA[j]; i < IA[j+1]; ++i) {
int iblk = -1;
@ -332,7 +395,18 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
}
}
if (adm.isParallel()) {
return eq2b;
}
void PETScMatrix::setupBlockSparsityDD (const SAM& sam)
{
size_t blocks = solParams.getNoBlocks();
const DomainDecomposition& dd = adm.dd;
std::vector<IntSet> dofc;
sam.getDofCouplings(dofc);
std::vector<std::array<int,2>> eq2b = this->setupGlb2Blk(sam);
std::vector<PetscIntVec> d_nnz(blocks*blocks);
std::vector<IntVec> o_nnz_g(blocks*blocks);
size_t k = 0;
@ -385,7 +459,17 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
MatSetUp(matvec[k]);
}
}
} else {
}
void PETScMatrix::setupBlockSparsitySerial (const SAM& sam)
{
size_t blocks = solParams.getNoBlocks();
const DomainDecomposition& dd = adm.dd;
std::vector<IntSet> dofc;
sam.getDofCouplings(dofc);
this->setupGlb2Blk(sam);
auto it = matvec.begin();
for (size_t i = 0; i < blocks; ++i) {
for (size_t j = 0; j < blocks; ++j, ++it) {
@ -403,34 +487,6 @@ void PETScMatrix::initAssembly (const SAM& sam, bool delayLocking)
MatSetUp(*it);
}
}
}
isvec.resize(dd.getNoBlocks());
// index sets
for (size_t i = 0; i < isvec.size(); ++i) {
std::vector<int> blockEq;
blockEq.reserve(dd.getMaxEq(i+1)-dd.getMinEq(i+1)+1);
for (auto& it : dd.getBlockEqs(i)) {
int eq = dd.getGlobalEq(it);
if (eq >= dd.getMinEq())
blockEq.push_back(eq-1);
}
ISCreateGeneral(*adm.getCommunicator(),blockEq.size(),
blockEq.data(),PETSC_COPY_VALUES,&isvec[i]);
}
MatCreateNest(*adm.getCommunicator(),solParams.getNoBlocks(),isvec.data(),
solParams.getNoBlocks(),isvec.data(),matvec.data(),&pA);
#ifndef SP_DEBUG
// Do not abort program for allocation error in release mode
for (auto& it : matvec)
MatSetOption(it,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
#endif
}
assembled = false;
}

View File

@ -186,6 +186,25 @@ protected:
//! \brief Disabled copy constructor.
PETScMatrix(const PETScMatrix& A) = delete;
//! \brief Setup sparsity pattern for a DD partitioned model.
//! \param[in] sam Auxiliary data describing the FE model topology, etc.
void setupSparsityDD(const SAM& sam);
//! \brief Setup sparsity pattern for a graph partitioned model.
//! \param[in] sam Auxiliary data describing the FE model topology, etc.
void setupSparsityPartitioned(const SAM& sam);
//! \brief Setup sparsity pattern for a serial model.
//! \param[in] sam Auxiliary data describing the FE model topology, etc.
void setupSparsitySerial(const SAM& sam);
//! \brief Setup sparsity pattern for block-matrices for a DD partitioned model.
//! \param[in] sam Auxiliary data describing the FE model topology, etc.
void setupBlockSparsityDD(const SAM& sam);
//! \brief Setup sparsity pattern for block-matrices for a serial model.
//! \param[in] sam Auxiliary data describing the FE model topology, etc.
void setupBlockSparsitySerial(const SAM& sam);
//! \brief Calculates the global-to-block mapping for equations.
//! \param[in] sam Auxiliary data describing the FE model topology, etc.
//! \return The global-to-block equation mapping.
std::vector<std::array<int,2>> setupGlb2Blk (const SAM& sam);
Mat pA; //!< The actual PETSc matrix
KSP ksp; //!< Linear equation solver
MatNullSpace* nsp; //!< Null-space of linear operator