From 9c1f94d4fd6e5489a1c1f68f3e834e284c7c8900 Mon Sep 17 00:00:00 2001
From: Mark Olesen <Mark.Olesen@esi-group.com>
Date: Mon, 11 Oct 2021 22:59:50 +0200
Subject: [PATCH] BUG: parallel blocking with faFieldDecomposer,
 faMeshReconstructor (fixes #2237)

- the patch remapping in faFieldDecomposer calls weights internalField()
  which can trigger parallel communication on the complete mesh for
  some processors only (ie, blocks). Force a priori creation of weights
  instead.

- ensure that the complete mesh (reconstruction helper) is serial when
  adding patches.
---
 .../decompose/faDecompose/faFieldDecomposer.C | 15 ++++++++++++---
 .../faDecompose/faFieldDecomposerFields.C     |  4 ++--
 .../faReconstruct/faMeshReconstructor.C       |  6 ++++++
 3 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/src/parallel/decompose/faDecompose/faFieldDecomposer.C b/src/parallel/decompose/faDecompose/faFieldDecomposer.C
index f0966dfe47a..6c6bc6a73b4 100644
--- a/src/parallel/decompose/faDecompose/faFieldDecomposer.C
+++ b/src/parallel/decompose/faDecompose/faFieldDecomposer.C
@@ -293,7 +293,7 @@ void Foam::faFieldDecomposer::reset
             new processorEdgePatchFieldDecomposer
             (
                 procMesh_.boundary()[patchi].size(),
-                static_cast<const labelUList&>(localPatchSlice)
+                localPatchSlice
             )
         );
     }
@@ -309,6 +309,15 @@ void Foam::faFieldDecomposer::reset(const faMesh& completeMesh)
     processorAreaPatchFieldDecomposerPtrs_.resize(nMappers);
     processorEdgePatchFieldDecomposerPtrs_.resize(nMappers);
 
+    // Create weightings now - needed for proper parallel synchronization
+    (void)completeMesh.weights();
+
+    // faPatches don't have their own start() - so these are invariant
+    const labelList completePatchStarts
+    (
+        completeMesh.boundary().patchStarts()
+    );
+
     forAll(boundaryAddressing_, patchi)
     {
         const label oldPatchi = boundaryAddressing_[patchi];
@@ -324,7 +333,7 @@ void Foam::faFieldDecomposer::reset(const faMesh& completeMesh)
                 (
                     completeMesh.boundary()[oldPatchi].size(),
                     localPatchSlice,
-                    completeMesh.boundary()[oldPatchi].start()
+                    completePatchStarts[oldPatchi]
                 )
             );
         }
@@ -346,7 +355,7 @@ void Foam::faFieldDecomposer::reset(const faMesh& completeMesh)
             new processorEdgePatchFieldDecomposer
             (
                 procMesh_.boundary()[patchi].size(),
-                static_cast<const labelUList&>(localPatchSlice)
+                localPatchSlice
             )
         );
     }
diff --git a/src/parallel/decompose/faDecompose/faFieldDecomposerFields.C b/src/parallel/decompose/faDecompose/faFieldDecomposerFields.C
index 1b7eaea40f2..0b73aa76194 100644
--- a/src/parallel/decompose/faDecompose/faFieldDecomposerFields.C
+++ b/src/parallel/decompose/faDecompose/faFieldDecomposerFields.C
@@ -219,9 +219,9 @@ void Foam::faFieldDecomposer::decomposeFields
     const PtrList<GeoField>& fields
 ) const
 {
-    forAll(fields, fieldI)
+    forAll(fields, fieldi)
     {
-        decomposeField(fields[fieldI])().write();
+        decomposeField(fields[fieldi])().write();
     }
 }
 
diff --git a/src/parallel/reconstruct/faReconstruct/faMeshReconstructor.C b/src/parallel/reconstruct/faReconstruct/faMeshReconstructor.C
index 67e41128620..c3b6330a2d2 100644
--- a/src/parallel/reconstruct/faReconstruct/faMeshReconstructor.C
+++ b/src/parallel/reconstruct/faReconstruct/faMeshReconstructor.C
@@ -517,7 +517,13 @@ void Foam::faMeshReconstructor::createMesh()
         );
     }
 
+    // Serial mesh - no parallel communication
+
+    const bool oldParRun = Pstream::parRun(false);
+
     completeMesh.addFaPatches(completePatches);
+
+    Pstream::parRun(oldParRun);  // Restore parallel state
 }
 
 
-- 
GitLab
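
Background on the first fix: this is a lazy-evaluation deadlock. completeMesh.weights()
is demand-driven, and its first evaluation can involve communication across all
processors; when only some processors trigger that first evaluation (here, only
those that own processor edges on the complete mesh), they block inside the
collective call while the remaining processors never enter it. The added
(void)completeMesh.weights(); forces the evaluation on every rank up front,
before any rank-dependent loop touches the cached data. The hoisted
completePatchStarts list follows the same principle: evaluate once, uniformly,
outside the per-patch loop.

Below is a minimal standalone sketch of this failure mode and fix in plain MPI
C++. It is illustrative only; weightSum(), cachedWeightSum and haveWeights are
invented names, not OpenFOAM API.

    // Sketch of a lazy-evaluation deadlock and its fix (plain MPI,
    // illustrative names only).

    #include <mpi.h>

    static double cachedWeightSum = 0.0;
    static bool haveWeights = false;

    // Demand-driven accessor: the FIRST call performs a collective
    // reduction, so every rank must make that first call together.
    double weightSum()
    {
        if (!haveWeights)
        {
            double local = 1.0;  // stand-in for a per-rank computation

            // Collective call: every rank must reach this line
            MPI_Allreduce
            (
                &local, &cachedWeightSum, 1,
                MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD
            );
            haveWeights = true;
        }
        return cachedWeightSum;
    }

    int main(int argc, char** argv)
    {
        MPI_Init(&argc, &argv);

        int rank = 0;
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);

        // BAD: if only some ranks took this branch, they would sit in
        // MPI_Allreduce forever while the others never enter it:
        //
        //     if (rank % 2 == 0) { weightSum(); }

        // FIX (the shape of the patch): force creation on ALL ranks
        // before any rank-dependent code path uses the cached value.
        (void)weightSum();

        if (rank % 2 == 0)
        {
            (void)weightSum();  // safe: cached, no communication now
        }

        MPI_Finalize();
        return 0;
    }

Built with mpicxx and run on two or more ranks, the commented-out branch alone
should hang, while the up-front call lets every rank complete.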
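
Background on the second fix: the patch wraps completeMesh.addFaPatches(...) in
a switch-off/restore idiom, using the fact (visible in the diff itself) that
Pstream::parRun(bool) sets the global parallel-run flag and returns its
previous value, so patch construction on the dummy complete mesh cannot
initiate inter-processor communication. Where such serial sections grow
larger, the same idiom can be packaged as a scope guard; the helper below is a
hypothetical sketch (localSerialRun is not an OpenFOAM class) that restores
the flag even on early return or exception.

    // Hypothetical RAII scope guard - NOT an OpenFOAM class. Assumes
    // only Foam::Pstream::parRun(bool), which (as used in the patch)
    // sets the parallel-run flag and returns the previous value.

    #include "Pstream.H"

    class localSerialRun
    {
        // Parallel state before the guard switched to serial
        const bool oldParRun_;

    public:

        localSerialRun()
        :
            oldParRun_(Foam::Pstream::parRun(false))
        {}

        ~localSerialRun()
        {
            Foam::Pstream::parRun(oldParRun_);  // restore on scope exit
        }
    };

    // Usage sketch:
    //
    //     {
    //         localSerialRun guard;
    //         completeMesh.addFaPatches(completePatches);
    //     }
    //     // parallel state restored here, even on early return/throw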