refactor: Refactor/cleanup partitions #3609

Open: wants to merge 6 commits into base: develop
1 change: 0 additions & 1 deletion src/coreComponents/mesh/CMakeLists.txt
@@ -88,7 +88,6 @@ set( mesh_headers
generators/MeshGeneratorBase.hpp
generators/ParMETISInterface.hpp
generators/ParticleMeshGenerator.hpp
generators/PartitionDescriptor.hpp
generators/PrismUtilities.hpp
generators/Region.hpp
generators/WellGeneratorBase.hpp
174 changes: 101 additions & 73 deletions src/coreComponents/mesh/DomainPartition.cpp
@@ -39,14 +39,17 @@ DomainPartition::DomainPartition( string const & name,
Group * const parent ):
Group( name, parent )
{
this->registerWrapper( "Neighbors", &m_neighbors ).

this->registerWrapper< SpatialPartition, PartitionBase >( keys::partitionManager ).
setRestartFlags( RestartFlags::NO_WRITE ).
setSizedFromParent( false );

this->registerWrapper< SpatialPartition, PartitionBase >( keys::partitionManager ).
std::vector< NeighborCommunicator > & neighbors = getNeighbors();
this->registerWrapper( "Neighbors", &neighbors ).
setRestartFlags( RestartFlags::NO_WRITE ).
setSizedFromParent( false );


registerGroup( groupKeys.meshBodies );
registerGroup< constitutive::ConstitutiveManager >( groupKeys.constitutiveManager );

@@ -57,6 +60,28 @@ DomainPartition::DomainPartition( string const & name,
DomainPartition::~DomainPartition()
{}


PartitionBase & DomainPartition::getPartition()
{ return getReference< PartitionBase >( dataRepository::keys::partitionManager ); }

PartitionBase const & DomainPartition::getPartition() const
{ return getReference< PartitionBase >( dataRepository::keys::partitionManager ); }

/**
* @brief Get the neighbor communicators. @see DomainPartition#neighbors.
* @return Container of communicators.
*/
std::vector< NeighborCommunicator > & DomainPartition::getNeighbors()
{ return getPartition().getNeighbors(); }

/**
* @brief Get the neighbor communicators, const version. @see DomainPartition#neighbors.
* @return Container of communicators.
*/
std::vector< NeighborCommunicator > const & DomainPartition::getNeighbors() const
{ return getPartition().getNeighbors(); }


void DomainPartition::initializationOrder( string_array & order )
{
set< string > usedNames;
@@ -88,6 +113,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
PartitionBase & partition1 = getReference< PartitionBase >( keys::partitionManager );
SpatialPartition & partition = dynamic_cast< SpatialPartition & >(partition1);

std::vector< NeighborCommunicator > & neighbors = getNeighbors();
const std::set< int > metisNeighborList = partition.getMetisNeighborList();
if( metisNeighborList.empty() )
{
@@ -104,57 +130,57 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
MpiWrapper::cartCoords( cartcomm, rank, partition.m_nsdof, partition.m_coords.data() );

int ncoords[3];
addNeighbors( 0, cartcomm, ncoords );
partition.addNeighbors( 0, cartcomm, ncoords );

MpiWrapper::commFree( cartcomm );
}
else
{
for( integer const neighborRank : metisNeighborList )
{
m_neighbors.emplace_back( neighborRank );
neighbors.emplace_back( neighborRank );
}
}

// Create an array of the first neighbors.
array1d< int > firstNeighborRanks;
for( NeighborCommunicator const & neighbor : m_neighbors )
for( NeighborCommunicator const & neighbor : neighbors )
{
firstNeighborRanks.emplace_back( neighbor.neighborRank() );
}

int neighborsTag = 54;

// Send this list of neighbors to all neighbors.
std::vector< MPI_Request > requests( m_neighbors.size(), MPI_REQUEST_NULL );
std::vector< MPI_Request > requests( neighbors.size(), MPI_REQUEST_NULL );

for( std::size_t i = 0; i < m_neighbors.size(); ++i )
for( std::size_t i = 0; i < neighbors.size(); ++i )
{
MpiWrapper::iSend( firstNeighborRanks.toView(), m_neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, &requests[ i ] );
MpiWrapper::iSend( firstNeighborRanks.toView(), neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, &requests[ i ] );
}

// This set will contain the second (neighbor of) neighbors ranks.
std::set< int > secondNeighborRanks;

array1d< int > neighborOfNeighborRanks;
for( std::size_t i = 0; i < m_neighbors.size(); ++i )
for( std::size_t i = 0; i < neighbors.size(); ++i )
{
MpiWrapper::recv( neighborOfNeighborRanks, m_neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, MPI_STATUS_IGNORE );
MpiWrapper::recv( neighborOfNeighborRanks, neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, MPI_STATUS_IGNORE );

// Insert the neighbors of the current neighbor into the set of second neighbors.
secondNeighborRanks.insert( neighborOfNeighborRanks.begin(), neighborOfNeighborRanks.end() );
}

// Remove yourself and all the first neighbors from the second neighbors.
secondNeighborRanks.erase( MpiWrapper::commRank() );
for( NeighborCommunicator const & neighbor : m_neighbors )
for( NeighborCommunicator const & neighbor : neighbors )
{
secondNeighborRanks.erase( neighbor.neighborRank() );
}

for( integer const neighborRank : secondNeighborRanks )
{
m_neighbors.emplace_back( neighborRank );
neighbors.emplace_back( neighborRank );
Review comment:

We used to have two versions of m_neighbors: one in DomainPartition, one in SpatialPartition.
Despite sharing the same name, I have the impression that:

  • the SpatialPartition version used to be a "strict" list of neighbors.
  • the DomainPartition version also appended the neighbors of neighbors.

After this refactoring, I understand only one version will remain. It will be stored in SpatialPartition, and will now include the neighbors of neighbors.

If that's correct, this might notably conflict with the Particles machinery (e.g. SpatialPartition::repartitionMasterParticles), which has so far relied on the "strict" definition of neighbors.

Best case scenario: the MPM will simply iterate over more neighbors for nothing.

NB: the CI tests for MPM may still run fine as they seem to use a 1x1x1 or 2x2x1 partition... meaning they won't generate neighbors of neighbors anyway.
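
As a minimal illustration of that distinction, the sketch below (plain C++ with purely illustrative names and types, not GEOS code) treats a rank as a "strict" first neighbor only when its Cartesian coordinates differ by at most one cell in every dimension, with wrap-around in periodic dimensions; anything else in the unified list would be a neighbor of a neighbor.

#include <algorithm>
#include <array>
#include <cstdlib>
#include <iostream>
#include <vector>

using Coords = std::array< int, 3 >;

// A rank is a "strict" (first) neighbor if its Cartesian coordinates differ by at
// most 1 in every dimension, accounting for wrap-around in periodic dimensions.
bool isFirstNeighbor( Coords const & self, Coords const & other,
                      Coords const & dims, std::array< bool, 3 > const & periodic )
{
  for( int d = 0; d < 3; ++d )
  {
    int diff = std::abs( self[d] - other[d] );
    if( periodic[d] )
    {
      diff = std::min( diff, dims[d] - diff );  // wrap-around distance
    }
    if( diff > 1 )
    {
      return false;  // more than one cell apart in this dimension
    }
  }
  return true;
}

int main()
{
  Coords const self{ 0, 0, 0 };
  Coords const dims{ 4, 4, 1 };
  std::array< bool, 3 > const periodic{ false, false, false };

  // { 1, 1, 0 } is a strict neighbor of { 0, 0, 0 }; { 2, 0, 0 } would only show up
  // in a list that also appends neighbors of neighbors.
  for( Coords const & c : std::vector< Coords >{ { 1, 1, 0 }, { 2, 0, 0 } } )
  {
    std::cout << "(" << c[0] << ", " << c[1] << ", " << c[2] << ") strict neighbor? "
              << ( isFirstNeighbor( self, c, dims, periodic ) ? "yes" : "no" ) << "\n";
  }
  return 0;
}

Whether MPM routines such as repartitionMasterParticles would need a filter of this kind, or whether SpatialPartition will keep exposing a strict list separately, depends on how this refactoring settles.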

}

MpiWrapper::waitAll( requests.size(), requests.data(), MPI_STATUSES_IGNORE );
@@ -173,24 +199,24 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
EdgeManager & edgeManager = meshLevel.getEdgeManager();

nodeManager.setMaxGlobalIndex();
for( NeighborCommunicator const & neighbor : m_neighbors )
for( NeighborCommunicator const & neighbor : neighbors )
{
neighbor.addNeighborGroupToMesh( meshLevel );
}

CommunicationTools::getInstance().assignGlobalIndices( faceManager,
nodeManager,
m_neighbors );
neighbors );

CommunicationTools::getInstance().assignGlobalIndices( edgeManager,
nodeManager,
m_neighbors );
neighbors );

CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( faceManager,
m_neighbors );
neighbors );

CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( edgeManager,
m_neighbors );
neighbors );

// w.r.t. edges and faces, finding the matching nodes between partitions is a bit trickier.
// Because for contact mechanics and fractures, some nodes can be collocated.
Expand Down Expand Up @@ -226,7 +252,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
} );

CommunicationTools::getInstance().findMatchedPartitionBoundaryNodes( nodeManager,
m_neighbors,
neighbors,
collocatedNodesBuckets,
requestedNodes );
}
@@ -236,6 +262,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()

void DomainPartition::setupCommunications( bool use_nonblocking )
{
std::vector< NeighborCommunicator > & neighbors = getNeighbors();
forMeshBodies( [&]( MeshBody & meshBody )
{
meshBody.forMeshLevels( [&]( MeshLevel & meshLevel )
@@ -248,22 +275,22 @@ void DomainPartition::setupCommunications( bool use_nonblocking )
NodeManager & nodeManager = meshLevel.getNodeManager();
FaceManager & faceManager = meshLevel.getFaceManager();

CommunicationTools::getInstance().setupGhosts( meshLevel, m_neighbors, use_nonblocking );
CommunicationTools::getInstance().setupGhosts( meshLevel, neighbors, use_nonblocking );
faceManager.sortAllFaceNodes( nodeManager, meshLevel.getElemManager() );
faceManager.computeGeometry( nodeManager );
}
else if( !meshLevel.isShallowCopyOf( meshBody.getMeshLevels().getGroup< MeshLevel >( 0 )) )
{
for( NeighborCommunicator const & neighbor : m_neighbors )
for( NeighborCommunicator const & neighbor : neighbors )
{
neighbor.addNeighborGroupToMesh( meshLevel );
}
NodeManager & nodeManager = meshLevel.getNodeManager();
FaceManager & faceManager = meshLevel.getFaceManager();

CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( faceManager, m_neighbors );
CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( nodeManager, m_neighbors );
CommunicationTools::getInstance().setupGhosts( meshLevel, m_neighbors, use_nonblocking );
CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( faceManager, neighbors );
CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( nodeManager, neighbors );
CommunicationTools::getInstance().setupGhosts( meshLevel, neighbors, use_nonblocking );
}
else
{
@@ -274,56 +301,57 @@ void DomainPartition::setupCommunications( bool use_nonblocking )
} );
}

void DomainPartition::addNeighbors( const unsigned int idim,
MPI_Comm & cartcomm,
int * ncoords )
{
PartitionBase & partition1 = getReference< PartitionBase >( keys::partitionManager );
SpatialPartition & partition = dynamic_cast< SpatialPartition & >(partition1);

if( idim == partition.m_nsdof )
{
bool me = true;
for( int i = 0; i < partition.m_nsdof; i++ )
{
if( ncoords[i] != partition.m_coords( i ))
{
me = false;
break;
}
}
int const neighborRank = MpiWrapper::cartRank( cartcomm, ncoords );
if( !me && !std::any_of( m_neighbors.begin(), m_neighbors.end(), [=]( NeighborCommunicator const & nn ) { return nn.neighborRank( ) == neighborRank; } ) )
{
m_neighbors.emplace_back( NeighborCommunicator( neighborRank ) );
}
}
else
{
const int dim = partition.getPartitions()( LvArray::integerConversion< localIndex >( idim ));
const bool periodic = partition.m_Periodic( LvArray::integerConversion< localIndex >( idim ));
for( int i = -1; i < 2; i++ )
{
ncoords[idim] = partition.m_coords( LvArray::integerConversion< localIndex >( idim )) + i;
bool ok = true;
if( periodic )
{
if( ncoords[idim] < 0 )
ncoords[idim] = dim - 1;
else if( ncoords[idim] >= dim )
ncoords[idim] = 0;
}
else
{
ok = ncoords[idim] >= 0 && ncoords[idim] < dim;
}
if( ok )
{
addNeighbors( idim + 1, cartcomm, ncoords );
}
}
}
}
// void DomainPartition::addNeighbors( const unsigned int idim,
// MPI_Comm & cartcomm,
// int * ncoords )
// {
// std::vector< NeighborCommunicator > & neighbors = getNeighbors();
// PartitionBase & partition1 = getReference< PartitionBase >( keys::partitionManager );
// SpatialPartition & partition = dynamic_cast< SpatialPartition & >(partition1);

// if( idim == partition.m_nsdof )
// {
// bool me = true;
// for( int i = 0; i < partition.m_nsdof; i++ )
// {
// if( ncoords[i] != partition.m_coords( i ))
// {
// me = false;
// break;
// }
// }
// int const neighborRank = MpiWrapper::cartRank( cartcomm, ncoords );
// if( !me && !std::any_of( neighbors.begin(), neighbors.end(), [=]( NeighborCommunicator const & nn ) { return nn.neighborRank( ) == neighborRank; } ) )
// {
// neighbors.emplace_back( NeighborCommunicator( neighborRank ) );
// }
// }
// else
// {
// const int dim = partition.getPartitions()( LvArray::integerConversion< localIndex >( idim ));
// const bool periodic = partition.m_Periodic( LvArray::integerConversion< localIndex >( idim ));
// for( int i = -1; i < 2; i++ )
// {
// ncoords[idim] = partition.m_coords( LvArray::integerConversion< localIndex >( idim )) + i;
// bool ok = true;
// if( periodic )
// {
// if( ncoords[idim] < 0 )
// ncoords[idim] = dim - 1;
// else if( ncoords[idim] >= dim )
// ncoords[idim] = 0;
// }
// else
// {
// ok = ncoords[idim] >= 0 && ncoords[idim] < dim;
// }
// if( ok )
// {
// addNeighbors( idim + 1, cartcomm, ncoords );
// }
// }
// }
// }

void DomainPartition::outputPartitionInformation() const
{
21 changes: 10 additions & 11 deletions src/coreComponents/mesh/DomainPartition.hpp
@@ -26,6 +26,7 @@
#include "discretizationMethods/NumericalMethodsManager.hpp"
#include "mesh/MeshBody.hpp"
#include "mesh/mpiCommunications/NeighborCommunicator.hpp"
#include "mesh/mpiCommunications/PartitionBase.hpp"

namespace geos
{
@@ -113,9 +114,9 @@ class DomainPartition : public dataRepository::Group
* This function should have been implemented as `private`,
* and an additional function to initiate the recursion could have been implemented.
*/
void addNeighbors( const unsigned int idim,
MPI_Comm & cartcomm,
int * ncoords );
// void addNeighbors( const unsigned int idim,
// MPI_Comm & cartcomm,
// int * ncoords );

/**
* @brief Outputs information about the partitioning of the domain.
@@ -260,26 +261,24 @@ class DomainPartition : public dataRepository::Group
getMeshBodies().forSubGroupsIndex< MeshBody >( std::forward< FUNCTION >( function ) );
}


PartitionBase & getPartition();
PartitionBase const & getPartition() const;

/**
* @brief Get the neighbor communicators. @see DomainPartition#m_neighbors.
* @return Container of communicators.
*/
std::vector< NeighborCommunicator > & getNeighbors()
{ return m_neighbors; }
std::vector< NeighborCommunicator > & getNeighbors();

/**
* @brief Get the neighbor communicators, const version. @see DomainPartition#m_neighbors.
* @return Container of communicators.
*/
std::vector< NeighborCommunicator > const & getNeighbors() const
{ return m_neighbors; };
std::vector< NeighborCommunicator > const & getNeighbors() const;

private:

/**
* @brief Contains all the communicators from this DomainPartition to its neighbors.
*/
std::vector< NeighborCommunicator > m_neighbors;
};

} /* namespace geos */
1 change: 0 additions & 1 deletion src/coreComponents/mesh/generators/CellBlockManager.hpp
@@ -26,7 +26,6 @@
#include "mesh/generators/LineBlock.hpp"
#include "mesh/generators/LineBlockABC.hpp"
#include "mesh/generators/CellBlockManagerABC.hpp"
#include "mesh/generators/PartitionDescriptor.hpp"

namespace geos
{