diff --git a/src/coreComponents/mesh/CMakeLists.txt b/src/coreComponents/mesh/CMakeLists.txt
index 433d55155ef..3706da2575c 100644
--- a/src/coreComponents/mesh/CMakeLists.txt
+++ b/src/coreComponents/mesh/CMakeLists.txt
@@ -88,7 +88,6 @@ set( mesh_headers
   generators/MeshGeneratorBase.hpp
   generators/ParMETISInterface.hpp
   generators/ParticleMeshGenerator.hpp
-  generators/PartitionDescriptor.hpp
   generators/PrismUtilities.hpp
   generators/Region.hpp
   generators/WellGeneratorBase.hpp
diff --git a/src/coreComponents/mesh/DomainPartition.cpp b/src/coreComponents/mesh/DomainPartition.cpp
index c699afc5ed2..e40b935182b 100644
--- a/src/coreComponents/mesh/DomainPartition.cpp
+++ b/src/coreComponents/mesh/DomainPartition.cpp
@@ -39,14 +39,17 @@ DomainPartition::DomainPartition( string const & name,
                                   Group * const parent ):
   Group( name, parent )
 {
-  this->registerWrapper( "Neighbors", &m_neighbors ).
+
+  this->registerWrapper< SpatialPartition, PartitionBase >( keys::partitionManager ).
     setRestartFlags( RestartFlags::NO_WRITE ).
     setSizedFromParent( false );
 
-  this->registerWrapper< SpatialPartition, PartitionBase >( keys::partitionManager ).
+  std::vector< NeighborCommunicator > & neighbors = getNeighbors();
+  this->registerWrapper( "Neighbors", &neighbors ).
     setRestartFlags( RestartFlags::NO_WRITE ).
     setSizedFromParent( false );
 
+
   registerGroup( groupKeys.meshBodies );
   registerGroup< constitutive::ConstitutiveManager >( groupKeys.constitutiveManager );
 
@@ -57,6 +60,28 @@ DomainPartition::DomainPartition( string const & name,
 DomainPartition::~DomainPartition()
 {}
 
+
+PartitionBase & DomainPartition::getPartition()
+{ return getReference< PartitionBase >( dataRepository::keys::partitionManager ); }
+
+PartitionBase const & DomainPartition::getPartition() const
+{ return getReference< PartitionBase >( dataRepository::keys::partitionManager ); }
+
+/**
+ * @brief Get the neighbor communicators. @see PartitionBase#m_neighbors.
+ * @return Container of communicators.
+ */
+std::vector< NeighborCommunicator > & DomainPartition::getNeighbors()
+{ return getPartition().getNeighbors(); }
+
+/**
+ * @brief Get the neighbor communicators, const version. @see PartitionBase#m_neighbors.
+ * @return Container of communicators.
+ */
+std::vector< NeighborCommunicator > const & DomainPartition::getNeighbors() const
+{ return getPartition().getNeighbors(); }
+
+
 void DomainPartition::initializationOrder( string_array & order )
 {
   set< string > usedNames;
@@ -88,6 +113,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
   PartitionBase & partition1 = getReference< PartitionBase >( keys::partitionManager );
   SpatialPartition & partition = dynamic_cast< SpatialPartition & >(partition1);
 
+  std::vector< NeighborCommunicator > & neighbors = getNeighbors();
   const std::set< int > metisNeighborList = partition.getMetisNeighborList();
   if( metisNeighborList.empty() )
   {
@@ -104,7 +130,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
     MpiWrapper::cartCoords( cartcomm, rank, partition.m_nsdof, partition.m_coords.data() );
 
     int ncoords[3];
-    addNeighbors( 0, cartcomm, ncoords );
+    partition.addNeighbors( 0, cartcomm, ncoords );
 
     MpiWrapper::commFree( cartcomm );
   }
@@ -112,13 +138,13 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
   {
     for( integer const neighborRank : metisNeighborList )
     {
-      m_neighbors.emplace_back( neighborRank );
+      neighbors.emplace_back( neighborRank );
     }
   }
 
   // Create an array of the first neighbors.
   array1d< int > firstNeighborRanks;
-  for( NeighborCommunicator const & neighbor : m_neighbors )
+  for( NeighborCommunicator const & neighbor : neighbors )
   {
     firstNeighborRanks.emplace_back( neighbor.neighborRank() );
   }
 
@@ -126,20 +152,20 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
   int neighborsTag = 54;
 
   // Send this list of neighbors to all neighbors.
-  std::vector< MPI_Request > requests( m_neighbors.size(), MPI_REQUEST_NULL );
+  std::vector< MPI_Request > requests( neighbors.size(), MPI_REQUEST_NULL );
 
-  for( std::size_t i = 0; i < m_neighbors.size(); ++i )
+  for( std::size_t i = 0; i < neighbors.size(); ++i )
   {
-    MpiWrapper::iSend( firstNeighborRanks.toView(), m_neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, &requests[ i ] );
+    MpiWrapper::iSend( firstNeighborRanks.toView(), neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, &requests[ i ] );
   }
 
   // This set will contain the second (neighbor of) neighbors ranks.
   std::set< int > secondNeighborRanks;
 
   array1d< int > neighborOfNeighborRanks;
-  for( std::size_t i = 0; i < m_neighbors.size(); ++i )
+  for( std::size_t i = 0; i < neighbors.size(); ++i )
   {
-    MpiWrapper::recv( neighborOfNeighborRanks, m_neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, MPI_STATUS_IGNORE );
+    MpiWrapper::recv( neighborOfNeighborRanks, neighbors[ i ].neighborRank(), neighborsTag, MPI_COMM_GEOS, MPI_STATUS_IGNORE );
 
     // Insert the neighbors of the current neighbor into the set of second neighbors.
     secondNeighborRanks.insert( neighborOfNeighborRanks.begin(), neighborOfNeighborRanks.end() );
@@ -147,14 +173,14 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
   // Remove yourself and all the first neighbors from the second neighbors.
   secondNeighborRanks.erase( MpiWrapper::commRank() );
-  for( NeighborCommunicator const & neighbor : m_neighbors )
+  for( NeighborCommunicator const & neighbor : neighbors )
   {
     secondNeighborRanks.erase( neighbor.neighborRank() );
   }
 
   for( integer const neighborRank : secondNeighborRanks )
   {
-    m_neighbors.emplace_back( neighborRank );
+    neighbors.emplace_back( neighborRank );
   }
 
   MpiWrapper::waitAll( requests.size(), requests.data(), MPI_STATUSES_IGNORE );
@@ -173,24 +199,24 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
       EdgeManager & edgeManager = meshLevel.getEdgeManager();
       nodeManager.setMaxGlobalIndex();
 
-      for( NeighborCommunicator const & neighbor : m_neighbors )
+      for( NeighborCommunicator const & neighbor : neighbors )
       {
        neighbor.addNeighborGroupToMesh( meshLevel );
       }
 
       CommunicationTools::getInstance().assignGlobalIndices( faceManager,
                                                              nodeManager,
-                                                             m_neighbors );
+                                                             neighbors );
 
       CommunicationTools::getInstance().assignGlobalIndices( edgeManager,
                                                              nodeManager,
-                                                             m_neighbors );
+                                                             neighbors );
 
       CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( faceManager,
-                                                                             m_neighbors );
+                                                                             neighbors );
 
       CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( edgeManager,
-                                                                             m_neighbors );
+                                                                             neighbors );
 
       // w.r.t. edges and faces, finding the matching nodes between partitions is a bit trickier.
       // Because for contact mechanics and fractures, some nodes can be collocated.
@@ -226,7 +252,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
       } );
 
       CommunicationTools::getInstance().findMatchedPartitionBoundaryNodes( nodeManager,
-                                                                           m_neighbors,
+                                                                           neighbors,
                                                                            collocatedNodesBuckets,
                                                                            requestedNodes );
     }
@@ -236,6 +262,7 @@ void DomainPartition::setupBaseLevelMeshGlobalInfo()
 
 void DomainPartition::setupCommunications( bool use_nonblocking )
 {
+  std::vector< NeighborCommunicator > & neighbors = getNeighbors();
   forMeshBodies( [&]( MeshBody & meshBody )
   {
     meshBody.forMeshLevels( [&]( MeshLevel & meshLevel )
@@ -248,22 +275,22 @@ void DomainPartition::setupCommunications( bool use_nonblocking )
         NodeManager & nodeManager = meshLevel.getNodeManager();
         FaceManager & faceManager = meshLevel.getFaceManager();
 
-        CommunicationTools::getInstance().setupGhosts( meshLevel, m_neighbors, use_nonblocking );
+        CommunicationTools::getInstance().setupGhosts( meshLevel, neighbors, use_nonblocking );
         faceManager.sortAllFaceNodes( nodeManager, meshLevel.getElemManager() );
         faceManager.computeGeometry( nodeManager );
       }
       else if( !meshLevel.isShallowCopyOf( meshBody.getMeshLevels().getGroup< MeshLevel >( 0 )) )
       {
-        for( NeighborCommunicator const & neighbor : m_neighbors )
+        for( NeighborCommunicator const & neighbor : neighbors )
        {
          neighbor.addNeighborGroupToMesh( meshLevel );
        }
        NodeManager & nodeManager = meshLevel.getNodeManager();
        FaceManager & faceManager = meshLevel.getFaceManager();
-        CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( faceManager, m_neighbors );
-        CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( nodeManager, m_neighbors );
-        CommunicationTools::getInstance().setupGhosts( meshLevel, m_neighbors, use_nonblocking );
+        CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( faceManager, neighbors );
+        CommunicationTools::getInstance().findMatchedPartitionBoundaryObjects( nodeManager, neighbors );
+        CommunicationTools::getInstance().setupGhosts( meshLevel, neighbors, use_nonblocking );
       }
       else
       {
@@ -274,56 +301,57 @@ void DomainPartition::setupCommunications( bool use_nonblocking )
     } );
   } );
 }
 
-void DomainPartition::addNeighbors( const unsigned int idim,
-                                    MPI_Comm & cartcomm,
-                                    int * ncoords )
-{
-  PartitionBase & partition1 = getReference< PartitionBase >( keys::partitionManager );
-  SpatialPartition & partition = dynamic_cast< SpatialPartition & >(partition1);
-
-  if( idim == partition.m_nsdof )
-  {
-    bool me = true;
-    for( int i = 0; i < partition.m_nsdof; i++ )
-    {
-      if( ncoords[i] != partition.m_coords( i ))
-      {
-        me = false;
-        break;
-      }
-    }
-    int const neighborRank = MpiWrapper::cartRank( cartcomm, ncoords );
-    if( !me && !std::any_of( m_neighbors.begin(), m_neighbors.end(), [=]( NeighborCommunicator const & nn ) { return nn.neighborRank( ) == neighborRank; } ) )
-    {
-      m_neighbors.emplace_back( NeighborCommunicator( neighborRank ) );
-    }
-  }
-  else
-  {
-    const int dim = partition.getPartitions()( LvArray::integerConversion< localIndex >( idim ));
-    const bool periodic = partition.m_Periodic( LvArray::integerConversion< localIndex >( idim ));
-    for( int i = -1; i < 2; i++ )
-    {
-      ncoords[idim] = partition.m_coords( LvArray::integerConversion< localIndex >( idim )) + i;
-      bool ok = true;
-      if( periodic )
-      {
-        if( ncoords[idim] < 0 )
-          ncoords[idim] = dim - 1;
-        else if( ncoords[idim] >= dim )
-          ncoords[idim] = 0;
-      }
-      else
-      {
-        ok = ncoords[idim] >= 0 && ncoords[idim] < dim;
-      }
-      if( ok )
-      {
-        addNeighbors( idim + 1, cartcomm, ncoords );
-      }
-    }
-  }
-}
+// void DomainPartition::addNeighbors( const unsigned int idim,
+//                                     MPI_Comm & cartcomm,
+//                                     int * ncoords )
+// {
+//   std::vector< NeighborCommunicator > & neighbors = getNeighbors();
+//   PartitionBase & partition1 = getReference< PartitionBase >( keys::partitionManager );
+//   SpatialPartition & partition = dynamic_cast< SpatialPartition & >(partition1);
+
+//   if( idim == partition.m_nsdof )
+//   {
+//     bool me = true;
+//     for( int i = 0; i < partition.m_nsdof; i++ )
+//     {
+//       if( ncoords[i] != partition.m_coords( i ))
+//       {
+//         me = false;
+//         break;
+//       }
+//     }
+//     int const neighborRank = MpiWrapper::cartRank( cartcomm, ncoords );
+//     if( !me && !std::any_of( neighbors.begin(), neighbors.end(), [=]( NeighborCommunicator const & nn ) { return nn.neighborRank( ) == neighborRank; } ) )
+//     {
+//       neighbors.emplace_back( NeighborCommunicator( neighborRank ) );
+//     }
+//   }
+//   else
+//   {
+//     const int dim = partition.getPartitions()( LvArray::integerConversion< localIndex >( idim ));
+//     const bool periodic = partition.m_Periodic( LvArray::integerConversion< localIndex >( idim ));
+//     for( int i = -1; i < 2; i++ )
+//     {
+//       ncoords[idim] = partition.m_coords( LvArray::integerConversion< localIndex >( idim )) + i;
+//       bool ok = true;
+//       if( periodic )
+//       {
+//         if( ncoords[idim] < 0 )
+//           ncoords[idim] = dim - 1;
+//         else if( ncoords[idim] >= dim )
+//           ncoords[idim] = 0;
+//       }
+//       else
+//       {
+//         ok = ncoords[idim] >= 0 && ncoords[idim] < dim;
+//       }
+//       if( ok )
+//       {
+//         addNeighbors( idim + 1, cartcomm, ncoords );
+//       }
+//     }
+//   }
+// }
 
 void DomainPartition::outputPartitionInformation() const
 {
diff --git a/src/coreComponents/mesh/DomainPartition.hpp b/src/coreComponents/mesh/DomainPartition.hpp
index 0af42ee9256..3c77242c695 100644
--- a/src/coreComponents/mesh/DomainPartition.hpp
+++ b/src/coreComponents/mesh/DomainPartition.hpp
@@ -26,6 +26,7 @@
 #include "discretizationMethods/NumericalMethodsManager.hpp"
 #include "mesh/MeshBody.hpp"
 #include "mesh/mpiCommunications/NeighborCommunicator.hpp"
+#include "mesh/mpiCommunications/PartitionBase.hpp"
 
 namespace geos
 {
@@ -113,9 +114,9 @@ class DomainPartition : public dataRepository::Group
    * This functions should have been implemented `private`
    * and an additional functions to initiate the recursion could have been implemented.
    */
-  void addNeighbors( const unsigned int idim,
-                     MPI_Comm & cartcomm,
-                     int * ncoords );
+  // void addNeighbors( const unsigned int idim,
+  //                    MPI_Comm & cartcomm,
+  //                    int * ncoords );
 
   /**
    * @brief Outputs information about the partitioning of the domain.
@@ -260,26 +261,24 @@ class DomainPartition : public dataRepository::Group
     getMeshBodies().forSubGroupsIndex< MeshBody >( std::forward< FUNCTION >( function ) );
   }
 
+
+  PartitionBase & getPartition();
+  PartitionBase const & getPartition() const;
+
   /**
    * @brief Get the neighbor communicators. @see DomainPartition#m_neighbors.
   * @return Container of communicators.
   */
-  std::vector< NeighborCommunicator > & getNeighbors()
-  { return m_neighbors; }
+  std::vector< NeighborCommunicator > & getNeighbors();
 
   /**
   * @brief Get the neighbor communicators, const version. @see DomainPartition#m_neighbors.
   * @return Container of communicators.
   */
-  std::vector< NeighborCommunicator > const & getNeighbors() const
-  { return m_neighbors; };
+  std::vector< NeighborCommunicator > const & getNeighbors() const;
 
 private:
 
-  /**
-   * @brief Contains all the communicators from this DomainPartition to its neighbors.
-   */
-  std::vector< NeighborCommunicator > m_neighbors;
 };
 
 } /* namespace geos */
diff --git a/src/coreComponents/mesh/generators/CellBlockManager.hpp b/src/coreComponents/mesh/generators/CellBlockManager.hpp
index 4e8d2688564..f9554f8a73d 100644
--- a/src/coreComponents/mesh/generators/CellBlockManager.hpp
+++ b/src/coreComponents/mesh/generators/CellBlockManager.hpp
@@ -26,7 +26,6 @@
 #include "mesh/generators/LineBlock.hpp"
 #include "mesh/generators/LineBlockABC.hpp"
 #include "mesh/generators/CellBlockManagerABC.hpp"
-#include "mesh/generators/PartitionDescriptor.hpp"
 
 namespace geos
 {
diff --git a/src/coreComponents/mesh/generators/PartitionDescriptor.hpp b/src/coreComponents/mesh/generators/PartitionDescriptor.hpp
deleted file mode 100644
index ef4e4bb4566..00000000000
--- a/src/coreComponents/mesh/generators/PartitionDescriptor.hpp
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * ------------------------------------------------------------------------------------------------------------
- * SPDX-License-Identifier: LGPL-2.1-only
- *
- * Copyright (c) 2016-2024 Lawrence Livermore National Security LLC
- * Copyright (c) 2018-2024 TotalEnergies
- * Copyright (c) 2018-2024 The Board of Trustees of the Leland Stanford Junior University
- * Copyright (c) 2023-2024 Chevron
- * Copyright (c) 2019-     GEOS/GEOSX Contributors
- * All rights reserved
- *
- * See top level LICENSE, COPYRIGHT, CONTRIBUTORS, NOTICE, and ACKNOWLEDGEMENTS files for details.
- * ------------------------------------------------------------------------------------------------------------
- */
-
-/**
- * @file PartitionDescriptor.hpp
- */
-
-#ifndef GEOS_MESH_PARTITIONDESCRIPTOR_H_
-#define GEOS_MESH_PARTITIONDESCRIPTOR_H_
-
-#include "mesh/mpiCommunications/SpatialPartition.hpp"
-
-#include <set>
-
-
-namespace geos
-{
-
-/**
- * @class PartitionDescriptor
- * @brief Simple utility to retrieve partition information in case of Metis or Spatial partition.
- */
-class PartitionDescriptor
-{
-public:
-  /**
-   * @brief indicate if the partition is described using a Metis Neighbor list.
-   * @return A boolean indicating if the partition is described usins a Metis neighbor list.
-   */
-  bool hasMetisNeighborList() const { return m_hasMetisNeighborList; }
-
-  /**
-   * @brief Sets the boolean that indicates if the partition is described using a Metis Neighbor list.
-   * @param hasMetisNeighborList A boolean indicating if the partition is described usins a Metis neighbor list.
-   */
-  void setHasMetisNeighborList( bool hasMetisNeighborList ) { m_hasMetisNeighborList = hasMetisNeighborList; }
-
-  /**
-   * @brief Gets a reference to the list of metis neighbor list.
-   * @return A reference to the Metis neighbor list.
-   */
-  std::set< int > const & getMetisNeighborList() const { return m_metisNeighborList; }
-
-  /**
-   * @brief Sets the list of metis neighbor list.
-   * @param metisNeighborList A reference to the Metis neighbor list.
-   */
-  void setMetisNeighborList( std::vector< int > const & metisNeighborList )
-  {
-    m_metisNeighborList.clear();
-    m_metisNeighborList.insert( metisNeighborList.cbegin(), metisNeighborList.cend() );
-  }
-
-  /**
-   * @brief indicate if the partition is described using a spatial partition.
-   * @return A boolean indicating if the parition is described using a spatial partition.
-   */
-  bool hasSpatialPartition() const { return !m_hasMetisNeighborList; }
-
-  /**
-   * @brief Sets the boolean that indicates if the partition is described using a Metis Neighbor list.
-   * @param hasSpatialPartition a boolean indicating if the parition is described using a spatial partition.
-   */
-  void setHasSpatialPartition( bool hasSpatialPartition ) { m_hasMetisNeighborList = !hasSpatialPartition; }
-
-  /**
-   * @brief Returns a reference to the spatialPartition
-   * @return The spatial partiton.
-   */
-  SpatialPartition const & getSpatialPartition() const { return m_spatialPartition; }
-
-  /**
-   * @brief Sets the spatialPartition
-   * @param spatialPartition The spatial partiton.
-   */
-  void setSpatialPartition( SpatialPartition const & spatialPartition ) { m_spatialPartition = spatialPartition; }
-
-private:
-
-  bool m_hasMetisNeighborList;  //< Indicate if we use metis neighbor list or spatial partition to describe the partition
-  std::set< int > m_metisNeighborList;  //< The list of neighbors computed wwith metis
-  SpatialPartition m_spatialPartition;  //< The spatial partition
-};
-
-}
-#endif /* GEOS_MESH_PARTITIONDESCRIPTOR_H_ */
diff --git a/src/coreComponents/mesh/generators/VTKUtilities.cpp b/src/coreComponents/mesh/generators/VTKUtilities.cpp
index 39adbb5e5fd..65f3bef3e1d 100644
--- a/src/coreComponents/mesh/generators/VTKUtilities.cpp
+++ b/src/coreComponents/mesh/generators/VTKUtilities.cpp
@@ -24,6 +24,8 @@
 #endif
 
 #include "common/TypeDispatch.hpp"
+#include "LvArray/src/tensorOps.hpp"
+#include "mesh/utilities/ComputationalGeometry.hpp"
 
 #include
 #include
diff --git a/src/coreComponents/mesh/generators/VTKWellGenerator.cpp b/src/coreComponents/mesh/generators/VTKWellGenerator.cpp
index eba79d7076e..b0cd54b2cae 100644
--- a/src/coreComponents/mesh/generators/VTKWellGenerator.cpp
+++ b/src/coreComponents/mesh/generators/VTKWellGenerator.cpp
@@ -22,6 +22,7 @@
 #include "mesh/LogLevelsInfo.hpp"
 #include "mesh/generators/VTKUtilities.hpp"
 
+#include "LvArray/src/tensorOps.hpp"
 #include
 #include
 #include
diff --git a/src/coreComponents/mesh/mpiCommunications/PartitionBase.hpp b/src/coreComponents/mesh/mpiCommunications/PartitionBase.hpp
index b4b5bc402bc..ba6b7c4f560 100644
--- a/src/coreComponents/mesh/mpiCommunications/PartitionBase.hpp
+++ b/src/coreComponents/mesh/mpiCommunications/PartitionBase.hpp
@@ -75,6 +75,14 @@ class PartitionBase
   int numColor() const { return m_numColors; }
 
+
+  std::vector< NeighborCommunicator > & getNeighbors()
+  { return m_neighbors; };
+
+  std::vector< NeighborCommunicator > const & getNeighbors() const
+  { return m_neighbors; };
+
+
 protected:
 
   /**
   * @brief Preventing dummy default constructor.
diff --git a/src/coreComponents/mesh/mpiCommunications/SpatialPartition.cpp b/src/coreComponents/mesh/mpiCommunications/SpatialPartition.cpp
index 5e308bc5551..f6089a3bb34 100644
--- a/src/coreComponents/mesh/mpiCommunications/SpatialPartition.cpp
+++ b/src/coreComponents/mesh/mpiCommunications/SpatialPartition.cpp
@@ -207,11 +207,11 @@ void SpatialPartition::setSizes( real64 const ( &min )[ 3 ],
     MpiWrapper::cartCoords( cartcomm, m_rank, m_nsdof, m_coords.data());
 
     //add neighbors
-    {
-      int ncoords[m_nsdof];
-      m_neighbors.clear();
-      addNeighbors( 0, cartcomm, ncoords );
-    }
+    // {
+    //   int ncoords[m_nsdof];
+    //   m_neighbors.clear();
+    //   addNeighbors( 0, cartcomm, ncoords );
+    // }
 
     MpiWrapper::commFree( cartcomm );
   }
diff --git a/src/coreComponents/mesh/mpiCommunications/SpatialPartition.hpp b/src/coreComponents/mesh/mpiCommunications/SpatialPartition.hpp
index 7a37120bbc6..2877b203982 100644
--- a/src/coreComponents/mesh/mpiCommunications/SpatialPartition.hpp
+++ b/src/coreComponents/mesh/mpiCommunications/SpatialPartition.hpp
@@ -156,6 +156,9 @@ class SpatialPartition : public PartitionBase
                      MPI_Comm & cartcomm,
                      int * ncoords );
 
+private:
+
+
   /**
    * @brief Defines a distance/buffer below which we are considered in the contact zone ghosts.
    * @param bufferSize The distance.
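
Usage note (not part of the patch above): with this change the neighbor communicators are owned by PartitionBase, and DomainPartition::getNeighbors() simply forwards through DomainPartition::getPartition(), so existing call sites keep working. The following is a minimal, illustrative C++ sketch of the intended call pattern; the free function printNeighborRanks is hypothetical, and the GEOS_LOG_RANK macro is assumed to be available from the GEOS common logging utilities.

#include <vector>
#include "mesh/DomainPartition.hpp"

// Hypothetical helper, for illustration only: lists the MPI ranks this
// domain communicates with, going through the relocated accessors.
void printNeighborRanks( geos::DomainPartition & domain )
{
  // Both accessors now resolve to the same container, owned by PartitionBase.
  geos::PartitionBase & partition = domain.getPartition();
  std::vector< geos::NeighborCommunicator > & neighbors = domain.getNeighbors();

  if( &neighbors == &partition.getNeighbors() )  // true by construction after this patch
  {
    for( geos::NeighborCommunicator const & neighbor : neighbors )
    {
      GEOS_LOG_RANK( "neighbor rank: " << neighbor.neighborRank() );
    }
  }
}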