From 3cc17282decb8ba5ef439581fd33e0de4a75b909 Mon Sep 17 00:00:00 2001
From: manuelpett <manuel.petit@inria.fr>
Date: Tue, 26 Mar 2024 18:21:43 +0100
Subject: [PATCH] *Fix bugs related to tempfile handling when multiple
 AbstractDomain instances are added together inside another AbstractDomain.
 Only one tempfile is now generated (and deleted) in every case (MPI or not).
 *Control verbosity levels related to MPI (solver & Dirichlet warning)
 *Control verbosity level of GMSH

---
 src/bvpy/boundary_conditions/dirichlet.py |  9 ++--
 src/bvpy/domains/abstract.py              | 58 ++++++++++++++---------
 src/bvpy/solvers/nonlinear.py             | 30 ++++++------
 3 files changed, 57 insertions(+), 40 deletions(-)
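
Note on the tempfile fix: the shared temporary directory is now created once,
on rank 0 only, and its path is broadcast to the other ranks so that every
process works in the same location. A minimal standalone sketch of that
pattern (assuming mpi4py is installed; the names below are illustrative, not
bvpy's API):

    import tempfile
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    if comm.Get_rank() == 0:
        tmp_dir = tempfile.TemporaryDirectory()  # keep a reference so it is not deleted
        dir_name = tmp_dir.name
    else:
        dir_name = None
    # every rank now sees the path created by rank 0
    dir_name = comm.bcast(dir_name, root=0)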

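Note on GMSH verbosity: set_verbosity() is now called right after
gmsh.initialize(), so the chosen level applies from the start. In the gmsh
Python API this kind of control typically goes through the "General.Verbosity"
option; a hedged sketch (not bvpy's implementation) of silencing gmsh on
non-root ranks:

    import gmsh
    from mpi4py import MPI

    gmsh.initialize()
    if MPI.COMM_WORLD.Get_rank() != 0:
        gmsh.option.setNumber("General.Verbosity", 0)  # 0 = silent, larger values are more verbose
    # ... build the model (on rank 0), then gmsh.finalize() on all ranks
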
diff --git a/src/bvpy/boundary_conditions/dirichlet.py b/src/bvpy/boundary_conditions/dirichlet.py
index 7255301..8440be5 100644
--- a/src/bvpy/boundary_conditions/dirichlet.py
+++ b/src/bvpy/boundary_conditions/dirichlet.py
@@ -21,6 +21,7 @@
 # -----------------------------------------------------------------------
 import fenics as fe
 from bvpy import logger
+from bvpy import MPI_COMM_WORLD
 
 import numpy as np
 from sympy import Symbol
@@ -98,10 +99,12 @@ class ConstantDirichlet(object):
             dir = fe.DirichletBC(functionSpace.sub(self._subspace),
                                  fe.Constant(self._val),
                                  self._boundary(), method=self._method)
-            
+
         if not dir.get_boundary_values():
-            logger.warning('No nodes are marked for this domain: '
-                           + str(self._boundary._string))
+            if (MPI_COMM_WORLD.Get_size() > 1) and (MPI_COMM_WORLD.Get_rank() == 0):
+                logger.warning('No nodes are marked for some domain (this can be normal when using MPI)')
+            elif MPI_COMM_WORLD.Get_size() == 1:
+                logger.warning('No nodes are marked for this domain: ' + str(self._boundary._string))
 
         return dir
 
diff --git a/src/bvpy/domains/abstract.py b/src/bvpy/domains/abstract.py
index 21836c3..0260aa5 100644
--- a/src/bvpy/domains/abstract.py
+++ b/src/bvpy/domains/abstract.py
@@ -19,6 +19,7 @@
 #           https://www.gnu.org/licenses/lgpl-3.0.en.html
 #
 # -----------------------------------------------------------------------
+import time
 
 import gmsh
 import sys
@@ -159,8 +160,23 @@ class AbstractDomain(ABC):
                          verbosity=verbosity, algorithm=algorithm)
 
         AbstractDomain._domain_instances += 1
+
         if AbstractDomain._domain_instances == 1:
-            gmsh.initialize() # for all processes
+            # - Initialize GMSH and the temp dir necessary to store the mesh
+            gmsh.initialize() # required for all processes
+            self.set_verbosity(verbosity)
+            log.debug(f'Initialized GMSH on rank {MPI_COMM_WORLD.Get_rank()}')
+
+            if MPI_COMM_WORLD.Get_rank() == 0:
+                self._mesh_dir = tempfile.TemporaryDirectory()  # keep a reference so the temp folder is not deleted early
+                dir_name = self._mesh_dir.name
+            else:
+                dir_name = None
+
+            # Broadcast the folder name to the other processes
+            self._dir_name = MPI_COMM_WORLD.bcast(dir_name, root=0)
+
+            log.debug(f'Temp dir {self._dir_name} on rank {MPI_COMM_WORLD.Get_rank()}')
 
         if clear:
             gmsh.clear()
@@ -170,18 +186,7 @@ class AbstractDomain(ABC):
         self._cell_size = cell_size
         self._algorithm = algorithm
 
-        if MPI_COMM_WORLD.Get_rank() == 0:
-            # Creation of the temporary folder only for the process of rank 0
-            self._mesh_dir  = tempfile.TemporaryDirectory() # keep a reference to avoid suppression of the temp file
-            dir_name = self._mesh_dir .name
-        else:
-            dir_name = None
-
-        # Diffusion of the folder to the other processess
-        self._dir_name = MPI_COMM_WORLD.bcast(dir_name, root=0)
-
         self.set_cell_type(cell_type)
-        self.set_verbosity(verbosity)
         self.set_cell_size(cell_size)
         self.set_meshing_algorithm(algorithm)
         self.set_dim(dim)
@@ -206,14 +211,18 @@ class AbstractDomain(ABC):
         """
         #self._mesh_dir.cleanup()
 
-        if MPI_COMM_WORLD.Get_rank() == 0:
-            # Remove the temporary folder and its content using process of rank 0
-            if self._dir_name and os.path.isdir(self._dir_name):
+        if (MPI_COMM_WORLD.Get_rank() == 0) and hasattr(self, '_dir_name'):
+            if os.path.isdir(self._dir_name):
+                # Remove the temporary folder and its content (only the rank 0 process does this)
+                log.debug(f'Removing temp dir {self._dir_name} (rank {MPI_COMM_WORLD.Get_rank()})')
                 shutil.rmtree(self._dir_name)
 
+
         AbstractDomain._domain_instances -= 1
         if AbstractDomain._domain_instances == 0:
             gmsh.finalize() # for all processes
+            log.debug(
+                f'Finalized GMSH on rank {MPI_COMM_WORLD.Get_rank()}')
 
     def __repr__(self):
         """Returns a short description of the considered domain.
@@ -487,10 +496,11 @@ class AbstractDomain(ABC):
 
         # - Set default values for the domain & boundaries labels/name
         has_bdata = False  # flag to fill automatically labels to boundary
-        sub_boundary_names = {0: 'boundary'}  # default naming for the boundary
         has_cdata = False  # flag to fill automatically labels if needed
+        sub_boundary_names = {0: 'boundary'}  # default naming for the boundary
         sub_domain_names = {0: 'domain'}  # default naming for the domain
 
+
         ### From GMSH to MESHIO
         # Only the rank 0 process generate the gmsh mesh, convert it to MeshIO and save it as XDMF file.
         # It will avoid duplicates
@@ -504,9 +514,9 @@ class AbstractDomain(ABC):
             element_type, element_tags, _ = self.model.mesh.getElements(ele_dim) # get the entities of dimension `ele_dim`
             N = len(element_tags[np.where(element_type == ELEMENT_GMSH_TYPES[self._cell_type])[0][0]]) # number of entities
 
-            gmsh.write(self._dir_name+'/mesh.msh')
-
             path = self._dir_name + '/mesh.msh'
+
+            gmsh.write(path)
             mesh_io = mio.read(path, file_format='gmsh')
 
             # - Get the coordinates of the vertices
@@ -573,7 +583,7 @@ class AbstractDomain(ABC):
                     sub_domain_names.update({lab: f'sub_domain_{i}'
                                              for i, lab in enumerate(set(labels) - set(sub_domain_names))})
 
-                mio.write(self._dir_name + '/' + 'mesh.xdmf',
+                mio.write(self._dir_name + '/mesh.xdmf',
                           mio.Mesh(points=mesh_io.points,
                                    cells={self._cell_type:
                                               mesh_io.cells_dict[self._cell_type]},
@@ -581,10 +591,11 @@ class AbstractDomain(ABC):
             else:
                 log.debug(f"No labels found at the dimension {ele_dim}, use default.")
 
-                mio.write(self._dir_name + '/' + 'mesh.xdmf',
+                mio.write(self._dir_name + '/mesh.xdmf',
                           mio.Mesh(points=mesh_io.points,
                                    cells={self._cell_type:
                                               mesh_io.cells_dict[self._cell_type]}))
+            log.debug(f"MeshIO Mesh was written at {self._dir_name + '/mesh.xdmf'}")
 
             # - Extract physical properties (and possible names) for the boundaries (ele_dim - 1)
             bnd_cell_type = [cell_type for  cell_type, d in CELL_TYPE.items() if d == (ele_dim - 1)][0]
@@ -618,6 +629,7 @@ class AbstractDomain(ABC):
                                               mesh_io.cells_dict[bnd_cell_type]},
                                      cell_data={f"boundary": [labels]})
                 mio.xdmf.write(self._dir_name + '/mesh_boundary.xdmf', bnd_mesh)
+                log.debug(f"MeshIO Boundary was written at {self._dir_name + '/mesh_boundary.xdmf'}")
 
         # Update state of other processes from rank 0 (labels and sub-domain names)
         has_cdata = MPI_COMM_WORLD.bcast(has_cdata, root=0)
@@ -636,7 +648,7 @@ class AbstractDomain(ABC):
         mesh = fe.Mesh()
 
         # Read the saved mesh
-        with fe.XDMFFile(MPI_COMM_WORLD, self._dir_name + '/' + 'mesh.xdmf') as f:
+        with fe.XDMFFile(MPI_COMM_WORLD, self._dir_name + '/mesh.xdmf') as f:
             f.read(mesh)
 
         # Fill the cdata
@@ -681,9 +693,9 @@ class GmshProxy:
     def __getattr__(self, name):
         """Redirige les appels de méthode vers factory si le rang est 0."""
         def method(*args, **kwargs):
-            log.debug(f'Call from {MPI_COMM_WORLD.Get_rank()} of the method {name}')
+            #log.debug(f'Call from {MPI_COMM_WORLD.Get_rank()} of the method {name}')
             if MPI_COMM_WORLD.Get_rank() == 0:
-                log.debug(f'Ok for  {MPI_COMM_WORLD.Get_rank()} of the method {name}')
+                #log.debug(f'Ok for  {MPI_COMM_WORLD.Get_rank()} of the method {name}')
                 return getattr(self._factory, name)(*args, **kwargs)
             else:
                 log.debug(f'NO for {MPI_COMM_WORLD.Get_rank()} of the method {name}')
diff --git a/src/bvpy/solvers/nonlinear.py b/src/bvpy/solvers/nonlinear.py
index b136475..d42e9cf 100644
--- a/src/bvpy/solvers/nonlinear.py
+++ b/src/bvpy/solvers/nonlinear.py
@@ -221,26 +221,28 @@ class NonLinearSolver():
             of the performed computation.
         """
         if self._type == 'petsc':
-            logger.info(' Backend       : petsc')
-            logger.info(' Linear Solver : '
-                        + str(self._solver.parameters['linear_solver']))
-            logger.info(' Preconditioner: '
-                        + str(self._solver.parameters['preconditioner']))
-            logger.info(f" atol: {self._solver.parameters['absolute_tolerance']}")
-            logger.info(f" rtol: {self._solver.parameters['relative_tolerance']}")
-            logger.info(' Size          : '
-                        + str(self._u.function_space().dim()))
-            logger.info(' Solving NonLinearProblem ...')
+            if MPI_COMM_WORLD.Get_rank() == 0:
+                logger.info(' Backend       : petsc')
+                logger.info(' Linear Solver : '
+                            + str(self._solver.parameters['linear_solver']))
+                logger.info(' Preconditioner: '
+                            + str(self._solver.parameters['preconditioner']))
+                logger.info(f" atol: {self._solver.parameters['absolute_tolerance']}")
+                logger.info(f" rtol: {self._solver.parameters['relative_tolerance']}")
+                logger.info(' Size          : '
+                            + str(self._u.function_space().dim()))
+                logger.info(' Solving NonLinearProblem ...')
 
             start = time.time()
             self._solver.solve(self._problem, self._u.vector())
             end = time.time()
 
             residuals = self._snes.getConvergenceHistory()[0]
-
-            logger.info(' ... Done [' + str(end - start) + ' s]')
             conv = self._snes.getConvergedReason()
-            logger.info(' Iterations    : ' + str(len(residuals)))
-            logger.info(' Resiudal      : ' + str(residuals[-1]))
+
+            if MPI_COMM_WORLD.Get_rank() == 0:
+                logger.info(' ... Done [' + str(end - start) + ' s]')
+                logger.info(' Iterations    : ' + str(len(residuals)))
+                logger.info(' Residual      : ' + str(residuals[-1]))
 
             return residuals
\ No newline at end of file
-- 
GitLab