diff --git a/brick/component/connection.py b/brick/component/connection.py
index 0a1bb194c06f95a990d0258cd68e35f7a9b4572a..ae2454aa22b25930f9a016bbf93368ae9dfd0c97 100644
--- a/brick/component/connection.py
+++ b/brick/component/connection.py
@@ -84,11 +84,10 @@ def ShortestPathFromToN(
     extension: extension_t,
     costs: array_t,
     candidate_points_fct: Callable,
-    all_end_points : tuple = None,
+    all_end_points: tuple = None,
     extensions: tuple = None,
     max_straight_sq_dist: float = np_.inf,
-    erode_path: bool = True,
-    strict_erosion: bool = True,
+    erode_path: bool = False,
 ) -> Tuple[site_path_h, float]:
     '''
     Find the shortest weighted path between endpoints and soma closest contour points.
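For orientation, here is a minimal, self-contained sketch of the weighted-shortest-path idea that `ShortestPathFromToN` builds on (a heapq-based Dijkstra on a 2-D cost grid). The real function works on 3-D cost maps with several candidate target points, so treat this purely as an illustration; `shortest_path_cost` and the grid are made up for the example.

```python
import heapq
import numpy as np

def shortest_path_cost(costs: np.ndarray, start: tuple, goal: tuple) -> float:
    # Accumulated-cost Dijkstra over 4-connected moves; np.inf cells act as
    # walls (which is exactly how Dilate() blocks voxels in the cost map).
    heap, seen = [(costs[start], start)], set()
    while heap:
        dist, node = heapq.heappop(heap)
        if node == goal:
            return dist
        if node in seen:
            continue
        seen.add(node)
        r, c = node
        for nr, nc in ((r + 1, c), (r - 1, c), (r, c + 1), (r, c - 1)):
            if 0 <= nr < costs.shape[0] and 0 <= nc < costs.shape[1]:
                if np.isfinite(costs[nr, nc]) and (nr, nc) not in seen:
                    heapq.heappush(heap, (dist + costs[nr, nc], (nr, nc)))
    return np.inf

grid = np.array([[1.0, 1.0, 9.0],
                 [9.0, 1.0, 1.0],
                 [9.0, 9.0, 1.0]])
assert shortest_path_cost(grid, (0, 0), (2, 2)) == 5.0
```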
@@ -103,48 +102,43 @@ def ShortestPathFromToN(
     # Erode the candidate points of the extensions for Ext <-> Ext connexion
     if erode_path:
         if candidate_indexing is not None:
-            if strict_erosion:
-                # reformat to modify them
-                candidate_points = list(candidate_points)
-                candidate_indexing_var = list(candidate_indexing)
-                candidate_indexing = []
-                for idx in candidate_indexing_var:
-                    idx = list(idx)
-                    candidate_indexing.append(idx)
-
-                # Delete the points that are not endpoints and that are less than 2 pixels away from the closest endpoint
-                # -- important for dilatation/erosion process
-                for candidate, indx in enumerate(candidate_points):
-                    # Verify if end point
-                    if candidate not in zip(*all_end_points):
-                        # Here  necessary to delete the 2 closest points next to the end points
-                        # because if the all extension is candidate there might be a endpoint
-                        # that will be polluted by the connection
-                        not_allowed = set()
-                        for e_p in zip(*all_end_points):
-                            not_allowed_ = set(
-                                (e_p[0] + i, e_p[1] + j, e_p[2] + k)
-                                for i in (-2, -1, 0, 1, 2)
-                                for j in (-2, -1, 0, 1, 2)
-                                for k in (-2, -1, 0, 1, 2)
-                                if i != 0 or j != 0 or k != 0)
-                            not_allowed |= not_allowed_
-
-                        if candidate in not_allowed:
-                            for i in range(3):
-                                candidate_indexing[i].pop(indx)
-                            candidate_points.remove(candidate)
-                        # ext_sites = set()
-                        # for extension in extensions: ext_sites.add(set(zip(*extension.sites[2:-2])))
-                        # Verify if more than 2 pixels away from the closest endpoint
-                        # if candidate not in ext_sites:
-
-                # reformatting
-                candidate_points = tuple(candidate_points)
-                candidate_indexing = tuple(candidate_indexing)
+            # Build the set of sites lying within 2 voxels of an end point
+            # -- important for the dilatation/erosion process
+            end_points = set(zip(*all_end_points))
+            not_allowed = set()
+            for e_p in end_points:
+                not_allowed |= set(
+                    (e_p[0] + i, e_p[1] + j, e_p[2] + k)
+                    for i in (-2, -1, 0, 1, 2)
+                    for j in (-2, -1, 0, 1, 2)
+                    for k in (-2, -1, 0, 1, 2)
+                    if i != 0 or j != 0 or k != 0)
+
+            # Delete the candidate points that are not end points but lie less than
+            # 2 voxels away from one: the points closest to each end point must go
+            # because, if the whole extension is candidate, an end point might
+            # otherwise be polluted by the connection
+            kept = [indx for indx, candidate in enumerate(candidate_points)
+                    if candidate in end_points or candidate not in not_allowed]
+
+            # reformatting
+            candidate_points = tuple(candidate_points[indx] for indx in kept)
+            candidate_indexing = tuple(
+                [indexing[indx] for indx in kept] for indexing in candidate_indexing)
 
             # Erode the end_point of the extension AND the extension candidate points in the costs map
-            costs = Erode(candidate_points, costs, extension, extensions, image, all_end_points=all_end_points, strict_erosion=strict_erosion)
+            Erode(candidate_points, costs, extension, extensions, image, all_end_points=all_end_points)
 
     # If no path, return empty tuple for path and infinite path length.
     if candidate_points is None:
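The filtering in the hunk above boils down to: build a 2-voxel Chebyshev neighborhood around every end point, then keep only candidates that either are end points themselves or fall outside that zone. A minimal sketch with illustrative data (`exclusion_zone` is a made-up helper, not part of the module):

```python
from itertools import product

def exclusion_zone(end_points, radius=2):
    # All sites within `radius` voxels (Chebyshev distance) of an end point,
    # the end points themselves excluded.
    zone = set()
    for x, y, z in end_points:
        zone |= {(x + i, y + j, z + k)
                 for i, j, k in product(range(-radius, radius + 1), repeat=3)
                 if (i, j, k) != (0, 0, 0)}
    return zone

end_points = {(5, 5, 5)}
candidates = [(5, 5, 5), (6, 5, 5), (9, 9, 9)]
zone = exclusion_zone(end_points)
kept = [c for c in candidates if c in end_points or c not in zone]
assert kept == [(5, 5, 5), (9, 9, 9)]  # the immediate neighbor is dropped
```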
@@ -183,8 +177,8 @@ def ShortestPathFromToN(
 
 
 def ValidateConnection(
-    glial_cmp: glial_cmp_t, extension: glial_cmp_t, end_point: tuple, dijkstra_path: site_path_h, costs: array_t
-) -> array_t:
+    glial_cmp: glial_cmp_t, extension: glial_cmp_t, end_point: tuple, dijkstra_path: site_path_h, costs: array_t,
+) -> None:
     '''
     Keep the connection path in the glial_cmp.
     Add the extension to the extensions list of the glial_cmp.
@@ -195,8 +189,6 @@ def ValidateConnection(
     if connection_path.__len__() == 0:
         connection_path = None
 
-    costs[connection_path] = np_.inf
-
     # Store the connexion path in the glial_cmp
     glial_cmp.connection_path[extension.uid] = connection_path
     # Add the connected extension to the list of the extensions of the glial_cmp
@@ -204,16 +196,16 @@ def ValidateConnection(
     # TODO BackReferenceSoma
     extension.BackReferenceSoma(glial_cmp)
 
-    # Add the site of the connexion between the extension and the soma, for each soma and for ech extension
+    # Add the site of the connexion between the extension and the soma, for each soma and for each extension
     # restrain the verification to the soma <-> ext step
+
+    # Store the new endpoints of the extended extension - TODO update the end point vector
     if type(glial_cmp) is soma_t:
-        # Store the new endpoints of the extended extension
-        # print(end_point)
-        end_point = UpdateEndPointsWithConnexionPath(glial_cmp, connection_path, end_point)
-        # print(end_point)
+        end_point = UpdateEndPointsWithConnexionPath(connection_path, end_point)
         glial_cmp.ext_roots.append((extension.uid, end_point))
 
     if connection_path is not None:
+        costs[connection_path] = np_.inf
 
         # add the connexion sites to the extension sites
         ext_sites = np_.asarray(extension.sites)
@@ -222,25 +214,35 @@ def ValidateConnection(
             sites = ext_sites[i].tolist()
             sites += list(connection_path[i])
             extension.sites[i] = np_.asarray(sites, dtype=np_.int64)
-        # # reorder the new extension sites
-        # extension.sites = _ReOrderedSites(extension.sites)
         # reformat
         extension.sites = tuple(extension.sites)
 
-    # Dilate extensions
-    costs = Dilate(extension.sites, costs)
-
-    return costs
 
-
-def UpdateEndPointsWithConnexionPath(soma: soma_t, connexion_path: tuple, end_point: tuple) -> tuple:
+def UpdateEndPointsWithConnexionPath(connexion_path: tuple, end_point: tuple) -> tuple:
     if connexion_path is None:
         return end_point
 
     else:
-        # print(end_point)
+        # /!\ May yield a bug: the old end point is not removed from the
+        # end point vector yet. Draft kept below for a future update:
+        # # reformat to modify them
+        # all_end_points_var = list(all_end_points)
+        # all_end_points = []
+        # for i in all_end_points_var:
+        #     indx = list(i)
+        #     all_end_points.append(indx)
+        # # delete the connected end point
+        # if end_point in zip(*all_end_points):
+        #     idx = list(zip(*all_end_points)).index(end_point)
+        #     for i in range(3):
+        #         all_end_points[i].pop(idx)
+        # else:
+        #     print("WARNING: End point not in the all end points vectors !")
+
+        # Search for the connection point linked to the extension
         connexion_path = tuple(zip(*connexion_path))
-        # print(connexion_path[0], connexion_path[-1])
+
         close_end_pt = tuple(
             (end_point[0] + i, end_point[1] + j, end_point[2] + k)
             for i in (-1, 0, 1)
@@ -249,13 +251,19 @@ def UpdateEndPointsWithConnexionPath(soma: soma_t, connexion_path: tuple, end_po
             if i != 0 or j != 0 or k != 0)
 
         if connexion_path[0] in close_end_pt:
+            # for i in range(3):
+            #     all_end_points.append(connexion_path[-1][i])
+            # all_end_points = tuple(all_end_points)
             return connexion_path[-1]
 
         elif connexion_path[-1] in close_end_pt:
+            # for i in range(3):
+            #     all_end_points.append(connexion_path[0][i])
+            # all_end_points = tuple(all_end_points)
             return connexion_path[0]
 
 
-def Dilate(path: tuple, costs_map: array_t, cost: float = np_.inf) -> array_t:
+def Dilate(path: tuple, costs_map: array_t, cost: float = np_.inf) -> None:
     '''
     Put to the value cost in the cost map the neighbors voxels of a given path.
     The path must be in the format: array([[x1,x2,...],[y1,y2,...],[z1,z2,...]])
@@ -273,17 +281,14 @@ def Dilate(path: tuple, costs_map: array_t, cost: float = np_.inf) -> array_t:
             if (ext > (0, 0, 0)) and (ext[0] < shape[0]) and (ext[1] < shape[1]) and (ext[2] < shape[2]):
                 costs_map[int(ext[0]), int(ext[1]), int(ext[2])] = cost
 
-    return costs_map
-
 
 def Erode(path: Tuple[tuple],
           costs_map: array_t,
           extension: extension_t,
           extensions: tuple,
           image: array_t,
-          endpt=None,
           all_end_points=None,
-          strict_erosion: bool = False) -> array_t:
+          ) -> None:
     '''
     Erode the voxels of a given path or point, without eroding the neighboring extension pixels and their neighbors.
     The path must be in the format: Tuple(tuple(x1, y1, z1), ...)
@@ -292,30 +297,18 @@ def Erode(path: Tuple[tuple],
     new_path = []
 
     for point in path:
-
-        if endpt is None:
-            # find out whether this is a end point or not
-            if point in zip(*all_end_points):
-                endpt = True
-            else:
-                endpt = False
-
-        elif endpt is True:
-            costs_map = ErodeEndPoint(point, costs_map, extension, image)
-
-        elif endpt is False:
+        # find out whether this is an end point or not
+        if point in zip(*all_end_points):
+            ErodeEndPoint(point, costs_map, extension, image)
+        else:
             # build a new path with no endpoints inside
             new_path.append(point)
 
-        else:
-            raise ValueError("Value of endpt in Erode() not allowed. Allowed values: (None, True, False).")
-
-    costs_map = ErodePath(tuple(new_path), costs_map, extensions, image, strict_erosion=strict_erosion)
-
-    return costs_map
+    if new_path.__len__() > 0:
+        ErodePath(tuple(new_path), costs_map, extensions, image)
 
 
-def ErodeEndPoint(end_point: tuple, costs_map: array_t, extension: extension_t, image: array_t) -> array_t:
+def ErodeEndPoint(end_point: tuple, costs_map: array_t, extension: extension_t, image: array_t) -> None:
     '''
     Erode the endpoint of the extension to be connected and its neighbors.
     '''
@@ -348,10 +341,8 @@ def ErodeEndPoint(end_point: tuple, costs_map: array_t, extension: extension_t,
         if (tuple(ext) > (0, 0, 0)) and (tuple(ext)[0] < shape[0]) and (tuple(ext)[1] < shape[1]) and (tuple(ext)[2] < shape[2]):
             costs_map[ext[0], ext[1], ext[2]] = 1.0 / (image[ext[0], ext[1], ext[2]] + 1.0)
 
-    return costs_map
 
-
-def ErodePath(path: tuple, costs_map: array_t, extensions: tuple, image: array_t, strict_erosion:bool = False) -> array_t:
+def ErodePath(path: tuple, costs_map: array_t, extensions: tuple, image: array_t) -> None:
     '''
     Erode the voxels of a given path, without eroding the neighboring extension pixels and their neighbors.
     '''
@@ -373,18 +364,16 @@ def ErodePath(path: tuple, costs_map: array_t, extensions: tuple, image: array_t
     for extension in extensions:
         for site in zip(*extension.sites):
             if (site in dilated) and (site not in path):
-                dilated_site = set(
-                    (site[0] + i, site[1] + j, site[2] + k)
-                    for i in (-1, 0, 1)
-                    for j in (-1, 0, 1)
-                    for k in (-1, 0, 1)
-                    if i != 0 or j != 0 or k != 0)
-
                 dilated.remove(site)
-                if not strict_erosion:
-                    # Here the work of the strict erosion is already done in ShortestPathFromToN.
-                    # So we allow more voxels to be erode in this condition in ErodePath.
-                    dilated = dilated.difference(dilated_site)
+                # # Here the work of the strict erosion is already done in ShortestPathFromToN,
+                # # so we allowed more voxels to be eroded in this condition in ErodePath.
+                # dilated_site = set(
+                #     (site[0] + i, site[1] + j, site[2] + k)
+                #     for i in (-1, 0, 1)
+                #     for j in (-1, 0, 1)
+                #     for k in (-1, 0, 1)
+                #     if i != 0 or j != 0 or k != 0)
+                # dilated = dilated.difference(dilated_site)
 
     dilated = list(dilated)
 
@@ -392,6 +381,4 @@ def ErodePath(path: tuple, costs_map: array_t, extensions: tuple, image: array_t
     for ext in dilated:
         if (tuple(ext) > (0, 0, 0)) and (tuple(ext)[0] < shape[0]) and (tuple(ext)[1] < shape[1]) and (
                 tuple(ext)[2] < shape[2]):
-            costs_map[ext[0], ext[1], ext[2]] = 1.0 / (image[ext[0], ext[1], ext[2]] + 1.0)
-
-    return costs_map
\ No newline at end of file
+            costs_map[ext[0], ext[1], ext[2]] = 1.0 / (image[ext[0], ext[1], ext[2]] + 1.0)
\ No newline at end of file
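The recurring signature change in this file (`-> array_t` becoming `-> None`, with the `return costs_map` lines dropped) relies on NumPy fancy indexing writing through to the caller's array. A minimal sketch of the pattern, with hypothetical shapes and a stand-in `dilate`:

```python
import numpy as np

def dilate(path: tuple, costs_map: np.ndarray, cost: float = np.inf) -> None:
    # `path` is ((x1, x2, ...), (y1, y2, ...), (z1, z2, ...)); assigning
    # through it mutates the caller's array in place, so no return is needed.
    costs_map[path] = cost

costs = np.ones((4, 4, 4))
dilate(((0, 1), (0, 1), (0, 1)), costs)
assert np.isinf(costs[0, 0, 0]) and np.isinf(costs[1, 1, 1])
```

Call sites can therefore drop the `dijkstra_costs = cn_.Dilate(...)` rebinding, as the nutrimorph.py hunks below do.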
diff --git a/nutrimorph.py b/nutrimorph.py
index e0547ef387b4ffb4d29c60d82f709b749cbf5443..ed53f782d2e5114fe2b8f886ba0709d2647b0954 100644
--- a/nutrimorph.py
+++ b/nutrimorph.py
@@ -60,7 +60,6 @@ import psutil as mu_
 import matplotlib.pyplot as pl_
 import numpy as np_
 import imageio as io_
-from skimage.segmentation import relabel_sequential
 import skimage.morphology as mp_
 import skimage.measure as ms_
 import skimage.exposure as ex_
@@ -79,6 +78,8 @@ if not (os_.path.isfile(sy_.argv[1]) and os_.access(sy_.argv[1], os_.R_OK)):
 data_path = None
 channel = None
 save_images = None
+save_csv = None
+dilatation_erosion = None
 size_voxel_in_micron = None
 statistical_analysis = None
 soma_low_c = None
@@ -116,467 +117,8 @@ in_parallel = None
 exec(open(sy_.argv[1]).read())  # Only with search_parameters.py (stable version)
 
 
-###### --------------- FOR DVPT -------------------##########
-# soma_t = sm_.soma_t
-# extension_t = xt_.extension_t
-#
-# print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
-# start_time = tm_.time()
-#
-# # TODO add proper warning with warn module
-# # --- Images
-#
-# # Find the name of the file, eq. to the biological tested condition number x
-# name_file = os_.path.basename(data_path)
-# print("FILE: ", name_file)
-# name_file = name_file.replace(".tif", "")
-#
-# name_dir = os_.path.dirname(data_path)
-# print("DIR: ", name_dir)
-#
-# # Find the dimension of the image voxels in micron
-# # TODO do something more general, here too specific to one microscope metadata
-# size_voxel_in_micron = in_.FindVoxelDimensionInMicron(data_path, size_voxel_in_micron=size_voxel_in_micron)
-#
-# # Read the image
-# image = io_.volread(data_path)
-#
-# # Image size verification - simple version without user interface
-# image = in_.ImageVerification(image, channel)
-#
-# # iv_.image_verification(image, channel)  # -> PySide2 user interface  # TODO: must return the modified image!
-# # /!\ conflicts between some versions of PySide2 and Python3
-#
-# image = image[:, 800:, :300]
-# img_shape = image.shape
-#
-# #
-# print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")
-#
-# # Intensity relative normalization (between 0 and 1).
-# # Image for soma normalized for hysteresis
-# image_for_soma = in_.IntensityNormalizedImage(image)
-# # Image for extensions not normalized for frangi enhancement
-# image_for_ext = image.copy()
-#
-# print(f"NRM-IMG: t.{image_for_soma.dtype} m.{image_for_soma.min():.2f} M.{image_for_soma.max():.2f}")
-#
-# # --- Initialization
-# som_nfo = {}
-# ext_nfo = {}
-# axes = {}
-#
-# #
-# # --- Somas
-# print("\n--- Soma Detection")
-#
-# # Change the soma parameters from micron to pixel dimensions, using the previously determined voxel size
-# soma_min_area_c = in_.ToPixel(soma_min_area_c, size_voxel_in_micron, dimension=(0, 1))
-# soma_max_area_c = in_.ToPixel(soma_max_area_c, size_voxel_in_micron, dimension=(0, 1))
-# soma_selem_c = mp_.disk(in_.ToPixel(soma_selem_micron_c, size_voxel_in_micron))
-#
-# # - Create the maps for enhancing and detecting the soma
-# # Hysteresis thresholding and closing/opening
-# print("Hysteresis thresholding: ", end="")
-# som_nfo["map_small"] = soma_t.Map(image_for_soma, soma_low_c, soma_high_c, soma_selem_c)
-# print("Done")
-# # Deletion of too small elements
-# print("Morphological cleaning: ", end="")
-# som_nfo["map_small"], som_lmp_small = soma_t.DeleteSmallSomas(som_nfo["map_small"], soma_min_area_c)
-# # Deletion of too big elements (probably aggregation of cells)
-# # Separated from the deletion of small elements to avoid extensions to be detected where too big somas were just deleted
-# som_nfo["map"], som_lmp = soma_t.DeleteBigSomas(som_nfo["map_small"], som_lmp_small, soma_max_area_c)
-# print("Done")
-#
-# # Labelling of somas, find the number of somas
-# print("Somas labelling: ", end="")
-# som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)
-# print("Done")
-#
-# # Use relabel instead of label to optimize the algorithm. /!\ But BUG.
-# # som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
-# # n_somas = som_nfo["lmp"].max()
-#
-# # id_soma = 5
-# # print(f"/!\ Selected soma {id_soma} for dvpt")
-# # for id in range(n_somas + 1):
-# #     if id != id_soma:
-# #         som_nfo["lmp"][som_nfo["lmp"] == id] = 0
-# # som_nfo["lmp"], n_somas = ms_.label(som_nfo["lmp"], return_num=True)
-#
-# # po_.MaximumIntensityProjectionZ(som_nfo["lmp"])
-#
-# # Create the distance and influence maps used next for connexions between somas-extensions and extensions-extensions
-# print("Distance and Influence Map creation: ", end="")
-# som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])
-# print("Done")
-#
-# # po_.MaximumIntensityProjectionZ(som_nfo["influence_map"])
-#
-# # Create the tuple somas, containing all the objects 'soma'
-# print("Somas creation: ", end="")
-# somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))
-# print("Done")
-#
-# print(f"    n = {n_somas}")
-#
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
-#
-# if with_plot:
-#     fb_.PlotSomas(somas, som_nfo, axes)
-#
-# #
-# # -- Extentions
-# print("\n--- Extension Detection")
-#
-# # Set to zeros the pixels belonging to the somas.
-# # Take into account the aggregation of cells detected as one big soma to avoid extension creation there
-# del_soma = (som_nfo["map_small"] > 0).nonzero()
-# image_for_ext[del_soma] = 0
-#
-# # Change the extensions parameters from micron to pixel dimensions
-# ext_min_area_c = in_.ToPixel(ext_min_area_c, size_voxel_in_micron, dimension=(0, 1))
-# ext_max_length_c = in_.ToPixel(ext_max_length_c, size_voxel_in_micron, dimension=(0,))
-#
-# if ext_selem_micron_c == 0:
-#     ext_selem_pixel_c = None
-# else:
-#     ext_selem_pixel_c = mp_.disk(in_.ToPixel(ext_selem_micron_c, size_voxel_in_micron))
-#
-# # scale_range_pixel = []
-# # #
-# # for value in scale_range:
-# #     value_in_pixel = in_.ToPixel(value, size_voxel_in_micron, decimals=1)
-# #     scale_range_pixel.append(value_in_pixel)
-# # scale_range = tuple(scale_range_pixel)
-# #
-# # scale_step = in_.ToPixel(scale_step, size_voxel_in_micron, decimals=1)
-#
-# # - Perform frangi feature enhancement (via python or c - faster - implementation)
-# enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
-#     image_for_ext,
-#     scale_range,
-#     scale_step,
-#     alpha,
-#     beta,
-#     frangi_c,
-#     bright_on_dark,
-#     method,
-#     diff_mode=diff_mode,
-#     in_parallel=in_parallel)
-#
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")
-#
-# # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
-#
-# # - Creation of the enhanced maps
-# # enhanced_ext = ex_.adjust_log(enhanced_ext, 1)  # Not necessary, log contrast adjustment
-#
-# # Enhanced the contrast in frangi output
-# print('Enhancing Contrast: ', end='')
-# if adapt_hist_equalization:
-#     # necessary but not sufficient, histogram equalisation (adaptative)
-#     enhanced_ext = ex_.equalize_adapthist(enhanced_ext)
-# else:
-#     # necessary but not sufficient, histogram equalisation (global)
-#     enhanced_ext = ex_.equalize_hist(enhanced_ext)
-#
-# # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
-#
-# # Rescale the image by stretching the intensities between 0 and 255, necessary but not sufficient
-# enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
-# print('Done')
-# # Hysteresis thersholding
-# print('Hysteresis Thresholding: ', end='')
-# ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)
-# print('Done')
-# # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
-# # Deletion of too small elements
-# print('Morphological cleaning: ', end='')
-# ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)
-# print('Done')
-# # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
-# # Creation of the extensions skeleton
-# print('Skeletonization and Thinning: ', end='')
-# ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
-# print('Done')
-# # ext_nfo["map"] = extension_t.FilteredSkeletonMap(ext_nfo["coarse_map"], ext_max_length_c)
-# # po_.MaximumIntensityProjectionZ(ext_nfo["map"])
-# # Deletion of extensions found within the somas
-# print('Map Labelling: ', end='')
-# ext_nfo["map"][som_nfo["map"] > 0] = 0
-# # Labelling of extensions and number of extensions determined
-# ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)
-# print('Done')
-#
-# # Use relabel instead of label to optimize the algorithm. BUT PROBLEM WITH THE NUMBER OF EXTENSIONS DETECTED !
-# # ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
-# # n_extensions = ext_nfo["lmp"].max()
-#
-# # Create the tuple extensions, containing all the objects 'extension'
-# print('Extensions creation: ', end='')
-# extensions = tuple(
-#     extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
-#     for uid in range(1, n_extensions + 1))
-# print('Done')
-# print(f"    n = {n_extensions}")
-#
-# #
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
-#
-# if with_plot:
-#     fb_.PlotExtensions(extensions, ext_nfo, img_shape)
-#
-# # -- Preparation for Connections
-# # Calculate the COSTS of each pixel on the image and DILATE the existing extensions to
-# # avoid tangency of connexions to extensions
-# dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"], extensions, dilate=True)
-#
-# # -- Soma-Extention
-# print("\n--- Soma <-> Extension")
-#
-# # Change the extensions parameters from micron to pixel dimensions
-# max_straight_sq_dist_c = in_.ToPixel(max_straight_sq_dist_c, size_voxel_in_micron)
-# max_weighted_length_c = in_.ToPixel(max_weighted_length_c, size_voxel_in_micron)
-#
-# # Find the candidate extensions, with their endpoints, for connexion to the somas
-# candidate_conn_nfo = cn_.CandidateConnections(
-#     somas,
-#     som_nfo["influence_map"],
-#     som_nfo["dist_to_closest"],
-#     extensions,
-#     max_straight_sq_dist_c,
-# )
-#
-# # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
-# for ep_idx, soma, extension, end_point in candidate_conn_nfo:
-#     # Only try to connect if the extension is not already connected
-#     if extension.is_unconnected:
-#         print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
-#         # Erode the end_point of the extension in the costs map
-#         dijkstra_costs = in_.ErodeEndPoint(end_point, dijkstra_costs, extension, image)
-#         # Use of Dijkstra shortest weighted path
-#         path, length = cn_.ShortestPathFromToN(
-#             image,
-#             end_point,
-#             extension,
-#             dijkstra_costs,
-#             soma.ContourPointsCloseTo,
-#             max_straight_sq_dist=max_straight_sq_dist_c,
-#             erode_path=False,
-#         )
-#
-#         # Keep the connexion only if inferior to the allowed max weighted distance
-#         if length is not None:
-#             length = in_.ToPixel(length, size_voxel_in_micron)
-#             if length <= max_weighted_length_c:
-#                 # Validate and update all the fields + dilate again the whole extension
-#                 dijkstra_costs = cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
-#                 print(": Made")
-#             else:
-#                 dijkstra_costs = in_.Dilate(extension.sites, dijkstra_costs)
-#                 print("")
-#         else:
-#             dijkstra_costs = in_.Dilate(extension.sites, dijkstra_costs)
-#             print("")
-#
-# # for soma in somas:
-# #     soma.Extend(extensions, som_nfo["dist_to_closest"], dijkstra_costs)
-#
-# fb_.PrintConnectedExtensions(extensions)
-#
-# #
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
-#
-# if with_plot:
-#     fb_.PlotSomasWithExtensions(somas, som_nfo, "all")
-#
-# # -- Extention-Extention
-# print("\n--- Extension <-> Extension")
-#
-# should_look_for_connections = True
-#
-# # Update the soma maps with next Ext-Ext connexions
-# while should_look_for_connections:
-#     # Update the costs by dilating everything again #is it necessary given all that is done above?
-#     for soma in somas:
-#         for extension in soma.extensions:
-#             dijkstra_costs = in_.Dilate(extension.sites, dijkstra_costs)
-#     # Update the final somas + ext map by adding the new extensions and their connexion paths
-#     som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
-#     # Update the influence map
-#     som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["soma_w_ext_lmp"])
-#
-#     # Find the candidate extensions for connexion to the primary extensions
-#     candidate_conn_nfo = cn_.CandidateConnections(
-#         somas,
-#         som_nfo["influence_map"],
-#         som_nfo["dist_to_closest"],
-#         extensions,
-#         max_straight_sq_dist_c,
-#     )
-#
-#     should_look_for_connections = False
-#
-#     # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
-#     for ep_idx, soma, extension, end_point in candidate_conn_nfo:
-#         # Only try to connect if the extension is not already connected
-#         if extension.is_unconnected:
-#             print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
-#             # Erode the end_point of the extension in the costs map
-#             dijkstra_costs = in_.ErodeEndPoint(end_point, dijkstra_costs, extension, image)
-#             # Use of Dijkstra shortest weighted path
-#             path, length = cn_.ShortestPathFromToN(
-#                 image,
-#                 end_point,
-#                 extension,
-#                 dijkstra_costs,
-#                 soma.ExtensionPointsCloseTo,
-#                 max_straight_sq_dist=max_straight_sq_dist_c,
-#                 erode_path=True,
-#             )
-#
-#             # Keep the connexion only if inferior to the allowed max weighted distance
-#             if length is not None:
-#                 if length <= max_weighted_length_c:
-#                     # Verify that the last element of the connexion path is contained in the extension to be connected
-#                     # If so, returns the extensions containing the path last site.
-#                     tgt_extenstion = extension_t.ExtensionContainingSite(extensions, path[-1])
-#                     # Validate connexion: update the glial component objects and store more info in their fields
-#                     cn_.ValidateConnection(tgt_extenstion, extension, end_point, path, dijkstra_costs)
-#
-#                     should_look_for_connections = True
-#
-#                     print(": Made")
-#                 else:
-#                     dijkstra_costs = in_.Dilate(extension.sites, dijkstra_costs)
-#                     print("")
-#             else:
-#                 dijkstra_costs = in_.Dilate(extension.sites, dijkstra_costs)
-#                 print("")
-#
-# # -- Create an extension map containing all the ext + connexions, without the somas.
-# # Used for the graphs extraction
-# ext_nfo["lmp_soma"] = som_nfo['soma_w_ext_lmp']
-# ext_nfo["lmp_soma"][som_nfo["map"] > 0] = 0
-#
-#
-# # po_.MaximumIntensityProjectionZ(ext_nfo["lmp_soma"])
-#
-# #
-#
-# fb_.PrintConnectedExtensions(extensions)
-#
-# #
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
-#
-# if with_plot:
-#     fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")
-#
-# # -- Summary
-# print("\n")
-# for soma in somas:
-#     print(soma)
-#
-# # snapshot = tr_.take_snapshot()
-# # top_file_stats = snapshot.statistics('lineno')
-# # print("Memory Profiling: Top 10 FILES")
-# # for stat in top_file_stats[:10]:
-# #     print(stat)
-# # top_block_stats = snapshot.statistics('traceback')
-# # top_block_stats = top_block_stats[0]
-# # print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
-# # for line in top_block_stats.traceback.format():
-# #     print(line)
-#
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
-#
-# if with_plot:
-#     pl_.show()
-#
-# po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'])
-# # , output_image_file_name=f"D:\\MorganeNadal\\Results\\Images\\{name_file}.png")
-#
-# # --- Extract all the extensions of all somas as a graph
-# print('\n--- Graph extraction')
-#
-# # Create the graphs with SKLGraph module (available on Eric Debreuve Gitlab)
-# for soma in somas:
-#     print(f" Soma {soma.uid}", end="")
-#     # Create SKLGraph skeletonized map
-#     ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid,
-#                                      store_widths=True,
-#                                      skeletonize=False,
-#                                      do_post_thinning=False)
-#
-#     # Create the graph from the SKLGaph skeletonized map
-#     soma.skl_graph = skl_graph_t.FromSkeleton(ext_map, end_point, size_voxel=size_voxel_in_micron)
-#
-#     # --- Find the root of the {ext+conn} graphs.
-#     # Roots are the nodes of degree 1 that are to be linked to the soma
-#     soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)
-#
-#     # Add a node "soma" and link it to the root nodes
-#     soma_node = f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
-#     soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)
-#
-#     for node in soma.graph_roots.values():
-#         soma.skl_graph.add_edge(node, soma_node, root=True)
-#
-#     nx_.draw_networkx(soma.skl_graph)
-#     pl_.show(block=True)
-#     pl_.close()
-#
-#     print(": Done")
-#
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-#
-# # --- Extract features
-# print('\n--- Features Extraction\n')
-#
-# # Parameters
-# if hist_bins_borders_length is None:
-#     number_of_bins_length = int(number_of_bins_length)
-#     bins_length = np_.linspace(hist_min_length, hist_min_length + hist_step_length * number_of_bins_length,
-#                                num=number_of_bins_length)
-#     bins_length[-1] = np_.inf
-# else:
-#     bins_length = np_.array(hist_bins_borders_length)
-#     bins_length[-1] = np_.inf
-#
-# if hist_bins_borders_curvature is None:
-#     number_of_bins_curvature = int(number_of_bins_curvature)
-#     bins_curvature = np_.linspace(hist_min_curvature,
-#                                   hist_min_curvature + hist_step_curvature * number_of_bins_curvature,
-#                                   num=number_of_bins_curvature)
-#     bins_curvature[-1] = np_.inf
-# else:
-#     bins_curvature = np_.array(hist_bins_borders_curvature)
-#     bins_curvature[-1] = np_.inf
-#
-# # Pandas dataframe creation with all the measured features
-# features_df = ge_.ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron, bins_length, bins_curvature,
-#                                       ext_scales)
-#
-# # Save the pandas df into .csv
-# features_df.to_csv(f"{name_dir}\\{name_file}.csv")
-#
-# #
-# elapsed_time = tm_.gmtime(tm_.time() - start_time)
-# print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
-# print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
-
-# test if connexion paths are directly in contact with the extensions
-# (connexion to end points are normal (number = nb of primary and secondary extensions) but not the others)
-
 def Test_Tangency_ext_conn(soma):
-    pb=[]
+    pb = []
     for extension in soma.extensions:
         for site in list(zip(*extension.sites)):
             close_end_pt = tuple(
@@ -595,42 +137,43 @@ def Test_Tangency_ext_conn(soma):
                                         pb.append(conn)
                                         print("\nconn: ", conn, "\next_site: ", site, "\next_uid: ", extension.uid)
 
+
 #########----------- Executable version with files AND repositories ------------###########
 
 def NutriMorph(data_path: str,
-        channel=channel,
-        size_voxel_in_micron=size_voxel_in_micron,
-        soma_low_c=soma_low_c,
-        soma_high_c=soma_high_c,
-        soma_selem_micron_c=soma_selem_micron_c,
-        soma_min_area_c=soma_min_area_c,
-        soma_max_area_c=soma_max_area_c,
-        adapt_hist_equalization=adapt_hist_equalization,
-        ext_low_c=ext_low_c,
-        ext_high_c=ext_high_c,
-        ext_selem_micron_c=ext_selem_micron_c,
-        ext_min_area_c=ext_min_area_c,
-        ext_max_length_c=ext_max_length_c,
-        max_straight_sq_dist_c=max_straight_sq_dist_c,
-        max_weighted_length_c=max_weighted_length_c,
-        scale_range=scale_range,
-        scale_step=scale_step,
-        alpha=alpha,
-        beta=beta,
-        frangi_c=frangi_c,
-        diff_mode=diff_mode,
-        bright_on_dark=bright_on_dark,
-        method=method,
-        hist_min_length=hist_min_length,
-        hist_step_length=hist_step_length,
-        number_of_bins_length=number_of_bins_length,
-        hist_bins_borders_length=hist_bins_borders_length,
-        hist_min_curvature=hist_min_curvature,
-        hist_step_curvature=hist_step_curvature,
-        number_of_bins_curvature=number_of_bins_curvature,
-        hist_bins_borders_curvature=hist_bins_borders_curvature,
-        with_plot=with_plot,
-        in_parallel=in_parallel) -> pd_.DataFrame():
+               channel=channel,
+               dilatation_erosion=dilatation_erosion,
+               size_voxel_in_micron=size_voxel_in_micron,
+               soma_low_c=soma_low_c,
+               soma_high_c=soma_high_c,
+               soma_selem_micron_c=soma_selem_micron_c,
+               soma_min_area_c=soma_min_area_c,
+               soma_max_area_c=soma_max_area_c,
+               adapt_hist_equalization=adapt_hist_equalization,
+               ext_low_c=ext_low_c,
+               ext_high_c=ext_high_c,
+               ext_selem_micron_c=ext_selem_micron_c,
+               ext_min_area_c=ext_min_area_c,
+               max_straight_sq_dist_c=max_straight_sq_dist_c,
+               max_weighted_length_c=max_weighted_length_c,
+               scale_range=scale_range,
+               scale_step=scale_step,
+               alpha=alpha,
+               beta=beta,
+               frangi_c=frangi_c,
+               diff_mode=diff_mode,
+               bright_on_dark=bright_on_dark,
+               method=method,
+               hist_min_length=hist_min_length,
+               hist_step_length=hist_step_length,
+               number_of_bins_length=number_of_bins_length,
+               hist_bins_borders_length=hist_bins_borders_length,
+               hist_min_curvature=hist_min_curvature,
+               hist_step_curvature=hist_step_curvature,
+               number_of_bins_curvature=number_of_bins_curvature,
+               hist_bins_borders_curvature=hist_bins_borders_curvature,
+               with_plot=with_plot,
+               in_parallel=in_parallel) -> pd_.DataFrame:
 
     soma_t = sm_.soma_t
     extension_t = xt_.extension_t
@@ -662,9 +205,8 @@ def NutriMorph(data_path: str,
     # iv_.image_verification(image, channel)  # -> PySide2 user interface  # TODO: must return the modified image!
     # /!\ conflicts between some versions of PySide2 and Python3
 
-    # image = image[:, 800:, :300]
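+    # /!\ hard-coded crop below: only part of the stack is processed (TODO: parameterize or remove)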
+    image = image[:, 800:, 300:]
     img_shape = image.shape
-
     #
     print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")
 
@@ -717,8 +259,6 @@ def NutriMorph(data_path: str,
     som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])
     print("Done")
 
-    # po_.MaximumIntensityProjectionZ(som_nfo["influence_map"])
-
     # Create the tuple somas, containing all the objects 'soma'
     print("Somas creation: ", end="")
     somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))
@@ -765,11 +305,7 @@ def NutriMorph(data_path: str,
     elapsed_time = tm_.gmtime(tm_.time() - start_time)
     print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")
 
-    # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
-
     # - Creation of the enhanced maps
-    # enhanced_ext = ex_.adjust_log(enhanced_ext, 1)  # Not necessary, log contrast adjustment
-
     # Enhanced the contrast in frangi output
     print('Enhancing Contrast: ', end='')
     if adapt_hist_equalization:
@@ -779,8 +315,6 @@ def NutriMorph(data_path: str,
         # necessary but not sufficient, histogram equalisation (global)
         enhanced_ext = ex_.equalize_hist(enhanced_ext)
 
-    # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
-
     # Rescale the image by stretching the intensities between 0 and 255, necessary but not sufficient
     enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
     print('Done')
@@ -788,12 +322,10 @@ def NutriMorph(data_path: str,
     print('Hysteresis Thresholding: ', end='')
     ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)
     print('Done')
-    # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
     # Deletion of too small elements
     print('Morphological cleaning: ', end='')
     ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)
     print('Done')
-    # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
     # Creation of the extensions skeleton
     print('Skeletonization and Thinning: ', end='')
     ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
@@ -831,10 +363,12 @@ def NutriMorph(data_path: str,
     # -- Preparation for Connections
     # Calculate the COSTS of each pixel on the image and DILATE the existing extensions to
     # avoid tangency of connexions to extensions
-    dijkstra_costs_ini = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
-    # Dilate all the extensions
-    for extension in extensions:
-        dijkstra_costs = cn_.Dilate(extension.sites, dijkstra_costs_ini)
+    dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
+
+    if dilatation_erosion:
+        # Dilate all extensions
+        for extension in extensions:
+            cn_.Dilate(extension.sites, dijkstra_costs)
 
     # -- Soma-Extention
     print("\n--- Soma <-> Extension")
@@ -857,31 +391,38 @@ def NutriMorph(data_path: str,
         # Only try to connect if the extension is not already connected
         if extension.is_unconnected:
             print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
-            # Erode the end_point of the extension in the costs map
-            dijkstra_costs = cn_.Erode((end_point,), dijkstra_costs, extension, extensions, image, endpt=True)
+            if dilatation_erosion:
+                # Erode the end_point of the extension in the costs map
+                cn_.Erode((end_point,), dijkstra_costs, extension, extensions, image, all_end_points=all_end_points)
+
             # Use of Dijkstra shortest weighted path
             path, length = cn_.ShortestPathFromToN(
-                image,
-                end_point,
-                extension,
-                dijkstra_costs,
-                soma.ContourPointsCloseTo,
+                image=image,
+                point=end_point,
+                extension=extension,
+                costs=dijkstra_costs,
+                candidate_points_fct=soma.ContourPointsCloseTo,
                 max_straight_sq_dist=max_straight_sq_dist_c,
                 erode_path=False,
             )
 
             # Keep the connexion only if inferior to the allowed max weighted distance
             if path.__len__() > 0:
-                length = in_.ToPixel(length, size_voxel_in_micron)
+                # length is already in pixels, and so is max_weighted_length_c
                 if length <= max_weighted_length_c:
                     # Validate and update all the fields + dilate again the whole extension
-                    dijkstra_costs = cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
+                    cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
+                    if dilatation_erosion:
+                        # Dilate extensions
+                        cn_.Dilate(extension.sites, dijkstra_costs)
                     print(": Made")
                 else:
-                    dijkstra_costs = cn_.Dilate(extension.sites, dijkstra_costs)
+                    if dilatation_erosion:
+                        cn_.Dilate(extension.sites, dijkstra_costs)
                     print("")
             else:
-                dijkstra_costs = cn_.Dilate(extension.sites, dijkstra_costs)
+                if dilatation_erosion:
+                    cn_.Dilate(extension.sites, dijkstra_costs)
                 print("")
 
     fb_.PrintConnectedExtensions(extensions)
@@ -901,12 +442,13 @@ def NutriMorph(data_path: str,
     # Update the soma maps with next Ext-Ext connexions
     while should_look_for_connections:
 
-        # Update the costs by dilating everything again plus the contour points of the soma
-        for soma in somas:
-            dijkstra_costs = cn_.Dilate(soma.contour_points, dijkstra_costs)
-            # Below probably not necessary but security
-            for extension in soma.extensions:
-                dijkstra_costs = cn_.Dilate(extension.sites, dijkstra_costs)
+        if dilatation_erosion:
+            # Update the costs by dilating everything again plus the contour points of the soma
+            for soma in somas:
+                cn_.Dilate(soma.contour_points, dijkstra_costs)
+                # Below is probably not necessary, but kept as a safety measure
+                for extension in soma.extensions:
+                    cn_.Dilate(extension.sites, dijkstra_costs)
 
         # Update the final somas + ext map by adding the new extensions and their connexion paths
         som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
@@ -930,20 +472,20 @@ def NutriMorph(data_path: str,
             # Only try to connect if the extension is not already connected
             if extension.is_unconnected:
                 print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
-                # Erode the end_point of the extension in the costs map
-                dijkstra_costs = cn_.Erode((end_point,), dijkstra_costs, extension, extensions, image, endpt=True)
+                if dilatation_erosion:
+                    # Erode the end_point of the extension in the costs map
+                    cn_.Erode((end_point,), dijkstra_costs, extension, extensions, image, all_end_points=all_end_points)
                 # Use of Dijkstra shortest weighted path
                 path, length = cn_.ShortestPathFromToN(
-                    image,
-                    end_point,
-                    extension,
-                    dijkstra_costs,
-                    soma.ExtensionPointsCloseTo,
+                    image=image,
+                    point=end_point,
+                    extension=extension,
+                    costs=dijkstra_costs,
+                    candidate_points_fct=soma.ExtensionPointsCloseTo,
                     extensions=extensions,
                     all_end_points=all_end_points,
                     max_straight_sq_dist=max_straight_sq_dist_c,
-                    erode_path=True,
-                    strict_erosion=True,
+                    erode_path=dilatation_erosion,
                 )
 
                 # Keep the connexion only if inferior to the allowed max weighted distance
@@ -954,15 +496,20 @@ def NutriMorph(data_path: str,
                         tgt_extenstion = extension_t.ExtensionContainingSite(extensions, path[-1])
                         # Validate connexion: update the glial component objects and store more info in their fields
                         cn_.ValidateConnection(tgt_extenstion, extension, end_point, path, dijkstra_costs)
+                        if dilatation_erosion:
+                            # Dilate extensions
+                            cn_.Dilate(extension.sites, dijkstra_costs)
 
                         should_look_for_connections = True
 
                         print(": Made")
                     else:
-                        dijkstra_costs = cn_.Dilate(extension.sites, dijkstra_costs)
+                        if dilatation_erosion:
+                            cn_.Dilate(extension.sites, dijkstra_costs)
                         print("")
                 else:
-                    dijkstra_costs = cn_.Dilate(extension.sites, dijkstra_costs)
+                    if dilatation_erosion:
+                        cn_.Dilate(extension.sites, dijkstra_costs)
                     print("")
 
     # -- Create an extension map containing all the ext + connexions, without the somas.
@@ -994,7 +541,7 @@ def NutriMorph(data_path: str,
     if save_images is not None:
         po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'],
                                         block=False,
-                                        output_image_file_name=f"{save_images}\\{name_file}.png")
+                                        output_image_file_name=f"{save_images}\\MIP_{name_file}.png")
 
     # --- Extract all the extensions of all somas as a graph
     print('\n--- Graph extraction')
@@ -1005,8 +552,8 @@ def NutriMorph(data_path: str,
         # Create SKLGraph skeletonized map
         ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid,
                                          store_widths=True,
-                                         skeletonize=False,
-                                         do_post_thinning=False)
+                                         skeletonize=True,
+                                         do_post_thinning=True)
 
         # Create the graph from the SKLGaph skeletonized map
         soma.skl_graph = skl_graph_t.FromSkeleton(ext_map, end_point, size_voxel=size_voxel_in_micron)
@@ -1057,12 +604,19 @@ def NutriMorph(data_path: str,
         bins_curvature[-1] = np_.inf
 
     # Pandas dataframe creation with all the measured features
-    features_df = ge_.ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron, bins_length, bins_curvature,
-                                          ext_scales)
+    features_df = ge_.ExtractFeaturesInDF(name_file,
+                                          somas,
+                                          size_voxel_in_micron,
+                                          bins_length,
+                                          bins_curvature,
+                                          ext_scales,
+                                          )
 
     # Save the pandas df into .csv
-    # features_df.to_csv(f"{name_dir}\\{name_file}.csv")
-    features_df.to_csv(f"{save_images}\\{name_file}.csv")
+    if save_csv is not None:
+        features_df.to_csv(f"{save_csv}\\{name_file}.csv")
+    else:
+        features_df.to_csv(f"{name_dir}\\{name_file}.csv")
 
     #
     elapsed_time = tm_.gmtime(tm_.time() - start_time)
@@ -1100,66 +654,85 @@ if __name__ == '__main__':
                 # -- due to best fitting ellipsoid algo (JTJ is singular due to convex hull being flat)
                 concatenated_features_df.dropna()
 
-                ## TODO /!\ Still errors in the graph = some extensions are tangent to each other.
-                #       Verify Dilatation and Erosion!
         # Save to .csv in the parent repository
-        # concatenated_features_df.to_csv(f"{data_path}\\features.csv")
-        concatenated_features_df.to_csv(f"{save_images}\\features.csv")
+        if save_csv is None:
+            concatenated_features_df.to_csv(f"{data_path}\\features.csv")
+        else:
+            concatenated_features_df.to_csv(f"{save_csv}\\features.csv")
 
-    else:
-        raise ImportError("Not a valid data path!")
-
-            # --- TODO Clustering with this df and module features_analysis.py
+        # --- TODO Clustering with this df and module features_analysis.py
         # if statistical_analysis:
-            ## -- K-means with all the features (2 conditions)
+        ## For the moment, drop the columns with non-scalar or non-useful values
+        ## - TO BE CHANGED (use distance metrics such as the Bhattacharyya coefficient, etc.)
+        # df = concatenated_features_df.drop(["soma uid",
+        #                                     "spherical_angles_eva",
+        #                                     "spherical_angles_evb",
+        #                                     "hist_lengths",
+        #                                     "hist_lengths_P",
+        #                                     "hist_lengths_S",
+        #                                     "hist_curvature",
+        #                                     "hist_curvature_P",
+        #                                     "hist_curvature_S"],
+        #                                    axis=1)
+
+        ## -- PCA with all the features
+        ## Separating the features from their conditions and durations
+        # all_target = concatenated_features_df.loc[:, ['Condition']].values
+        # all_features_df = concatenated_features_df.drop(["Duration"], axis=1)
 
-            ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
-            # kmeans = fa_.KmeansOnDF(concatenated_features_df, nb_clusters=(2,), elbow=True, intracluster_var=True)
+        ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
+        # TODO pca = fa_.PCAOnDF(concatenated_features_df)
 
-            ## Between the two conditions, for each duration (2 conditions, 3 durations)
-            # groupby_duration = concatenated_features_df.groupby("Duration")
-            # for duration, values in groupby_duration:
-                # kmeans = fa_.KmeansOnDF(values, nb_clusters=(2,), elbow=True, intracluster_var=True)
+        ## Between the two conditions, for each duration (2 conditions, 3 durations)
+        # groupby_duration = concatenated_features_df.groupby("Duration")
+        # for duration, values in groupby_duration:
+        ##TODO find the condition to print it on PCA
+        # print(duration)
+        # duration_features_df = concatenated_features_df.drop(["Duration"], axis=1)
+        # pca = fa_.PCAOnDF(values)
 
-            ## -- PCA with all the features
+        ## -- K-means with all the features (2 conditions)
 
-            ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
-            # TODO pca = fa_.PCAOnDF(concatenated_features_df)
+        ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
+        # kmeans = fa_.KmeansOnDF(concatenated_features_df, nb_clusters=(2,), elbow=True, intracluster_var=True)
 
-            ## Between the two conditions, for each duration (2 conditions, 3 durations)
-            # for duration, values in groupby_duration:
-                # pca = fa_.PCAOnDF(values)
+        ## Between the two conditions, for each duration (2 conditions, 3 durations)
+        # for duration, values in groupby_duration:
+        # kmeans = fa_.KmeansOnDF(values, nb_clusters=(2,), elbow=True, intracluster_var=True)
 
-            ## -- Select Discriminant features by statistical analysis
-            # TODO filtered_df = SelectFeatures(concatenated_features_df)
+        ## -- Select Discriminant features by statistical analysis
+        # TODO filtered_df = SelectFeatures(concatenated_features_df)
 
-            ## -- PCA with all the features with the cluster as label
+        ## -- PCA with all the features with the cluster as label
 
-            ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
-            # TODO pca = fa_.PCAOnDF(concatenated_features_df)
+        ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
+        # TODO pca = fa_.PCAOnDF(concatenated_features_df)
 
-            ## Between the two conditions, for each duration (2 conditions, 3 durations)
-            # for duration, values in groupby_duration:
-            # pca = fa_.PCAOnDF(values)
+        ## Between the two conditions, for each duration (2 conditions, 3 durations)
+        # for duration, values in groupby_duration:
+        # pca = fa_.PCAOnDF(values)
 
-            ## -- Select Discriminant features by statistical analysis
-            # TODO filtered_df = SelectFeatures(concatenated_features_df)
+        ## -- Select Discriminant features by statistical analysis
+        # TODO filtered_df = SelectFeatures(concatenated_features_df)
 
-            ## -- K-means with selected features
+        ## -- PCA with selected features
 
-            ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
-            # filtered_kmeans = fa_.KmeansOnDF(filtered_df, nb_clusters=(2,), elbow=True, intracluster_var=True)
+        ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
+        # TODO pca = fa_.PCAOnDF(filtered_df)
 
-            ## Between the two conditions, for each duration (2 conditions, 3 durations)
-            # filtered_groupby_duration = filtered_df.groupby("Duration")
-            # for duration, values in filtered_groupby_duration:
-                # filtered_kmeans = fa_.KmeansOnDF(values, nb_clusters=(2,), elbow=True, intracluster_var=True)
+        ## Between the two conditions, for each duration (2 conditions, 3 durations)
+        # filtered_groupby_duration = filtered_df.groupby("Duration")
+        # for duration, values in filtered_groupby_duration:
+        # pca = fa_.PCAOnDF(values)
 
-            ## -- PCA with selected features
+        ## -- K-means with selected features
 
-            ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
-            # TODO pca = fa_.PCAOnDF(filtered_df)
+        ## Between the two conditions, regardless the duration of experiment (2 conditions, all durations)
+        # filtered_kmeans = fa_.KmeansOnDF(filtered_df, nb_clusters=(2,), elbow=True, intracluster_var=True)
 
-            ## Between the two conditions, for each duration (2 conditions, 3 durations)
-            # for duration, values in filtered_groupby_duration:
-                # pca = fa_.PCAOnDF(values)
\ No newline at end of file
+        ## Between the two conditions, for each duration (2 conditions, 3 durations)
+        # for duration, values in filtered_groupby_duration:
+        # filtered_kmeans = fa_.KmeansOnDF(values, nb_clusters=(2,), elbow=True, intracluster_var=True)
+
+    else:
+        raise ValueError("Not a valid data path!")
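The connexion loops above now bracket every attempt with the same guarded pattern: erode around the end point before the Dijkstra search, re-dilate the extension afterwards, and skip both when `dilatation_erosion` is off. A minimal sketch of that flow with stand-in helpers (`erode`/`dilate` stand in for `cn_.Erode`/`cn_.Dilate`; the search itself is elided):

```python
import numpy as np

def erode(points, costs):   # stand-in: reopen voxels around `points`
    costs[points] = 1.0

def dilate(points, costs):  # stand-in: wall off voxels around `points`
    costs[points] = np.inf

def try_connect(end_point, ext_sites, costs, dilatation_erosion: bool) -> None:
    if dilatation_erosion:
        erode(end_point, costs)
    # ... run the shortest-path search on `costs` and validate the connexion ...
    if dilatation_erosion:
        dilate(ext_sites, costs)

costs = np.ones((3, 3, 3))
try_connect(((0,), (0,), (0,)), ((0, 1), (0, 1), (0, 1)), costs, True)
assert np.isinf(costs[1, 1, 1])  # extension walled off again after the attempt
```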
diff --git a/parameters.ini b/parameters.ini
index 519df012c2bb1951f66e1d40acced6774af761a4..232fd6659a4eed701b11635771bc741f51c7f4f2 100644
--- a/parameters.ini
+++ b/parameters.ini
@@ -49,38 +49,28 @@ parameter3                  :   None
 ; then your parameter has the value string: 'None', and not None
 
 [Input]
-data_path                   :   D:\\MorganeNadal\\IBA1-Microglies
-# D:\\MorganeNadal\\IBA1-Microglies\\DIO_1H_16_1.58_2.1\DIO_1H_16_1.58_2.1_3.tif
-#D:\\MorganeNadal\\IBA1-Microglies\\CHO_1H_D_1.70_3.3\\CHO_1H_D_1.70_3.3_1.tif
-#.\data\DIO_6H_6_1.70bis_2.2_3.tif
-#D:\MorganeNadal\IBA1-Microglies\standard conditions\020620_ST8_20W_1_2.lif - Position 3.tif
-#D:\\MorganeNadal\\IBA1-Microglies\\standard conditions\\ST8_crop2(1024x1024)_vert.tif
+data_path                   :   .\\data\\DIO_6H_6_1.70bis_2.2_3.tif
 ; direct data path to the image
-channel
+channel                     :   G
 ; Can take the values R, G or B.
 ; Can also be set to None (cf. README above).
 size_voxel_in_micron
-#: [0.167,0.167,0.5]
-#:   [0.071, 0.071, 0.4997]
 ; size_voxel_in_micron ->  [X,Y,Z]
 ; Can also be set to None (cf. README above).
-crop_image
-# TODO
-save_images                :    D:\\MorganeNadal\\Results\\Images
+save_images                :    \\path_where_to_save_MIP_and_graphs
 ; if None, no saving. Otherwise, path where to save images (MIP & graphs)
-condition
-; TODO CHO, DIO, ...
-duration
-; TODO 1H, 3H, 1W, 3W, ...
+save_csv                   :    \\path_where_to_save_features_csv
+; Where to save the csv. If None, it is saved in the .tif directory by default
+dilatation_erosion         :    False
+; Choose whether to perform erosion/dilation on the extensions during the connexion process.
+; For the moment, better results are achieved without it, but enabling dilatation_erosion is theoretically more rigorous.
 statistical_analysis       :    True
 
 [Somas]
 soma_low_c                 :    0.15
-# low = 10 #0.15
 ; this is a coefficient => not in micron
 soma_high_c                :    0.7126
 ; this is a coefficient => not in micron
-# high = 67.4 # 0.7126
 soma_selem_micron_c        :    0.481
 soma_min_area_c            :    58
 soma_max_area_c            :    580
@@ -89,23 +79,17 @@ soma_max_area_c            :    580
 adapt_hist_equalization    :    False
 ; for hysteresis
 ext_low_c                  :    10
-#7.5e-8
-#1e-20
-# low = 0.02
 ext_high_c                 :    0.6
-#1e-5
-# 0.5e-5
-# high = 0.04
 ; for morphological cleaning
 ext_selem_micron_c         :    0.2405
 ext_min_area_c             :    5.7840
-ext_max_length_c           :    60
 
 [Connexions]
-max_straight_sq_dist_c     :    216.45
-max_weighted_length_c      :    4.81
+max_straight_sq_dist_c     :    217
+max_weighted_length_c      :    5
 
 [Frangi]
+; Values can be adapted to the image with the interface of frangi3gui.py
 scale_range                :    (0.1,3.1)
 ; Stop at max x.1 with x <= sqrt(Z/2)
 ; ex: 3.1 for Z stacks < 18 and 5.1 for Z stacks < 50
@@ -113,8 +97,6 @@ scale_step                 :    1.0
 alpha                      :    0.8
 beta                       :    0.5
 frangi_c                   :    30
-## 12.5 if normalized btw 0 and 1
-# 60.12506 # 500.0
 diff_mode                  :    indirect
 bright_on_dark             :    True
 method                     :    c
@@ -130,7 +112,6 @@ hist_step_length
 ; in micron
 number_of_bins_length
 #:    5
-; by default, 5
 ; extensions generally do not exceed 30 microns
 
 ; if the above parameters are set to None, you can directly use the boundaries of your choice for the histogram:
diff --git a/search_parameters.py b/search_parameters.py
index 9b3c740519ad7ba8bdda4e65530ea3b9fb584744..7fd20c0bef222e31ec87506242bbfbe9d31b6fa0 100644
--- a/search_parameters.py
+++ b/search_parameters.py
@@ -52,8 +52,9 @@ def IfNotFloat(section: str, key: str) -> Union[Union[float, str], Any]:
 data_path = parameters['Input']['data_path']
 channel = parameters['Input']['channel']
 size_voxel_in_micron = IfNotFloat('Input', 'size_voxel_in_micron')
-crop_image = IfNotFloat('Input', 'crop_image')
+dilatation_erosion = parameters.getboolean('Input', 'dilatation_erosion')
 save_images = parameters['Input']['save_images']
+save_csv = parameters['Input']['save_csv']
 statistical_analysis = parameters.getboolean('Input', 'statistical_analysis')
 
 # [Somas]
@@ -69,7 +70,6 @@ ext_low_c = IfNotFloat('Extensions', 'ext_low_c')
 ext_high_c = IfNotFloat('Extensions', 'ext_high_c')
 ext_selem_micron_c = IfNotFloat('Extensions', 'ext_selem_micron_c')
 ext_min_area_c = IfNotFloat('Extensions', 'ext_min_area_c')
-ext_max_length_c = IfNotFloat('Extensions', 'ext_max_length_c')
 
 # [Connexions]
 max_straight_sq_dist_c = IfNotFloat('Connexions', 'max_straight_sq_dist_c')
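For completeness, a self-contained sketch of how the two new `[Input]` keys are consumed: `getboolean` parses the flag, while `save_csv` stays a plain string that the pipeline later compares to None. The inline config mimics parameters.ini; `allow_no_value=True` is an assumption made here to accommodate the ini's valueless keys such as `size_voxel_in_micron`.

```python
import configparser

parameters = configparser.ConfigParser(allow_no_value=True)
parameters.read_string(r"""
[Input]
save_csv : \\path_where_to_save_features_csv
dilatation_erosion : False
""")

dilatation_erosion = parameters.getboolean('Input', 'dilatation_erosion')
save_csv = parameters['Input']['save_csv']
assert dilatation_erosion is False
assert save_csv.endswith('path_where_to_save_features_csv')
```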