# Copyright CNRS/Inria/UNS
# Contributor(s): Eric Debreuve (since 2019), Morgane Nadal (2020)
#
# eric.debreuve@cnrs.fr
#
# This software is governed by the CeCILL  license under French law and
# abiding by the rules of distribution of free software.  You can  use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and  rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty  and the software's author,  the holder of the
# economic rights,  and the successive licensors  have only  limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading,  using,  modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean  that it is complicated to manipulate,  and  that  also
# therefore means  that it is reserved for developers  and  experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and,  more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.

# Time profiling:
#     python -m cProfile -o runtime/profiling.log -s name main.py
# Memory profiling:
#     python -m memory_profiler main.py
#     or
#     mprof run main.py
#     mprof plot


import brick.component.connection as cn_
import brick.component.extension as xt_
import brick.component.soma as sm_
import brick.general.feedback as fb_
import brick.processing.input as in_
import brick.processing.plot as po_
import brick.processing.map_labeling as ml_
# import brick.processing.image_verification as iv_
from sklgraph.skl_fgraph import skl_graph_t
from sklgraph.skl_map import skl_map_t
import brick.processing.graph_feat_extraction as ge_

import os as os_
import sys as sy_
import time as tm_
import pathlib
import psutil as mu_

import matplotlib.pyplot as pl_
import numpy as np_
import imageio as io_
from skimage.segmentation import relabel_sequential
import skimage.morphology as mp_
import skimage.measure as ms_
import skimage.exposure as ex_
import pandas as pd_
import networkx as nx_

print(sy_.argv, len(sy_.argv))

if len(sy_.argv) < 2:
    print("Missing parameter file argument")
    sy_.exit(1)
if not (os_.path.isfile(sy_.argv[1]) and os_.access(sy_.argv[1], os_.R_OK)):
    print("Wrong parameter file path or parameter file unreadable")
    sy_.exit(1)
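# Example invocation (parameter file name per the exec() call below):
#     python main.py search_parameters.py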

data_path = None
channel = None
size_voxel_in_micron = None
soma_low_c = None
soma_high_c = None
soma_selem_micron_c = None
soma_min_area_c = None
soma_max_area_c = None
adapt_hist_equalization = None
ext_low_c = None
ext_high_c = None
ext_selem_micron_c = None
ext_min_area_c = None
ext_max_length_c = None
max_straight_sq_dist_c = None
max_weighted_length_c = None
scale_range = None
scale_step = None
alpha = None
beta = None
frangi_c = None
diff_mode = None
bright_on_dark = None
method = None
hist_min_length = None
hist_step_length = None
number_of_bins_length = None
hist_bins_borders_length = None
hist_min_curvature = None
hist_step_curvature = None
number_of_bins_curvature = None
hist_bins_borders_curvature = None
with_plot = None
in_parallel = None

# Load the parameters by executing the parameter file (only with search_parameters.py, the stable version)
with open(sy_.argv[1]) as parameter_file:
    exec(parameter_file.read())
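# The parameter file is a plain Python script assigning the globals declared above, e.g.
# (illustrative values only):
#     data_path = "path/to/image.tif"
#     with_plot = False
#     ...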

soma_t = sm_.soma_t
extension_t = xt_.extension_t

print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
start_time = tm_.time()

# TODO: add proper warnings with the warnings module
# --- Images

# Find the name of the file, i.e. the tested biological condition
name_file = os_.path.basename(data_path)
print("FILE: ", name_file)
name_file = name_file.replace(".tif", "")

name_dir = os_.path.dirname(data_path)
print("DIR: ", name_dir)

# Find the dimension of the image voxels in micron
# TODO: generalize; this is too specific to one microscope's metadata
size_voxel_in_micron = in_.FindVoxelDimensionInMicron(data_path, size_voxel_in_micron=size_voxel_in_micron)

# Read the image
image = io_.volread(data_path)
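# For a 3D TIFF, io_.volread returns the whole stack as a numpy array (typically indexed z-y-x)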

# Image size verification - simple version without user interface
image = in_.ImageVerification(image, channel)

# iv_.image_verification(image, channel)  # -> PySide2 user interface  # TODO: must return the modified image!
# /!\ conflicts between some versions of PySide2 and Python3

img_shape = image.shape

#
print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")

# Intensity relative normalization (between 0 and 1).
# Image for soma normalized for hysteresis
image_for_soma = in_.IntensityNormalizedImage(image)
# Image for extensions left unnormalized for the Frangi enhancement
image_for_ext = image.copy()

print(f"NRM-IMG: t.{image_for_soma.dtype} m.{image_for_soma.min():.2f} M.{image_for_soma.max():.2f}")

# --- Initialization
som_nfo = {}
ext_nfo = {}
axes = {}

#
# --- Somas
print("\n--- Soma Detection")

# Change the soma parameters from micron to pixel dimensions, using the previously determined voxel size
soma_min_area_c = in_.ToPixel(soma_min_area_c, size_voxel_in_micron, dimension=(0, 1))
soma_max_area_c = in_.ToPixel(soma_max_area_c, size_voxel_in_micron, dimension=(0, 1))
soma_selem_c = mp_.disk(in_.ToPixel(soma_selem_micron_c, size_voxel_in_micron))
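# Illustrative example, assuming ToPixel simply divides microns by the voxel size and rounds:
# with 0.5 micron voxels, soma_selem_micron_c = 2 would yield a disk of radius 4 pixels.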

# - Create the maps for enhancing and detecting the soma
# Hysteresis thresholding and closing/opening
som_nfo["map_small"] = soma_t.Map(image_for_soma, soma_low_c, soma_high_c, soma_selem_c)
# Deletion of too small elements
som_nfo["map_small"], som_lmp_small = soma_t.DeleteSmallSomas(som_nfo["map_small"], soma_min_area_c)
# Deletion of too big elements (probably aggregation of cells)
# Kept separate from the deletion of small elements so that extensions are not detected where too-big somas were just deleted
som_nfo["map"], som_lmp = soma_t.DeleteBigSomas(som_nfo["map_small"], som_lmp_small, soma_max_area_c)

# Labelling of somas, find the number of somas
# som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)

# Use relabel_sequential instead of label to speed up the algorithm. /!\ Known bug.
som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
n_somas = som_nfo["lmp"].max()

# /!\ Debug code: keep only the soma labeled id_soma and discard all the others
id_soma = 1
for soma_id in range(n_somas + 1):
    if soma_id != id_soma:
        print(soma_id)
        som_nfo["lmp"][som_nfo["lmp"] == soma_id] = 0
som_nfo["lmp"], n_somas = ms_.label(som_nfo["lmp"], return_num=True)

# po_.MaximumIntensityProjectionZ(som_nfo["lmp"])

# Create the distance and influence maps used next for soma-extension and extension-extension connections
som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])

# po_.MaximumIntensityProjectionZ(som_nfo["influence_map"])

# Create the tuple somas, containing all the 'soma' objects
somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))

print(f"    n = {n_somas}")

elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotSomas(somas, som_nfo, axes)

#
# -- Extensions
print("\n--- Extension Detection")

# Set to zero the pixels belonging to the somas.
# Also take into account cell aggregates detected as one big soma, to avoid creating extensions there
del_soma = (som_nfo["map_small"] > 0).nonzero()
image_for_ext[del_soma] = 0

# Change the extensions parameters from micron to pixel dimensions
ext_min_area_c = in_.ToPixel(ext_min_area_c, size_voxel_in_micron, dimension=(0, 1))
ext_max_length_c = in_.ToPixel(ext_max_length_c, size_voxel_in_micron, dimension=(0,))

if ext_selem_micron_c == 0:
    ext_selem_pixel_c = None
else:
    ext_selem_pixel_c = mp_.disk(in_.ToPixel(ext_selem_micron_c, size_voxel_in_micron))

# scale_range_pixel = []
# #
# for value in scale_range:
#     value_in_pixel = in_.ToPixel(value, size_voxel_in_micron, decimals=1)
#     scale_range_pixel.append(value_in_pixel)
# scale_range = tuple(scale_range_pixel)
#
# scale_step = in_.ToPixel(scale_step, size_voxel_in_micron, decimals=1)

# - Perform Frangi feature enhancement (via the Python implementation, or the faster C one)
enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
    image_for_ext,
    scale_range,
    scale_step,
    alpha,
    beta,
    frangi_c,
    bright_on_dark,
    method,
    diff_mode=diff_mode,
    in_parallel=in_parallel)
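# enhanced_ext is the vesselness response; ext_scales presumably stores, for each voxel, the
# scale that maximized the response (an assumption based on its later use in extension_t.FromMap)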

elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")

# po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")

# - Creation of the enhanced maps
# enhanced_ext = ex_.adjust_log(enhanced_ext, 1)  # Not necessary, log contrast adjustment

# Enhance the contrast of the Frangi output
if adapt_hist_equalization:
    # Necessary but not sufficient: adaptive histogram equalization
    enhanced_ext = ex_.equalize_adapthist(enhanced_ext)
else:
    # Necessary but not sufficient: global histogram equalization
    enhanced_ext = ex_.equalize_hist(enhanced_ext)
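# Note: equalize_adapthist is skimage's CLAHE (contrast-limited adaptive histogram
# equalization), which equalizes local tiles, whereas equalize_hist uses the global histogram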

# po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")

# Rescale the image by stretching the intensities to [0, 255]; necessary but not sufficient
enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
# Hysteresis thresholding
ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)
# po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
# Deletion of too small elements
ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)
# po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
# Creation of the extensions skeleton
ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
# ext_nfo["map"] = extension_t.FilteredSkeletonMap(ext_nfo["coarse_map"], ext_max_length_c)
# po_.MaximumIntensityProjectionZ(ext_nfo["map"])
# Deletion of extensions found within the somas
ext_nfo["map"][som_nfo["map"] > 0] = 0
# Label the extensions and determine their number
ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)

# Use relabel instead of label to speed up the algorithm. /!\ But the number of extensions detected is then wrong!
# ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
# n_extensions = ext_nfo["lmp"].max()

# Create the tuple extensions, containing all the 'extension' objects
extensions = tuple(
    extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
    for uid in range(1, n_extensions + 1))

print(f"    n = {n_extensions}")

#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotExtensions(extensions, ext_nfo, img_shape)

# -- Preparation for Connections
# Calculate the costs of each pixel on the image
dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])

# -- Soma-Extension
print("\n--- Soma <-> Extension")

# Change the extensions parameters from micron to pixel dimensions
max_straight_sq_dist_c = in_.ToPixel(max_straight_sq_dist_c, size_voxel_in_micron)
max_weighted_length_c = in_.ToPixel(max_weighted_length_c, size_voxel_in_micron)

# Find the candidate extensions, with their endpoints, for connection to the somas
candidate_conn_nfo = cn_.CandidateConnections(
    somas,
    som_nfo["influence_map"],
    som_nfo["dist_to_closest"],
    extensions,
    max_straight_sq_dist_c,
)

# For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
for ep_idx, soma, extension, end_point in candidate_conn_nfo:
    # Only try to connect if the extension is not already connected
    if extension.is_unconnected:
        print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
        # Use of Dijkstra shortest weighted path
        path, length = cn_.ShortestPathFromToN(
            start_time,
            end_point,
            dijkstra_costs,
            soma.ContourPointsCloseTo,
            max_straight_sq_dist=max_straight_sq_dist_c,
        )

        # Keep the connection only if its length is below the allowed max weighted length
        if length <= max_weighted_length_c:
            cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
            print(": Made")
        else:
            print("")

# for soma in somas:
#     soma.Extend(extensions, som_nfo["dist_to_closest"], dijkstra_costs)

fb_.PrintConnectedExtensions(extensions)

#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotSomasWithExtensions(somas, som_nfo, "all")

# -- Extension-Extension
print("\n--- Extension <-> Extension")

should_look_for_connections = True

# Update the soma maps with the successive Ext-Ext connections
while should_look_for_connections:
    # Update the final somas + ext map by adding the new extensions and their connection paths
    som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
    # Update the influence map
    som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["soma_w_ext_lmp"])

    # Find the candidate extensions for connection to the primary extensions
    candidate_conn_nfo = cn_.CandidateConnections(
        somas,
        som_nfo["influence_map"],
        som_nfo["dist_to_closest"],
        extensions,
        max_straight_sq_dist_c,
    )

    should_look_for_connections = False

    # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
    for ep_idx, soma, extension, end_point in candidate_conn_nfo:
        # Only try to connect if the extension is not already connected
        if extension.is_unconnected:
            print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
            # Use of Dijkstra shortest weighted path
            path, length = cn_.ShortestPathFromToN(
                start_time,
                end_point,
                dijkstra_costs,
                soma.ExtensionPointsCloseTo,
                max_straight_sq_dist=max_straight_sq_dist_c,
            )

            # Keep the connection only if its length is below the allowed max weighted length
            if length <= max_weighted_length_c:
                # Verify that the last site of the connection path belongs to an extension;
                # if so, get the extension containing that site.
                tgt_extension = extension_t.ExtensionContainingSite(extensions, path[-1])
                # Validate the connection: update the glial component objects and store more info in their fields
                cn_.ValidateConnection(tgt_extension, extension, end_point, path, dijkstra_costs)

                should_look_for_connections = True

                print(": Made")
            else:
                print("")

# Create an extension map containing all the ext + connections, without the somas.
# Used for the graphs extraction
ext_nfo["lmp_soma"] = som_nfo['soma_w_ext_lmp'] - som_nfo['lmp']
#

fb_.PrintConnectedExtensions(extensions)

#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")

# -- Summary
print("\n")
for soma in somas:
    print(soma)

# snapshot = tr_.take_snapshot()
# top_file_stats = snapshot.statistics('lineno')
# print("Memory Profiling: Top 10 FILES")
# for stat in top_file_stats[:10]:
#     print(stat)
# top_block_stats = snapshot.statistics('traceback')
# top_block_stats = top_block_stats[0]
# print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
# for line in top_block_stats.traceback.format():
#     print(line)

elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    pl_.show()

po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'])
# , output_image_file_name=f"D:\\MorganeNadal\\Results\\Images\\{name_file}.png")

# --- Extract all the extensions of all somas as a graph
print('\n--- Graph extraction')

# Create the graphs with the SKLGraph module (available on Eric Debreuve's GitLab)
for soma in somas:
    print(f" Soma {soma.uid}", end="")
    # Create SKLGraph skeletonized map
    ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid,
                                     store_widths=True,
                                     skeletonize=False,
                                     do_post_thinning=False)
    # Set do_post_thinning=True to also remove pixels that do not break connectivity

    # Create the graph from the SKLGraph skeletonized map
    soma.skl_graph = skl_graph_t.FromSkeleton(ext_map, size_voxel=size_voxel_in_micron)

    # --- Find the root of the {ext+conn} graphs.
    # Roots are the nodes of degree 1 that are to be linked to the soma
    soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)

    # Add a node "soma" and link it to the root nodes
    soma_node = f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
    soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)

    for node in soma.graph_roots.values():
        soma.skl_graph.add_edge(node, soma_node, root=True)

    # nx_.draw_networkx(soma.skl_graph)
    # pl_.show(block=True)
    # pl_.close()
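    # Quick sanity checks on the resulting graph, assuming skl_graph_t exposes the networkx
    # Graph API (its add_node/add_edge usage above suggests it does); illustrative only:
    #     print(soma.skl_graph.number_of_nodes(), soma.skl_graph.number_of_edges())
    #     print(sorted(degree for _, degree in soma.skl_graph.degree()))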

    print(": Done")

elapsed_time = tm_.gmtime(tm_.time() - start_time)

# --- Extract features
print('\n--- Features Extraction\n')

# Parameters
if hist_bins_borders_length is None:
    number_of_bins_length = int(number_of_bins_length)
    bins_length = np_.linspace(hist_min_length, hist_min_length + hist_step_length * number_of_bins_length,
                               num=number_of_bins_length)
    bins_length[-1] = np_.inf
else:
    bins_length = np_.array(hist_bins_borders_length)
    bins_length[-1] = np_.inf

if hist_bins_borders_curvature is None:
    number_of_bins_curvature = int(number_of_bins_curvature)
    bins_curvature = np_.linspace(hist_min_curvature,
                                  hist_min_curvature + hist_step_curvature * number_of_bins_curvature,
                                  num=number_of_bins_curvature)
    bins_curvature[-1] = np_.inf
else:
    bins_curvature = np_.array(hist_bins_borders_curvature)
    bins_curvature[-1] = np_.inf
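# Note: linspace with num=n points spans n - 1 bins, so the finite bin width is
# hist_step * n / (n - 1), slightly larger than hist_step; the last border is then
# set to infinity so that the top bin catches all remaining values.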

# Pandas dataframe creation with all the measured features
features_df = ge_.ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron, bins_length, bins_curvature,
                                      ext_scales)

# Save the pandas df into .csv
features_df.to_csv(os_.path.join(name_dir, f"{name_file}.csv"))  # portable path join instead of a hard-coded backslash

#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")


#########----------- Executable version with files AND directories ------------###########

# def NutriMorph(data_path: str,
#         channel=channel,
#         size_voxel_in_micron=size_voxel_in_micron,
#         soma_low_c=soma_low_c,
#         soma_high_c=soma_high_c,
#         soma_selem_micron_c=soma_selem_micron_c,
#         soma_min_area_c=soma_min_area_c,
#         soma_max_area_c=soma_max_area_c,
#         adapt_hist_equalization=adapt_hist_equalization,
#         ext_low_c=ext_low_c,
#         ext_high_c=ext_high_c,
#         ext_selem_micron_c=ext_selem_micron_c,
#         ext_min_area_c=ext_min_area_c,
#         ext_max_length_c=ext_max_length_c,
#         max_straight_sq_dist_c=max_straight_sq_dist_c,
#         max_weighted_length_c=max_weighted_length_c,
#         scale_range=scale_range,
#         scale_step=scale_step,
#         alpha=alpha,
#         beta=beta,
#         frangi_c=frangi_c,
#         diff_mode=diff_mode,
#         bright_on_dark=bright_on_dark,
#         method=method,
#         hist_min_length=hist_min_length,
#         hist_step_length=hist_step_length,
#         number_of_bins_length=number_of_bins_length,
#         hist_bins_borders_length=hist_bins_borders_length,
#         hist_min_curvature=hist_min_curvature,
#         hist_step_curvature=hist_step_curvature,
#         number_of_bins_curvature=number_of_bins_curvature,
#         hist_bins_borders_curvature=hist_bins_borders_curvature,
#         with_plot=with_plot,
#         in_parallel=in_parallel) -> pd_.DataFrame:
#
#     soma_t = sm_.soma_t
#     extension_t = xt_.extension_t
#
#     print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
#     start_time = tm_.time()
#
#     # TODO: add proper warnings with the warnings module
#     # --- Images
#
#     # Find the name of the file, i.e. the tested biological condition
#     name_file = os_.path.basename(data_path)
#     print("FILE: ", name_file)
#     name_file = name_file.replace(".tif", "")
#
#     name_dir = os_.path.dirname(data_path)
#     print("DIR: ", name_dir)
#
#     # Find the dimension of the image voxels in micron
#     # TODO: generalize; this is too specific to one microscope's metadata
#     size_voxel_in_micron = in_.FindVoxelDimensionInMicron(data_path, size_voxel_in_micron=size_voxel_in_micron)
#
#     # Read the image
#     image = io_.volread(data_path)
#
#     # Image size verification - simple version without user interface
#     image = in_.ImageVerification(image, channel)
#
#     # iv_.image_verification(image, channel)  # -> PySide2 user interface  # TODO: must return the modified image!
#     # /!\ conflicts between some versions of PySide2 and Python3
#
#     image = image[:, 500:, 500:]  # /!\ Crop kept from debugging? The stable version above uses the full image
#     img_shape = image.shape
#
#     #
#     print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")
#
#     # Intensity relative normalization (between 0 and 1).
#     # Image for soma normalized for hysteresis
#     image_for_soma = in_.IntensityNormalizedImage(image)
#     # Image for extensions left unnormalized for the Frangi enhancement
#     image_for_ext = image.copy()
#
#     print(f"NRM-IMG: t.{image_for_soma.dtype} m.{image_for_soma.min():.2f} M.{image_for_soma.max():.2f}")
#
#     # --- Initialization
#     som_nfo = {}
#     ext_nfo = {}
#     axes = {}
#
#     #
#     # --- Somas
#     print("\n--- Soma Detection")
#
#     # Change the soma parameters from micron to pixel dimensions, using the previously determined voxel size
#     soma_min_area_c = in_.ToPixel(soma_min_area_c, size_voxel_in_micron, dimension=(0, 1))
#     soma_max_area_c = in_.ToPixel(soma_max_area_c, size_voxel_in_micron, dimension=(0, 1))
#     soma_selem_c = mp_.disk(in_.ToPixel(soma_selem_micron_c, size_voxel_in_micron))
#
#     # - Create the maps for enhancing and detecting the soma
#     # Hysteresis thresholding and closing/opening
#     som_nfo["map_small"] = soma_t.Map(image_for_soma, soma_low_c, soma_high_c, soma_selem_c)
#     # Deletion of too small elements
#     som_nfo["map_small"], som_lmp_small = soma_t.DeleteSmallSomas(som_nfo["map_small"], soma_min_area_c)
#     # Deletion of too big elements (probably aggregation of cells)
#     # Kept separate from the deletion of small elements so that extensions are not detected where too-big somas were just deleted
#     som_nfo["map"], som_lmp = soma_t.DeleteBigSomas(som_nfo["map_small"], som_lmp_small, soma_max_area_c)
#
#     # Labelling of somas, find the number of somas
#     # som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)
#
#     # Use relabel_sequential instead of label to speed up the algorithm. /!\ Known bug.
#     som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
#     n_somas = som_nfo["lmp"].max()
#
#     # po_.MaximumIntensityProjectionZ(som_nfo["lmp"])
#
#     # Create the distance and influence maps used next for soma-extension and extension-extension connections
#     som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])
#
#     # po_.MaximumIntensityProjectionZ(som_nfo["influence_map"])
#
#     # Create the tuple somas, containing all the 'soma' objects
#     somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))
#
#     print(f"    n = {n_somas}")
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotSomas(somas, som_nfo, axes)
#
#     #
#     # -- Extensions
#     print("\n--- Extension Detection")
#
#     # Set to zero the pixels belonging to the somas.
#     # Also take into account cell aggregates detected as one big soma, to avoid creating extensions there
#     del_soma = (som_nfo["map_small"] > 0).nonzero()
#     image_for_ext[del_soma] = 0
#
#     # Change the extensions parameters from micron to pixel dimensions
#     ext_min_area_c = in_.ToPixel(ext_min_area_c, size_voxel_in_micron, dimension=(0, 1))
#     ext_max_length_c = in_.ToPixel(ext_max_length_c, size_voxel_in_micron, dimension=(0,))
#
#     if ext_selem_micron_c == 0:
#         ext_selem_pixel_c = None
#     else:
#         ext_selem_pixel_c = mp_.disk(in_.ToPixel(ext_selem_micron_c, size_voxel_in_micron))
#
#     # scale_range_pixel = []
#     # #
#     # for value in scale_range:
#     #     value_in_pixel = in_.ToPixel(value, size_voxel_in_micron, decimals=1)
#     #     scale_range_pixel.append(value_in_pixel)
#     # scale_range = tuple(scale_range_pixel)
#     #
#     # scale_step = in_.ToPixel(scale_step, size_voxel_in_micron, decimals=1)
#
#     # - Perform Frangi feature enhancement (via the Python implementation, or the faster C one)
#     enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
#         image_for_ext,
#         scale_range,
#         scale_step,
#         alpha,
#         beta,
#         frangi_c,
#         bright_on_dark,
#         method,
#         diff_mode=diff_mode,
#         in_parallel=in_parallel)
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")
#
#     # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
#
#     # - Creation of the enhanced maps
#     # enhanced_ext = ex_.adjust_log(enhanced_ext, 1)  # Not necessary, log contrast adjustment
#
#     # Enhance the contrast of the Frangi output
#     if adapt_hist_equalization:
#         # Necessary but not sufficient: adaptive histogram equalization
#         enhanced_ext = ex_.equalize_adapthist(enhanced_ext)
#     else:
#         # Necessary but not sufficient: global histogram equalization
#         enhanced_ext = ex_.equalize_hist(enhanced_ext)
#
#     # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
#
#     # Rescale the image by stretching the intensities to [0, 255]; necessary but not sufficient
#     enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
#     # Hysteresis thresholding
#     ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)
#     # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
#     # Deletion of too small elements
#     ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)
#     # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
#     # Creation of the extensions skeleton
#     ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
#     # TODO delete too long extensions
#     # ext_nfo["map"] = extension_t.FilteredSkeletonMap(ext_nfo["coarse_map"], ext_max_length_c)
#     # po_.MaximumIntensityProjectionZ(ext_nfo["map"])
#     # Deletion of extensions found within the somas
#     ext_nfo["map"][som_nfo["map"] > 0] = 0
#     # Label the extensions and determine their number
#     ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)
#
#     # Use relabel instead of label to speed up the algorithm. /!\ But the number of extensions detected is then wrong!
#     # ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
#     # n_extensions = ext_nfo["lmp"].max()
#
#     # Create the tuple extensions, containing all the 'extension' objects
#     extensions = tuple(
#         extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
#         for uid in range(1, n_extensions + 1))
#
#     print(f"    n = {n_extensions}")
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotExtensions(extensions, ext_nfo, img_shape)
#
#     # -- Preparation for Connections
#     # Calculate the costs of each pixel on the image
#     dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
#
#     # -- Soma-Extension
#     print("\n--- Soma <-> Extension")
#
#     # Change the extensions parameters from micron to pixel dimensions
#     max_straight_sq_dist_c = in_.ToPixel(max_straight_sq_dist_c, size_voxel_in_micron)
#     max_weighted_length_c = in_.ToPixel(max_weighted_length_c, size_voxel_in_micron)
#
#     # Find the candidate extensions, with their endpoints, for connection to the somas
#     candidate_conn_nfo = cn_.CandidateConnections(
#         somas,
#         som_nfo["influence_map"],
#         som_nfo["dist_to_closest"],
#         extensions,
#         max_straight_sq_dist_c,
#     )
#
#     # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
#     for ep_idx, soma, extension, end_point in candidate_conn_nfo:
#         # Only try to connect if the extension is not already connected
#         if extension.is_unconnected:
#             print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
#             # Use of Dijkstra shortest weighted path
#             path, length = cn_.ShortestPathFromToN(
#                 start_time,
#                 end_point,
#                 dijkstra_costs,
#                 soma.ContourPointsCloseTo,
#                 max_straight_sq_dist=max_straight_sq_dist_c,
#             )
#
#             # Keep the connection only if its length is below the allowed max weighted length
#             if length <= max_weighted_length_c:
#                 cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
#                 print(": Made")
#             else:
#                 print("")
#
#     # for soma in somas:
#     #     soma.Extend(extensions, som_nfo["dist_to_closest"], dijkstra_costs)
#
#     fb_.PrintConnectedExtensions(extensions)
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotSomasWithExtensions(somas, som_nfo, "all")
#
#     # -- Extension-Extension
#     print("\n--- Extension <-> Extension")
#
#     should_look_for_connections = True
#
#     # Update the soma maps with the successive Ext-Ext connections
#     while should_look_for_connections:
#         # Update the final somas + ext map by adding the new extensions and their connection paths
#         som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
#         # Update the influence map
#         som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["soma_w_ext_lmp"])
#
#         # Find the candidate extensions for connection to the primary extensions
#         candidate_conn_nfo = cn_.CandidateConnections(
#             somas,
#             som_nfo["influence_map"],
#             som_nfo["dist_to_closest"],
#             extensions,
#             max_straight_sq_dist_c,
#         )
#
#         should_look_for_connections = False
#
#         # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
#         for ep_idx, soma, extension, end_point in candidate_conn_nfo:
#             # Only try to connect if the extension is not already connected
#             if extension.is_unconnected:
#                 print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
#                 # Use of Dijkstra shortest weighted path
#                 path, length = cn_.ShortestPathFromToN(
#                     start_time,
#                     end_point,
#                     dijkstra_costs,
#                     soma.ExtensionPointsCloseTo,
#                     max_straight_sq_dist=max_straight_sq_dist_c,
#                 )
#
#                 # Keep the connection only if its length is below the allowed max weighted length
#                 if length <= max_weighted_length_c:
#                     # Verify that the last site of the connection path belongs to an extension;
#                     # if so, get the extension containing that site.
#                     tgt_extension = extension_t.ExtensionContainingSite(extensions, path[-1])
#                     # Validate the connection: update the glial component objects and store more info in their fields
#                     cn_.ValidateConnection(tgt_extension, extension, end_point, path, dijkstra_costs)
#                     # TODO delete too long extensions
#
#                     should_look_for_connections = True
#
#                     print(": Made")
#                 else:
#                     print("")
#
#     # Create an extension map containing all the ext + connections, without the somas.
#     # Used for the graphs extraction
#     ext_nfo["lmp_soma"] = som_nfo['soma_w_ext_lmp'] - som_nfo['lmp']
#
#     fb_.PrintConnectedExtensions(extensions)
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")
#
#     # -- Summary
#     print("\n")
#     for soma in somas:
#         print(soma)
#
#     # snapshot = tr_.take_snapshot()
#     # top_file_stats = snapshot.statistics('lineno')
#     # print("Memory Profiling: Top 10 FILES")
#     # for stat in top_file_stats[:10]:
#     #     print(stat)
#     # top_block_stats = snapshot.statistics('traceback')
#     # top_block_stats = top_block_stats[0]
#     # print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
#     # for line in top_block_stats.traceback.format():
#     #     print(line)
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         pl_.show()
#
#     po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'])
#     # , output_image_file_name=f"D:\\MorganeNadal\\Results\\Images\\{name_file}.png")
#
#     # --- Extract all the extensions of all somas as a graph
#     print('\n--- Graph extraction')
#
#     # Create the graphs with the SKLGraph module (available on Eric Debreuve's GitLab)
#     for soma in somas:
#         print(f" Soma {soma.uid}", end="")
#         # Create SKLGraph skeletonized map
#         ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid, store_widths=True, skeletonize=False,
#                                          do_post_thinning=True)
#         # do_post_thinning=True removes pixels that do not break connectivity
#
#         # Create the graph from the SKLGraph skeletonized map
#         soma.skl_graph = skl_graph_t.FromSkeleton(ext_map, size_voxel=size_voxel_in_micron)
#
#         # --- Find the root of the {ext+conn} graphs.
#         # Roots are the nodes of degree 1 that are to be linked to the soma
#         soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)
#
#         # Add a node "soma" and link it to the root nodes
#         soma_node = f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
#         soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)
#
#         for node in soma.graph_roots.values():
#             soma.skl_graph.add_edge(node, soma_node, root=True)
#         #
#         # nx_.draw_networkx(soma.skl_graph)
#         # if with_plot:
#         #     pl_.show(block=True)
#         #
#         print(": Done")
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#
#     # --- Extract features
#     print('\n--- Features Extraction\n')
#
#     # Parameters
#     if hist_bins_borders_length is None:
#         number_of_bins_length = int(number_of_bins_length)
#         bins_length = np_.linspace(hist_min_length, hist_min_length + hist_step_length * number_of_bins_length,
#                                    num=number_of_bins_length)
#         bins_length[-1] = np_.inf
#     else:
#         bins_length = np_.array(hist_bins_borders_length)
#         bins_length[-1] = np_.inf
#
#     if hist_bins_borders_curvature is None:
#         number_of_bins_curvature = int(number_of_bins_curvature)
#         bins_curvature = np_.linspace(hist_min_curvature,
#                                       hist_min_curvature + hist_step_curvature * number_of_bins_curvature,
#                                       num=number_of_bins_curvature)
#         bins_curvature[-1] = np_.inf
#     else:
#         bins_curvature = np_.array(hist_bins_borders_curvature)
#         bins_curvature[-1] = np_.inf
#
#     # Pandas dataframe creation with all the measured features
#     features_df = ge_.ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron, bins_length, bins_curvature,
#                                           ext_scales)
#
#     # Save the pandas df into .csv
#     features_df.to_csv(os_.path.join(name_dir, f"{name_file}.csv"))  # portable path join
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#     print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
#
#     return features_df
#
#
# if __name__ == '__main__':
#
#     # --- Extract cell graphs and features from microscope images using the NutriMorph function.
#     #
#     # Differentiate between a path to a TIFF file and a path to a directory
#     if pathlib.Path(data_path).is_file():
#         # Perform NutriMorph algorithm on the file entered in parameters
#         features_df = NutriMorph(data_path)
#
#     elif pathlib.Path(data_path).is_dir():
#         # Keep the path of the parent directory
#         name_dir = os_.path.dirname(data_path)
#         # Initialize the future concatenated features
#         concatenated_features_df = pd_.DataFrame()
#         # Find all the TIFF files in the directory and its subdirectories
#         for path in pathlib.Path(data_path).glob("**/*.tif"):
#             if path.is_file():
#                 # Perform NutriMorph algorithm
#                 features_df = NutriMorph(path)
#                 # Concatenate all the dataframes (with pd_.concat: DataFrame.append is deprecated)
#                 concatenated_features_df = pd_.concat((concatenated_features_df, features_df))
#         # Save to .csv in the parent directory
#         concatenated_features_df.to_csv(f"{data_path}_features.csv")
#
#     # --- TODO Clustering with this df and module cell_clustering.py