# Copyright CNRS/Inria/UNS
# Contributor(s): Eric Debreuve (since 2019), Morgane Nadal (2020)
#
# eric.debreuve@cnrs.fr
#
# This software is governed by the CeCILL  license under French law and
# abiding by the rules of distribution of free software.  You can  use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and  rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty  and the software's author,  the holder of the
# economic rights,  and the successive licensors  have only  limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading,  using,  modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean  that it is complicated to manipulate,  and  that  also
# therefore means  that it is reserved for developers  and  experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and,  more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.

from typing import Any, Dict, Sequence, Union

import numpy as nmpy
import pandas as pd_
import scipy.stats as st_
from logger_36 import LOGGER

import brick.task.ellipsoid_fit as bf_
import brick.task.unit
from brick.type.base import array_t
from brick.type.soma import soma_t


def FindGraphsRootWithEdges(
    soma: soma_t, ext_nfo: Dict[str, Union[array_t, Any]]
) -> dict:
    """
    Finds the soma roots of the graph extension.
    """
    # For a given soma, find the roots of the graphs
    root_nodes = {}

    # Find the uids of the primary extensions
    primary_extension_uids = tuple(extension.uid for extension in soma.extensions)

    # Iterate over the edges that have a degree-1 endpoint
    for node1_id, node2_id, edge_nfo in soma.skl_graph.edges.data("details"):
        if (soma.skl_graph.degree[node1_id] == 1) or (
            soma.skl_graph.degree[node2_id] == 1
        ):
            # Find the voxels of the terminal extension
            sites = ext_nfo["lmp_soma"][edge_nfo.sites]
            # nmpy.unique returns the sorted labels, so [-1] picks the
            # extension uid; the labels are > 0 because ext_nfo['lmp'] does
            # not contain the connections
            ext_uid = nmpy.unique(sites)[-1]

            # Save the root node candidates (one-degree nodes)
            if ext_uid in primary_extension_uids:
                if soma.skl_graph.degree[node1_id] == 1:
                    root_node = node1_id
                else:
                    root_node = node2_id

                # Get the node coordinates (x, y, z); they are extended below
                # to the 26 neighboring voxels
                root_node_coor = soma.skl_graph.nodes[root_node][
                    "details"
                ].position.tolist()

                root_sites = set(
                    (
                        root_node_coor[0] + i,
                        root_node_coor[1] + j,
                        root_node_coor[2] + k,
                    )
                    for i in (-1, 0, 1)
                    for j in (-1, 0, 1)
                    for k in (-1, 0, 1)
                    if i != 0 or j != 0 or k != 0
                )
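                # (3**3 - 1 = 26 neighbor offsets; the node's own voxel is
                # excluded by the condition on i, j, k above.)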

                # Find the intersection between the extended root node candidate and the soma contour points
                intersections = set(soma.contour_points).intersection(root_sites)

                # If the candidate root sites touch the soma contour (non-empty intersection):
                if len(intersections) > 0:
                    # Keep the root node. Key = extension uid, Value = root node
                    root_nodes[ext_uid] = root_node
                    # By construction, only one root node is possible per extension

    # TODO: find out why there are fewer root points than extensions
    return root_nodes


def FindGraphsRootWithNodes(soma: soma_t) -> dict:
    """
    Find the roots of the {extension+connexion} graphs to be lined to the soma.
    Add a key "root" (bool) in the dict of nodes attributes.
    """
    is_end_point = []
    node_label = []
    coordinates = []
    for label, degree in soma.skl_graph.degree:
        is_end_point.append(degree == 1)
        node_label.append(label)
        coordinates.append(
            tuple(soma.skl_graph.nodes[label]["details"].position.tolist())
        )

    root_nodes = {}

    # Get a list of (extension_uid, root coordinates) elements, whose length is the number of primary extensions
    roots = soma.ext_roots

    # For each degree-1 node, check whether its coordinates match an extension
    # root (i.e., whether the node is linked to the soma)
    for node, node_coordinates in enumerate(coordinates):
        if is_end_point[node]:
            for ext_root in roots:
                if ext_root[1] == node_coordinates:
                    root_nodes[ext_root[0]] = node_label[node]

    if len(root_nodes) != len(roots):
        # raise ValueError("Number of extensions roots not equal to number of graph roots.")
        LOGGER.warning(
            f"Number of extension roots ({len(root_nodes)}) is not equal to "
            f"the number of graph roots ({len(roots)})."
        )

    return root_nodes
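

# (Note: FindGraphsRootWithEdges and FindGraphsRootWithNodes are two
# alternatives for building the same {extension uid: root node} mapping:
# the former works from the edge sites, the latter from node coordinates.)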


# def _GetNodesCoordinates(node_coord: Tuple[str, ...]) -> list:
#     """
#     Input: nodes attributes -> Tuple('x1-y1-z1', 'x2-y2-z2', ...) .
#     Output: coordinates -> List[Tuple(x1,y1,z1), Tuple(x2,y2,z2), ...]
#     """
#     coord = []
#     for c in node_coord:
#         coord.append(c)
#
#     for node in range(len(node_coord)):
#         coord_node = coord[node]
#         pattern = r"\d+"
#         coord_node = re_.findall(pattern, coord_node)
#         coor = []
#         for i in range(3):
#             coor.append(int(coord_node[i]))
#         coor = tuple(coor)
#         coord[node] = coor
#
#     return coord

FEATURE_SHEET_COLUMNS = [
    "condition",
    "duration",
    "soma uid",
    "coef_V_soma__V_convex_hull",
    "coef_axes_ellips_b__a",
    "coef_axes_ellips_c__a",
    "spherical_angles_eva",
    "spherical_angles_evb",
    #
    "N_nodes",
    "N_ext",
    "N_primary_ext",
    "N_sec_ext",
    "min_degree",
    "mean_degree",
    "median_degree",
    "max_degree",
    "std_degree",
    #
    "total_ext_length",
    "min_length",
    "mean_length",
    "median_length",
    "max_length",
    "std_lengths",
    "entropy_lengths",
    "hist_lengths",
    "min_thickness",
    "mean_thickness",
    "median_thickness",
    "max_thickness",
    "std_thickness",
    "entropy_thickness",
    "min_volume",
    "mean_volume",
    "median_volume",
    "max_volume",
    "std_volume",
    "entropy_volume",
    "min_curvature",
    "max_curvature",
    "mean_curvature",
    "median_curvature",
    "std_curvature",
    "entropy_curvature",
    "hist_curvature",
    #
    "total_ext_length_P",
    "min_length_P",
    "mean_length_P",
    "median_length_P",
    "max_length_P",
    "std_lengths_P",
    "entropy_lengths_P",
    "hist_lengths_P",
    "min_thickness_P",
    "mean_thickness_P",
    "median_thickness_P",
    "max_thickness_P",
    "std_thickness_P",
    "entropy_thickness_P",
    "min_volume_P",
    "mean_volume_P",
    "median_volume_P",
    "max_volume_P",
    "std_volume_P",
    "entropy_volume_P",
    "min_curvature_P",
    "max_curvature_P",
    "mean_curvature_P",
    "median_curvature_P",
    "std_curvature_P",
    "entropy_curvature_P",
    "hist_curvature_P",
    #
    "total_ext_length_S",
    "min_length_S",
    "mean_length_S",
    "median_length_S",
    "max_length_S",
    "std_lengths_S",
    "entropy_lengths_S",
    "hist_lengths_S",
    "min_thickness_S",
    "mean_thickness_S",
    "median_thickness_S",
    "max_thickness_S",
    "std_thickness_S",
    "entropy_thickness_S",
    "min_volume_S",
    "mean_volume_S",
    "median_volume_S",
    "max_volume_S",
    "std_volume_S",
    "entropy_volume_S",
    "min_curvature_S",
    "max_curvature_S",
    "mean_curvature_S",
    "median_curvature_S",
    "std_curvature_S",
    "entropy_curvature_S",
    "hist_curvature_S",
]
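
# Suffix convention, mirrored by the variable names in ExtractFeaturesInDF:
# no suffix = statistics over all extensions, "_P" = primary extensions
# (attached to the soma), "_S" = secondary extensions.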


def ExtractFeaturesInDF(
    name_file,
    Condition,
    Duration,
    somas,
    voxel_size_in_micron: array_t,
    bins_length: array_t,
    bins_curvature: array_t,
    scale_map: array_t,
    decimals: int = 4,
    _=None,  # condition
    __=None,  # duration
):
    """
    Extract the features from somas and graphs.

    Store the condition and duration of the image
    if (condition is None) or (duration is None):
    Condition = re_.findall(r"[A-Z]{3}", name_file)[0]
    Duration = re_.findall(r"\dH", name_file)
    Find our whether the duration is in hours or in weeks
    if Duration.__len__() == 0:
        Duration = re_.findall(r"\dW", name_file)[0]
    else:
        Duration = Duration[0]

    Returns a pandas dataframe, without NaN.
    """
    somas_features_dict = {}  # Dict{soma 1: [features], soma 2: [features], ...}

    (
        min_degree,
        mean_degree,
        median_degree,
        max_degree,
        std_degree,
        total_ext_length_S,
        min_length_S,
        mean_length_S,
        median_length_S,
        max_length_S,
        std_lengths_S,
        entropy_lengths_S,
        hist_lengths_S,
        min_thickness_S,
        mean_thickness_S,
        median_thickness_S,
        max_thickness_S,
        std_thickness_S,
        entropy_thickness_S,
        min_volume_S,
        mean_volume_S,
        median_volume_S,
        max_volume_S,
        std_volume_S,
        entropy_volume_S,
        min_curvature_S,
        max_curvature_S,
        mean_curvature_S,
        median_curvature_S,
        std_curvature_S,
        entropy_curvature_S,
        hist_curvature_S,
    ) = 32 * (None,)
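    # (Defensive pre-binding: each of these features is reassigned in the loop
    # below; binding them to None first guarantees they are always defined.)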

    for soma in somas:
        # -- Soma features

        # Axes of the best fitting ellipsoid: a > b > c
        (
            _,
            _,
            soma.axes_ellipsoid,
            _,
            spherical_coor,
            _,
            volume_convex_hull,
        ) = bf_.FindBestFittingEllipsoid3D(soma)

        # These ratios describe the shape of the soma, e.g. rather flat, rather potato-shaped, rather spherical...
        if isinstance(soma.axes_ellipsoid[0], str):
            (
                Coef_axes_ellips_b__a,
                Coef_axes_ellips_c__a,
                spherical_angles_eva,
                spherical_angles_evb,
                soma.volume_soma_micron,
                Coef_V_soma__V_convex_hull,
            ) = 6 * (None,)
        else:
            Coef_axes_ellips_b__a = soma.axes_ellipsoid[0] / soma.axes_ellipsoid[2]
            Coef_axes_ellips_c__a = soma.axes_ellipsoid[1] / soma.axes_ellipsoid[2]

            # Spherical angles give the orientation of the soma in the 3D volume
            spherical_angles_eva = (spherical_coor[0][1], spherical_coor[0][2])
            spherical_angles_evb = (spherical_coor[1][1], spherical_coor[1][2])

            # Volume of the soma in micron**3
            soma.volume_soma_micron = brick.task.unit.ToMicron(
                len(soma.sites[0]),
                voxel_size_in_micron,
                dimension=(0, 1, 2),
                decimals=2,
            )
            # Calculate the volume of the soma's convex hull in voxel volume,
            # taking the anisotropy of the 3D space into account
            # (voxel volume = x * y * z with z > x = y)
            volume_convex_hull = (
                volume_convex_hull * voxel_size_in_micron[2] / voxel_size_in_micron[0]
            )

            # Volume of the soma / volume of its convex hull quantifies the convexity of the soma:
            # if close to 0, the soma has many invaginations; if close to 1, it is smooth and convex
            Coef_V_soma__V_convex_hull = len(soma.sites[0]) / round(
                volume_convex_hull + 0.5
            )
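            # (For example, 800 soma voxels inside a 1000-voxel convex hull
            # give a coefficient of 0.8, i.e. a fairly convex soma.)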

        # -- Extension features
        # Graph features

        # Number of nodes, excluding the ones constructed from the soma node to the roots
        N_nodes = soma.skl_graph.n_nodes - len(soma.graph_roots)
        # Number of edges, excluding the ones constructed from the soma node to the roots
        N_ext = soma.skl_graph.n_edges - len(soma.graph_roots)
        # Number of primary edges, i.e. edges linked to the soma
        N_primary_ext = len(soma.graph_roots)
        # Number of secondary edges, i.e. edges not linked to the soma
        N_sec_ext = N_ext - N_primary_ext

        LOGGER.info(
            f"Soma {soma.uid}\n"
            f"N nodes = {N_nodes}\n"
            f"N edges = {N_ext}\n"
            f"N primary extensions = {N_primary_ext}\n"
            f"N secondary extensions = {N_sec_ext}\n"
        )

        if N_primary_ext > 0:
            ext_lengths = [_elm[-1] for _elm in soma.skl_graph.lengths]
            for idx, length in enumerate(ext_lengths):
                ext_lengths[idx] = brick.task.unit.ToMicron(
                    length, voxel_size_in_micron, decimals=decimals
                )
            total_ext_length = brick.task.unit.ToMicron(
                soma.skl_graph.length, voxel_size_in_micron, decimals=decimals
            )

            (
                min_length,
                max_length,
                median_length,
                mean_length,
                std_lengths,
                hist_lengths,
                entropy_lengths,
            ) = _Statistics_mMdashe(ext_lengths, bins=bins_length)
            # (ext_lengths were converted to microns above, so the statistics
            # returned by _Statistics_mMdashe are already in microns; converting
            # them again, as the _P and _S branches below confirm, would
            # double-scale them.)

            # Find the thickness of the extensions
            for *_, edge in soma.skl_graph.edges.data("details"):
                if edge is not None:
                    edge.widths = scale_map[edge.sites] * voxel_size_in_micron[1]
            mean_widths = soma.skl_graph.edge_reduced_widths()
            ext_thickness = nmpy.array(mean_widths) ** 2
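            # (The squared mean width serves as a cross-section proxy, so that
            # length * thickness below approximates an extension volume.)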
            (
                min_thickness,
                max_thickness,
                median_thickness,
                mean_thickness,
                std_thickness,
                _,
                entropy_thickness,
            ) = _Statistics_mMdashe(ext_thickness)

            # Find the volume of the extensions
            ext_volume = nmpy.array(ext_lengths) * ext_thickness
            (
                min_volume,
                max_volume,
                median_volume,
                mean_volume,
                std_volume,
                _,
                entropy_volume,
            ) = _Statistics_mMdashe(ext_volume)

            curvatures = soma.skl_graph.curvature_and_torsion()
            (
                min_curvature,
                max_curvature,
                median_curvature,
                mean_curvature,
                std_curvature,
                hist_curvature,
                entropy_curvature,
            ) = _Statistics_mMdashe(curvatures, bins=bins_curvature)

            # PRIMARY extensions
            ext_lengths_P = list(soma.skl_graph.primary_edge_lengths(soma))
            for idx, length in enumerate(ext_lengths_P):
                ext_lengths_P[idx] = brick.task.unit.ToMicron(
                    length, voxel_size_in_micron, decimals=decimals
                )
            total_ext_length_P = sum(ext_lengths_P)
            (
                min_length_P,
                max_length_P,
                median_length_P,
                mean_length_P,
                std_lengths_P,
                hist_lengths_P,
                entropy_lengths_P,
            ) = _Statistics_mMdashe(ext_lengths_P, bins=bins_length)

            mean_widths_P = soma.skl_graph.P_edge_reduced_widths(soma)
            ext_thickness_P = nmpy.array(mean_widths_P) ** 2
            (
                min_thickness_P,
                max_thickness_P,
                median_thickness_P,
                mean_thickness_P,
                std_thickness_P,
                _,
                entropy_thickness_P,
            ) = _Statistics_mMdashe(ext_thickness_P)

            ext_volume_P = nmpy.array(ext_lengths_P) * ext_thickness_P
            (
                min_volume_P,
                max_volume_P,
                median_volume_P,
                mean_volume_P,
                std_volume_P,
                _,
                entropy_volume_P,
            ) = _Statistics_mMdashe(ext_volume_P)

            curvatures_P = soma.skl_graph.P_curvature_and_torsion(soma)
            (
                min_curvature_P,
                max_curvature_P,
                median_curvature_P,
                mean_curvature_P,
                std_curvature_P,
                hist_curvature_P,
                entropy_curvature_P,
            ) = _Statistics_mMdashe(curvatures_P, bins=bins_curvature)

            # Secondary extensions
            if N_sec_ext > 0:
                # min, mean, median, max and standard deviation of the degrees of non-leaf nodes
                min_degree = soma.skl_graph.min_degree_except_leaves_and_roots
                mean_degree = soma.skl_graph.mean_degree_except_leaves_and_roots
                median_degree = soma.skl_graph.median_degree_except_leaves_and_roots
                max_degree = soma.skl_graph.max_degree_except_leaves_and_roots
                std_degree = soma.skl_graph.std_degree_except_leaves_and_roots

                # SECONDARY extensions length
                ext_lengths_S = list(soma.skl_graph.secondary_edge_lengths(soma))
                for idx, length in enumerate(ext_lengths_S):
                    ext_lengths_S[idx] = brick.task.unit.ToMicron(
                        length, voxel_size_in_micron, decimals=decimals
                    )
                total_ext_length_S = sum(ext_lengths_S)

                (
                    min_length_S,
                    max_length_S,
                    median_length_S,
                    mean_length_S,
                    std_lengths_S,
                    hist_lengths_S,
                    entropy_lengths_S,
                ) = _Statistics_mMdashe(ext_lengths_S, bins=bins_length)

                mean_widths_S = soma.skl_graph.S_edge_reduced_widths(soma)
                ext_thickness_S = nmpy.array(mean_widths_S) ** 2
                (
                    min_thickness_S,
                    max_thickness_S,
                    median_thickness_S,
                    mean_thickness_S,
                    std_thickness_S,
                    _,
                    entropy_thickness_S,
                ) = _Statistics_mMdashe(ext_thickness_S)

                ext_volume_S = nmpy.array(ext_lengths_S) * ext_thickness_S
                (
                    min_volume_S,
                    max_volume_S,
                    median_volume_S,
                    mean_volume_S,
                    std_volume_S,
                    _,
                    entropy_volume_S,
                ) = _Statistics_mMdashe(ext_volume_S)

                curvatures_S = soma.skl_graph.S_curvature_and_torsion(soma)
                (
                    min_curvature_S,
                    max_curvature_S,
                    median_curvature_S,
                    mean_curvature_S,
                    std_curvature_S,
                    hist_curvature_S,
                    entropy_curvature_S,
                ) = _Statistics_mMdashe(curvatures_S, bins=bins_curvature)

            # If there are no secondary extensions, assign default values to the parameters
            if N_sec_ext == 0:
                # min, mean, median, max and standard deviation of the degrees of non-leaf nodes
                min_degree = 1
                mean_degree = 1
                median_degree = 1
                max_degree = 1
                std_degree = 0

                total_ext_length_S = 0
                min_length_S = 0
                mean_length_S = 0
                median_length_S = 0
                max_length_S = 0
                std_lengths_S = 0
                entropy_lengths_S = 0
                hist_lengths_S = 0

                min_thickness_S = 0
                mean_thickness_S = 0
                median_thickness_S = 0
                max_thickness_S = 0
                std_thickness_S = 0
                entropy_thickness_S = 0

                min_volume_S = 0
                mean_volume_S = 0
                median_volume_S = 0
                max_volume_S = 0
                std_volume_S = 0
                entropy_volume_S = 0

                min_curvature_S = -1
                max_curvature_S = -1
                mean_curvature_S = -1
                median_curvature_S = -1
                std_curvature_S = 0
                entropy_curvature_S = 0
                hist_curvature_S = 0
        else:
            min_degree = 0
            mean_degree = 0
            median_degree = 0
            max_degree = 0
            std_degree = 0

            total_ext_length = 0
            min_length = 0
            mean_length = 0
            median_length = 0
            max_length = 0
            std_lengths = 0
            entropy_lengths = 0
            hist_lengths = 0

            min_thickness = 0
            mean_thickness = 0
            median_thickness = 0
            max_thickness = 0
            std_thickness = 0
            entropy_thickness = 0

            min_volume = 0
            mean_volume = 0
            median_volume = 0
            max_volume = 0
            std_volume = 0
            entropy_volume = 0

            min_curvature = -1
            max_curvature = -1
            mean_curvature = -1
            median_curvature = -1
            std_curvature = 0
            entropy_curvature = 0
            hist_curvature = 0

            total_ext_length_P = 0
            min_length_P = 0
            mean_length_P = 0
            median_length_P = 0
            max_length_P = 0
            std_lengths_P = 0
            entropy_lengths_P = 0
            hist_lengths_P = 0

            min_thickness_P = 0
            mean_thickness_P = 0
            median_thickness_P = 0
            max_thickness_P = 0
            std_thickness_P = 0
            entropy_thickness_P = 0

            min_volume_P = 0
            mean_volume_P = 0
            median_volume_P = 0
            max_volume_P = 0
            std_volume_P = 0
            entropy_volume_P = 0

            min_curvature_P = -1
            max_curvature_P = -1
            mean_curvature_P = -1
            median_curvature_P = -1
            std_curvature_P = 0
            entropy_curvature_P = 0
            hist_curvature_P = 0

            total_ext_length_S = 0
            min_length_S = 0
            mean_length_S = 0
            median_length_S = 0
            max_length_S = 0
            std_lengths_S = 0
            entropy_lengths_S = 0
            hist_lengths_S = 0

            min_thickness_S = 0
            mean_thickness_S = 0
            median_thickness_S = 0
            max_thickness_S = 0
            std_thickness_S = 0
            entropy_thickness_S = 0

            min_volume_S = 0
            mean_volume_S = 0
            median_volume_S = 0
            max_volume_S = 0
            std_volume_S = 0
            entropy_volume_S = 0

            min_curvature_S = -1
            max_curvature_S = -1
            mean_curvature_S = -1
            median_curvature_S = -1
            std_curvature_S = 0
            entropy_curvature_S = 0
            hist_curvature_S = 0

        # Create a dictionary with all the features for each soma
        somas_features_dict[name_file + f" soma {soma.uid}"] = [
            Condition,
            Duration,
            soma.uid,
            Coef_V_soma__V_convex_hull,
            Coef_axes_ellips_b__a,
            Coef_axes_ellips_c__a,
            spherical_angles_eva,
            spherical_angles_evb,
            N_nodes,
            N_ext,
            N_primary_ext,
            N_sec_ext,
            min_degree,
            mean_degree,
            median_degree,
            max_degree,
            std_degree,
            #
            total_ext_length,
            min_length,
            mean_length,
            median_length,
            max_length,
            std_lengths,
            entropy_lengths,
            hist_lengths,
            min_thickness,
            mean_thickness,
            median_thickness,
            max_thickness,
            std_thickness,
            entropy_thickness,
            min_volume,
            mean_volume,
            median_volume,
            max_volume,
            std_volume,
            entropy_volume,
            min_curvature,
            max_curvature,
            mean_curvature,
            median_curvature,
            std_curvature,
            entropy_curvature,
            hist_curvature,
            #
            total_ext_length_P,
            min_length_P,
            mean_length_P,
            median_length_P,
            max_length_P,
            std_lengths_P,
            entropy_lengths_P,
            hist_lengths_P,
            min_thickness_P,
            mean_thickness_P,
            median_thickness_P,
            max_thickness_P,
            std_thickness_P,
            entropy_thickness_P,
            min_volume_P,
            mean_volume_P,
            median_volume_P,
            max_volume_P,
            std_volume_P,
            entropy_volume_P,
            min_curvature_P,
            max_curvature_P,
            mean_curvature_P,
            median_curvature_P,
            std_curvature_P,
            entropy_curvature_P,
            hist_curvature_P,
            #
            total_ext_length_S,
            min_length_S,
            mean_length_S,
            median_length_S,
            max_length_S,
            std_lengths_S,
            entropy_lengths_S,
            hist_lengths_S,
            min_thickness_S,
            mean_thickness_S,
            median_thickness_S,
            max_thickness_S,
            std_thickness_S,
            entropy_thickness_S,
            min_volume_S,
            mean_volume_S,
            median_volume_S,
            max_volume_S,
            std_volume_S,
            entropy_volume_S,
            min_curvature_S,
            max_curvature_S,
            mean_curvature_S,
            median_curvature_S,
            std_curvature_S,
            entropy_curvature_S,
            hist_curvature_S,
        ]

    features_df = pd_.DataFrame.from_dict(
        somas_features_dict, orient="index", columns=FEATURE_SHEET_COLUMNS
    )

    return features_df
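

# A minimal usage sketch (hypothetical call site; the actual pipeline builds
# the somas, bins, and scale map upstream):
#     features_df = ExtractFeaturesInDF(
#         name_file, condition, duration, somas,
#         voxel_size_in_micron, bins_length, bins_curvature, scale_map,
#     )
#     features_df.to_csv(name_file + "-features.csv")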


_Median = lambda _msr: nmpy.median(_msr).item()
_Mean = lambda _msr: nmpy.mean(_msr).item()
_Std = lambda _msr: nmpy.std(_msr).item()
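# (These wrappers call .item() to convert numpy scalar results to plain Python
# floats.)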


def _Statistics_mMdashe(measures: Sequence, /, *, bins=None) -> tuple:
    """
    Compute summary statistics of the measures. Returns, in order:
    (min, max, median, mean, std, histogram, entropy).
    """
    if bins is None:
        bins = {}
    else:
        bins = {"bins": bins}
    histogram = nmpy.histogram(measures, **bins)[0]
    histogram = tuple(histogram.tolist())
    entropy = st_.entropy(histogram).item()

    output = [
        _StatisticOf(measures) for _StatisticOf in (min, max, _Median, _Mean, _Std)
    ]
    output.extend((histogram, entropy))

    return tuple(output)
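

# A minimal usage sketch of _Statistics_mMdashe (hypothetical values):
#     mn, mx, median, mean, std, hist, entropy = _Statistics_mMdashe(
#         (1.0, 2.0, 2.0, 5.0), bins=nmpy.array((0.0, 2.0, 4.0, 6.0))
#     )
#     # hist == (1, 2, 1): one value in [0, 2), two in [2, 4), one in [4, 6];
#     # entropy is scipy.stats.entropy applied to these counts.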