# Copyright CNRS/Inria/UNS
# Contributor(s): Eric Debreuve (since 2019), Morgane Nadal (2020)
#
# eric.debreuve@cnrs.fr
#
# This software is governed by the CeCILL license under French law and
# abiding by the rules of distribution of free software. You can use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty and the software's author, the holder of the
# economic rights, and the successive licensors have only limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading, using, modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean that it is complicated to manipulate, and that also
# therefore means that it is reserved for developers and experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and, more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.
import re as re_
import numpy as np_
import math as mt_
import scipy.stats as st_
import pandas as pd_
from brick.component.soma import soma_t
from brick.general.type import array_t
import brick.processing.best_fit_ellipsoid as bf_
import brick.processing.input as in_
from typing import Tuple, Dict, Union, Any
def FindGraphsRootWithEdges(soma: soma_t, ext_nfo: Dict[str, Union[array_t, Any]]) -> dict:
"""
Find, for a given soma, the root nodes of its extension graphs.
"""
# For a given soma, find the roots of the graphs
root_nodes = {}
# Finds the primary extensions
primary_extension_uids = tuple(extension.uid for extension in soma.extensions)
# print(primary_extension_uids, '\nn = ', len(primary_extension_uids))
# Loop over the edges and keep those touching a degree-1 node of the graph
for node1_id, node2_id, edge_nfo in soma.skl_graph.edges.data('as_edge_t'):
if (soma.skl_graph.degree[node1_id] == 1) or (soma.skl_graph.degree[node2_id] == 1):
# Find the pixels of the terminal extension
sites = ext_nfo['lmp_soma'][edge_nfo.sites]
ext_uid = np_.unique(sites)[-1]
# sites > 0 because ext_nfo['lmp'] does not contain the connexions
# Save the root node candidates (one-degree nodes)
if ext_uid in primary_extension_uids:
if soma.skl_graph.degree[node1_id] == 1:
root_node = node1_id
else:
root_node = node2_id
# Get the node coordinates and extend them to the 26 neighboring voxels
root_node_coor = GetNodesCoordinates((root_node,))[0] # tuple('x-y-z') -> list[(x,y,z)]
root_sites = set(
(root_node_coor[0] + i, root_node_coor[1] + j, root_node_coor[2] + k)
for i in (-1, 0, 1)
for j in (-1, 0, 1)
for k in (-1, 0, 1)
if i != 0 or j != 0 or k != 0)
# Find the intersection between the extended root node candidate and the soma contour points
intersections = set(soma.contour_points).intersection(root_sites)
# If the graph root sites intersect the soma extension sites (non-null intersection):
if len(intersections) > 0:
# Keep the info of the root node. Key = ext uid, Value = root node
root_nodes[ext_uid] = root_node
# By construction, only one root node is possible per extension
return root_nodes  # TODO: find out why there are fewer root points than extensions!
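# A minimal, self-contained sketch of the root test used above (all coordinates
# hypothetical): a degree-1 node is considered a root when at least one of its
# 26 neighboring voxels lies on the soma contour.
#
#     contour = {(10, 5, 3), (11, 5, 3)}   # hypothetical soma contour voxels
#     node = (10, 6, 3)                    # candidate root node (degree 1)
#     neighbors = {
#         (node[0] + i, node[1] + j, node[2] + k)
#         for i in (-1, 0, 1)
#         for j in (-1, 0, 1)
#         for k in (-1, 0, 1)
#         if (i, j, k) != (0, 0, 0)
#     }
#     is_root = bool(contour & neighbors)  # True: the node touches the soma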
def FindGraphsRootWithNodes(soma: soma_t) -> dict:
"""
Find the roots of the {extension+connexion} graphs to be linked to the soma.
Adds a "root" key (bool) to the dict of node attributes.
"""
# Find the nodes of degree == 1, and the str coordinates of the nodes
node_degree_bool = tuple(degree == 1 for _, degree in soma.skl_graph.degree)
node_coord = tuple(xyz for xyz, _ in soma.skl_graph.degree)
root_nodes = {}
# get the coordinates of the nodes (x,y,z)
coordinates = GetNodesCoordinates(node_coord)
# Get a list of (extension_uid, root coordinates) pairs, whose length is the number of primary extensions
roots = soma.ext_roots
# For each node in the graph, search among the degree-1 nodes for those that are roots (i.e. linked to the soma)
for node in range(len(coordinates)):
if node_degree_bool[node]:
# compare the coor with end points
for ext_root in roots:
if ext_root[1] == coordinates[node]:
root_nodes[ext_root[0]] = node_coord[node]
if len(root_nodes) != len(roots):
    # raise ValueError("Number of extensions roots not equal to number of graph roots.")
    print(f"\nNumber of extension roots ({len(root_nodes)}) is not equal to the number of graph roots ({len(roots)}).")

return root_nodes
def GetNodesCoordinates(node_coord: Tuple[str, ...]) -> list:
    """
    Input: nodes attributes -> Tuple('x1-y1-z1', 'x2-y2-z2', ...).
    Output: coordinates -> List[Tuple(x1,y1,z1), Tuple(x2,y2,z2), ...]
    """
    coord = []
    for node_str in node_coord:
        # Extract the three integer coordinates from the 'x-y-z' string
        digits = re_.findall(r"\d+", node_str)
        coord.append(tuple(int(digit) for digit in digits[:3]))

    return coord
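# Example (hypothetical values):
#     GetNodesCoordinates(('12-5-3', '0-7-42'))  ->  [(12, 5, 3), (0, 7, 42)]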
def ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron: list, bins_length: array_t, bins_curvature: array_t, scale_map: array_t, decimals: int = 4, condition=None, duration=None) -> pd_.DataFrame:
"""
Extract the features from somas and graphs.
NADAL Morgane
committed
Returns a pandas dataframe, without NaN.
# Store the condition and duration of the image
if (condition is None) or (duration is None):
    Condition = re_.findall(r"[A-Z]{3}", name_file)[0]
    Duration = re_.findall(r"\dH", name_file)
    # Find out whether the duration is given in hours or in weeks
    if len(Duration) == 0:
        Duration = re_.findall(r"\dW", name_file)[0]
    else:
        Duration = Duration[0]
else:
    Condition, Duration = condition, duration
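# Example (hypothetical file name): "CHO_1H_D1" yields Condition "CHO" and
# Duration "1H"; a name containing "1W" instead would yield Duration "1W".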
somas_features_dict = {} # Dict{soma 1: [features], soma 2: [features], ...}
columns = [
"condition",
"duration",
"coef_V_soma__V_convex_hull",
"coef_axes_ellips_b__a",
"coef_axes_ellips_c__a",
"spherical_angles_eva",
"spherical_angles_evb",
"N_nodes",
"N_ext",
"N_primary_ext",
"N_sec_ext",
"min_degree",
"mean_degree",
"median_degree",
"max_degree",
"std_degree",
"total_ext_length",
"min_length",
"mean_length",
"median_length",
"max_length",
"std_lengths",
"entropy_lengths",
"hist_lengths",
"min_thickness",
"mean_thickness",
"median_thickness",
"max_thickness",
"std_thickness",
"entropy_thickness",
"min_volume",
"mean_volume",
"median_volume",
"max_volume",
"std_volume",
"entropy_volume",
"min_curvature",
"max_curvature",
"mean_curvature",
"median_curvature",
"std_curvature",
"entropy_curvature",
"min_torsion",
"max_torsion",
"mean_torsion",
"median_torsion",
"std_torsion",
"entropy_torsion",
"total_ext_length_P",
"min_length_P",
"mean_length_P",
"median_length_P",
"max_length_P",
"std_lengths_P",
"entropy_lengths_P",
"hist_lengths_P",
"min_thickness_P",
"mean_thickness_P",
"median_thickness_P",
"max_thickness_P",
"std_thickness_P",
"entropy_thickness_P",
"min_volume_P",
"mean_volume_P",
"median_volume_P",
"max_volume_P",
"std_volume_P",
"entropy_volume_P",
"min_curvature_P",
"max_curvature_P",
"mean_curvature_P",
"median_curvature_P",
"std_curvature_P",
"entropy_curvature_P",
"min_torsion_P",
"max_torsion_P",
"mean_torsion_P",
"median_torsion_P",
"std_torsion_P",
"entropy_torsion_P",
"total_ext_length_S",
"min_length_S",
"mean_length_S",
"median_length_S",
"max_length_S",
"std_lengths_S",
"entropy_lengths_S",
"hist_lengths_S",
"min_thickness_S",
"mean_thickness_S",
"median_thickness_S",
"max_thickness_S",
"std_thickness_S",
"entropy_thickness_S",
"min_volume_S",
"mean_volume_S",
"median_volume_S",
"max_volume_S",
"std_volume_S",
"entropy_volume_S",
"min_curvature_S",
"max_curvature_S",
"mean_curvature_S",
"median_curvature_S",
"std_curvature_S",
"entropy_curvature_S",
"min_torsion_S",
"max_torsion_S",
"mean_torsion_S",
"median_torsion_S",
"std_torsion_S",
"entropy_torsion_S",
]

for soma in somas:
# -- Soma features
# Axes of the best fitting ellipsoid
_, _, soma.axes_ellipsoid, _, spherical_coor,_, volume_convex_hull = bf_.FindBestFittingEllipsoid3D(soma)
# These ratios give information about the shape of the soma, e.g. rather flat, rather potato-shaped, rather spherical...
if isinstance(soma.axes_ellipsoid[0], str):
Coef_axes_ellips_b__a = None
Coef_axes_ellips_c__a = None
spherical_angles_eva = None
spherical_angles_evb = None
soma.volume_soma_micron = None
Coef_V_soma__V_convex_hull = None
else:
Coef_axes_ellips_b__a = soma.axes_ellipsoid[0] / soma.axes_ellipsoid[2]
Coef_axes_ellips_c__a = soma.axes_ellipsoid[1] / soma.axes_ellipsoid[2]
# Spherical angles give the orientation of the somas in the 3D volume
spherical_angles_eva = (spherical_coor[0][1], spherical_coor[0][2])
spherical_angles_evb = (spherical_coor[1][1], spherical_coor[1][2])
# Volume of the soma in micron**3
soma.volume_soma_micron = in_.ToMicron(len(soma.sites[0]), size_voxel_in_micron, dimension=(0, 1, 2), decimals=2)
# Compute the volume of the soma's convex hull in voxel volume,
# taking into account the anisotropy of the 3D space (volume = x * y * z with z > x = y)
volume_convex_hull = volume_convex_hull * size_voxel_in_micron[2] / size_voxel_in_micron[0]
# Volume of the soma / Volume of its convex hull gives the info about the convexity of the soma
# If close to 0, the soma has a lot of invaginations, if close to 1, it is smooth and convex
Coef_V_soma__V_convex_hull = len(soma.sites[0]) / round(volume_convex_hull + 0.5)
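# For instance (hypothetical numbers): a soma of 5000 voxels whose convex
# hull holds 6250 voxels gives a coefficient of 0.8, i.e. a fairly convex soma.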
# Graph features
# Number of nodes, excluding the nodes constructed between the soma node and the roots
N_nodes = soma.skl_graph.n_nodes - len(soma.graph_roots)
# Number of edges, excluding the edges constructed between the soma node and the roots
N_ext = soma.skl_graph.n_edges - len(soma.graph_roots)
# Number of primary edges (i.e. linked to the soma), excluding the constructed ones
N_primary_ext = len(soma.graph_roots)
# Number of secondary edges (i.e. not linked to the soma)
N_sec_ext = N_ext - N_primary_ext
f" Soma {soma.uid}\n"
f"N nodes = {N_nodes}\n"
f"N edges = {N_ext}\n"
f"N primary extensions = {N_primary_ext}\n"
f"N secondary extensions = {N_sec_ext}\n"
)
if N_primary_ext > 0:
# Calculate the extensions lengths
ext_lengths = list(soma.skl_graph.edge_lengths)
for idx, length in enumerate(ext_lengths):
ext_lengths[idx] = in_.ToMicron(length, size_voxel_in_micron, decimals=decimals)
total_ext_length = in_.ToMicron(soma.skl_graph.length, size_voxel_in_micron, decimals=decimals)
#
# Lengths histogram
hist_lengths = np_.histogram(ext_lengths, bins_length)[0]
#
# min, mean, median, max and standard deviation over ALL extensions
min_length = in_.ToMicron(soma.skl_graph.min_length, size_voxel_in_micron, decimals=decimals)
mean_length = in_.ToMicron(soma.skl_graph.mean_length, size_voxel_in_micron, decimals=decimals)
median_length = in_.ToMicron(soma.skl_graph.median_length, size_voxel_in_micron, decimals=decimals)
max_length = in_.ToMicron(soma.skl_graph.max_length, size_voxel_in_micron, decimals=decimals)
std_lengths = np_.std(ext_lengths)
if any(length > 0 for length in ext_lengths):
    entropy_lengths = st_.entropy(ext_lengths)
else:
    entropy_lengths = 0
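# Note: st_.entropy normalizes its input to a probability distribution
# (dividing by its sum) before computing the Shannon entropy, so the raw
# length values can be passed directly.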
# Find the thickness of the extensions
for ___, ___, edge in soma.skl_graph.edges.data("as_edge_t"):
if edge is not None:
edge.widths = scale_map[edge.sites] * size_voxel_in_micron[1]
mean_widths = soma.skl_graph.edge_reduced_widths()
ext_thickness = np_.array(mean_widths) ** 2
min_thickness = min(ext_thickness)
mean_thickness = np_.mean(ext_thickness)
median_thickness = np_.median(ext_thickness)
max_thickness = max(ext_thickness)
std_thickness = np_.std(ext_thickness)
if any(thickness > 0 for thickness in ext_thickness):
    entropy_thickness = st_.entropy(ext_thickness)
else:
    entropy_thickness = 0
# Find the volume of the extensions
ext_volume = np_.array(ext_lengths) * ext_thickness
min_volume = min(ext_volume)
mean_volume = np_.mean(ext_volume)
median_volume = np_.median(ext_volume)
max_volume = max(ext_volume)
std_volume = np_.std(ext_volume)
if any(volume > 0 for volume in ext_volume):
    entropy_volume = st_.entropy(ext_volume)
else:
    entropy_volume = 0
#
# Curvature and Torsion
curvatures, torsions = soma.skl_graph.curvature_and_torsion(size_voxel=size_voxel_in_micron)
#
min_curvature = min(curvatures)
max_curvature = max(curvatures)
mean_curvature = np_.mean(curvatures)
median_curvature = np_.median(curvatures)
std_curvature = np_.std(curvatures)
if any(curvature > 0 for curvature in curvatures):
    entropy_curvature = st_.entropy(curvatures)
else:
    entropy_curvature = 0
hist_curvature = np_.histogram(curvatures, bins_curvature)[0]
#
min_torsion = min(torsions)
max_torsion = max(torsions)
mean_torsion = np_.mean(torsions)
median_torsion = np_.median(torsions)
std_torsion = np_.std(torsions)
if any(torsion > 0 for torsion in torsions):
    entropy_torsion = st_.entropy(torsions)
else:
    entropy_torsion = 0
# PRIMARY extensions
ext_lengths_P = list(soma.skl_graph.primary_edge_lengths(soma))
for idx, length in enumerate(ext_lengths_P):
ext_lengths_P[idx] = in_.ToMicron(length, size_voxel_in_micron, decimals=decimals)
total_ext_length_P = sum(ext_lengths_P)
#
# Lengths histogram
hist_lengths_P = np_.histogram(ext_lengths_P, bins_length)[0]
#
# min, mean, median, max and standard deviation of the PRIMARY extensions
min_length_P = min(ext_lengths_P)
mean_length_P = np_.mean(ext_lengths_P)
median_length_P = np_.median(ext_lengths_P)
max_length_P = max(ext_lengths_P)
std_lengths_P = np_.std(ext_lengths_P)
if any(length > 0 for length in ext_lengths_P):
    entropy_lengths_P = st_.entropy(ext_lengths_P)
else:
    entropy_lengths_P = 0
#
mean_widths_P = soma.skl_graph.P_edge_reduced_widths(soma)
ext_thickness_P = np_.array(mean_widths_P) ** 2
min_thickness_P = min(ext_thickness_P)
mean_thickness_P = np_.mean(ext_thickness_P)
median_thickness_P = np_.median(ext_thickness_P)
max_thickness_P = max(ext_thickness_P)
std_thickness_P = np_.std(ext_thickness_P)
if any(thickness > 0 for thickness in ext_thickness_P):
    entropy_thickness_P = st_.entropy(ext_thickness_P)
else:
    entropy_thickness_P = 0
#
ext_volume_P = np_.array(ext_lengths_P) * ext_thickness_P
min_volume_P = min(ext_volume_P)
mean_volume_P = np_.mean(ext_volume_P)
median_volume_P = np_.median(ext_volume_P)
max_volume_P = max(ext_volume_P)
std_volume_P = np_.std(ext_volume_P)
if any(volume > 0 for volume in ext_volume_P):
    entropy_volume_P = st_.entropy(ext_volume_P)
else:
    entropy_volume_P = 0
#
# Curvature and Torsion
curvatures_P, torsions_P = soma.skl_graph.P_curvature_and_torsion(size_voxel=size_voxel_in_micron, soma=soma)
#
min_curvature_P = min(curvatures_P)
max_curvature_P = max(curvatures_P)
mean_curvature_P = np_.mean(curvatures_P)
median_curvature_P = np_.median(curvatures_P)
std_curvature_P = np_.std(curvatures_P)
if any(curvature > 0 for curvature in curvatures_P):
    entropy_curvature_P = st_.entropy(curvatures_P)
else:
    entropy_curvature_P = 0
hist_curvature_P = np_.histogram(curvatures_P, bins_curvature)[0]
#
min_torsion_P = min(torsions_P)
max_torsion_P = max(torsions_P)
mean_torsion_P = np_.mean(torsions_P)
median_torsion_P = np_.median(torsions_P)
std_torsion_P = np_.std(torsions_P)
if any(torsion > 0 for torsion in torsions_P):
    entropy_torsion_P = st_.entropy(torsions_P)
else:
    entropy_torsion_P = 0
# Secondary extensions
if N_sec_ext > 0:
# min, mean, median, max and standard deviation of the degrees of non-leaf nodes
min_degree = soma.skl_graph.min_degree_except_leaves_and_roots
mean_degree = soma.skl_graph.mean_degree_except_leaves_and_roots
median_degree = soma.skl_graph.median_degree_except_leaves_and_roots
max_degree = soma.skl_graph.max_degree_except_leaves_and_roots
std_degree = soma.skl_graph.std_degree_except_leaves_and_roots
# SECONDARY extensions length
ext_lengths_S = list(soma.skl_graph.secondary_edge_lengths(soma))
for idx, length in enumerate(ext_lengths_S):
ext_lengths_S[idx] = in_.ToMicron(length, size_voxel_in_micron, decimals=decimals)
total_ext_length_S = sum(ext_lengths_S)
#
# Lengths histogram
hist_lengths_S = np_.histogram(ext_lengths_S, bins_length)[0]
#
# min, mean, median, max and standard deviation of the SECONDARY extensions
min_length_S = min(ext_lengths_S)
mean_length_S = np_.mean(ext_lengths_S)
median_length_S = np_.median(ext_lengths_S)
max_length_S = max(ext_lengths_S)
std_lengths_S = np_.std(ext_lengths_S)
if any(length > 0 for length in ext_lengths_S):
    entropy_lengths_S = st_.entropy(ext_lengths_S)
else:
    entropy_lengths_S = 0
#
mean_widths_S = soma.skl_graph.S_edge_reduced_widths(soma)
ext_thickness_S = np_.array(mean_widths_S) ** 2
min_thickness_S = min(ext_thickness_S)
mean_thickness_S = np_.mean(ext_thickness_S)
median_thickness_S = np_.median(ext_thickness_S)
max_thickness_S = max(ext_thickness_S)
std_thickness_S = np_.std(ext_thickness_S)
if any(thickness > 0 for thickness in ext_thickness_S):
    entropy_thickness_S = st_.entropy(ext_thickness_S)
else:
    entropy_thickness_S = 0
ext_volume_S = np_.array(ext_lengths_S) * ext_thickness_S
min_volume_S = min(ext_volume_S)
mean_volume_S = np_.mean(ext_volume_S)
median_volume_S = np_.median(ext_volume_S)
max_volume_S = max(ext_volume_S)
std_volume_S = np_.std(ext_volume_S)
if any(volume > 0 for volume in ext_volume_S):
    entropy_volume_S = st_.entropy(ext_volume_S)
else:
    entropy_volume_S = 0
#
# Curvature and Torsion
curvatures_S, torsions_S = soma.skl_graph.S_curvature_and_torsion(size_voxel=size_voxel_in_micron, soma=soma)
#
min_curvature_S = min(curvatures_S)
max_curvature_S = max(curvatures_S)
mean_curvature_S = np_.mean(curvatures_S)
median_curvature_S = np_.median(curvatures_S)
std_curvature_S = np_.std(curvatures_S)
if any(curvature > 0 for curvature in curvatures_S):
    entropy_curvature_S = st_.entropy(curvatures_S)
else:
    entropy_curvature_S = 0
hist_curvature_S = np_.histogram(curvatures_S, bins_curvature)[0]
#
min_torsion_S = min(torsions_S)
max_torsion_S = max(torsions_S)
mean_torsion_S = np_.mean(torsions_S)
median_torsion_S = np_.median(torsions_S)
std_torsion_S = np_.std(torsions_S)
if any(torsion > 0 for torsion in torsions_S):
    entropy_torsion_S = st_.entropy(torsions_S)
else:
    entropy_torsion_S = 0
else:
    # If there are no secondary extensions, give default values to the parameters
    # min, mean, median, max and standard deviation of the degrees of non-leaf nodes
min_degree = 1
mean_degree = 1
median_degree = 1
max_degree = 1
std_degree = 0
total_ext_length_S = 0
min_length_S = 0
mean_length_S = 0
median_length_S = 0
max_length_S = 0
std_lengths_S = 0
entropy_lengths_S = 0
hist_lengths_S = 0
#
min_thickness_S = 0
mean_thickness_S = 0
median_thickness_S = 0
max_thickness_S = 0
std_thickness_S = 0
entropy_thickness_S = 0
min_volume_S = 0
mean_volume_S = 0
median_volume_S = 0
max_volume_S = 0
std_volume_S = 0
entropy_volume_S = 0
#
min_curvature_S = -1
max_curvature_S = -1
mean_curvature_S = -1
median_curvature_S = -1
std_curvature_S = 0
entropy_curvature_S = 0
#
min_torsion_S = -1
max_torsion_S = -1
mean_torsion_S = -1
median_torsion_S = -1
std_torsion_S = 0
entropy_torsion_S = 0
else:
    # If there are no primary extensions, give default values to all parameters
    min_degree = 0
    mean_degree = 0
    median_degree = 0
    max_degree = 0
    std_degree = 0
    total_ext_length = 0
#
min_length = 0
mean_length = 0
median_length = 0
max_length = 0
std_lengths = 0
entropy_lengths = 0
hist_lengths = 0
min_thickness = 0
mean_thickness = 0
median_thickness = 0
max_thickness = 0
std_thickness = 0
entropy_thickness = 0
min_volume = 0
mean_volume = 0
median_volume = 0
max_volume = 0
std_volume = 0
entropy_volume = 0
min_curvature = -1
max_curvature = -1
mean_curvature = -1
median_curvature = -1
std_curvature = 0
entropy_curvature = 0
min_torsion = -1
max_torsion = -1
mean_torsion = -1
median_torsion = -1
std_torsion = 0
entropy_torsion = 0
total_ext_length_P = 0
min_length_P = 0
mean_length_P = 0
median_length_P = 0
max_length_P = 0
std_lengths_P = 0
entropy_lengths_P = 0
hist_lengths_P = 0
min_thickness_P = 0
mean_thickness_P = 0
median_thickness_P = 0
max_thickness_P = 0
std_thickness_P = 0
entropy_thickness_P = 0
min_volume_P = 0
mean_volume_P = 0
median_volume_P = 0
max_volume_P = 0
std_volume_P = 0
entropy_volume_P = 0
min_curvature_P = -1
max_curvature_P = -1
mean_curvature_P = -1
median_curvature_P = -1
std_curvature_P = 0
entropy_curvature_P = 0
min_torsion_P = -1
max_torsion_P = -1
mean_torsion_P = -1
median_torsion_P = -1
std_torsion_P = 0
entropy_torsion_P = 0
total_ext_length_S = 0
min_length_S = 0
mean_length_S = 0
median_length_S = 0
max_length_S = 0
std_lengths_S = 0
entropy_lengths_S = 0
hist_lengths_S = 0
min_thickness_S = 0
mean_thickness_S = 0
median_thickness_S = 0
max_thickness_S = 0
std_thickness_S = 0
entropy_thickness_S = 0
min_volume_S = 0
mean_volume_S = 0
median_volume_S = 0
max_volume_S = 0
std_volume_S = 0
entropy_volume_S = 0
min_curvature_S = -1
max_curvature_S = -1
mean_curvature_S = -1
median_curvature_S = -1
std_curvature_S = 0
entropy_curvature_S = 0
min_torsion_S = -1
max_torsion_S = -1
mean_torsion_S = -1
median_torsion_S = -1
std_torsion_S = 0
entropy_torsion_S = 0
# Create a dictionary with all the features for every soma
somas_features_dict[name_file + f" soma {soma.uid}"] = [
Condition,
Duration,
Coef_V_soma__V_convex_hull,
Coef_axes_ellips_b__a,
Coef_axes_ellips_c__a,
spherical_angles_eva,
spherical_angles_evb,
N_nodes,
N_ext,
N_primary_ext,
N_sec_ext,
min_degree,
mean_degree,
median_degree,
max_degree,
std_degree,
total_ext_length,
min_length,
mean_length,
median_length,
max_length,
std_lengths,
entropy_lengths,
hist_lengths,
min_thickness,
mean_thickness,
median_thickness,
max_thickness,
std_thickness,
entropy_thickness,
min_volume,
mean_volume,
median_volume,
max_volume,
std_volume,
entropy_volume,
min_curvature,
max_curvature,
mean_curvature,
median_curvature,
std_curvature,
entropy_curvature,
min_torsion,
max_torsion,
mean_torsion,
median_torsion,
std_torsion,
entropy_torsion,
total_ext_length_P,
min_length_P,
mean_length_P,
median_length_P,
max_length_P,
std_lengths_P,
entropy_lengths_P,
hist_lengths_P,
min_thickness_P,
mean_thickness_P,
median_thickness_P,
max_thickness_P,
std_thickness_P,
entropy_thickness_P,
min_volume_P,
mean_volume_P,
median_volume_P,
max_volume_P,
std_volume_P,
entropy_volume_P,
min_curvature_P,
max_curvature_P,
mean_curvature_P,
median_curvature_P,
std_curvature_P,
entropy_curvature_P,
min_torsion_P,
max_torsion_P,
mean_torsion_P,
median_torsion_P,
std_torsion_P,
entropy_torsion_P,
total_ext_length_S,
min_length_S,
mean_length_S,
median_length_S,
max_length_S,
std_lengths_S,
entropy_lengths_S,
hist_lengths_S,
min_thickness_S,
mean_thickness_S,
median_thickness_S,
max_thickness_S,
std_thickness_S,
entropy_thickness_S,
min_volume_S,
mean_volume_S,
median_volume_S,
max_volume_S,
std_volume_S,
entropy_volume_S,
min_curvature_S,
max_curvature_S,
mean_curvature_S,
median_curvature_S,
std_curvature_S,
entropy_curvature_S,
min_torsion_S,
max_torsion_S,
mean_torsion_S,
median_torsion_S,
std_torsion_S,
entropy_torsion_S,
]
# Convert the dictionary into a pandas dataframe
features_df = pd_.DataFrame.from_dict(somas_features_dict, orient="index", columns=columns)
return features_df
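# A minimal usage sketch (all names and values below are hypothetical; the bins
# and the scale map must match the ones produced by the skeletonization step):
#
#     bins_length = np_.linspace(0.0, 100.0, 11)
#     bins_curvature = np_.linspace(0.0, 1.0, 11)
#     features_df = ExtractFeaturesInDF(
#         "CHO_1H_D1", somas, size_voxel_in_micron=[0.35, 0.35, 1.5],
#         bins_length=bins_length, bins_curvature=bins_curvature,
#         scale_map=scale_map)
#     features_df.to_csv("features.csv")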