Commit 7ce33225 authored by VIGNET Pierre

[cmd] Rewrite solutions2common_graph..

- to be renamed
- support for CSV, JSON, GraphML exports
parent f6dbe2ac
@@ -344,7 +344,7 @@ def get_solution_graphs(sol_steps, transitions):
def queries_2_common_graph(output_dir, model_file, path,
make_graphs=True, make_csv=False, make_json=True): # force option
make_graphs=True, make_csv=False, make_json=False):
"""Entry point for solutions_2_common_graph
Create a GraphML formatted file containing a unique representation of **all**
@@ -359,29 +359,107 @@ def queries_2_common_graph(output_dir, model_file, path,
:param output_dir: Output path.
:param model_file: Filepath of the model.
:param path: Filepath/directory of a/many complete solutions files.
:key make_graphs: (optional) Make a GraphML file for each query result in path.
default: True
:key make_csv: (optional) Make a **global** CSV for all query results in path.
default: False
:key make_json: (optional) Make a JSON dump of each query result in path.
default: False
:type output_dir: <str>
:type model_file: <str>
:type path: <str>
:type make_graphs: <boolean>
:type make_csv: <boolean>
:type make_json: <boolean>
"""
def write_csv(data):
"""Write given data in CSV file; then flush the file descriptor
.. note:: data is modified in place.
:param data: Data to be serialized in JSON.
"""
# Handle nested dictionaries by flattening them
data.update({
sub_key: sub_val
for key, val in data.items() if isinstance(val, dict)
for sub_key, sub_val in val.items()
})
writer.writerow(data)
f_d.flush()
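# Sketch of the flattening above, on a hypothetical record:
# {'query': 'Ax Bx', 'graph_data': {'max_degree': 3}} is updated in place so that
# 'max_degree': 3 also appears at the top level; the leftover nested dict is then
# skipped by the DictWriter below thanks to extrasaction="ignore".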
def do_magic(solution_file):
"""Build the common graph for one solution file and export it (GraphML/JSON/CSV)"""
# Get query string from the name of the solution file
query = get_query_from_filename(model_file, solution_file)
LOGGER.info("Processing %s query...", query)
# generator of ("Ax Bx", [['h2', 'h00'], ['h3'], ['h0', 'h1'], ['hlast']])
solutions = tuple(load_solutions(solution_file))
graphs = get_solution_graphs(
solutions,
model_transitions
)
# Get common graph
graph = merge_graphs(graphs)
if make_graphs:
# Write graph
export_graph(output_dir, query, '', graph)
if make_json or make_csv:
# Summary data for the JSON/CSV exports (similar to model_graph_info() ...)
# Frontiers
frontiers = set(it.chain(*[sol.split() for sol, _ in solutions]))
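# e.g. with the "Ax Bx" solution shown above, frontiers == {'Ax', 'Bx'}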
solutions_info = {
'modelFile': model_file,
'query': query,
'solutions': len(solutions), # nb trajectories/solutions
'boundaries': len(frontiers), # frontier places
}
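# e.g. (hypothetical values) {'modelFile': 'model.bcx', 'query': 'Ax Bx',
# 'solutions': 1, 'boundaries': 2} before the graph metrics are merged in below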
# Advanced metrics
get_solutions_graph_data(graph, solutions_info, True)
if make_json:
# Save to <solution_filename>_graph_summary.json
write_json(output_dir, solution_file, "_graph_summary", solutions_info)
if make_csv:
# Export to csv graphs_summary.csv
write_csv(solutions_info)
# Check valid input file/directory
assert os.path.isfile(path) or os.path.isdir(path)
# Get transitions from the model
model_transitions = get_transitions_from_model_file(model_file)
if os.path.isfile(path):
# The given path is a solution file
graphs = get_solution_graphs(
load_solutions(path),
model_transitions
if make_csv:
# Init the CSV file now
import csv
f_d = open(output_dir + 'graphs_summary.csv', 'w')
writer = csv.DictWriter(
f_d,
[
"query", "solutions", "boundaries", "graph_nodes", "graph_edges",
"strongly_connected", "max_degree", "min_degree", "average_degree"
],
extrasaction="ignore", # Ignore unknown fieldnames
delimiter=str(';'),
)
# Get query string from the name of the solution file
query = get_query_from_filename(model_file, path)
# Write graph
export_graph(output_dir, query, '', merge_graphs(graphs))
writer.writeheader()
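# With the ';' delimiter, the header row written here is:
# query;solutions;boundaries;graph_nodes;graph_edges;strongly_connected;max_degree;min_degree;average_degree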
# File management...
if os.path.isfile(path):
# The given path is a solution file
do_magic(path)
elif os.path.isdir(path):
# The given path is a directory
path = path if path[-1] == '/' else path + '/'
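# e.g. 'results' becomes 'results/' so later path concatenations behave as expected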
@@ -391,24 +469,16 @@ def queries_2_common_graph(output_dir, model_file, path,
for file_number, solution_file in \
enumerate(glob.glob(path + '*mac_complete.txt'), 1):
# Get query string from the name of the solution file
query = get_query_from_filename(model_file, solution_file)
LOGGER.info("Processing %s query...", query)
# generator of ("Ax Bx", [['h2', 'h00'], ['h3'], ['h0', 'h1'], ['hlast']])
solutions = tuple(load_solutions(solution_file))
graphs = get_solution_graphs(
solutions,
model_transitions
)
# Write graph
export_graph(output_dir, query, '', merge_graphs(graphs))
do_magic(solution_file)
LOGGER.info("Files processed: %s", file_number)
assert file_number != 0, "No *mac_complete.txt files found!"
if make_csv:
# Close the CSV file descriptor
f_d.close()
## Matrices of occurrences #####################################################
def solutions_2_occcurrences_matrix(output_dir, model_file, path,
......