Commit e04db2ad authored by VIGNET Pierre's avatar VIGNET Pierre

sol_digging: replace the txt output file with a json file; add a parameter to exclude transition conditions from the json

parent da319595
......@@ -72,7 +72,7 @@ def parse_trajectories(args):
def sol_digging(args):
"""Convert all events for all solutions in a complete MAC file
and write them in a separate file.
and write them in a separate file in the json format.
This is a function to quickly search all transition attributes involved
in a solution.
......@@ -86,7 +86,8 @@ def sol_digging(args):
solution_repr.sol_digging_main(
params['output'],
params['chart_file'],
params['sol_file']
params['sol_file'],
conditions=not params['no_conditions'], # Inv the param...
)
def model_comp(args):
......@@ -271,7 +272,7 @@ def main():
parser_trajectories.set_defaults(func=parse_trajectories)
# subparser: Representation of the trajectories of MACs in a complete file.
# subparser: Decompilation of trajectories of MACs in a complete file/dir.
# Model file (xml : cadbiom language)
# Solution file (cam_complete)
parser_sol_digging = subparsers.add_parser(
......@@ -286,6 +287,10 @@ def main():
parser_sol_digging.add_argument('--output', action=ReadableDir,
nargs='?', default='decompiled_solutions/',
help="Output directory for decompiled solutions files.")
parser_sol_digging.add_argument('--no_conditions', action='store_true',
help="Don't export conditions of transitions. This allows " + \
"to have only places that are used inside trajectories; " + \
"thus, inhibitors nodes are not present in the json file")
parser_sol_digging.set_defaults(func=sol_digging)
......
......@@ -33,6 +33,7 @@ import re
import json
import os
import glob
import json
from logging import DEBUG
# Remove matplotlib dependency
# It is used on demand during the drawing of a graph
......@@ -884,64 +885,99 @@ def test_main():
def sol_digging(decompiled_filename, sol_steps, transitions, conditions=True):
    """Convert all events for all solutions in a complete MAC file
    and write them in a separate file in the json format.

    This is a function to quickly search all transition attributes involved
    in a solution.

    :param arg1: Path of the json file.
    :param arg2: List of steps involved in a solution. See load_solutions().
    :param arg3: A dictionary of events as keys, and transitions as values.
        Since many transitions can define an event, values are lists.
        Each transition is a tuple with: origin node, final node, attributes
        like label and condition.
        {u'h00': [('Ax', 'n1', {u'label': u'h00[]'}),]
        See get_transitions().
    :param arg4: (Optional) Integrate in the final file,
        the conditions for each transition.
    :type arg1: <str>
    :type arg2: <list>
    :type arg3: <dict <list <tuple <str>, <str>, <dict <str>: <str>>>>
    :type arg4: <bool>
    """

    def get_transition_def(step_event):
        """Dump each transition in the given event to a list of dictionaries.

        .. note:: ori="JUN_nucl_gene" ext="JUN_nucl" event="_h_391"

        :return: A list of dictionaries
            (1 dict for 1 transition in the given event)
        :rtype: <list <dict>>
        """
        # Many transitions per event (ex: complex dissociation)
        decomp_transitions = list()
        for trans in step_event:
            decomp_transition = {
                #"event": trans[2]['label'].split('[')[0],
                "ori": trans[0],
                "ext": trans[1],
            }
            # If condition boolean is set (by default),
            # we add the event's transition to the json data.
            if conditions:
                decomp_transition["condition"] = trans[2]['condition']
            decomp_transitions.append(decomp_transition)
        return decomp_transitions

    # sol_steps structure:
    # ("Bx Ax", [[u'h2', u'h00'], [u'h3'], [u'h0', u'h1'], [u'hlast']])
    decomp_solutions = list()
    for sol, steps in sol_steps:
        # Decompile steps in each solution
        decomp_steps = list()
        for step in steps:
            # Decompile events in each step
            decomp_events = list()
            for event in step:
                # Decompile transitions in each event
                decomp_event = dict()
                # Get transitions for the given event
                # Structure of transitions:
                # {u'h00': [('Ax', 'n1', {u'label': u'h00[]'}),]
                step_event = transitions[event]
                if not step_event:
                    # No transition defines this event: keep an explicit
                    # error marker in the output instead of failing silently.
                    decomp_event['event'] = "ERROR, no transition"
                else:
                    # Get list of transitions
                    decomp_event['event'] = event
                    decomp_event['transitions'] = get_transition_def(step_event)
                # Add event and its transitions
                decomp_events.append(decomp_event)
            # Add step and its events
            decomp_steps.append(decomp_events)
        # Add solution and its steps
        solution = {
            "solution": sol,
            "steps": decomp_steps,
        }
        decomp_solutions.append(solution)

    # Debugging only: avoid the cost of json.dumps unless DEBUG is active
    if LOGGER.getEffectiveLevel() == DEBUG:
        LOGGER.debug(json.dumps(decomp_solutions, sort_keys=True, indent=4))

    # Write file
    with open(decompiled_filename, 'w') as f_d:
        json.dump(decomp_solutions, f_d, sort_keys=True, indent=4)
def sol_digging_main(output_dir, model_file, solution_path):
def sol_digging_main(output_dir, model_file, solution_path, conditions=True):
"""Entry point for sol_digging
.. note:: This functions tests if the solution_path is a directory
......@@ -954,11 +990,12 @@ def sol_digging_main(output_dir, model_file, solution_path):
if os.path.isfile(solution_path):
# The given path is a solution file
# Add _decomp to the solution filename
filename, file_extension = os.path.splitext(solution_file)
filename, file_extension = os.path.splitext(solution_path)
sol_digging(
output_dir + filename + '_decomp' + file_extension,
load_solutions(solution_file),
model_transitions
load_solutions(solution_path),
model_transitions,
conditions=conditions,
)
elif os.path.isdir(solution_path):
......@@ -975,7 +1012,8 @@ def sol_digging_main(output_dir, model_file, solution_path):
sol_digging(
output_dir + filename + '_decomp' + file_extension,
load_solutions(solution_file),
model_transitions
model_transitions,
conditions=conditions,
)
LOGGER.info("Files processed: " + str(file_number))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment