Commit b84b6b06 authored by VIGNET Pierre

Review API: sol_digging() no longer writes a file, but returns JSON data; fix typos; add doc

parent 23a71366
@@ -108,7 +108,7 @@ def load_solutions(file):
     :param: File name
     :type: <str>
-    :return:A tuple of "frontier places" and a list of events in each step.
+    :return: A tuple of "frontier places" and a list of events in each step.
        ("Bx Ax", [[u'h2', u'h00'], [u'h3'], [u'h0', u'h1'], [u'hlast']])
     :rtype: <tuple <str>, <list>>
     """
@@ -885,27 +885,29 @@ def test_main():
-def sol_digging(decompiled_filename, sol_steps, transitions, conditions=True):
+def sol_digging(sol_steps, transitions, conditions=True):
     """Convert all events for all solutions in a complete MAC file
-    and write them in a separate file in the json format.
+    and return them as JSON data.

     This is a function to quickly search all transition attributes involved
     in a solution.

-    :param arg1: Path of the json file.
-    :param arg2: List of steps involved in a solution. See load_solutions().
-    :param arg3: A dictionnary of events as keys, and transitions as values.
+    :param arg1: List of steps involved in a solution. See load_solutions().
+        A tuple of "frontier places" and a list of events in each step.
+        ("Bx Ax", [[u'h2', u'h00'], [u'h3'], [u'h0', u'h1'], [u'hlast']])
+    :param arg2: A dictionary of events as keys, and transitions as values.
         Since many transitions can define an event, values are lists.
         Each transition is a tuple with: origin node, final node, attributes
         like label and condition.
         {u'h00': [('Ax', 'n1', {u'label': u'h00[]'}),]}
         See get_transitions().
-    :param arg4: (Optional) Integrate in the final file,
+    :param arg3: (Optional) Include in the returned data
         the conditions for each transition.
-    :type arg1: <str>
-    :type arg2: <list>
-    :type arg3: <dict <list <tuple <str>, <str>, <dict <str>: <str>>>>
-    :type arg4: <bool>
+    :type arg1: <list>
+    :type arg2: <dict <list <tuple <str>, <str>, <dict <str>: <str>>>>
+    :type arg3: <bool>
+    :return: The JSON data for the given steps.
+    :rtype: <list>
     """
     def get_transition_def(step_event):
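A minimal sketch of how the revised signature is meant to be driven, assuming the input shapes documented above (the `transitions` dict below is a hand-written stand-in, not the output of a real model):

```python
import json

# Hypothetical inputs mirroring the docstring shapes; in real use they come
# from load_solutions() and get_transitions() in the module patched here.
sol_steps = ("Bx Ax", [[u'h2', u'h00'], [u'h3'], [u'h0', u'h1'], [u'hlast']])
transitions = {
    u'h00': [('Ax', 'n1', {u'label': u'h00[]'})],
    # ... one entry per event; values are lists because several transitions
    # can be bound to the same event.
}

# sol_digging() no longer writes a file: it returns the decompiled solutions,
# so serialization is now the caller's job.
decomp_solutions = sol_digging(sol_steps, transitions, conditions=True)
print(json.dumps(decomp_solutions, sort_keys=True, indent=4))
```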
@@ -972,9 +974,8 @@ def sol_digging(decompiled_filename, sol_steps, transitions, conditions=True):
     # Debugging only
     if LOGGER.getEffectiveLevel() == DEBUG:
         LOGGER.debug(json.dumps(decomp_solutions, sort_keys=True, indent=4))

-    # Write file
-    with open(decompiled_filename, 'w') as f_d:
-        json.dump(decomp_solutions, f_d, sort_keys=True, indent=4)
+    return decomp_solutions


 def sol_digging_main(output_dir, model_file, solution_path, conditions=True):
@@ -984,19 +985,25 @@ def sol_digging_main(output_dir, model_file, solution_path, conditions=True):
     or just a file.
     """

+    def write_json_file(decompiled_filename, decomp_solutions):
+        # Write file
+        with open(decompiled_filename, 'w') as f_d:
+            json.dump(decomp_solutions, f_d, sort_keys=True, indent=4)
+
     # Get transitions from the model
     model_transitions = get_transitions(model_file)

     if os.path.isfile(solution_path):
         # The given path is a solution file
         # Add _decomp to the solution filename
         filename, file_extension = os.path.splitext(solution_path)
-        sol_digging(
-            output_dir + filename + '_decomp' + file_extension,
+        decompiled_filename = output_dir + filename + '_decomp' + file_extension
+        decomp_solutions = sol_digging(
             load_solutions(solution_path),
             model_transitions,
             conditions=conditions,
         )
+        write_json_file(decompiled_filename, decomp_solutions)

     elif os.path.isdir(solution_path):
         # The given path is a directory
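A small worked example of the decompiled filename construction used in the branches above (paths are hypothetical):

```python
import os

output_dir = 'decompiled/'                 # hypothetical
solution_path = 'solutions/model_mac.txt'  # hypothetical

filename, file_extension = os.path.splitext(solution_path)
# filename == 'solutions/model_mac', file_extension == '.txt'
decompiled_filename = output_dir + filename + '_decomp' + file_extension
# -> 'decompiled/solutions/model_mac_decomp.txt'
```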
@@ -1009,12 +1016,12 @@ def sol_digging_main(output_dir, model_file, solution_path, conditions=True):
             # Add _decomp to the solution filename
             filename, file_extension = os.path.splitext(solution_file)
-            sol_digging(
-                output_dir + filename + '_decomp' + file_extension,
+            decompiled_filename = output_dir + filename + '_decomp' + file_extension
+            decomp_solutions = sol_digging(
                 load_solutions(solution_file),
                 model_transitions,
                 conditions=conditions,
             )
+            write_json_file(decompiled_filename, decomp_solutions)

         LOGGER.info("Files processed: " + str(file_number))
...
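Taken together, a hedged end-to-end call of the entry point might look like this (the module name and file paths are placeholders, not taken from the repository):

```python
# Hypothetical driver script; adjust the import to the actual module path.
from solution_sort import sol_digging_main   # placeholder module name

sol_digging_main(
    output_dir='decompiled/',     # where the *_decomp JSON files are written
    model_file='model.bcx',       # model read by get_transitions() (name is hypothetical)
    solution_path='solutions/',   # a single solution file or a directory of them
    conditions=True,              # keep each transition's condition in the output
)
```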