Commit 1e72fd08 authored by VIGNET Pierre

PEP8 review; optimizations of the logger: lazy evaluation everywhere

parent 116a626d
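The heart of this commit is the switch from eager string building (+ and str.format) inside logging calls to the logging module's deferred %-style arguments: interpolation only happens if the record is actually emitted, so disabled DEBUG calls become nearly free. A standalone sketch of the difference (illustration only, not code from this repository):

import logging

logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)

class Expensive(object):
    """Stand-in for any costly-to-render log argument."""
    def __str__(self):
        print("formatting happened!")
        return "payload"

# Eager: the message is built (and __str__ runs) even though DEBUG is off.
LOGGER.debug("value: {}".format(Expensive()))

# Lazy: arguments are stored on the LogRecord and only interpolated if a
# handler actually emits the record -- nothing is rendered here.
LOGGER.debug("value: %s", Expensive())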
@@ -230,27 +230,27 @@ def main():
group = parser_input_file.add_mutually_exclusive_group(required=True)
group.add_argument('final_prop', nargs='?')
group.add_argument('--input_file', action=ReadableFile, nargs='?',
help="Without input file, there will be only one process. " + \
"While there will be 1 process per line (per logical formula " + \
"on each line)")
help="Without input file, there will be only one process. "
"While there will be 1 process per line (per logical formula "
"on each line)")
parser_input_file.add_argument('--output', action=ReadableDir, nargs='?',
default='result/', help="Output directory.")
# default: False
parser_input_file.add_argument('--combinations', action='store_true',
help="If input_file is set, we can compute all combinations of " + \
"given elements on each line")
help="If input_file is set, we can compute all combinations of "
"given elements on each line")
parser_input_file.add_argument('--steps', type=int, nargs='?', default=10,
help="Maximum of allowed steps to find macs")
# https://docs.python.org/dev/library/argparse.html#action
# all_macs to False by default
parser_input_file.add_argument('--all_macs', action='store_true',
help="Solver will try to search all macs with 0 to the maximum of " + \
"allowed steps.")
help="Solver will try to search all macs with 0 to the maximum of "
"allowed steps.")
# continue to False by default
parser_input_file.add_argument('--continue', action='store_true',
help="Resume previous computations; if there is a mac file from " + \
"a previous work, last frontier places/boundaries will be reloaded.")
help="Resume previous computations; if there is a mac file from a "
"previous work, last frontier places/boundaries will be reloaded.")
parser_input_file.add_argument('--start_prop', nargs='?', default=None)
parser_input_file.add_argument('--inv_prop', nargs='?', default=None)
@@ -264,7 +264,7 @@ def main():
help=solutions_sort.__doc__)
parser_solutions_sort.add_argument('path',
help="Solution file or directory with MAC solutions files "
"(*cam* files) generated with the 'compute_macs' command.")
"(*cam* files) generated with the 'compute_macs' command.")
parser_solutions_sort.set_defaults(func=solutions_sort)
@@ -279,7 +279,7 @@ def main():
help="bcx model file.")
parser_trajectories.add_argument('path',
help="Complete solution file or directory with MAC solutions files "
"(*cam_complete.txt files) generated with the 'compute_macs' command.")
"(*cam_complete.txt files) generated with the 'compute_macs' command.")
parser_trajectories.add_argument('--output', action=ReadableDir,
nargs='?', default='graphs/',
help="Output directory for GraphML files.")
@@ -297,14 +297,14 @@ def main():
help="bcx model file.")
parser_solutions_2_json.add_argument('path',
help="Complete solution file or directory with MAC solutions files "
"(*cam_complete.txt files) generated with the 'compute_macs' command.")
"(*cam_complete.txt files) generated with the 'compute_macs' command.")
parser_solutions_2_json.add_argument('--output', action=ReadableDir,
nargs='?', default='decompiled_solutions/',
help="Directory for newly created files.")
parser_solutions_2_json.add_argument('--no_conditions', action='store_true',
help="Don't export conditions of transitions. This allows "
"to have only places/entities that are used inside trajectories; "
"thus, inhibitors nodes are not present in the json file")
"to have only places/entities that are used inside trajectories; "
"thus, inhibitors nodes are not present in the json file")
parser_solutions_2_json.set_defaults(func=solutions_2_json)
@@ -360,7 +360,7 @@ def main():
# PS: Argparse doesn't allow selecting a default value here
group.add_argument('--default', action='store_true',
help="Display quick description of the model "
"(Number of places/entities, transitions, entity types, locations)")
"(Number of places/entities, transitions, entity types, locations)")
group.add_argument('--all_entities', action='store_true',
help="Retrieve data for all places/entities of the model.")
group.add_argument('--boundaries', action='store_true',
@@ -370,12 +370,12 @@ def main():
# Outputs
parser_model_info.add_argument('--csv', action='store_true',
help="Create a CSV file containing data about previously filtered "
"places/entities of the model.")
"places/entities of the model.")
parser_model_info.add_argument('--json', action='store_true',
help="Create a JSON formated file containing data about previously "
"filtered places/entities of the model, and a full summary about the "
"model itself (boundaries, transitions, events, entities locations, "
"entities types).")
"filtered places/entities of the model, and a full summary about the "
"model itself (boundaries, transitions, events, entities locations,"
" entities types).")
parser_model_info.add_argument('--output', action=ReadableDir,
nargs='?', default='./',
help="Directory for newly created files.")
@@ -392,18 +392,18 @@ def main():
# Additional data
parser_model_graph.add_argument('--centralities', action='store_true',
help="Get centralities for each node of the graph "
"(degree, in_degree, out_degree, closeness, betweenness). "
"Works in conjunction with the ``--json`` option.")
"(degree, in_degree, out_degree, closeness, betweenness). "
"Works in conjunction with the ``--json`` option.")
# Outputs
parser_model_graph.add_argument('--graph', action='store_true',
help="Translate the model into a GraphML formated file which can "
"be opened in Cytoscape.")
"be opened in Cytoscape.")
parser_model_graph.add_argument('--json', action='store_true',
help="Create a JSON formated file containing a summary of the graph "
"based on the model.")
"based on the model.")
parser_model_graph.add_argument('--json_graph', action='store_true',
help="Create a JSON formated file containing the graph based on the "
"model, which can be opened by Web applications.")
"model, which can be opened by Web applications.")
parser_model_graph.add_argument('--output', action=ReadableDir,
nargs='?', default='graphs/',
help="Directory for newly created files.")
@@ -52,13 +52,13 @@ def merge_cams_to_csv(directory, csvfile='merged_cams.csv'):
formula = ''.join(filename.split('_')[1:-1])
# Read the content of the cam file & memorize this content
with open(filename) as fd:
with open(filename) as f_d:
# Add the formula column, before each cam, to the future csv file
csv_data.append([[formula] + [line.rstrip('\n')] for line in fd])
csv_data.append([[formula] + [line.rstrip('\n')] for line in f_d])
# Write the final csv
with open(csvfile, 'w') as fd:
writer = csv.writer(fd, delimiter=str(';'))
with open(csvfile, 'w') as f_d:
writer = csv.writer(f_d, delimiter=str(';'))
writer.writerows(it.chain(*csv_data))
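The str(';') wrapper around the delimiter looks redundant but is a deliberate Python 2 workaround: with unicode_literals imported (as in this module), a bare ';' is a unicode string, while the Python 2 csv module requires a native str delimiter. A reduced sketch of the write path, with made-up data:

import csv
import itertools as it

# Rows gathered from two hypothetical cam files: [formula, cam line]
csv_data = [[["f1", "A B"], ["f1", "C"]],
            [["f2", "D E"]]]

with open("merged_cams.csv", "w") as f_d:
    # str(';') keeps the delimiter a native str under unicode_literals
    writer = csv.writer(f_d, delimiter=str(";"))
    # it.chain(*csv_data) flattens one nesting level: all rows, in file order
    writer.writerows(it.chain(*csv_data))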
@@ -21,6 +21,9 @@
# Dyliss team
# IRISA Campus de Beaulieu
# 35042 RENNES Cedex, FRANCE
"""
Search Minimal Accessibility Conditions
"""
from __future__ import unicode_literals
from __future__ import print_function
@@ -28,7 +31,6 @@ from __future__ import print_function
#from pycallgraph.output import GraphvizOutput
# Standard imports
import re
import os
from functools import partial
import sys
@@ -50,19 +52,20 @@ LOGGER = cm.logger()
class ErrorRep(object):
# Cf class CompilReporter(object):
# gt_gui/utils/reporter.py
"""Cf class CompilReporter(object):
gt_gui/utils/reporter.py
"""
def __init__(self):
self.context = ""
self.error = False
def display(self, mess):
self.error = True
LOGGER.error(">> Context: {}; {}".format(self.context, mess))
LOGGER.error(">> Context: %s; %s", self.context, mess)
exit()
def display_info(self, mess):
LOGGER.error("-- Context: {}; {}".format(self.context, mess))
LOGGER.error("-- Context: %s; %s", self.context, mess)
exit()
def set_context(self, cont):
@@ -113,17 +116,15 @@ def make_logical_formula(previous_frontier_places, start_prop):
return start_prop + ' and (' + prev_frontier_places_formula + ')'
elif prev_frontier_places_formula:
return prev_frontier_places_formula
else:
return start_prop
return start_prop
cam_list = [logical_and(frontier_places)
for frontier_places in previous_frontier_places]
if len(cam_list) != 0:
if cam_list:
# Logical or between each line
return add_start_prop('not(' + logical_or(cam_list) + ')')
else:
return add_start_prop('')
return add_start_prop('')
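Two idioms drive this rewrite: an empty list is falsy, so "if cam_list:" replaces the explicit length test, and an "else" whose sibling branch already returned is dead indentation. In isolation:

def describe(items):
    """Return a summary of items, or 'empty' for an empty list."""
    if items:                    # same truth value as len(items) != 0
        return "got " + ", ".join(items)
    return "empty"               # no 'else' needed after a return

assert describe(["a", "b"]) == "got a, b"
assert describe([]) == "empty"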
def main2(chart_file, cam_file, cam_step_file, cam_complete_file, cam_strong_file,
@@ -185,11 +186,11 @@ def main2(chart_file, cam_file, cam_step_file, cam_complete_file, cam_strong_fil
previous_frontier_places = read_cam_file(cam_file)
current_start_prop = make_logical_formula(previous_frontier_places,
start_prop)
LOGGER.info(final_prop + \
":: Reload previous frontier places: " + \
str(len(previous_frontier_places)))
LOGGER.info("%s:: Reload previous frontier places: %s",
final_prop,
len(previous_frontier_places))
except IOError:
LOGGER.warning(final_prop + ":: cam file not found !")
LOGGER.warning("%s:: cam file not found!", final_prop)
previous_frontier_places = set()
current_start_prop = start_prop
else:
@@ -219,8 +220,9 @@ def main2(chart_file, cam_file, cam_step_file, cam_complete_file, cam_strong_fil
# Add these frontier places to the set of previous ones
# (tuple is hashable)
previous_frontier_places.add(tuple(frontier_places))
LOGGER.debug(final_prop + ":: Prev frontier places: " + \
str(previous_frontier_places))
LOGGER.debug("%s:: Prev frontier places: %s",
final_prop,
previous_frontier_places)
# Compute the formula of the next start_property
current_start_prop = make_logical_formula(previous_frontier_places,
@@ -234,13 +236,10 @@ def main2(chart_file, cam_file, cam_step_file, cam_complete_file, cam_strong_fil
if not all_macs:
steps = min_steps
LOGGER.debug(
final_prop +
":: Next start_prop formula: {} in {} steps".format(
current_start_prop,
steps
)
)
LOGGER.debug("%s:: Next start_prop formula: %s in %s steps",
final_prop,
current_start_prop,
steps)
def find_macs(mcla,
@@ -265,25 +264,25 @@ def find_macs(mcla,
vmac_list = mcla.mac_search(query, steps)
# If yes, in how many steps ?
min_step = mcla.unfolder.get_current_step()
min_step = mcla.unfolder.get_current_step()
for next_mac_object in vmac_list:
LOGGER.debug(final_prop + ":: Next MAC object:\n{}".format(next_mac_object))
LOGGER.debug("%s:: Next MAC object:\n%s", final_prop, next_mac_object)
# Save MAC and timings
LOGGER.debug(final_prop + ":: Save MAC and timings...")
LOGGER.debug("%s:: Save MAC and timings...", final_prop)
with open(cam_complete_file, 'a') as file:
next_mac_object.save(file)
# Save MAC
next_mac = next_mac_object.activated_frontier
LOGGER.debug(final_prop + ":: Save next MAC: {}".format(next_mac))
LOGGER.debug("%s:: Save next MAC: %s", final_prop, next_mac)
with open(cam_file, 'a') as file:
file.write('\t'.join(next_mac) + '\n')
# Save min steps
min_step = mcla.unfolder.get_current_step() - 1 # Magic number !
LOGGER.debug(final_prop + ":: Save minimal steps: {}".format(min_step))
LOGGER.debug("%s:: Save minimal steps: %s", final_prop, min_step)
with open(cam_step_file, 'a') as file:
file.write(str(min_step)+'\n')
@@ -302,49 +301,42 @@ def find_mac(mcla,
# Is the property reacheable ?
reacheable = mcla.sq_is_satisfiable(query, steps)
# If yes, in how many steps ?
min_step = mcla.unfolder.get_current_step()
min_step = mcla.unfolder.get_current_step()
if reacheable and (min_step <= steps):
LOGGER.info(
final_prop + ":: Property {} is reacheable in {} steps".format(
final_prop, min_step
)
)
LOGGER.info("%s:: Property %s is reacheable in %s steps",
final_prop, final_prop, min_step)
else:
LOGGER.info(
final_prop + ":: Property {} is NOT reacheable in {} steps".format(
final_prop, min_step
)
)
LOGGER.info(final_prop + ":: STOP the search!")
LOGGER.info("%s:: Property %s is NOT reacheable in %s steps",
final_prop, final_prop, min_step)
LOGGER.info("%s:: STOP the search!", final_prop)
return
# Find next MAC
next_mac_object = mcla.next_mac(query, min_step)
if next_mac_object:
LOGGER.debug(final_prop + ":: Next MAC object:\n{}".format(next_mac_object))
LOGGER.debug("%s:: Next MAC object:\n%s", final_prop, next_mac_object)
# Save MAC and timings
LOGGER.debug(final_prop + ":: Save MAC and timings...")
LOGGER.debug("%s:: Save MAC and timings...", final_prop)
with open(cam_complete_file, 'a') as file:
next_mac_object.save(file)
# Save MAC (in alphabetic order...)
next_mac = sorted(next_mac_object.activated_frontier)
LOGGER.debug(final_prop + ":: Save next MAC: {}".format(next_mac))
LOGGER.debug("%s:: Save next MAC: %s", final_prop, next_mac)
with open(cam_file, 'a') as file:
file.write(" ".join(sorted(next_mac, key=lambda s: s.lower()))+'\n')
# Save min steps
min_step = mcla.unfolder.get_current_step() - 1 # Magic number !
LOGGER.debug(final_prop + ":: Save minimal steps: {}".format(min_step))
LOGGER.debug("%s:: Save minimal steps: %s", final_prop, min_step)
with open(cam_step_file, 'a') as file:
file.write(str(min_step)+'\n')
return next_mac, min_step
else:
LOGGER.info(final_prop + ":: STOP the search! No more MAC.")
return
LOGGER.info("%s:: STOP the search! No more MAC.", final_prop)
def detect_model_type(mclanalyser, filepath):
@@ -371,11 +363,12 @@ def detect_model_type(mclanalyser, filepath):
".cal": mclanalyser.build_from_cadlang,
".xml": mclanalyser.build_from_pid_file,
}
try:
extension = re.search('^.*(\.[bcx|xml|cal]{3})', filepath).group(1)
LOGGER.debug("Find '" + extension + "' extension: " + filepath)
except:
LOGGER.error("Unauthorized file: " + filepath)
_, extension = os.path.splitext(filepath)
LOGGER.debug("Found %s extension: %s", extension, filepath)
if extension not in build_func:
LOGGER.error("Unauthorized file: %s", filepath)
exit()
return build_func[extension]
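Besides being shorter, os.path.splitext fixes a latent bug in the removed regex: [bcx|xml|cal] is a character class (any of the letters b, c, x, m, a, l or a pipe), not an alternation, so extensions such as '.bal' or '.xxx' would also have matched. The new version is a plain dictionary dispatch on the real extension; the build_func values below are hypothetical stand-ins for the mclanalyser methods:

import os

build_func = {".bcx": "build_from_chart_file",
              ".cal": "build_from_cadlang",
              ".xml": "build_from_pid_file"}

_, extension = os.path.splitext("models/example.bcx")
assert extension == ".bcx"
assert build_func[extension] == "build_from_chart_file"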
@@ -500,10 +493,10 @@ def compute_combinations(final_properties):
negated_places = ' and not '.join(final_properties - data)
return "{}{}{}".format(
' and '.join(data),
' and not ' if negated_places != '' else '',
negated_places
)
' and '.join(data),
' and not ' if negated_places != '' else '',
negated_places
)
all_combinations = list()
@@ -515,13 +508,8 @@ def compute_combinations(final_properties):
# Unpack combinations
all_combinations = [comb for comb in it.chain(*all_combinations)]
LOGGER.debug("Combinations: {}, Length: {}".format(
str(all_combinations),
len(all_combinations)
)
)
LOGGER.info("Number of computed combinations: " + \
str(len(all_combinations)))
LOGGER.debug("Combinations: %s, Length: %s", all_combinations, all_combinations)
LOGGER.info("Number of computed combinations: %s", len(all_combinations))
return all_combinations
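One trade-off of lazy %-formatting is that argument mismatches are not checked at call time: with the level disabled the call silently succeeds, and with it enabled the logging machinery reports the error on stderr instead of raising. A sketch:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("demo")

# DEBUG is disabled: the missing second argument goes completely unnoticed.
log.debug("Combinations: %s, Length: %s", [1, 2, 3])

# INFO is enabled: logging prints "--- Logging error ---" plus a traceback
# on stderr, but the program keeps running.
log.info("Combinations: %s, Length: %s", [1, 2, 3])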
@@ -545,8 +533,8 @@ def solutions_search(params):
# Multiple properties in input file
# => multiprocessing: 1 process for each property
with open(params['input_file'], 'r') as fd:
g = (line.rstrip('\n') for line in fd)
with open(params['input_file'], 'r') as f_d:
g = (line.rstrip('\n') for line in f_d)
final_properties = [prop for prop in g if prop != '']
@@ -558,36 +546,36 @@ def solutions_search(params):
LOGGER.debug("Final properties: " + str(final_properties))
# Output combinations of final_properties
# with open(params['input_file'] + '_combinations.txt', 'w') as fd:
# fd.write('\n'.join(final_properties) + '\n')
# with open(params['input_file'] + '_combinations.txt', 'w') as f_d:
# f_d.write('\n'.join(final_properties) + '\n')
#
# g = (elem for elem in final_properties)
# for i in range(1, len(final_properties) + 1):
# with open(params['input_file'][:-4] + '_combinations' + str(i) + '.txt', 'w') as fd:
# with open(params['input_file'][:-4] + '_combinations' + str(i) + '.txt', 'w') as f_d:
# try:
# fd.write(next(g) + '\n')
# fd.write(next(g) + '\n')
# f_d.write(next(g) + '\n')
# f_d.write(next(g) + '\n')
# except StopIteration:
# break
#
# exit()
def update_params(prop):
d = params.copy()
d['final_prop'] = prop
return d
"""Shallow copy of parameters and update final_prop for a new run"""
new_params = params.copy()
new_params['final_prop'] = prop
return new_params
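dict.copy is a shallow copy, so each per-job parameter set shares nested values with the original params; that is safe here because only the top-level final_prop key is rebound. For instance:

params = {"steps": 10, "output": "result/"}

def update_params(prop):
    """Shallow copy of parameters and update final_prop for a new run"""
    new_params = params.copy()
    new_params["final_prop"] = prop
    return new_params

jobs = [update_params(p) for p in ("A and B", "C")]
assert "final_prop" not in params                      # original untouched
assert [j["final_prop"] for j in jobs] == ["A and B", "C"]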
# Fix number of processes
# PS: the new solver is optimized for 8 threads
# nb_cpu_required = mp.cpu_count() / 8
# nb_cpu_required = 1 if nb_cpu_required == 0 else nb_cpu_required
with ProcessPoolExecutor(max_workers=mp.cpu_count()) as e:
with ProcessPoolExecutor(max_workers=mp.cpu_count()) as executor:
futures_and_output = {e.submit(compute_macs,
update_params(job_property)
):job_property \
futures_and_output = {executor.submit(compute_macs,
update_params(job_property)
):job_property \
for job_property in final_properties} # Job name
nb_errors = 0
@@ -599,20 +587,16 @@ def solutions_search(params):
# Display the results if the futures contain any.
# If they contain an exception, display the exception.
if future.exception() is not None:
LOGGER.error("{} generated an exception: \n{}".format(
job_name,
future.exception())
)
LOGGER.error("%s generated an exception: \n%s",
job_name,
future.exception())
nb_errors += 1
else:
# The end
LOGGER.info("{}... \t\t[Done]".format(job_name))
LOGGER.info("%s... \t\t[Done]", job_name)
nb_done += 1
LOGGER.info("Ending: {} errors, {} done\nbye.".format(
nb_errors,
nb_done)
)
LOGGER.info("Ending: %s errors, %s done\nbye.", nb_errors, nb_done)
def compute_macs(params):
@@ -629,29 +613,28 @@ def compute_macs(params):
sys.setrecursionlimit(10000)
# QUERY PARAMETERS
# TODO: beware of the type of separators in paths..
MODEL_NAME = params['chart_file'].split('/')[-1][:-4]
model_filename = os.path.basename(os.path.splitext(params['chart_file'])[0])
# FILES
# Add trailing '/' if not present
output = params['output'] if params['output'][-1] == '/' \
else params['output'] + '/'
CAM_FILE_PREFIX = output + MODEL_NAME + \
cam_file_prefix = output + model_filename + \
'_' + params['final_prop'] + '_cam'
# cam_file
cam_file = CAM_FILE_PREFIX + ".txt"
cam_file = cam_file_prefix + ".txt"
# cam_step_file
cam_step_file = CAM_FILE_PREFIX + "_step.txt"
cam_step_file = cam_file_prefix + "_step.txt"
# cam_complete_file
cam_complete_file = CAM_FILE_PREFIX + "_complete.txt"
cam_complete_file = cam_file_prefix + "_complete.txt"
# cam_strong_file
cam_strong_file = CAM_FILE_PREFIX + "_strongA.txt"
cam_strong_file = cam_file_prefix + "_strongA.txt"
# Reset files
def remove_file(file):
"""Reset files"""
try:
os.remove(file)
except:
except OSError:
pass
if not params['continue']:
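Narrowing the bare except to OSError keeps the delete-if-present semantics without swallowing unrelated failures (a bare except even catches KeyboardInterrupt). On Python 3 the same intent can be written with contextlib.suppress; an equivalent idiom, not what the module uses:

import os
from contextlib import suppress

def remove_file(path):
    """Delete path if it exists; a missing file is not an error."""
    with suppress(OSError):
        os.remove(path)

remove_file("no_such_file.txt")   # silently does nothing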
@@ -685,7 +668,5 @@ def read_cam_file(file):
:rtype: <set>
"""
with open(file, 'r') as fd:
return {tuple(line.rstrip('\n').split(' ')) for line in fd}
with open(file, 'r') as f_d:
return {tuple(line.rstrip('\n').split(' ')) for line in f_d}
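Each line of a cam file is split into its frontier places and stored as a tuple because tuples, unlike lists, are hashable: the set both deduplicates solutions and supports the previous_frontier_places.add(tuple(...)) call seen earlier. In miniature:

lines = ["A B\n", "C\n", "A B\n"]                     # fake file contents
cams = {tuple(line.rstrip('\n').split(' ')) for line in lines}
assert cams == {('A', 'B'), ('C',)}                   # duplicates collapse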
@@ -143,7 +143,7 @@ def solutions_sort(path):
## Conversion functions ########################################################
def solutions_2_json(output_dir, model_file, solution_path, conditions=True):
def solutions_2_json(output_dir, model_file, path, conditions=True):
"""Entry point for solutions_2_json
Create a JSON formated file containing all data from complete MAC files
@@ -157,48 +157,49 @@ def solutions_2_json(output_dir, model_file, solution_path, conditions=True):
:param output_dir: Output path.
:param model_file: Filepath of the model.
:param solution_path: Filepath/directory of a complete solution file.
:param path: Filepath/directory of a complete solution file.
:param conditions: (Optional) If False, conditions of transitions will not
be present in the JSON file. This allows to have only places/entities
used inside trajectories; thus, inhibitors are avoided.
:type output_dir: <str>
:type model_file: <str>
:type solution_path: <str>
:type path: <str>
:type conditions: <boolean>
"""
def write_json_file(decompiled_filename, decomp_solutions):
# Write file
"""Write decompiled solutions to a JSON formated file"""
with open(decompiled_filename, 'w') as f_d:
json.dump(decomp_solutions, f_d, sort_keys=True, indent=2)
# Check valid input file/directory
assert os.path.isfile(solution_path) or os.path.isdir(solution_path)
assert os.path.isfile(path) or os.path.isdir(path)
# Get transitions from the model
model_transitions = get_transitions_from_model_file(model_file)
if os.path.isfile(solution_path):
if os.path.isfile(path):
# The given path is a solution file
# Add _decomp to the solution filename
filename = os.path.basename(os.path.splitext(solution_path)[0])
filename = os.path.basename(os.path.splitext(path)[0])
decompiled_filename = output_dir + filename + '_decomp.txt'
decomp_solutions = get_json_solutions(
load_solutions(solution_path),
load_solutions(path),
model_transitions,
conditions=conditions,
)
write_json_file(decompiled_filename, decomp_solutions)
elif os.path.isdir(solution_path):
elif os.path.isdir(path):
# The given path is a directory
solution_path = solution_path if solution_path[-1] == '/' \
else solution_path + '/'
path = path if path[-1] == '/' \
else path + '/'
# Decompilation of all files in the directory
file_number = 0
for file_number, solution_file in \
enumerate(glob.glob(solution_path + '*cam_complete.txt'), 1):
enumerate(glob.glob(path + '*cam_complete.txt'), 1):
# Add _decomp to the solution filename
filename = os.path.basename(os.path.splitext(solution_file)[0])
@@ -210,10 +211,10 @@ def solutions_2_json(output_dir, model_file, solution_path, conditions=True):
)
write_json_file(decompiled_filename, decomp_solutions)
LOGGER.info("Files processed: " + str(file_number))
LOGGER.info("Files processed: %s", file_number)
def solutions_2_graph(output_dir, model_file, solution_path):
def solutions_2_graph(output_dir, model_file, path):
"""Entry point for solutions_2_graph