Commit 28a6ff75 authored by bguillaum

VERSION 0.9.9: add filter in modules

git-svn-id: svn+ssh://scm.gforge.inria.fr/svn/semagramme/libcaml-grew/trunk@6812 7838e531-6607-4d57-9587-6c381814729c
parent 0c013298
......@@ -28,7 +28,7 @@ INFO = @INFO@
OCAMLFIND_DIR=`ocamlfind printconf destdir`
VERSION = 0.9.8
VERSION = 0.9.9
cleanup:
rm -rf *.cmo *.cmx *.cmi *.annot *.o *.*~
......
;;====================================================================
;; Grew mode
;; Mode used to edit Grew files with Emacs (syntax highlighting)
;; see: https://gforge.inria.fr/projects/lexicomp/
;; see: https://wikilligramme.loria.fr/doku.php?id=grew:grew
(require 'generic-x) ;; OK with Emacs, but does not seem to work with XEmacs
(define-generic-mode 'grew-mode
'("%");;comments
'("features" "module" "rule" "lex_rule" "match" "without" "labels" "bad_labels" "sequences" "commands" "graph" "confluent" "include");;keywords
'("features" "module" "rule" "lex_rule" "match" "without" "labels" "bad_labels" "sequences" "commands" "graph" "confluent" "include" "filter");;keywords
'(
;; ("class\\s (\*\*)* +\\(\\sw[a-zA-Z0-9_.-]*\\)" 1 'font-lock-type-face);noms de classes
;; ("\?[a-zA-Z0-9]+" . font-lock-variable-name-face)
......
......@@ -83,6 +83,11 @@ module Ast = struct
type command = u_command * Loc.t
(* the [rule] type is used for 3 kinds of module items:
- rule { param=None; ... }
- lex_rule
- filter { param=None; commands=[]; ... }
*)
type rule = {
rule_id:Id.name;
pos_pattern: pattern;
......@@ -92,7 +97,7 @@ module Ast = struct
rule_doc:string;
rule_loc: Loc.t;
}
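
As a side note on the comment above: a minimal standalone sketch of how the three kinds of module items can be told apart from the [commands] and [param] fields alone; [kind] and [classify] are illustrative names, not part of the library.

(* Illustration only: filters carry no commands (and no param),
   lex_rule carries a lexical parameter, plain rules carry neither. *)
type kind = Plain_rule | Lex_rule | Filter

let classify ~commands ~param =
  match commands, param with
  | [], _     -> Filter      (* filter: empty command list *)
  | _, None   -> Plain_rule  (* rule: no lexical parameter *)
  | _, Some _ -> Lex_rule    (* lex_rule: comes with a parameter *)
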
type modul = {
module_id:Id.name;
local_labels: (string * string option) list;
......
......@@ -150,6 +150,7 @@ module Modul = struct
local_labels: (string * string option) array;
bad_labels: Label.t list;
rules: Rule.t list;
filters: Rule.t list;
confluent: bool;
loc: Loc.t;
}
......@@ -166,12 +167,15 @@ module Modul = struct
let build ast_module =
let locals = Array.of_list ast_module.Ast.local_labels in
Array.sort compare locals;
let rules_or_filters = List.map (Rule.build ~locals ast_module.Ast.mod_dir) ast_module.Ast.rules in
let (filters, rules) = List.partition Rule.is_filter rules_or_filters in
let modul =
{
name = ast_module.Ast.module_id;
local_labels = locals;
bad_labels = List.map Label.from_string ast_module.Ast.bad_labels;
rules = List.map (Rule.build ~locals ast_module.Ast.mod_dir) ast_module.Ast.rules;
rules = rules;
filters = filters;
confluent = ast_module.Ast.confluent;
loc = ast_module.Ast.mod_loc;
} in
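
A self-contained sketch of the rules/filters split performed in [Modul.build] above by [Rule.is_filter] and List.partition; [mini_rule] is a made-up record for illustration, not the library's Rule.t.

(* Standalone illustration: a filter is a rule whose command list is empty. *)
type mini_rule = { name : string; commands : string list }

let is_filter r = (r.commands = [])

let () =
  let items =
    [ { name = "passive"; commands = ["del_edge e"] };
      { name = "no_double_subject"; commands = [] } ] in
  let (filters, rules) = List.partition is_filter items in
  Printf.printf "%d filter(s), %d rule(s)\n"
    (List.length filters) (List.length rules)
(* prints: 1 filter(s), 1 rule(s) *)
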
......@@ -269,7 +273,7 @@ module Grs = struct
Rule.normalize
~confluent: next.Modul.confluent
next.Modul.rules
(fun x -> true) (* FIXME filter at the end of rewriting modules *)
next.Modul.filters
(Instance.clear instance) in
let good_list = Instance_set.elements good_set
and bad_list = Instance_set.elements bad_set in
......@@ -291,7 +295,7 @@ module Grs = struct
Rule.normalize
~confluent: next.Modul.confluent
next.Modul.rules
(fun x -> true) (* FIXME: filtering in module outputs *)
next.Modul.filters
(Instance.clear instance) in
let inst_list = Instance_set.elements good_set
(* and bad_list = Instance_set.elements bad_set *) in
......
......@@ -141,6 +141,8 @@ module Rule = struct
let get_loc t = t.loc
let is_filter t = t.commands = []
let build_commands ?cmd_vars ?(locals=[||]) pos pos_table ast_commands =
let known_node_ids = Array.to_list pos_table in
let known_edge_ids = get_edge_ids pos in
......@@ -175,26 +177,7 @@ module Rule = struct
let nb_cv = List.length cmd_vars in
let param = Lex_par.load ~loc:rule_ast.Ast.rule_loc dir nb_pv nb_cv file in
(Some param, pat_vars, cmd_vars) in
(* try *)
(* let lines = File.read file in *)
(* let param = Some *)
(* (List.map *)
(* (fun line -> *)
(* match Str.split (Str.regexp "##") line with *)
(* | [args] when cmd_vars = [] -> *)
(* (match Str.split (Str.regexp "#") args with *)
(* | l when List.length l = nb_pv -> (l,[]) *)
(* | _ -> Error.bug "Illegal param line in file '%s' line '%s' hasn't %d args" file line nb_pv) *)
(* | [args; values] -> *)
(* (match (Str.split (Str.regexp "#") args, Str.split (Str.regexp "#") values) with *)
(* | (lp,lc) when List.length lp = nb_pv && List.length lc = nb_cv -> (lp,lc) *)
(* | _ -> Error.bug "Illegal param line in file '%s' line '%s' hasn't %d args and %d values" file line nb_pv nb_cv) *)
(* | _ -> Error.bug "Illegal param line in file '%s' line '%s'" file line *)
(* ) lines *)
(* ) in *)
(* (param, pat_vars, cmd_vars) *)
(* with Sys_error _ -> Error.build ~loc:rule_ast.Ast.rule_loc "File '%s' not found" file in *)
let (pos,pos_table) = build_pos_pattern ~pat_vars ~locals rule_ast.Ast.pos_pattern in
{
......@@ -679,7 +662,7 @@ module Rule = struct
* Info about the commands applied on [t] is kept
*)
(* type: Instance.t -> t list -> Instance_set.t *)
let normalize_instance instance rules =
let rec loop to_do nf =
if to_do = Instance_set.empty
......@@ -697,6 +680,36 @@ module Rule = struct
loop new_to_do new_nf in
let nfs = loop (Instance_set.singleton instance) Instance_set.empty in
filter_equal_nfs nfs
(* [filter_instance filters instance] returns a boolean:
- true iff the instance does NOT match any pattern in [filters] *)
let filter_instance filters instance =
let rec loop = function
| [] -> true (* no more filters to check *)
| filter::filter_tail ->
let pos_graph = filter.pos.graph in
(* get the list of partial matchings for the positive part of the pattern *)
let matching_list =
extend_matching
(pos_graph,P_graph.empty)
instance.Instance.graph
(init filter.param filter.pos) in
if List.exists
(fun (sub, already_matched_gids) ->
List.for_all
(fun without ->
let neg_graph = without.graph in
let new_partial_matching = update_partial pos_graph without (sub, already_matched_gids) in
fulfill (pos_graph,neg_graph) instance.Instance.graph new_partial_matching
) filter.neg
) matching_list
then (* one of the matchings can be extended *) false
else loop filter_tail in
loop filters
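
A simplified, self-contained sketch of the decision implemented by [filter_instance], with the graph-matching machinery replaced by plain predicates; the types below are illustrative, not the library's.

(* An instance is kept iff no filter matches it; a filter matches when
   its positive pattern is found and every "without" clause is fulfilled. *)
type 'g simple_filter = {
  pos_matches : 'g -> bool;        (* the positive pattern matches the graph *)
  withouts_ok : ('g -> bool) list; (* each "without" clause is fulfilled     *)
}

let keeps_instance (filters : 'g simple_filter list) (graph : 'g) : bool =
  not
    (List.exists
       (fun f ->
         f.pos_matches graph
         && List.for_all (fun ok -> ok graph) f.withouts_ok)
       filters)
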
let rec conf_normalize instance rules =
match conf_one_step instance rules with
......@@ -705,16 +718,16 @@ module Rule = struct
(* type: t list -> (Instance_set.elt -> bool) -> Instance.t -> Instance_set.t * Instance_set.t *)
let normalize ?(confluent=false) rules filter instance =
let normalize ?(confluent=false) rules filters instance =
if confluent
then
let output = conf_normalize instance rules in
if filter output
if filter_instance filters output
then (Instance_set.singleton output, Instance_set.empty)
else (Instance_set.empty, Instance_set.singleton output)
else
let output_set = normalize_instance instance rules in
let (good_set, bad_set) = Instance_set.partition filter output_set in
let (good_set, bad_set) = Instance_set.partition (filter_instance filters) output_set in
(good_set, bad_set)
end
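
In the non-confluent branch of [normalize] above, the good/bad split is an ordinary set partition; a standalone sketch with an integer set standing in for Instance_set (illustrative only).

module IntSet = Set.Make (struct type t = int let compare = compare end)

let () =
  let outputs = IntSet.of_list [1; 2; 3; 4; 5] in
  let keep n = n mod 2 = 0 in   (* stands in for [filter_instance filters] *)
  let (good, bad) = IntSet.partition keep outputs in
  Printf.printf "good: %d, bad: %d\n" (IntSet.cardinal good) (IntSet.cardinal bad)
(* prints: good: 2, bad: 3 *)
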
......@@ -39,9 +39,11 @@ module Rule : sig
val get_name: t -> string
(** [get_loc t] returns the file location of the rule definition *)
val get_loc: t -> Loc.t
val is_filter: t -> bool
(** [build ?locals dir ast_rule] returns the Rule.t value corresponding to [ast_rule].
[dir] is used for locating the lp files *)
val build: ?locals:Label.decl array -> string -> Ast.rule -> t
......@@ -49,8 +51,9 @@ module Rule : sig
(* raise Stop if some command fails to apply *)
val normalize:
?confluent:bool ->
t list ->
(Instance_set.elt -> bool) ->
t list -> (* rule list *)
t list -> (* filter list *)
Instance.t ->
Instance_set.t * Instance_set.t
end
......@@ -52,6 +52,7 @@ let localize t = (t,get_loc ())
%token CONFLUENT /* confluent */
%token RULE /* rule */
%token LEX_RULE /* lex_rule */
%token FILTER /* filter */
%token SEQUENCES /* sequences */
%token GRAPH /* graph */
......@@ -65,11 +66,11 @@ let localize t = (t,get_loc ())
%token ADD_NODE /* add_node */
%token DEL_FEAT /* del_feat */
%token <string> PAT /* $id */
%token <string> CMD /* @id */
%token <string> IDENT /* identifier */
%token <Grew_ast.Ast.qfn> QFN /* ident.ident */
%token <string> STRING
%token <int> INT
%token <string> COMMENT
......@@ -300,7 +301,18 @@ rule:
rule_loc = (!Parser_global.current_file,snd id);
}
}
| doc = option(rule_doc) FILTER id = rule_id LACC p = pos_item n = list(neg_item) RACC
{
{ Ast.rule_id = fst id;
pos_pattern = p;
neg_patterns = n;
commands = [];
param = None;
rule_doc = begin match doc with Some d -> d | None -> "" end;
rule_loc = (!Parser_global.current_file,snd id);
}
}
param:
| LPAREN FEATURE vars = separated_nonempty_list(COMA,var) SEMIC FILE file=STRING RPAREN { (file,vars) }
......
......@@ -83,6 +83,7 @@ and global = parse
| "confluent" { CONFLUENT }
| "rule" { RULE }
| "lex_rule" { LEX_RULE }
| "filter" { FILTER }
| "sequences" { SEQUENCES }
| "graph" { GRAPH }
......