Commit fdcd70a4 authored by Bruno Guillaume

remove Massoc for lexicons

parent 68887367
......@@ -319,7 +319,7 @@ module Ast = struct
| File of string
| Final of string list
type lexicon_info = lexicon Massoc_string.t
type lexicon_info = (string * lexicon) list
(* the [rule] type is used for 3 kinds of module items:
- rule { param=None; ... }
......
......@@ -171,7 +171,7 @@ module Ast : sig
| File of string
| Final of string list
type lexicon_info = lexicon Massoc_string.t
type lexicon_info = (string * lexicon) list
type rule = {
rule_id:Id.name;
......
......@@ -97,15 +97,13 @@ and string_lex re target = parse
}
(* a dedicated lexer for lexical parameter: read everything until "#END" *)
and lp_lex target = parse
| '\n' { Global.new_line (); Lexing.new_line lexbuf; bprintf buff "\n"; lp_lex target lexbuf }
| _ as c { bprintf buff "%c" c; lp_lex target lexbuf }
and lp_lex name target = parse
| '\n' { Global.new_line (); Lexing.new_line lexbuf; bprintf buff "\n"; lp_lex name target lexbuf }
| _ as c { bprintf buff "%c" c; lp_lex name target lexbuf }
| "#END" [' ' '\t']* '\n' { Global.new_line ();
Printf.printf "********%s********\n%!" (Buffer.contents buff);
LEX_PAR (
"TODO",
Str.split (Str.regexp "\n") (Buffer.contents buff)
)
let s = Buffer.contents buff in
let lines= Str.split (Str.regexp "\n") s in
LEX_PAR ( name, lines)
}
(* The lexer must be different when label_ident are parsed. The [global] lexer calls either
......@@ -153,7 +151,7 @@ and standard target = parse
| '%' { comment global lexbuf }
| "#BEGIN" [' ' '\t']* (label_ident as li) [' ' '\t']* '\n'
{ Printf.printf "%s\n%!" li; Global.new_line (); Buffer.clear buff; lp_lex global lexbuf}
{ Global.new_line (); Buffer.clear buff; lp_lex li global lexbuf}
| '\n' { Global.new_line (); Lexing.new_line lexbuf; global lexbuf}
......
......@@ -339,7 +339,7 @@ rule:
commands = cmds;
param = None;
lex_par = None;
lexicon_info = Massoc_string.empty; (* TODOLEX *)
lexicon_info = lex_par;
rule_doc = begin match doc with Some d -> d | None -> [] end;
rule_loc = snd id_loc;
rule_dir = None;
......@@ -351,8 +351,8 @@ rule:
pattern = Ast.complete_pattern { Ast.pat_pos = p; Ast.pat_negs = n };
commands = cmds;
param = Some param;
lex_par = lex_par;
lexicon_info = Massoc_string.empty; (* TODOLEX *)
lex_par = None;
lexicon_info = []; (* TODOLEX *)
rule_doc = begin match doc with Some d -> d | None -> [] end;
rule_loc = snd id_loc;
rule_dir = None;
......@@ -360,7 +360,7 @@ rule:
}
lex_par:
| lex_par = LEX_PAR { snd (lex_par) }
| lex_par = LEX_PAR { (fst lex_par, Ast.Final (snd lex_par)) }
param:
| LPAREN FEATURE vars=separated_nonempty_list(COMA,var) RPAREN { ([],vars) }
......
......@@ -512,7 +512,7 @@ module Rule = struct
let build_lex = function
| Ast.File filename -> Lexicon.load filename
| Ast.Final line_list -> Lexicon.build (List.map (fun s -> Str.split (Str.regexp "\\t") s) line_list)
| Ast.Final line_list -> Lexicon.build (List.map (fun s -> Str.split (Str.regexp "\t") s) line_list)
(* ====================================================================== *)
......@@ -523,19 +523,15 @@ module Rule = struct
| None -> deprecated_dir in
let (lexicons : (string * Lexicon.t) list) =
Massoc_string.fold_on_list (fun acc name desc_list ->
match desc_list with
| [] -> Error.bug "Empty description list in lexicon %s" name
| h::t ->
let lex =
List.fold_left
(fun acc2 desc -> Lexicon.union acc2 (build_lex desc)) (build_lex h) t in
(name, lex) :: acc
) [] rule_ast.Ast.lexicon_info
in
List.fold_left (fun acc (name,lex) ->
try
let prev = List.assoc name acc in
(name, (Lexicon.union prev (build_lex lex))) :: (List.remove_assoc name acc)
with
Not_found -> (name, build_lex lex) :: acc
) [] rule_ast.Ast.lexicon_info in
let lexicon_names = List.map fst lexicons in
Printf.printf "******* %d --> %s\n%!" (List.length lexicon_names) (String.concat "/" lexicon_names);
let (param, pat_vars) =
match rule_ast.Ast.param with
......
......@@ -179,7 +179,7 @@ module Lexicon = struct
let load file =
let lines = File.read file in
let items = List.map (fun line -> Str.split (Str.regexp "\\t") line) lines in
let items = List.map (fun line -> Str.split (Str.regexp "\t") line) lines in
build items
let reduce sub_list lexicon =
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment