Commit d4ddbedd authored by POGODALLA Sylvain's avatar POGODALLA Sylvain

It compiles, but a shift/reduce conflict still needs to be fixed.

parent 5a51c80a
This diff is collapsed.
;; -*-lisp-*-
;(rule
; (targets data_parser.ml)
; (deps (:input-file data_parser.dyp))
; (action (chdir %{project_root} (run %{bin:dypgen} --noemit-token-type --no-pp --no-obj-type --no-mli %{input-file})))
; )
;; Generate the lexers from data_lexer.mll and term_lexer.mll.
(ocamllex data_lexer term_lexer)
;; Menhir stanza for the stand-alone term parser.
(menhir
; (flags (--explain --table --compile-errors /home/pogodall/work/dev/ACGtk/src/grammars/term_parser.messages))
; NOTE(review): the (flags ...) field appeared twice in this stanza; dune
; rejects duplicate fields, so only the --trace variant (consistent with the
; message-generation rules below) is kept active.
; (flags (--explain --table))
(flags (--explain --table --trace))
(modules term_parser))
;; Menhir stanza merging the sub-grammars into a single data_parser module.
(menhir
; (flags (--explain --table --compile-errors /home/pogodall/work/dev/ACGtk/src/grammars/data_parser.messages))
(merge_into data_parser)
; NOTE(review): the (flags ...) field appeared twice in this stanza; dune
; rejects duplicate fields, so only the --trace variant (consistent with the
; message-generation rules below) is kept active.
; (flags (--explain --table))
(flags (--explain --table --trace))
(modules file_parser sig_parser lex_parser type_parser term2_parser))
;; Rule to generate the messages ml file
(rule
(targets messages.ml)
; promote: copy the generated messages.ml back into the source tree
(mode promote)
(deps
; require the update diff and the consistency check to pass first
(alias update)
(alias check)
(:message_file data_parser.messages.new)
(:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly)
)
(action
; menhir --compile-errors compiles the .messages file into an OCaml message table
(with-stdout-to messages.ml (run %{bin:menhir} --base data_parser --explain --table --trace --compile-errors %{message_file} %{parsers})))
)
;; Rule to generate the automatic message file
(rule
(targets data_parser.messages.automatic)
(deps (:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly))
(action
; menhir --list-errors enumerates every erroneous sentence with a default message
(with-stdout-to data_parser.messages.automatic (run %{bin:menhir} --base data_parser --explain --table --trace --list-errors %{parsers})))
)
;; Rule to generate the message file
(rule
(targets data_parser.messages.new)
(deps (:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly))
; menhir --update-errors refreshes the state numbers of the hand-written
; message file after the grammar changed
(action (with-stdout-to data_parser.messages.new (run %{bin:menhir} --base data_parser --explain --table --trace --update-errors data_parser.messages %{parsers}))
)
)
(alias
(name update)
; fails (showing the diff) when the refreshed message file differs from the
; committed one, prompting a manual update
(action (diff data_parser.messages data_parser.messages.new))
)
(alias
(name check)
(deps
data_parser.messages.automatic
data_parser.messages
(:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly)
)
; menhir --compare-errors (given twice: reference file, then candidate file)
; verifies that the hand-written messages cover every automatic error state
(action (run %{bin:menhir} --base data_parser --explain --table --trace --compare-errors data_parser.messages.automatic --compare-errors data_parser.messages %{parsers}))
)
;; This stanza declares the Grammar library
(library
(name grammars)
......
......@@ -68,7 +68,9 @@
Environment.get_lexicon name e
with
| Environment.Lexicon_not_found _ ->
emit_parse_error (Error.No_such_lexicon name) loc
emit_parse_error (Error.No_such_lexicon name) loc
%}
......@@ -93,8 +95,12 @@
%%
main:
| s=signature EOI { fun e -> s e }
| l=lexicon EOI { fun e -> l e }
| dec=sig_or_lex+ EOI { fun e -> List.fold_left (fun acc d -> d acc) e dec
}
sig_or_lex:
| s=signature { fun e -> s e }
| l=lexicon { fun e -> l e }
signature :
| SIG_OPEN id=IDENT EQUAL entries=separated_list(SEMICOLON,sig_entry) END_OF_DEC
......
(* This file was auto-generated based on "data_parser.messages". *)
(* This file was auto-generated based on "data_parser.messages.new". *)
(* Please note that the function [message] can raise [Not_found]. *)
......@@ -46,7 +46,7 @@ let message =
"An equality symbol \"=\" is expected.\n"
| 114 | 68 ->
"A signature entry (type declaration, type definition, term declaration, or term definition) is expected.\n"
| 156 | 65 ->
| 157 | 65 ->
"A declaration of a signature (keyword \"signature\") or of a lexicon (keyword \"lexicon\" or \"nl_lexicon\") is expected.\n"
| 123 ->
"An identifier (the name of a new lexicon) is expected.\n"
......@@ -82,11 +82,11 @@ let message =
"An expression representing the composition of lexicons is expected.\n"
| 153 ->
"The composition operator \"<<\" or a right parenthesis \")\" is expected.\n"
| 159 | 150 ->
| 150 ->
"The composition operator \"<<\" is expected.\n"
| 151 ->
"An identifier (the name of a lexicon), or an expression representing the composition of lexicons is expected.\n"
| 161 ->
| 163 ->
"An identifier or a keyword (\"infix\", \"prefix, or \"binder\") is expected.\n"
| 77 | 69 ->
"A symbol is expected.\n"
......@@ -118,7 +118,7 @@ let message =
"After a term or type declaration of the form \"<ident1>, <ident2>\", a type declaration of the form \": type;\" (where type is a keyword) or a typing judgment of the form \": <type>;\" is expected.\n"
| 117 ->
"After a term declaration of the form \"<term>: \", a type expression and a semicolon \"<type> ;\" are expected.\n"
| 163 ->
| 165 ->
"After a term declaration of the form \"<term>: <type>\", a semicolon \";\" is expected.\n"
| 98 ->
"An identidier (the name of the binder) is expected after the keyword \"binder\".\n"
......@@ -128,11 +128,11 @@ let message =
"A term is expected as right hand side of a term definition.\n"
| 101 ->
"A typing judgment in the form of \": <type>\" is expected after defining a binder.\n"
| 165 ->
"A typing judgment in the form of \"<term> : <type>\" is expected.\n"
| 167 ->
"A typing judgment in the form of \"<term> : <type>\" is expected.\n"
| 169 ->
"A typing judgement in the form of \": <type>\" is expected after a term.\n"
| 169 | 168 | 104 | 102 | 113 ->
| 171 | 170 | 104 | 102 | 113 ->
"A type expression is expected after \":\".\n"
| _ ->
raise Not_found
......@@ -4,6 +4,33 @@ open Environment
(* A short name for the incremental parser API. *)
(* Renders a [Data_parser] token as a human-readable string, used when
   tracing the token stream fed to the incremental parser. *)
let tok_to_string tok =
  let open Data_parser in
  match tok with
  | EOI -> "EOI"
  | LPAREN _ -> "LPAREN"
  | RPAREN _ -> "RPAREN"
  | SIG_OPEN _ -> "SIG_OPEN"
  | LEX_OPEN _ -> "LEX_OPEN"
  | NL_LEX_OPEN _ -> "NL_LEX_OPEN"
  | END_OF_DEC _ -> "END_OF_DEC"
  | IDENT (s, _) -> Printf.sprintf "IDENT (%s)" s
  | COLON _ -> "COLON"
  | EQUAL _ -> "EQUAL"
  | SEMICOLON _ -> "SEMICOLON"
  | COMPOSE _ -> "COMPOSE"
  | SYMBOL (s, _) -> Printf.sprintf "SYMBOL (%s)" s
  | COMMA _ -> "COMMA"
  | TYPE _ -> "TYPE"
  | PREFIX _ -> "PREFIX"
  | INFIX _ -> "INFIX"
  | BINDER _ -> "BINDER"
  | COLON_EQUAL _ -> "COLON_EQUAL"
  | LAMBDA _ -> "LAMBDA"
  | LAMBDA0 _ -> "LAMBDA0"
  | DOT _ -> "DOT"
  | ARROW _ -> "ARROW"
  | LIN_ARROW _ -> "LIN_ARROW"
module I = Data_parser.MenhirInterpreter
......@@ -30,6 +57,19 @@ let fail lexbuf (c : (Environment.t -> Environment.t) I.checkpoint) =
let loc = Lexing.lexeme_start_p lexbuf,Lexing.lexeme_end_p lexbuf in
raise Error.(Error (Parse_error (Syntax_error (""),loc)))
(* Builds a Menhir token supplier that draws tokens from [Data_lexer.lexer]
   reading from [lexbuf]. *)
let core_supplier lexbuf = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf
(*
let supplier lexbuf =
let sup () =
let (tok,_,_) as res = core_supplier lexbuf () in
let () = Printf.printf "Token: \"%s\"\n%!" (tok_to_string tok) in
res in
sup
*)
(* The supplier actually used by the parsing entry points; a commented-out
   tracing variant (printing each token) is kept above for debugging. *)
let supplier = core_supplier
let parse_data ?(override=false) ?(output=false) filename includes env =
try
......@@ -38,9 +78,8 @@ let parse_data ?(override=false) ?(output=false) filename includes env =
open_in fullname in
let lexbuf = Lexing.from_channel in_ch in
let () = Printf.printf "Parsing \"%s\"...\n%!" filename in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
let starting_parse_time = Sys.time () in
let e = (I.loop_handle succeed (fail lexbuf) supplier (Data_parser.Incremental.main lexbuf.lex_curr_p)) env in
let e = (I.loop_handle succeed (fail lexbuf) (supplier lexbuf) (Data_parser.Incremental.main lexbuf.lex_curr_p)) env in
let ending_parse_time = Sys.time () in
let () = Printf.printf "Done (required %.3f seconds).\n%!" (ending_parse_time -. starting_parse_time) in
let () = match output with
......@@ -82,9 +121,8 @@ let pp_error er t =
let parse_term ?(output=false) t sg =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
try
let abs_term,abs_type = I.loop supplier (Data_parser.Incremental.term_alone lexbuf.lex_curr_p) sg in
let abs_term,abs_type = I.loop (supplier lexbuf) (Data_parser.Incremental.term_alone lexbuf.lex_curr_p) sg in
let () =
match output with
| true ->
......@@ -116,11 +154,10 @@ let parse_term ?(output=false) t sg =
let parse_heterogenous_term ?(output=false) t lex =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
let abs,obj=Environment.Lexicon.get_sig lex in
try
let obj_term,abs_type =
I.loop supplier (Data_parser.Incremental.heterogenous_term_and_type lexbuf.lex_curr_p) abs obj in
I.loop (supplier lexbuf) (Data_parser.Incremental.heterogenous_term_and_type lexbuf.lex_curr_p) abs obj in
let abs_type=Environment.Signature1.convert_type abs_type abs in
let obj_type=Environment.Lexicon.interpret_type abs_type lex in
let obj_term=Environment.Signature1.typecheck obj_term obj_type obj in
......@@ -153,9 +190,8 @@ let parse_heterogenous_term ?(output=false) t lex =
let parse_sig_entry t sg =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
try
Some (I.loop supplier (Data_parser.Incremental.sig_entry_eoi lexbuf.lex_curr_p) sg )
Some (I.loop (supplier lexbuf) (Data_parser.Incremental.sig_entry_eoi lexbuf.lex_curr_p) sg )
with
| Error.Error er ->
let () = pp_error er t in
......@@ -165,9 +201,8 @@ let parse_heterogenous_term ?(output=false) t lex =
let parse_lex_entry t lex =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
try
Some (I.loop supplier (Data_parser.Incremental.lex_entry_eoi lexbuf.lex_curr_p) lex )
Some (I.loop (supplier lexbuf) (Data_parser.Incremental.lex_entry_eoi lexbuf.lex_curr_p) lex )
with
| Error.Error er ->
let () = pp_error er t in
......
......@@ -2,8 +2,6 @@
%token <Logic.Abstract_syntax.Abstract_syntax.location> LAMBDA0
%token <Logic.Abstract_syntax.Abstract_syntax.location> DOT
(*%start < UtilsLib.Utils.StringSet.t-> AcgData.Environment.Environment.Signature1.t -> Logic.Abstract_syntax.Abstract_syntax.term * Logic.Abstract_syntax.Abstract_syntax.location * (AcgData.Error.warning list) > term
*)
%start < AcgData.Environment.Environment.Signature1.t -> Logic.Lambda.Lambda.term * Logic.Lambda.Lambda.stype > term_alone
%start < AcgData.Environment.Environment.Signature1.t -> AcgData.Environment.Environment.Signature1.t -> Logic.Abstract_syntax.Abstract_syntax.term * Logic.Abstract_syntax.Abstract_syntax.type_def > heterogenous_term_and_type
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment