Commit c071b8ca authored by POGODALLA Sylvain

Dypgen dependency removed; dependency on menhir added. Moved to version 1.5.0. Compilation works and the first tests (local grammars and scripts, and TAG2ACG grammars and scripts) pass.
parent db752686
; -*-org-*-
* Version 1.5.0
** The acgc.opt/acgc compiler and the acg.opt/acg interpreter:
+ Some syntax changes:
1. Prefix operators have the highest priority
2. Infix operators now have a higher priority than application (*Note:
this may break existing code*; see the example after this list)
3. An associativity property (none, left, right) can be set on infix
operators (left is the default), as well as a precedence level
+ Removed the dependencies on BOLT (replaced by Logs) and dypgen (replaced by menhir)
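Example (illustrative, not part of the original entry): since infix
operators now bind tighter than application, an expression such as
=f x + y= is read as =f (x + y)=, whereas it was previously read as
=(f x) + y=.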
* Version 1.4.0
** The acgc.opt/acgc compiler and the acg.opt/acg interpreter:
......
......@@ -24,7 +24,7 @@
In order to compile the ACG toolkit, you need:
+ ocaml (>=4.03) installed (http://caml.inria.fr/)
+ dune installed (https://github.com/ocaml/dune)
+ dypgen (>=20080925) installed (http://dypgen.free.fr/)
+ menhir installed (http://gallium.inria.fr/~fpottier/menhir/)
+ ANSITerminal (>=0.6.5) installed (https://forge.ocamlcore.org/projects/ansiterminal/)
+ fmt installed (http://erratique.ch/software/fmt)
+ logs installed (http://erratique.ch/software/logs)
......@@ -41,7 +41,7 @@ please also install the free DejaVu fonts (http://dejavu-fonts.org).
=======
IMPORTANT:
A fast and easy way to install dypgen and all important ocaml libraries is
A fast and easy way to install all important ocaml libraries is
to use "opam" (http://opam.ocaml.org/).
The installation typically goes as follows:
......@@ -69,7 +69,7 @@ It will also install in the `.opam/OCAML_VERSION/share/acgtk` directory:
(Skip the following part if you installed acgtk as described above)
Alternatively, if you want to compile acgtk by yourself
+ first install the required libraries with the command
opam install dune dypgen ANSITerminal fmt logs cairo2 easy-format biniou yojson ocf
opam install dune menhir ANSITerminal fmt logs cairo2 easy-format biniou yojson ocf
+ download the ACGtk sources from http://acg.loria.fr/
To build the ACG toolkit, first run:
......
......@@ -13,7 +13,7 @@ A list of related publications is available at the [ACG web page](http://calligr
In order to compile the ACG toolkit, you need:
* `ocaml` (>=3.07) installed (http://ocaml.org/)
* `dune` installed (https://github.com/ocaml/dune)
* `dypgen` (>=20080925) installed (http://dypgen.free.fr/)
* `menhir` installed (http://gallium.inria.fr/~fpottier/menhir/)
* `ANSITerminal` (>=0.6.5) installed (https://github.com/Chris00/ANSITerminal)
* `fmt` installed (http://erratique.ch/software/fmt)
* `logs` installed (http://erratique.ch/software/logs)
......@@ -41,7 +41,7 @@ opam depext acgtk
## Installation with OPAM (preferred)
A fast and easy way to install `dypgen` and all important ocaml libraries is to use [opam](http://opam.ocaml.org/).
A fast and easy way to install all important ocaml libraries is to use [opam](http://opam.ocaml.org/).
The installation typically goes as follows:
1. first install [OCaml](http://ocaml.org/) using your preferred distribution/packaging mode
......@@ -61,7 +61,7 @@ It will also install in the `.opam/OCAML_VERSION/share/acgtk` directory:
Alternatively, if you want to compile acgtk by yourself
1. first install the required libraries with the command
```opam install dune dypgen ANSITerminal fmt logs cairo2 easy-format biniou yojson ocf```
```opam install dune menhir ANSITerminal fmt logs cairo2 easy-format biniou yojson ocf```
2. Download the ACGtk sources from http://acg.loria.fr/
To build the ACG toolkit, first run:
......
1.4.0
\ No newline at end of file
1.5.0
\ No newline at end of file
......@@ -8,7 +8,7 @@ build: [
depends: [
"dune" {build}
"dypgen"
"menhir"
"ANSITerminal"
"fmt"
"logs"
......
1.4.0
\ No newline at end of file
1.5.0
\ No newline at end of file
......@@ -4,7 +4,6 @@
wait;
# We start with the Montague examples.
load d montague-fr.acg;
......
......@@ -13,7 +13,7 @@ install: ["dune" "install"]
depends: [
"dune" {build}
"dypgen"
"menhir"
"ANSITerminal"
"fmt"
"logs"
......
......@@ -450,6 +450,13 @@ struct
Buffer.add_buffer buff (DatalogAbstractSyntax.Program.to_buffer (Datalog.Program.to_abstract p)) in
buff
let program_to_log src level lex =
match lex.datalog_prog with
| None -> Logs.msg ~src Logs.Warning (fun m -> m "This lexicon was not recognized as having a 2nd order abstract signature" )
| Some {prog=p} ->
let () = Logs.msg ~src level (fun m -> m "This lexicon is recognized as having a 2nd order abstract signature.") in
let () = Logs.msg ~src level (fun m -> m "The associated datalog program is:") in
DatalogAbstractSyntax.Program.log_content ~src level (Datalog.Program.to_abstract p)
let query_to_buffer term dist_type lex =
......@@ -485,7 +492,36 @@ struct
let () =
Printf.bprintf buff "Parsing is not yet implemented for non atomic distinguished type\n%!" in
buff
let query_to_log src level term dist_type lex =
match lex.datalog_prog,Sg.expand_type dist_type lex.abstract_sig with
| None,_ -> Logs.msg ~src Logs.Warning (fun m -> m "Parsing is not implemented for non 2nd order ACG")
| Some {prog}, (Lambda.Atom _ as dist_type) ->
let dist_type_image = interpret_type dist_type lex in
let obj_term=
Sg.eta_long_form
(Lambda.normalize
~id_to_term:(fun i -> Sg.unfold_term_definition i lex.object_sig)
(Signature.expand_term term lex.object_sig))
dist_type_image
lex.object_sig in
let obj_princ_type,obj_typing_env = TypeInference.Type.inference obj_term in
let query,temp_prog =
Reduction.edb_and_query
~obj_term
~obj_type:obj_princ_type
~obj_typing_env
~dist_type
prog
~abs_sig:lex.abstract_sig
~obj_sig:lex.object_sig in
let () = Datalog.Program.edb_to_log src level temp_prog in
let () = Logs.msg ~src level (fun m -> m "Query:") in
Logs.msg ~src level (fun m -> m "@;<4>%s?" (DatalogAbstractSyntax.Predicate.to_string query temp_prog.Datalog.Program.pred_table temp_prog.Datalog.Program.const_table))
| Some _ , _ ->
Logs.msg ~src Logs.Warning (fun m -> m "Parsing is not yet implemented for non atomic distinguished type")
(* let timestamp lex = {lex with timestamp=Unix.time ()} *)
......
......@@ -109,7 +109,9 @@ sig
val get_analysis : resume -> t -> Lambda.term option * resume
val compose: t -> t -> (string*Abstract_syntax.location) -> t
val program_to_buffer : t -> Buffer.t
val program_to_log : Logs.src -> Logs.level -> t -> unit
val query_to_buffer : Signature.term -> Signature.stype -> t -> Buffer.t
val query_to_log : Logs.src -> Logs.level -> Signature.term -> Signature.stype -> t -> unit
val interpret_linear_arrow_as_non_linear : t -> bool
val update : t -> (string -> data) -> t
end
......@@ -276,16 +276,36 @@ module type Lexicon_sig =
resumption. Otherwise it returns [(None,r')].*)
val get_analysis : resume -> t -> Lambda.term option * resume
(** [compose l2 l1 (name,loc)] returns a new lexicon which is the composition of [l2] and [l1 ] ([l2] after [l1]) such that the abstract signature of the resulting lexicon is the same as the one of [l1] and its object signature is the same as the one of [l2].*)
(** [compose l2 l1 (name,loc)] returns a new lexicon which is the
composition of [l2] and [l1 ] ([l2] after [l1]) such that the
abstract signature of the resulting lexicon is the same as the one
of [l1] and its object signature is the same as the one of [l2].*)
val compose: t -> t -> (string*Abstract_syntax.location) -> t
(** [program_to_buffer l] returns a buffer containing a parsable version of [l]*)
val program_to_buffer : t -> Buffer.t
(** [query_to_buffer te ty l] returns a buffer containing a datalog query corresponding to the (object) term [te] and the (abstract) type [ty] to be parsed with respect to [l].*)
(** [program_to_log src level l] logs the content of [l] according
to the source [src] and the level [level]*)
val program_to_log : Logs.src -> Logs.level -> t -> unit
(** [query_to_buffer te ty l] returns a buffer containing a datalog
query corresponding to the (object) term [te] and the (abstract)
type [ty] to be parsed with respect to [l].*)
val query_to_buffer : Signature.term -> Signature.stype -> t -> Buffer.t
(** [interpret_linear_arrow_as_non_linear lex] returns [True] if [lex] has been defined as a non-linear lexicon, i.e., a lexicon that interprets all arrows (and lambdas) as non-linear ones. It returns [False] otherwise.*)
(** [query_to_log src level te ty l] logs the datalog query
corresponding to the (object) term [te] and the (abstract) type [ty]
to be parsed with respect to [l] on the [src] source according to
the [level] level.*)
val query_to_log : Logs.src -> Logs.level -> Signature.term -> Signature.stype -> t -> unit
(** [interpret_linear_arrow_as_non_linear lex] returns [True] if
[lex] has been defined as a non-linear lexicon, i.e., a lexicon that
interprets all arrows (and lambdas) as non-linear ones. It returns
[False] otherwise.*)
val interpret_linear_arrow_as_non_linear : t -> bool
val update : t -> (string -> data) -> t
......
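Since the new `program_to_log` and `query_to_log` entry points only take a Logs source and a level, a minimal usage sketch may help (not part of this commit; the functor, the source name and the values `lex`, `term`, `ty` are illustrative, only the Logs calls and the signatures above come from the sources):

```ocaml
(* Hypothetical sketch: [Lexicon_sig] is the module type extended by this
   patch; [Lex] is any module implementing it. *)
module Use (Lex : Lexicon_sig) = struct
  (* A dedicated Logs source for lexicon-related messages. *)
  let lexicon_src = Logs.Src.create "acgtk.lexicon" ~doc:"ACG lexicon events"

  let dump lex term ty =
    (* Log the datalog program attached to the lexicon, then the datalog
       query built from the object term [term] and the abstract type [ty]. *)
    Lex.program_to_log lexicon_src Logs.Debug lex;
    Lex.query_to_log lexicon_src Logs.Debug term ty lex
end

(* Reporting has to be set up once, e.g. with the formatter shipped with Logs. *)
let () =
  Logs.set_reporter (Logs.format_reporter ());
  Logs.set_level ~all:true (Some Logs.Debug)
```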
......@@ -139,6 +139,7 @@ module type Datalog_Sig=
val edb_to_buffer : program -> Buffer.t
val edb_to_log : Logs.src -> Logs.level -> program -> unit
end
......@@ -1363,7 +1364,14 @@ module Make (S:UnionFind.Store) =
prog.edb_facts in
buff
let edb_to_log src level prog =
Predicate.PredMap.iter
(fun _ facts ->
Predicate.FactSet.iter
(fun fact -> Logs.msg ~src level (fun m -> m "@;<4>%s." (ASPred.to_string fact prog.pred_table prog.const_table)))
facts)
prog.edb_facts
end
......
......@@ -139,6 +139,7 @@ sig
val build_forest : ?query:Datalog_AbstractSyntax.AbstractSyntax.Predicate.predicate -> Predicate.PremiseSet.t Predicate.PredicateMap.t -> program -> int SharedForest.SharedForest.tree list list
val edb_to_buffer : program -> Buffer.t
val edb_to_log : Logs.src -> Logs.level -> program -> unit
end
end
......
......@@ -150,6 +150,12 @@ module AbstractSyntax =
(Printf.sprintf "%s\n" (to_string r pred_id_table cst_id_table)))
rules in
buff
let to_log rules pred_id_table cst_id_table src level =
List.iter
(fun r -> Logs.msg ~src level (fun m -> m "@;<4>%s" (to_string r pred_id_table cst_id_table)))
rules
end
module Rule =
......@@ -202,6 +208,13 @@ module AbstractSyntax =
rules in
buff
let to_log rules pred_id_table cst_id_table src level =
Rules.iter
(fun r -> Logs.msg ~src level (fun m -> m "@;<4>%s" (to_string r pred_id_table cst_id_table)))
rules
let init_split_rhs proto_preds intensional_pred =
let i_num,i_p,e_p,_=
List.fold_left
......@@ -409,17 +422,22 @@ module AbstractSyntax =
(fun elt -> Buffer.add_string buff (Printf.sprintf "\t%s\n%!" (Predicate.PredIdTable.find_sym_from_id elt prog.pred_table)))
prog.i_preds in
buff
let log_content level prog =
let () = Log.msg level (fun m -> m "Intensional predicates are:") in
Log.msg level (fun m ->
let () = Predicate.PredIds.iter
(fun elt -> Log.msg level (fun m -> m "\t%s" (Predicate.PredIdTable.find_sym_from_id elt prog.pred_table)))
prog.i_preds in
m "Done.")
let log_content ?(src=Logs.default) level prog =
let () = Rule.to_log prog.rules prog.pred_table prog.const_table src level in
let () = Logs.msg ~src level (fun m -> m "Intensional predicates are:") in
let () = Predicate.PredIds.iter
(fun elt ->
Logs.msg
~src
level
(fun m -> m "@;<4>%s" (Predicate.PredIdTable.find_sym_from_id elt prog.pred_table)))
prog.i_preds in
Logs.msg
~src
level
(fun m -> m "Done.")
end
end
end
......@@ -115,7 +115,7 @@ sig
val make_program : Proto_Program.t -> program
val extend : program -> Proto_Program.t -> program
val to_buffer : program -> Buffer.t
val log_content : Logs.level -> program -> unit
val log_content : ?src:Logs.src -> Logs.level -> program -> unit
end
end
......
......@@ -5,7 +5,7 @@
* TODO Add (+) notation style to prevent the infix use of +
* TODO Add syntactic extensions to handle associativity and precedence of infix operators
* TODO Add error (and warning) message management to the Error module, and distinguish between "real" parsing errors and errors resulting from evaluating the parsing against some environment. The latter can be output all together (maybe with some limit to be set)
* TODO Move from dypgen to menhir in scripts
* DONE Move from dypgen to menhir in scripts
* TODO change README and INSTALL files:
+ [ ] to indicate new dependencies (menhir, logs...)
+ [X] to indicate new dependencies (menhir, logs...)
+ [ ] to document syntax extensions for infix operators, precedence of the latter over application. Highest precedence for prefix operators
......@@ -4,15 +4,15 @@
(ocamllex data_lexer term_lexer)
(menhir
; (flags (--explain --table --compile-errors /home/pogodall/work/dev/ACGtk/src/grammars/term_parser.messages))
; (flags (--explain --table --compile-errors /home/pogodall/work/dev/ACGtk/src/grammars/term_parser_test.messages))
(flags (--explain --table))
(modules term_parser))
(modules term_parser_test))
(menhir
; (flags (--explain --table --compile-errors /home/pogodall/work/dev/ACGtk/src/grammars/data_parser.messages))
(merge_into data_parser)
(flags (--explain --table))
(modules file_parser sig_parser lex_parser type_parser term2_parser))
(modules file_parser sig_parser lex_parser type_parser term_parser))
;; Rule to generate the messages ml file
......@@ -23,7 +23,7 @@
(alias update)
(alias check)
(:message_file data_parser.messages.new)
(:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly)
(:parsers file_parser.mly lex_parser.mly sig_parser.mly term_parser.mly type_parser.mly)
)
(action
(with-stdout-to messages.ml (run %{bin:menhir} --base data_parser --explain --table --compile-errors %{message_file} %{parsers})))
......@@ -32,7 +32,7 @@
;; Rule to generate the automatic message file
(rule
(targets data_parser.messages.automatic)
(deps (:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly))
(deps (:parsers file_parser.mly lex_parser.mly sig_parser.mly term_parser.mly type_parser.mly))
(action
(with-stdout-to data_parser.messages.automatic (run %{bin:menhir} --base data_parser --explain --table --list-errors %{parsers})))
)
......@@ -40,7 +40,7 @@
;; Rule to generate the message file
(rule
(targets data_parser.messages.new)
(deps (:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly))
(deps (:parsers file_parser.mly lex_parser.mly sig_parser.mly term_parser.mly type_parser.mly))
(action (with-stdout-to data_parser.messages.new (run %{bin:menhir} --base data_parser --explain --table --update-errors data_parser.messages %{parsers}))
)
)
......@@ -55,7 +55,7 @@
(deps
data_parser.messages.automatic
data_parser.messages
(:parsers file_parser.mly lex_parser.mly sig_parser.mly term2_parser.mly type_parser.mly)
(:parsers file_parser.mly lex_parser.mly sig_parser.mly term_parser.mly type_parser.mly)
)
(action (run %{bin:menhir} --base data_parser --explain --table --compare-errors data_parser.messages.automatic --compare-errors data_parser.messages %{parsers}))
)
......
open UtilsLib
open AcgData
open Environment
open Logic
(* A short name for the incremental parser API. *)
......@@ -43,8 +44,8 @@ module I = Data_parser.MenhirInterpreter
let succeed (data : (?overwrite:bool -> Environment.t -> Environment.t)) =
(* The parser has succeeded and produced a semantic value. *)
data
let fail lexbuf (c : (?overwrite:bool -> Environment.t -> Environment.t) I.checkpoint) =
let fail lexbuf c =
(* The parser has suspended itself because of a syntax error. Stop. *)
match c with
| I.HandlingError env ->
......@@ -56,7 +57,6 @@ let fail lexbuf (c : (?overwrite:bool -> Environment.t -> Environment.t) I.check
let loc = Lexing.lexeme_start_p lexbuf,Lexing.lexeme_end_p lexbuf in
raise Error.(Error (Parse_error (Syntax_error (""),loc)))
let core_supplier lexbuf = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf
(*
......@@ -95,14 +95,14 @@ let parse_data ?(overwrite=false) ?(output=false) filename includes env =
with
| Utils.No_file(f,msg) ->
let e = Error.System_error (Printf.sprintf "No such file \"%s\" in %s" f msg) in
let () = Logs.err (fun m -> m "Error: %s\n%!" (Error.error_msg e filename)) in
let () = Logs.err (fun m -> m "%s" (Error.error_msg e filename)) in
None
| Sys_error s ->
let e = Error.System_error s in
let () = Logs.err (fun m -> m "Error: %s\n%!" (Error.error_msg e filename)) in
let () = Logs.err (fun m -> m "%s" (Error.error_msg e filename)) in
None
| Error.Error e ->
let () = Logs.err (fun m -> m "Error: %s\n%!" (Error.error_msg e filename)) in
let () = Logs.err (fun m -> m "%s" (Error.error_msg e filename)) in
None
let pp_error er t =
......@@ -114,13 +114,13 @@ let pp_error er t =
let t_error = Utils.red (String.sub t s' (e'-s')) in
let end_start_index = (s' + (e'-s')) in
let t_end = String.sub t end_start_index ((String.length t) - end_start_index) in
let () = Printf.fprintf stderr "%s%s%s\n" t_init t_error t_end in
Logs.err (fun m -> m "Error: %s\n%!" (Error.error_msg er "stdin"))
let () = Logs.err (fun m -> m "%s%s%s" t_init t_error t_end) in
Logs.err (fun m -> m "%s" (Error.error_msg er "stdin"))
let parse_term ?(output=false) t sg =
let lexbuf = Lexing.from_string t in
try
let abs_term,abs_type = I.loop (supplier lexbuf) (Data_parser.Incremental.term_alone lexbuf.lex_curr_p) sg in
let abs_term,abs_type = I.loop_handle (fun x -> x) (fail lexbuf) (supplier lexbuf) (Data_parser.Incremental.term_alone lexbuf.lex_curr_p) sg in
let () =
match output with
| true ->
......@@ -155,7 +155,7 @@ let parse_heterogenous_term ?(output=false) t lex =
let abs,obj=Environment.Lexicon.get_sig lex in
try
let obj_term,abs_type =
I.loop (supplier lexbuf) (Data_parser.Incremental.heterogenous_term_and_type lexbuf.lex_curr_p) abs obj in
I.loop_handle (fun x -> x) (fail lexbuf) (supplier lexbuf) (Data_parser.Incremental.heterogenous_term_and_type lexbuf.lex_curr_p) abs obj in
let abs_type=Environment.Signature1.convert_type abs_type abs in
let obj_type=Environment.Lexicon.interpret_type abs_type lex in
let obj_term=Environment.Signature1.typecheck obj_term obj_type obj in
......
{
open Term_parser
open Term_parser_test
exception Error of string
let loc lexbuf = Lexing.lexeme_start_p lexbuf,Lexing.lexeme_end_p lexbuf
......
%{ open Term_sequence_parser
%}
%token <string*(Lexing.position*Lexing.position)> IDENT SYMBOL
%token LAMBDA DOT LPAREN RPAREN
%token EOI
%start <Term_sequence_parser.term> main
%%
main:
| t = term1 EOI
{ t }
term0:
| id = IDENT
{ Term (Var (fst id)) }
| id = SYMBOL
{ let name = fst id in
Op (name,Term_sequence_parser.(get_fixity name test_sig)) }
| LPAREN t = term1 RPAREN
{ Term t }
term1:
| terms = term0+
{ let token,stream=next terms in
parse_sequence [] token stream}
| LAMBDA vars = IDENT+ DOT t = term1
{ (List.fold_left
(fun acc (var,_) ->
(fun t -> acc (Abs (var,t))))
(fun x -> x)
vars)
t }
......@@ -19,7 +19,7 @@ let message =
(* A short name for the incremental parser API. *)
module I = Grammars.Term_parser.MenhirInterpreter
module I = Grammars.Term_parser_test.MenhirInterpreter
(* -------------------------------------------------------------------------- *)
......@@ -54,7 +54,7 @@ let loop lexbuf result =
let process (line : string) =
let lexbuf = from_string line in
try
loop lexbuf (Grammars.Term_parser.Incremental.main lexbuf.lex_curr_p)
loop lexbuf (Grammars.Term_parser_test.Incremental.main lexbuf.lex_curr_p)
with
| Grammars.Term_lexer.Error msg ->
Printf.fprintf stderr "%s%!" msg
......
......@@ -101,7 +101,8 @@ let print_welcome_message () =
let anon_fun s =
let () = print_welcome_message () in
let () = resize_terminal () in
env := snd (P.parse_file ?svg_output:!svg_output s !dirs !env)
let ctx = F.context ~wait:false ~echo:true ~svg:!svg_output ~dirs:!dirs ~parse_fun:P.parse_file in
env := snd (P.parse_file s ctx !env)
let invite () =
......@@ -120,7 +121,8 @@ let main first =
let () = resize_terminal () in
while !continue do
try
let () = env := snd (P.parse_entry ~resize:!pp_output ?svg_output:!svg_output stdin_tmp_in_ch !dirs !env) in
let ctx = F.context ~wait:false ~echo:true ~svg:!svg_output ~dirs:!dirs ~parse_fun:P.parse_file in
let () = env := snd (P.parse_entry ~resize:!pp_output stdin_tmp_in_ch ctx !env) in
Format.print_flush ()
with
| End_of_file ->
......
......@@ -27,7 +27,7 @@ sig
type env
type context
exception Not_yet_implemented of string
(* exception Not_yet_implemented of string*)
exception Stop
exception Quit
......@@ -58,13 +58,13 @@ sig
type file_type =
| Data
| Object
| Script of (string -> string list -> context * env -> context * env)
| Script of (string -> context * env -> context * env)
val color_output : bool -> unit
val set_config : string -> string list -> unit
val load : file_type -> string -> string list -> context * env -> context * env
val load : file_type -> string -> context * env -> context * env
val list : env -> unit
......@@ -72,8 +72,8 @@ sig
val unselect : env -> env
val trace : unit -> unit
val dont_trace : unit -> unit
val trace : (Lexing.position * Lexing.position) -> unit
val dont_trace : (Lexing.position * Lexing.position) -> unit
val print : ?name:string -> env -> (Lexing.position * Lexing.position) -> unit
......@@ -98,7 +98,7 @@ sig
string * (Lexing.position * Lexing.position) ->
string * (Lexing.position * Lexing.position) -> env -> env
val context : wait:bool -> echo:bool -> svg:string option -> dirs:string list -> parse_fun:(?verbose:bool -> ?svg_output:string -> string -> string list -> Environment.t -> context * Environment.t) -> context
val context : wait:bool -> echo:bool -> svg:string option -> dirs:string list -> parse_fun:(string -> context -> Environment.t -> context * Environment.t) -> context
val wait : context -> context
......@@ -112,7 +112,7 @@ sig
val dirs : context -> string list
val parse_script : context -> (?verbose:bool -> ?svg_output:string -> string -> string list -> Environment.t -> context * Environment.t)
val parse_script : context -> (string -> context -> Environment.t -> context * Environment.t)
val help : action -> unit
......
......@@ -10,11 +10,11 @@ module Error = AcgData.Error
replaced with the following code, which exploits the functions
[lexer_lexbuf_to_supplier] and [loop_handle] offered by Menhir. *)
let succeed (data : (Functions.context -> Environment.t -> Functions.context * Environment.t)) =
let succeed (data : (Functions.context * Environment.t -> Functions.context * Environment.t)) =
(* The parser has succeeded and produced a semantic value. *)
data
let fail lexbuf (c : (Functions.context -> Environment.t -> Functions.context * Environment.t) I.checkpoint) =
let fail lexbuf (c : (Functions.context * Environment.t -> Functions.context * Environment.t) I.checkpoint) =
(* The parser has suspended itself because of a syntax error. Stop. *)
match c with
| I.HandlingError env ->
......@@ -29,45 +29,40 @@ let fail lexbuf (c : (Functions.context -> Environment.t -> Functions.context *
let core_supplier lexbuf = I.lexer_lexbuf_to_supplier Script_lexer.lexer lexbuf
(*
let supplier lexbuf =
let sup () =
let (tok,_,_) as res = core_supplier lexbuf () in
let () = Printf.printf "Token: \"%s\"\n%!" (tok_to_string tok) in
res in
sup
*)
let supplier = core_supplier
let rec parse_file ?(verbose=true) ?svg_output filename includes env =
let ctx = Functions.context ~wait:false ~echo:verbose ~dirs:includes ~svg:svg_output ~parse_fun:parse_file in
let rec parse_file filename ctx env =
try
let in_ch =
let fullname = UtilsLib.Utils.find_file filename includes in
let fullname = UtilsLib.Utils.find_file filename (Functions.dirs ctx) in
open_in fullname in
let lexbuf = Lexing.from_channel in_ch in
let () = Printf.printf "Parsing script file \"%s\"...\n%!" filename in
let ctx',new_env= (I.loop_handle succeed (fail lexbuf) (supplier lexbuf) (Command_parser.Incremental.commands lexbuf.lex_curr_p)) ctx env in
let () = Printf.printf "Done.\n%!" in
let () = Logs.app (fun m -> m "Parsing script file \"%s\"..." filename) in
let ctx',new_env= (I.loop_handle
succeed
(fail lexbuf)
(supplier lexbuf)
(Command_parser.Incremental.commands lexbuf.lex_curr_p))
(ctx,env) in
let () = Logs.app (fun m -> m "Done.") in
ctx',new_env
with
| UtilsLib.Utils.No_file(f,msg) ->
let e = AcgData.Error.System_error (Printf.sprintf "No such file \"%s\" in %s" f msg) in
let () = Printf.fprintf stderr "Error: %s\n%!" (AcgData.Error.error_msg e filename) in
let () = Logs.err (fun m -> m "%s" (AcgData.Error.error_msg e filename)) in
let _ = Script_lexer.reset_echo () in
ctx,env
| Sys_error s ->
let e = AcgData.Error.System_error s in
let () = Printf.fprintf stderr "Error: %s\n%!" (AcgData.Error.error_msg e filename) in
let () = Logs.err (fun m -> m "%s" (AcgData.Error.error_msg e filename)) in
let _ = Script_lexer.reset_echo () in
ctx,env
| AcgData.Error.Error e ->
let () = Printf.fprintf stderr "Error: %s\n%!" (AcgData.Error.error_msg e filename) in
let () = Logs.err (fun m -> m "%s" (AcgData.Error.error_msg e filename)) in
let _ = Script_lexer.reset_echo () in
ctx,env
| Scripting_errors.Error (e,p) ->
let () = Printf.fprintf stderr "Error: %s\n%!" (Scripting_errors.error_msg e p) in
let () = Logs.err (fun m -> m "%s" (Scripting_errors.error_msg e p)) in
let _ = Script_lexer.reset_echo () in
ctx,env
......@@ -103,7 +98,7 @@ let bufferize in_ch =
Buffer.contents buf
let parse_entry ~resize ?svg_output ?(verbose=true) in_ch includes env =
let parse_entry ~resize in_ch ctx env =
let in_str = bufferize in_ch in
let lexbuf = Lexing.from_string in_str in
let () =
......@@ -112,18 +107,22 @@ let parse_entry ~resize ?svg_output ?(verbose=true) in_ch includes env =
UtilsLib.Utils.term_set_size ()
else
() in
let ctx = Functions.context ~wait:false ~echo:verbose ~svg:svg_output ~dirs:includes ~parse_fun:parse_file in
try
(I.loop_handle succeed (fail lexbuf) (supplier lexbuf) (Command_parser.Incremental.commands lexbuf.lex_curr_p)) ctx env
(I.loop_handle
succeed
(fail lexbuf)
(supplier lexbuf)
(Command_parser.Incremental.commands lexbuf.lex_curr_p))
(ctx,env)
with
| Functions.Stop -> ctx,env
| Failure f when f="lexing: empty token" -> ctx,env
| AcgData.Error.Error e ->
let () = Printf.fprintf stderr "Error: %s\n%!" (AcgData.Error.error_msg e "stdin") in
let () = Logs.err (fun m -> m "%s" (AcgData.Error.error_msg e "stdin")) in
let _ = Script_lexer.reset_echo () in
ctx,env
| Scripting_errors.Error (e,p) ->
let () = Printf.fprintf stderr "Error: %s\n%!" (Scripting_errors.error_msg e p) in
let () = Logs.err (fun m -> m "%s" (Scripting_errors.error_msg e p)) in
let _ = Script_lexer.reset_echo () in
ctx,env