Commit 70328180 authored by POTTIER Francois's avatar POTTIER Francois

New demo [calc-inspection-dune].

parent 2faca505
......@@ -17,6 +17,7 @@ DEMOS := \
generate-printers \
calc-dune \
calc-incremental-dune \
calc-inspection-dune \
.PHONY: all clean
......
open Parser
open Parser.MenhirInterpreter
(* In order to submit artificial tokens to the parser, we need a function that
   converts a terminal symbol to a (dummy) token. Unfortunately, we cannot (in
   general) auto-generate this code, because it requires making up semantic
   values of arbitrary OCaml types. *)

let terminal2token (type a) (symbol : a terminal) : token =
  match symbol with
  | T_DIV ->
      DIV
  | T_EOL ->
      EOL
  | T_INT ->
      INT 0 (* a made-up semantic value *)
  | T_LPAREN ->
      LPAREN
  | T_MINUS ->
      MINUS
  | T_PLUS ->
      PLUS
  | T_RPAREN ->
      RPAREN
  | T_TIMES ->
      TIMES
  | T_error ->
      (* [error] is a pseudo-token; the parser never requests it. *)
      assert false
open Parser.MenhirInterpreter

(* This module offers the functionality required by the functor
[ErrorReporting.Printers.Make]. *)

(* [terminal2token t] converts the terminal symbol [t] to a token carrying
a dummy semantic value; it is needed in order to submit artificial
tokens to the parser. *)
val terminal2token: _ terminal -> token
open Parser.MenhirInterpreter
(* In order to print syntax error messages and/or debugging information, we
   need a symbol printer. *)

let print_symbol symbol : string =
  match symbol with
  | X (T T_DIV) ->
      "/"
  | X (T T_EOL) ->
      "EOL"
  | X (T T_INT) ->
      "INT"
  | X (T T_LPAREN) ->
      "("
  | X (T T_MINUS) ->
      "-"
  | X (T T_PLUS) ->
      "+"
  | X (T T_RPAREN) ->
      ")"
  | X (T T_TIMES) ->
      "*"
  | X (T T_error) ->
      "error"
  | X (N N_expr) ->
      "expr"
  | X (N N_main) ->
      "main"
(* In order to print a view of the stack that includes semantic values,
   we need an element printer. (If we don't need this feature, then
   [print_symbol] above suffices.) *)

let print_element element : string =
  match element with
  | Element (state, value, _, _) ->
      begin match incoming_symbol state with
      | T T_DIV ->
          "/"
      | T T_EOL ->
          ""
      | T T_INT ->
          string_of_int value
      | T T_LPAREN ->
          "("
      | T T_MINUS ->
          "-"
      | T T_PLUS ->
          "+"
      | T T_RPAREN ->
          ")"
      | T T_TIMES ->
          "*"
      | T T_error ->
          "error"
      | N N_expr ->
          string_of_int value
      | N N_main ->
          string_of_int value
      end
(* The public functions. Everything is printed on the standard error
   channel, via [prerr_string]. *)

let print s = prerr_string s

let print_symbol symbol = print (print_symbol symbol)

let print_element = Some (fun element -> print (print_element element))
open Parser.MenhirInterpreter

(* This module offers the functionality required by the functor
[MenhirLib.Printers.Make]. *)

(* [print s] emits the string [s]. *)
val print: string -> unit

(* [print_symbol s] prints a concise textual representation of the
symbol [s]. *)
val print_symbol: xsymbol -> unit

(* An optional element printer, which shows a stack element together with
its semantic value. *)
val print_element: (element -> unit) option
module Make
(I : MenhirLib.IncrementalEngine.EVERYTHING)
(User : sig
(* In order to submit artificial tokens to the parser, we need a function
that converts a terminal symbol to a token. Unfortunately, we cannot
(in general) auto-generate this code, because it requires making up
semantic values of arbitrary OCaml types. *)
val terminal2token: _ I.terminal -> I.token
end)
= struct
open MenhirLib.General
open I
open User
(* ------------------------------------------------------------------------ *)
(* Explanations. *)
(* An explanation pairs an LR(0) item with the positions of the symbols
that appear before the item's bullet, i.e., the part of the input that
has already been read. *)
type explanation = {
item: item;
past: (xsymbol * Lexing.position * Lexing.position) list
}
(* Accessors. *)
let item explanation =
explanation.item
let past explanation =
explanation.past
(* The future is the part of the item's right-hand side that follows the
bullet: it is obtained by dropping [index] symbols off the production's
right-hand side. *)
let future explanation =
let prod, index = explanation.item in
let rhs = rhs prod in
drop index rhs
(* The goal is the left-hand side of the item's production. *)
let goal explanation =
let prod, _ = explanation.item in
lhs prod
(* ------------------------------------------------------------------------ *)
(* [items_current env] assumes that [env] is not an initial state (which
implies that the stack is non-empty). Under this assumption, it extracts
the automaton's current state, i.e., the LR(1) state found in the top
stack cell. It then goes through [items] so as to obtain the LR(0) items
associated with this state. *)
let items_current env : item list =
(* Get the current state. *)
match Lazy.force (stack env) with
| Nil ->
(* If we get here, then the stack is empty, which means the parser
is in an initial state. This should not happen. *)
invalid_arg "items_current" (* TEMPORARY it DOES happen! *)
| Cons (Element (current, _, _, _), _) ->
(* Extract the current state out of the top stack element, and
convert it to a set of LR(0) items. Returning a set of items
instead of an ['a lr1state] is convenient; returning [current]
would require wrapping it in an existential type. *)
items current
(* [is_shift_item t item] determines whether [item] justifies a shift
transition along the terminal symbol [t]. *)
let is_shift_item (t : _ terminal) (prod, index) : bool =
let rhs = rhs prod in
let length = List.length rhs in
(* The bullet of a shift item lies strictly inside the right-hand side. *)
assert (0 < index && index <= length);
(* We test that there is one symbol after the bullet and this symbol
is [t] or can generate a word that begins with [t]. (Note that we
don't need to worry about the case where this symbol is nullable
and [t] is generated by the following symbol. In that situation,
we would have to reduce before we can shift [t].) *)
index < length && xfirst (List.nth rhs index) t
(* Explanations are compared by their items only; this ordering is used
by [weed] in [investigate] below to eliminate duplicates. *)
let compare_explanations x1 x2 =
let c = compare_items x1.item x2.item in
(* TEMPORARY checking that if [c] is 0 then the positions are the same *)
assert (
c <> 0 || List.for_all2 (fun (_, start1, end1) (_, start2, end2) ->
start1.Lexing.pos_cnum = start2.Lexing.pos_cnum &&
end1.Lexing.pos_cnum = end2.Lexing.pos_cnum
) x1.past x2.past
);
c
(* [marry past stack] walks the symbol list [past] and the parser's
[stack] in lockstep, annotating each symbol with the start and end
positions found in the corresponding stack cell. The stack must be at
least as deep as [past]; otherwise the last clause fails. *)
let rec marry past stack =
match past, stack with
| [], _ ->
[]
| symbol :: past, lazy (Cons (Element (s, _, startp, endp), stack)) ->
(* The symbol in [past] must agree with the incoming symbol of the
corresponding stack cell. *)
assert (compare_symbols symbol (X (incoming_symbol s)) = 0);
(symbol, startp, endp) :: marry past stack
| _ :: _, lazy Nil ->
assert false
(* [accumulate t env explanations] is called if the parser decides to shift
the test token [t]. The parameter [env] describes the parser configuration
before it shifts this token. (Some reductions have taken place.) We use the
shift items found in [env] to produce new explanations. *)
let accumulate (t : _ terminal) env explanations =
(* The parser is about to shift, which means it is willing to
consume the terminal symbol [t]. In the state before the
transition, look at the items that justify shifting [t].
We view these items as explanations: they explain what
we have read and what we expect to read. *)
let stack = stack env in
List.fold_left (fun explanations item ->
if is_shift_item t item then
let prod, index = item in
let rhs = rhs prod in
{
item = item;
(* The past is the first [index] symbols of the right-hand side,
annotated with positions taken from the stack. *)
past = List.rev (marry (List.rev (take index rhs)) stack)
} :: explanations
else
explanations
) explanations (items_current env)
(* TEMPORARY [env] may be an initial state!
violating [item_current]'s precondition *)
(* [investigate pos checkpoint] assumes that [checkpoint] is of the form
[InputNeeded _]. For every terminal symbol [t], it investigates
how the parser reacts when fed the symbol [t], and returns a list
of explanations. The position [pos] is where a syntax error was
detected; it is used when manufacturing dummy tokens. This is
important because the position of the dummy token may end up in
the explanations that we produce. *)
let investigate pos (checkpoint : _ checkpoint) : explanation list =
weed compare_explanations (
foreach_terminal_but_error (fun symbol explanations ->
match symbol with
| X (N _) -> assert false
| X (T t) ->
(* Build a dummy token for the terminal symbol [t]. *)
let token = (terminal2token t, pos, pos) in
(* Submit it to the parser. Accumulate explanations. *)
match shifts (offer checkpoint token) with
| None ->
explanations
| Some env ->
accumulate t env explanations
) []
)
(* We drive the parser in the usual way, but record the last [InputNeeded]
checkpoint. If a syntax error is detected, we go back to this checkpoint
and analyze it in order to produce a meaningful diagnostic. *)
exception Error of (Lexing.position * Lexing.position) * explanation list
let entry (start : 'a I.checkpoint) lexer lexbuf =
let fail (inputneeded : 'a I.checkpoint) (checkpoint : 'a I.checkpoint) =
(* The parser signals a syntax error. Note the position of the
problematic token, which is useful. Then, go back to the
last [InputNeeded] checkpoint and investigate. *)
match checkpoint with
| HandlingError env ->
let (startp, _) as positions = positions env in
raise (Error (positions, investigate startp inputneeded))
| _ ->
assert false
in
I.loop_handle_undo
(fun v -> v)
fail
(lexer_lexbuf_to_supplier lexer lexbuf)
start
(* TEMPORARY could also publish a list of the terminal symbols that
do not cause an error *)
end
(* This module needs cleaning up. It is supposed to automatically
produce a syntax error message, based on the current state and
stack. *)
module Make
(I : MenhirLib.IncrementalEngine.EVERYTHING)
(User : sig
(* In order to submit artificial tokens to the parser, we need a function
that converts a terminal symbol to a token. Unfortunately, we cannot
(in general) auto-generate this code, because it requires making up
semantic values of arbitrary OCaml types. *)
val terminal2token: _ I.terminal -> I.token
end)
: sig
open I
(* An explanation is a description of what the parser has recognized in the
recent past and what it expects next. More precisely, an explanation is
an LR(0) item, enriched with positions. Indeed, the past (the first half
of the item's right-hand side, up to the bullet) corresponds to a part of
the input that has been read already, so it can be annotated with
positions. *)
type explanation
(* The LR(0) item. *)
val item: explanation -> item
(* The past. This is a non-empty sequence of (terminal and non-terminal)
symbols, each of which corresponds to a range of the input file. These
symbols correspond to the first half (up to the bullet) of the item's
right-hand side. In short, they represent what (we think) we have
recognized in the recent past. *)
(* It is worth noting that, when an error occurs, we produce multiple
explanations, which may have different pasts. Indeed, not only may
these pasts have different lengths (one may be a suffix of another),
but two pasts can in fact be incomparable. Indeed, different choices
of the lookahead token may cause different reductions, hence different
interpretations of what has been read in the past. *)
val past: explanation -> (xsymbol * Lexing.position * Lexing.position) list
(* The future. This is a non-empty sequence of (terminal and non-terminal)
symbols. These symbols correspond to the second half (after the bullet)
of the item's right-hand side. In short, they represent what we expect
to recognize in the future, if this item is a good prediction. *)
(* This information can be computed from [item]. This function is provided
only for convenience. *)
val future: explanation -> xsymbol list
(* A goal. This is a non-terminal symbol. It is the item's left-hand side.
In short, it represents the reduction that we will be able to perform if
we successfully recognize this future. *)
(* This information can be computed from [item]. This function is provided
only for convenience. *)
val goal: explanation -> xsymbol
(* TEMPORARY *)
(* We build lists of explanations. These explanations may originate in
distinct LR(1) states. They may have different pasts, because (as noted
above) different lookahead tokens may cause different reductions.
TEMPORARY: complete this comment. *)
exception Error of (Lexing.position * Lexing.position) * explanation list
(* [entry start lexer lexbuf] drives the parser, beginning at the
checkpoint [start]; if a syntax error is detected, it raises [Error]. *)
val entry: 'a I.checkpoint -> (Lexing.lexbuf -> token) -> Lexing.lexbuf -> 'a
end
# Makefile for the calc-inspection-dune demo.
.PHONY: all clean test

# dune was formerly known as jbuilder.
DUNE := jbuilder
EXECUTABLE := calc.exe

# Build the executable, provided dune/jbuilder is installed.
all:
	@ if command -v $(DUNE) > /dev/null ; then \
	  $(DUNE) build $(EXECUTABLE) ; \
	else \
	  echo "Error: $(DUNE) is required." ; \
	fi

# Remove generated files (the list is kept in .gitignore).
clean:
	rm -rf `cat .gitignore`
	rm -f *~

# Exercise the demo on one valid input and two erroneous inputs.
test: all
	@echo "The following command should print 42:"
	echo "(1 + 2 * 10) * 2" | ./_build/default/$(EXECUTABLE)
	@echo "The following command should print an error message:"
	echo "(1 + 2 * 10) * )" | ./_build/default/$(EXECUTABLE)
	@echo "The following command should print an error message:"
	echo "(1 + 2 * 10))" | ./_build/default/$(EXECUTABLE)
This demo is identical to the "calc-inspection" demo,
but uses dune (a.k.a. jbuilder)
instead of ocamlbuild.
open Lexing
open MenhirLib.General
open Parser.MenhirInterpreter
(* Instantiate [MenhirLib.Printers] for our parser. This requires providing a
few printing functions -- see [CalcPrinters]. *)
module P =
MenhirLib.Printers.Make
(Parser.MenhirInterpreter)
(CalcPrinters)

(* Instantiate [ErrorReporting] for our parser. This requires
providing a few functions -- see [CalcErrorReporting]. *)
module E =
ErrorReporting.Make
(Parser.MenhirInterpreter)
(CalcErrorReporting)
(* Define a printer for explanations. We treat an explanation as if it
   were just an item: that is, we ignore the position information that
   is provided in the explanation. Indeed, this information is hard to
   show in text mode. *)

let print_explanation explanation =
  explanation |> E.item |> P.print_item
(* Report a syntax error at position [startp], followed by the list of
   explanations. *)
(* BUG FIX: [pos_cnum] is an absolute character offset from the beginning
   of the input, not a column number. Per the [Lexing] convention, the
   column is [pos_cnum - pos_bol]. *)
let print_explanations startp explanations =
  Printf.fprintf stderr
    "At line %d, column %d: syntax error.\n"
    startp.pos_lnum
    (startp.pos_cnum - startp.pos_bol);
  List.iter print_explanation explanations;
  flush stderr
(* The rest of the code is as in the [calc] demo. *)

(* Parse and evaluate one line of input, printing either the result or
   an error report on the appropriate channel. *)
let process (line : string) =
  let lexbuf = from_string line in
  match E.entry (Parser.Incremental.main lexbuf.lex_curr_p) Lexer.token lexbuf with
  | v ->
      Printf.printf "%d\n%!" v
  | exception Lexer.Error msg ->
      Printf.fprintf stderr "%s%!" msg
  | exception E.Error ((startp, _), explanations) ->
      print_explanations startp explanations
(* A convenience wrapper: process a line if one was read, do nothing
   otherwise. *)
let process (optional_line : string option) =
  match optional_line with
  | Some line ->
      process line
  | None ->
      ()
(* Read and process lines from [chan], stopping when the lexer reports
   that the end of input has been reached. *)
let rec repeat chan =
  let optional_line, more = Lexer.line chan in
  process optional_line;
  if more then repeat chan

(* The entry point: process the standard input channel, line by line. *)
let () =
  repeat (from_channel stdin)
; Build description for the calc-inspection-dune demo (jbuilder version 1).
(jbuild_version 1)

; Generate the lexer from lexer.mll.
(ocamllex (lexer))

; Generate the parser with menhir, in --table mode, with the
; inspection API enabled.
(menhir (
(modules (parser))
(flags ("--table" "--inspection" "-v" "-la" "2"))
))

; The main executable, linked against menhirLib.
(executable (
(name calc)
(libraries (menhirLib))
))
{
open Parser

(* [Error msg] is raised by the [token] rule below when an unexpected
character is encountered; [msg] mentions the offending offset. *)
exception Error of string
}
(* This rule looks for a single line, terminated with '\n' or eof.
It returns a pair of an optional string (the line that was found)
and a Boolean flag (false if eof was reached). *)
rule line = parse
| ([^'\n']* '\n') as line
(* Normal case: one line, no eof. *)
{ Some line, true }
| eof
(* Normal case: no data, eof. *)
{ None, false }
| ([^'\n']+ as line) eof
(* Special case: some data but missing '\n', then eof.
Consider this as the last line, and add the missing '\n'. *)
{ Some (line ^ "\n"), false }
(* This rule analyzes a single line and turns it into a stream of
tokens. *)
and token = parse
| [' ' '\t']
(* Skip blanks and tabs. *)
{ token lexbuf }
| '\n'
{ EOL }
| ['0'-'9']+ as i
(* An integer literal. *)
{ INT (int_of_string i) }
| '+'
{ PLUS }
| '-'
{ MINUS }
| '*'
{ TIMES }
| '/'
{ DIV }
| '('
{ LPAREN }
| ')'
{ RPAREN }
| _
(* Any other character is a lexical error. *)
{ raise (Error (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) }
/* Token declarations. INT carries an integer semantic value. */
%token <int> INT
%token PLUS MINUS TIMES DIV
%token LPAREN RPAREN
%token EOL

/* Precedence and associativity, from lowest to highest. */
%left PLUS MINUS /* lowest precedence */
%left TIMES DIV /* medium precedence */
%nonassoc UMINUS /* highest precedence */

/* The start symbol; a complete phrase evaluates to an integer. */
%start <int> main

%%

/* A main phrase is an expression followed by an end-of-line. */
main:
| e = expr EOL
{ e }

/* Arithmetic expressions, evaluated on the fly. */
expr:
| i = INT
{ i }
| LPAREN e = expr RPAREN
{ e }
| e1 = expr PLUS e2 = expr
{ e1 + e2 }
| e1 = expr MINUS e2 = expr
{ e1 - e2 }
| e1 = expr TIMES e2 = expr
{ e1 * e2 }
| e1 = expr DIV e2 = expr
{ e1 / e2 }
| MINUS e = expr %prec UMINUS
{ - e }
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment