Commit 50c30d25 authored by POTTIER Francois

Added demos/calc-incremental, which demonstrates the table back-end and the incremental API.
parent 44501292
* Place INCREMENTAL_ENGINE in a separate file, so as to reduce confusion
and allow including this file in the documentation
* Add an incremental version of the calc demo
* Test incremental interface in conjunction with %parameter
* Include comments in the generated .mli file (Interface)?
......
@@ -17,8 +15,9 @@
change String to Bytes where appropriate
replace let/unless with match/exception in a few places
* Update the demos to use ocamlbuild; remove Makefile.shared;
remove ocamldep.wrapper? remove OMakefiles
* Is it possible to update calc-two and calc-param to use ocamlbuild?
(they need --external-tokens and --only-tokens)
Then, one could remove Makefile.shared and ocamldep.wrapper.
* engine.ml: initial call to [run env true] should be [run env false]
if the initial state has a default reduction on #
......
DEMOS := calc calc-two calc-param
DEMOS := calc calc-two calc-param calc-incremental
.PHONY: all clean
......
.PHONY: all clean test
# Find Menhir.
ifndef MENHIR
MENHIR := $(shell ../find-menhir.sh)
endif
# We use the table back-end, and link against menhirLib.
# We assume that menhirLib has been installed in such a
# way that ocamlfind knows about it.
MENHIRFLAGS := --infer --table
OCAMLBUILD := ocamlbuild -use-ocamlfind -use-menhir -menhir "$(MENHIR) $(MENHIRFLAGS)" -package menhirLib
MAIN := calc
all:
$(OCAMLBUILD) $(MAIN).native
clean:
rm -f *~
$(OCAMLBUILD) -clean
test: all
@echo "The following command should print 42:"
echo "(1 + 2 * 10) * 2" | ./$(MAIN).native
This variant of the calc demo uses Menhir with the --table option.
It also demonstrates how to use the incremental parser interface.
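To try it, run make (which builds calc.native via ocamlbuild), then make test,
which pipes the expression (1 + 2 * 10) * 2 into the executable and should print 42.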
(* Introduce a short name for the incremental parser API. *)
module I =
Parser.MenhirInterpreter
(* Define the loop which drives the parser. At each iteration,
we analyze a result produced by the parser, and act in an
appropriate manner. *)
let rec loop linebuf (result : int I.result) =
match result with
| I.InputNeeded env ->
(* The parser needs a token. Request one from the lexer,
and offer it to the parser, which will produce a new
result. Then, repeat. *)
let token = Lexer.token linebuf in
let startp = linebuf.Lexing.lex_start_p
and endp = linebuf.Lexing.lex_curr_p in
let result = I.offer env (token, startp, endp) in
loop linebuf result
| I.HandlingError env ->
(* The parser has suspended itself because of a syntax error. Stop. *)
Printf.fprintf stderr
"At offset %d: syntax error.\n%!"
(Lexing.lexeme_start linebuf)
| I.Accepted v ->
(* The parser has succeeded and produced a semantic value. Print it. *)
Printf.printf "%d\n%!" v
| I.Rejected ->
(* The parser rejects this input. This cannot happen, here, because
we stop as soon as the parser reports [HandlingError]. *)
assert false
(* Initialize the lexer, and catch any exception raised by the lexer. *)
let process (line : string) =
let linebuf = Lexing.from_string line in
try
loop linebuf (Parser.main_incremental())
with
| Lexer.Error msg ->
Printf.fprintf stderr "%s%!" msg
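(* Illustrative sketch, not part of the original demo: since the grammar
   expects an EOL token, [process] can be exercised directly on strings
   that carry their terminating newline, e.g.
     process "1 + 2 * 10\n";         (* prints 21 *)
     process "(1 + 2 * 10) * 2\n"    (* prints 42 *)
*)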
(* The rest of the code is as in the [calc] demo. *)
let process (optional_line : string option) =
match optional_line with
| None ->
()
| Some line ->
process line
let rec repeat channel =
(* Attempt to read one line. *)
let optional_line, continue = Lexer.line channel in
process optional_line;
if continue then
repeat channel
let () =
repeat (Lexing.from_channel stdin)
{
open Parser
exception Error of string
}
(* This rule looks for a single line, terminated with '\n' or eof.
It returns a pair of an optional string (the line that was found)
and a Boolean flag (false if eof was reached). *)
rule line = parse
| ([^'\n']* '\n') as line
(* Normal case: one line, no eof. *)
{ Some line, true }
| eof
(* Normal case: no data, eof. *)
{ None, false }
| ([^'\n']+ as line) eof
(* Special case: some data but missing '\n', then eof.
Consider this as the last line, and add the missing '\n'. *)
{ Some (line ^ "\n"), false }
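(* Illustrative note, not part of the original file: on the input "1+2\n3",
   successive calls to [line] return (Some "1+2\n", true) and then
   (Some "3\n", false); the third case above supplies the missing '\n'. *)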
(* This rule analyzes a single line and turns it into a stream of
tokens. *)
and token = parse
| [' ' '\t']
{ token lexbuf }
| '\n'
{ EOL }
| ['0'-'9']+ as i
{ INT (int_of_string i) }
| '+'
{ PLUS }
| '-'
{ MINUS }
| '*'
{ TIMES }
| '/'
{ DIV }
| '('
{ LPAREN }
| ')'
{ RPAREN }
| _
{ raise (Error (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) }
%token <int> INT
%token PLUS MINUS TIMES DIV
%token LPAREN RPAREN
%token EOL
%left PLUS MINUS /* lowest precedence */
%left TIMES DIV /* medium precedence */
%nonassoc UMINUS /* highest precedence */
%start <int> main
%%
main:
| e = expr EOL
{ e }
expr:
| i = INT
{ i }
| LPAREN e = expr RPAREN
{ e }
| e1 = expr PLUS e2 = expr
{ e1 + e2 }
| e1 = expr MINUS e2 = expr
{ e1 - e2 }
| e1 = expr TIMES e2 = expr
{ e1 * e2 }
| e1 = expr DIV e2 = expr
{ e1 / e2 }
| MINUS e = expr %prec UMINUS
{ - e }
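/* Illustrative note, not part of the original grammar: because TIMES and DIV
   are declared with higher precedence than PLUS and MINUS, the test input
   (1 + 2 * 10) * 2 is parsed as (1 + (2 * 10)) * 2 and evaluates to 42. */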
......
@@ -15,6 +15,7 @@ all:
$(OCAMLBUILD) $(MAIN).native
clean:
rm -f *~
$(OCAMLBUILD) -clean
test: all
......