Commit ed420aef authored by POGODALLA Sylvain's avatar POGODALLA Sylvain

Scripting language added. See the src/scripting directory and the data/script...

Scripting language added. See the src/scripting directory and the data/script example file. Usage: in src/scripting, run ./test < ../data/script or use ./test with the interaction loop
parent e5e70e22
......@@ -301,7 +301,7 @@ on `(hostname || uname -n) 2>/dev/null | sed 1q`
"
# Files that config.status was made for.
config_files=" ./Makefile config/Makefile src/Makefile.master src/Makefile.common src/Makefile src/utils/Makefile src/logic/Makefile src/grammars/Makefile src/lambda/Makefile"
config_files=" ./Makefile config/Makefile src/Makefile.master src/Makefile.common src/Makefile src/utils/Makefile src/logic/Makefile src/grammars/Makefile src/lambda/Makefile src/scripting/Makefile"
ac_cs_usage="\
\`$as_me' instantiates files from templates according to the
......@@ -418,6 +418,7 @@ do
"src/logic/Makefile") CONFIG_FILES="$CONFIG_FILES src/logic/Makefile" ;;
"src/grammars/Makefile") CONFIG_FILES="$CONFIG_FILES src/grammars/Makefile" ;;
"src/lambda/Makefile") CONFIG_FILES="$CONFIG_FILES src/lambda/Makefile" ;;
"src/scripting/Makefile") CONFIG_FILES="$CONFIG_FILES src/scripting/Makefile" ;;
*) { { echo "$as_me:$LINENO: error: invalid argument: $ac_config_target" >&5
echo "$as_me: error: invalid argument: $ac_config_target" >&2;}
......
......@@ -124,7 +124,7 @@ AC_SUBST(OCAMLP4_LOC)
AC_SUBST(SET_MAKE)
AC_CONFIG_FILES([./Makefile config/Makefile src/Makefile.master src/Makefile.common src/Makefile src/utils/Makefile src/logic/Makefile src/grammars/Makefile src/lambda/Makefile])
AC_CONFIG_FILES([./Makefile config/Makefile src/Makefile.master src/Makefile.common src/Makefile src/utils/Makefile src/logic/Makefile src/grammars/Makefile src/lambda/Makefile src/scripting/Makefile])
AC_PROG_MAKE_SET
......
......@@ -2134,7 +2134,7 @@ echo "${ECHO_T}Compilation will be done with the $OCAML09WARNINGS option" >&6; }
ac_config_files="$ac_config_files ./Makefile config/Makefile src/Makefile.master src/Makefile.common src/Makefile src/utils/Makefile src/logic/Makefile src/grammars/Makefile src/lambda/Makefile"
ac_config_files="$ac_config_files ./Makefile config/Makefile src/Makefile.master src/Makefile.common src/Makefile src/utils/Makefile src/logic/Makefile src/grammars/Makefile src/lambda/Makefile src/scripting/Makefile"
{ echo "$as_me:$LINENO: checking whether ${MAKE-make} sets \$(MAKE)" >&5
......@@ -2744,6 +2744,7 @@ do
"src/logic/Makefile") CONFIG_FILES="$CONFIG_FILES src/logic/Makefile" ;;
"src/grammars/Makefile") CONFIG_FILES="$CONFIG_FILES src/grammars/Makefile" ;;
"src/lambda/Makefile") CONFIG_FILES="$CONFIG_FILES src/lambda/Makefile" ;;
"src/scripting/Makefile") CONFIG_FILES="$CONFIG_FILES src/scripting/Makefile" ;;
*) { { echo "$as_me:$LINENO: error: invalid argument: $ac_config_target" >&5
echo "$as_me: error: invalid argument: $ac_config_target" >&2;}
......
load d ../data/lex1.dat;
list;
abs print;
obj print;
absobj print;
select absobj;
analyse lambda x.t (u (v x)):a ->a;
(*open Interface*)
open Interface
open Abstract_syntax
module type Signature_sig =
(*module type Signature_sig =
sig
(** Exceptions raised when definitions of types or constants are
duplicated *)
......@@ -68,7 +68,7 @@ sig
end
*)
module type Environment_sig =
......
(*open Interface*)
open Interface
open Abstract_syntax
(* A simple interface for environment *)
module type Signature_sig =
(*module type Signature_sig =
sig
(** Exceptions raised when definitions of types or constants are
duplicated *)
......@@ -70,7 +70,7 @@ sig
end
*)
......
......@@ -60,11 +60,19 @@ type env_error =
| Duplicated_signature of string
| Duplicated_lexicon of string
type lexicon_error =
| Missing_interpretations of (string * string * (string list))
type error =
| Parse_error of parse_error * (Lexing.position * Lexing.position)
| Lexer_error of lex_error * (Lexing.position * Lexing.position)
| Type_error of type_error * (Lexing.position * Lexing.position)
| Env_error of env_error * (Lexing.position * Lexing.position)
| Lexicon_error of lexicon_error * (Lexing.position * Lexing.position)
type warning =
| Variable_or_constant of (string * Lexing.position * Lexing.position)
......@@ -125,7 +133,11 @@ let type_error_to_string = function
let env_error_to_string = function
| Duplicated_signature s -> Printf.sprintf "Syntax error: Signature id \"%s\" is used twice in this environment" s
| Duplicated_lexicon s -> Printf.sprintf "Syntax error: Lexion id \"%s\" is used twice in this environment" s
| Duplicated_lexicon s -> Printf.sprintf "Syntax error: Lexicon id \"%s\" is used twice in this environment" s
let lexicon_error_to_string = function
| Missing_interpretations (lex_name,abs_name,missing_inters) ->
Printf.sprintf "Lexicon definition error: Lexicon \"%s\" is missing the interpretations of the following terms of the abstract signature \"%s\":\n%s" lex_name abs_name (Utils.string_of_list "\n" (fun x -> Printf.sprintf"\t%s" x) missing_inters)
let warning_to_string w =
match w with
......@@ -137,7 +149,8 @@ let error_msg e input_file =
| Parse_error (er,(s,e)) -> parse_error_to_string er,compute_comment_for_position s e
| Lexer_error (er,(s,e)) -> lex_error_to_string er,compute_comment_for_position s e
| Type_error (er,(s,e)) -> type_error_to_string er,compute_comment_for_position s e
| Env_error (er,(s,e)) -> env_error_to_string er,compute_comment_for_position s e in
| Env_error (er,(s,e)) -> env_error_to_string er,compute_comment_for_position s e
| Lexicon_error (er,(s,e)) -> lexicon_error_to_string er,compute_comment_for_position s e in
Printf.sprintf "File \"%s\", %s\n%s" input_file location_msg msg
let dyp_error lexbuf input_file =
......
......@@ -41,6 +41,10 @@ type type_error =
| Vacuous_abstraction of (string * (Lexing.position * Lexing.position))
(** The types for errors raised by lexicons *)
type lexicon_error =
| Missing_interpretations of (string * string * (string list))
(** The types for errors raised by the environment. Names should be
explicit *)
type env_error =
......@@ -54,6 +58,7 @@ type error =
| Lexer_error of lex_error * (Lexing.position * Lexing.position)
| Type_error of type_error * (Lexing.position * Lexing.position)
| Env_error of env_error * (Lexing.position * Lexing.position)
| Lexicon_error of lexicon_error * (Lexing.position * Lexing.position)
(** The type for warnings *)
type warning =
......
......@@ -29,6 +29,7 @@ sig
val typecheck : Abstract_syntax.term -> stype -> t -> term
val fold : (entry -> 'a -> 'a) -> 'a -> t -> 'a
val get_binder_argument_functional_type : string -> t -> Abstract_syntax.abstraction option
val is_declared : entry -> t -> string option
end
module type Lexicon_sig =
......@@ -45,4 +46,5 @@ sig
val to_string : t -> string
val interpret : Signature.term -> Signature.stype -> t -> (Signature.term*Signature.stype)
val get_sig : t -> (signature*signature)
val check : t -> unit
end
......@@ -108,6 +108,11 @@ sig
{!Abstract_syntax.Abstract_syntax.Non_linear} otherwise *)
val get_binder_argument_functional_type : string -> t -> Abstract_syntax.abstraction option
(** [is_declared e sg] returns [Some s] if the entry [e] is a
declaration of the string [s] (and not a definiton) in [sg] and
[None] otherwise *)
val is_declared : entry -> t -> string option
end
(** This module signature describes the interface for modules implementing lexicons *)
......@@ -127,4 +132,5 @@ sig
val to_string : t -> string
val interpret : Signature.term -> Signature.stype -> t -> (Signature.term*Signature.stype)
val get_sig : t -> (signature*signature)
val check : t -> unit
end
......@@ -138,7 +138,7 @@
data:
| EOI {get_env_value dyp.last_local_data}
| signature[s] ...@{s,[Local_data (Some (Env (E.insert (E.Signature s) (get_env_value dyp.last_local_data))))]} data[d] {d}
| lexicon[l] ...@{l,[Local_data (Some (Env (E.insert (E.Lexicon l) (get_env_value dyp.last_local_data))))]} data[d] {d}
| lexicon[l] ...@{let () = E.Lexicon.check l in l,[Local_data (Some (Env (E.insert (E.Lexicon l) (get_env_value dyp.last_local_data))))]} data[d] {d}
signature :
| SIG_OPEN sig_ident EQUAL sig_entries {$4}
......@@ -438,22 +438,22 @@ sig_entries :
let e =
try (fst (List.hd (data ~local_data:actual_env Lexer.lexer lexbuf))) with
| Dyp.Syntax_error -> raise (Error.dyp_error lexbuf filename) in
let () = Printf.printf "Done.\n" in
let () = Printf.printf "Done.\n%!" in
let () = match output with
| false -> ()
| true ->
E.iter
(function
| E.Signature sg ->
let () = Printf.printf "%s\n" (E.Signature1.to_string sg) in
Printf.printf "%s\n" (Error.warnings_to_string filename (E.Signature1.get_warnings sg))
let () = Printf.printf "%s\n%!" (E.Signature1.to_string sg) in
Printf.printf "%s\n%!" (Error.warnings_to_string filename (E.Signature1.get_warnings sg))
| E.Lexicon lex ->
Printf.printf "%s\n" (E.Lexicon.to_string lex))
Printf.printf "%s\n%!" (E.Lexicon.to_string lex))
e in
e
with
| Error.Error e ->
let () = Printf.fprintf stderr "Error: %s\n" (Error.error_msg e filename) in
let () = Printf.fprintf stderr "Error: %s\n%!" (Error.error_msg e filename) in
env
let parse_term ?(output=false) t sg =
......@@ -464,7 +464,7 @@ sig_entries :
try fst (List.hd(term_alone ~local_data:(Some (Signature sg)) Lexer.lexer lexbuf)) with
| Dyp.Syntax_error -> raise (Error.dyp_error lexbuf "stdin") in
let () = match output with
| true -> Printf.printf "I read: %s : %s \n" (E.Signature1.term_to_string abs_term sg) (E.Signature1.type_to_string abs_type sg)
| true -> Printf.printf "\t %s : %s \n%!" (E.Signature1.term_to_string abs_term sg) (E.Signature1.type_to_string abs_type sg)
| false -> () in
Some (abs_term,abs_type)
with
......
......@@ -274,6 +274,8 @@ struct
let typecheck t _ _ = t
let get_binder_argument_functional_type _ _ = Some Abstract_syntax.Linear
let is_declared _ _ = None
end
module Abstract_lex =
......@@ -327,6 +329,8 @@ struct
None with
| None -> ""
| Some s -> Printf.sprintf "%s\n" s)
let check _ = Printf.printf "No checking of interpretations\n%!"
end
......@@ -25,7 +25,7 @@ PREVIOUS_DIRS = ../utils ../logic ../grammars ../lambda.sylvain
# Source files in the right order of dependence
ML = functions.ml script_parser.ml
ML = functions.ml script_lexer.ml script_parser.ml
EXE_SOURCES = test.ml
......
......@@ -55,15 +55,15 @@ struct
| Some n -> E.get n e
in
match entry with
| E.Signature sg -> ignore (Term_parser.parse_term data sg)
| E.Signature sg -> ignore (Term_parser.parse_term ~output:true data sg)
| E.Lexicon lex ->
let abs,obj=E.Lexicon.get_sig lex in
match Term_parser.parse_term data abs with
match Term_parser.parse_term ~output:true data abs with
| None -> ()
| Some (t,ty) ->
let t',ty' = E.Lexicon.interpret t ty lex in
Printf.printf
"Interpreted as:\n%s : %s\n"
"Interpreted as:\n\t%s : %s\n%!"
(E.Signature1.term_to_string t' obj)
(E.Signature1.type_to_string ty' obj)
......
{
open Abstract_syntax
open Script_parser
(* open Error*)
(* open Script_parser*)
(* | STRING of (string*Abstract_syntax.location)*)
type token =
| EOII
| LOAD_DATA of (string*Abstract_syntax.location)
| LOAD_SCRIPT of (string*Abstract_syntax.location)
| LIST
| SELECT
| UNSELECT
| TRACE
| PRINT
| ANALYSE of (string*Abstract_syntax.location)
| COMPOSE
| SEMICOLONN
| AS
| IDENTT of (string*Abstract_syntax.location)
let loc lexbuf = Lexing.lexeme_start_p lexbuf,Lexing.lexeme_end_p lexbuf
}
let string_content = Buffer.create 16
}
let newline = ('\010' | '\013' | "\013\010")
let letter = ['a'-'z' 'A'-'Z']
......@@ -17,22 +38,45 @@ let string = (letter|digit|'_')*
parse
| [' ' '\t'] {lexer lexbuf}
| newline {let () = Error.update_loc lexbuf None in lexer lexbuf}
| eof {EOI}
| "#" {comment lexer}
| [';'] {let () = check_brackets () in
SEMICOLON(loc lexbuf)}
| "load" {}
| "list" {}
| "select" {}
| "unselect" {}
| "trace" {}
| "print" {}
| "analyse"
| letter string {IDENT (Lexing.lexeme lexbuf,loc lexbuf)}
and comment parser = parse
| newline {parser lexbuf}
| _ {comment lexbuf}
| eof {EOII}
| "#" {comment lexer lexbuf}
| [';'] {SEMICOLONN}
| "load" {(*let () = Printf.fprintf stderr "Entering load commad\n%!" in*) let t = load_options lexbuf in t}
| "list" {LIST}
| "select" {SELECT}
| "unselect" {UNSELECT}
| "trace" {TRACE}
| "print" {PRINT}
| "analyse" {let () = Buffer.reset string_content in
string (fun x l -> ANALYSE (x,l)) lexbuf}
| "compose" {COMPOSE}
| "as" {AS}
| letter string {IDENTT (Lexing.lexeme lexbuf,loc lexbuf)}
and comment f_parser = parse
| newline {f_parser lexbuf}
| _ {comment f_parser lexbuf}
and string f = parse
| ";" {f (loc lexbuf) (Buffer.contents string_content),}
| "#" {comment string}
| ";" {(*let () = Printf.fprintf stderr "Read the \"%s\" filename\n%!" (Buffer.contents string_content) in*) f (Buffer.contents string_content) (loc lexbuf)}
| "#" {comment (string f) lexbuf}
| _ as c {(*let () = Printf.fprintf stderr "Addind the \'%c\' char\n%!" c in *)
let () = Buffer.add_char string_content c in string f lexbuf}
and string_wo_space f = parse
| ";" {f (Buffer.contents string_content) (loc lexbuf)}
| "#" {comment (string_wo_space f) lexbuf}
| [' ' '\t'] {string_wo_space f lexbuf}
| _ as c {let () = Buffer.add_char string_content c in string f lexbuf}
and load_options = parse
| [' ' '\t'] {load_options lexbuf}
| newline {let () = Error.update_loc lexbuf None in load_options lexbuf}
| eof {EOII}
| "#" {comment load_options lexbuf}
| "data" {let () = Buffer.reset string_content in
(*let () = Printf.fprintf stderr "Read data option\n%!" in*)
string_wo_space (fun x l -> LOAD_DATA (x,l)) lexbuf}
| "d" {let () = Buffer.reset string_content in
string_wo_space (fun x l -> LOAD_DATA (x,l)) lexbuf}
| "script" {let () = Buffer.reset string_content in
string_wo_space (fun x l -> LOAD_SCRIPT (x,l)) lexbuf}
| "s" {let () = Buffer.reset string_content in
string_wo_space (fun x l -> LOAD_SCRIPT (x,l)) lexbuf}
%mltop{
open Abstract_syntax
type token =
| EOI
| LOAD_DATA of Abstract_syntax.location
| LOAD_SCRIPT of Abstract_syntax.location
| LIST of Abstract_syntax.location
| SELECT of Abstract_syntax.location
| UNSELECT of Abstract_syntax.location
| TRACE of Abstract_syntax.location
| PRINT of Abstract_syntax.location
| ANALYSE of Abstract_syntax.location
| COMPOSE of Abstract_syntax.location
| SEMICOLON of Abstract_syntax.location
| AS of Abstract_syntax.location
| IDENT of (string*Abstract_syntax.location)
| STRING of (string*Abstract_syntax.location)
open Script_lexer
(* type token = Script_lexer.token *)
let id = fun x -> x
let pr s = Printf.printf "%s\n%!" s
module Make (E:Environment.Environment_sig) =
struct
(* module Data_parser = Parser.Make(E)*)
module F = Functions.Make(E)
}
{
open Dyp
}
}
{
open Dyp
let local_data = E.empty
}
%token
EOI
<Abstract_syntax.location>LOAD_DATA
<Abstract_syntax.location>LOAD_SCRIPT
<Abstract_syntax.location>LIST
<Abstract_syntax.location>SELECT
<Abstract_syntax.location>UNSELECT
<Abstract_syntax.location>TRACE
<Abstract_syntax.location>PRINT
<Abstract_syntax.location>ANALYSE
<Abstract_syntax.location>COMPOSE
<Abstract_syntax.location>SEMICOLON
<Abstract_syntax.location>AS
<(string*Abstract_syntax.location)>IDENT
<(string*Abstract_syntax.location)>STRING
%start <(E.t -> E.t)> command
EOII
<(string*Abstract_syntax.location)>LOAD_DATA
<(string*Abstract_syntax.location)>LOAD_SCRIPT
LIST
SELECT
UNSELECT
TRACE
PRINT
<(string*Abstract_syntax.location)>ANALYSE
COMPOSE
SEMICOLONN
AS
<(string*Abstract_syntax.location)>IDENTT
/*<(string*Abstract_syntax.location)>STRING*/
%start <(E.t)> zzcommands
%local_data_type <(E.t)>
%%
command:
| LOAD_DATA STRING SEMICOLON {}
| LOAD_SCRIPT STRING SEMICOLON {}
| LIST SEMICOLON {}
| SELECT IDENT SEMICOLON {}
| UNSELECT SEMICOLON {}
| TRACE SEMICOLON {}
| optional_ident PRINT {}
| optional_ident ANALYSE STRING {}
| COMPOSE IDENT IDENT AS IDENT {}
zzcommands :
| EOII {dyp.last_local_data}
| command[c] ...@{let e' = c (dyp.last_local_data) in
e',[Local_data e']}
zzcommands {dyp.last_local_data}
command:
| LOAD_DATA[(s,loc)] {fun e -> F.load F.Data s e}
| LOAD_SCRIPT[(s,loc)] {fun e -> F.load F.Script s e}
| LIST SEMICOLONN {fun e -> let () = F.list e in e}
| SELECT IDENTT[(name,_)] SEMICOLONN {fun e -> F.select name e}
| UNSELECT SEMICOLONN {F.unselect}
| TRACE SEMICOLONN {fun e -> let () = F.trace () in e}
| optional_ident[name] PRINT SEMICOLONN {fun e -> match name with
| None -> let () = F.print e in e
| Some (n,_) -> let () = F.print ~name:n e in e}
| optional_ident[name] ANALYSE[(t,l)] {fun e -> match name with
| None -> let () = F.analyse e t in e
| Some (n,_) -> let () = F.analyse ~name:n e t in e}
| COMPOSE IDENTT[(n1,_)] IDENTT [(n2,_)] AS IDENTT[(n3,_)] SEMICOLONN {fun e -> F.compose n1 n2 n3 e}
optional_ident :
| {None}
| IDENTT[id] {Some id}
%%
{end}
{
let parse_file filename env =
let in_ch = open_in filename in
let lexbuf = Lexing.from_channel in_ch in
let () = Printf.printf "Parsing \"%s\"...\n%!" filename in
try
let new_env=
try (fst (List.hd (zzcommands ~local_data:env Script_lexer.lexer lexbuf))) with
| Dyp.Syntax_error -> raise (Error.dyp_error lexbuf filename) in
new_env
with
| Error.Error e ->
let () = Printf.fprintf stderr "Error: %s\n" (Error.error_msg e filename) in
env
let bufferize () =
let buf = Buffer.create 16 in
let no_semi_colon=ref true in
let () =
while !no_semi_colon do
let input = read_line () in
try
let semi_colon_index=String.index input ';' in
let () = Buffer.add_string buf (String.sub input 0 (semi_colon_index+1)) in
no_semi_colon:=false
with
| Not_found -> Buffer.add_string buf input ;Buffer.add_char buf '\n'
done in
Buffer.contents buf
let parse_entry env =
let in_str = bufferize () in
(* let () = Printf.printf "\texecute \"%s\"\n%!" in_str in*)
let lexbuf = Lexing.from_string in_str in
try
let new_env=
try (fst (List.hd (zzcommands ~local_data:env Script_lexer.lexer lexbuf))) with
| Dyp.Syntax_error -> raise (Error.dyp_error lexbuf "stdin") in
new_env
with
| Error.Error e ->
let () = Printf.fprintf stderr "Error: %s\n" (Error.error_msg e "stdin") in
env
end}
%mlitop{
open Abstract_syntax
type token =
| EOI
| LOAD_DATA of Abstract_syntax.location
| LOAD_SCRIPT of Abstract_syntax.location
| LIST of Abstract_syntax.location
| SELECT of Abstract_syntax.location
| UNSELECT of Abstract_syntax.location
| TRACE of Abstract_syntax.location
| PRINT of Abstract_syntax.location
| ANALYSE of Abstract_syntax.location
| COMPOSE of Abstract_syntax.location
| SEMICOLON of Abstract_syntax.location
| AS of Abstract_syntax.location
| IDENT of (string*Abstract_syntax.location)
| STRING of (string*Abstract_syntax.location)
open Script_lexer
module Make(E:Environment.Environment_sig) :
sig
(* type token = Script_lexer.token*)
val parse_file : string -> E.t -> E.t
val parse_entry : E.t -> E.t
}
%mli{end}
......@@ -5,22 +5,17 @@ module Lex = Lexicon.Sylvain_lexicon
module E = Environment.Make(Lex)
module F=Functions.Make(E)
open F
let e = ref E.empty
let () = e:= (load Data "../data/lex1.dat" !e)
let () = list !e
let () = e:= select "absobj" !e
let () = print !e
let () = print ~name:"abs" !e
let () = analyse !e "lambda x.t x:a->a"
module P=Script_parser.Make(E)
(*let _ = P.parse_file "../data/script" E.empty) *)
let _ =
let continue = ref true in
let env = ref E.empty in
while !continue do
try
env := P.parse_entry !env
with
| End_of_file -> continue := false
done
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment