Commit 07cb17e6 authored by POGODALLA Sylvain's avatar POGODALLA Sylvain

Slowly replacing all the functions that trigger parsing of data

parent fd4399db
......@@ -18,10 +18,9 @@
(**************************************************************************)
open UtilsLib
open AcgData
open Interface
open Logic
open Abstract_syntax
open Interface
......
......@@ -17,11 +17,10 @@
(* *)
(**************************************************************************)
open AcgData
open Interface
open Logic
open Abstract_syntax
open Interface
(** This module implements a functor that builds an environment
containing signatures and lexicons when provided with two actual
......@@ -57,7 +56,8 @@ sig
(** [insert c d e] adds the content [c] into the environment [e] and
returns the resulting environment. If [d] is set to true, then
[c] is dumped in [e] by the
{!val:Environment.Environment_sig.write} function. *)
{!val:Environment.Environment_sig.write} function. The default
value for the [override] optional parameter is [false].*)
val insert : ?override:bool -> entry -> to_be_dumped:bool -> t -> t
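(** A minimal usage sketch (illustration only; [Environment] is assumed to be
    a module implementing this signature, [sg] an existing signature, and
    [env] an existing environment):
    {[
      let env' =
        Environment.insert ~override:false (Environment.Signature sg)
          ~to_be_dumped:true env
    ]} *)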
(** [get_signature name e] returns the signature of name [name] in
......
......@@ -19,7 +19,7 @@
open UtilsLib
module Actual_env = Grammars.Environment.Environment
module Actual_env = AcgData.Environment.Environment
module Sg=Actual_env.Signature1
(** This module corresponds to the main function of the ACG compiler and
......@@ -34,7 +34,7 @@ type return_status = Failure | Success of ((string option) * Actual_env.t)
(** Tries a reference to {!Logic.Lambda.Lambda.stype}. *)
module Actual_parser = Grammars.Data_parser
module Actual_parser = Grammars.Parsers
let resize_terminal () =
let () = Utils.sterm_set_size () in
......
......@@ -22,8 +22,9 @@
open AcgData
open Entry
open Acg_token
type lexing_of =
| Data of Entry.data
| Term of Entry.term
......@@ -97,75 +98,75 @@ let string = (letter|digit|'_')*'\''*
| "*)" {raise (Error.Error (Error.Lexer_error (Error.Unstarted_comment,loc lexbuf)))}
| eof {let () = update_data Entry.EOI (loc lexbuf) in
let () = check_brackets () in
Token.EOI}
Data_parser.EOI}
| ['='] {let () = update_data Entry.Equal (loc lexbuf) in
let () = check_brackets () in
Token.EQUAL(loc lexbuf)}
Data_parser.EQUAL(loc lexbuf)}
| "<<" {let () = update_data Entry.Compose (loc lexbuf) in
let () = check_brackets () in
Token.COMPOSE(loc lexbuf)}
Data_parser.COMPOSE(loc lexbuf)}
| [';'] {let () = update_data Entry.Semi_colon (loc lexbuf) in
let () = check_brackets () in
Token.SEMICOLON(loc lexbuf)}
Data_parser.SEMICOLON(loc lexbuf)}
| [':'] {let () = update_data Entry.Colon (loc lexbuf) in
let () = check_brackets () in
Token.COLON(loc lexbuf)}
Data_parser.COLON(loc lexbuf)}
| [','] {let () = update_data Entry.Comma (loc lexbuf) in
let () = check_brackets () in
Token.COMMA(loc lexbuf)}
Data_parser.COMMA(loc lexbuf)}
| ['('] {let () = update_data (Entry.Type_or_term Entry.LPAR) (loc lexbuf) in
let l = loc lexbuf in
let () = add_bracket l in
Token.LPAREN l}
Data_parser.LPAREN l}
| [')'] {let () = update_data (Entry.Type_or_term Entry.RPAR) (loc lexbuf) in
let brac_loc = loc lexbuf in
let () = remove_bracket brac_loc in
Token.RPAREN brac_loc}
Data_parser.RPAREN brac_loc}
| ['.'] {let () = update_data (Entry.Type_or_term Entry.DOT) (loc lexbuf) in
Token.DOT(loc lexbuf)}
Data_parser.DOT(loc lexbuf)}
| "signature" {let () = update_data Entry.Sig_kwd (loc lexbuf) in
let () = check_brackets () in
Token.SIG_OPEN(loc lexbuf)}
Data_parser.SIG_OPEN(loc lexbuf)}
| "lexicon" {let () = update_data Entry.Lex_kwd (loc lexbuf) in
let () = check_brackets () in
Token.LEX_OPEN(loc lexbuf)}
Data_parser.LEX_OPEN(loc lexbuf)}
| "nl_lexicon" {let () = update_data Entry.Lex_kwd (loc lexbuf) in
let () = check_brackets () in
Token.NL_LEX_OPEN(loc lexbuf)}
Data_parser.NL_LEX_OPEN(loc lexbuf)}
| "end" {let () = update_data Entry.End_kwd (loc lexbuf) in
let () = check_brackets () in
Token.END_OF_DEC(loc lexbuf)}
Data_parser.END_OF_DEC(loc lexbuf)}
| "type" {let () = update_data Entry.Type_kwd (loc lexbuf) in
let () = check_brackets () in
Token.TYPE(loc lexbuf)}
Data_parser.TYPE(loc lexbuf)}
| "prefix" {let () = update_data Entry.Prefix_kwd (loc lexbuf) in
let () = check_brackets () in
Token.PREFIX(loc lexbuf)}
Data_parser.PREFIX(loc lexbuf)}
| "infix" {let () = update_data Entry.Infix_kwd (loc lexbuf) in
let () = check_brackets () in
Token.INFIX(loc lexbuf)}
Data_parser.INFIX(loc lexbuf)}
| "binder" {let () = update_data Entry.Binder_kwd (loc lexbuf) in
let () = check_brackets () in
Token.BINDER(loc lexbuf)}
Data_parser.BINDER(loc lexbuf)}
| "lambda" {let () = update_data (Entry.Type_or_term Entry.LAMBDA) (loc lexbuf) in
Token.LAMBDA0(loc lexbuf)}
Data_parser.LAMBDA0(loc lexbuf)}
| "Lambda" {let () = update_data (Entry.Type_or_term Entry.LAMBDA) (loc lexbuf) in
Token.LAMBDA(loc lexbuf)}
Data_parser.LAMBDA(loc lexbuf)}
| "->" {let () = update_data (Entry.Type_or_term Entry.ARROW) (loc lexbuf) in
Token.LIN_ARROW(loc lexbuf)}
Data_parser.LIN_ARROW(loc lexbuf)}
| "=>" {let () = update_data (Entry.Type_or_term Entry.ARROW) (loc lexbuf) in
Token.ARROW(loc lexbuf)}
Data_parser.ARROW(loc lexbuf)}
| ":=" {let () = update_data Entry.Colon_equal (loc lexbuf) in
Token.COLON_EQUAL(loc lexbuf)}
Data_parser.COLON_EQUAL(loc lexbuf)}
| letter string {
let id = Lexing.lexeme lexbuf in
let () = match id with
| "extend" -> update_data Entry.Ext_kwd (loc lexbuf)
| "with" -> update_data Entry.With_kwd (loc lexbuf)
| _ -> update_data Entry.Id (loc lexbuf) in
Token.IDENT (id,loc lexbuf)}
Data_parser.IDENT (id,loc lexbuf)}
| symbol {let () = update_data Entry.Sym (loc lexbuf) in
Token.SYMBOL (Lexing.lexeme lexbuf,loc lexbuf)}
Data_parser.SYMBOL (Lexing.lexeme lexbuf,loc lexbuf)}
| _ as input_char {let () = Printf.fprintf stderr "%c" input_char in raise (Error.Error (Error.Lexer_error (Error.Bad_token,loc lexbuf)))}
and comment depth = parse
| "*)" {match depth with
......
......@@ -6,7 +6,8 @@
open Environment
module Typing_env = Utils.StringSet
type toto = Environment.t
type context =
| Signature of Environment.Signature1.t
......@@ -84,11 +85,10 @@
%token <Logic.Abstract_syntax.Abstract_syntax.location> SEMICOLON
%token <Logic.Abstract_syntax.Abstract_syntax.location> COMPOSE
%right COMPOSE
(*%right COMPOSE*)
%start <Environment.Environment.t -> Environment.Environment.t> main
%start <AcgData.Environment.Environment.t -> AcgData.Environment.Environment.t> main
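(* The start symbol now yields a function over environments rather than an
   environment, so the caller supplies the environment to be updated; see how
   [Data_parser.Incremental.main] is applied to [env] in the parser driver
   further down in this commit. *)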
%%
......@@ -104,15 +104,17 @@
if is_signature s e then
raise (Error.(Error (Env_error (Duplicated_signature s,loc))))
else
List.fold_left
(fun acc entry ->
try
Environnement.Signature1.add_entry (entry acc e) acc
with
| Environment.Signature1.Duplicate_type_definition ->
emit_parse_error (Error.Duplicated_type s) loc)
Environnement.Signature1.empty
entries
let new_sig =
List.fold_left
(fun acc entry ->
try
Environnement.Signature1.add_entry (entry acc e) acc
with
| Environment.Signature1.Duplicate_type_definition ->
emit_parse_error (Error.Duplicated_type s) loc)
Environnement.Signature1.empty
entries in
Environnement.(insert (Signature new_sig) false e)
}
......@@ -121,7 +123,9 @@ lexicon :
{fun e -> lex ~non_linear:false e }
| NL_LEX_OPEN lex=lex_declaration
{fun e -> lex ~non_linear:true e }
| LEX_OPEN lex=IDENT EQUAL exp=lexicon_exp {fun e -> exp lex e}
| LEX_OPEN lex=IDENT EQUAL exp=lexicon_exp {fun e ->
let new_lex = exp (Some lex) e in
Environment.(insert (Lexicon new_lex) false e)}
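(* For illustration: with this rule, a declaration in the data language such
   as [lexicon L = L1 << L2] (where [L1] and [L2] are previously defined
   lexicons; the names are hypothetical) is now parsed, built, and inserted
   into the environment in one step. *)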
......@@ -136,29 +140,32 @@ lexicon :
raise (Error.(Error (Env_error (Duplicated_lexicon lex_name,lex_loc))))
else
let lex' = List.fold_left
(fun acc entry ->
try
Environnement.Lexicon.insert (entry acc abs obj e) acc
with
| Environment.Signature1.Duplicate_type_definition ->
emit_parse_error (Error.Duplicated_type s) loc)
Environnement.Lexicon.empty lex_name ~abs:abs' ~obj:obj' ~non_linear
entries in
let () = Environnement.Lexicon.check lex' in
lex'
(fun acc entry -> entry acc e)
(Environment.Lexicon.empty lex ~abs:abs' ~obj:obj' ~non_linear)
entries in
let () = Environment.Lexicon.check lex' in
Environment.(insert (Lexicon lex') false e)
}
lexicon_exp0 :
| lex = IDENT { fun e -> get_lex lex e }
| lex = IDENT { fun _ e -> get_lex lex e }
| LPAREN lex = lexicon_exp RPAREN { lex }
lexicon_exp :
| lex = lexicon_exp0 { lex }
| lex1 = lexicon_exp COMPOSE lex2 = lexicon_exp
{
let l1_name,(l1_start,_) = Environment.Lexicon.name lex1 in
let l2_name,(_,l2_end) = Environment.Lexicon.name lex2 in
let new_name = l1_name^" << "^l2_name in
let new_loc = l1_loc,l2_loc in
Environment.Lexicon.compose l1 l2 (new_name,new_loc) }
| lex1 = lexicon_exp0 COMPOSE lex2 = lexicon_exp
{
fun name e ->
let l1,l2 = (lex1 None e),(lex2 None e) in
let new_name =
match name with
| Some (n,loc) -> n,loc
| None ->
let l1_name,(l1_start,_) = Environment.Lexicon.name l1 in
let l2_name,(_,l2_end) = Environment.Lexicon.name l2 in
let new_name = l1_name^" << "^l2_name in
let new_loc = l1_start,l2_end in
new_name,new_loc in
Environment.Lexicon.compose l1 l2 new_name
}
%token <Logic.Abstract_syntax.Abstract_syntax.location> COLON_EQUAL
%start <AcgData.Environment.Environment.Lexicon.t -> AcgData.Environment.Environment.t -> AcgData.Environment.Environment.Lexicon.t > lex_entry_eoi
%%
id_or_sym :
| id = IDENT { id }
| sym = SYMBOL { sym }
lex_entry_eoi :
| e = lex_entry EOI { e }
%public lex_entry :
| cst = separated_nonempty_list(COMMA,id_or_sym) COLON_EQUAL t=atomic_type_or_term
{
fun lex e ->
let abs,obj = Environment.Lexicon.get_sig lex in
let id,loc = t in
let k,term_or_type =
if fst (Environment.Signature1.is_constant id obj) then
(Term t),Abstract_syntax.Const t
else if Environment.Signature1.is_type id obj then
Type t,Abstract_syntax.Type_atom (id,loc,[])
else raise (Error.(Error (Parse_error ((Unknown_constant_nor_variable id),loc)))) in
List.fold_left
(fun acc (id,l) ->
match k,Environment.Signature1.is_constant id abs,Environment.Signature1.is_type id abs with
| Term _,(true,_),false -> Environment.Lexicon.insert (Abstract_syntax.Constant (id,loc,term_or_type)) acc
| Type _,(false,_),true -> Environment.Lexicon.insert (Abstract_syntax.Type (id,loc,term_or_type)) acc
| Term _,(false,_), _ -> emit_parse_error (Error.Unknown_constant id) l
| Type _,_,false -> emit_parse_error (Error.Unknown_type id) l
| _ , (true,_), true -> failwith (Printf.sprintf "Bug: should not happen. \"%s\" should not be both a type and a term" id))
lex
cst
if fst (Environment.Signature1.is_constant id obj) then
let term = Abstract_syntax.Const t in
List.fold_left
(fun acc (id,l) ->
match Environment.Signature1.is_constant id abs,Environment.Signature1.is_type id abs with
| (true,_),false -> Environment.Lexicon.insert (Abstract_syntax.Constant (id,loc,term)) acc
| (false,_), _ -> emit_parse_error (Error.Unknown_constant id) l
| (true,_), true -> failwith (Printf.sprintf "Bug: should not happen. \"%s\" should not be both a type and a term" id))
lex
cst
else if Environment.Signature1.is_type id obj then
let stype = Abstract_syntax.Type_atom (id,loc,[]) in
List.fold_left
(fun acc (id,l) ->
match Environment.Signature1.is_constant id abs,Environment.Signature1.is_type id abs with
| (false,_),true -> Environment.Lexicon.insert (Abstract_syntax.Type (id,loc,stype)) acc
| _,false -> emit_parse_error (Error.Unknown_type id) l
| (true,_), true -> failwith (Printf.sprintf "Bug: should not happen. \"%s\" should not be both a type and a term" id))
lex
cst
else raise (Error.(Error (Parse_error ((Unknown_constant_nor_variable id),loc))))
}
| cst = separated_nonempty_list(COMMA,id_or_sym) COLON_EQUAL t=not_atomic_term
{
fun lex e ->
let abs,obj = Environment.Lexicon.get_sig lex in
let term,loc,ws = t obj e in
let term,loc,ws = t Typing_env.empty obj [] in
List.fold_left
(fun (acc,k) (id,l) ->
(fun acc (id,l) ->
match Environment.Signature1.is_constant id abs,Environment.Signature1.is_type id abs with
| (true,_),false -> Environment.Lexicon.insert (Abstract_syntax.Constant (id,loc,term)) acc
| (false,_), _ -> emit_parse_error (Error.Unknown_constant id) l
......@@ -50,7 +59,7 @@
let abs,obj = Environment.Lexicon.get_sig lex in
let term,loc,ws = t obj e in
List.fold_left
(fun (acc,k) (id,l) ->
(fun acc (id,l) ->
match Environment.Signature1.is_constant id abs,Environment.Signature1.is_type id abs with
| (false,_),true -> Environment.Lexicon.insert (Abstract_syntax.Type (id,loc,term)) acc
| _,false -> emit_parse_error (Error.Unknown_type id) l
......
open UtilsLib
open AcgData
open Environment
(* A short name for the incremental parser API. *)
module I = Data_parser.MenhirInterpreter
(* -------------------------------------------------------------------------- *)
(* The above loop is shown for explanatory purposes, but can in fact be
replaced with the following code, which exploits the functions
[lexer_lexbuf_to_supplier] and [loop_handle] offered by Menhir. *)
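(* For reference, the relevant part of Menhir's incremental API (as assumed
   here) is:
     I.lexer_lexbuf_to_supplier :
       (Lexing.lexbuf -> token) -> Lexing.lexbuf -> I.supplier
     I.loop_handle :
       ('a -> 'answer) -> ('a I.checkpoint -> 'answer) ->
       I.supplier -> 'a I.checkpoint -> 'answer
   [succeed] below handles a completed parse and [fail] a syntax error. *)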
let succeed (data : Environment.t -> Environment.t) =
(* The parser has succeeded and produced a semantic value. Report it and return it. *)
let () = Printf.printf "Success!\n%!" in data
let fail lexbuf (c : (Environment.t -> Environment.t) I.checkpoint) =
(* The parser has suspended itself because of a syntax error. Stop. *)
match c with
| I.HandlingError env ->
let msg = Error.compute_comment_for_position (Lexing.lexeme_start_p lexbuf) (Lexing.lexeme_end_p lexbuf) in
Printf.fprintf stderr "%s\nError:" msg (*message current_state_num*)
| _ -> failwith "Should not happen. Always fails with a HandlingError"
let parse_data ?(override=false) ?(output=false) filename includes env =
try
let in_ch =
let fullname = Utils.find_file filename includes in
open_in fullname in
let lexbuf = Lexing.from_channel in_ch in
let () = Printf.printf "Parsing \"%s\"...\n%!" filename in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
let starting_parse_time = Sys.time () in
let e = I.loop supplier (Data_parser.Incremental.main lexbuf.lex_curr_p) env in
let ending_parse_time = Sys.time () in
let () = Printf.printf "Done (required %.3f seconds).\n%!" (ending_parse_time -. starting_parse_time) in
let () = match output with
| false -> ()
| true ->
Environment.iter
(function
| Environment.Signature sg ->
let () = Printf.printf "%s\n%!" (Environment.Signature1.to_string sg) in
Printf.printf "%s\n%!" (Error.warnings_to_string filename (Environment.Signature1.get_warnings sg))
| Environment.Lexicon lex ->
Printf.printf "%s\n%!" (Environment.Lexicon.to_string lex))
e in
Some e
with
| Utils.No_file(f,msg) ->
let e = Error.System_error (Printf.sprintf "No such file \"%s\" in %s" f msg) in
let () = Printf.fprintf stderr "Error: %s\n%!" (Error.error_msg e filename) in
None
| Sys_error s ->
let e = Error.System_error s in
let () = Printf.fprintf stderr "Error: %s\n%!" (Error.error_msg e filename) in
None
| Error.Error e ->
let () = Printf.fprintf stderr "Error: %s\n%!" (Error.error_msg e filename) in
None
let pp_error er t =
let () = Utils.sformat "@." in
let _ = Format.flush_str_formatter () in
let s,e = Error.get_loc_error er in
let s',e' = s.Lexing.pos_cnum - s.Lexing.pos_bol,e.Lexing.pos_cnum - e.Lexing.pos_bol in
let t_init = String.sub t 0 s' in
let t_error = Utils.red (String.sub t s' (e'-s')) in
let end_start_index = (s' + (e'-s')) in
let t_end = String.sub t end_start_index ((String.length t) - end_start_index) in
let () = Printf.fprintf stderr "%s%s%s\n" t_init t_error t_end in
Printf.fprintf stderr "Error: %s\n%!" (Error.error_msg er "stdin")
let parse_term ?(output=false) t sg =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
try
let abs_term,abs_type = I.loop supplier (Data_parser.Incremental.term_alone lexbuf.lex_curr_p) sg in
let () =
match output with
| true ->
let () = Utils.sformat "@[" in
let () = Utils.sformat "@[" in
let () = Environment.Signature1.term_to_formatted_string Format.str_formatter abs_term sg in
let () = Utils.sformat "@] :@ @[" in
let () = Environment.Signature1.type_to_formatted_string Format.str_formatter abs_type sg in
let () = Utils.sformat "@]" in
let expanded_term = Environment.Signature1.unfold abs_term sg in
let () =
if abs_term=expanded_term then
()
else
let () = Utils.sformat "@,@ @[=@ " in
let () = Environment.Signature1.term_to_formatted_string Format.str_formatter expanded_term sg in Utils.sformat "@]"
in
let () = Utils.sformat "@]" in
()
| false -> () in
Some (abs_term,abs_type)
with
| Error.Error er ->
let () = pp_error er t in
None
| End_of_file -> None
let parse_heterogenous_term ?(output=false) t lex =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
let abs,obj=Environment.Lexicon.get_sig lex in
try
let obj_term,abs_type =
I.loop supplier (Data_parser.Incremental.heterogenous_term_and_type lexbuf.lex_curr_p) abs obj in
let abs_type=Environment.Signature1.convert_type abs_type abs in
let obj_type=Environment.Lexicon.interpret_type abs_type lex in
let obj_term=Environment.Signature1.typecheck obj_term obj_type obj in
let () = match output with
| true ->
let () =
Printf.printf
"%s : %s (as image of %s)\n%!"
(Environment.Signature1.term_to_string obj_term obj)
(Environment.Signature1.type_to_string obj_type obj)
(Environment.Signature1.type_to_string abs_type abs) in
Printf.printf
"%s : %s (as image of %s)\n%!"
(Environment.Signature1.term_to_string (Environment.Signature1.unfold obj_term obj) obj)
(Environment.Signature1.type_to_string obj_type obj)
(Environment.Signature1.type_to_string abs_type abs)
| false -> () in
Some (obj_term,abs_type)
with
| Error.Error er ->
let () = pp_error er t in
None
| End_of_file -> None
let parse_sig_entry t sg =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
try
Some (I.loop supplier (Data_parser.Incremental.sig_entry_eoi lexbuf.lex_curr_p) sg )
with
| Error.Error er ->
let () = pp_error er t in
None
| End_of_file -> None
let parse_lex_entry t lex =
let lexbuf = Lexing.from_string t in
let supplier = I.lexer_lexbuf_to_supplier Data_lexer.lexer lexbuf in
try
Some (I.loop supplier (Data_parser.Incremental.lex_entry_eoi lexbuf.lex_curr_p) lex )
with
| Error.Error er ->
let () = pp_error er t in
None
| End_of_file -> None
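(* Illustration only, not part of this commit: a sketch of how the new entry
   points could be chained. The file name, include path, term, and type are
   hypothetical; [env] is an existing environment and [sg] a signature in
   which an atomic type [e] is assumed to be declared. *)
let _sketch env sg =
  (* Parse a data file, threading the given environment through. *)
  let env' = parse_data ~output:false "example.acg" ["."] env in
  (* Parse and typecheck a term against the signature [sg]. *)
  let term = parse_term ~output:true "lambda x. x : e -> e" sg in
  env', term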
......@@ -7,9 +7,13 @@
%token <Logic.Abstract_syntax.Abstract_syntax.location> INFIX
%token <Logic.Abstract_syntax.Abstract_syntax.location> BINDER
%start < AcgData.Environment.Environment.Signature1.t -> AcgData.Environment.Environment.t -> AcgData.Environment.Environment.Signature1.t> sig_entry_eoi
%%
sig_entry_eoi :
| e = sig_entry EOI { e }
%public sig_entry :
| decl = type_declaration { decl }
| def = type_definition { def }
......@@ -79,7 +83,7 @@
| Environment.Signature1.Duplicate_term_definition ->
emit_parse_error (Error.Duplicated_term id) loc)
s
dec
(dec s e)
}
......
......@@ -2,8 +2,32 @@
%token <Logic.Abstract_syntax.Abstract_syntax.location> LAMBDA0
%token <Logic.Abstract_syntax.Abstract_syntax.location> DOT
%%
(*%start < UtilsLib.Utils.StringSet.t-> AcgData.Environment.Environment.Signature1.t -> Logic.Abstract_syntax.Abstract_syntax.term * Logic.Abstract_syntax.Abstract_syntax.location * (AcgData.Error.warning list) > term
*)
%start < AcgData.Environment.Environment.Signature1.t -> Logic.Lambda.Lambda.term * Logic.Lambda.Lambda.stype > term_alone
%start < AcgData.Environment.Environment.Signature1.t -> AcgData.Environment.Environment.Signature1.t -> Logic.Abstract_syntax.Abstract_syntax.term * Logic.Abstract_syntax.Abstract_syntax.type_def > heterogenous_term_and_type
%%
%public term_alone:
| t = term COLON ty = type_expression EOI
{
fun sg ->
let t,_,ws = t Typing_env.empty sg [] in
let ty,_ = ty sg in
Environment.Signature1.convert_term t ty sg
}
%public heterogenous_term_and_type:
| t = term COLON ty = type_expression EOI
{
fun abs obj ->
let t,_,ws = t Typing_env.empty obj [] in
let ty,_ = ty abs in
t,ty
}
%public term:
| t = atomic_type_or_term {
fun type_env sg warnings ->
......@@ -23,15 +47,16 @@
%public not_atomic_term:
| t1 = term0 terms = term0+
{ fun type_env sg warning ->
let t1,l1, ws = t1 type_env sg warnings in
let terms,l,warnings =
let t1,l1, ws = t1 type_env sg warning in
let terms,warnings =
List.fold_left
(fun (lst,_,ws) t ->
let t',l',ws'= t type_env sg ws in
t'::lst,l',ws')
([],l1,ws)
(fun (lst,ws) t ->
let t',_,ws'= t type_env sg ws in
t'::lst,ws')
([],ws)
terms in
Term_sequence.parse_sequence [] (Some t1) (List.rev terms),new_loc l1 l, warnings }
let result,result_loc = Term_sequence.parse_sequence [] (Some t1) (List.rev terms) sg in
result,result_loc, warnings }
| t = bound_term { t }
%inline bound_term :
......@@ -43,7 +68,7 @@
type_env
vars in
let t',loc',ws' = t type_env sg warnings in
let n_loc = new_loc binder t' in
let n_loc = new_loc binder loc' in
((List.fold_left
(fun acc (var,loc) ->
(fun t -> acc (abs (var,loc,t) Abstract_syntax.Non_linear)))
......@@ -60,7 +85,7 @@
type_env
vars in
let t',loc',ws' = t type_env sg warnings in
let n_loc = new_loc binder t' in
let n_loc = new_loc binder loc' in
((List.fold_left
(fun acc (var,loc) ->
(fun t -> acc (abs (var,loc,t) Abstract_syntax.Linear)))
......@@ -80,21 +105,22 @@
(match Environment.Signature1.get_binder_argument_functional_type binder sg with
| None -> failwith "Bug: Binder of non functional type"
| Some k -> k)
| _ -> emit_parse_error (Error.Binder_expected binder) (p1,p2) in
| _ -> emit_parse_error (Error.Binder_expected binder) loc in
let type_env = List.fold_left
(fun acc (var,_) -> Typing_env.add var acc)
type_env
vars in
let t',loc',ws' = t type_env sg warnings in
let n_loc = new_loc binder t' in
let n_loc = new_loc loc loc' in
Abstract_syntax.App
(Const binder,
((List.fold_left
(Const (binder,loc),
(List.fold_left
(fun acc (var,loc) ->
(fun t -> acc (abs (var,loc,t) k )))
(fun t -> acc (abs (var,loc,t) linearity )))
(fun x -> x)
vars)
t')),
t',
n_loc),
n_loc,
ws' }
......
open Logic.Abstract_syntax
open Environment
open AcgData.Environment
open AcgData
type token =
......
......@@ -44,15 +44,15 @@ struct
type term =
| Var of string * location
| Var of (string * location)
(** If the term is a variable (bound by a binder) *)
| Const of string * location
| Const of (string * location)
(** If the term is a constant (not bound by a binder) *)
| Abs of string * location * term * location
| Abs of (string * location * term * location)
(** If the term is an intuitionistic abstraction *)
| LAbs of string * location * term * location
| LAbs of (string * location * term * location)
(** If the term is a linear abstraction *)
| App of term * term * location
| App of (term * term * location)
(** If the term is an application *)
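(* A small sketch (not part of this commit) of the uniformly parenthesized
   constructor arguments above, assuming, as elsewhere in this module, that
   [location] is [Lexing.position * Lexing.position]. *)
let dummy_loc = (Lexing.dummy_pos, Lexing.dummy_pos)
let _linear_identity : term =
  LAbs ("x", dummy_loc, Var ("x", dummy_loc), dummy_loc)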
let rec unlinearize_term = function
......@@ -63,13 +63,13 @@ struct
| App (t,u,loc) -> App (unlinearize_term t,unlinearize_term u,loc)
type type_def =
| Type_atom of string * location * term list
| Type_atom of (string * location * term list)
(** If the type is atomic. The third parameter is the list of terms to
which the type is applied in the case of a dependent type. The
list is empty if the type does not depend on any term *)
| Linear_arrow of type_def * type_def * location
| Linear_arrow of (type_def * type_def * location)
(** If the type is described with a linear abstraction *)
| Arrow of type_def * type_def * location
| Arrow of (type_def * type_def * location)
(** If the type is described with an intuitionistic abstraction
*)
(* | Dep of (string * location * type_def) * type_def * location
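(* Similarly for types (illustration only, with hypothetical atomic type
   names and the dummy location defined above): a linear arrow between two
   atomic types. *)
let _np_to_s : type_def =
  Linear_arrow (Type_atom ("np", dummy_loc, []),
                Type_atom ("s", dummy_loc, []),
                dummy_loc)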
......
......@@ -50,19 +50,19 @@ sig
(** The type of terms provided by the parser. *)