(* lexer.mll — ocamllex specification for the Why3 lexer *)
(********************************************************************)
(*                                                                  *)
(*  The Why3 Verification Platform   /   The Why3 Development Team  *)
(*  Copyright 2010-2013   --   INRIA - CNRS - Paris-Sud University  *)
(*                                                                  *)
(*  This software is distributed under the terms of the GNU Lesser  *)
(*  General Public License version 2.1, with the special exception  *)
(*  on linking described in file LICENSE.                           *)
(*                                                                  *)
(********************************************************************)

{
  open Format
  open Lexing
  open Parser

  (* Lexical errors raised while scanning. Each carries enough
     information to print a readable diagnostic (see below). *)

  exception IllegalCharacter of char
  exception UnterminatedComment
  exception UnterminatedString

  (* Register a printer for the lexer's own exceptions; any other
     exception is re-raised so the next registered printer gets it. *)
  let () = Exn_printer.register (fun fmt e -> match e with
    | IllegalCharacter c -> fprintf fmt "illegal character %c" c
    | UnterminatedComment -> fprintf fmt "unterminated comment"
    | UnterminatedString -> fprintf fmt "unterminated string"
    | _ -> raise e)
29
  let keywords = Hashtbl.create 97
30 31
  let () =
    List.iter
32
      (fun (x,y) -> Hashtbl.add keywords x y)
33
      [
34 35 36
        "as", AS;
        "axiom", AXIOM;
        "clone", CLONE;
37
        "coinductive", COINDUCTIVE;
38
        "constant", CONSTANT;
39 40 41 42 43
        "else", ELSE;
        "end", END;
        "epsilon", EPSILON;
        "exists", EXISTS;
        "export", EXPORT;
44
        "false", FALSE;
45
        "forall", FORALL;
46
        "function", FUNCTION;
47 48 49 50 51 52 53 54 55 56 57
        "goal", GOAL;
        "if", IF;
        "import", IMPORT;
        "in", IN;
        "inductive", INDUCTIVE;
        "lemma", LEMMA;
        "let", LET;
        "match", MATCH;
        "meta", META;
        "namespace", NAMESPACE;
        "not", NOT;
58
        "predicate", PREDICATE;
59
        "prop", PROP;
60 61 62 63 64 65 66 67 68 69 70 71 72
        "then", THEN;
        "theory", THEORY;
        "true", TRUE;
        "type", TYPE;
        "use", USE;
        "with", WITH;
        (* programs *)
        "abstract", ABSTRACT;
        "absurd", ABSURD;
        "any", ANY;
        "assert", ASSERT;
        "assume", ASSUME;
        "begin", BEGIN;
73
        "check", CHECK;
74 75
        "do", DO;
        "done", DONE;
76
        "downto", DOWNTO;
77
        "ensures", ENSURES;
78 79 80
        "exception", EXCEPTION;
        "for", FOR;
        "fun", FUN;
81
        "ghost", GHOST;
82 83 84 85 86
        "invariant", INVARIANT;
        "loop", LOOP;
        "model", MODEL;
        "module", MODULE;
        "mutable", MUTABLE;
87
        "private", PRIVATE;
88 89 90 91
        "raise", RAISE;
        "raises", RAISES;
        "reads", READS;
        "rec", REC;
92 93
        "requires", REQUIRES;
        "returns", RETURNS;
94 95
        "to", TO;
        "try", TRY;
96
        "val", VAL;
97 98
        "variant", VARIANT;
        "while", WHILE;
99
        "writes", WRITES;
100
      ]
101

102 103
  let newline lexbuf =
    let pos = lexbuf.lex_curr_p in
104
    lexbuf.lex_curr_p <-
105 106
      { pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum }

107
  let string_start_loc = ref Loc.dummy_position
108 109
  let string_buf = Buffer.create 1024

110 111
  let comment_start_loc = ref Loc.dummy_position

112 113 114 115 116 117 118 119
  let char_for_backslash = function
    | 'n' -> '\n'
    | 't' -> '\t'
    | c -> c

  (* Reset [lexbuf]'s current position from a "##" line directive:
     [file] optionally overrides the file name; [line] is the new line
     number and [chars] the column offset, both as decimal strings.
     @raise Failure if [line] or [chars] is not a valid integer. *)
  let update_loc lexbuf file line chars =
    let pos = lexbuf.lex_curr_p in
    let new_file = match file with None -> pos.pos_fname | Some s -> s in
    lexbuf.lex_curr_p <-
      { pos with
          pos_fname = new_file;
          pos_lnum = int_of_string line;
          pos_bol = pos.pos_cnum - int_of_string chars;
      }
  (* Drop a single leading '+' sign, if any: "+12" -> "12".
     Used to normalize exponents before handing them to [Number]. *)
  let remove_leading_plus s =
    let n = String.length s in
    if n > 0 && s.[0] = '+' then String.sub s 1 (n-1) else s
131
  let loc lb = Loc.extract (lexeme_start_p lb, lexeme_end_p lb)
132

133 134 135 136 137 138 139 140 141 142 143
  let remove_underscores s =
    if String.contains s '_' then begin
      let count =
        let nb = ref 0 in
        String.iter (fun c -> if c = '_' then incr nb) s;
        !nb in
      let t = String.create (String.length s - count) in
      let i = ref 0 in
      String.iter (fun c -> if c <> '_' then (t.[!i] <-c; incr i)) s;
      t
    end else s
}

(* Named regular expressions shared by the rules below. *)
let newline = '\n'
let space = [' ' '\t' '\r']
let lalpha = ['a'-'z' '_']
let ualpha = ['A'-'Z']
let alpha = lalpha | ualpha
let digit = ['0'-'9']
let lident = lalpha (alpha | digit | '\'')*
let uident = ualpha (alpha | digit | '\'')*
let hexadigit = ['0'-'9' 'a'-'f' 'A'-'F']

(* Operator characters, grouped by precedence class (OP1..OP4). *)
let op_char_1 = ['=' '<' '>' '~']
let op_char_2 = ['+' '-']
let op_char_3 = ['*' '/' '%']
let op_char_4 = ['!' '$' '&' '?' '@' '^' '.' ':' '|' '#']
let op_char_34 = op_char_3 | op_char_4
let op_char_234 = op_char_2 | op_char_34
let op_char_1234 = op_char_1 | op_char_234

(* Characters that may start a prefix operator. *)
let op_char_pref = ['!' '?']
rule token = parse
  (* "## "file" line char ##": resynchronize the lexbuf position
     (emitted by preprocessors), then keep lexing. *)
  | "##" space* ("\"" ([^ '\010' '\013' '"' ]* as file) "\"")?
    space* (digit+ as line) space* (digit+ as char) space* "##"
      { update_loc lexbuf file line char; token lexbuf }
  (* "# "file" line bchar echar #": an explicit position token. *)
  | "#" space* "\"" ([^ '\010' '\013' '"' ]* as file) "\""
    space* (digit+ as line) space* (digit+ as bchar) space*
    (digit+ as echar) space* "#"
      { POSITION (Loc.user_position file (int_of_string line)
                 (int_of_string bchar) (int_of_string echar)) }
  | newline
      { newline lexbuf; token lexbuf }
  | space+
      { token lexbuf }
  | '_'
      { UNDERSCORE }
  (* Identifiers: lowercase ones may be keywords (see [keywords]). *)
  | lident as id
      { try Hashtbl.find keywords id with Not_found -> LIDENT id }
  | uident as id
      { UIDENT id }
  (* Integer literals (decimal, hexadecimal, octal, binary);
     '_' separators are stripped before conversion. *)
  | ['0'-'9'] ['0'-'9' '_']* as s
      { INTEGER (Number.int_const_dec (remove_underscores s)) }
  | '0' ['x' 'X'] (['0'-'9' 'A'-'F' 'a'-'f']['0'-'9' 'A'-'F' 'a'-'f' '_']* as s)
      { INTEGER (Number.int_const_hex (remove_underscores s)) }
  | '0' ['o' 'O'] (['0'-'7'] ['0'-'7' '_']* as s)
      { INTEGER (Number.int_const_oct (remove_underscores s)) }
  | '0' ['b' 'B'] (['0'-'1'] ['0'-'1' '_']* as s)
      { INTEGER (Number.int_const_bin (remove_underscores s)) }
  (* Decimal real literals: integer part [i], fraction [f],
     optional exponent [e] (leading '+' normalized away). *)
  | (digit+ as i) ("" as f) ['e' 'E'] (['-' '+']? digit+ as e)
  | (digit+ as i) '.' (digit* as f) (['e' 'E'] (['-' '+']? digit+ as e))?
  | (digit* as i) '.' (digit+ as f) (['e' 'E'] (['-' '+']? digit+ as e))?
      { FLOAT (Number.real_const_dec i f (Opt.map remove_leading_plus e)) }
  (* Hexadecimal real literals with binary exponent 'p'. *)
  | '0' ['x' 'X'] (hexadigit+ as i) ("" as f) ['p' 'P'] (['-' '+']? digit+ as e)
  | '0' ['x' 'X'] (hexadigit+ as i) '.' (hexadigit* as f)
        (['p' 'P'] (['-' '+']? digit+ as e))?
  | '0' ['x' 'X'] (hexadigit* as i) '.' (hexadigit+ as f)
        (['p' 'P'] (['-' '+']? digit+ as e))?
      { FLOAT (Number.real_const_hex i f (Opt.map remove_leading_plus e)) }
  (* "(*)" is an operator token, not the start of a comment. *)
  | "(*)"
      { LEFTPAR_STAR_RIGHTPAR }
  | "(*"
      { comment_start_loc := loc lexbuf; comment lexbuf; token lexbuf }
  (* Quoted (type-variable-like) identifiers. *)
  | "~'" (lident as id)
      { OPAQUE_QUOTE_LIDENT id }
  | "'" (lident as id)
      { QUOTE_LIDENT id }
  | "'" (uident as id)
      { QUOTE_UIDENT id }
  (* Punctuation and fixed operators. *)
  | ","
      { COMMA }
  | "("
      { LEFTPAR }
  | ")"
      { RIGHTPAR }
  | "{"
      { LEFTBRC }
  | "}"
      { RIGHTBRC }
  | ":"
      { COLON }
  | ";"
      { SEMICOLON }
  | "->"
      { ARROW }
  | "<-"
      { LARROW }
  | "<->"
      { LRARROW }
  | "&&"
      { AMPAMP }
  | "||"
      { BARBAR }
  | "/\\"
      { AND }
  | "\\/"
      { OR }
  | "\\"
      { LAMBDA }
  | "."
      { DOT }
  | "|"
      { BAR }
  | "="
      { EQUAL }
  | "<>"
      { LTGT }
  | "["
      { LEFTSQ }
  | "]"
      { RIGHTSQ }
  (* User-defined operators, classified by their character classes. *)
  | op_char_pref op_char_4* as s
      { OPPREF s }
  | op_char_1234* op_char_1 op_char_1234* as s
      { OP1 s }
  | op_char_234*  op_char_2 op_char_234*  as s
      { OP2 s }
  | op_char_34*   op_char_3 op_char_34*  as s
      { OP3 s }
  | op_char_4+ as s
      { OP4 s }
  (* String literal: remember its start for error reporting, then
     scan its body with the [string] rule. *)
  | "\""
      { string_start_loc := loc lexbuf; STRING (string lexbuf) }
  | eof
      { EOF }
  | _ as c
      { raise (IllegalCharacter c) }

(* Skip a (possibly nested) comment. "(*)" inside a comment neither
   opens nor closes a level; a nested "(*" is skipped by a recursive
   call before resuming the current level. *)
and comment = parse
  | "(*)"
      { comment lexbuf }
  | "*)"
      { () }
  | "(*"
      { comment lexbuf; comment lexbuf }
  | newline
      { newline lexbuf; comment lexbuf }
  | eof
      { raise (Loc.Located (!comment_start_loc, UnterminatedComment)) }
  | _
      { comment lexbuf }

(* Scan the body of a string literal into [string_buf]; the closing
   quote returns the accumulated contents and resets the buffer. *)
and string = parse
  | "\""
      { let s = Buffer.contents string_buf in
        Buffer.clear string_buf;
        s }
  | "\\" (_ as c)
      { Buffer.add_char string_buf (char_for_backslash c); string lexbuf }
  | newline
      { newline lexbuf; Buffer.add_char string_buf '\n'; string lexbuf }
  | eof
      { raise (Loc.Located (!string_start_loc, UnterminatedString)) }
  | _ as c
      { Buffer.add_char string_buf c; string lexbuf }

{
  (* Parse a logic file: open a typing environment for [path], run the
     [logic_file] grammar entry on [lb], then close the file. *)
  let parse_logic_file env path lb =
    open_file token (Lexing.from_string "") (Typing.open_file env path);
    Loc.with_location (logic_file token) lb;
    Typing.close_file ()

  (* Parse a whole program file via the [program_file] entry point. *)
  let parse_program_file inc lb =
    open_file token (Lexing.from_string "") inc;
    Loc.with_location (program_file token) lb

  (* Count the tokens produced from [lb], returning (a, p) where [a] is
     the number of tokens inside { ... } annotations and [p] the number
     outside. Braces are assumed not to nest (checked by assertions). *)
  let token_counter lb =
    let rec loop in_annot a p =
      match token lb with
        | LEFTBRC -> assert (not in_annot); loop true a p
        | RIGHTBRC -> assert in_annot; loop false a p
        | EOF -> assert (not in_annot); (a,p)
        | _ ->
            if in_annot
            then loop in_annot (a+1) p
            else loop in_annot a (p+1)
    in
    loop false 0 0

  (* Env format reader: lex channel [c] under file name [file] and
     parse it as a logic file. *)
  let read_channel env path file c =
    let lb = Lexing.from_channel c in
    Loc.set_file file lb;
    (), parse_logic_file env path lb

  let library_of_env = Env.register_format "why" ["why"] read_channel
    ~desc:"Why@ logical@ language"

  (* Shadow [parse_logic_file] so external callers go through the
     registered "why" format. *)
  let parse_logic_file env = parse_logic_file (library_of_env env)
}

(*
Local Variables:
compile-command: "unset LANG; make -C ../.. test"
End:
*)