Why3 / why3 · Commits

Commit 113d78e9
authored May 07, 2020 by MARCHE Claude
compilation complete, at last

parent a3139b50
Showing 6 changed files with 295 additions and 176 deletions
Makefile.in                    +3    -3
plugins/cfg/cfg_ast.ml         +2    -0
plugins/cfg/cfg_lexer.mll      +272  -141
plugins/cfg/cfg_main.ml        +13   -29
plugins/cfg/cfg_parser.mly     +2    -0
src/parser/parser_common.mly   +3    -3
Makefile.in  (+3 -3)

@@ -503,6 +503,7 @@ PLUGGENERATED = plugins/tptp/tptp_lexer.ml \
        plugins/python/py_parser.ml plugins/python/py_parser.mli \
        plugins/microc/mc_lexer.ml \
        plugins/microc/mc_parser.ml plugins/microc/mc_parser.mli \
        plugins/cfg/cfg_lexer.ml \
        plugins/cfg/cfg_parser.ml plugins/cfg/cfg_parser.mli \
        plugins/parser/dimacs.ml

@@ -512,7 +513,7 @@ PLUG_TRANSFORM =
PLUG_TPTP   = tptp_ast tptp_parser tptp_typing tptp_lexer tptp_printer
PLUG_PYTHON = py_ast py_parser py_lexer py_main
PLUG_MICROC = mc_ast mc_parser mc_lexer mc_printer mc_main
PLUG_CFG    = cfg_ast cfg_tokens cfg_parser cfg_main
PLUG_CFG    = cfg_ast cfg_tokens cfg_parser cfg_lexer cfg_main
PLUGINS     = genequlin dimacs tptp python microc cfg

@@ -2141,8 +2142,7 @@ clean::
src/parser/parser.ml src/parser/parser.mli: $(PARSERS)
	$(SHOW) 'Menhir $<'
	$(HIDE)$(MENHIR) --table --explain --strict --base src/parser/parser \
	  $^
	$(HIDE)$(MENHIR) --table --explain --strict --base src/parser/parser $^
plugins/cfg/cfg_parser.ml plugins/cfg/cfg_parser.mli: plugins/cfg/cfg_parser.mly
	$(SHOW) 'Menhir $<'
plugins/cfg/cfg_ast.ml  (+2 -0)

@@ -33,6 +33,8 @@ and cfg_expr_desc =
      (** Boolean literal [False] *)
  | CFGconst of Constant.constant
      (** Constant literals *)
  | CFGgoto of ident
      (** goto a label *)
  (* TODO: expand *)
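The new constructor only records the name of the target label; type checking and control-flow handling come later in the plugin. As a rough standalone sketch of the shape involved (hypothetical miniature types, not Why3's actual Cfg_ast definitions), an expression type with a goto constructor and a traversal collecting its targets could look like this:

(* goto_sketch.ml -- hypothetical miniature of a CFG expression AST *)
type ident = string

type expr =
  | Etrue
  | Efalse
  | Econst of int
  | Egoto of ident          (* jump to a named label, like CFGgoto *)
  | Eseq of expr * expr

(* collect every label targeted by a goto, in order of appearance *)
let rec goto_targets (e : expr) : ident list =
  match e with
  | Etrue | Efalse | Econst _ -> []
  | Egoto l -> [l]
  | Eseq (e1, e2) -> goto_targets e1 @ goto_targets e2

let () =
  let prog = Eseq (Econst 42, Eseq (Egoto "L1", Egoto "L2")) in
  List.iter print_endline (goto_targets prog)   (* prints L1 then L2 *)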
plugins/cfg/cfg_lexer.mll  (+272 -141)

@@ -9,156 +9,287 @@
(*                                                                  *)
(********************************************************************)

Removed (141 lines): the previous lexer, adapted from the MicroC plugin's lexer.

{
  open Lexing
  open Cfg_ast
  open Cfg_parser

  exception Lexing_error of string

  let () = Why3.Exn_printer.register (fun fmt exn -> match exn with
    | Lexing_error s -> Format.fprintf fmt "syntax error: %s" s
    | _ -> raise exn)

  type state = Code | OneLineSpec | MultiLineSpec
  let state = ref Code

  let id_or_kwd =
    let h1 = Hashtbl.create 32 in
    List.iter (fun (s, tok) -> Hashtbl.add h1 s tok)
      ["if", IF; "else", ELSE; "return", RETURN; "while", WHILE; "for", FOR;
       "break", BREAK; "void", VOID; "int", INT; "scanf", SCANF; ];
    let h2 = Hashtbl.create 32 in
    List.iter (fun (s, tok) -> Hashtbl.add h2 s tok)
      ["true", TRUE; "false", FALSE; "forall", FORALL; "exists", EXISTS;
       "then", THEN; "let", LET; "in", LET; "at", AT; "old", OLD;
       "invariant", INVARIANT; "variant", VARIANT; "assert", ASSERT;
       "assume", ASSUME; "check", CHECK; "lemma", LEMMA; "axiom", AXIOM;
       "goal", GOAL; "requires", REQUIRES; "ensures", ENSURES;
       "label", LABEL; "function", FUNCTION; "predicate", PREDICATE; ];
    fun s ->
      try Hashtbl.find h1 s with Not_found ->
      if !state = Code then IDENT s
      else try Hashtbl.find h2 s with Not_found -> IDENT s

  let string_buffer = Buffer.create 1024
}

let letter = ['a'-'z' 'A'-'Z']
let digit = ['0'-'9']
let ident = letter (letter | digit | '_')*
let integer = ['0'-'9']+
let space = ' ' | '\t'
let comment = "//" [^ '@' '\n'] [^ '\n']*

rule next_token = parse
  | '\n'    { new_line lexbuf;
              if !state = OneLineSpec then state := Code;
              next_token lexbuf }
  | space+  { next_token lexbuf }
  | "//\n"  { new_line lexbuf; next_token lexbuf }
  | comment { next_token lexbuf }
  | '#' space* "include" space* '<' ([^ '>' '\n']* as file) '>' space* '\n'
            { new_line lexbuf; INCLUDE file }
  | "/*"    { comment lexbuf }
  | "*/"    { if !state <> MultiLineSpec
              then raise (Lexing_error "no comment to be closed");
              state := Code;
              next_token lexbuf }
  | "//@"   { state := OneLineSpec; next_token lexbuf }
  | "/*@"   { state := MultiLineSpec; next_token lexbuf }
  | "@"     { if !state <> MultiLineSpec
              then raise (Lexing_error "illegal character '@'");
              next_token lexbuf }
  | ident as id { id_or_kwd id }
  | '+'   { PLUS }
  | '-'   { MINUS }
  | '*'   { TIMES }
  | "/"   { DIV }
  | '%'   { MOD }
  | '='   { EQUAL }
  | "+="  { PLUSEQUAL }
  | "-="  { MINUSEQUAL }
  | "*="  { TIMESEQUAL }
  | "/="  { DIVEQUAL }
  | "=="  { CMP Beq }
  | "!="  { CMP Bneq }
  | "<"   { CMP Blt }
  | "<="  { CMP Ble }
  | ">"   { CMP Bgt }
  | ">="  { CMP Bge }
  | '('   { LEFTPAR }
  | ')'   { RIGHTPAR }
  | '['   { LEFTSQ }
  | ']'   { RIGHTSQ }
  | '{'   { LBRC }
  | '}'   { RBRC }
  | ','   { COMMA }
  | ':'   { COLON }
  | ';'   { SEMICOLON }
  | "&&"  { AND }
  | "||"  { OR }
  | "!"   { NOT }
  | "++"  { PLUSPLUS }
  | "--"  { MINUSMINUS }
  | '&'   { AMPERSAND }
  (* logic symbols *)
  | "->"  { ARROW }
  | "<-"  { LARROW }
  | "<->" { LRARROW }
  | "."   { DOT }
  | integer as s { INTEGER s }
  | '"'   { STRING (string lexbuf) }
  | eof   { EOF }
  | _ as c { raise (Lexing_error ("illegal character: " ^ String.make 1 c)) }

(* skip a comment, then resume next_token *)
and comment = parse
  | "*/"  { next_token lexbuf }
  | '\n'  { new_line lexbuf; comment lexbuf }
  | _     { comment lexbuf }
  | eof   { raise (Lexing_error "unterminated comment") }

and string = parse
  | '"'    { let s = Buffer.contents string_buffer in
             Buffer.reset string_buffer;
             s }
  | "\\n"  { Buffer.add_char string_buffer '\n'; string lexbuf }
  | "\\\"" { Buffer.add_char string_buffer '"'; string lexbuf }
  | _ as c { Buffer.add_char string_buffer c; string lexbuf }
  | eof    { raise (Lexing_error "unterminated string") }

{
  let parse file c =
    let lb = Lexing.from_channel c in
    Why3.Loc.set_file file lb;
    Why3.Loc.with_location (Mc_parser.file next_token) lb

  (* Entries for transformations: similar to lexer.mll *)
  let build_parsing_function entry lb = Why3.Loc.with_location (entry next_token) lb

  let parse_term = build_parsing_function Mc_parser.term_eof
  let parse_term_list = build_parsing_function Mc_parser.term_comma_list_eof
  let parse_list_ident = build_parsing_function Mc_parser.ident_comma_list_eof
}

Added (272 lines): the new lexer, a copy of src/parser/lexer.mll adapted for the CFG plugin.

(* This is a copy of [src/parser/lexer.mll], with the following minor changes:
   * [open Parser] -> [open Cfg_tokens]
   * addition of keywords:
     [
       "cfg", CFG;
       "goto", GOTO;
     ]
 *)

{
  open Why3
  open Cfg_tokens

  let keywords = Hashtbl.create 97
  let () =
    List.iter
      (fun (x,y) -> Hashtbl.add keywords x y)
      (* keep the following files synchronized:
           doc/ext/why3.py
           src/trywhy3/mode-why3.js
           src/why3doc/doc_lexer.mll
           share/emacs/why3.el
           share/vim/syntax/why3.vim
       *)
      [ "cfg", CFG; "goto", GOTO;
        "abstract", ABSTRACT; "absurd", ABSURD; "alias", ALIAS; "any", ANY;
        "as", AS; "assert", ASSERT; "assume", ASSUME; "at", AT;
        "axiom", AXIOM; "begin", BEGIN; "break", BREAK; "by", BY;
        "check", CHECK; "clone", CLONE; "coinductive", COINDUCTIVE;
        "constant", CONSTANT; "continue", CONTINUE; "diverges", DIVERGES;
        "do", DO; "done", DONE; "downto", DOWNTO; "else", ELSE; "end", END;
        "ensures", ENSURES; "epsilon", EPSILON; "exception", EXCEPTION;
        "exists", EXISTS; "export", EXPORT; "false", FALSE;
        "float", FLOAT; (* contextual *)
        "for", FOR; "forall", FORALL; "fun", FUN; "function", FUNCTION;
        "ghost", GHOST; "goal", GOAL; "if", IF; "import", IMPORT; "in", IN;
        "inductive", INDUCTIVE; "invariant", INVARIANT; "label", LABEL;
        "lemma", LEMMA; "let", LET; "match", MATCH; "meta", META;
        "module", MODULE; "mutable", MUTABLE; "not", NOT; "old", OLD;
        "partial", PARTIAL; "predicate", PREDICATE; "private", PRIVATE;
        "pure", PURE; "raise", RAISE; "raises", RAISES;
        "range", RANGE; (* contextual *)
        "reads", READS; "rec", REC;
        "ref", REF; (* contextual *)
        "requires", REQUIRES; "return", RETURN; "returns", RETURNS;
        "scope", SCOPE; "so", SO; "then", THEN; "theory", THEORY; "to", TO;
        "true", TRUE; "try", TRY; "type", TYPE; "use", USE; "val", VAL;
        "variant", VARIANT; "while", WHILE; "with", WITH; "writes", WRITES; ]
}

let space = [' ' '\t' '\r']
let quote = '\''

let bin = ['0' '1']
let oct = ['0'-'7']
let dec = ['0'-'9']
let hex = ['0'-'9' 'a'-'f' 'A'-'F']

let bin_sep = ['0' '1' '_']
let oct_sep = ['0'-'7' '_']
let dec_sep = ['0'-'9' '_']
let hex_sep = ['0'-'9' 'a'-'f' 'A'-'F' '_']

let lalpha = ['a'-'z']
let ualpha = ['A'-'Z']
let alpha = ['a'-'z' 'A'-'Z']

let suffix = (alpha | quote* dec_sep)* quote*
let lident = ['a'-'z' '_'] suffix
let uident = ['A'-'Z'] suffix

let core_suffix = quote alpha suffix
let core_lident = lident core_suffix+
let core_uident = uident core_suffix+

let op_char_1 = ['=' '<' '>' '~']
let op_char_2 = ['+' '-']
let op_char_3 = ['*' '/' '\\' '%']
let op_char_4 = ['!' '$' '&' '?' '@' '^' '.' ':' '|' '#']
let op_char_34 = op_char_3 | op_char_4
let op_char_234 = op_char_2 | op_char_34
let op_char_1234 = op_char_1 | op_char_234

let op_char_pref = ['!' '?']

rule token = parse
  | "[##" space* ("\"" ([^ '\010' '\013' '"' ]* as file) "\"")?
    space* (dec+ as line) space* (dec+ as char) space* "]"
      { Lexlib.update_loc lexbuf file (int_of_string line) (int_of_string char);
        token lexbuf }
  | "[#" space* "\"" ([^ '\010' '\013' '"' ]* as file) "\""
    space* (dec+ as line) space* (dec+ as bchar) space* (dec+ as echar) space* "]"
      { POSITION (Loc.user_position file (int_of_string line)
                    (int_of_string bchar) (int_of_string echar)) }
  | "[@" space* ([^ ' ' '\n' ']']+ (' '+ [^ ' ' '\n' ']']+)* as lbl) space* ']'
      { ATTRIBUTE lbl }
  | '\n'   { Lexing.new_line lexbuf; token lexbuf }
  | space+ { token lexbuf }
  | '_'    { UNDERSCORE }
  | lident as id
      { try Hashtbl.find keywords id with Not_found -> LIDENT id }
  | core_lident as id { CORE_LIDENT id }
  | uident as id      { UIDENT id }
  | core_uident as id { CORE_UIDENT id }
  | dec dec_sep* as s
      { INTEGER Number.(int_literal ILitDec ~neg:false (Lexlib.remove_underscores s)) }
  | '0' ['x' 'X'] (hex hex_sep* as s)
      { INTEGER Number.(int_literal ILitHex ~neg:false (Lexlib.remove_underscores s)) }
  | '0' ['o' 'O'] (oct oct_sep* as s)
      { INTEGER Number.(int_literal ILitOct ~neg:false (Lexlib.remove_underscores s)) }
  | '0' ['b' 'B'] (bin bin_sep* as s)
      { INTEGER Number.(int_literal ILitBin ~neg:false (Lexlib.remove_underscores s)) }
  | (dec+ as i) ".."
      { Lexlib.backjump lexbuf 2;
        INTEGER Number.(int_literal ILitDec ~neg:false i) }
  | '0' ['x' 'X'] (hex+ as i) ".."
      { Lexlib.backjump lexbuf 2;
        INTEGER Number.(int_literal ILitHex ~neg:false i) }
  | (dec+ as i)     ("" as f)   ['e' 'E'] (['-' '+']? dec+ as e)
  | (dec+ as i) '.' (dec* as f) (['e' 'E'] (['-' '+']? dec+ as e))?
  | (dec* as i) '.' (dec+ as f) (['e' 'E'] (['-' '+']? dec+ as e))?
      { REAL (Number.real_literal ~radix:10 ~neg:false ~int:i ~frac:f
                ~exp:(Opt.map Lexlib.remove_leading_plus e)) }
  | '0' ['x' 'X'] (hex+ as i) ("" as f)   ['p' 'P'] (['-' '+']? dec+ as e)
  | '0' ['x' 'X'] (hex+ as i) '.' (hex* as f) (['p' 'P'] (['-' '+']? dec+ as e))?
  | '0' ['x' 'X'] (hex* as i) '.' (hex+ as f) (['p' 'P'] (['-' '+']? dec+ as e))?
      { REAL (Number.real_literal ~radix:16 ~neg:false ~int:i ~frac:f
                ~exp:(Opt.map Lexlib.remove_leading_plus e)) }
  | "(*)" { Lexlib.backjump lexbuf 2; LEFTPAR }
  | "(*"  { Lexlib.comment lexbuf; token lexbuf }
  | "'" (lalpha suffix as id) { QUOTE_LIDENT id }
  | ","   { COMMA }
  | "("   { LEFTPAR }
  | ")"   { RIGHTPAR }
  | "{"   { LEFTBRC }
  | "}"   { RIGHTBRC }
  | ":"   { COLON }
  | ";"   { SEMICOLON }
  | "->"  { ARROW }
  | "->'" { Lexlib.backjump lexbuf 1; ARROW }
  | "<-"  { LARROW }
  | "<->" { LRARROW }
  | "&&"  { AMPAMP }
  | "||"  { BARBAR }
  | "/\\" { AND }
  | "\\/" { OR }
  | "."   { DOT }
  | ".."  { DOTDOT }
  | "&"   { AMP }
  | "|"   { BAR }
  | "<"   { LT }
  | ">"   { GT }
  | "<>"  { LTGT }
  | "="   { EQUAL }
  | "-"   { MINUS }
  | "["   { LEFTSQ }
  | "]"   { RIGHTSQ }
  | "]" (quote+ as s)                          { RIGHTSQ_QUOTE s }
  | ")" ('\'' alpha suffix core_suffix* as s)  { RIGHTPAR_QUOTE s }
  | ")" ('_' alpha suffix core_suffix* as s)   { RIGHTPAR_USCORE s }
  | op_char_pref op_char_4* quote* as s                 { OPPREF s }
  | op_char_1234* op_char_1 op_char_1234* quote* as s   { OP1 s }
  | op_char_234* op_char_2 op_char_234* quote* as s     { OP2 s }
  | op_char_34* op_char_3 op_char_34* quote* as s       { OP3 s }
  | op_char_4+ as s                                     { OP4 s }
  | "\""  { STRING (Lexlib.string lexbuf) }
  | eof   { EOF }
  | _ as c { Lexlib.illegal_character c lexbuf }

{
  let parse_channel file c =
    let lb = Lexing.from_channel c in
    Loc.set_file file lb;
    Cfg_parser.cfgfile token lb
}
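Apart from the two extra keywords, the heart of the adapted lexer is unchanged from src/parser/lexer.mll: each identifier is looked up in the keywords hash table and falls back to an identifier token when absent. A minimal standalone sketch of that lookup technique, with a made-up token type rather than the plugin's Cfg_tokens:

(* keyword_sketch.ml -- hypothetical token type, same lookup pattern as the lexer *)
type token = KW_CFG | KW_GOTO | KW_LET | LIDENT of string

let keywords : (string, token) Hashtbl.t = Hashtbl.create 97

let () =
  List.iter (fun (x, y) -> Hashtbl.add keywords x y)
    [ "cfg", KW_CFG; "goto", KW_GOTO; "let", KW_LET ]

(* same shape as the lexer action: keyword if known, identifier otherwise *)
let ident_or_keyword (id : string) : token =
  try Hashtbl.find keywords id with Not_found -> LIDENT id

let () =
  match ident_or_keyword "goto", ident_or_keyword "x" with
  | KW_GOTO, LIDENT "x" -> print_endline "lookup behaves as expected"
  | _ -> print_endline "unexpected"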
plugins/cfg/cfg_main.ml  (+13 -29)

@@ -313,41 +313,25 @@ let decl = function
      Typing.add_decl id.id_loc (Dlogic [d])
  | Mc_ast.Dprop (pk, id, t) ->
      Typing.add_decl id.id_loc (Dprop (pk, id, t))
*)

let translate_decl d acc =
  match d with
  | Dmlw_decl d -> d :: acc
  | Dletcfg l -> acc (* (id,_,_) -> Dmeta (id,[]) *)

let translate dl = List.iter decl dl

let translate (m, dl) = (m, List.fold_right translate_decl dl [])

let read_channel env path file c =
  let f : Mc_ast.file = Mc_lexer.parse file c in
  let f : Cfg_ast.cfg_file = Cfg_lexer.parse_channel file c in
  Debug.dprintf debug "%s parsed successfully.@." file;
  let file = Filename.basename file in
  let file = Filename.chop_extension file in
  let name = Strings.capitalize file in
  Debug.dprintf debug "building module %s.@." name;
  Typing.open_file env path;
  let loc = Loc.user_position file 0 0 0 in
  Typing.open_module (mk_id ~loc name);
  let use_import (f, m) =
    let m = mk_id ~loc m in
    let qid = Qdot (Qident (mk_id ~loc f), m) in
    let decl = Ptree.Duseimport(loc,false,[(qid,None)]) in
    Typing.add_decl loc decl in
  List.iter use_import
    ["int", "Int"; "ref", "Refint"; "microc", "MicroC"];
  translate f;
  Typing.close_module loc;
  let mm = Typing.close_file () in
  if path = [] && Debug.test_flag debug then begin
    let add_m _ m modm = Ident.Mid.add m.mod_theory.Theory.th_name m modm in
    let print_m _ m = Pmodule.print_module Format.err_formatter m in
    Ident.Mid.iter print_m (Mstr.fold add_m mm Ident.Mid.empty)
  end;
  mm
*)
  let ptree = Modules (List.map translate f) in
  Typing.type_mlw_file env [] (file ^ ".mlw") ptree

let () =
  Env.register_format mlw_language "CFG" ["cfg"] read_channel
    ~desc:"CFG format"
  Env.register_format mlw_language "mlcfg" ["mlcfg"] read_channel
    ~desc:"whyml extending with functions implemented by control-flow-graphs"

(*
(* Add an extension of task printing *)
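The new translate keeps only the declarations that are already plain WhyML declarations, folding the mixed list with List.fold_right so that their order is preserved (the CFG functions themselves are not translated yet at this stage). A standalone sketch of that accumulation pattern, with placeholder constructors rather than the plugin's actual Cfg_ast types:

(* translate_sketch.ml -- hypothetical declaration type, same fold shape *)
type decl =
  | Dmlw of string      (* a declaration kept as-is *)
  | Dcfg of string      (* a CFG function, not translated here *)

(* keep the Dmlw payloads, drop the Dcfg ones, preserving order *)
let translate_decl d acc =
  match d with
  | Dmlw s -> s :: acc
  | Dcfg _ -> acc

let translate (name, decls) = (name, List.fold_right translate_decl decls [])

let () =
  let name, kept = translate ("M", [ Dmlw "use int.Int"; Dcfg "f"; Dmlw "goal g" ]) in
  Printf.printf "%s keeps %d declarations\n" name (List.length kept)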
plugins/cfg/cfg_parser.mly  (+2 -0)

@@ -54,4 +54,6 @@ body:
cfgexpr:
| TRUE        { mk_cfgexpr CFGtrue $startpos $endpos }
| FALSE       { mk_cfgexpr CFGfalse $startpos $endpos }
| GOTO uident { mk_cfgexpr (CFGgoto $2) $startpos $endpos }
;
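Each semantic action wraps the constructed node with the start and end positions that Menhir provides via $startpos and $endpos. A standalone sketch of a located-node helper in the style of mk_cfgexpr (hypothetical record and constructor names, not the plugin's actual definitions):

(* mk_node_sketch.ml -- hypothetical located-AST helper in the style of mk_cfgexpr *)
type 'a located = { desc : 'a; loc_start : Lexing.position; loc_end : Lexing.position }

type cfg_expr_desc = CFGtrue | CFGfalse | CFGgoto of string

(* a Menhir action would pass $startpos and $endpos as the two positions *)
let mk_cfgexpr (d : cfg_expr_desc) (s : Lexing.position) (e : Lexing.position)
    : cfg_expr_desc located =
  { desc = d; loc_start = s; loc_end = e }

let () =
  let e = mk_cfgexpr (CFGgoto "L1") Lexing.dummy_pos Lexing.dummy_pos in
  match e.desc with
  | CFGgoto l -> print_endline ("goto " ^ l)
  | CFGtrue | CFGfalse -> ()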
src/parser/parser_common.mly  (+3 -3)

@@ -1285,7 +1285,7 @@ for_dir:
(* Specification *)

spec:
%public spec:
| (* epsilon *) %prec prec_no_spec  { empty_spec }
| single_spec spec                  { spec_union $1 $2 }

@@ -1343,7 +1343,7 @@ return_opt:
| (* epsilon *)      { mk_pat Pwild $startpos $endpos, None, Ity.MaskVisible }
| COLON return_named { let pat, ty, mask = $2 in pat, Some ty, mask }

return_named:
%public return_named:
| LEFTPAR ret_cast RIGHTPAR
    { $2 }
| LEFTPAR comma_list2(ret_cast) RIGHTPAR

@@ -1494,7 +1494,7 @@ ident_nq:
| lident_nq    { $1 }
| lident_op_nq { $1 }

uident:
%public uident:
| UIDENT      { mk_id $1 $startpos $endpos }
| CORE_UIDENT { mk_id $1 $startpos $endpos }