Commit 49da566f authored by Gérard Huet

New style |> Html and Web

parent 0fb78fd9
......@@ -12,7 +12,6 @@
(* Generate the page displaying a view of the given corpus subdirectory.
The output channel is as always either [stdout] for CGI output or
a static HTML file (according to the "magic switch"
Web.output_channel). NB: No error handling is done by this
function. *)
[Web.output_channel]). NB: No error handling is done by this function. *)
value mk_page : string -> Web_corpus.permission -> unit
;
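
As a usage illustration of the interface above: a hypothetical call, not part of the commit, where the subdirectory name is made up and [Web_corpus.Annotator] is the permission constructor that appears later in this diff.

(* Hypothetical invocation: display one corpus subdirectory, writing to
   [Web.output_channel] as described in the comment above. *)
mk_page "examples/" Web_corpus.Annotator;
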
......@@ -76,9 +76,9 @@ value transducer = fun
| Vocc -> transducers.vocc (* consonant-initial vocatives *)
| Iiy -> transducers.iiy (* iic avyayiibhava *)
| Avy -> transducers.avya (* ifc avyayiibhava *)
| Inftu -> transducers.inftu (* infinitives in -tu *)
| Kama -> transducers.kama (* forms of kaama *)
| Sfx -> transducers.sfx (* ifc taddhita suffixes *)
| Inftu -> transducers.inftu (* infinitives in -tu iic. Renou HLS 72 *)
| Kama -> transducers.kama (* ifcs of kaama/manas: tyaktukaama dra.s.tumanas *)
| Sfx -> transducers.sfx (* ifc taddhita suffixes *)
| Isfx -> transducers.isfx (* iifc taddhita suffixes *)
| Cache -> transducers.cache (* cached forms *)
| Noun | Iic | Iik | Voca | Krid | Pvk | Vok
......
......@@ -423,14 +423,14 @@ value check_sentence translit us text_orig checkpoints sentence
] in do
{ make_visual cur_chunk.offset
; find_conflict 0
; pl html_break
; pl (html_latin16 "Sentence: ")
; ps (deva16_blue devainput) (* devanagari *)
; pl html_break
; ps (div_begin Latin16)
; pl (table_begin Spacing20)
; pl tr_begin
; ps (td_wrap (call_undo text checkpoints ^ "Undo"))
; html_break |> pl
; html_latin16 "Sentence: " |> pl
; deva16_blue devainput |> ps (* devanagari *)
; html_break |> ps
; div_begin Latin16 |> ps
; table_begin Spacing20 |> pl
; tr_begin |> pl
; td_wrap (call_undo text checkpoints ^ "Undo") |> ps
; let call_scl_parser n = (* invocation of scl parser *)
if scl_toggle then
ps (td_wrap (call_reader text cpts "o" ^ "UoH Analysis Mode"))
......@@ -438,34 +438,34 @@ value check_sentence translit us text_orig checkpoints sentence
match count with
[ Num.Int n -> if n > max_count then
(* too many solutions would choke the parsers *)
ps (td_wrap ("(" ^ string_of_int n ^ " Solutions)"))
td_wrap ("(" ^ string_of_int n ^ " Solutions)") |> ps
else if n=1 (* Unique remaining solution *) then do
{ ps (td_wrap (call_parser text cpts ^ "Unique Solution"))
; call_scl_parser 1
}
{ td_wrap (call_parser text cpts ^ "Unique Solution") |> ps
; call_scl_parser 1
}
else do
{ ps (td_wrap (call_reader text cpts "p" ^ "Filtered Solutions"))
; let info = string_of_int n ^ if flag then "" else " Partial" in
ps (td_wrap (call_reader text cpts "t" ^ "All " ^ info ^ " Solutions"))
; call_scl_parser n
}
| _ -> ps (td_wrap "(More than 2^32 Solutions!)")
{ td_wrap (call_reader text cpts "p" ^ "Filtered Solutions") |> ps
; let info = string_of_int n ^ if flag then "" else " Partial" in
td_wrap (call_reader text cpts "t" ^ "All " ^ info ^ " Solutions") |> ps
; call_scl_parser n
}
| _ -> td_wrap "(More than 2^32 Solutions!)" |> ps
]
; pl tr_end
; pl table_end
; ps div_end (* Latin16 *)
; pl html_break
; ps (div_begin Latin12)
; pl (table_begin Tcenter)
; ps tr_begin
; tr_end |> pl
; table_end |> pl
; div_end |> ps (* Latin16 *)
; html_break |> pl
; div_begin Latin12 |> ps
; table_begin Tcenter |> pl
; tr_begin |> ps
; List.iter update_col_length chunks
; if Paths.platform="Station" then print_all text checkpoints chunks 0
else List.iter print_first_server chunks
; pl tr_end
; tr_end |> pl
; print_interf text checkpoints ()
; pl table_end
; ps div_end (* Latin12 *)
; pl html_break
; table_end |> pl
; div_end |> ps (* Latin12 *)
; html_break |> pl
; reset_graph ()
; reset_visual ()
; set_cur_offset 0
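
The bulk of this hunk rewrites prefix applications such as [pl html_break] into the pipeline style named in the commit message. As a standalone reminder, here is a minimal sketch (not Heritage code; [pl] and [html_break] are stand-ins, and [|>] is assumed to be the usual reverse-application operator, so [x |> f] is just [f x]):

(* Minimal sketch in revised syntax: both lines of [demo] print the same thing. *)
value pl s = print_string (s ^ "\n");
value html_break = "<br>";  (* stand-in value for illustration only *)
value demo () = do
  { pl html_break           (* former style: prefix application *)
  ; html_break |> pl        (* new style: the value is piped into the printer *)
  };
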
......@@ -632,8 +632,8 @@ value graph_engine () = do
(* Save sentence button *)
; if corpus_permission = Web_corpus.Annotator then
(* TODO: use segment_all to compute the nb of sols instead of
passing 0 as nb_sols. *)
(* TODO: use [segment_all] to compute the nb of sols instead of
passing 0 to [nb_sols]. *)
save_button query (Num.num_of_int 0) |> pl
else
()
......
......@@ -173,7 +173,7 @@ value tags_of phase word =
as sup kridanta forms with preverbs. The preverbs are packed in pv. *)
| Tad (ph,sfx_ph) form sfx -> (* tag inherited from fake suffix entry *)
let sfx_tag = Deco.assoc sfx (morpho_tags sfx_ph) in
(* let stem_tag = Deco.assoc sfx (morpho_tags ph) in - possible extension *)
(* [let stem_tag = Deco.assoc sfx (morpho_tags ph) in] - possible extension *)
Taddhita (ph,form) [ 0 :: sfx ] sfx_ph sfx_tag (* 0 = "-" *)
| _ -> Atomic (Deco.assoc word (morpho_tags phase))
(* NB Atomic comprises tin verbal forms of roots as well as sup atomic forms
......
......@@ -48,18 +48,17 @@ value populate_corpus dirname file =
in
let rec aux i =
try
(* let line = input_line ch in *)
(* let state = *)
(* [ (Params.corpus_dir, dirname) *)
(* ; (Params.sentence_no, string_of_int i) *)
(* ; ("t", Paths.default_transliteration) *)
(* ] *)
(* in *)
failwith "TODO"
(* do *)
(* { extract_citation state (Corp.save_sentence True Web.graph_cgi) line i *)
(* ; aux (i + 1) *)
(* } *)
(* [let line = input_line ch in
let state =
[ (Params.corpus_dir, dirname)
; (Params.sentence_no, string_of_int i)
; ("t", Paths.default_transliteration)
] in] *)
failwith "TODO"
(* [do
{ extract_citation state (Corp.save_sentence True Web.graph_cgi) line i
; aux (i + 1)
}] *)
with
[ End_of_file -> () ]
in
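
For reference, the commented-out fragments above reassemble into the loop below: an untested sketch of the intended code, copied from the comments, which only becomes live once [extract_citation] replaces the [failwith "TODO"]; the trailing [in] and the rest of the function are as in the file and elided here.

(* Untested sketch reassembled from the bracketed comments above. *)
let rec aux i =
  try
    let line = input_line ch in
    let state =
      [ (Params.corpus_dir, dirname)
      ; (Params.sentence_no, string_of_int i)
      ; ("t", Paths.default_transliteration)
      ] in
    do
    { extract_citation state (Corp.save_sentence True Web.graph_cgi) line i
    ; aux (i + 1)
    }
  with
  [ End_of_file -> () ]
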
......@@ -79,12 +78,12 @@ value populate_corpus dirname file =
(* Entry point *)
(***************)
value main =
let dirname = ref "" in
let opts =
Arg.align
[ ("-d", Arg.Set_string dirname,
" Specify the destination directory") ]
in
let dirname = ref "" in
let opts =
Arg.align
[ ("-d", Arg.Set_string dirname,
" Specify the destination directory") ]
in
let usage_msg =
Filename.basename Sys.argv.(0) ^ " -d <dest_dir> <citation_file>"
in
......
......@@ -57,7 +57,7 @@ value analysis_of_env env =
let cpts =
env
|> Cgi.decoded_get "cpts" ""
(* |> Checkpoints.parse_cpts *)
(* [|> Checkpoints.parse_cpts] *)
in
let nb_sols =
env
......
......@@ -88,7 +88,7 @@ and participle = (* participles *)
| Pfutm (* middle future participle *)
| Pfutp of kritya (* passive future/potential participle/gerundive 3 forms *)
| Action_noun (* generative only for auxiliaries, for cvi compounds *)
(*| Agent_noun, etc. -- non generative, must be lexicalized; see nominal *)
(*| [Agent_noun], etc. -- non generative, must be lexicalized; see nominal *)
]
;
(* Invariable verbal forms.
......@@ -149,7 +149,7 @@ and krit = (* coarser than Paninian krit suffixes *)
| Action_ti (* ktin \Pan{3,3,94} -ti f. *)
| Action_i (* ki \Pan{3,3,92-93} -i f. *)
| Action_root (* unknown krit of non-agent noun *)
| Object_root (* we should probably lump action and object in Non_agent *)
| Object_root (* we should probably lump action and object in [Non_agent] *)
| Object_a (* ka -a n. *)
| Instrument (* ka \Pan{3,1,136} 0/amui n. *)
| Instra (* .s.tran -tra n. -trii f. traa f. *)
......
......@@ -892,7 +892,7 @@ EXTEND Gram (* skt to nat *)
| LETTER "S" -> 148
| LETTER "H" -> 149
(* duplication with lower necessary in order to get proper sharing of prefix *)
| "\""; LETTER "m" -> 41
| "\""; LETTER "m" -> 15
| "\""; LETTER "n" -> 36
| LETTER "f" -> 36
| "\""; LETTER "s" -> 48
......
......@@ -13,7 +13,7 @@
(* Terminology. record functions will build the forms needed by Conjugation
and Stemming. After change of this file, and "make releasecgi", these tables
are updated. But the Reader/Parser needs a full pass of generation, with
"make scratch", in order to rebuild the full automata. *)
"make scratch" from Dictionary, in order to rebuild the full automata. *)
(*i module Verbs = struct i*)
......@@ -21,7 +21,7 @@ open List; (* map, length, rev *)
open Phonetics; (* [vowel, homonasal, duhify, mrijify, nahify, light, nasal,
gana, mult, aug, trunc_a, trunc_u, trunc_aa] *)
open Skt_morph;
open Inflected; (* [Conju, roots, enter1, morpho_gen, admits_aa] *)
open Inflected; (* [Conju, Invar, Inftu, roots, enter1, morpho_gen, admits_aa] *)
open Parts; (* [memo_part, record_part, cau_gana, fix, fix_augment, rfix,
compute_participles] *)
(* This module also uses modules [List2 Word Control Canon Encode Int_sandhi]
......@@ -3460,7 +3460,7 @@ value compute_perfect entry =
| "zvaa" -> let (strong, weak,_,_,_) = redup_perf "zuu" in (* \Pan{6,1,30} *)
compute_perfect_v strong weak entry (* Whitney 794b zizvaaya *)
(* Whitney 794b also jyaa pyaa vyaa hvaa; we treat vyaa above, and hvaa is huu.
Thus pyaa is covered by pii. jyaa#1 as jii gives jijyau same WR *)
Thus pyaa is covered by pii. jyaa1 as jii gives jijyau same WR *)
| "indh" -> compute_perfectm Primary (revcode "iidh") entry
| "mah" -> let (strong, weak, _, _, _) = redup_perf entry in
compute_perfectm Primary strong entry (* ZZ Atma for Para root *)
......@@ -4303,6 +4303,10 @@ value build_infinitive c inf_stem root = do
bhaavitum have to be entered as supplements; see Whitney §1051c. *)
{ enter1 root (Invar (c,Infi) (fix inf_stem "tum"))
; enter1 root (Inftu c (fix inf_stem "tu")) (* Xtu-kaama compounds *)
(* NB. bahuv cpds in -kaama and -manas are constructed with infinitives in -tu.
See Renou HLS p72, from Patanjali; also Assimil p194, e.g. tyaktukaama,
anu.s.thaatukaama "desirous to proceed", vaktukaama "who wants to speak",
dra.s.tumanas "inclined to see" *)
}
;
value perif conj perstem entry = do
......
......@@ -8,4 +8,4 @@
(**************************************************************************)
(* Generated by make version - see main Makefile *)
value version="3.02" and version_date="2017-10-05";
value version="3.02" and version_date="2017-10-13";
VERSION='3.02'
DATE='2017-10-05'
DATE='2017-10-13'