start of completion
darcs-hash:20110726232101-c41ad-08cb52d96233a95f2e6843e09a5eb967331f8ebd
parent e877a0ff72
commit 02c2395a79
@@ -0,0 +1,97 @@
+(*
+ * uTop_complete.ml
+ * ----------------
+ * Copyright : (c) 2011, Jeremie Dimino <jeremie@dimino.org>
+ * Licence   : BSD3
+ *
+ * This file is a part of utop.
+ *)
+
+open UTop_token
+
+module String_set = Set.Make(String)
+
+let set_of_list = List.fold_left (fun set x -> String_set.add x set) String_set.empty
+
+(* +-----------------------------------------------------------------+
+   | Directives                                                      |
+   +-----------------------------------------------------------------+ *)
+
+let get_directives () =
+  Hashtbl.fold (fun k v set -> String_set.add k set) Toploop.directive_table String_set.empty
+
+let directive_suffix dir =
+  match Hashtbl.find Toploop.directive_table dir with
+    | Toploop.Directive_none _ -> ";;"
+    | Toploop.Directive_string _ -> " \""
+    | Toploop.Directive_bool _ | Toploop.Directive_int _ | Toploop.Directive_ident _ -> " "
+
+(* +-----------------------------------------------------------------+
+   | Filtering                                                       |
+   +-----------------------------------------------------------------+ *)
+
+(* Filter blanks and comments except for the last token. *)
+let rec filter tokens =
+  match tokens with
+    | [] -> []
+    | [((Blanks | Comment | Doc), start, stop, src)] -> [(Blanks, start, stop, src)]
+    | ((Blanks | Comment | Doc), _, _, _) :: rest -> filter rest
+    | x :: rest -> x :: filter rest
+
+(* +-----------------------------------------------------------------+
+   | Completion                                                      |
+   +-----------------------------------------------------------------+ *)
+
+let complete str =
+  let tokens = UTop_lexer.lex_string str in
+  (* Filter blanks and comments. *)
+  let tokens = filter tokens in
+  match tokens with
+
+    (* +-------------------------------------------------------------+
+       | Completion on directives                                    |
+       +-------------------------------------------------------------+ *)
+
+    (* Completion on directive names. *)
+    | [(Symbol, _, stop, "#")]
+    | [(Symbol, _, _, "#"); (Blanks, _, stop, _)] ->
+        (stop, List.map (fun dir -> (dir, directive_suffix dir)) (String_set.elements (get_directives ())))
+    | [(Symbol, _, _, "#"); ((Lident | Uident), start, _, src)] ->
+        let prefix, words = LTerm_read_line.lookup src (String_set.elements (get_directives ())) in
+        (start, List.map (fun dir -> (dir, directive_suffix dir)) words)
+
+    (* Complete with ";;" when possible. *)
+    | [(Symbol, _, _, "#"); ((Lident | Uident), _, _, _); (String true, _, stop, _)]
+    | [(Symbol, _, _, "#"); ((Lident | Uident), _, _, _); (String true, _, _, _); (Blanks, _, stop, _)] ->
+        (stop, [(";;", "")])
+
+    (* Completion on packages. *)
+    | [(Symbol, _, _, "#"); (Lident, _, _, "require"); (String false, start, stop, str)] ->
+        let pkg = String.sub str 1 (String.length str - 1) in
+        let prefix, pkgs = LTerm_read_line.lookup pkg (Fl_package_base.list_packages ()) in
+        (start + 1, List.map (fun pkg -> (pkg, "\";;")) (List.sort compare pkgs))
+
+    (* Generic completion. *)
+    | [(Symbol, _, _, "#"); ((Lident | Uident), _, _, dir); (Blanks, _, stop, _)] ->
+        (stop,
+         match try Some (Hashtbl.find Toploop.directive_table dir) with Not_found -> None with
+           | Some (Toploop.Directive_none _) -> [(";;", "")]
+           | Some (Toploop.Directive_string _) -> [(" \"", "")]
+           | Some (Toploop.Directive_bool _) -> [("true", ";;"); ("false", ";;")]
+           | Some (Toploop.Directive_int _) -> []
+           | Some (Toploop.Directive_ident _) -> []
+           | None -> [])
+    | [(Symbol, _, _, "#"); ((Lident | Uident), _, _, dir); ((Lident | Uident), start, _, id)] ->
+        (start,
+         match try Some (Hashtbl.find Toploop.directive_table dir) with Not_found -> None with
+           | Some (Toploop.Directive_none _) -> []
+           | Some (Toploop.Directive_string _) -> []
+           | Some (Toploop.Directive_bool _) ->
+               let _, words = LTerm_read_line.lookup id ["true"; "false"] in
+               List.map (fun w -> (w, ";;")) words
+           | Some (Toploop.Directive_int _) -> []
+           | Some (Toploop.Directive_ident _) -> []
+           | None -> [])
+
+    | _ ->
+        (0, [])
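A note on the filter pass above: intermediate blanks, comments and doc comments are discarded, while a trailing one is kept and normalised to Blanks, which is what lets the patterns in complete tell "#foo" apart from "#foo ". The following standalone sketch (not part of the patch) restates that rule with a minimal stand-in for UTop_token.t; the type and the test values are illustrative only.

(* Minimal stand-in for UTop_token.t, for illustration only. *)
type tok = Symbol | Lident | Blanks | Comment

let rec filter tokens =
  match tokens with
    | [] -> []
    | [((Blanks | Comment), start, stop, src)] -> [(Blanks, start, stop, src)]
    | ((Blanks | Comment), _, _, _) :: rest -> filter rest
    | x :: rest -> x :: filter rest

let () =
  (* "# foo " : the inner blank disappears, the trailing one survives as Blanks. *)
  let toks = [(Symbol, 0, 1, "#"); (Blanks, 1, 2, " "); (Lident, 2, 5, "foo"); (Blanks, 5, 6, " ")] in
  assert (filter toks = [(Symbol, 0, 1, "#"); (Lident, 2, 5, "foo"); (Blanks, 5, 6, " ")])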
@@ -0,0 +1,14 @@
+(*
+ * uTop_complete.mli
+ * -----------------
+ * Copyright : (c) 2011, Jeremie Dimino <jeremie@dimino.org>
+ * Licence   : BSD3
+ *
+ * This file is a part of utop.
+ *)
+
+(** OCaml completion. *)
+
+val complete : string -> int * (string * string) list
+  (** [complete str] returns the start of the completed word in [str]
+      and the list of possible completions with their suffixes. *)
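A hedged usage sketch of the new API (not part of the patch): what a caller sees for a partially typed directive. The concrete candidates depend on which directives are registered in the running toplevel, so the "require" example assumes findlib/topfind is loaded.

let () =
  (* For "#requ" the lexer yields a Symbol "#" followed by a Lident "requ",
     so completion starts at character index 1, just after the '#'. *)
  let start, candidates = UTop_complete.complete "#requ" in
  Printf.printf "completion starts at %d\n" start;
  (* Each candidate is paired with the suffix to insert after it,
     e.g. ("require", " \"") for a directive that takes a string argument. *)
  List.iter (fun (word, suffix) -> Printf.printf "%s -> %S\n" word suffix) candidates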
@@ -96,11 +96,12 @@ class read_line ~term ~prompt = object(self)
   method stylise =
     let styled, position = super#stylise in
     let tokens = UTop_lexer.lex_string (!pending ^ LTerm_text.to_string styled) in
-    let rec loop ofs_a tokens =
+    let pending_length = Zed_utf8.length !pending in
+    let rec loop tokens =
       match tokens with
         | [] ->
             ()
-        | (token, src) :: rest ->
+        | (token, start, stop, src) :: rest ->
             let token_style =
               match token with
                 | Symbol -> styles.style_symbol
@@ -108,38 +109,39 @@ class read_line ~term ~prompt = object(self)
                 | Uident -> styles.style_ident
                 | Constant -> styles.style_constant
                 | Char -> styles.style_char
-                | String -> styles.style_string
+                | String _ -> styles.style_string
                 | Quotation -> styles.style_quotation
                 | Comment -> styles.style_comment
                 | Doc -> styles.style_doc
                 | Blanks -> styles.style_blanks
                 | Error -> styles.style_error
             in
-            let ofs_b = ofs_a + Zed_utf8.length src in
-            for i = ofs_a to ofs_b - 1 do
+            for i = start - pending_length to stop - pending_length - 1 do
              let ch, style = styled.(i) in
              styled.(i) <- (ch, LTerm_style.merge token_style style)
            done;
-            loop ofs_b rest
+            loop rest
    in
-    let pending_length = Zed_utf8.length !pending in
-    let rec skip idx tokens =
+    let rec skip tokens =
      match tokens with
        | [] ->
-            assert false
-        | (token, src) :: rest ->
-            let len = Zed_utf8.length src in
-            let idx' = idx + len in
-            if idx' = pending_length then
-              loop 0 rest
-            else if idx' > pending_length then
-              loop 0 ((token, Zed_utf8.sub src (pending_length - idx) (len - (pending_length - idx))) :: rest)
+            ()
+        | (token, start, stop, src) :: rest ->
+            if stop = pending_length then
+              loop rest
+            else if stop > pending_length then
+              loop ((token, pending_length, stop, Zed_utf8.sub src (pending_length - start) (stop - pending_length)) :: rest)
            else
-              skip idx' rest
+              skip rest
    in
-    if pending_length = 0 then loop 0 tokens else skip 0 tokens;
+    if pending_length = 0 then loop tokens else skip tokens;
    (styled, position)
+
+  method completion =
+    let pos, words = UTop_complete.complete (!pending ^ Zed_rope.to_string self#input_prev) in
+    let pending_length = Zed_utf8.length !pending in
+    if pos < pending_length then self#set_completion 0 [] else self#set_completion (pos - pending_length) words
 
   initializer
     (* Set the source signal for the size of the terminal. *)
     UTop_private.set_size self#size;
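The subtle part of the new stylise code is the offset arithmetic around !pending: token positions are expressed in the whole string !pending ^ input, while the styled array only covers the visible input, and skip clips a token that straddles that boundary. Below is a standalone restatement of that clipping step (not part of the patch): it assumes ASCII-only input so String.sub can stand in for Zed_utf8.sub, and the helper name clip is made up for the example.

let clip pending_length (token, start, stop, src) =
  if stop <= pending_length then
    None                             (* token lies entirely in the pending text: nothing to style *)
  else if start >= pending_length then
    Some (token, start, stop, src)   (* token is entirely visible: keep it as is *)
  else
    (* token straddles the boundary: keep only the visible tail, mirroring
       Zed_utf8.sub src (pending_length - start) (stop - pending_length) above *)
    Some (token, pending_length, stop,
          String.sub src (pending_length - start) (stop - pending_length))

let () =
  match clip 3 ("Lident", 1, 6, "hello") with
    | Some (_, start, stop, src) -> Printf.printf "visible part: [%d, %d) %S\n" start stop src
    | None -> print_endline "token hidden in pending text"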
@@ -1,3 +1,4 @@
 UTop_console
 UTop_lexer
 UTop_token
+UTop_complete
@@ -86,7 +86,7 @@ rule token = parse
   | float_literal
       { Constant }
   | '"'
-      { string lexbuf; String }
+      { string lexbuf }
   | "'" [^'\'' '\\'] "'"
       { Char }
   | "'\\" ['\\' '"' 'n' 't' 'b' 'r' ' ' '\'' 'x' '0'-'9'] eof
@@ -130,13 +130,13 @@ and comment depth = parse
 
 and string = parse
   | '"'
-      { () }
+      { String true }
   | "\\\""
       { string lexbuf }
   | uchar
       { string lexbuf }
   | eof
-      { () }
+      { String false }
 
 and quotation = parse
   | ">>"
@@ -149,13 +149,15 @@ and quotation = parse
 {
   let lex_string str =
     let lexbuf = Lexing.from_string str in
-    let rec loop ofs_a =
+    let rec loop idx ofs_a =
       match try Some (token lexbuf) with End_of_file -> None with
         | Some token ->
             let ofs_b = Lexing.lexeme_end lexbuf in
-            (token, String.sub str ofs_a (ofs_b - ofs_a)) :: loop ofs_b
+            let src = String.sub str ofs_a (ofs_b - ofs_a) in
+            let idx' = idx + Zed_utf8.length src in
+            (token, idx, idx', src) :: loop idx' ofs_b
         | None ->
             []
     in
-    loop 0
+    loop 0 0
 }
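For reference, a hedged sketch of consuming the new lex_string result (not part of the patch; it assumes utop and its dependencies are loaded, and the input string is arbitrary): each token now carries its character offsets in the whole input in addition to its source text, while src itself is still cut out of the lexing buffer with byte offsets.

let () =
  List.iter
    (fun (_token, start, stop, src) ->
       Printf.printf "[%d, %d) %S\n" start stop src)
    (UTop_lexer.lex_string "#require \"lwt\";;")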
@@ -13,7 +13,7 @@ type t =
   | Uident
   | Constant
   | Char
-  | String
+  | String of bool
   | Quotation
   | Comment
   | Doc
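Per the lexer rules above, the new bool payload records whether the string literal was closed (String true) or ran into end of input (String false); the completer relies on this to offer package names only inside an unfinished #require "… . A small hedged sketch (assumes the utop libraries are available; the helper name describe is made up):

let describe = function
  | UTop_token.String true -> "terminated string literal"
  | UTop_token.String false -> "unterminated string literal"
  | _ -> "some other token"

let () = print_endline (describe (UTop_token.String false))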