module Make:
module Loc: Lexer.Loc
module Token: Lexer.Token
module Action : sig .. end
module Lexer: Lexer
type gram = {
  gfilter : Token.Filter.t;
  gkeywords : (string, int Pervasives.ref) Hashtbl.t;
  glexer : Loc.t -> char Stream.t -> (Token.t * Loc.t) Stream.t;
  warning_verbose : bool Pervasives.ref;
  error_verbose : bool Pervasives.ref;
}
type token_info = {
  prev_loc : Loc.t;
  cur_loc : Loc.t;
}
type token_stream = (Token.t * token_info) Stream.t
type efun = token_stream -> Action.t
type token_pattern = (Token.t -> bool) * string
type internal_entry = {
  egram : gram;
  ename : string;
  mutable estart : int -> efun;
  mutable econtinue : int -> Loc.t -> Action.t -> efun;
  mutable edesc : desc;
}
type
desc =
type
level = {
}
type
symbol =
type tree =
  | Node of node
  | LocAct of Action.t * Action.t list
  | DeadEnd
type
node = {
}
type production_rule = symbol list * Action.t
type single_extend_statment =
  string option * Camlp4.Sig.Grammar.assoc option * production_rule list
type extend_statment =
  Camlp4.Sig.Grammar.position option * single_extend_statment list
type delete_statment = symbol list
type ('a, 'b, 'c) fold =
  internal_entry -> symbol list -> ('a Stream.t -> 'b) -> 'a Stream.t -> 'c
type ('a, 'b, 'c) foldsep =
  internal_entry -> symbol list ->
  ('a Stream.t -> 'b) -> ('a Stream.t -> unit) -> 'a Stream.t -> 'c
val get_filter : gram -> Token.Filter.t
val token_location : token_info -> Loc.t
type 'a not_filtered = 'a
val using : gram -> string -> unit
val removing : gram -> string -> unit