package camlp4

module Loc : Sig.Loc
module Token : sig ... end
module Lexer : sig ... end
type gram = {
  gfilter : Token.Filter.t;
  gkeywords : (string, int ref) Hashtbl.t;
  glexer : Loc.t -> char Stream.t -> (Token.t * Loc.t) Stream.t;
  warning_verbose : bool ref;
  error_verbose : bool ref;
}
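A gram value is normally built by the grammar functor itself rather than by hand; as a minimal sketch of how its fields might be consulted (assuming a gram value g obtained from the surrounding grammar module):

(* Sketch only: inspect an existing gram value; field names are those
   listed above. *)
let describe (g : gram) : string =
  Printf.sprintf "keywords=%d warnings=%b errors=%b"
    (Hashtbl.length g.gkeywords)
    !(g.warning_verbose)
    !(g.error_verbose)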
type token_info = {
  prev_loc : Loc.t;
  cur_loc : Loc.t;
  prev_loc_only : bool;
}
type token_stream = (Token.t * token_info) Stream.t
type efun = token_stream -> Action.t
type token_pattern = (Token.t -> bool) * string
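A token_pattern pairs a predicate on tokens with the name reported in error messages. Assuming Token.t here is camlp4's standard camlp4_token (an assumption; Token is abstract on this page), a pattern accepting any lowercase identifier could look like:

(* Sketch: assumes Token.t = Camlp4.Sig.camlp4_token. *)
let any_lident : token_pattern =
  ((function Camlp4.Sig.LIDENT _ -> true | _ -> false), "LIDENT")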
type internal_entry = {
  egram : gram;
  ename : string;
  mutable estart : int -> efun;
  mutable econtinue : int -> Loc.t -> Action.t -> efun;
  mutable edesc : desc;
}
and desc =
  | Dlevels of level list
  | Dparser of (token_stream -> Action.t)
and level = {
  assoc : Sig.Grammar.assoc;
  lname : string option;
  lsuffix : tree;
  lprefix : tree;
}
and symbol =
  | Smeta of string * symbol list * Action.t
  | Snterm of internal_entry
  | Snterml of internal_entry * string
  | Slist0 of symbol
  | Slist0sep of symbol * symbol
  | Slist1 of symbol
  | Slist1sep of symbol * symbol
  | Sopt of symbol
  | Stry of symbol
  | Sself
  | Snext
  | Stoken of token_pattern
  | Skeyword of string
  | Stree of tree
and tree =
  | Node of node
  | LocAct of Action.t * Action.t list
  | DeadEnd
and node = {
  node : symbol;
  son : tree;
  brother : tree;
}
type production_rule = symbol list * Action.t
type single_extend_statment = string option * Sig.Grammar.assoc option * production_rule list
type extend_statment = Sig.Grammar.position option * single_extend_statment list
type delete_statment = symbol list
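Putting these together, an extend_statment that adds one unnamed level with default associativity, containing a single rule, could be assembled as below; the symbol list and the Action.t value wrapping the semantic action are taken as given:

(* Sketch: no position, one level with no label and no explicit
   associativity, holding a single production rule. *)
let one_rule_extend (symbols : symbol list) (act : Action.t)
    : extend_statment =
  (None, [ (None, None, [ (symbols, act) ]) ])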
type (!'a, !'b, !'c) fold = internal_entry -> symbol list -> ('a Stream.t -> 'b) -> 'a Stream.t -> 'c
type (!'a, !'b, !'c) foldsep = internal_entry -> symbol list -> ('a Stream.t -> 'b) -> ('a Stream.t -> unit) -> 'a Stream.t -> 'c
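The fold and foldsep types describe the combinators used to run a sub-parser repeatedly, as needed for Slist0, Slist1 and their separated variants. Purely as an illustrative inhabitant of the fold type (not camlp4's own implementation), a value that counts how many times the sub-parser succeeds:

(* Illustrative only: applies [parse_one] until it raises Stream.Failure
   and returns the number of successful parses. *)
let count_items : ('a, 'b, int) fold =
  fun _entry _symbols parse_one strm ->
    let rec loop n =
      match (try Some (parse_one strm) with Stream.Failure -> None) with
      | Some _ -> loop (n + 1)
      | None -> n
    in
    loop 0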
val get_filter : gram -> Token.Filter.t
val using : gram -> string -> unit
val removing : gram -> string -> unit
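As suggested by the gkeywords field, using registers a keyword with a grammar and removing releases that registration. A minimal sketch of a helper that registers a keyword for the duration of a computation (g is an existing gram value; "foreach" is a made-up keyword):

(* Sketch: register "foreach", run [f], then release the registration. *)
let with_foreach (g : gram) (f : unit -> unit) : unit =
  using g "foreach";
  Fun.protect ~finally:(fun () -> removing g "foreach") f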