(** Lexing buffer. *)

type t
(** The state of the buffer. *)

val from_lexbuf : ?reset_line:bool -> Sedlexing.lexbuf -> t
(** Create a new buffer. *)

val start : t -> unit
(** Initialize a new run. *)

val buffer : t -> Sedlexing.lexbuf
(** Extract the sedlex buffer. Required in each rule. *)

val positions : t -> Lexing.position * Lexing.position
(** Extract the starting and ending position for the matched token.

    This function is used outside of the parser, in order to get the position
    of the latest token in the case of an error. *)

val content : t -> string
(** Extract the token matched by the rule. *)

val set_start_position : t -> Lexing.position -> unit
(** Reset the starting position. Used while parsing the string to keep the
    beginning of the whole string. *)

val tokenize : (t -> 'a) -> t -> unit -> 'a * Lexing.position * Lexing.position
(** Function to use in the parser in order to extract the matched token, and
    the starting and ending position. *)

val rollback : t -> unit
(** Roll back the latest matched token. *)
(** {1 State in expressions}

    The comment system is terrible. The same symbol can be used for:

    - starting a comment
    - inequality operation

    In order to manage this, I try to identify the context in a very basic
    way, using a stack for determining the token to send. *)

type lexer = t -> Tokens.token

and buffer_builder = ?nested:bool -> Buffer.t -> t -> Tokens.token

type stringWraper = {
  start_string : lexer -> lexer;
      (** Start a new string. This function is used inside the token lexer,
          in order to identify how to start a new string. *)
  wrap : buffer_builder -> buffer_builder;
      (** Function used to escape the character and add it to the buffer.
          This function is used inside the string lexer. *)
  end_string : lexer;
      (** Function used to match the end of the string. This function is used
          after the string lexer, in order to identify the end pattern for a
          string. *)
}
type state =
  | Token  (** Default state, parsing the tokens *)
  | String of stringWraper  (** String enclosed by [''] *)
  | MString of int  (** String enclosed by [{}] *)
  | EndString of stringWraper
      (** State raised just before closing the string. The buffer is rolled
          back and the position is the closing symbol. *)
  | Expression  (** Expression where [!] is an operator *)

val pp_state : Format.formatter -> state -> unit
(** Pretty-print a {!state}, for debugging and error messages. *)

val state : t -> state option
(** Get the current state for the lexer.

    @return [None] when in the default state *)

val enter_state : t -> state -> unit
(** Enter into a new state. *)

val leave_state : t -> unit
(** Leave the current state. *)
val overlay : t -> lexer -> lexer
(** NOTE(review): undocumented in the original. From the signature it appears
    to wrap a lexer with behaviour that depends on the buffer state — confirm
    against the implementation. *)

val start_recovery : t -> unit
(** Set the lexer in recovery mode; the lexer raises this mode after an
    error, in order to ignore the further errors until a new location. *)

val is_recovery : t -> bool
(** Check if the lexer is in recovery mode. *)