mirror of
https://github.com/c-cube/ocaml-containers.git
synced 2025-12-06 11:15:31 -05:00
more doc for CCParse
This commit is contained in:
parent
1517f64f55
commit
352fc10d3b
2 changed files with 26 additions and 1 deletions
|
|
@ -190,6 +190,19 @@ type state = {
|
|||
j: int; (* end pointer in [str], excluded. [len = j-i] *)
|
||||
memo : Memo_state.t option ref; (* Memoization table, if any *)
|
||||
}
|
||||
(* FIXME: replace memo with:
|
||||
[global : global_st ref]
|
||||
|
||||
where:
|
||||
[type global = {
|
||||
mutable memo: Memo_state.t option;
|
||||
line_offsets: int CCVector.vector;
|
||||
}
|
||||
|
||||
with line_offsets used to cache the offset where each line begins,
|
||||
and is computed lazily, to make {!Position.line_and_column}
|
||||
faster if called many times.
|
||||
*)
|
||||
|
||||
(* Monomorphic equality on characters. [Char.equal] is the stdlib's
   type-specific comparison, identical in behavior to [Stdlib.(=)]
   specialized at [char]. *)
let[@inline] char_equal (x : char) y = Char.equal x y
|
||||
(* Local alias for [String.equal], kept for naming consistency with
   [char_equal] above; eta-expanded form, same behavior for all callers. *)
let string_equal (a : string) (b : string) = String.equal a b
|
||||
|
|
|
|||
|
|
@ -203,7 +203,19 @@ val char : char -> char t
|
|||
|
||||
type slice
|
||||
(** A slice of the input, as returned by some combinators such
|
||||
as {!split_1} or {!split_n}.
|
||||
as {!split_1} or {split_list} or {!take}.
|
||||
|
||||
The idea is that one can use some parsers to cut the input into slices,
|
||||
e.g. split into lines, or split a line into fields (think CSV or TSV).
|
||||
Then a variety of parsers can be used on each slice to extract data from
|
||||
it using {!recurse}.
|
||||
|
||||
Slices contain enough information to make it possible
|
||||
for [recurse slice p] to report failures (if [p] fails) using locations
|
||||
from the original input, not relative to the slice.
|
||||
Therefore, even after splitting the input into lines using, say, {!each_line},
|
||||
a failure to parse the 500th line will be reported at line 500 and
|
||||
not at line 1.
|
||||
|
||||
{b EXPERIMENTAL}
|
||||
@since NEXT_RELEASE *)
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue