(*************************************************************************)
(* Copyright 2015-2019 MINES ParisTech -- Dual License LGPL 2.1+ / GPL3+ *)
(* Copyright 2019-2024 Inria -- Dual License LGPL 2.1+ / GPL3+           *)
(* Copyright 2024-2025 Emilio J. Gallego Arias -- LGPL 2.1+ / GPL3+      *)
(* Copyright 2025 CNRS -- LGPL 2.1+ / GPL3+                              *)
(* Written by: Emilio J. Gallego Arias & coq-lsp contributors            *)
(*************************************************************************)
(* Rocq Language Server Protocol: Rocq parsing API                       *)
(*************************************************************************)

module Parsable = Pcoq.Parsable

let parse ~st ps =
  (* Coq is missing this, so we add it here. Note that this MUST run inside
     coq_protect *)
  Control.check_for_interrupt ();
  let mode = State.mode ~st in
  Pcoq.Entry.parse Pvernac.(main_entry mode) ps |> Option.map Ast.of_coq

let parse ~token ~st ps =
  (* This runs already inside Coq.protect *)
  State.in_state ~token ~st ~f:(parse ~st) ps

(* Read the input stream until a dot or an "end of proof" token is
   encountered *)
let parse_to_terminator : unit Pcoq.Entry.t =
  (* type 'a parser_fun = { parser_fun : te LStream.t -> 'a } *)
  let rec dot kwstate st =
    match Gramlib.LStream.next kwstate st with
    | Tok.KEYWORD ("." | "..." | "Qed" | "Defined" | "Admitted") | Tok.BULLET _
      -> ()
    | Tok.EOI -> ()
    | _ -> dot kwstate st
  in
  Pcoq.Entry.of_parser "Coqtoplevel.dot" { parser_fun = dot }

(* If an error occurs while parsing, we try to read the input until a dot
   token is encountered. We assume that when a lexer error occurs, at least
   one char was eaten *)
let rec discard_to_dot ps =
  try Pcoq.Entry.parse parse_to_terminator ps with
  | CLexer.Error.E _ -> discard_to_dot ps
  | e when CErrors.noncritical e -> ()
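
(* Illustrative sketch (not part of the upstream module): how a driver loop
   could combine [parse] and [discard_to_dot] to split a document into
   sentences while surviving parse errors. [run_parse] stands for whatever
   error-reporting wrapper the caller puts around [parse] above; it is
   hypothetical, as are the [Sentence] / [ParseError] result constructors.

     let rec sentences ~token ~st acc ps =
       match run_parse ~token ~st ps with
       | Ok (Some ast) -> sentences ~token ~st (Sentence ast :: acc) ps
       | Ok None -> List.rev acc                (* end of input reached *)
       | Error err ->
         (* resynchronize: skip tokens up to ".", "...", a bullet,
            Qed/Defined/Admitted, or end of input, then keep going *)
         discard_to_dot ps;
         sentences ~token ~st (ParseError err :: acc) ps
*)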