(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)

open Lexing
open Printf

(* [init filename lexbuf] records [filename] in [lexbuf]'s current position
   and resets the line/offset counters, so that error messages produced from
   this lexbuf refer to [filename]. Returns [lexbuf] for convenient chaining. *)
let init filename lexbuf =
  lexbuf.lex_curr_p <- {
    pos_fname = filename;
    pos_lnum  = 1;
    pos_bol   = 0;
    pos_cnum  = 0
  };
  lexbuf

(* [read filename] reads the entire contents of the file [filename] and
   returns a pair of this text and a lexbuf (initialized via [init]) that
   reads from it. The channel is closed even if reading fails. *)
let read filename =
  let c = open_in filename in
  let text =
    (* Make sure the channel does not leak if reading raises. *)
    try really_input_string c (in_channel_length c)
    with e -> close_in_noerr c; raise e
  in
  close_in c;
  let lexbuf = Lexing.from_string text in
  text, init filename lexbuf

(* [newline lexbuf] must be called by the lexer at each newline so that
   line numbers in positions remain accurate. *)
let newline =
  Lexing.new_line

(* A position pair is dummy if either component is (physically) the
   distinguished [dummy_pos]. *)
let is_dummy (pos1, pos2) =
  pos1 == dummy_pos || pos2 == dummy_pos

(* [range (pos1, pos2)] prints a textual description of this range of
   positions, in the style of OCaml's error messages:
     File "...", line <l>, characters <c1>-<c2>:
   A dummy range yields "At an unknown location:". *)
let range ((pos1, pos2) as range) =
  if is_dummy range then
    sprintf "At an unknown location:\n"
  else
    let file = pos1.pos_fname in
    let line = pos1.pos_lnum in
    let char1 = pos1.pos_cnum - pos1.pos_bol in
    let char2 = pos2.pos_cnum - pos1.pos_bol in (* yes, [pos1.pos_bol] *)
    sprintf "File \"%s\", line %d, characters %d-%d:\n"
      file line char1 char2
      (* use [char1 + 1] and [char2 + 1] if *not* using Caml mode *)

(* [tabulate is_eof lexer] reads tokens from [lexer] until an EOF token
   (as recognized by [is_eof]) is reached, stores them in an array, and
   returns a new pseudo-lexer that replays them. The underlying [lexer]
   is consumed eagerly, once; the resulting pseudo-lexer must not be
   queried past the EOF token. *)
let tabulate (type a) (is_eof : a -> bool) (lexer : unit -> a) : unit -> a =
  (* Read tokens from the lexer until we hit an EOF token. *)
  let rec read tokens =
    let token = lexer() in
    let tokens = token :: tokens in
    if is_eof token then
      (* Once done, reverse the list and convert it to an array. *)
      tokens |> List.rev |> Array.of_list
    else
      read tokens
  in
  (* We now have an array of tokens. *)
  let tokens = read [] in
  (* Define a pseudo-lexer that reads from this array. *)
  let i = ref 0 in
  let lexer () =
    (* If this assertion is violated, then the parser is trying to read
       past an EOF token. This should not happen. *)
    assert (!i < Array.length tokens);
    let token = Array.unsafe_get tokens !i in
    i := !i + 1;
    token
  in
  lexer