123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105(* Yoann Padioleau
*
* Copyright (C) 2010 Facebook
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License (GPL)
* version 2 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* file license.txt for more details.
*)openCommonmoduleFlag=Flag_parsingmoduleTH=Token_helpers_nwmodulePI=Parse_info(*****************************************************************************)(* Prelude *)(*****************************************************************************)(* Lots of copy paste with my other parsers (e.g. C++, PHP, sql) but
* copy paste is sometimes ok.
*)(*****************************************************************************)(* Types *)(*****************************************************************************)(* the token list contains also the comment-tokens *)typeprogram_and_tokens=Ast_nw.program*Lexer_nw.tokenlist(*****************************************************************************)(* Lexing only *)(*****************************************************************************)lettokens2file=lettable=PI.full_charpos_to_pos_largefileinCommon.with_open_infilefile(funchan->letlexbuf=Lexing.from_channelchaninLexer_nw.reset();letmytokenizerlexbuf=(matchLexer_nw.current_mode()with|Lexer_nw.INITIAL->Lexer_nw.texlexbuf|Lexer_nw.IN_VERBATIMs->Lexer_nw.verbatimslexbuf|Lexer_nw.IN_NOWEB_CHUNK->Lexer_nw.noweblexbuf)inletrectokens_auxacc=lettok=mytokenizerlexbufinif!Flag.debug_lexerthenCommon.pr2_gentok;lettok=tok|>TH.visitor_info_of_tok(funii->{iiwithPI.token=(* could assert pinfo.filename = file ? *)matchii.PI.tokenwith|PI.OriginTokpi->PI.OriginTok(PI.complete_token_location_largefiletablepi)|_->raiseTodo})inifTH.is_eoftokthenList.rev(tok::acc)elsetokens_aux(tok::acc)intokens_aux[])lettokensa=Common.profile_code"Parse_nw.tokens"(fun()->tokens2a)(*****************************************************************************)(* Fuzzy parsing *)(*****************************************************************************)letparse_fuzzyfile=lettoks=tokensfileinlettrees=Lib_ast_fuzzy.mk_trees{Lib_ast_fuzzy.tokf=TH.info_of_tok;kind=TH.token_kind_of_tok;}toksintrees,toks(*****************************************************************************)(* Main entry point *)(*****************************************************************************)letparse2filename=letstat=Parse_info.default_statfilenameinlet(ast,toks)=parse_fuzzyfilenamein(ast,toks),statletparsea=Common.profile_code"Parse_nw.parse"(fun()->parse2a)