open Reason_parser

type 'a positioned = 'a * Lexing.position * Lexing.position

(* Lexer state threaded through [token].  [queued_tokens] buffers tokens read
   ahead by the ES6_FUN lookahead below; [comments] accumulates the comments
   skipped while lexing. *)
type t = {
  declarative_lexer_state: Reason_declarative_lexer.state;
  lexbuf: Lexing.lexbuf;
  mutable comments: (string * Location.t) list;
  mutable queued_tokens: token positioned list;
  mutable queued_exn: exn option;
  mutable last_cnum: int;
  mutable completion_ident_offset: int;
  completion_ident_pos: Lexing.position;
}

let init ?insert_completion_ident lexbuf =
  let declarative_lexer_state = Reason_declarative_lexer.make () in
  let completion_ident_offset, completion_ident_pos =
    match insert_completion_ident with
    | None -> (min_int, Lexing.dummy_pos)
    | Some pos -> (pos.Lexing.pos_cnum, pos)
  in
  { declarative_lexer_state; lexbuf;
    comments = []; queued_tokens = []; queued_exn = None;
    last_cnum = -1; completion_ident_offset; completion_ident_pos; }

let lexbuf state = state.lexbuf

(* Fetch the next token from the declarative lexer, stashing comments aside. *)
let rec token state =
  match Reason_declarative_lexer.token
          state.declarative_lexer_state state.lexbuf
  with
  | COMMENT (s, comment_loc) ->
    state.comments <- (s, comment_loc) :: state.comments;
    token state
  | tok -> tok

(* Routines for manipulating lexer state *)

let save_triple lexbuf tok =
  (tok, lexbuf.Lexing.lex_start_p, lexbuf.Lexing.lex_curr_p)

let fake_triple t (_, pos, _) =
  (t, pos, pos)

(* insert ES6_FUN: the lookahead below scans a balanced (...) or {...} group
   (or a lone identifier / `_`); if the next token is `=>` or `:`, the group
   was actually an es6 arrow-function parameter list, so a synthetic ES6_FUN
   token is emitted in front of it and the scanned tokens are replayed. *)

exception Lex_balanced_failed of token positioned list * exn option

let closing_of = function
  | LPAREN -> RPAREN
  | LBRACE -> RBRACE
  | _ -> assert false

let inject_es6_fun = function
  | tok :: acc ->
    tok :: fake_triple ES6_FUN tok :: acc
  | _ -> assert false

let is_triggering_token = function
  | EQUALGREATER | COLON -> true
  | _ -> false

let rec lex_balanced_step state closing acc tok =
  let lexbuf = state.lexbuf in
  let acc = save_triple lexbuf tok :: acc in
  match tok, closing with
  | (RPAREN, RPAREN) | (RBRACE, RBRACE) | (RBRACKET, RBRACKET) ->
    acc
  | ((RPAREN | RBRACE | RBRACKET | EOF), _) ->
    raise (Lex_balanced_failed (acc, None))
  | ((LBRACKET | LBRACKETLESS | LBRACKETGREATER
     | LBRACKETAT | LBRACKETPERCENT | LBRACKETPERCENTPERCENT), _) ->
    lex_balanced state closing (lex_balanced state RBRACKET acc)
  | ((LPAREN | LBRACE), _) ->
    let rparen =
      try lex_balanced state (closing_of tok) []
      with (Lex_balanced_failed (rparen, None)) ->
        raise (Lex_balanced_failed (rparen @ acc, None))
    in
    begin match token state with
    | exception exn ->
      raise (Lex_balanced_failed (rparen @ acc, Some exn))
    | tok' ->
      let acc = if is_triggering_token tok' then inject_es6_fun acc else acc in
      lex_balanced_step state closing (rparen @ acc) tok'
    end
  | ((LIDENT _ | UNDERSCORE), _) ->
    begin match token state with
    | exception exn -> raise (Lex_balanced_failed (acc, Some exn))
    | tok' ->
      let acc = if is_triggering_token tok' then inject_es6_fun acc else acc in
      lex_balanced_step state closing acc tok'
    end
  (* `...` with a closing `}` indicates that we're definitely not in an es6_fun
   * Imagine the following:
   * true ? (Update({...a, b: 1}), None) : x;
   * true ? ({...a, b: 1}) : a;
   * true ? (a, {...a, b: 1}) : a;
   * lookahead_esfun is triggered, initiating the lex_balanced procedure.
   * Since we now "over"-parse spread operators in pattern position (for
   * better error messages), the ... pattern in ({...a, b: 1}) is now a valid path.
   * This means that `({...a, b: 1}) :` in the expression above is seen as a
   * pattern, i.e. as the argument list of an es6 function: (pattern) :type => expr.
   * We exit here to indicate that an expression needs to be parsed instead
   * of a pattern.
   *)
  | (DOTDOTDOT, RBRACE) -> acc
  | _ -> lex_balanced state closing acc

and lex_balanced state closing acc =
  match token state with
  | exception exn -> raise (Lex_balanced_failed (acc, Some exn))
  | tok -> lex_balanced_step state closing acc tok

(* After an opening `(` or `{`, scan ahead to the matching closer.  If the
   token that follows is `=>` or `:`, return a synthetic ES6_FUN first and
   queue the opener plus the scanned tokens for replay; otherwise return the
   opener and queue the scanned tokens. *)
let lookahead_esfun state (tok, _, _ as lparen) =
  match lex_balanced state (closing_of tok) [] with
  | exception (Lex_balanced_failed (tokens, exn)) ->
    state.queued_tokens <- List.rev tokens;
    state.queued_exn <- exn;
    lparen
  | tokens ->
    begin match token state with
    | exception exn ->
      state.queued_tokens <- List.rev tokens;
      state.queued_exn <- Some exn;
      lparen
    | token ->
      let tokens = save_triple state.lexbuf token :: tokens in
      if is_triggering_token token then (
        state.queued_tokens <- lparen :: List.rev tokens;
        fake_triple ES6_FUN lparen
      ) else (
        state.queued_tokens <- List.rev tokens;
        lparen
      )
    end

let token state =
  let lexbuf = state.lexbuf in
  match state.queued_tokens, state.queued_exn with
  | [], Some exn ->
    state.queued_exn <- None;
    raise exn
  | [(LPAREN, _, _) as lparen], None ->
    lookahead_esfun state lparen
  | [(LBRACE, _, _) as lparen], None ->
    lookahead_esfun state lparen
  | [], None ->
    begin match token state with
    | LPAREN | LBRACE as tok ->
      lookahead_esfun state (save_triple state.lexbuf tok)
    | (LIDENT _ | UNDERSCORE) as tok ->
      let tok = save_triple lexbuf tok in
      begin match token state with
      | exception exn ->
        state.queued_exn <- Some exn;
        tok
      | tok' ->
        if is_triggering_token tok' then (
          state.queued_tokens <- [tok; save_triple lexbuf tok'];
          fake_triple ES6_FUN tok
        ) else (
          state.queued_tokens <- [save_triple lexbuf tok'];
          tok
        )
      end
    | token -> save_triple lexbuf token
    end
  | x :: xs, _ ->
    state.queued_tokens <- xs;
    x

(* Wrap [token] once more to optionally splice in a completion identifier
   (LIDENT "_") at [completion_ident_offset]. *)
let token state =
  let space_start = state.last_cnum in
  let (token', start_p, curr_p) as token = token state in
  let token_start = start_p.Lexing.pos_cnum in
  let token_stop = curr_p.Lexing.pos_cnum in
  state.last_cnum <- token_stop;
  if state.completion_ident_offset > min_int
     && space_start <= state.completion_ident_offset
     && token_stop >= state.completion_ident_offset
  then (
    match token' with
    | LIDENT _ | UIDENT _ when token_start <= state.completion_ident_offset ->
      state.completion_ident_offset <- min_int;
      token
    | _ ->
      state.queued_tokens <- token :: state.queued_tokens;
      state.completion_ident_offset <- min_int;
      (LIDENT "_", state.completion_ident_pos, state.completion_ident_pos)
  )
  else token

type comment = string * Location.t
type invalid_docstrings = comment list

let empty_invalid_docstrings = []

let add_invalid_docstring text loc_start loc_end invalid_docstrings =
  let loc = { Location.loc_start; loc_end; loc_ghost = false } in
  (text, loc) :: invalid_docstrings

(* Merge the comments collected while lexing with any invalid docstrings. *)
let get_comments state invalid_docstrings =
  let cnum (_, loc) = loc.Location.loc_start.Lexing.pos_cnum in
  let rec merge_comments acc = function
    | [], xs | xs, [] -> List.rev_append xs acc
    | ((x :: _) as xs), (y :: ys) when cnum x >= cnum y ->
      merge_comments (y :: acc) (xs, ys)
    | x :: xs, ys ->
      merge_comments (x :: acc) (xs, ys)
  in
  merge_comments [] (state.comments, invalid_docstrings)
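
(* A minimal driver sketch, not part of this module's API: [tokens_of_string]
   below is a hypothetical helper showing how [init], [token] and
   [get_comments] are expected to compose.  It assumes the stream ends with
   EOF and that no completion identifier is requested.

   let tokens_of_string src =
     let lexbuf = Lexing.from_string src in
     let state = init lexbuf in
     let rec loop acc =
       match token state with
       | (EOF, _, _) as tok -> List.rev (tok :: acc)
       | tok -> loop (tok :: acc)
     in
     let tokens = loop [] in
     (tokens, get_comments state empty_invalid_docstrings)
*)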