(*****************************************************************************)
(*                                                                           *)
(* Open Source License                                                       *)
(* Copyright (c) 2018-2021 Tarides <contact@tarides.com>                     *)
(* Copyright (c) 2021 Nomadic Labs, <contact@nomadic-labs.com>               *)
(*                                                                           *)
(* Permission is hereby granted, free of charge, to any person obtaining a   *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense,  *)
(* and/or sell copies of the Software, and to permit persons to whom the     *)
(* Software is furnished to do so, subject to the following conditions:      *)
(*                                                                           *)
(* The above copyright notice and this permission notice shall be included   *)
(* in all copies or substantial portions of the Software.                    *)
(*                                                                           *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,  *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL   *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING   *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER       *)
(* DEALINGS IN THE SOFTWARE.                                                 *)
(*                                                                           *)
(*****************************************************************************)

open Tezos_context_encoding.Context
module Env = Env

module type DB = Irmin.Generic_key.S with module Schema = Schema

module Kinded_hash = struct
  let of_context_hash = function
    | `Value h -> `Contents (Hash.of_context_hash h, ())
    | `Node h -> `Node (Hash.of_context_hash h)

  let to_context_hash = function
    | `Contents (h, ()) -> `Value (Hash.to_context_hash h)
    | `Node h -> `Node (Hash.to_context_hash h)
end

type proof_version_expanded = {is_stream : bool; is_binary : bool}

let stream_mask = 0b1

let binary_mask = 0b10

let decode_proof_version v =
  let extract_bit v mask = (v land mask <> 0, v land lnot mask) in
  let is_stream, v = extract_bit v stream_mask in
  let is_binary, v = extract_bit v binary_mask in
  if v <> 0 then Error `Invalid_proof_version else Ok {is_stream; is_binary}

let encode_proof_version ~is_stream ~is_binary =
  (if is_stream then stream_mask else 0)
  lor if is_binary then binary_mask else 0

module Make_config (Conf : Conf) = struct
  let equal_config = Tezos_context_sigs.Config.equal

  let config _ =
    Tezos_context_sigs.Config.v
      ~entries:Conf.entries
      ~stable_hash:Conf.stable_hash
      ~inode_child_order:Conf.inode_child_order
end

module Make_tree (Conf : Conf) (Store : DB) = struct
  include Store.Tree
  include Make_config (Conf)

  let pp = Irmin.Type.pp Store.tree_t

  let empty _ = Store.Tree.empty ()

  let equal = Irmin.Type.(unstage (equal Store.tree_t))

  let is_empty t = equal (Store.Tree.empty ()) t

  let hash t = Hash.to_context_hash (Store.Tree.hash t)

  let add t k v = Store.Tree.add t k v

  let kind t =
    match Store.Tree.destruct t with `Contents _ -> `Value | `Node _ -> `Tree

  let to_value t =
    let open Lwt_syntax in
    match Store.Tree.destruct t with
    | `Contents (c, _) ->
        let+ v = Store.Tree.Contents.force_exn c in
        Some v
    | `Node _ -> Lwt.return_none

  let of_value _ v = Store.Tree.add (Store.Tree.empty ()) [] v

  let fold ?depth t k ~(order : [`Sorted | `Undefined]) ~init ~f =
    let open Lwt_syntax in
    let* o = find_tree t k in
    match o with
    | None -> Lwt.return init
    | Some t ->
        let order =
          (order :> [`Random of Random.State.t | `Sorted | `Undefined])
        in
        Store.Tree.fold
          ?depth
          ~force:`True
          ~cache:false
          ~uniq:`False
          ~order
          ~tree:(fun k t acc ->
            match kind t with
            | `Value -> if k = [] then Lwt.return acc else f k t acc
            | `Tree -> f k t acc)
          t
          init

  type raw = [`Value of bytes | `Tree of raw String.Map.t]

  type concrete = Store.Tree.concrete

  let rec raw_of_concrete : type a. (raw -> a) -> concrete -> a =
   fun k -> function
    | `Tree l -> raw_of_node (fun l -> k (`Tree (String.Map.of_seq l))) l
    | `Contents (v, _) -> k (`Value v)

  and raw_of_node :
      type a. ((string * raw) Seq.t -> a) -> (string * concrete) list -> a =
   fun k -> function
    | [] -> k Seq.empty
    | (n, v) :: t ->
        raw_of_concrete
          (fun v -> raw_of_node (fun t -> k (fun () -> Seq.Cons ((n, v), t))) t)
          v

  let to_raw t =
    let open Lwt_syntax in
    let+ c = Store.Tree.to_concrete t in
    raw_of_concrete (fun t -> t) c

  let rec concrete_of_raw : type a. (concrete -> a) -> raw -> a =
   fun k -> function
    | `Tree l -> concrete_of_node (fun l -> k (`Tree l)) (String.Map.to_seq l)
    | `Value v -> k (`Contents (v, ()))

  and concrete_of_node :
      type a. ((string * concrete) list -> a) -> (string * raw) Seq.t -> a =
   fun k seq ->
    match seq () with
    | Nil -> k []
    | Cons ((n, v), t) ->
        concrete_of_raw
          (fun v -> concrete_of_node (fun t -> k ((n, v) :: t)) t)
          v

  let of_raw = concrete_of_raw Store.Tree.of_concrete

  let raw_encoding : raw Data_encoding.t =
    let open Data_encoding in
    mu "Tree.raw" (fun encoding ->
        let map_encoding =
          conv
            String.Map.bindings
            (fun bindings -> String.Map.of_seq (List.to_seq bindings))
            (list (tup2 string encoding))
        in
        union
          [
            case
              ~title:"tree"
              (Tag 0)
              map_encoding
              (function `Tree t -> Some t | `Value _ -> None)
              (fun t -> `Tree t);
            case
              ~title:"value"
              (Tag 1)
              bytes
              (function `Value v -> Some v | `Tree _ -> None)
              (fun v -> `Value v);
          ])

  (** [unshallow t] is the tree equivalent to [t] but with all subtrees evaluated,
      i.e. without "reference" nodes.
      This is done by calling `of_raw . to_raw`, which is *not* the identity function.
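      (Concretely, [to_raw] forces every lazily-loaded subtree by materialising
      the whole tree with [Store.Tree.to_concrete], and [of_raw] rebuilds an
      in-memory tree from it with [Store.Tree.of_concrete].)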
      TODO: find a more efficient way to do the same, maybe with `fold` *)
  let unshallow t =
    let open Lwt_syntax in
    let* r = to_raw t in
    return (of_raw r)

  type repo = Store.repo

  let make_repo =
    let prng_state = lazy (Random.State.make_self_init ()) in
    (* [irmin-pack] stores implicitly share instances according to a string
       argument (for persistent stores, this is the store's file path). To avoid
       having hidden global state, we generate a unique string each time. *)
    let random_store_name () =
      let prng_state = Lazy.force prng_state in
      String.init 64 (fun _ -> Char.chr (Random.State.int prng_state 256))
    in
    fun () -> Store.Repo.v @@ Irmin_pack.config @@ random_store_name ()

  let kinded_key t =
    match Store.Tree.key t with
    | (None | Some (`Node _)) as r -> r
    | Some (`Contents (v, ())) -> Some (`Value v)

  let is_shallow tree =
    match Store.Tree.inspect tree with
    | `Node `Key -> true
    | `Node (`Map | `Value | `Portable_dirty | `Pruned) | `Contents -> false

  let list tree ?offset ?length key =
    Store.Tree.list ~cache:true tree ?offset ?length key

  let length tree key = Store.Tree.length ~cache:true tree key

  exception Context_dangling_hash of string

  let find_tree tree key =
    Lwt.catch
      (fun () -> Store.Tree.find_tree tree key)
      (function
        | Store.Backend.Node.Val.Dangling_hash {context; hash}
        | Store.Tree.Dangling_hash {context; hash} ->
            let str =
              Fmt.str
                "%s encountered dangling hash %a"
                context
                (Irmin.Type.pp Hash.t)
                hash
            in
            raise (Context_dangling_hash str)
        | exn -> raise exn)

  let add_tree tree key value =
    Lwt.catch
      (fun () -> Store.Tree.add_tree tree key value)
      (function
        | Store.Backend.Node.Val.Dangling_hash {context; hash}
        | Store.Tree.Dangling_hash {context; hash} ->
            let str =
              Fmt.str
                "%s encountered dangling hash %a"
                context
                (Irmin.Type.pp Hash.t)
                hash
            in
            raise (Context_dangling_hash str)
        | exn -> raise exn)
end

module Proof_encoding = Tezos_context_merkle_proof_encoding

module Make_proof
    (Store : DB)
    (Store_conf : Tezos_context_encoding.Context.Conf) =
struct
  module DB_proof = Store.Tree.Proof

  module Proof = struct
    include Tezos_context_sigs.Context.Proof_types

    module State = struct
      let rec to_inode : type a b. (a -> b) -> a DB_proof.inode -> b inode =
       fun f {length; proofs} ->
        {length; proofs = List.map (fun (k, v) -> (k, f v)) proofs}

      and to_tree : DB_proof.tree -> tree = function
        | Contents (c, ()) -> Value c
        | Blinded_contents (h, ()) -> Blinded_value (Hash.to_context_hash h)
        | Node l -> Node (List.map (fun (k, v) -> (k, to_tree v)) l)
        | Blinded_node h -> Blinded_node (Hash.to_context_hash h)
        | Inode i -> Inode (to_inode to_inode_tree i)
        | Extender e -> Extender (to_inode_extender to_inode_tree e)

      and to_inode_extender :
          type a b. (a -> b) -> a DB_proof.inode_extender -> b inode_extender =
       fun f {length; segments = segment; proof} ->
        {length; segment; proof = f proof}

      and to_inode_tree : DB_proof.inode_tree -> inode_tree = function
        | Blinded_inode h -> Blinded_inode (Hash.to_context_hash h)
        | Inode_values l ->
            Inode_values (List.map (fun (k, v) -> (k, to_tree v)) l)
        | Inode_tree i -> Inode_tree (to_inode to_inode_tree i)
        | Inode_extender e ->
            Inode_extender (to_inode_extender to_inode_tree e)

      let rec of_inode : type a b. (a -> b) -> a inode -> b DB_proof.inode =
       fun f {length; proofs} ->
        {length; proofs = List.map (fun (k, v) -> (k, f v)) proofs}

      and of_tree : tree -> DB_proof.tree = function
        | Value c -> Contents (c, ())
        | Blinded_value h -> Blinded_contents (Hash.of_context_hash h, ())
        | Node l -> Node (List.map (fun (k, v) -> (k, of_tree v)) l)
        | Blinded_node h -> Blinded_node (Hash.of_context_hash h)
        | Inode i -> Inode (of_inode of_inode_tree i)
        | Extender e -> Extender (of_inode_extender of_inode_tree e)

      and of_inode_extender :
          type a b. (a -> b) -> a inode_extender -> b DB_proof.inode_extender =
       fun f {length; segment = segments; proof} ->
        {length; segments; proof = f proof}

      and of_inode_tree : inode_tree -> DB_proof.inode_tree = function
        | Blinded_inode h -> Blinded_inode (Hash.of_context_hash h)
        | Inode_values l ->
            Inode_values (List.map (fun (k, v) -> (k, of_tree v)) l)
        | Inode_tree i -> Inode_tree (of_inode of_inode_tree i)
        | Inode_extender e ->
            Inode_extender (of_inode_extender of_inode_tree e)

      let of_stream_elt : Stream.elt -> DB_proof.elt = function
        | Value c -> Contents c
        | Node l ->
            Node (List.map (fun (k, v) -> (k, Kinded_hash.of_context_hash v)) l)
        | Inode i -> Inode (of_inode Hash.of_context_hash i)
        | Inode_extender e ->
            Inode_extender (of_inode_extender Hash.of_context_hash e)

      let of_stream : stream -> DB_proof.stream = Seq.map of_stream_elt

      let to_stream_elt : DB_proof.elt -> Stream.elt = function
        | Contents c -> Value c
        | Node l ->
            Node (List.map (fun (k, v) -> (k, Kinded_hash.to_context_hash v)) l)
        | Inode i -> Inode (to_inode Hash.to_context_hash i)
        | Inode_extender e ->
            Inode_extender (to_inode_extender Hash.to_context_hash e)

      let to_stream : DB_proof.stream -> stream = Seq.map to_stream_elt
    end

    let is_binary =
      if Store_conf.entries = 2 then true
      else if Store_conf.entries = 32 then false
      else assert false

    let of_proof ~is_stream f p =
      let before = Kinded_hash.to_context_hash (DB_proof.before p) in
      let after = Kinded_hash.to_context_hash (DB_proof.after p) in
      let state = f (DB_proof.state p) in
      let version = encode_proof_version ~is_stream ~is_binary in
      {version; before; after; state}

    let to_proof f p =
      let before = Kinded_hash.of_context_hash p.before in
      let after = Kinded_hash.of_context_hash p.after in
      let state = f p.state in
      DB_proof.v ~before ~after state

    let to_tree = of_proof ~is_stream:false State.to_tree

    let of_tree = to_proof State.of_tree

    let to_stream = of_proof ~is_stream:true State.to_stream

    let of_stream = to_proof State.of_stream
  end

  let produce_tree_proof repo key f =
    let open Lwt_syntax in
    let key =
      match key with `Node n -> `Node n | `Value v -> `Contents (v, ())
    in
    let+ p, r = Store.Tree.produce_proof repo key f in
    (Proof.to_tree p, r)

  let verify_tree_proof proof f =
    let proof = Proof.of_tree proof in
    Store.Tree.verify_proof proof f

  let produce_stream_proof repo key f =
    let open Lwt_syntax in
    let key =
      match key with `Node n -> `Node n | `Value v -> `Contents (v, ())
    in
    let+ p, r = Store.Tree.produce_stream repo key f in
    (Proof.to_stream p, r)

  let verify_stream_proof proof f =
    let proof = Proof.of_stream proof in
    Store.Tree.verify_stream proof f
end

type error += Unsupported_context_hash_version of Context_hash.Version.t

let () =
  register_error_kind
    `Permanent
    ~id:"context_hash.unsupported_version"
    ~title:"Unsupported context hash version"
    ~description:"Unsupported context hash version."
    ~pp:(fun ppf version ->
      Format.fprintf
        ppf
        "@[Context hash version %a is not supported.@,\
         You might need to update the shell.@]"
        Context_hash.Version.pp
        version)
    Data_encoding.(obj1 (req "version" Context_hash.Version.encoding))
    (function
      | Unsupported_context_hash_version version -> Some version | _ -> None)
    (fun version -> Unsupported_context_hash_version version)
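
(* Illustrative sketch of the proof-version bit layout defined above: with
   [stream_mask = 0b1] and [binary_mask = 0b10], a streamed binary proof sets
   both low bits and any unknown bit is rejected. The local name
   [_check_proof_version_bits] is hypothetical and exists only as an example. *)
let _check_proof_version_bits () =
  assert (encode_proof_version ~is_stream:true ~is_binary:true = 0b11) ;
  assert (decode_proof_version 0b11 = Ok {is_stream = true; is_binary = true}) ;
  assert (decode_proof_version 0b100 = Error `Invalid_proof_version)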