(*****************************************************************************)
(*                                                                           *)
(* Open Source License                                                       *)
(* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com>                *)
(*                                                                           *)
(* Permission is hereby granted, free of charge, to any person obtaining a   *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense,  *)
(* and/or sell copies of the Software, and to permit persons to whom the     *)
(* Software is furnished to do so, subject to the following conditions:      *)
(*                                                                           *)
(* The above copyright notice and this permission notice shall be included   *)
(* in all copies or substantial portions of the Software.                    *)
(*                                                                           *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,  *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL   *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING   *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER       *)
(* DEALINGS IN THE SOFTWARE.
*)(* *)(*****************************************************************************)openError_monadincludeCryptobox_intfmoduleSrs_g1=Kzg.Bls.Srs_g1moduleSrs_g2=Kzg.Bls.Srs_g2moduleScalar=Kzg.Bls.ScalarmodulePoly=Kzg.Bls.PolymoduleDomain=Kzg.Bls.DomainmoduleEvals=Kzg.Bls.EvalsmoduleFFT=Kzg.Utils.FFTmoduleDegree_check=Kzg.Degree_checkmoduleKate_amortized=Kzg.Kate_amortizedmoduleBase58=Tezos_crypto.Base58typeerror+=Failed_to_load_trusted_setupofstringlet()=register_error_kind`Permanent~id:"dal.node.trusted_setup_loading_failed"~title:"Trusted setup loading failed"~description:"Trusted setup failed to load"~pp:(funppfmsg->Format.fprintfppf"Trusted setup failed to load: %s"msg)Data_encoding.(obj1(req"msg"string))(function|Failed_to_load_trusted_setupparameter->Someparameter|_->None)(funparameter->Failed_to_load_trusted_setupparameter)[@@coverageoff]typeinitialisation_parameters=|Verifierof{is_fake:bool}|Proverof{is_fake:bool;srs_g1:Srs_g1.t;srs_g2:Srs_g2.t}(* Initialisation parameters are supposed to be instantiated once. *)letinitialisation_parameters=ref@@Verifier{is_fake=false}(* This function is expected to be called once. *)letload_parametersparameters=letopenResult_syntaxininitialisation_parameters:=parameters;return_unit(* FIXME https://gitlab.com/tezos/tezos/-/issues/3400
An integrity check is run to ensure the validity of the files. *)(* TODO catch Failed_to_load_trusted_setup *)letinitialisation_parameters_from_files~srs_g1_path~srs_g2_path~srs_size=letopenLwt_syntaxinlet*srs=Srs.read_srs~len:srs_size~srs_g1_path~srs_g2_path()inletopenResult_syntaxinLwt.return@@matchsrswith|Error(`End_of_files)->tzfail(Failed_to_load_trusted_setup("EOF: "^s))|Error(`Invalid_pointp)->tzfail(Failed_to_load_trusted_setup(Printf.sprintf"Invalid point %i"p))|Oksrs->returnsrsmoduleInner=structmoduleCommitment=structincludeKzg.Commitment.Single_G1typeTezos_crypto.Base58.data+=Dataoftletb58check_encoding=Tezos_crypto.Base58.register_encoding~prefix:Tezos_crypto.Base58.Prefix.slot_header~length:size~to_raw:to_string~of_raw:of_string_opt~wrap:(funx->Datax)[@@coverageoff]letraw_encoding=encoding[@@coverageoff]includeTezos_crypto.Helpers.Make(structtypet=Kzg.Commitment.Single_G1.tletname="DAL_commitment"lettitle="Commitment representation for the DAL"letb58check_encoding=b58check_encodingletraw_encoding=raw_encodingletcompare=compareletequal=equallethash_=(* The commitment is not hashed. This is ensured by the
function exposed. We only need the Base58 encoding and the
rpc_arg. *)assertfalse[@@coverageoff]letseeded_hash__=(* Same argument. *)assertfalse[@@coverageoff]end)letof_b58check=of_b58checkendmoduleProof=CommitmentmoduleCommitment_proof=Degree_check.Prooftypeslot=bytestypescalar=Scalar.ttypepolynomial=Poly.ttypecommitment=Commitment.ttypeshard_proof=Proof.ttypecommitment_proof=Commitment_proof.ttypepage_proof=Proof.ttypepage=bytestypeshare=Scalar.tarraytypeshard={index:int;share:share}typeshards_proofs_precomputation=Kate_amortized.preprocesstype('a,'b)error_container={given:'a;expected:'b}moduleEncoding=structopenData_encodingletpage_proof_encoding=Proof.encodingletshare_encoding=arrayScalar.encodingletshard_proof_encoding=Proof.encodingletshard_encoding=conv(fun{index;share}->(index,share))(fun(index,share)->{index;share})(tup2int31share_encoding)[@@coverageoff]letshards_proofs_precomputation_encoding=Kate_amortized.preprocess_encodingleterror_container_encoding(given_encoding:'aencoding)(expected_encoding:'bencoding):('a,'b)error_containerencoding=conv(fun{given;expected}->(given,expected))(fun(given,expected)->{given;expected})(obj2(req"given"given_encoding)(req"expected"expected_encoding))endincludeEncodingtypeerror+=Invalid_precomputation_hashof(string,string)error_containerlet()=register_error_kind`Permanent~id:"dal.node.invalid_precomputation_hash"~title:"Invalid_precomputation_hash"~description:"Unexpected precomputation hash"~pp:(funppf{given;expected}->Format.fprintfppf"Invalid precomputation hash: expected %s. Got %s"expectedgiven)(Encoding.error_container_encodingData_encoding.stringData_encoding.string)(functionInvalid_precomputation_hasherr->Someerr|_->None)(functionerr->Invalid_precomputation_hasherr)[@@coverageoff](* Builds group of nth roots of unity, a valid domain for the FFT. *)letmake_domainn=Domain.buildntypet={redundancy_factor:int;slot_size:int;page_size:int;number_of_shards:int;(* Maximum length of the polynomial representation of a slot, also called [polynomial_length]
in the comments. *)max_polynomial_length:int;(* Length of the erasure-encoded polynomial representation of a slot,
also called [erasure_encoded_polynomial_length] in the comments. *)erasure_encoded_polynomial_length:int;(* Domain for the FFT on slots as polynomials to be erasure encoded. *)domain_polynomial_length:Domain.t;domain_2_times_polynomial_length:Domain.t;(* Domain for the FFT on erasure encoded slots (as polynomials). *)domain_erasure_encoded_polynomial_length:Domain.t;(* Length of a shard in terms of scalar elements. *)shard_length:int;(* Number of slot pages. *)pages_per_slot:int;page_length:int;page_length_domain:int;(* Log of the number of evaluations that constitute an erasure encoded
polynomial. *)remaining_bytes:int;(* These srs_g2_* parameters are used by the verifier to check the proofs *)srs_verifier:Srs.srs_verifier;mode:[`Verifier|`Prover];(* Kate amortized public parameters for proving and verifying ;
contain, among others, SRS₁ *)kate_amortized:Kate_amortized.public_parameters;(* Degree check public parameters for proving (None when mode = `Verifier) *)degree_check:Srs_g2.toption;}letensure_validity~is_fake~mode~slot_size~page_size~redundancy_factor~number_of_shards=letopenResult_syntaxinlet*()=Parameters_check.ensure_validity_without_srs~slot_size~page_size~redundancy_factor~number_of_shardsinSrs.ensure_srs_validity~is_fake~mode~slot_size~page_size~redundancy_factor~number_of_shardstypeparameters=Dal_config.parameters={redundancy_factor:int;page_size:int;slot_size:int;number_of_shards:int;}letparameters_encoding=Dal_config.parameters_encodingletpages_per_slot{slot_size;page_size;_}=slot_size/page_sizemoduleCache=Hashtbl.Make(structtypet=parameters*initialisation_parametersletequal(param,init_param)(param',init_param')=param=param'&&match(init_param,init_param')with|Verifier{is_fake;_},Verifier{is_fake=is_fake';_}whenis_fake=is_fake'->true|Prover{is_fake;_},Prover{is_fake=is_fake';_}whenis_fake=is_fake'->true|_->falselethash=Hashtbl.hashend)(* Error cases of this functions are not encapsulated into
`tzresult` for modularity reasons. *)letmake=letopenResult_syntaxinlettable=Cache.create5inletwith_cache(parameters,initialisation_parameters)f=matchCache.find_opttable(parameters,initialisation_parameters)with|Somex->returnx|None->let*x=f()inCache.replacetable(parameters,initialisation_parameters)x;returnxinfun({redundancy_factor;slot_size;page_size;number_of_shards}asparameters)->(* The cryptobox is deterministically computed from the DAL parameters and
this computation takes time (on the order of 10ms) so we cache it. *)with_cache(parameters,!initialisation_parameters)@@fun()->letmax_polynomial_length,erasure_encoded_polynomial_length,shard_length=Parameters_check.compute_lengths~redundancy_factor~slot_size~page_size~number_of_shardsinletpage_length=Parameters_check.page_length~page_sizeinletpage_length_domain,_,_=FFT.select_fft_domainpage_lengthinletmode,is_fake,srs_g1,degree_check=match!initialisation_parameterswith|Verifier{is_fake}->letsrs=ifis_fakethenSrs.Internal_for_tests.get_verifier_srs1()elseSrs.get_verifier_srs1()in(`Verifier,is_fake,srs,None)|Prover{is_fake;srs_g1;srs_g2}->(`Prover,is_fake,srs_g1,Somesrs_g2)inlet*()=ensure_validity~is_fake~mode~slot_size~page_size~redundancy_factor~number_of_shardsinletsrs_verifier=(ifis_fakethenSrs.Internal_for_tests.get_verifier_srs2elseSrs.get_verifier_srs2)~max_polynomial_length~page_length_domain~shard_lengthinletkate_amortized=Kate_amortized.{max_polynomial_length;shard_length;srs_g1;number_of_shards}inreturn{redundancy_factor;slot_size;page_size;number_of_shards;max_polynomial_length;erasure_encoded_polynomial_length;domain_polynomial_length=make_domainmax_polynomial_length;domain_2_times_polynomial_length=make_domain(2*max_polynomial_length);domain_erasure_encoded_polynomial_length=make_domainerasure_encoded_polynomial_length;shard_length;pages_per_slot=pages_per_slotparameters;page_length;page_length_domain;remaining_bytes=page_sizemodParameters_check.scalar_bytes_amount;srs_verifier;mode;kate_amortized;degree_check;}letparameters({redundancy_factor;slot_size;page_size;number_of_shards;_}:t)={redundancy_factor;slot_size;page_size;number_of_shards}[@@coverageoff]letpolynomial_degree=Poly.degreeletpolynomial_evaluate=Poly.evaluate(* [polynomials_multiplication d ps] computes the product of the
polynomials [ps]. The degree of the resulting product must
be strictly less than the size of the domain [d].
Runtime is [O(n log n)] where [n = Domains.length d]. *)letpolynomials_productdps=letevaluations=List.map(FFT.fftd)psinFFT.ifft_inplaced(Evals.mul_c~evaluations())(* We encode by pages of [page_size] bytes each. The pages
are arranged in cosets to produce batched KZG proofs
[https://www.iacr.org/archive/asiacrypt2010/6477178/6477178.pdf]
using the technique described in https://eprint.iacr.org/2023/033. *)letpolynomial_from_slot(t:t)slot=(* Expects the length of the byte sequence to equal the slot size.
This can be achieved by adding some padding with null bytes to the
byte sequence if the length is strictly less than the slot size, or
truncate it if the length is strictly greater than the slot size. *)ifBytes.lengthslot<>t.slot_sizethenError(`Slot_wrong_size(Printf.sprintf"message must be %d bytes long"t.slot_size))elseletoffset=ref0inletcoefficients=Array.initt.max_polynomial_length(fun_->Scalar.(copyzero))in(* A slot is subdivided into contiguous segments, called pages.
The length of a page divides the slot size, that is:
[t.page_size * t.pages_per_slot = t.slot_size]
We parse the slot page by page. *)forpage=0tot.pages_per_slot-1do(* A permutation (so a bijection) is then applied to the elements
from a page. The code applies the permutation to a scalar
element just after it is read from the page, but it is easier to
picture it as two successive steps:
1. a serialization phase where pages are converted to sequences
of scalar elements, which is injective: if two outputs for two
slots are equal, then the slots are equal since we’re just
splitting a page into chunks of [Parameters_check.scalar_bytes_amount] bytes and
a last one of [t.remaining_bytes] bytes.
Parse the byte sequence slot = page_0 ... page_{t.pages_per_slot-1}
page by page by chunks of [Parameters_check.scalar_bytes_amount], or [t.remaining_bytes]
for the last page chunk.
to obtain the vector of length [polynomial_length = t.page_length * t.pages_per_slot]
(chunk p_i^j standing for i-th chunk of page j):
[ p_0^0 p_1^0 ... p_{page_length-1}^0
....
p_0^{t.pages_per_slot-1} ... p_{page_length-1}^{t.pages_per_slot-1}]
2. then apply the permutation, reindexing the page elements so
that their indices correspond to the appropriate coset.
[ p_0^0 p_0^1 ... p_0^{t.pages_per_slot-1}
...
p_{page_length-1}^0 p_{page_length-1}^1 ... p_{t.page_length-1}^{t.pages_per_slot-1}]
For a primitive k-th root of unity, the group generated by w, <w>,
can be split into cosets
<w> = Disjoint union_{j=0, ..., t.pages_per_slot - 1} w^j W
where W = { w^{t.pages_per_slot * i} }_{i=0, ..., t.page_length - 1}.
This way, the indices of the coefficients for each page
j=0, ..., t.pages_per_slot-1 coincide with the indices of the
elements of the cosets (w^j W). Indeed, the coefficients
of the elements of w^j W are
{i * t.pages_per_slot + j}_{i=0, ..., t.page_length-1}. *)forelt=0tot.page_length-2do(* [!offset >= t.slot_size] because we don't want to read past
the buffer [slot] bounds. *)if!offset>=t.slot_sizethen()elseletdst=Bytes.createParameters_check.scalar_bytes_amountinBytes.blitslot!offsetdst0Parameters_check.scalar_bytes_amount;offset:=!offset+Parameters_check.scalar_bytes_amount;(* Apply the permutation. *)coefficients.((elt*t.pages_per_slot)+page)<-Scalar.of_bytes_exndstdone;letdst=Bytes.createt.remaining_bytesinBytes.blitslot!offsetdst0t.remaining_bytes;offset:=!offset+t.remaining_bytes;(* Apply the permutation. *)coefficients.(((t.page_length-1)*t.pages_per_slot)+page)<-Scalar.of_bytes_exndstdone;(* The resulting vector is then interpolated. Polynomial
interpolation is a linear bijection (as a ring isomorphism)
between k-tuples of scalar elements and polynomials of degree < k
with coefficients in the scalar field.
Thus [polynomial_from_slot] is an injection from slots to
polynomials (as composition preserves injectivity). *)Ok(FFT.ifft_inplace(Domain.buildt.max_polynomial_length)(Evals.of_array(t.max_polynomial_length-1,coefficients)))(* [polynomial_to_slot] is the left-inverse function of
[polynomial_from_slot]. *)letpolynomial_to_slottp=(* The last operation of [polynomial_from_slot] is the interpolation,
so we undo it with an evaluation on the same domain [t.domain_polynomial_length]. *)letevaluations=FFT.fft(Domain.buildt.max_polynomial_length)pinletslot=Bytes.maket.slot_size'\x00'inletoffset=ref0in(* Reverse permutation from [polynomial_from_slot]. *)forpage=0tot.pages_per_slot-1doforelt=0tot.page_length-2doletidx=(elt*t.pages_per_slot)+pageinletcoeff=Scalar.to_bytes(Evals.getevaluationsidx)inBytes.blitcoeff0slot!offsetParameters_check.scalar_bytes_amount;offset:=!offset+Parameters_check.scalar_bytes_amountdone;letidx=((t.page_length-1)*t.pages_per_slot)+pageinletcoeff=Scalar.to_bytes(Evals.getevaluationsidx)inBytes.blitcoeff0slot!offsett.remaining_bytes;offset:=!offset+t.remaining_bytesdone;slot(* Encoding a message P = (P_0, ... ,P_{k-1}) amounts to evaluate
its associated polynomial P(x)=sum_{i=0}^{k-1} P_i x^i at the
evaluation points [t.domain_erasure_encoded_polynomial_length].
This can be achieved with an n-points discrete Fourier transform
supported by the [Scalar] field in time O(n log n). *)letencodetp=Evals.to_array(FFT.fft(Domain.buildt.erasure_encoded_polynomial_length)p)(* The shards are arranged in cosets to produce batches of KZG proofs
for the shards efficiently.
The domain of evaluation <w> is split into cosets:
<w> = Disjoint union_{i in ⟦0, t.number_of_shards-1⟧} W_i,
where W_0 = {w^{t.number_of_shards * j}}_{j in ⟦0, t.shard_length-1⟧}
and W_i = w^i W_0 (|W_0|=t.shard_length). *)letshards_from_polynomialtp=letcodeword=encodetpinletrecloopindexseq=ifindex<0thenseqelse(* For each shard index [index], a [share] consists of the
elements from [codeword] of indices w^index W_0.
A shard consists of its [index] and associated [share]. *)letshare=Array.initt.shard_length(fun_->Scalar.(copyzero))inforj=0tot.shard_length-1doshare.(j)<-codeword.((t.number_of_shards*j)+index)done;loop(index-1)(Seq.cons{index;share}seq)inloop(t.number_of_shards-1)Seq.emptymoduleShardSet=Set.Make(structtypet=shardletcompareab=Int.comparea.indexb.indexend)letencoded_share_sizet=(* FIXME: https://gitlab.com/tezos/tezos/-/issues/4289
Improve shard size computation *)letshare_scalar_len=t.erasure_encoded_polynomial_length/t.number_of_shardsin(share_scalar_len*Scalar.size_in_bytes)+4(* Let w be a primitive [t.erasure_encoded_polynomial_length]-th root of unity, where
[t.max_polynomial_length] and [t.erasure_encoded_polynomial_length = t.redundancy_factor * t.max_polynomial_length] divide
[Scalar.order - 1]. Let F be a finite field, and in this
context we use the prime field [Scalar].
We can decode a codeword c from
RS(n, k, (w^i)_i) ≔ { (P(w^i))_{i in ⟦0, n-1⟧} | P in F[x] /\ deg P < k }
with at least k components of c. By decode we mean finding the
underlying message represented by the polynomial P such that
c=(P(w^i))_{i in ⟦0, n-1⟧} with at least k components of c.
Without loss of generality, let c̃ = (c_0, ..., c_{k-1})
be the received codeword with erasures, i.e., the first k components of
a codeword. We can retrieve the original message with any k
components of c thanks to the Lagrange interpolation polynomial P(x)
defined as follows, where the x_i=w^i are the evaluation points of c̃:
P(x) = sum_{i=0}^{k-1} c_i prod_{j=0, j != i}^{k-1} (x-x_j)/(x_i - x_j).
As detailed in https://arxiv.org/pdf/0907.1788v1.pdf, the idea is
to rewrite P(x) as a product of two polynomials A(x) and B(x) so
that the convolution theorem allows us to recover P(x) using FFTs
in O(n log n).
Asymptotic complexity is O(n log n). *)letpolynomial_from_shardstshards=letshards=(* We always consider the first k codeword vector components,
the ShardSet allows collecting distinct indices.
[Seq.take] doesn't raise any exceptions as t.max_polynomial_length / t.shard_length
is (strictly) positive.
[t.max_polynomial_length / t.shard_length] is strictly less than [t.number_of_shards].
Indeed, [t.shard_length = t.erasure_encoded_polynomial_length / t.number_of_shards] where
[t.erasure_encoded_polynomial_length = t.max_polynomial_length * t.redundancy_factor] and [t.redundancy_factor > 1].
Thus,
[t.max_polynomial_length / t.shard_length = t.number_of_shards / t.redundancy_factor < t.number_of_shards].
Here, all variables are positive integers, and [t.redundancy_factor]
divides [t.number_of_shards], [t.number_of_shards] divides [t.erasure_encoded_polynomial_length]. *)Seq.take(t.max_polynomial_length/t.shard_length)shards|>ShardSet.of_seqin(* There should be [t.max_polynomial_length / t.shard_length] distinct shard indices. *)ift.max_polynomial_length/t.shard_length>ShardSet.cardinalshardsthenError(`Not_enough_shards(Printf.sprintf"there must be at least %d shards to decode"(t.max_polynomial_length/t.shard_length)))elseifShardSet.exists(fun{share;_}->Array.lengthshare<>t.shard_length)shardsthenError(`Invalid_shard_length(Printf.sprintf"At least one shard of invalid length: expected length %d."t.shard_length))elseifShardSet.exists(fun{index;_}->index>=t.number_of_shards||index<0)shardsthenError(`Shard_index_out_of_range(Printf.sprintf"At least one shard index out of range: expected indices within \
the range [%d, %d]."0(t.number_of_shards-1)))else(* 1. Computing A(x) = prod_{i=0}^{k-1} (x - x_i).
Let A(x) ≔ prod_{i=0}^{k-1} (x-x_i) and
A_i(x) ≔ prod_{j=0, j != i}^{k-1} (x-x_j).
Let n_i ≔ c_i / A_i(x_i).
The interpolation polynomial becomes:
P(x) = A(x) sum_{i=0}^{k-1} c_i / ((x - x_i) A_i(x_i))
= A(x) sum_{i=0}^{k-1} n_i / (x - x_i).
Note that A_i(x_i) != 0 by definition, so it is invertible
in the [Scalar] field. *)(* The codewords are split into chunks called shards.
For this purpose, let [t.shard_length=t.n/t.number_of_shards]
be the length of a shard, and w a primitive n-th root
of unity.
The domain of evaluation <w> is then split into cosets:
<w> = Disjoint union_{i in ⟦0, t.number_of_shards-1⟧} W_i,
where W_0 = {w^{t.number_of_shards * j}}_{j in ⟦0, t.shard_length-1⟧}
and W_i = w^i W_0 (|W_0|=t.shard_length).
For a set of [t.max_polynomial_length / shard_length] shard indices
Z subseteq {0, t.number_of_shards-1}, we reorganize the product
A(x)=prod_{i=0}^{k-1} (x-x_i) into
A(x) = prod_{i in Z, |Z|=t.max_polynomial_length/t.shard_length} Z_i
where Z_i = prod_{w' in W_i} (x - w').
We notice that Z_0(x)=x^{|W_0|}-1 (as its roots
are the elements of a group of order dividing |W_0|)
entails Z_i(x)=x^{|W_0|}-w^{i*|W_0|}
(multiplying all terms by a constant w^i in an
integral domain), which is a sparse polynomial.
This special form for Z_i(x) allows us to compute the product of
the Z_i(x)s more efficiently with the [mul_xn] function, which
multiplies an arbitrary polynomial P with a polynomial of the
form Z_i(x) in time linear in (degree P + degree Z_i(x)).
Thus A(x) = prod_{i in Z, |Z|=k/l} x^{|W_0|}-w^{i*|W_0|}.
More formally: every element of W_i is of the form
w^i w^{t.number_of_shards j} for j in ⟦0, t.shard_length-1⟧. Thus
Z_i(w^i w^{s j}) = (w^i w^{s j})^{|W_0|}-w^{i*|W_0|}
= (w^i)^{|W_0|} (w^{s j})^{|W_0|} - w^{i*|W_0|}
= w^{i * |W_0|} * 1 - w^{i*|W_0|}=0.
So every element of W_i is a root of Z_i(x).
Moreover, Z_i(x) is a degree |W_0|=t.shard_length polynomial
so has at most [t.shard_length] roots:
Z_i(x)'s only roots are W_i
[mul acc i] computes the polynomial acc * Z_i. *)letmulacci=(* The complexity of [mul_xn] is linear in
[Polynomials.degree acc + t.shard_length]. *)Poly.mul_xnacct.shard_length(Scalar.negate(Domain.gett.domain_erasure_encoded_polynomial_length(i*t.shard_length)))in(* [partition_products seq] builds two polynomials whose
product is A(x) from the input shards [seq]. *)letpartition_productsseq=ShardSet.fold(fun{index;_}(l,r)->(mulrindex,l))seq(Poly.one,Poly.one)in(* The computation of [p1], [p2] has asymptotic complexity
[O((t.max_polynomial_length + t.shard_length) * (t.max_polynomial_length / t.shard_length))
= O(t.max_polynomial_length * t.number_of_shards / t.redundancy_factor)].
It is the most costly operation of this function. *)letp1,p2=partition_productsshardsin(* A(x) is the product of [p1] and [p2], and has degree [polynomial_length] *)assert(Poly.degreep1+Poly.degreep2=t.max_polynomial_length);letmul_domain=Domain.build(2*t.max_polynomial_length)inleteep_domain=Domain.buildt.erasure_encoded_polynomial_lengthinleta_poly=polynomials_productmul_domain[p1;p2]inassert(Poly.degreea_poly=t.max_polynomial_length);(* 2. Computing formal derivative of A(x). *)leta'=Poly.derivativea_polyin(* 3. Computing A'(w^i) = A_i(w^i).
In order to compute A_i(x) more efficiently, we use the
fact that the formal derivative A'(x) of A(x) satisfies
for all i in ⟦0, k-1⟧: A'(x_i) = A_i(x_i).
So we can compute (A_i(x_i))_i by evaluating A'(x) at the
points (w^i)_i with an FFT.
Indeed:
A'(x) = (prod_{i=0}^{k-1} (x-x_i))'
= sum_{i=0}^{k-1} (x-x_i)' prod_{j=0, j != i}^{k-1} (x-x_j)
= sum_{j=0}^{k-1} A_j(x).
So A'(x_i) = sum_{j=0}^{k-1} A_j(x_i) = A_i(x_i) as the other
polynomials A_j(x) have x_i as root. *)leteval_a'=FFT.ffteep_domaina'in(* 4. Computing N(x) ≔ sum_{i=0}^{k-1} n_i / x_i x^i
where n_i ≔ c_i/A_i(x_i)
Writing the fraction
1 / (x_i-x) = sum_{j=0}^infty x^j / (x_i^{j+1}) as a formal power
series, we obtain
P(x) / A(x) = sum_{i=0}^{k-1} n_i / (x-x_i) mod x^k
= -sum_{i=0}^{k-1} [sum_{j=0}^{k-1} n_i / (x_i^{j+1}) x^j]
= -sum_{i=0}^{k-1} N(w^{-i}) x^i
where N(x) ≔ sum_{i=0}^{k-1} n_i / x_i x^i. *)letcompute_nteval_a'shards=letn_poly=Array.initt.erasure_encoded_polynomial_length(fun_->Scalar.(copyzero))inShardSet.iter(fun{index;share}->forj=0toArray.lengthshare-1doletc_i=share.(j)inleti=(t.number_of_shards*j)+indexinletx_i=Domain.gett.domain_erasure_encoded_polynomial_lengthiinlettmp=Evals.geteval_a'iinScalar.mul_inplacetmptmpx_i;(* The call below never fails, so we don't
catch exceptions.
Indeed, [tmp = A_i(x_i)] is a product of nonzero
elements and so is itself nonzero
(thus invertible). See point 1. *)Scalar.inverse_exn_inplacetmptmp;Scalar.mul_inplacetmptmpc_i;n_poly.(i)<-tmpdone)shards;Evals.of_array(t.erasure_encoded_polynomial_length-1,n_poly)inletn_poly=compute_nteval_a'shardsin(* 5. Computing B(x).
B(x) = P(x) / A(x) = -sum_{i=0}^{k-1} N(w^{-i}) x^i.
B(x) is thus given by the first k components
of -n * IFFT_n(N). *)letb=Poly.truncate~len:t.max_polynomial_length(FFT.ifft_inplaceeep_domainn_poly)inPoly.mul_by_scalar_inplaceb(Scalar.negate(Scalar.of_intt.erasure_encoded_polynomial_length))b;(* 6. Computing Lagrange interpolation polynomial P(x).
The product is given by the convolution theorem:
P = A * B = IFFT_{2k}(FFT_{2k}(A) o FFT_{2k}(B))
where o is the pairwise product. *)letp=polynomials_productmul_domain[a_poly;b]in(* P has degree [<= max_polynomial_length - 1] so [<= max_polynomial_length]
coefficients. *)Ok(Poly.truncate~len:t.max_polynomial_lengthp)letcommittp=matcht.modewith|`Verifier->Error`Prover_SRS_not_loaded|`Prover->(tryOk(Commitment.committ.kate_amortized.srs_g1p)withKzg.Commitment.SRS_too_short_->Error(`Invalid_degree_strictly_less_than_expected{given=Poly.degreep;expected=Srs_g1.sizet.kate_amortized.srs_g1;}))letpp_commit_errorfmt=function|`Invalid_degree_strictly_less_than_expected{given;expected}->Format.fprintffmt"Invalid degree: expecting input polynomial to commit function to \
have a degree strictly less than %d. Got %d."expectedgiven|`Prover_SRS_not_loaded->Format.fprintffmt"The prover's SRS was not loaded: cannot commit a polynomial without \
the prover's SRS."letstring_of_commit_errorerr=Format.asprintf"%a"pp_commit_errorerr(* p(X) of degree n. Max degree that can be committed: d, which is also the
SRS's length - 1. We take d = t.max_polynomial_length - 1 since we don't want to commit
polynomials with degree greater than polynomials to be erasure-encoded.
We consider the bilinear groups (G_1, G_2, G_T) with G_1=<g> and G_2=<h>.
- Commit (p X^{d-n}) such that deg (p X^{d-n}) = d the max degree
that can be committed
- Verify: checks if e(commit(p), commit(X^{d-n})) = e(commit(p X^{d-n}), h)
using the commitments for p and p X^{d-n}, and computing the commitment for
X^{d-n} on G_2. *)(* Proves that degree(p) < t.max_polynomial_length *)letprove_commitment({max_polynomial_length;degree_check;_}:t)p=matchdegree_checkwith|None->Error`Prover_SRS_not_loaded|Somesrs_g2->ifSrs_g2.sizesrs_g2>=max_polynomial_lengththenOk(Degree_check.prove~max_commit:(Srs_g2.sizesrs_g2-1)~max_degree:(max_polynomial_length-1)srs_g2p)elseError(`Invalid_degree_strictly_less_than_expected{given=max_polynomial_length;expected=Srs_g2.sizesrs_g2})(* Verifies that the degree of the committed polynomial is < t.max_polynomial_length *)letverify_commitment(t:t)cmproof=Degree_check.verifyt.srs_verifier.commitmentcmproofletsave_precompute_shards_proofsprecomputation~filename=protect(fun()->Lwt_io.with_file~mode:Outputfilename(funchan->letopenLwt_result_syntaxinletstr=Data_encoding.Binary.to_string_exnEncoding.shards_proofs_precomputation_encodingprecomputationinlet*!()=Lwt_io.writechanstrinreturn_unit))lethash_precomputationprecomputation=letencoding=Data_encoding.Binary.to_bytes_exnEncoding.shards_proofs_precomputation_encodingprecomputationinTezos_crypto.Blake2B.hash_bytes[encoding]letload_precompute_shards_proofs~hash~filename()=protect(fun()->Lwt_io.with_file~mode:Inputfilename(funchan->letopenLwt_result_syntaxinlet*!str=Lwt_io.readchaninletprecomputation=Data_encoding.Binary.of_string_exnEncoding.shards_proofs_precomputation_encodingstrinlet*()=matchhashwith|Somegiven->letexpected=hash_precomputationprecomputationinifTezos_crypto.Blake2B.equalgivenexpectedthenreturn_unitelsetzfail(Invalid_precomputation_hash{given=Tezos_crypto.Blake2B.to_stringgiven;expected=Tezos_crypto.Blake2B.to_stringexpected;})|None->return_unitinreturnprecomputation))letprecompute_shards_proofs{kate_amortized;_}=(* Precomputes step. 1 of multiple multi-reveals. 
*)
  try Ok (Kate_amortized.preprocess_multiple_multi_reveals kate_amortized)
  with Kzg.Commitment.SRS_too_short _ ->
    Error
      (`Invalid_degree_strictly_less_than_expected
        {
          given = Srs_g1.size kate_amortized.srs_g1;
          expected = kate_amortized.max_polynomial_length;
        })

(* Computes the shard proofs for [polynomial], using the amortized KZG
   [precomputation]. *)
let prove_shards t ~precomputation ~polynomial =
  (* Resizing input polynomial [polynomial] to obtain an array of length
     [t.max_polynomial_length + 1]. *)
  let coefficients =
    Array.init (t.max_polynomial_length + 1) (fun _ -> Scalar.(copy zero))
  in
  let p_length = Poly.degree polynomial + 1 in
  let dense = Poly.to_dense_coefficients polynomial in
  Array.blit dense 0 coefficients 0 p_length ;
  Kate_amortized.multiple_multi_reveals
    t.kate_amortized
    ~preprocess:precomputation
    ~coefficients

(* Checks [proof] against [commitment] for a single shard: the shard index
   must be in range and the share must have the expected length. *)
let verify_shard (t : t) commitment {index = shard_index; share = evaluations}
    proof =
  if shard_index < 0 || shard_index >= t.number_of_shards then
    Error
      (`Shard_index_out_of_range
        (Printf.sprintf
           "Shard index out of range: got index %d, expected index within \
range [%d, %d]."
           shard_index
           0
           (t.number_of_shards - 1)))
  else
    let expected_shard_length = t.shard_length in
    let got_shard_length = Array.length evaluations in
    if expected_shard_length <> got_shard_length then
      Error `Shard_length_mismatch
    else
      let root =
        Domain.get t.domain_erasure_encoded_polynomial_length shard_index
      in
      let domain = Domain.build t.shard_length in
      let srs_point = t.srs_verifier.shards in
      if
        Kate_amortized.verify
          t.kate_amortized
          ~commitment
          ~srs_point
          ~domain
          ~root
          ~evaluations
          ~proof
      then Ok ()
      else Error `Invalid_shard

(* Batched variant of [verify_shard]: checks [proof_list] against
   [commitment] for every shard of [shard_list]. All indices must be in
   range and all shares must have the expected length. *)
let verify_shard_multi (t : t) commitment shard_list proof_list =
  let out_of_range_list =
    List.filter
      (fun shard -> shard.index < 0 || shard.index >= t.number_of_shards)
      shard_list
  in
  (* Fix: use structural comparison ([<> []]) rather than physical
     inequality ([!= []]) to test list emptiness; [!=] on structured values
     is a classic OCaml pitfall. The error message is now only built when
     there actually are out-of-range shards. *)
  if out_of_range_list <> [] then
    let error_message =
      String.concat
        "\n"
        (List.map
           (fun shard ->
             Printf.sprintf
               "Shard index out of range: got index %d, expected index within \
                range [%d, %d]."
               shard.index
               0
               (t.number_of_shards - 1))
           out_of_range_list)
    in
    Error (`Shard_index_out_of_range error_message)
  else
    let length_mismatch_list =
      let expected_shard_length = t.shard_length in
      List.filter
        (fun shard ->
          let got_shard_length = Array.length shard.share in
          expected_shard_length <> got_shard_length)
        shard_list
    in
    if length_mismatch_list <> [] then Error `Shard_length_mismatch
    else
      let root_list =
        List.map
          (fun shard ->
            Domain.get t.domain_erasure_encoded_polynomial_length shard.index)
          shard_list
      in
      let evaluations_list = List.map (fun shard -> shard.share) shard_list in
      let domain = Domain.build t.shard_length in
      let srs_point = t.srs_verifier.shards in
      if
        Kate_amortized.verify_multi
          t.kate_amortized
          ~commitment
          ~srs_point
          ~domain
          ~root_list
          ~evaluations_list
          ~proof_list
      then Ok ()
      else Error `Invalid_shard

(* Produces the KZG proof for page [page_index] of the slot represented by
   polynomial [p]: commits to the quotient of [p] by
   [X^page_length_domain - w^(page_index * page_length_domain)]. *)
let prove_page t p page_index =
  if page_index < 0 || page_index >= t.pages_per_slot then
    Error `Page_index_out_of_range
  else
    let wi = Domain.get t.domain_polynomial_length page_index in
    let quotient, _ =
      Poly.division_xn
        p
        t.page_length_domain
        (Scalar.negate (Scalar.pow wi (Z.of_int t.page_length_domain)))
    in
    commit t quotient

(* Parses the [slot_page] to get the evaluations that it contains. The
evaluation points are given by the [slot_page_index]. *)letverify_pagetcommitment~page_indexpageproof=ifpage_index<0||page_index>=t.pages_per_slotthenError`Page_index_out_of_rangeelseletexpected_page_length=t.page_sizeinletgot_page_length=Bytes.lengthpageinifexpected_page_length<>got_page_lengththenError`Page_length_mismatchelseletdomain=Domain.buildt.page_length_domaininletevaluations=Array.initt.page_length_domain(function|iwheni<t.page_length-1->(* Parse the [page] by chunks of [Parameters_check.scalar_bytes_amount] bytes.
These chunks are interpreted as [Scalar.t] elements and stored
in [evaluations]. *)letdst=Bytes.createParameters_check.scalar_bytes_amountinBytes.blitpage(i*Parameters_check.scalar_bytes_amount)dst0Parameters_check.scalar_bytes_amount;Scalar.of_bytes_exndst|iwheni=t.page_length-1->(* Store the remaining bytes in the last nonzero coefficient
of evaluations. *)letdst=Bytes.createt.remaining_bytesinBytes.blitpage(i*Parameters_check.scalar_bytes_amount)dst0t.remaining_bytes;Scalar.of_bytes_exndst|_->Scalar.(copyzero))inletroot=Domain.gett.domain_polynomial_lengthpage_indexinletsrs_point=t.srs_verifier.pagesinifKate_amortized.verifyt.kate_amortized~commitment~srs_point~domain~root~evaluations~proofthenOk()elseError`Invalid_pageendincludeInnermoduleVerifier=InnermoduleInternal_for_tests=structletparameters_initialisation()=Prover{is_fake=true;srs_g1=Lazy.forceSrs.Internal_for_tests.fake_srs1;srs_g2=Lazy.forceSrs.Internal_for_tests.fake_srs2;}(* Since computing fake_srs is costly, we avoid to recompute it. *)letinit_prover_dal()=initialisation_parameters:=parameters_initialisation()letinit_verifier_dal()=initialisation_parameters:=Verifier{is_fake=true}letinit_verifier_dal_default()=initialisation_parameters:=Verifier{is_fake=false}letmake_dummy_shards(t:t)~state=Random.set_statestate;letrecloopindexseq=ifindex=t.number_of_shardsthenseqelseletshare=Array.init(t.shard_length+1+Random.int100)(fun_->Scalar.(random~state()))inloop(index+1)(Seq.cons{index;share}seq)inloop0Seq.emptyletpolynomials_equal=Poly.equalletpage_proof_equal=Proof.equalletalter_page_proof(proof:page_proof)=Proof.alter_proofproofletalter_shard_proof(proof:shard_proof)=Proof.alter_proofproofletalter_commitment_proof(proof:commitment_proof)=Commitment_proof.alter_proofproofletminimum_number_of_shards_to_reconstruct_slot(t:t)=t.number_of_shards/t.redundancy_factorletselect_fft_domain=FFT.select_fft_domainletprecomputation_equal=Kate_amortized.preprocess_equalletdummy_commitment~state()=Commitment.random~state()letdummy_page_proof~state()=Proof.random~state()letdummy_shard_proof~state()=Proof.random~state()letmake_dummy_shard~state~index~length={index;share=Array.initlength(fun_->Scalar.(random~state()))}letnumber_of_pagest=t.pages_per_slotletshard_lengtht=t.shard_lengthletdummy_polynomial~state~degree=letrecnonzero()=letres=Scalar.random~state()
inifScalar.is_zeroresthennonzero()elseresinPoly.init(degree+1)(funi->ifi=degreethennonzero()elseScalar.random~state())letsrs_size_g1t=Srs_g1.sizet.kate_amortized.srs_g1letencoded_share_size=encoded_share_sizeletensure_validity_without_srs{redundancy_factor;slot_size;page_size;number_of_shards;_}=Parameters_check.ensure_validity_without_srs~redundancy_factor~slot_size~page_size~number_of_shardsletensure_validity{redundancy_factor;slot_size;page_size;number_of_shards}=letmode,is_fake=match!initialisation_parameterswith|Verifier{is_fake}->(`Verifier,is_fake)|Prover{is_fake;_}->(`Prover,is_fake)inmatchensure_validity~is_fake~mode~slot_size~page_size~redundancy_factor~number_of_shardswith|Ok_->true|_->falseletslot_as_polynomial_length=Parameters_check.slot_as_polynomial_lengthendmoduleConfig=structtypet=Dal_config.t={activated:bool;use_mock_srs_for_testing:bool;bootstrap_peers:stringlist;}letencoding:tData_encoding.t=Dal_config.encodingletdefault=Dal_config.defaultletinit_verifier_daldal_config=letopenResult_syntaxinifdal_config.activatedthenletinitialisation_parameters=Verifier{is_fake=dal_config.use_mock_srs_for_testing}inload_parametersinitialisation_parameterselsereturn_unitletinit_prover_dal~find_srs_files?(srs_size_log2=21)dal_config=letopenLwt_result_syntaxinifdal_config.activatedthenlet*initialisation_parameters=ifdal_config.use_mock_srs_for_testingthenreturn(Internal_for_tests.parameters_initialisation())elselet*?srs_g1_path,srs_g2_path=find_srs_files()inlet*srs_g1,srs_g2=initialisation_parameters_from_files~srs_g1_path~srs_g2_path~srs_size:(1lslsrs_size_log2)inreturn(Prover{is_fake=false;srs_g1;srs_g2})inLwt.return(load_parametersinitialisation_parameters)elsereturn_unitend