Skip to content

Commit

Permalink
Merge pull request #63 from 0xPolygonHermez/develop
Browse files Browse the repository at this point in the history
Merging Etrog changes into main
  • Loading branch information
jbaylina authored Jan 17, 2024
2 parents e60ceae + 633a845 commit ed4bcd9
Show file tree
Hide file tree
Showing 22 changed files with 441 additions and 213 deletions.
2 changes: 1 addition & 1 deletion circuits.bn128/gl.circom
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ template GLCMulAdd() {
// = a0b2 + a2b0 + a2b2 + a1b1 -> 4*ina.maxNum * inb.maxNum

//Since all the elements of the array takes the same tag value, we set as the max value 5*ina.maxNum * inb.maxNum
var maxQuotientBits = log2((5*ina.maxNum * inb.maxNum - 1) \ p) + 1;
var maxQuotientBits = log2((5*ina.maxNum * inb.maxNum + inc.maxNum - 1) \ p) + 1;

signal k[3];

Expand Down
22 changes: 11 additions & 11 deletions circuits.bn128/stark_verifier.circom.ejs
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,7 @@ template GLC3() {
*/
template Transcript() {

signal input {maxNum} publics[<%- pil.publics.length %>];
signal input {maxNum} publics[<%- starkInfo.nPublics %>];
signal input rootC;
signal input root1;
signal input root2;
Expand All @@ -300,7 +300,7 @@ template Transcript() {
<%
const transcript = new Transcript();
transcript.put("rootC");
transcript.put("publics", pil.publics.length);
transcript.put("publics", starkInfo.nPublics);
transcript.put("root1");
transcript.getField("challenges[0]", 3);
transcript.getField("challenges[1]", 3);
Expand Down Expand Up @@ -385,7 +385,7 @@ template parallel VerifyFRI(prevStepBits, currStepBits, nextStepBits, e0, e1) {

signal {maxNum} coefs[1 << step][3] <== FFT(step, 1)(s_vals_curr);
signal {maxNum} evalXprime[3] <== GLCMul()(s_specialX, s_X);
signal evalPol[3] <== EvalPol(1 << step)(coefs, evalXprime);
signal {maxNum} evalPol[3] <== EvalPol(1 << step)(coefs, evalXprime);

signal s_keys_lowValues[nextStep];
for(var i = 0; i < nextStep; i++) { s_keys_lowValues[i] <== ys[i + nextStepBits]; }
Expand All @@ -412,7 +412,7 @@ template parallel VerifyEvaluations() {
signal input {maxNum} challenges4[3];
signal input {maxNum} challenges7[3];
signal input {maxNum} evals[<%- starkInfo.evMap.length %>][3];
signal input {maxNum} publics[<%- pil.publics.length %>];
signal input {maxNum} publics[<%- starkInfo.nPublics %>];
signal input enable;

var q = 0xFFFFFFFFFFFFFFFF;
Expand Down Expand Up @@ -639,7 +639,7 @@ template parallel VerifyFinalPol() {
}

template StarkVerifier() {
signal input publics[<%- pil.publics.length %>]; // constant polynomials
signal input publics[<%- starkInfo.nPublics %>]; // constant polynomials
signal input root1; // Merkle tree root of the evaluations of all trace polynomials
signal input root2; // Merkle tree root of the evaluations of polynomials h1 and h2 used for the plookup
signal input root3; // Merkle tree root of the evaluations of the grand product polynomials (Z)
Expand Down Expand Up @@ -704,7 +704,7 @@ template StarkVerifier() {
enable <== 1;
<% } -%>

signal {maxNum} publicsGL[<%- pil.publics.length %>];
signal {maxNum} publicsGL[<%- starkInfo.nPublics %>];
for(var i = 0; i < <%- starkInfo.nPublics %>; i++){
publicsGL[i] <== LessThan64Bits()(publics[i]);
}
Expand Down Expand Up @@ -950,7 +950,7 @@ template Main() {
signal input proverAddr;
signal output publicsHash;
signal input publics[<%- pil.publics.length %>];
signal input publics[<%- starkInfo.nPublics %>];
signal input root1;
signal input root2;
signal input root3;
Expand Down Expand Up @@ -1035,10 +1035,10 @@ template Main() {
// Calculate Publics Hash
//////
component publicsHasher = Sha256(<%- 160 + 64*pil.publics.length %>);
component publicsHasher = Sha256(<%- 160 + 64*starkInfo.nPublics %>);
component n2bProverAddr = Num2Bits(160);
component n2bPublics[<%- pil.publics.length %> ];
component cmpPublics[<%- pil.publics.length %> ];
component n2bPublics[<%- starkInfo.nPublics %> ];
component cmpPublics[<%- starkInfo.nPublics %> ];
n2bProverAddr.in <== proverAddr;
for (var i=0; i<160; i++) {
Expand All @@ -1047,7 +1047,7 @@ template Main() {
var offset = 160;
for (var i=0; i<<%- pil.publics.length %>; i++) {
for (var i=0; i<<%- starkInfo.nPublics %>; i++) {
n2bPublics[i] = Num2Bits(64);
cmpPublics[i] = CompConstant64(0xFFFFFFFF00000000);
n2bPublics[i].in <== publics[i];
Expand Down
8 changes: 8 additions & 0 deletions circuits.gl/mux1.circom
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,12 @@ template MultiMux1(n) {
out[i] <== (c[1][i] - c[0][i])*s + c[0][i];

}
}

// 2-to-1 multiplexer over scalar signals: out = c[s] for s in {0, 1}.
// Scalar counterpart of MultiMux1 above. Note: s is NOT constrained to be
// binary here — the caller must enforce s ∈ {0, 1} for out to equal c[s].
template Mux1() {
signal input c[2]; // Constants
signal input s; // Selector
signal output out;

out <== (c[1] - c[0])*s + c[0];
}
8 changes: 4 additions & 4 deletions circuits.gl/stark_verifier.circom.ejs
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ class Transcript {
*/
template Transcript() {

signal input publics[<%- pil.publics.length %>];
signal input publics[<%- starkInfo.nPublics %>];
signal input rootC[4];
signal input root1[4];
signal input root2[4];
Expand All @@ -272,7 +272,7 @@ template Transcript() {
<%
const transcript = new Transcript();
transcript.put("rootC", 4);
transcript.put("publics", pil.publics.length);
transcript.put("publics", starkInfo.nPublics);
transcript.put("root1", 4);
transcript.getField("challenges[0]", 3);
transcript.getField("challenges[1]", 3);
Expand Down Expand Up @@ -351,7 +351,7 @@ template parallel VerifyEvaluations() {
signal input challenges4[3];
signal input challenges7[3];
signal input evals[<%- starkInfo.evMap.length %>][3];
signal input publics[<%- pil.publics.length %>];
signal input publics[<%- starkInfo.nPublics %>];
signal input enable;

// zMul stores all the powers of z (which is stored in challenge7) up to nBits, i.e, [z, z^2, ..., z^nBits]
Expand Down Expand Up @@ -530,7 +530,7 @@ template parallel VerifyFinalPol() {
}
}
template StarkVerifier() {
signal input publics[<%- pil.publics.length %>]; // constant polynomials
signal input publics[<%- starkInfo.nPublics %>]; // constant polynomials
signal input root1[4]; // Merkle tree root of the evaluations of all trace polynomials
signal input root2[4]; // Merkle tree root of the evaluations of polynomials h1 and h2 used for the plookup
signal input root3[4]; // Merkle tree root of the evaluations of the grand product polynomials (Z)
Expand Down
14 changes: 8 additions & 6 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
const FGL = require("./src/f3g");
const F3g = require("./src/helpers/f3g");


module.exports.FGL = new FGL();
module.exports.starkSetup = require("./src/stark_setup.js");
module.exports.starkGen = require("./src/stark_gen.js");
module.exports.starkVerify = require("./src/stark_verify.js");
module.exports.r1cs2plonk = require("./src/r1cs2plonk");
module.exports.F3g = new F3g();
module.exports.pil2circom = require("./src/pil2circom.js");
module.exports.starkSetup = require("./src/stark/stark_setup.js");
module.exports.starkGen = require("./src/stark/stark_gen.js");
module.exports.starkVerify = require("./src/stark/stark_verify.js");
module.exports.r1cs2plonk = require("./src/r1cs2plonk");
module.exports.starkInfo = require("./src/stark/stark_info");
52 changes: 52 additions & 0 deletions src/compressor/compressor_exec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
const { newCommitPolsArray } = require("pilcom");
const { WitnessCalculatorBuilder } = require("circom_runtime");
const fs = require("fs");

/**
 * Computes the committed polynomials of the Compressor circuit from a witness.
 *
 * @param {Object} F       field object providing add/mul (Goldilocks)
 * @param {Object} pil     compiled PIL, passed to newCommitPolsArray
 * @param {Uint8Array} wasm  witness-calculator wasm binary
 * @param {Object} input   circuit input signals
 * @param {Object} exec    exec data: { nAdds, nSMap, addsBuff, sMapBuff }
 *                         (as produced by readExecFile / compressorSetup)
 * @returns {Promise<Object>} cmPols with Compressor.a columns filled in
 */
module.exports.compressorExec = async function compressorExec(F, pil, wasm, input, exec) {
    const cmPols = newCommitPolsArray(pil);

    const nCols = cmPols.Compressor.a.length;

    const { nAdds, nSMap, addsBuff, sMapBuff } = exec;

    const wc = await WitnessCalculatorBuilder(wasm);
    const w = await wc.calculateWitness(input);

    // Extend the witness with the linear combinations recorded at setup time:
    // each addsBuff row is [idxA, idxB, kA, kB] and appends w[idxA]*kA + w[idxB]*kB.
    for (let i = 0; i < nAdds; i++) {
        w.push(F.add(
            F.mul(w[addsBuff[i * 4]], addsBuff[i * 4 + 2]),
            F.mul(w[addsBuff[i * 4 + 1]], addsBuff[i * 4 + 3])
        ));
    }

    // Scatter witness values into the committed columns. sMapBuff elements are
    // BigInt (BigUint64Array), so compare strictly against 0n; entry 0 means
    // "no signal mapped" and the cell is set to zero.
    for (let i = 0; i < nSMap; i++) {
        for (let j = 0; j < nCols; j++) {
            const sig = sMapBuff[nCols * i + j];
            cmPols.Compressor.a[j][i] = sig !== 0n ? w[sig] : 0n;
        }
    }

    return cmPols;
}


/**
 * Reads a compressor exec file produced by compressorSetup.
 * Layout (uint64 little-endian): [nAdds, nSMap, adds rows (nAdds*4), sMap (nSMap*nCols)].
 *
 * @param {string} execFile  path to the exec file
 * @param {number} nCols     number of Compressor.a columns (width of the sMap)
 * @returns {Promise<{nAdds:number,nSMap:number,addsBuff:BigUint64Array,sMapBuff:BigUint64Array}>}
 * @throws {Error} if the file is truncated (a read returns fewer bytes than expected)
 */
module.exports.readExecFile = async function readExecFile(execFile, nCols) {

    const fd = await fs.promises.open(execFile, "r");
    try {
        const buffH = new BigUint64Array(2);
        await readExact(fd, buffH, 2 * 8);
        const nAdds = Number(buffH[0]);
        const nSMap = Number(buffH[1]);

        const addsBuff = new BigUint64Array(nAdds * 4);
        await readExact(fd, addsBuff, nAdds * 4 * 8);

        const sMapBuff = new BigUint64Array(nSMap * nCols);
        await readExact(fd, sMapBuff, nSMap * nCols * 8);

        return { nAdds, nSMap, addsBuff, sMapBuff };
    } finally {
        // Close the descriptor even when a read throws (the original leaked it).
        await fd.close();
    }
}

// Fills `buff` with exactly `nBytes` from the current file position, or throws.
// Guards against silently treating a truncated exec file as all-zero data.
async function readExact(fd, buff, nBytes) {
    const { bytesRead } = await fd.read(buff, 0, nBytes);
    if (bytesRead !== nBytes) {
        throw new Error(`Truncated exec file: expected ${nBytes} bytes, got ${bytesRead}`);
    }
}
46 changes: 46 additions & 0 deletions src/compressor/compressor_setup.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
const {readR1cs} = require("r1csfile");
const plonkSetupC18 = require("./compressor18_setup.js");
const plonkSetupC12 = require("./compressor12_setup.js");

/**
 * Runs the plonk setup for the compressor circuit over an r1cs file.
 *
 * @param {Object} F         field object forwarded to the r1cs reader and setup
 * @param {string} r1csFile  path to the .r1cs file
 * @param {number} cols      compressor width; only 12 or 18 are supported
 * @param {Object} [options] extra options forwarded to the plonk setup
 * @returns {Promise<{exec:BigUint64Array,pilStr:string,constPols:Object}>}
 * @throws {Error} when cols is neither 12 nor 18
 */
module.exports.compressorSetup = async function compressorSetup(F, r1csFile, cols, options = {}) {
    const r1cs = await readR1cs(r1csFile, { F, logger: console });

    // Only the 12- and 18-column compressor layouts exist.
    if (cols !== 12 && cols !== 18) throw new Error("Invalid number of cols");

    const runSetup = cols === 12 ? plonkSetupC12 : plonkSetupC18;
    const res = await runSetup(F, r1cs, options);

    const exec = await writeExecFile(res.plonkAdditions, res.sMap);

    return { exec, pilStr: res.pilStr, constPols: res.constPols };
}



/**
 * Packs the plonk additions and the signal map into a single BigUint64Array.
 * Layout: [nAdds, nSMap, adds rows (idxA, idxB, kA, kB), sMap stored row-major].
 *
 * @param {Array<Array>} adds  plonk additions; each row is [idxA, idxB, kA, kB]
 * @param {Array<Array>} sMap  signal map indexed sMap[col][row] (nCols x nSMap)
 * @returns {Promise<BigUint64Array>} the packed exec buffer
 */
async function writeExecFile(adds, sMap) {

    const nCols = sMap.length;
    const nSMap = nCols > 0 ? sMap[0].length : 0; // tolerate an empty map (original crashed on sMap[0])

    const size = 2 + adds.length * 4 + nCols * nSMap;
    const buff = new BigUint64Array(size);

    buff[0] = BigInt(adds.length);
    buff[1] = BigInt(nSMap);

    for (let i = 0; i < adds.length; i++) {
        buff[2 + i * 4] = BigInt(adds[i][0]);
        buff[2 + i * 4 + 1] = BigInt(adds[i][1]);
        // Coerce the coefficients too: assigning a plain Number into a
        // BigUint64Array throws a TypeError (the original only wrapped [0] and [1]).
        buff[2 + i * 4 + 2] = BigInt(adds[i][2]);
        buff[2 + i * 4 + 3] = BigInt(adds[i][3]);
    }

    // Row-major: all columns of sMap row i are contiguous after the adds block.
    for (let i = 0; i < nSMap; i++) {
        for (let c = 0; c < nCols; c++) {
            buff[2 + adds.length * 4 + nCols * i + c] = BigInt(sMap[c][i]);
        }
    }

    return buff;
}
55 changes: 5 additions & 50 deletions src/compressor/main_compressor_exec.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
const fs = require("fs");
const version = require("../../package").version;

const { compile, newCommitPolsArray } = require("pilcom");
const { compile } = require("pilcom");
const F3g = require("../helpers/f3g.js");
const { WitnessCalculatorBuilder } = require("circom_runtime");
const { compressorExec, readExecFile } = require("./compressor_exec");
const JSONbig = require('json-bigint')({ useNativeBigInt: true, alwaysParseAsBig: true });


Expand Down Expand Up @@ -32,39 +32,16 @@ async function run() {

const pil = await compile(F, pilFile, null, pilConfig);

const cmPols = newCommitPolsArray(pil);

const N = cmPols.Compressor.a[0].length;

const nCols =cmPols.Compressor.a.length;

const { nAdds, nSMap, addsBuff, sMapBuff } = await readExecFile(execFile, nCols);
const exec = await readExecFile(execFile, pil.references["Compressor.a"].len);

const fd =await fs.promises.open(wasmFile, "r");
const st =await fd.stat();
const wasm = new Uint8Array(st.size);
await fd.read(wasm, 0, st.size);
await fd.close();



const wc = await WitnessCalculatorBuilder(wasm);
const w = await wc.calculateWitness(input);

for (let i=0; i<nAdds; i++) {
w.push( F.add( F.mul( w[addsBuff[i*4]], addsBuff[i*4 + 2]), F.mul( w[addsBuff[i*4+1]], addsBuff[i*4+3] )));
}

for (let i=0; i<nSMap; i++) {
for (let j=0; j<nCols; j++) {
if (sMapBuff[nCols*i+j] != 0) {
cmPols.Compressor.a[j][i] = w[sMapBuff[nCols*i+j]];
} else {
cmPols.Compressor.a[j][i] = 0n;
}
}
}

const cmPols = await compressorExec(F, pil, wasm, input, exec);

await cmPols.saveToFile(commitFile);

Expand All @@ -78,26 +55,4 @@ run().then(()=> {
console.log(err.message);
console.log(err.stack);
process.exit(1);
});


async function readExecFile(execFile, nCols) {

const fd =await fs.promises.open(execFile, "r");
const buffH = new BigUint64Array(2);
await fd.read(buffH, 0, 2*8);
const nAdds= Number(buffH[0]);
const nSMap= Number(buffH[1]);


const addsBuff = new BigUint64Array(nAdds*4);
await fd.read(addsBuff, 0, nAdds*4*8);

const sMapBuff = new BigUint64Array(nSMap*nCols);
await fd.read(sMapBuff, 0, nSMap*nCols*8);

await fd.close();

return { nAdds, nSMap, addsBuff, sMapBuff };

}
});
Loading

0 comments on commit ed4bcd9

Please sign in to comment.