Fixes and tests passed
@@ -377,6 +377,8 @@ function buildFunction(name, paramValues) {
 const oldUniqueNames = ctx.uniqueNames;
 const oldFileName = ctx.fileName;
 const oldFilePath = ctx.oldFilePath;
+const oldReturnSizes = ctx.returnSizes;
+const oldReturnValue = ctx.returnValue;


 ctx.scopes = [{}];
@@ -453,6 +455,8 @@ function buildFunction(name, paramValues) {
 ctx.uniqueNames = oldUniqueNames;
 ctx.fileName = oldFileName;
 ctx.filePath = oldFilePath;
+ctx.returnSizes = oldReturnSizes;
+ctx.returnValue = oldReturnValue;

 ctx.definedFunctions[h] = res;

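Note: buildFunction now saves and restores ctx.returnSizes and ctx.returnValue around the body it generates, in the same way it already did for uniqueNames, fileName and filePath. A minimal sketch of why the restore matters (hypothetical helper, not part of this commit): without it, building a nested function would leave the callee's return metadata in ctx and corrupt the caller's own return handling.

    // Hypothetical illustration of the save/restore pattern used above.
    function withSavedReturnState(ctx, body) {
        const oldReturnSizes = ctx.returnSizes;
        const oldReturnValue = ctx.returnValue;
        try {
            return body();                    // may itself build functions and overwrite ctx.return*
        } finally {
            ctx.returnSizes = oldReturnSizes; // restore, exactly as the added lines do explicitly
            ctx.returnValue = oldReturnValue;
        }
    }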
src/c_gen.js (29 changed lines)
@@ -470,9 +470,9 @@ function genVariable(ctx, ast) {
 const resRef = newRef(ctx, "BIGINT", "_v", null, v.sizes.slice(l));
 const res = ctx.refs[resRef];
 res.used = true;
-ctx.codeHeader += `PBigInt ${res};\n`;
+ctx.codeHeader += `PBigInt ${res.label};\n`;
 ctx.code += `${res.label} = ${v.label} + ${offset.label};\n`;
-return res;
+return resRef;
 } else {
 // return newSubRef(ctx, ast.name, ast.selectors);
 return newRef(ctx, "BIGINT", "_v", v.value.slice(offset.value[0], offset.value[0] + v.sizes[l]),v.sizes.slice(l));
@@ -574,6 +574,9 @@ function genGetSignalSizes(ctx, cIdxRef, label) {

 function genSetSignal(ctx, cIdxRef, sIdxRef, valueRef) {
 const v = ctx.refs[valueRef];
+if (!utils.isDefined(v)) {
+console.log("BREAK!!!");
+}
 if (!v.used) {
 instantiateRef(ctx, valueRef, v.value);
 }
@@ -742,8 +745,8 @@ function genConstraint(ctx, ast) {
 const b = ctx.refs[bRef];
 if (ctx.error) return;
 const strErr = ast.fileName + ":" + ast.first_line + ":" + ast.first_column;
-instantiateRef(ctx, aRef);
-instantiateRef(ctx, bRef);
+instantiateRef(ctx, aRef, a.value);
+instantiateRef(ctx, bRef, b.value);
 ctx.code += `ctx->checkConstraint(${a.label}, ${b.label}, "${strErr}");`;
 }

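Note: genConstraint now forces both operands to be instantiated with their known values and tags the emitted check with the source position taken from the AST. Roughly, the generated C++ line looks like this (illustrative labels and location, built with the same template as above):

    // Illustrative only: reproduces the string built by genConstraint above.
    const a = { label: "_tmp_7" };
    const b = { label: "_tmp_8" };
    const ast = { fileName: "circuit.circom", first_line: 12, first_column: 4 };
    const strErr = ast.fileName + ":" + ast.first_line + ":" + ast.first_column;
    console.log(`ctx->checkConstraint(${a.label}, ${b.label}, "${strErr}");`);
    // -> ctx->checkConstraint(_tmp_7, _tmp_8, "circuit.circom:12:4");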
@@ -792,6 +795,13 @@ function genArray(ctx, ast) {


 function genFunctionCall(ctx, ast) {
+if (ast.name == "log") {
+const vRef = gen(ctx, ast.params[0]);
+const val = ctx.refs[vRef];
+instantiateRef(ctx, vRef, val.value);
+ctx.code+=`ctx->log(${val.label});`;
+return vRef;
+}
 const params = [];
 for (let i=0; i<ast.params.length; i++) {
 const pRef = gen(ctx, ast.params[i]);
@@ -913,9 +923,7 @@ function genLoop(ctx, ast) {
 condVar = ctx.refs[condVarRef];
 instantiateRef(ctx, condVarRef);

-ctx.code =
-oldCode +
-ctx.code +
+ctx.code +=
 `${condVar.label} = ctx->field->isTrue(${cond2.label});\n` +
 `while (${condVar.label}) {\n`;
 } else {
@@ -972,6 +980,7 @@ function genIf(ctx, ast) {
 }

 ctx.code += "}\n";
+leaveConditionalCode(ctx);

 } else {
 if (!utils.isDefined(cond.value)) return ctx.throwError(ast, "condition value not assigned");
@@ -990,7 +999,7 @@ function genReturn(ctx, ast) {
 const vRef = gen(ctx, ast.value);
 const v= ctx.refs[vRef];
 if (ctx.returnSizes) {
-if (!utils.sizesEqual(v.sizes, ctx.returnSizes)) return ctx.throwError(ast, "Diferent return sizes");
+if (!utils.sameSizes(v.sizes, ctx.returnSizes)) return ctx.throwError(ast, "Diferent return sizes");
 } else {
 ctx.returnSizes = v.sizes;
 }
@@ -1127,6 +1136,8 @@ function genTerCon(ctx, ast) {
 if (ctx.error) return;
 const then = ctx.refs[thenRef];
+
+instantiateRef(ctx, thenRef, then.value);

 ctx.code = oldCode + utils.ident(ctx.code);

 ctx.code += `${rLabel} = ${then.label};\n`;
@@ -1139,6 +1150,8 @@ function genTerCon(ctx, ast) {
 if (ctx.error) return;
 const els = ctx.refs[elseRef];
+
+instantiateRef(ctx, elseRef, els.value);

 ctx.code = oldCode + utils.ident(ctx.code);

 ctx.code += `${rLabel} = ${els.label};\n`;
@@ -10,14 +10,16 @@ const exec = util.promisify(require("child_process").exec);

 const stringifyBigInts = require("./utils").stringifyBigInts;
 const unstringifyBigInts = require("./utils").unstringifyBigInts;
-const bigInt = require("snarkjs").bigInt;
+const bigInt = require("big-integer");
+const utils = require("./utils");
+const loadR1cs = require("./r1csfile").loadR1cs;
+const ZqField = require("fflib").ZqField;

 module.exports = c_tester;


-async function c_tester(circomFile, mainComponent, _options) {
+async function c_tester(circomFile, _options) {
 tmp.setGracefulCleanup();
-mainComponent = mainComponent || "main";

 const dir = await tmp.dir({prefix: "circom_", unsafeCleanup: true });

@@ -26,31 +28,34 @@ async function c_tester(circomFile, mainComponent, _options) {

 options.cSourceWriteStream = fs.createWriteStream(path.join(dir.path, baseName + ".cpp"));
 options.symWriteStream = fs.createWriteStream(path.join(dir.path, baseName + ".sym"));
-options.mainComponent = mainComponent;
+options.r1csFileName = path.join(dir.path, baseName + ".r1cs");
 await compiler(circomFile, options);

 const cdir = path.join(__dirname, "..", "c");
+await exec("cp" +
+` ${path.join(dir.path, baseName + ".cpp")}` +
+" /tmp/circuit.cpp"
+);
 await exec("g++" +
-` ${path.join(dir.path, baseName + ".cpp")} ` +
 ` ${path.join(cdir, "main.cpp")}` +
 ` ${path.join(cdir, "calcwit.cpp")}` +
 ` ${path.join(cdir, "utils.cpp")}` +
 ` ${path.join(cdir, "zqfield.cpp")}` +
+` ${path.join(dir.path, baseName + ".cpp")} ` +
 ` -o ${path.join(dir.path, baseName)}` +
 ` -I ${cdir}` +
 " -lgmp -std=c++11 -DSANITY_CHECK"
 );

 // console.log(dir.path);
-return new CTester(dir, baseName, mainComponent);
+return new CTester(dir, baseName);
 }

 class CTester {

-constructor(dir, baseName, mainComponent) {
+constructor(dir, baseName) {
 this.dir=dir;
 this.baseName = baseName;
-this.mainComponent = mainComponent;
 }

 async release() {
@@ -74,7 +79,8 @@ class CTester {
 return res;
 }

-async _loadSymbols() {
+async loadSymbols() {
+if (this.symbols) return;
 this.symbols = {};
 const symsStr = await fs.promises.readFile(
 path.join(this.dir.path, this.baseName + ".sym"),
@@ -91,9 +97,18 @@ class CTester {
 }
 }

+async loadConstraints() {
+const self = this;
+if (this.constraints) return;
+const r1cs = await loadR1cs(path.join(this.dir.path, this.baseName + ".r1cs"),true, false);
+self.field = new ZqField(r1cs.prime);
+self.nWires = r1cs.nWires;
+self.constraints = r1cs.constraints;
+}
+
 async assertOut(actualOut, expectedOut) {
 const self = this;
-if (!self.symbols) await self._loadSymbols();
+if (!self.symbols) await self.loadSymbols();

 checkObject("main", expectedOut);

@@ -118,6 +133,51 @@ class CTester {
 }
 }

+async getDecoratedOutput(witness) {
+const self = this;
+const lines = [];
+if (!self.symbols) await self.loadSymbols();
+for (let n in self.symbols) {
+let v;
+if (utils.isDefined(witness[self.symbols[n].idxWit])) {
+v = witness[self.symbols[n].idxWit].toString();
+} else {
+v = "undefined";
+}
+lines.push(`${n} --> ${v}`);
+}
+return lines.join("\n");
+}
+
+async checkConstraints(witness) {
+const self = this;
+if (!self.constraints) await self.loadConstraints();
+for (let i=0; i<self.constraints.length; i++) {
+checkConstraint(self.constraints[i]);
+}
+
+function checkConstraint(constraint) {
+const F = self.field;
+const a = evalLC(constraint.a);
+const b = evalLC(constraint.b);
+const c = evalLC(constraint.c);
+
+assert (F.sub(F.mul(a,b), c).isZero(), "Constraint doesn't match");
+}
+
+function evalLC(lc) {
+const F = self.field;
+let v = F.zero;
+for (let w in lc) {
+v = F.add(
+v,
+F.mul( lc[w], witness[w] )
+);
+}
+return v;
+}
+}
+
 }


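Note: with this commit the tester no longer takes a mainComponent argument and gains constraint checking against the generated .r1cs file. A minimal usage sketch (the require path and the witness array are assumptions, not shown in this diff; witness values are expected to be big-integer instances indexed by wire id):

    const c_tester = require("./src/c_tester"); // path assumed

    async function test(witness) {
        const tester = await c_tester("circuit.circom");        // compiles the circuit and builds the C witness generator
        await tester.checkConstraints(witness);                  // asserts a*b - c == 0 for every loaded constraint
        console.log(await tester.getDecoratedOutput(witness));   // "main.signal --> value" lines
        await tester.release();
    }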
@@ -21,7 +21,6 @@ const fs = require("fs");
 const path = require("path");
 const bigInt = require("big-integer");
 const __P__ = new bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
 const __MASK__ = new bigInt(2).pow(253).minus(1);
-const sONE = 0;
 const assert = require("assert");
 const buildC = require("./c_build");
@@ -30,6 +29,7 @@ const lc = require("./lcalgebra");
 const Ctx = require("./ctx");
 const ZqField = require("./zqfield");
 const utils = require("./utils");
+const buildR1cs = require("./r1csfile").buildR1cs;

 module.exports = compile;

@@ -85,6 +85,7 @@ async function compile(srcFile, options) {
 // Repeat while reductions are performed
 let oldNConstrains = -1;
 while (ctx.constraints.length != oldNConstrains) {
+console.log("Reducing constraints: "+ctx.constraints.length);
 oldNConstrains = ctx.constraints.length;
 reduceConstrains(ctx);
 }
@@ -105,8 +106,8 @@ async function compile(srcFile, options) {
 // const mainCode = gen(ctx,ast);
 if (ctx.error) throw(ctx.error);

-if (options.r1csWriteStream) {
-buildR1cs(ctx, options.r1csWriteStream);
+if (options.r1csFileName) {
+await buildR1cs(ctx, options.r1csFileName);
 }

 if (options.symWriteStream) {
@@ -505,63 +506,9 @@ function buildConstraints(ctx) {
 return res;
 }

-function buildR1cs(ctx, strm) {
-
-strm.write(Buffer.from([0x72,0x31,0x63,0x73]));
-writeU32(1);
-writeU32(4);
-writeU32(1 + ctx.totals.output + ctx.totals.pubInput + ctx.totals.prvInput + ctx.totals.internal);
-writeU32(ctx.totals.output);
-writeU32(ctx.totals.pubInput);
-writeU32(ctx.totals.prvInput);
-writeU32(ctx.constraints.length);
-
-for (let i=0; i<ctx.constraints.length; i++) {
-if ((ctx.verbose)&&(i%10000 == 0)) console.log("writing constraint: ", i);
-writeConstraint(ctx.constraints[i]);
-}
-
-function writeU32(v) {
-const b = Buffer.allocUnsafe(4);
-b.writeInt32LE(v);
-strm.write(b);
-}
-
-function writeConstraint(c) {
-writeLC(c.a);
-writeLC(c.b);
-writeLC(lc.negate(c.c));
-}
-
-function writeLC(lc) {
-const idxs = Object.keys(lc.values);
-writeU32(idxs.length);
-for (let s in lc.values) {
-let lSignal = ctx.signals[s];
-
-while (lSignal.e >=0 ) lSignal = ctx.signals[lSignal.e];
-
-writeU32(lSignal.id);
-writeBigInt(lc.values[s]);
-}
-}
-
-function writeBigInt(n) {
-const bytes = [];
-let r = bigInt(n);
-while (r.greater(bigInt.zero)) {
-bytes.push(r.and(bigInt("255")).toJSNumber());
-r = r.shiftRight(8);
-}
-assert(bytes.length<=32);
-assert(bytes.length>0);
-strm.write( Buffer.from([bytes.length, ...bytes ]));
-}
-}
-
 function buildSyms(ctx, strm) {


 let nSyms;

 addSymbolsComponent(ctx.mainComponent + ".", ctx.getComponentIdx(ctx.mainComponent));
@@ -581,6 +528,8 @@ function buildSyms(ctx, strm) {
 let wId = ctx.signals[s].id;
+if (typeof(wId) == "undefined") wId=-1;
 strm.write(`${offset},${wId},${prefix}\n`);
 nSyms ++;
+if ((ctx.verbose)&&(nSyms%10000 == 0)) console.log("Symbols saved: "+nSyms);
 } else {
 addSymbolsComponent(prefix+".", offset);
 }
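Note: each line of the .sym file is written as "<offset>,<wire id>,<qualified signal name>", and the added guard writes -1 as the wire id when a signal has none assigned. A hypothetical one-line helper mirroring the write above:

    // Hypothetical helper, mirrors the strm.write(...) line above.
    function symLine(offset, wId, prefix) {
        if (typeof(wId) == "undefined") wId = -1;  // the guard added in this commit
        return `${offset},${wId},${prefix}\n`;
    }
    // symLine(2, undefined, "main.tmp") === "2,-1,main.tmp\n"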
@@ -188,7 +188,7 @@ module.exports = class Ctx {
 errStr: errStr,
 ast: ast,
 message: errStr,
-errFile: ast.fileName
+errFile: this.fileName
 };
 } else {
 return {
src/exec.js (12 changed lines)
@@ -261,7 +261,13 @@ function getScopeRef(ctx, name, selectors) {
 }

 for (let i=ctx.scopes.length-1; i>=0; i--) {
-if (ctx.scopes[i][name]) return select(ctx.scopes[i][name].value, sels, ctx.scopes[i][name].type);
+if (ctx.scopes[i][name]) {
+if (ctx.scopes[i][name].type == "COMPONENT") {
+return [null, sels, "COMPONENT"];
+} else {
+return select(ctx.scopes[i][name].value, sels, ctx.scopes[i][name].type);
+}
+}
 }
 return [null, [], ""];
 }
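Note: getScopeRef returns a [value, selectors, type] triple; the fix stops it from dereferencing component entries and instead reports the COMPONENT type so callers can treat component references specially. A self-contained toy model of the new branching (hypothetical, simplified scopes; it drops the select() step and is not the exec.js implementation):

    // Toy model of the lookup added above: walk scopes from innermost to outermost.
    function lookup(scopes, name, sels) {
        for (let i = scopes.length - 1; i >= 0; i--) {
            if (scopes[i][name]) {
                if (scopes[i][name].type == "COMPONENT") {
                    return [null, sels, "COMPONENT"];      // components carry no immediate value
                } else {
                    return [scopes[i][name].value, sels, scopes[i][name].type];
                }
            }
        }
        return [null, [], ""];
    }
    // lookup([{ c: { type: "COMPONENT" } }], "c", [0]) -> [null, [0], "COMPONENT"]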
@@ -766,6 +772,7 @@ function execVarAssignement(ctx, ast) {
 } else {
 v = ast.values[0];
 }

 const [num, sels, typ] = getScopeRef(ctx, v.name, v.selectors);
+if (ctx.error) return;

@@ -1265,10 +1272,9 @@ function execConstrain(ctx, ast) {

 if (!lc.isZero(res)) {
 ctx.constraints.push(lc.toQEQ(res));
+if ((ctx.constraints.length % 10000 == 0)&&(ctx.constraints.length>0)) console.log("Constraints: " + ctx.constraints.length);
 }
-
-if ((ctx.constraints.length % 10000 == 0)&&(ctx.constraints.length>0)) console.log("Constraints: " + ctx.constraints.length);

 return res;
 }

@@ -61,6 +61,7 @@ QEQ QEQ ERR ERR
 const bigInt = require("big-integer");
 const __P__ = new bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
 const sONE = 0;
+const utils = require("./utils.js");

 exports.add = add;
 exports.mul = mul;
@@ -439,8 +440,8 @@ function canonize(ctx, a) {
 for (let k in a.values) {
 let s = k;
 while (ctx.signals[s].e>=0) s= ctx.signals[s].e;
-if ((typeof(ctx.signals[s].value) != "undefined")&&(k != sONE)) {
-const v = res.values[k].times(ctx.signals[s].value).mod(__P__);
+if (utils.isDefined(ctx.signals[s].v)&&(k != sONE)) {
+const v = res.values[k].times(ctx.signals[s].v).mod(__P__);
 if (!res.values[sONE]) {
 res.values[sONE]=v;
 } else {
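Note: canonize now reads the substituted signal value from ctx.signals[s].v (instead of .value) and guards it with utils.isDefined. The diff does not show utils.js's definition of isDefined; the assumed behaviour is simply a typeof check:

    // Assumed behaviour of utils.isDefined (not shown in this diff):
    function isDefined(x) {
        return typeof x !== "undefined";
    }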
src/r1csfile.js (new file, 289 lines)
@@ -0,0 +1,289 @@

const fs = require("fs");
const assert = require("assert");
const lc = require("./lcalgebra");
const bigInt = require("big-integer");

module.exports.buildR1cs = buildR1cs;
module.exports.loadR1cs = loadR1cs;

async function loadR1cs(fileName, loadConstraints, loadMap) {
const res = {};
const fd = await fs.promises.open(fileName, "r");

const b = Buffer.allocUnsafe(4);
await fd.read(b, 0, 4, 0);

if (b.toString() != "r1cs") assert(false, "Invalid File format");

let p=4;

let v = await readU32();

if (v>1) assert(false, "Version not supported");

const nSections = await readU32();

let pHeader;
let pConstraints;
let headerSize;
let constraintsSize;
let pMap;
let mapSize;
for (let i=0; i<nSections; i++) {
let ht = await readU32();
let hl = await readU32();
if (ht == 1) {
if (typeof pHeader != "undefined") assert(false, "File has two headder sections");
pHeader = p;
headerSize = hl;
} else if (ht==2) {
if (typeof pConstraints != "undefined") assert(false, "File has two constraints sections");
pConstraints = p;
constraintsSize = hl;
} else if (ht==3) {
pMap = p;
mapSize = hl;
}
p += hl;
}

if (typeof pHeader == "undefined") assert(false, "File has two header");

// Read Header
p = pHeader;
const fieldDefSize = await readU32();
const pFieldDef = p;

const defType = await readU32();
if (defType != 1) if (typeof pConstraints != "undefined") assert(false, "Field type not supported");

res.prime = await readBigInt();

if ( p != pFieldDef + fieldDefSize) assert("Invalid fieldDef size");

const bigIntFormat = await readU32();
if (bigIntFormat != 0) assert(false, "BigInt format not supported");

const idSize = await readU32();
if (idSize != 4) assert(false, "idSize not supported. Mus be 4");

res.nWires = await readU32();
res.nPubOuts = await readU32();
res.nPubIns = await readU32();
res.nPrvIns = await readU32();
res.nLabels = await readU32();
res.nConstraints = await readU32();

if (p != pHeader + headerSize) assert(false, "Invalid header section size");

if (loadConstraints) {
// Read Constraints
p = pConstraints;

res.constraints = [];
for (let i=0; i<res.nConstraints; i++) {
const c = await readConstraint();
res.constraints.push(c);
}
if (p != pConstraints + constraintsSize) assert(false, "Invalid constraints size");
}

// Read Labels

if (loadMap) {
p = pMap;

res.map = [];
for (let i=0; i<res.nLabels; i++) {
const idx = await readU32();
res.map.push(idx);
}
if (p != pMap + mapSize) assert(false, "Invalid Map size");
}

await fd.close();

return res;

async function readU32() {
const b = Buffer.allocUnsafe(4);
await fd.read(b, 0, 4, p);

p+=4;

return b.readInt32LE(0);
}

async function readBigInt() {
const bl = Buffer.allocUnsafe(1);
await fd.read(bl, 0, 1, p);
p++;

const l = bl[0];
const b = Buffer.allocUnsafe(l);
await fd.read(b, 0, l, p);
p += l;

const arr = Uint8Array.from(b);

const arrr = new Array(arr.length);
for (let i=0; i<arr.length; i++) {
arrr[i] = arr[arr.length-1-i];
}

const n = bigInt.fromArray(arrr, 256);

return n;
}

async function readConstraint() {
const c = {};
c.a = await readLC();
c.b = await readLC();
c.c = await readLC();
return c;
}

async function readLC() {
const lc= {};
const nIdx = await readU32();
for (let i=0; i<nIdx; i++) {
const idx = await readU32();
const val = await readBigInt();
lc[idx] = val;
}
return lc;
}
}

async function buildR1cs(ctx, fileName) {

const fd = await fs.promises.open(fileName, "w");


await fd.write("r1cs"); // Magic "r1cs"

let p = 4;
await writeU32(1); // Version
await writeU32(3); // Number of Sections

// Write the header
///////////
await writeU32(1); // Header type
const pHeaderSize = p;
await writeU32(0); // Temporally set to 0 length


// Field Def
const pFieldDefSize = p;
await writeU32(0); // Temporally set to 0 length
await writeU32(1);
await writeBigInt(ctx.field.p);
const fieldDefSize = p - pFieldDefSize - 4;

await writeU32(0); // Variable bigInt format
await writeU32(4); // Id Size

const NWires =
ctx.totals[ctx.stONE] +
ctx.totals[ctx.stOUTPUT] +
ctx.totals[ctx.stPUBINPUT] +
ctx.totals[ctx.stPRVINPUT] +
ctx.totals[ctx.stINTERNAL];

await writeU32(NWires);
await writeU32(ctx.totals[ctx.stOUTPUT]);
await writeU32(ctx.totals[ctx.stPUBINPUT]);
await writeU32(ctx.totals[ctx.stPRVINPUT]);
await writeU32(ctx.signals.length);
await writeU32(ctx.constraints.length);

const headerSize = p - pHeaderSize - 4;

// Write constraints
///////////
await writeU32(2); // Constraints type
const pConstraintsSize = p;
await writeU32(0); // Temporally set to 0 length

for (let i=0; i<ctx.constraints.length; i++) {
if ((ctx.verbose)&&(i%10000 == 0)) {
if (ctx.verbose) console.log("writing constraint: ", i);
await fd.datasync();
}
await writeConstraint(ctx.constraints[i]);
}

const constraintsSize = p - pConstraintsSize - 4;

// Write map
///////////
await writeU32(3); // wires2label type
const pMapSize = p;
await writeU32(0); // Temporally set to 0 length


const arr = new Array(NWires);
for (let i=0; i<ctx.signals.length; i++) {
const outIdx = ctx.signals[i].id;
if (ctx.signals[i].e>=0) continue; // If has an alias, continue..
assert(typeof outIdx != "undefined", `Signal ${i} does not have index`);
if (outIdx>=NWires) continue; // Is a constant or a discarded variable
if (typeof arr[ctx.signals[i].id] == "undefined") {
arr[outIdx] = i;
}
}
for (let i=0; i<arr.length; i++) {
await writeU32(arr[i]);
if ((ctx.verbose)&&(i%100000)) console.log("writing label2wire map: ", i);
}

const mapSize = p - pMapSize -4;

// Write sizes
await writeU32(headerSize, pHeaderSize);
await writeU32(fieldDefSize, pFieldDefSize);
await writeU32(constraintsSize, pConstraintsSize);
await writeU32(mapSize, pMapSize);

await fd.sync();
await fd.close();

async function writeU32(v, pos) {
const b = Buffer.allocUnsafe(4);
b.writeInt32LE(v);

await fd.write(b, 0, 4, pos);

if (typeof(pos) == "undefined") p += 4;
}

async function writeConstraint(c) {
await writeLC(c.a);
await writeLC(c.b);
await writeLC(lc.negate(c.c));
}

async function writeLC(lc) {
const idxs = Object.keys(lc.values);
await writeU32(idxs.length);
for (let s in lc.values) {
let lSignal = ctx.signals[s];

while (lSignal.e >=0 ) lSignal = ctx.signals[lSignal.e];

await writeU32(lSignal.id);
await writeBigInt(lc.values[s]);
}
}

async function writeBigInt(n) {

const bytes = bigInt(n).toArray(256).value.reverse();

await fd.write(Buffer.from([bytes.length, ...bytes ]));

p += bytes.length+1;
}
}
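Note: as written by buildR1cs above, the binary layout is: the 4-byte magic "r1cs", a u32 version (1) and a u32 section count, followed by sections each introduced by a u32 type and a u32 byte length (type 1 = header with field definition and totals, type 2 = constraints, type 3 = wire-to-label map); big integers are stored as a length byte followed by little-endian bytes. A minimal read-back sketch using the exported loadR1cs (the require path is an assumption):

    const loadR1cs = require("./src/r1csfile").loadR1cs; // path assumed

    async function inspect(fileName) {
        // second argument: parse the constraints section; third: skip the label map
        const r1cs = await loadR1cs(fileName, true, false);
        console.log("prime:       " + r1cs.prime.toString());
        console.log("wires:       " + r1cs.nWires);
        console.log("constraints: " + r1cs.nConstraints);
        // each constraint is {a, b, c}; each of a/b/c maps wire id -> big-integer coefficient
        return r1cs;
    }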
@@ -99,7 +99,7 @@ function unstringifyBigInts(o) {
 }
 return res;
 } else {
-return o;
+return bigInt(o);
 }
 }

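Note: unstringifyBigInts now converts leaf values back into big-integer instances instead of returning them untouched, so numbers survive a JSON round trip. A small sketch of the intended behaviour (assuming the usual recursive stringify/unstringify pair exported by src/utils.js):

    const bigInt = require("big-integer");
    const unstringifyBigInts = require("./src/utils").unstringifyBigInts; // path assumed

    const parsed = unstringifyBigInts({ a: "7", b: ["11"] });
    // leaves are now big-integer values, e.g. parsed.a.eq(bigInt(7)) === true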
@@ -3,6 +3,8 @@ const assert = require("assert");

 module.exports = class ZqField {
 constructor(p) {
+this.one = bigInt.one;
+this.zero = bigInt.zero;
 this.p = p;
 this.bitLength = p.bitLength();
 this.mask = bigInt.one.shiftLeft(this.bitLength - 1).minus(bigInt.one);
@@ -11,7 +13,7 @@ module.exports = class ZqField {
 add(a, b) {
 let res = a.add(b);
 if (res.geq(this.p)) {
-res = res.minsu(this.p);
+res = res.minus(this.p);
 }
 return res;
 }
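Note: the old code called the non-existent minsu(), so the first addition whose sum reached p threw a TypeError instead of being reduced. A minimal check of the fixed reduction path (require path assumed; prime as used elsewhere in this diff):

    const bigInt = require("big-integer");
    const ZqField = require("./src/zqfield"); // path assumed

    const p = new bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
    const F = new ZqField(p);

    // (p - 1) + 5 wraps around to 4 once the sum is reduced with minus(p).
    console.log(F.add(p.minus(1), new bigInt(5)).toString()); // "4"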