Almost ready for 0.5.0

This commit is contained in:
Jordi Baylina
2020-03-26 17:42:25 +01:00
parent ef899e618b
commit eb8cb0af74
62 changed files with 1249 additions and 15153 deletions

View File

@@ -19,7 +19,7 @@ module.exports = function buildSyms(ctx) {
while (ctx.signals[s].e >= 0) s = ctx.signals[s].e;
let wId = ctx.signals[s].id;
if (typeof(wId) == "undefined") wId=-1;
rs.push(`${actual.offset},${wId},${actual.name}\n`);
rs.push(`${actual.offset},${wId},${actual.cIdx},${actual.name}\n`);
it.next();
counter ++;
@@ -32,11 +32,12 @@ module.exports = function buildSyms(ctx) {
class SignalIt {
constructor (ctx, offset, prefix) {
constructor (ctx, offset, prefix, cIdx) {
this.ctx = ctx;
this.offset = offset;
this.prefix = prefix;
this.cur = 0;
this.cIdx = cIdx;
}
next() {
@@ -47,16 +48,16 @@ class SignalIt {
current() {
if (this.cur == 0) {
return {offset: this.offset, name: this.prefix};
return {offset: this.offset, name: this.prefix, cIdx: this.cIdx};
}
}
}
class ArrayIt {
constructor (ctx, type, sizes, offset, prefix) {
constructor (ctx, type, sizes, offset, prefix, cIdx) {
if (sizes.length == 0) {
if (type == "S") {
return new SignalIt(ctx, offset, prefix);
return new SignalIt(ctx, offset, prefix, cIdx);
} else {
return new ComponentIt(ctx, offset, prefix);
}
@@ -67,6 +68,7 @@ class ArrayIt {
this.sizes = sizes;
this.offset = offset;
this.prefix = prefix;
this.cIdx = cIdx;
@@ -86,7 +88,7 @@ class ArrayIt {
_loadSubIt() {
if (this.cur < this.sizes[0]) {
this.subIt = new ArrayIt(this.ctx, this.type, this.sizes.slice(1), this.offset + this.cur*this.subArrSize, this.prefix + "[" + this.cur + "]");
this.subIt = new ArrayIt(this.ctx, this.type, this.sizes.slice(1), this.offset + this.cur*this.subArrSize, this.prefix + "[" + this.cur + "]", this.cIdx);
}
}
@@ -129,7 +131,7 @@ class ComponentIt {
_loadSubIt() {
if (this.cur < this.names.length) {
const entrie = this.ctx.components[this.idxComponent].names.o[this.names[this.cur]];
this.subIt = new ArrayIt(this.ctx, entrie.type, entrie.sizes, entrie.offset, this.prefix + "." + this.names[this.cur]);
this.subIt = new ArrayIt(this.ctx, entrie.type, entrie.sizes, entrie.offset, this.prefix + "." + this.names[this.cur], this.idxComponent);
}
}

View File

@@ -18,14 +18,14 @@
*/
const bigInt = require("big-integer");
const __P__ = new bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const __P__ = bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const sONE = 0;
const build = require("./build");
const BuilderC = require("../ports/c/builder.js");
const BuilderWasm = require("../ports/wasm/builder.js");
const constructionPhase = require("./construction_phase");
const Ctx = require("./ctx");
const ZqField = require("fflib").ZqField;
const ZqField = require("ffjavascript").ZqField;
const utils = require("./utils");
const buildR1cs = require("./r1csfile").buildR1cs;
const BigArray = require("./bigarray");
@@ -49,7 +49,7 @@ async function compile(srcFile, options) {
constructionPhase(ctx, srcFile);
console.log("NConstraints Before: "+ctx.constraints.length);
if (ctx.verbose) console.log("NConstraints Before: "+ctx.constraints.length);
if (ctx.error) {
throw(ctx.error);
@@ -70,13 +70,13 @@ async function compile(srcFile, options) {
// Repeat while reductions are performed
let oldNConstrains = -1;
while (ctx.constraints.length != oldNConstrains) {
console.log("Reducing constraints: "+ctx.constraints.length);
if (ctx.verbose) console.log("Reducing constraints: "+ctx.constraints.length);
oldNConstrains = ctx.constraints.length;
reduceConstrains(ctx);
}
}
console.log("NConstraints After: "+ctx.constraints.length);
if (ctx.verbose) console.log("NConstraints After: "+ctx.constraints.length);
generateWitnessNames(ctx);
@@ -94,7 +94,7 @@ async function compile(srcFile, options) {
}
if ((options.wasmWriteStream)||(options.watWriteStream)) {
ctx.builder = new BuilderWasm(options.sanityCheck);
ctx.builder = new BuilderWasm();
build(ctx);
if (options.wasmWriteStream) {
const rdStream = ctx.builder.build("wasm");

View File

@@ -924,7 +924,9 @@ function execConstrain(ctx, ast) {
if (!ctx.lc.isZero(res)) {
ctx.constraints.push(ctx.lc.toQEX(res));
if ((ctx.constraints.length % 10000 == 0)&&(ctx.constraints.length>0)) console.log("Constraints: " + ctx.constraints.length);
if (ctx.verbose) {
if ((ctx.constraints.length % 10000 == 0)&&(ctx.constraints.length>0)) console.log("Constraints: " + ctx.constraints.length);
}
}
return a;

View File

@@ -4,157 +4,6 @@ const assert = require("assert");
const bigInt = require("big-integer");
module.exports.buildR1cs = buildR1cs;
module.exports.loadR1cs = loadR1cs;
/**
 * Loads a binary .r1cs file.
 *
 * File layout (little-endian): 4-byte magic "r1cs", u32 version, u32 section
 * count, then sections of [u32 type][u32 length][payload]. Type 1 is the
 * header, type 2 the constraints, type 3 the wire-to-label map.
 *
 * @param {string}  fileName        path of the .r1cs file to read
 * @param {boolean} loadConstraints when true, parse the constraints section into res.constraints
 * @param {boolean} loadMap         when true, parse the wire2label section into res.map
 * @returns {Promise<Object>} res with prime, nWires, nPubOuts, nPubIns,
 *   nPrvIns, nLabels, nConstraints and (optionally) constraints / map.
 * @throws {AssertionError} on any malformed or unsupported file content.
 */
async function loadR1cs(fileName, loadConstraints, loadMap) {
    const res = {};
    const fd = await fs.promises.open(fileName, "r");

    // Magic number check: the file must start with the ASCII bytes "r1cs".
    const b = Buffer.allocUnsafe(4);
    await fd.read(b, 0, 4, 0);
    if (b.toString() != "r1cs") assert(false, "Invalid File format");

    let p = 4;                       // running read offset into the file
    let v = await readU32();
    if (v > 1) assert(false, "Version not supported");

    const nSections = await readU32();

    // First pass: record the offset and declared size of each section,
    // skipping over the payloads.
    let pHeader;
    let pConstraints;
    let headerSize;
    let constraintsSize;
    let pMap;
    let mapSize;
    for (let i = 0; i < nSections; i++) {
        let ht = await readU32();    // section type
        let hl = await readU32();    // section length in bytes
        if (ht == 1) {
            if (typeof pHeader != "undefined") assert(false, "File has two header sections");
            pHeader = p;
            headerSize = hl;
        } else if (ht == 2) {
            if (typeof pConstraints != "undefined") assert(false, "File has two constraints sections");
            pConstraints = p;
            constraintsSize = hl;
        } else if (ht == 3) {
            pMap = p;
            mapSize = hl;
        }
        p += hl;
    }

    // Fix: the original reported "File has two header" for a MISSING header.
    if (typeof pHeader == "undefined") assert(false, "File has no header section");

    // Read Header
    p = pHeader;
    const fieldDefSize = await readU32();
    const pFieldDef = p;
    const defType = await readU32();
    // Fix: the original had a stray nested `if (typeof pConstraints != "undefined")`
    // (copy-paste from the section loop) that wrongly gated this check.
    if (defType != 1) assert(false, "Field type not supported");
    res.prime = await readBigInt();
    // Fix: the original called assert("Invalid fieldDef size") without the
    // `false` condition, so the size check could never fail.
    if (p != pFieldDef + fieldDefSize) assert(false, "Invalid fieldDef size");
    const bigIntFormat = await readU32();
    if (bigIntFormat != 0) assert(false, "BigInt format not supported");
    const idSize = await readU32();
    if (idSize != 4) assert(false, "idSize not supported. Must be 4");
    res.nWires = await readU32();
    res.nPubOuts = await readU32();
    res.nPubIns = await readU32();
    res.nPrvIns = await readU32();
    res.nLabels = await readU32();
    res.nConstraints = await readU32();
    if (p != pHeader + headerSize) assert(false, "Invalid header section size");

    if (loadConstraints) {
        // Robustness: fail clearly if the section the caller asked for is absent.
        if (typeof pConstraints == "undefined") assert(false, "File has no constraints section");
        p = pConstraints;
        res.constraints = [];
        for (let i = 0; i < res.nConstraints; i++) {
            const c = await readConstraint();
            res.constraints.push(c);
        }
        if (p != pConstraints + constraintsSize) assert(false, "Invalid constraints size");
    }

    // Read Labels (wire index -> label index map)
    if (loadMap) {
        if (typeof pMap == "undefined") assert(false, "File has no map section");
        p = pMap;
        res.map = [];
        for (let i = 0; i < res.nLabels; i++) {
            const idx = await readU32();
            res.map.push(idx);
        }
        if (p != pMap + mapSize) assert(false, "Invalid Map size");
    }

    await fd.close();
    return res;

    // Reads a little-endian u32 at the current offset and advances it.
    async function readU32() {
        const b = Buffer.allocUnsafe(4);
        await fd.read(b, 0, 4, p);
        p += 4;
        // Fix: use the unsigned read; these are counts/offsets and the
        // original signed readInt32LE would go negative past 2^31-1.
        return b.readUInt32LE(0);
    }

    // Reads a length-prefixed little-endian big integer: one length byte
    // followed by that many value bytes (least significant first).
    async function readBigInt() {
        const bl = Buffer.allocUnsafe(1);
        await fd.read(bl, 0, 1, p);
        p++;
        const l = bl[0];
        const b = Buffer.allocUnsafe(l);
        await fd.read(b, 0, l, p);
        p += l;
        // Reverse to most-significant-first, the order bigInt.fromArray expects.
        const arr = Uint8Array.from(b);
        const arrr = new Array(arr.length);
        for (let i = 0; i < arr.length; i++) {
            arrr[i] = arr[arr.length - 1 - i];
        }
        const n = bigInt.fromArray(arrr, 256);
        return n;
    }

    // Reads one R1CS constraint: three linear combinations a*b = c.
    async function readConstraint() {
        const c = {};
        c.a = await readLC();
        c.b = await readLC();
        c.c = await readLC();
        return c;
    }

    // Reads a linear combination: u32 term count, then (u32 signal idx,
    // bigint coefficient) pairs, returned as an {idx: coefficient} object.
    async function readLC() {
        const lc = {};
        const nIdx = await readU32();
        for (let i = 0; i < nIdx; i++) {
            const idx = await readU32();
            const val = await readBigInt();
            lc[idx] = val;
        }
        return lc;
    }
}
async function buildR1cs(ctx, fileName) {
@@ -171,18 +20,13 @@ async function buildR1cs(ctx, fileName) {
///////////
await writeU32(1); // Header type
const pHeaderSize = p;
await writeU32(0); // Temporally set to 0 length
await writeU64(0); // Temporally set to 0 length
const n8 = (Math.floor( (ctx.field.p.bitLength() - 1) / 64) +1)*8;
// Field Def
const pFieldDefSize = p;
await writeU32(0); // Temporally set to 0 length
await writeU32(1);
await writeU32(n8); // Temporally set to 0 length
await writeBigInt(ctx.field.p);
const fieldDefSize = p - pFieldDefSize - 4;
await writeU32(0); // Variable bigInt format
await writeU32(4); // Id Size
const NWires =
ctx.totals[ctx.stONE] +
@@ -195,16 +39,16 @@ async function buildR1cs(ctx, fileName) {
await writeU32(ctx.totals[ctx.stOUTPUT]);
await writeU32(ctx.totals[ctx.stPUBINPUT]);
await writeU32(ctx.totals[ctx.stPRVINPUT]);
await writeU32(ctx.signals.length);
await writeU64(ctx.signals.length);
await writeU32(ctx.constraints.length);
const headerSize = p - pHeaderSize - 4;
const headerSize = p - pHeaderSize - 8;
// Write constraints
///////////
await writeU32(2); // Constraints type
const pConstraintsSize = p;
await writeU32(0); // Temporally set to 0 length
await writeU64(0); // Temporally set to 0 length
for (let i=0; i<ctx.constraints.length; i++) {
if ((ctx.verbose)&&(i%10000 == 0)) {
@@ -214,13 +58,13 @@ async function buildR1cs(ctx, fileName) {
await writeConstraint(ctx.constraints[i]);
}
const constraintsSize = p - pConstraintsSize - 4;
const constraintsSize = p - pConstraintsSize - 8;
// Write map
///////////
await writeU32(3); // wires2label type
const pMapSize = p;
await writeU32(0); // Temporally set to 0 length
await writeU64(0); // Temporally set to 0 length
const arr = new Array(NWires);
@@ -234,15 +78,14 @@ async function buildR1cs(ctx, fileName) {
}
}
for (let i=0; i<arr.length; i++) {
await writeU32(arr[i]);
await writeU64(arr[i]);
if ((ctx.verbose)&&(i%100000)) console.log("writing wire2label map: ", i);
}
const mapSize = p - pMapSize -4;
const mapSize = p - pMapSize - 8;
// Write sizes
await writeU32(headerSize, pHeaderSize);
await writeU32(fieldDefSize, pFieldDefSize);
await writeU32(constraintsSize, pConstraintsSize);
await writeU32(mapSize, pMapSize);
@@ -258,6 +101,15 @@ async function buildR1cs(ctx, fileName) {
if (typeof(pos) == "undefined") p += 4;
}
// Writes `v` as a little-endian unsigned 64-bit integer.
// When `pos` is given, the write goes to that absolute file offset and the
// running offset `p` is left untouched (used to back-patch section sizes);
// when `pos` is undefined the write lands at the file's current position
// and `p` advances by 8. `fd` and `p` are closed over from buildR1cs.
// NOTE(review): `v` must fit in an unsigned 64-bit value — BigInt(v) throws
// on non-integers and writeBigUInt64LE throws on negatives/overflow.
async function writeU64(v, pos) {
const b = Buffer.allocUnsafe(8);
b.writeBigUInt64LE(BigInt(v));
await fd.write(b, 0, 8, pos);
if (typeof(pos) == "undefined") p += 8;
}
async function writeConstraint(c) {
await writeLC(c.a);
await writeLC(c.b);
@@ -277,12 +129,18 @@ async function buildR1cs(ctx, fileName) {
}
}
async function writeBigInt(n) {
async function writeBigInt(n, pos) {
const b = Buffer.allocUnsafe(n8);
const bytes = bigInt(n).toArray(256).value.reverse();
const dwords = bigInt(n).toArray(0x100000000).value;
await fd.write(Buffer.from([bytes.length, ...bytes ]));
for (let i=0; i<dwords.length; i++) {
b.writeUInt32LE(dwords[dwords.length-1-i], i*4, 4 );
}
b.fill(0, dwords.length*4);
p += bytes.length+1;
await fd.write(b, 0, fs, pos);
if (typeof(pos) == "undefined") p += n8;
}
}