Compare commits


7 Commits

Author          SHA1         Message                          Date
Jordi Baylina   c39423e411   0.5.17                           2020-07-25 14:01:31 +02:00
Jordi Baylina   06b6c1a49e   Fast write constraints           2020-07-25 13:55:17 +02:00
Jordi Baylina   6b712f3587   0.5.16                           2020-07-23 15:56:47 +02:00
Jordi Baylina   26cad30222   faster write r1cs                2020-07-23 15:55:27 +02:00
Jordi Baylina   f48de61ca9   lIdx BigArray                    2020-07-21 14:34:53 +02:00
Jordi Baylina   89cea4755c   0.5.15                           2020-07-21 08:47:27 +02:00
Jordi Baylina   9bf6ecc4f3   Reducing constraints big array   2020-07-21 08:47:10 +02:00
6 changed files with 139 additions and 38 deletions

package-lock.json (generated)

@@ -1,6 +1,6 @@
{
"name": "circom",
"version": "0.5.14",
"version": "0.5.17",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -576,9 +576,9 @@
"dev": true
},
"fastfile": {
"version": "0.0.7",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.7.tgz",
"integrity": "sha512-Zk7sdqsV6DsN/rhjULDfCCowPiMDsziTMFicdkrKN80yybr/6YFf9H91ELXN85dVEf6EYkVR5EHkZNc0dMqZKA=="
"version": "0.0.9",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.9.tgz",
"integrity": "sha512-njh6lH2SJiS0u0JofJQf2YfEOSgGfbYPtmFnpEXXy6OilWoX1wGw3klaSKIwhq8+E5MqYpqJXMiaqmptaU2wig=="
},
"ffiasm": {
"version": "0.0.2",
@@ -1167,6 +1167,13 @@
"requires": {
"fastfile": "0.0.7",
"ffjavascript": "0.2.4"
},
"dependencies": {
"fastfile": {
"version": "0.0.7",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.7.tgz",
"integrity": "sha512-Zk7sdqsV6DsN/rhjULDfCCowPiMDsziTMFicdkrKN80yybr/6YFf9H91ELXN85dVEf6EYkVR5EHkZNc0dMqZKA=="
}
}
},
"regexpp": {

package.json

@@ -1,6 +1,6 @@
{
"name": "circom",
"version": "0.5.14",
"version": "0.5.17",
"description": "Language to generate logic circuits",
"main": "index.js",
"directories": {
@@ -31,7 +31,7 @@
"dependencies": {
"chai": "^4.2.0",
"circom_runtime": "0.0.6",
"fastfile": "0.0.7",
"fastfile": "0.0.9",
"ffiasm": "0.0.2",
"ffjavascript": "0.2.4",
"ffwasm": "0.0.7",

bigarray.js

@@ -46,6 +46,19 @@ class _BigArray {
if (idx >= this.length) this.length = idx+1;
return true;
}
getKeys() {
const newA = new BigArray();
for (let i=0; i<this.arr.length; i++) {
if (this.arr[i]) {
for (let j=0; j<this.arr[i].length; j++) {
if (typeof this.arr[i][j] !== "undefined") {
newA.push(i*SUBARRAY_SIZE+j);
}
}
}
}
return newA;
}
}
class BigArray {
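
Note: getKeys() enumerates the indices that have been set in the sparse sub-array structure, playing the role Object.keys() played when these collections were plain objects. A minimal usage sketch, assuming the Proxy-based index get/set that the rest of this changeset relies on (the sample indices are made up):

    const BigArray = require("./bigarray");      // module path as used elsewhere in this diff

    const removed = new BigArray();
    removed[5] = true;                           // mark a few sparse indices
    removed[1000000] = true;

    const keys = removed.getKeys();              // BigArray of the indices that were set
    for (let i = 0; i < keys.length; i++) {
        console.log(keys[i]);                    // prints 5, then 1000000
    }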


@@ -28,8 +28,30 @@ const utils = require("./utils");
const buildR1cs = require("./r1csfile").buildR1cs;
const BigArray = require("./bigarray");
const buildSyms = require("./buildsyms");
const {performance} = require("perf_hooks");
module.exports = compile;
const measures = {};
function ms2String(v) {
v = Math.floor(v);
const ms = v % 1000;
v = Math.floor(v/1000);
const secs = v % 60;
v = Math.floor(v/60);
const mins = v % 60;
v = Math.floor(v/60);
const hours = v % 24;
const days = Math.floor(v/24);
let S = "";
if (days) S = S + days + "D ";
if ((S!="")||(hours)) S = S + hours.toString().padStart(2, "0") + ":";
if ((S!="")||(mins)) S = S + mins.toString().padStart(2, "0") + ":";
if ((S!="")||(secs)) S = S + secs.toString().padStart(2, "0");
S+=".";
S = S + ms.toString().padStart(3, "0");
return S;
}
async function compile(srcFile, options) {
options.prime = options.prime || Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
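
Note: ms2String formats an accumulated millisecond count as days/hours:minutes:seconds.milliseconds, printing only the leading units that are non-zero. For illustration (the input values are made up; the outputs follow the code above):

    ms2String(123.9);      // ".123"             (sub-second value)
    ms2String(61234);      // "01:01.234"        (1 min 1 s 234 ms)
    ms2String(90061005);   // "1D 01:01:01.005"  (1 day 1 h 1 min 1 s 5 ms)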
@@ -44,9 +66,12 @@ async function compile(srcFile, options) {
ctx.mainComponent = options.mainComponent || "main";
ctx.newThreadTemplates = options.newThreadTemplates;
measures.constructionPhase = -performance.now();
constructionPhase(ctx, srcFile);
measures.constructionPhase += performance.now();
if (ctx.verbose) console.log("NConstraints Before: "+ctx.constraints.length);
if (ctx.verbose) console.log("NSignals Before: "+ctx.signals.length);
if (ctx.error) {
throw(ctx.error);
@@ -57,10 +82,15 @@ async function compile(srcFile, options) {
}
if (ctx.verbose) console.log("Classify Signals");
measures.classifySignals = -performance.now();
classifySignals(ctx);
measures.classifySignals += performance.now();
if (ctx.verbose) console.log("Reduce Constants");
measures.reduceConstants = -performance.now();
reduceConstants(ctx);
measures.reduceConstants += performance.now();
if (options.reduceConstraints) {
if (ctx.verbose) console.log("Reduce Constraints");
@@ -73,13 +103,17 @@ async function compile(srcFile, options) {
reduceConstrains(ctx);
}
*/
measures.reduceConstraints = -performance.now();
await reduceConstrains(ctx);
measures.reduceConstraints += performance.now();
reduceConstrains(ctx);
}
if (ctx.verbose) console.log("NConstraints After: "+ctx.constraints.length);
measures.generateWitnessNames = -performance.now();
generateWitnessNames(ctx);
measures.generateWitnessNames += performance.now();
if (ctx.error) {
throw(ctx.error);
@@ -87,16 +121,19 @@ async function compile(srcFile, options) {
if (options.cSourceWriteStream) {
if (ctx.verbose) console.log("Generating c...");
measures.generateC = -performance.now();
ctx.builder = new BuilderC(options.prime);
build(ctx);
const rdStream = ctx.builder.build();
rdStream.pipe(options.cSourceWriteStream);
measures.generateC += performance.now();
// await new Promise(fulfill => options.cSourceWriteStream.on("finish", fulfill));
}
if ((options.wasmWriteStream)||(options.watWriteStream)) {
if (ctx.verbose) console.log("Generating wasm...");
measures.generateWasm = -performance.now();
ctx.builder = new BuilderWasm(options.prime);
build(ctx);
if (options.wasmWriteStream) {
@@ -107,6 +144,7 @@ async function compile(srcFile, options) {
const rdStream = ctx.builder.build("wat");
rdStream.pipe(options.watWriteStream);
}
measures.generateWasm += performance.now();
// await new Promise(fulfill => options.wasmWriteStream.on("finish", fulfill));
}
@@ -115,18 +153,27 @@ async function compile(srcFile, options) {
if (ctx.error) throw(ctx.error);
if (options.r1csFileName) {
measures.generateR1cs = -performance.now();
await buildR1cs(ctx, options.r1csFileName);
measures.generateR1cs += performance.now();
}
if (options.symWriteStream) {
measures.generateSyms = -performance.now();
const rdStream = buildSyms(ctx);
rdStream.pipe(options.symWriteStream);
measures.generateSyms += performance.now();
// await new Promise(fulfill => options.symWriteStream.on("finish", fulfill));
await new Promise(fulfill => options.symWriteStream.on("finish", fulfill));
}
// const def = buildCircuitDef(ctx, mainCode);
if (ctx.verbose) {
for (let [mStr, mValue] of Object.entries(measures)) {
console.log(mStr + ": " + ms2String(mValue));
}
}
}
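
Note: the instrumentation added above follows one simple idiom: each phase stores -performance.now() when it starts and adds performance.now() when it ends, so the entry holds the elapsed milliseconds, which compile() prints through ms2String when ctx.verbose is set. A self-contained sketch of the same idiom (doWork and the phase name are illustrative):

    const { performance } = require("perf_hooks");

    const measures = {};

    measures.examplePhase = -performance.now();      // start of the phase
    doWork();                                        // ...the phase body...
    measures.examplePhase += performance.now();      // now holds elapsed ms

    console.log("examplePhase: " + measures.examplePhase.toFixed(3) + " ms");

    function doWork() {
        for (let i = 0; i < 1e6; i++) Math.sqrt(i);  // stand-in workload
    }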
@@ -152,6 +199,7 @@ function classifySignals(ctx) {
// First classify the signals
for (let s=0; s<ctx.signals.length; s++) {
if ((ctx.verbose)&&(s%100000 == 0)) console.log(`classify signals: ${s}/${ctx.signals.length}`);
const signal = ctx.signals[s];
let tAll = ctx.stINTERNAL;
let lSignal = signal;
@@ -255,17 +303,19 @@ function reduceConstants(ctx) {
ctx.constraints = newConstraints;
}
function reduceConstrains(ctx) {
const sig2constraint = {};
let removedSignals = {};
async function reduceConstrains(ctx) {
const sig2constraint = new BigArray();
let removedSignals = new BigArray();
let nRemoved;
let lIdx;
let possibleConstraints = new Array(ctx.constraints.length);
let possibleConstraints = new BigArray(ctx.constraints.length);
let nextPossibleConstraints;
for (let i=0; i<ctx.constraints.length; i++) {
const insertedSig = {};
if ((ctx.verbose)&&(i%100000 == 0)) console.log(`indexing constraints: ${i}/${ctx.constraints.length}`);
const insertedSig = { 0: true}; // Do not insert one.
const c = ctx.constraints[i];
for (let s in c.a.coefs) {
if (!insertedSig[s]) {
@@ -292,12 +342,13 @@ function reduceConstrains(ctx) {
}
while (possibleConstraints.length >0) {
nextPossibleConstraints = {};
removedSignals = {};
nextPossibleConstraints = new BigArray();
removedSignals = new BigArray();
nRemoved = 0;
lIdx = {};
lIdx = new BigArray();
for (let i=0;i<possibleConstraints.length;i++) {
if ((ctx.verbose)&&(i%10000 == 0)) {
await Promise.resolve();
console.log(`reducing constraints: ${i}/${possibleConstraints.length} reduced: ${nRemoved}`);
}
const c = ctx.constraints[possibleConstraints[i]];
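
Note: reduceConstrains is now async and awaits Promise.resolve() every 10,000 constraints before logging progress. The await splits the otherwise fully synchronous loop at a microtask boundary, so already-queued promise continuations get a chance to run between batches. A minimal sketch of the pattern, independent of the compiler (the function and variable names are illustrative):

    async function processAll(items) {
        for (let i = 0; i < items.length; i++) {
            if ((i % 10000) == 0) {
                await Promise.resolve();             // yield to queued microtasks
                console.log(`progress: ${i}/${items.length}`);
            }
            // ...per-item work...
        }
    }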
@@ -348,9 +399,13 @@ function reduceConstrains(ctx) {
}
}
nextPossibleConstraints = Object.keys(nextPossibleConstraints);
nextPossibleConstraints = nextPossibleConstraints.getKeys();
for (let i=0; i<nextPossibleConstraints.length;i++) {
if ((ctx.verbose)&&(i%10000 == 0)) {
await Promise.resolve();
console.log(`substituting constraints: ${i}/${nextPossibleConstraints.length}`);
}
const c = ctx.constraints[nextPossibleConstraints[i]];
if (c) {
const nc = {
@@ -366,7 +421,11 @@ function reduceConstrains(ctx) {
}
}
for (let s in removedSignals) {
const removedSignalsList = removedSignals.getKeys();
for (let i=0; i<removedSignalsList.length; i++) {
if ((ctx.verbose )&&(i%100000 == 0)) console.log(`removing signals: ${i}/${removedSignalsList.length}`);
const s = removedSignalsList[i];
let lSignal = ctx.signals[s];
while (lSignal.e>=0) {
@@ -377,11 +436,11 @@ function reduceConstrains(ctx) {
}
possibleConstraints = nextPossibleConstraints;
}
let o=0;
for (let i=0; i<ctx.constraints.length;i++) {
if ((ctx.verbose)&&(i%100000 == 0)) console.log(`reordering constraints: ${i}/${ctx.constraints.length}`);
if (ctx.constraints[i]) {
if (!ctx.lc.isZero(ctx.constraints[i])) {
ctx.constraints[o] = ctx.constraints[i];


@@ -123,6 +123,7 @@ class LCAlgebra {
}
_signal2lc(a) {
const self = this;
if (a.t == "S") {
const lc = {
t: "LC",

r1csfile.js

@@ -4,11 +4,14 @@ const assert = require("assert");
module.exports.buildR1cs = buildR1cs;
async function buildR1cs(ctx, fileName) {
const fd = await fastFile.createOverride(fileName);
const buffBigInt = new Uint8Array(ctx.F.n8);
const type = "r1cs";
const buff = new Uint8Array(4);
for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
@@ -91,33 +94,51 @@ async function buildR1cs(ctx, fileName) {
await fd.close();
async function writeConstraint(c) {
await writeLC(c.a);
await writeLC(c.b);
await writeLC(ctx.lc.neg(c.c));
}
async function writeLC(lc) {
const idxs = Object.keys(lc.coefs);
await fd.writeULE32(idxs.length);
for (let s in lc.coefs) {
let lSignal = ctx.signals[s];
function writeConstraint(c) {
const n8 = ctx.F.n8;
const idxA = Object.keys(c.a.coefs);
const idxB = Object.keys(c.b.coefs);
const idxC = Object.keys(c.c.coefs);
const buff = new Uint8Array((idxA.length+idxB.length+idxC.length)*(n8+4) + 12);
const buffV = new DataView(buff.buffer);
let o=0;
buffV.setUint32(o, idxA.length, true); o+=4;
for (let i=0; i<idxA.length; i++) {
const coef = idxA[i];
let lSignal = ctx.signals[coef];
while (lSignal.e >=0 ) lSignal = ctx.signals[lSignal.e];
await fd.writeULE32(lSignal.id);
await writeBigInt(lc.coefs[s]);
buffV.setUint32(o, lSignal.id, true); o+=4;
ctx.F.toRprLE(buff, o, c.a.coefs[coef]); o+=n8;
}
buffV.setUint32(o, idxB.length, true); o+=4;
for (let i=0; i<idxB.length; i++) {
const coef = idxB[i];
let lSignal = ctx.signals[coef];
while (lSignal.e >=0 ) lSignal = ctx.signals[lSignal.e];
buffV.setUint32(o, lSignal.id, true); o+=4;
ctx.F.toRprLE(buff, o, c.b.coefs[coef]); o+=n8;
}
buffV.setUint32(o, idxC.length, true); o+=4;
for (let i=0; i<idxC.length; i++) {
const coef = idxC[i];
let lSignal = ctx.signals[coef];
while (lSignal.e >=0 ) lSignal = ctx.signals[lSignal.e];
buffV.setUint32(o, lSignal.id, true); o+=4;
ctx.F.toRprLE(buff, o, ctx.F.neg(c.c.coefs[coef])); o+=n8;
}
return fd.write(buff);
}
async function writeBigInt(n, pos) {
const s = n.toString(16);
const b = Buffer.from(s.padStart(n8*2, "0"), "hex");
const buff = new Uint8Array(b.length);
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
ctx.F.toRprLE(buffBigInt, 0, n);
await fd.write(buff, pos);
await fd.write(buffBigInt, pos);
}
}
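
Note: the rewritten writeConstraint builds one Uint8Array per constraint and issues a single fd.write, where the old writeLC awaited a small write for every index and coefficient. Each linear combination is laid out as a little-endian uint32 term count followed by (uint32 signal id, n8-byte little-endian field element) pairs, for A, B and then the negated C. A reduced sketch of the same buffer layout for a single linear combination (buildLCBuffer is a hypothetical helper; F is a field object exposing n8 and toRprLE as used above; signal-alias resolution via lSignal.e is omitted):

    function buildLCBuffer(F, coefs) {
        const n8 = F.n8;                                       // bytes per field element
        const idxs = Object.keys(coefs);
        const buff = new Uint8Array(4 + idxs.length * (n8 + 4));
        const buffV = new DataView(buff.buffer);
        let o = 0;
        buffV.setUint32(o, idxs.length, true); o += 4;         // number of terms (little-endian)
        for (let i = 0; i < idxs.length; i++) {
            buffV.setUint32(o, Number(idxs[i]), true); o += 4; // signal id
            F.toRprLE(buff, o, coefs[idxs[i]]); o += n8;       // coefficient
        }
        return buff;
    }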