
merge dev

schaeff 2020-04-21 13:19:18 +02:00
commit ddc4c196bf
227 changed files with 13223 additions and 3567 deletions


@ -57,6 +57,14 @@ jobs:
# - run:
# name: Generate code coverage report
# command: ./scripts/cov.sh
cpp_format:
docker:
- image: dark64/clang-format-checker:env
steps:
- checkout
- run:
name: Check cpp format (clang-format)
command: run-clang-format.py -r $(pwd)/zokrates_core/lib
wasm_test:
docker:
- image: rustlang/rust:nightly-slim
@ -72,14 +80,11 @@ jobs:
keys:
- v4-cargo-cache-{{ arch }}-{{ checksum "Cargo.lock" }}
- run:
name: Download wasm32 target
command: rustup target add wasm32-unknown-unknown
name: Install wasm testing env
command: ./scripts/install_wasm_testing.sh
- run:
name: Install libsnark prerequisites
command: ./scripts/install_libsnark_prerequisites.sh
- run:
name: Run tests with WASM enabled
command: cd zokrates_core && cargo test --release --features wasm -- --test-threads=1
name: Test on firefox
command: ZOKRATES_HOME=$(pwd)/zokrates_stdlib/stdlib/ cd zokrates_core && wasm-pack test --firefox --headless -- --features wasm
integration_test:
docker:
- image: rustlang/rust:nightly-slim
@ -107,7 +112,7 @@ jobs:
command: ZOKRATES_HOME=$(pwd)/zokrates_stdlib/stdlib/ WITH_LIBSNARK=1 RUSTFLAGS="-D warnings" ./full_test.sh
deploy:
docker:
- image: circleci/python
- image: circleci/python:latest-node
steps:
- checkout
- setup_remote_docker:
@ -115,15 +120,32 @@ jobs:
- run:
name: Release
command: ./scripts/release.sh
zokrates_js_build:
docker:
- image: dark64/rust-wasm-env:latest
steps:
- checkout
- run:
name: Build
command: cd zokrates_js && npm run build
zokrates_js_test:
docker:
- image: circleci/node
steps:
- checkout
- run:
command: cd zokrates_js && npm run test
workflows:
version: 2
build-test-and-deploy:
jobs:
- build
- test
- cpp_format
- wasm_test
- integration_test
- zokrates_js_build
- zokrates_js_test
- deploy:
filters:
branches:
@ -132,6 +154,8 @@ workflows:
requires:
- build
- test
- cpp_format
- wasm_test
- integration_test
- zokrates_js_build
- zokrates_js_test

.gitignore (3 changes)

@ -5,6 +5,7 @@
# ZoKrates default files
out
out.ztf
abi.json
proof.json
proving.key
verification.key
@ -18,3 +19,5 @@ witness
.DS_Store
.idea
.vscode
*.iml


@ -21,8 +21,8 @@ matrix:
- env: TARGET=x86_64-unknown-linux-gnu
# OSX
- env: TARGET=i686-apple-darwin
os: osx
# - env: TARGET=i686-apple-darwin
# os: osx
- env: TARGET=x86_64-apple-darwin
os: osx

Cargo.lock (generated, 1623 changes)

File diff suppressed because it is too large


@ -1,6 +1,7 @@
[workspace]
members = [
"zokrates_common",
"zokrates_core",
"zokrates_cli",
"zokrates_fs_resolver",
@ -9,4 +10,6 @@ members = [
"zokrates_abi",
"zokrates_test",
"zokrates_core_test",
]
]
exclude = ["zokrates_js"]


@ -4,7 +4,7 @@ MAINTAINER JacobEberhardt <jacob.eberhardt@tu-berlin.de>, Thibaut Schaeffer <thi
RUN useradd -u 1000 -m zokrates
ARG RUST_TOOLCHAIN=nightly-2019-06-02
ARG RUST_TOOLCHAIN=nightly-2020-01-01
ENV WITH_LIBSNARK=1
ENV ZOKRATES_HOME=/home/zokrates/.zokrates


@ -13,16 +13,20 @@ _This is a proof-of-concept implementation. It has not been tested for productio
## Getting Started
Load the ZoKrates Plugin on [Remix](https://remix.ethereum.org) to write your first SNARK program!
Alternatively, you can install the ZoKrates CLI:
```bash
curl -LSfs get.zokrat.es | sh
```
Have a look at the [documentation](https://zokrates.github.io/) for more information about using ZoKrates.
Have a look at the [documentation](https://zokrates.github.io/) for more information about using ZoKrates.
A getting started tutorial can be found [here](https://zokrates.github.io/sha256example.html).
## Getting Help
If you run into problems, ZoKrates has a Gitter room. You can come ask for help on [Gitter](https://gitter.im/ZoKrates/Lobby).
If you run into problems, ZoKrates has a [Gitter](https://gitter.im/ZoKrates/Lobby) room.
## License
@ -30,6 +34,6 @@ ZoKrates is released under the GNU Lesser General Public License v3.
## Contributing
We happily welcome contributions. You can either pick an existing issue, or reach out on [Gitter](https://gitter.im/ZoKrates/Lobby).
We happily welcome contributions. You can either pick an existing issue or reach out on [Gitter](https://gitter.im/ZoKrates/Lobby).
Unless you explicitly state otherwise, any contribution you intentionally submit for inclusion in the work shall be licensed as above, without any additional terms or conditions.


@ -3,8 +3,4 @@
# Exit if any subcommand fails
set -e
if [ -n "$WITH_LIBSNARK" ]; then
cargo -Z package-features build --package zokrates_cli --features="libsnark"
else
cargo build
fi
cargo build


@ -1,3 +1,5 @@
#!/bin/bash
# This script takes care of building your crate and packaging it for release
set -ex


@ -1,3 +1,5 @@
#!/bin/bash
set -ex
main() {


@ -1,8 +1,10 @@
#!/bin/bash
# This script takes care of testing your crate
set -ex
# This is the test phase. We will only build as tests happened before.
# This is the test phase. We will only build if tests happened before.
main() {
cross build --target $TARGET
cross build --target $TARGET --release


@ -4,7 +4,3 @@
set -e
cargo test --release -- --ignored
if [ -n "$WITH_LIBSNARK" ]; then
cargo -Z package-features test --release --package zokrates_cli --features="libsnark" -- --ignored
fi

scripts/clang-format.sh (new executable file, 8 lines)

@ -0,0 +1,8 @@
#!/bin/bash
# Usage: ./clang-format.sh zokrates_core/lib
dir=$1
for file in $dir/*.cpp $dir/*.hpp; do
clang-format -i -style=WebKit -verbose $file
done


@ -0,0 +1,2 @@
apt-get update && apt-get install curl firefox-esr -y --no-install-recommends
curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh


@ -31,7 +31,7 @@ get_bitness() {
# ELF files start out "\x7fELF", and the following byte is
# 0x01 for 32-bit and
# 0x02 for 64-bit.
# The printf builtin on some shells like dash only supports octal
# The printf builtin on some shells - like dash - only supports octal
# escape sequences, so we use those.
local _current_exe_head
_current_exe_head=$(head -c 5 /proc/self/exe )
@ -302,7 +302,7 @@ main() {
# install ZoKrates
for f in $(ls $td); do
# put folders into $dest
if [ -d $td/$f ]; then
if [ -d $td/$f ]; then
if [ -e "$dest/$f" ] && [ $force = false ]; then
err "$f already exists in $dest, use --force to overwrite"
else
@ -327,7 +327,7 @@ main() {
cat <<'EOF'
ZoKrates was installed succesfully!
ZoKrates was installed successfully!
If this is the first time you're installing ZoKrates run the following:
export PATH=$PATH:$HOME/.zokrates/bin
export ZOKRATES_HOME=$HOME/.zokrates/stdlib


@ -1,3 +1,5 @@
#!/bin/bash
# Exit if any subcommand fails
set -e
@ -29,10 +31,24 @@ echo "Published zokrates/zokrates:$TAG"
git tag $TAG
git push origin $TAG
# Build zokrates js
docker build -t zokrates_js -f zokrates_js/Dockerfile .
CID=$(docker create zokrates_js)
docker cp ${CID}:/build zokrates_js/dist
docker rm -f ${CID}
cd zokrates_js/dist
# Publish zokrates_js to npmjs
chmod +x publish.sh
./publish.sh
# Publish book
MDBOOK_TAR="https://github.com/rust-lang-nursery/mdBook/releases/download/v0.2.1/mdbook-v0.2.1-x86_64-unknown-linux-gnu.tar.gz"
cd zokrates_book
cd ../../zokrates_book
## Install mdbook
wget -qO- $MDBOOK_TAR | tar xvz


@ -4,7 +4,3 @@
set -e
cargo test --release
if [ -n "$WITH_LIBSNARK" ]; then
cargo -Z package-features test --release --package zokrates_cli --features="libsnark"
fi


@ -1,6 +1,6 @@
[package]
name = "zokrates_abi"
version = "0.1.0"
version = "0.1.1"
authors = ["Thibaut Schaeffer <thibaut@schaeff.fr>"]
edition = "2018"
@ -9,4 +9,4 @@ zokrates_field = { version = "0.3", path = "../zokrates_field" }
zokrates_core = { version = "0.4", path = "../zokrates_core" }
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
serde_json = "1.0"


@ -89,17 +89,17 @@ impl<T: Field> Value<T> {
match (self, ty) {
(Value::Field(f), Type::FieldElement) => Ok(CheckedValue::Field(f)),
(Value::Boolean(b), Type::Boolean) => Ok(CheckedValue::Boolean(b)),
(Value::Array(a), Type::Array(box inner_ty, size)) => {
if a.len() != size {
(Value::Array(a), Type::Array(array_type)) => {
if a.len() != array_type.size {
Err(format!(
"Expected array of size {}, found array of size {}",
size,
array_type.size,
a.len()
))
} else {
let a = a
.into_iter()
.map(|val| val.check(inner_ty.clone()))
.map(|val| val.check(*array_type.ty.clone()))
.collect::<Result<Vec<_>, _>>()?;
Ok(CheckedValue::Array(a))
}
@ -114,10 +114,10 @@ impl<T: Field> Value<T> {
} else {
let s = members
.into_iter()
.map(|(id, ty)| {
s.remove(&id)
.ok_or_else(|| format!("Member with id `{}` not found", id))
.map(|v| v.check(ty).map(|v| (id, v)))
.map(|member| {
s.remove(&member.id)
.ok_or_else(|| format!("Member with id `{}` not found", member.id))
.map(|v| v.check(*member.ty.clone()).map(|v| (member.id, v)))
})
.collect::<Result<Vec<_>, _>>()?
.into_iter()
@ -187,19 +187,19 @@ impl<T: From<usize> + PartialEq + Clone> Decode<T> for CheckedValue<T> {
unreachable!()
})
}
Type::Array(box inner_ty, _) => CheckedValue::Array(
raw.chunks(inner_ty.get_primitive_count())
.map(|c| CheckedValue::decode(c.to_vec(), inner_ty.clone()))
Type::Array(array_type) => CheckedValue::Array(
raw.chunks(array_type.ty.get_primitive_count())
.map(|c| CheckedValue::decode(c.to_vec(), *array_type.ty.clone()))
.collect(),
),
Type::Struct(members) => CheckedValue::Struct(
members
.into_iter()
.scan(0, |state, (id, ty)| {
let new_state = *state + ty.get_primitive_count();
let res = CheckedValue::decode(raw[*state..new_state].to_vec(), ty);
.scan(0, |state, member| {
let new_state = *state + member.ty.get_primitive_count();
let res = CheckedValue::decode(raw[*state..new_state].to_vec(), *member.ty);
*state = new_state;
Some((id, res))
Some((member.id, res))
})
.collect(),
),
@ -365,6 +365,7 @@ mod tests {
mod strict {
use super::*;
use zokrates_core::typed_absy::types::StructMember;
#[test]
fn fields() {
@ -409,7 +410,10 @@ mod tests {
assert_eq!(
parse_strict::<Bn128Field>(
s,
vec![Type::Struct(vec![("a".into(), Type::FieldElement)])]
vec![Type::Struct(vec![StructMember::new(
"a".into(),
Type::FieldElement
)])]
)
.unwrap(),
CheckedValues(vec![CheckedValue::Struct(
@ -423,7 +427,10 @@ mod tests {
assert_eq!(
parse_strict::<Bn128Field>(
s,
vec![Type::Struct(vec![("a".into(), Type::FieldElement)])]
vec![Type::Struct(vec![StructMember::new(
"a".into(),
Type::FieldElement
)])]
)
.unwrap_err(),
Error::Type("Member with id `a` not found".into())
@ -433,7 +440,10 @@ mod tests {
assert_eq!(
parse_strict::<Bn128Field>(
s,
vec![Type::Struct(vec![("a".into(), Type::FieldElement)])]
vec![Type::Struct(vec![StructMember::new(
"a".into(),
Type::FieldElement
)])]
)
.unwrap_err(),
Error::Type("Expected 1 member(s), found 0".into())
@ -443,7 +453,10 @@ mod tests {
assert_eq!(
parse_strict::<Bn128Field>(
s,
vec![Type::Struct(vec![("a".into(), Type::FieldElement)])]
vec![Type::Struct(vec![StructMember::new(
"a".into(),
Type::FieldElement
)])]
)
.unwrap_err(),
Error::Type("Value `false` doesn't match expected type `field`".into())


@ -18,6 +18,8 @@
- [Proving schemes](reference/proving_schemes.md)
- [Verification](reference/verification.md)
- [ZIR](reference/ir.md)
- [JSON ABI](reference/abi.md)
- [Testing](reference/testing.md)
- [Tutorial: Proof of preimage](./sha256example.md)
- [zokrates.js](./zokrates_js.md)


@ -14,7 +14,7 @@ Arguments are passed by value.
### If expressions
An if expression allows you to branch your code depending on a condition.
An if-expression allows you to branch your code depending on a condition.
```zokrates
{{#include ../../../zokrates_cli/examples/book/if_else.zok}}
@ -32,5 +32,5 @@ For loops are available with the following syntax:
{{#include ../../../zokrates_cli/examples/book/for.zok}}
```
The bounds have to be known at compile time, so only constants are allowed.
The bounds have to be known at compile-time, so only constants are allowed.
For-loops define their own scope.


@ -21,7 +21,7 @@ When defining a variable as the return value of a function, types are optional:
{{#include ../../../zokrates_cli/examples/book/multi_def.zok}}
```
If there is an ambiguity, providing the types of some of the assigned variables is necessary.
If there is any ambiguity, providing the types of some of the assigned variables is necessary.
```zokrates
{{#include ../../../zokrates_cli/examples/book/type_annotations.zok}}


@ -35,13 +35,13 @@ from "./path/to/my/module" import main as module
// `main` is now in scope under the alias `module`.
```
Note that this legacy method is likely to be become deprecated, so it is recommended to use the preferred way instead.
Note that this legacy method is likely to become deprecated, so it is recommended to use the preferred way instead.
### Symbols
Two type of symbols can be imported
Two types of symbols can be imported
#### Functions
Functions are imported by name. If many functions have the same name but different signatures, all of them get imported, and which one to use in a particular call is infered.
Functions are imported by name. If many functions have the same name but different signatures, all of them get imported, and which one to use in a particular call is inferred.
#### User-defined types
User-defined types declared with the `struct` keyword are imported by name.
@ -58,12 +58,11 @@ There also is a handy syntax to import from the parent directory:
from "../mycode" import foo
```
Also imports further up the file-system are supported:
Also, imports further up the file-system are supported:
```zokrates
from "../../../mycode" import foo
```
### Absolute Imports
Absolute imports don't start with `./` or `../` in the path and are used to import components from the ZoKrates standard library. Please check the according [section](./stdlib.html) for more details.
`
Absolute imports don't start with `./` or `../` in the path and are used to import components from the ZoKrates standard library. Please check the according [section](./stdlib.html) for more details.


@ -23,7 +23,7 @@ import "hashes/sha256/512bit.zok"
A function that takes 2 `field[256]` arrays as inputs and returns their sha256 compression function as an array of 256 field elements.
The difference with `sha256` is that no padding is added at the end of the message, which makes it more efficient but also less compatible with Solidity.
There also is support for 2-round (1024-bit input) and and 3-round (1536-bit input) variants, using `hashes/1024bit.zok` or `hashes/1536bit.zok` respectively.
There also is support for 2-round (1024-bit input) and 3-round (1536-bit input) variants, using `hashes/1024bit.zok` and `hashes/1536bit.zok` respectively.
#### sha256packed
@ -31,9 +31,9 @@ There also is support for 2-round (1024-bit input) and and 3-round (1536-bit inp
import "hashes/sha256/512bitPacked.zok"
```
A function that takes an array of 4 field elements as inputs, unpacks each of them to 128 bits (big endian), concatenates them and applies sha256. It then returns an array of 2 field elements, each representing 128 bits of the result.
A function that takes an array of 4 field elements as inputs, unpacks each of them to 128 bits (big-endian), concatenates them and applies sha256. It then returns an array of 2 field elements, each representing 128 bits of the result.
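A minimal usage sketch, mirroring the `sha256packed` example updated elsewhere in this commit:
```zokrates
import "hashes/sha256/512bitPacked" as sha256packed

def main(private field a, private field b, private field c, private field d) -> (field[2]):
	field[2] h = sha256packed([a, b, c, d])
	return h
```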
### Public-key Cryptography
### Public-key Cryptography
#### Proof of private-key ownership
@ -41,8 +41,8 @@ A function that takes an array of 4 field elements as inputs, unpacks each of th
import "ecc/proofOfOwnership.zok"
```
Verifies match of a given public/private keypair. Checks if the following equation holds for the provided keypair:
`pk = sk*G`
Verifies match of a given public/private keypair. Checks if the following equation holds for the provided keypair:
`pk = sk*G`
where `G` is the chosen base point of the subgroup and `*` denotes scalar multiplication in the subgroup.
#### Signature verification
@ -85,5 +85,5 @@ Packs 256 field elements as one. Overflows can occur.
import "utils/pack/nonStrictUnpack256"
```
Unpacks a field element to 256 field elements. Uniqueness of the output is not guaranteed.
Unpacks a field element into 256 field elements. Uniqueness of the output is not guaranteed.


@ -20,9 +20,9 @@ While `field` values mostly behave like unsigned integers, one should keep in mi
ZoKrates has limited support for booleans, to the extent that they can only be used as the condition in `if ... else ... endif` expressions.
You can use them for equality checks and inequality checks between `field` values.
You can use them for equality and inequality checks between `field` values.
Note that while equality checks are cheap, inequality checks should be use wisely as they are orders of magnitude more expensive.
Note that while equality checks are cheap, inequality checks should be used wisely as they are orders of magnitude more expensive.
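For example, a boolean produced by an equality check can drive an if-expression (a minimal sketch using the `if ... then ... else ... fi` syntax shown later in these docs):
```zokrates
def main(field a, field b) -> (field):
	// `a == b` yields a boolean, used here only as a condition
	return if a == b then 1 else 0 fi
```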
## Complex Types
@ -33,18 +33,18 @@ ZoKrates provides two complex types, Arrays and Structs.
ZoKrates supports static arrays, i.e., their length needs to be known at compile time.
Arrays can contain elements of any type and have arbitrary dimensions.
The following examples code shows examples of how to use arrays:
The following code shows examples of how to use arrays:
```zokrates
{{#include ../../../zokrates_cli/examples/book/array.zok}}
```
#### Declaration and Initialization
An array is defined by appending `[]` to a type literal representing the type of the array's elements.
An array is defined by appending `[]` to a type literal representing the type of the array's elements.
Initialization always needs to happen in the same statement as declaration, unless the array is declared within a function's signature.
Initialization always needs to happen in the same statement as a declaration, unless the array is declared within a function's signature.
For initialization, a list of comma-separated values is provided within brackets `[]`.
For initialization, a list of comma-separated values is provided within brackets `[]`.
ZoKrates offers a special shorthand syntax to initialize an array with a constant value:
`[value;repetitions]`
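For instance, a minimal sketch of the shorthand:
```zokrates
def main() -> (field):
	field[4] a = [42; 4] // equivalent to [42, 42, 42, 42]
	return a[0]
```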
@ -60,7 +60,7 @@ The following code provides examples for declaration and initialization:
As an array can contain any type of elements, it can contain arrays again.
There is a special syntax to declare such multi-dimensional arrays, i.e., arrays of arrays.
To declare an array of an inner array, i.e., and array of elements of a type, prepend brackets `[size]` to the declaration of the inner array.
To declare an array of an inner array, i.e., an array of elements of a type, prepend brackets `[size]` to the declaration of the inner array.
In summary, this leads to the following scheme for array declarations:
`data_type[size of 1st dimension][size of 2nd dimension]`.
Consider the following example:
@ -76,7 +76,7 @@ ZoKrates provides some syntactic sugar to retrieve subsets of arrays.
The spread operator `...` applied to an array copies the elements of the existing array.
This can be used to conveniently compose new arrays, as shown in the following example:
```
field[3] = [1, 2, 3]
field[3] a = [1, 2, 3]
field[4] c = [...a, 4] // initialize an array copying values from `a`, followed by 4
```
@ -89,8 +89,8 @@ field[2] b = a[1..3] // initialize an array copying a slice from `a`
```
### Structs
A struct is a composite datatype representing a named collection of variables.
The contained variables can be of any type.
A struct is a composite datatype representing a named collection of variables.
The contained variables can be of any type.
The following code shows an example of how to use structs.
@ -111,7 +111,7 @@ struct Point {
#### Declaration and Initialization
Initialization of a variable of a struct type always needs to happen in the same statement as declaration, unless the struct-typed variable is declared within a function's signature.
Initialization of a variable of a struct type always needs to happen in the same statement as a declaration, unless the struct-typed variable is declared within a function's signature.
The following example shows declaration and initialization of a variable of the `Point` struct type:


@ -2,6 +2,10 @@
## Installation
### Remix online IDE
To write your first SNARK program, check out the ZoKrates plugin in the [Remix online IDE](https://remix.ethereum.org)!
### One-line install
We provide a one-line install for Linux, MacOS and FreeBSD:
@ -22,7 +26,7 @@ From there on, you can use the `zokrates` CLI.
### From source
You can build the container yourself from [source](https://github.com/ZoKrates/ZoKrates/) with the following commands:
You can build ZoKrates from [source](https://github.com/ZoKrates/ZoKrates/) with the following commands:
```bash
git clone https://github.com/ZoKrates/ZoKrates
@ -47,15 +51,15 @@ Then run the different phases of the protocol:
```bash
# compile
./zokrates compile -i root.zok
zokrates compile -i root.zok
# perform the setup phase
./zokrates setup
zokrates setup
# execute the program
./zokrates compute-witness -a 337 113569
zokrates compute-witness -a 337 113569
# generate a proof of computation
./zokrates generate-proof
zokrates generate-proof
# export a solidity verifier
./zokrates export-verifier
zokrates export-verifier
```
The CLI commands are explained in more detail in the [CLI reference](reference/cli.md).


@ -6,7 +6,7 @@ ZoKrates is a toolbox for zkSNARKs on Ethereum. It helps you use verifiable comp
## Background on zkSNARKs
Zero-knowledge proofs (ZKPs) are a family of probabilistic protocols, first described by [Goldwasser, Micali and Rackoff](http://people.csail.mit.edu/silvio/Selected%20Scientific%20Papers/Proof%20Systems/The_Knowledge_Complexity_Of_Interactive_Proof_Systems.pdf) in 1985.
One particular family of ZKPs is described as zero-knowledge **S**uccinct **N**on-interactive **AR**guments of **K**nowledge, a.k.a. zkSNARKs. zkSNARKs are the most widely used zero-knowledge protocols, with the anonymous cryptocurrency Zcash and the smart-contract platform Ethereum among the notable early adopters.
For further details we refer the reader to some introductory material provided by the community: [[1]](https://z.cash/technology/zksnarks/),[[2]](https://medium.com/@VitalikButerin/zkSNARKs-under-the-hood-b33151a013f6), [[3]](https://blog.decentriq.ch/zk-SNARKs-primer-part-one/).


@ -0,0 +1,96 @@
# ZoKrates ABI
In order to interact programmatically with compiled ZoKrates programs, ZoKrates supports passing arguments using an ABI.
To illustrate this, we'll use the following example program:
```
struct Bar {
field a
}
struct Foo {
field a
Bar b
}
def main(private Foo foo, bool[2] bar, field num) -> (field):
return 42
```
## ABI specification
When compiling a program, an ABI specification is generated and describes the interface of the program.
In this example, the ABI specification is:
```json
{
"inputs": [
{
"name": "foo",
"public": false,
"type": "struct",
"components": [
{
"name": "a",
"type": "field"
},
{
"name": "b",
"type": "struct",
"components": [
{
"name": "a",
"type": "field"
}
]
}
]
},
{
"name": "bar",
"public": "true",
"type": "array",
"components": {
"size": 2,
"type": "bool"
}
},
{
"name": "num",
"public": "true",
"type": "field"
}
],
"outputs": [
{
"type": "field"
}
]
}
```
## ABI input format
When executing a program, arguments can be passed as a JSON array of the following form:
```json
[
{
"a": "42",
"b":
{
"a": "42"
}
},
[
true,
false
],
"42"
]
```
Note that field elements are passed as JSON strings in order to support arbitrarily large numbers.
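Assuming the JSON arguments above are saved to a file (here hypothetically named `arguments.json`), they can be piped into witness computation with the `--stdin` and `--abi` flags described in the CLI reference:
```sh
cat arguments.json | zokrates compute-witness --stdin --abi
```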


@ -5,23 +5,28 @@ You can see an overview of the available subcommands by running
```sh
zokrates
```
You can get help about a particular subcommand with `--help`, for example:
```sh
zokrates compile --help
```
For each command, you can get the list of expected arguments using `--help`.
## `compile`
```sh
zokrates compile -i /path/to/add.zok
```
Compiles a `.zok` source code file into ZoKrates internal representation of arithmetic circuits.
Compiles a `.zok` source code file into ZoKrates internal representation of arithmetic circuits.
Creates a compiled binary file at `./out`.
Unless the `--light` flag is set, a human readable `.ztf` file is generated, which displays the compilation output in ZoKrates Text Format.
Unless the `--light` flag is set, a human-readable `.ztf` file is generated, which displays the compilation output in ZoKrates Text Format.
## `compute-witness`
@ -29,9 +34,15 @@ Unless the `--light` flag is set, a human readable `.ztf` file is generated, whi
zokrates compute-witness -a 1 2 3
```
Computes a witness for the compiled program found at `./out` and arguments to the program.
A witness is a valid assignment of the variables, which include the results of the computation.
Arguments to the program are passed as a space-separated list with the `-a` flag, or over `stdin`.
Computes a witness for the compiled program found at `./out`, given arguments to the program.
A witness is a valid assignment of the variables, including the results of the computation.
Arguments of the program are passed as a space-separated list with the `-a` flag, or over `stdin` with the `--stdin` flag.
With the `--abi` flag, arguments are passed in the ZoKrates JSON ABI format described [here](reference/abi.md):
```sh
cat arguments.json | zokrates compute-witness --stdin --abi
```
Creates a witness file at `./witness`
@ -44,7 +55,7 @@ zokrates setup
Generates a trusted setup for the compiled program found at `./out`.
Creates a proving key and a verifying key at `./proving.key` and `./verifying.key`.
These keys are derived from a source of randomness, commonly referred to as “toxic waste”. Anyone having access to the source of randomness can produce fake proofs that will be accepted by a verifier following the protocol.
These keys are derived from a source of randomness, commonly referred to as "toxic waste". Anyone having access to the source of randomness can produce fake proofs that will be accepted by a verifier following the protocol.
The [proving scheme](proving_schemes.md) and curve can be chosen with the `proving-scheme` and `curve` flags.
@ -54,7 +65,7 @@ The [proving scheme](proving_schemes.md) and curve can be chosen with the `provi
zokrates export-verifier
```
Using the verifying key at `./verifying.key`, generates a Solidity contract which contains the generated verification key and a public function to verify a solution to the compiled program at `./out`.
Using the verifying key at `./verifying.key`, generates a Solidity contract that contains the generated verification key and a public function to verify a solution to the compiled program at `./out`.
Creates a verifier contract at `./verifier.sol`.


@ -7,4 +7,5 @@ The reference covers the details of various areas of ZoKrates.
- [Proving schemes](proving_schemes.md)
- [Verification](verification.md)
- [ZIR](ir.md)
- [JSON ABI](abi.md)
- [Testing](testing.md)


@ -39,7 +39,7 @@ cargo +nightly -Z package-features build --release --package zokrates_cli --feat
## G16 malleability
When using G16, developers should pay attention to the fact that an attacker seeing a valid proof can very easily generate a different but still valid proof. Therefore, depending on the use case, making sure on chain that the same proof cannot be submitted twice may *not* be enough to guarantee that attackers cannot replay proofs. Mechanisms to solve this issue include:
When using G16, developers should pay attention to the fact that an attacker, seeing a valid proof, can very easily generate a different but still valid proof. Therefore, depending on the use case, making sure on chain that the same proof cannot be submitted twice may *not* be enough to guarantee that attackers cannot replay proofs. Mechanisms to solve this issue include:
- signed proofs
- nullifiers
- usage of an ethereum address as a public input to the program
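As a sketch of the last mechanism (an illustrative assumption, not code from this commit): the program takes the intended sender's address as a public input, so the proof is bound to that address and the verifier contract can compare it against `msg.sender` before accepting the proof.
```zokrates
// sketch: `senderAddress` is a public input, so it is bound into the proof;
// the on-chain verifier can additionally require it to equal msg.sender
def main(field senderAddress, private field secret) -> (field):
	return 1
```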


@ -1,10 +1,10 @@
# Testing
Before running any tests, make sure make sure your `ZOKRATES_HOME` environment variable is set correctly.
Before running any tests, make sure your `ZOKRATES_HOME` environment variable is set correctly.
It has to point to `zokrates_stdlib/stdlib/`
## Unit tests
In ZoKrates, unit tests comprise
In ZoKrates, unit tests consist of
- internal tests for all zokrates crates
- compilation tests for all examples in `zokrates_cli/examples`. These tests only ensure that the examples compile.
- compilation + witness-computation tests. These tests compile the test cases, compute a witness and compare the result with a pre-defined expected result.
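These can be run from the workspace root with the standard cargo invocation also used by this repo's `scripts/test.sh`:
```sh
cargo test --release
```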
@ -24,10 +24,10 @@ Integration tests are excluded from `cargo test` by default.
They are defined in the `zokrates_cli` crate in `integration.rs` and use the test cases specified in `zokrates_cli/tests/code`.
Before running integration tests:
1. Make sure your `$ZOKRATES_HOME` is set correctly
1. Make sure your `$ZOKRATES_HOME` is set correctly
2. You have [solc](https://github.com/ethereum/solc-js) installed and in your `$PATH`.
Solc can conveniently be installed through `npm` by running
Solc can conveniently be installed through `npm` by running
```
npm install -g solc
```


@ -1,9 +1,9 @@
# Tutorial: Proving knowledge of a hash preimage
Let’s jump into ZoKrates by working through a hands-on project together!
Let's jump into ZoKrates by working through a hands-on project together!
We’ll implement an operation that's very typical in blockchain use-cases: proving knowledge of the preimage for a given hash digest.
In particular, we'll show how ZoKrates and the Ethereum blockchain can be used to allow a prover, let’s call her Peggy, to demonstrate beyond any reasonable doubt to a verifier, let’s call him Victor, that she knows a hash preimage for a digest chosen by Victor, without revealing what the preimage is.
We'll implement an operation that's very typical in blockchain use-cases: proving knowledge of the preimage for a given hash digest.
In particular, we'll show how ZoKrates and the Ethereum blockchain can be used to allow a prover, let's call her Peggy, to demonstrate beyond any reasonable doubt to a verifier, let's call him Victor, that she knows a hash preimage for a digest chosen by Victor, without revealing what the preimage is.
## Pre-requisites
@ -21,9 +21,9 @@ First, we create a new file named `hashexample.zok` with the following content:
The first line imports the `sha256packed` function from the ZoKrates standard library.
`sha256packed` is a SHA256 implementation that is optimized for the use in the ZoKrates DSL. Here is how it works: We want to pass 512 bits of input to sha256. However, a `field` value can only hold 254 bits due to the size of the underlying prime field we are using. As a consequence, we use four field elements, each one encoding 128 bits, to represent our input. The four elements are then concatenated in ZoKrates and passed to SHA256. Given that the resulting hash is 256 bit long, we split it in two and return each value as a 128 bit number.
`sha256packed` is a SHA256 implementation that is optimized for the use in the ZoKrates DSL. Here is how it works: We want to pass 512 bits of input to SHA256. However, a `field` value can only hold 254 bits due to the size of the underlying prime field we are using. As a consequence, we use four field elements, each one encoding 128 bits, to represent our input. The four elements are then concatenated in ZoKrates and passed to SHA256. Given that the resulting hash is 256 bit long, we split it in two and return each value as a 128 bit number.
In case you are interested in an example that is fully compliant with existing SHA256 implementations in Python or Solidity you can have a look at this [blog](https://blog.decentriq.ch/proving-hash-pre-image-zksnarks-zokrates) post.
In case you are interested in an example that is fully compliant with existing SHA256 implementations in Python or Solidity, you can have a look at this [blog](https://blog.decentriq.ch/proving-hash-pre-image-zksnarks-zokrates) post.
Our code is really just using the `sha256packed`, returning the computed hash.
@ -63,7 +63,7 @@ Hence, by concatenating the outputs as 128 bit numbers, we arrive at the followi
For now, we have seen that we can compute a hash using ZoKrates.
Let's recall our goal: Peggy wants to prove that she knows a preimage for a digest chosen by Victor, without revealing what the preimage is. Without loss of generality, let's now assume that Victor choses the digest to be the one we found in our example above.
Let's recall our goal: Peggy wants to prove that she knows a preimage for a digest chosen by Victor, without revealing what the preimage is. Without loss of generality, let's now assume that Victor chooses the digest to be the one we found in our example above.
To make it work, the two parties have to follow their roles in the protocol:
@ -81,7 +81,7 @@ So, having defined the program, Victor is now ready to compile the code:
./zokrates compile -i hashexample.zok
```
Based on that Victor can run the setup phase and export verifier smart contract as a Solidity file:
Based on that Victor can run the setup phase and export a verifier smart contract as a Solidity file:
```sh
./zokrates setup
@ -104,14 +104,14 @@ Finally, Peggy can run the command to construct the proof:
./zokrates generate-proof
```
As the inputs were declared as private in the program, they do not appear in the proof thanks to the zero knowledge property of the protocol.
As the inputs were declared as private in the program, they do not appear in the proof thanks to the zero-knowledge property of the protocol.
ZoKrates creates a file, `proof.json`, consisting of the three elliptic curve points that make up the zkSNARKs proof. The `verifyTx` function in the smart contract deployed by Victor accepts these three values, along with an array of public inputs. The array of public inputs consists of:
* any public inputs to the main function, declared without the `private` keyword
* the return values of the ZoKrates function
In the example we're considering, all inputs are private and there is a single return value of `1`, hence Peggy has to define her public input array as follows: `[1]`
In the example we're considering, all inputs are private and there is a single return value of `1`, hence Peggy has to define her public input array as follows: `[1]`.
Peggy can then submit her proof by calling `verifyTx`.
@ -119,10 +119,10 @@ Victor monitors the verification smart contract for the `Verified` event, which
## Conclusion
At this point, you’ve successfully ran you first zkSNARK on the Ethereum blockchain. Congratulations!
At this point, you've successfully run your first zkSNARK on the Ethereum blockchain. Congratulations!
>Remember that in this example only two parties were involved. This special case makes it easy to deal with the trust assumptions of zkSNARKs: only Victor was interested in verifying the claim by Peggy, hence he can trust his execution of the setup phase.
>
>In general, multiple parties may be interested in verifying the correctness of Peggy's statement. For example, in the zero-knowledge based cryptocurrency Zcash, each node needs to be able to validate the correctness of transactions. In order to generalize the setup phase to these multi-party use-cases a tricky process, commonly referred to as “trusted setup” or "ceremony" needs to be conducted.
>In general, multiple parties may be interested in verifying the correctness of Peggy's statement. For example, in the zero-knowledge based cryptocurrency Zcash, each node needs to be able to validate the correctness of transactions. In order to generalize the setup phase to these multi-party use-cases, a tricky process, commonly referred to as "trusted setup" or "ceremony", needs to be conducted.
>
>ZoKrates would welcome ideas to add support for such ceremonies!


@ -0,0 +1,37 @@
# zokrates.js
You can get JavaScript bindings for ZoKrates by running
```bash
npm install zokrates-js
```
## API
| Function | Description |
| ------ | ------ |
| initialize | Loads the wasm binding module and returns a promise that resolves to a ZoKrates provider |
| compile | Compiles source code into ZoKrates internal representation of arithmetic circuits |
| computeWitness | Computes a valid assignment of the variables, which include the results of the computation |
| setup | Generates a trusted setup for the compiled program |
| exportSolidityVerifier | Generates a Solidity contract which contains the generated verification key and a public function to verify a solution to the compiled program |
| generateProof | Generates a proof for a computation of the compiled program |
## Usage
```js
import { initialize } from 'zokrates-js';
function importResolver(location, path) {
// implement your resolving logic here
return {
source: "def main() -> (): return",
location: path
};
}
initialize().then((zokratesProvider) => {
// we have to initialize the wasm module before calling api functions
zokratesProvider.compile("def main(private field a) -> (field): return a", "main", importResolver)
});
```


@ -1,6 +1,6 @@
[package]
name = "zokrates_cli"
version = "0.5.0"
version = "0.5.2"
authors = ["Jacob Eberhardt <jacob.eberhardt@tu-berlin.de>", "Dennis Kuhnert <mail@kyroy.com>", "Thibaut Schaeffer <thibaut@schaeff.fr>"]
repository = "https://github.com/JacobEberhardt/ZoKrates.git"
edition = "2018"
@ -8,7 +8,6 @@ edition = "2018"
[features]
default = []
libsnark = ["zokrates_core/libsnark"]
wasm = ["zokrates_core/wasm"]
[dependencies]
clap = "2.26.2"
@ -17,7 +16,7 @@ regex = "0.2"
zokrates_field = { version = "0.3", path = "../zokrates_field" }
zokrates_abi = { version = "0.1", path = "../zokrates_abi" }
zokrates_core = { version = "0.4", path = "../zokrates_core" }
zokrates_fs_resolver = { version = "0.4", path = "../zokrates_fs_resolver"}
zokrates_fs_resolver = { version = "0.5", path = "../zokrates_fs_resolver"}
serde_json = "1.0"
[dev-dependencies]


@ -4,6 +4,6 @@ def foo(field[3] a) -> (field):
def main() -> (field, field):
field[3] a = [0, 0, 0]
res = foo(a)
field res = foo(a)
a[1] == 0
return res, a[1]


@ -1,5 +1,5 @@
import "hashes/sha256/512bitPacked" as sha256packed
def main(private field a, private field b, private field c, private field d) -> (field[2]):
h = sha256packed([a, b, c, d])
field[2] h = sha256packed([a, b, c, d])
return h


@ -1,7 +1,7 @@
import "hashes/sha256/512bitPacked" as sha256packed
def main(private field a, private field b, private field c, private field d) -> (field):
h = sha256packed([a, b, c, d])
field[2] h = sha256packed([a, b, c, d])
h[0] == 263561599766550617289250058199814760685
h[1] == 65303172752238645975888084098459749904
return 1


@ -2,5 +2,5 @@ def foo() -> (field, field):
return 21, 42
def main() -> (field):
a, b = foo()
field a, field b = foo()
return 1


@ -5,7 +5,7 @@ struct Bar {
struct Foo {
Bar a
bool b
bool b
}
def main() -> (Foo):


@ -5,5 +5,5 @@ def foo() -> (field, field):
return 1, 2
def main() -> (field):
a, field[3] b = foo()
field a, field[3] b = foo()
return 1


@ -3,5 +3,5 @@ def main(field a, field b) -> (field):
field y = if a + 2 == 4 || b * 2 == 2 then 1 else 0 fi
field z = if y == 1 || y == 0 then y else 1 fi
z == 1
return z
return z


@ -9,6 +9,7 @@ def main(field order) -> (field, field, field, field):
// LSB
field amount = 0
field exponent = 1
field bit = 0
for field i in 0..120 do
bit, order = popLeastSignificantBit(order)
amount = amount + (bit * exponent)


@ -43,20 +43,20 @@ def checkConstraints(field[3] amount, field[3] sourceToken, field[3] targetToken
field[3] sourceTokenPriceOrder = [0, 0, 0]
field[3] targetTokenPriceOrder = [0, 0, 0]
for field i in 0..3 do
sourceTokenPriceOrder[i] = priceToken[sourceToken[i]]
targetTokenPriceOrder[i] = priceToken[targetToken[i]]
endfor
// orders are only touched, if the limit price is below the calculated price:
// orders are only touched if the limit price is below the calculated price:
for field i in 0..3 do
1 == if volume[i] == 0 then 1 else limitLessThanPrice(sourceTokenPriceOrder[i], targetTokenPriceOrder[i], limit[i]) fi
endfor
// the amount of sell volume for a token equals its buy volume:
buyVolumeToken = tupleForTokensWithValue(0)
sellVolumeToken = tupleForTokensWithValue(0)
field[3] buyVolumeToken = tupleForTokensWithValue(0)
field[3] sellVolumeToken = tupleForTokensWithValue(0)
for field i in 0..3 do
buyVolumeToken = addVolumesForOrder(buyVolumeToken, targetToken[i], volume[i] * sourceTokenPriceOrder[i])
@ -66,9 +66,9 @@ def checkConstraints(field[3] amount, field[3] sourceToken, field[3] targetToken
buyVolumeToken == sellVolumeToken
// If an order σ ∈ Oi→j with a limit price p has a positive trading volume, then every order in Oi→j with a lower limit price should be completely fulfilled.
highestTouchedOrder = tupleForTokenPairsWithValue(0)
field[9] highestTouchedOrder = tupleForTokenPairsWithValue(0)
for field i in 0..3 do
for field i in 0..3 do
highestTouchedOrder = updateHighestTouchedOrder(highestTouchedOrder, sourceToken[i], targetToken[i], limit[i], volume[i])
endfor
@ -76,7 +76,7 @@ def checkConstraints(field[3] amount, field[3] sourceToken, field[3] targetToken
1 == verifyCompletelyFulfilledIfLimitLowerHighestTouchedOrder(highestTouchedOrder, amount[i], sourceToken[i], targetToken[i], limit[i], volume[i])
endfor
return 1 // Could return total volume to maximize for
return 1 // Could return the total volume to maximize for
def main(private field[3] encodedOrder, private field[3] bitmapOrder, private field[3] volume, private field[3] priceToken) -> (field):
// Remove orders that are not double signed
@ -88,8 +88,8 @@ def main(private field[3] encodedOrder, private field[3] bitmapOrder, private fi
field[3] limit = [0, 0, 0]
// Decode orders
for field i in 0..3 do
a, s, t, l = decodeOrder(encodedOrder[i])
for field i in 0..3 do
field a, field s, field t, field l = decodeOrder(encodedOrder[i])
amount[i] = a
sourceToken[i] = s
targetToken[i] = t


@ -4,7 +4,7 @@ def add(field a,field b) -> (field):
// Expected for inputs 1,1: c=4, d=7, e=10
def main(field a,field b) -> (field):
c = add(a*2+3*b-a,b-1)
d = add(a*b+2, a*b*c)
e = add(add(a,d),add(a,b))
field c = add(a*2+3*b-a,b-1)
field d = add(a*b+2, a*b*c)
field e = add(add(a,d),add(a,b))
return e


@ -2,5 +2,5 @@ def add(field f,field g) -> (field):
return f+g
def main(field a, field b) -> (field):
c = add(a,b)
field c = add(a,b)
return c


@ -4,7 +4,7 @@ def lt(field a,field b) -> (field):
def cutoff() -> (field):
return 31337
def getThing(index) -> (field):
def getThing(field index) -> (field):
field result = 3
result = if index == 0 then 13 else result fi
result = if index == 1 then 23 else result fi


@ -4,8 +4,8 @@ def add(field a, field b) -> (field):
def main(field a, field b,field c, field d) -> (field):
field g = a + b
x = add(a,b)
y = add(c,d)
field x = add(a,b)
field y = add(c,d)
g = add(x, g)
g = add(x, g)
field f = c + d + a


@ -6,6 +6,6 @@ def sub(field a, field b) -> (field):
return a-b
def main(field a, field b) -> (field):
c = add(a,b)
d = sub(a,b)
field c = add(a,b)
field d = sub(a,b)
return 0


@ -1,18 +1,19 @@
import "hashes/pedersen/512bit" as hash
import "ecc/edwardsCompress" as edwardsCompress
import "ecc/babyjubjubParams" as context
from "ecc/babyjubjubParams" import BabyJubJubParams
import "hashes/utils/256bitsDirectionHelper" as multiplex
import "utils/binary/not" as NOT
// Merke-Tree inclusion proof for tree depth 3 using SNARK efficient pedersen hashes
// Merkle-Tree inclusion proof for tree depth 3 using SNARK efficient pedersen hashes
// directionSelector=> 1/true if current digest is on the rhs of the hash
def main(field[256] rootDigest, private field[256] leafDigest, private field[3] directionSelector, field[256] PathDigest0, private field[256] PathDigest1, private field[256] PathDigest2) -> (field):
context = context()
BabyJubJubParams context = context()
//Setup
field[256] currentDigest = leafDigest
//Loop up the tree
preimage = multiplex(directionSelector[0], currentDigest, PathDigest0)
field[512] preimage = multiplex(directionSelector[0], currentDigest, PathDigest0)
currentDigest = hash(preimage)
preimage = multiplex(directionSelector[1], currentDigest, PathDigest1)
@ -23,5 +24,5 @@ def main(field[256] rootDigest, private field[256] leafDigest, private field[3]
rootDigest == currentDigest
return 1 //return true in success
return 1 //return true in success


@ -2,7 +2,7 @@ import "hashes/sha256/512bit" as sha256
import "utils/multiplexer/256bit" as multiplex
import "utils/binary/not" as NOT
// Merke-Tree inclusion proof for tree depth 3
// Merkle-Tree inclusion proof for tree depth 3
def main(field treeDepth, field[256] rootDigest, private field[256] leafDigest, private field[2] directionSelector, field[256] PathDigest0, private field[256] PathDigest1) -> (field):
@ -13,8 +13,8 @@ def main(field treeDepth, field[256] rootDigest, private field[256] leafDigest,
//Loop up the tree
currentDirection = directionSelector[0]
lhs = multiplex(currentDirection, currentDigest, PathDigest0)
rhs = multiplex(NOT(currentDirection), currentDigest, PathDigest0)
field[256] lhs = multiplex(currentDirection, currentDigest, PathDigest0)
field[256] rhs = multiplex(NOT(currentDirection), currentDigest, PathDigest0)
currentDigest = sha256(lhs, rhs)
counter = counter + 1
@ -28,4 +28,4 @@ def main(field treeDepth, field[256] rootDigest, private field[256] leafDigest,
counter == treeDepth
rootDigest == currentDigest
return 1 //return true in success
return 1 //return true in success


@ -1,5 +1,5 @@
import "./pedersenPathProof3"
// Merke-Tree inclusion proof for tree depth 3
// Merkle-Tree inclusion proof for tree depth 3
// def main(field treeDepth, field[256] rootDigest, private field[256] leafDigest, private field[2] directionSelector, field[256] PathDigest0, private field[256] PathDigest1) -> (field):
def main() -> (field):
@ -16,5 +16,5 @@ def main() -> (field):
field success = pedersenPathProof3(rootDigest, leafDigest, [0, 0, 0], PathDigest0, PathDigest1, PathDigest2)
return 1 //return true in success
return 1 //return true in success


@ -4,8 +4,8 @@ def main() -> (field):
field treeDepth = 3
field[256] rootDigest = [1 ,1 ,1 ,1 ,1 ,0 ,1 ,1 ,1 ,1 ,0 ,1 ,1 ,0 ,0 ,0 ,0 ,1 ,0 ,1 ,1 ,0 ,1 ,1 ,0 ,1 ,1 ,1 ,1 ,1 ,0 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,0 ,1 ,1 ,1 ,1 ,0 ,0 ,1 ,1 ,1 ,1 ,0 ,0 ,1 ,0 ,1 ,0 ,1 ,0 ,0 ,1 ,1 ,0 ,0 ,0 ,1 ,1 ,0 ,1 ,0 ,1 ,1 ,0 ,1 ,0 ,1 ,1 ,0 ,0 ,0 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,0 ,0 ,0 ,0 ,0 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,0 ,1 ,1 ,1 ,1 ,1 ,0 ,1 ,1 ,1 ,0 ,1 ,1 ,0 ,0 ,0 ,0 ,1 ,0 ,0 ,1 ,1 ,0 ,0 ,0 ,0 ,0 ,1 ,0 ,1 ,1 ,1 ,1 ,0 ,0 ,0 ,0 ,0 ,0 ,1 ,1 ,1 ,0 ,1 ,0 ,0 ,0 ,0 ,0 ,1 ,0 ,0 ,1 ,0 ,0 ,1 ,0 ,1 ,1 ,1 ,0 ,1 ,0 ,1 ,0 ,0 ,0 ,0 ,1 ,0 ,1 ,1 ,0 ,1 ,1 ,1 ,1 ,1 ,0 ,0 ,0 ,1 ,1 ,0 ,1 ,0 ,1 ,1 ,0 ,0 ,0 ,1 ,0 ,0 ,0 ,1 ,0 ,0 ,1 ,0 ,1 ,1 ,1 ,1 ,1 ,1 ,1 ,0 ,1 ,0 ,1 ,1 ,0 ,0 ,0 ,0 ,1 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,0 ,1 ,1 ,1 ,1 ,1 ,0 ,0 ,1 ,0 ,0 ,1 ,1 ,0 ,0 ,1 ,0 ,0 ,0 ,0 ,0 ,1 ,0 ,1 ,1 ,1 ,1 ,1 ,1 ,0]
field[256] leafDigest = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]
field[2] directionSelector = [0, 0]
field[256] PathDigest0 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]
field[2] directionSelector = [0, 0]
field[256] PathDigest0 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]
field[256] PathDigest1 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]
field out = merkleTreeProof(treeDepth,rootDigest,leafDigest,directionSelector,PathDigest0,PathDigest1)


@ -3,5 +3,5 @@ def foo(field a, field b) -> (field, field):
return a, b
def main() -> (field):
a, b = foo(1, 1)
field a, field b = foo(1, 1)
return a + b


@ -1,5 +1,5 @@
// we can compare numbers up to 2^(pbits - 2) - 1, ie any number which fits in (pbits - 2) bits
// lt should not work for the maxvalue = 2^(pbits - 2) - 1 augmented by one
// It should not work for the maxvalue = 2^(pbits - 2) - 1 augmented by one
// /!\ should be called with a = 0
def main(field a) -> (field):


@ -1,7 +0,0 @@
// ANDXORANDXORAND
import "utils/binary/xor" as XOR
import "utils/binary/and" as AND
def main(field a, field b, field c) -> (field):
return XOR(XOR(AND(a, b), AND(a, c)), AND(b, c))


@ -1,8 +0,0 @@
// ANDXORNOTAND
import "utils/binary/and" as AND
import "utils/binary/xor" as XOR
import "utils/binary/not" as NOT
def main(field a, field b, field c) -> (field):
return XOR(AND(a, b), AND(NOT(a), c))


@ -1,10 +0,0 @@
// FULLADD
import "./halfadd" as HALFADD
import "utils/binary/or" as OR
def main(field a, field b, field car) -> (field, field):
out1, car1 = HALFADD(a, b)
out2, car2 = HALFADD(out1, car)
car3 = OR(car1, car2)
return out2, car3


@ -1,7 +0,0 @@
// HALFADD
import "utils/binary/xor" as XOR
import "utils/binary/and" as AND
def main(field a, field b) -> (field, field):
return XOR(a, b), AND(a, b)


@ -1,11 +0,0 @@
// AND
import "utils/binary/and" as AND
def main(field[32] b, field[32] c) -> (field[32]):
field[32] result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for field i in 0..32 do
r = AND(b[i], c[i])
result[i] = r
endfor
return result


@ -1,11 +0,0 @@
// ANDXORANDXORAND
import "./../../binary/andxorandxorand" as ANDXORANDXORAND
def main(field[32] b, field[32] c, field[32] d) -> (field[32]):
field[32] result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for field i in 0..32 do
r = ANDXORANDXORAND(b[i], c[i], d[i])
result[i] = r
endfor
return result


@ -1,11 +0,0 @@
// ANDXORNOTAND
import "./../../binary/andxornotand" as ANDXORNOTAND
def main(field[32] b, field[32] c, field[32] d) -> (field[32]):
field[32] result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for field i in 0..32 do
r = ANDXORNOTAND(b[i], c[i], d[i])
result[i] = r
endfor
return result


@ -1,11 +0,0 @@
// NOT
import "utils/binary/not" as NOT
def main(field[32] b) -> (field[32]):
field[32] result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for field i in 0..32 do
r = NOT(b[i])
result[i] = r
endfor
return result


@ -1,11 +0,0 @@
// XOR
import "utils/binary/xor" as XOR
def main(field[32] b, field[32] c) -> (field[32]):
field[32] result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for field i in 0..32 do
r = XOR(b[i], c[i])
result[i] = r
endfor
return result


@ -1,13 +0,0 @@
// ADD
import "../../binary/fulladd" as FULLADD
def main(field[32] b, field[32] c) -> (field[32]):
field[33] car = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
field[32] d = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for field i in 0..32 do
d0, car0 = FULLADD(b[i], c[i], car[i])
d[i] = d0
car[i + 1] = car0
endfor
return d


@ -1,21 +0,0 @@
// AR17XAR19XAR10
import "../../bitwise/32/xor" as XOR
def RR17(field[32] b) -> (field[32]):
return [b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14]]
def RR19(field[32] b) -> (field[32]):
return [b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12]]
def RS10(field[32] b) -> (field[32]):
return [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21]]
def main(field[32] a) -> (field[32]):
u = RR17(a)
v = RR19(a)
w = RS10(a)
x = XOR(u, v)
z = XOR(w, x)
return z


@ -1,21 +0,0 @@
// AR2XAR13XAR22
import "../../bitwise/32/xor" as XOR
def RR2(field[32] b) -> (field[32]):
return [b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29]]
def RR13(field[32] b) -> (field[32]):
return [b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18]]
def RR22(field[32] b) -> (field[32]):
return [b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9]]
def main(field[32] a) -> (field[32]):
u = RR2(a)
v = RR13(a)
w = RR22(a)
x = XOR(u, v)
z = XOR(w, x)
return z


@ -1,20 +0,0 @@
// AR6XAR11XAR25
import "../../bitwise/32/xor" as XOR
def RR6(field[32] b) -> (field[32]):
return [b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25]]
def RR11(field[32] b) -> (field[32]):
return [b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20]]
def RR25(field[32] b) -> (field[32]):
return [b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6]]
def main(field[32] a) -> (field[32]):
u = RR6(a)
v = RR11(a)
w = RR25(a)
x = XOR(u, v)
z = XOR(w, x)
return z

View file

@ -1,20 +0,0 @@
// AR7XAR18XAR3
import "../../bitwise/32/xor" as XOR
def RR7(field[32] b) -> (field[32]):
return [b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24]]
def RR18(field[32] b) -> (field[32]):
return [b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13]]
def RS3(field[32] b) -> (field[32]):
return [0, 0, 0, b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28]]
def main(field[32] a) -> (field[32]):
u = RR7(a)
v = RR18(a)
w = RS3(a)
x = XOR(u, v)
z = XOR(w, x)
return z
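
These four permutation files are the SHA-256 Σ/σ functions written out as fixed index shuffles over bit arrays. On a native u32 word they collapse to the following Rust sketch, assuming the arrays are most-significant-bit first (which the RR17 index pattern suggests):

// SHA-256 sigma functions; rotate_right/shift replace the array shuffles.
fn big_sigma0(x: u32) -> u32 { x.rotate_right(2) ^ x.rotate_right(13) ^ x.rotate_right(22) }  // ar2xar13xar22
fn big_sigma1(x: u32) -> u32 { x.rotate_right(6) ^ x.rotate_right(11) ^ x.rotate_right(25) }  // ar6xar11xar25
fn small_sigma0(x: u32) -> u32 { x.rotate_right(7) ^ x.rotate_right(18) ^ (x >> 3) }          // ar7xar18xars3
fn small_sigma1(x: u32) -> u32 { x.rotate_right(17) ^ x.rotate_right(19) ^ (x >> 10) }        // ar17xar19xars10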

View file

@ -1,42 +0,0 @@
// COMPRESSION ROUND
import "./ar6xar11xar25" as AR6XAR11XAR25
import "./ar2xar13xar22" as AR2XAR13XAR22
import "../../bitwise/32/andxornotand" as ANDXORNOTAND
import "../../bitwise/32/andxorandxorand" as ANDXORANDXORAND
import "./add" as ADD2
def ADD5(field[32] a, field[32] b, field[32] c, field[32] d, field[32] e) -> (field[32]):
ab = ADD2(a, b)
cd = ADD2(c, d)
abcd = ADD2(ab, cd)
abcde = ADD2(abcd, e)
return abcde
def main(field[32] k, field[32] w, field[32] a, field[32] b, field[32] c, field[32] d, field[32] e, field[32] f, field[32] g, field[32] h) -> (field[32], field[32], field[32], field[32], field[32], field[32], field[32], field[32]):
// S1 := (e rightrotate 6) xor (e rightrotate 11) xor (e rightrotate 25)
SOne = AR6XAR11XAR25(e)
// ch := (e and f) xor ((not e) and g)
ch = ANDXORNOTAND(e, f, g)
// temp1 := h + S1 + ch + k[i] + w[i]
tempOne = ADD5(h, SOne, ch, k, w)
// S0 := (a rightrotate 2) xor (a rightrotate 13) xor (a rightrotate 22)
SZero = AR2XAR13XAR22(a)
// maj := (a and b) xor (a and c) xor (b and c)
maj = ANDXORANDXORAND(a, b, c)
// temp2 := S0 + maj
tempTwo = ADD2(SZero, maj)
// en := d + temp1
en = ADD2(d, tempOne)
// an := temp1 + temp2
an = ADD2(tempOne, tempTwo)
return an, a, b, c, en, e, f, g
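
The commented pseudocode above is the standard SHA-256 compression round. For reference, a compact native sketch in Rust, where wrapping_add plays the role of the 32-bit ADD helpers and ch/maj correspond to ANDXORNOTAND and ANDXORANDXORAND:

// One SHA-256 compression round over the state (a..h), returning the
// rotated state exactly as the ZoKrates main above returns it.
fn round(k: u32, w: u32, s: [u32; 8]) -> [u32; 8] {
    let [a, b, c, d, e, f, g, h] = s;
    let s1 = e.rotate_right(6) ^ e.rotate_right(11) ^ e.rotate_right(25);
    let ch = (e & f) ^ (!e & g);
    let temp1 = h.wrapping_add(s1).wrapping_add(ch).wrapping_add(k).wrapping_add(w);
    let s0 = a.rotate_right(2) ^ a.rotate_right(13) ^ a.rotate_right(22);
    let maj = (a & b) ^ (a & c) ^ (b & c);
    let temp2 = s0.wrapping_add(maj);
    [temp1.wrapping_add(temp2), a, b, c, d.wrapping_add(temp1), e, f, g]
}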

View file

@ -1,20 +0,0 @@
// EXTEND
import "./ar7xar18xars3" as AR7XAR18XAR3
import "./ar17xar19xars10" as AR17XAR19XAR10
import "./add" as ADD
def ADD(field[32] a, field[32] b, field[32] c, field[32] d) -> (field[32]):
ab = ADD(a, b)
cd = ADD(c, d)
abcd = ADD(ab, cd)
return abcd
def main(field[32] wm15, field[32] wm2, field[32] wm16, field[32] wm7) -> (field[32]):
// s0 := (w[i-15] rightrotate 7) xor (w[i-15] rightrotate 18) xor (w[i-15] rightshift 3)
szero = AR7XAR18XAR3(wm15)
// s1 := (w[i-2] rightrotate 17) xor (w[i-2] rightrotate 19) xor (w[i-2] rightshift 10)
sone = AR17XAR19XAR10(wm2)
// w[i] := w[i-16] + s0 + w[i-7] + s1
wfb = ADD(wm16, szero, wm7, sone)
return wfb
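
The same message-schedule step on native words, as a short Rust sketch (w holds the previously computed schedule words, i >= 16):

// w[i] := w[i-16] + s0(w[i-15]) + w[i-7] + s1(w[i-2])  (mod 2^32)
fn extend(w: &[u32], i: usize) -> u32 {
    let s0 = w[i - 15].rotate_right(7) ^ w[i - 15].rotate_right(18) ^ (w[i - 15] >> 3);
    let s1 = w[i - 2].rotate_right(17) ^ w[i - 2].rotate_right(19) ^ (w[i - 2] >> 10);
    w[i - 16].wrapping_add(s0).wrapping_add(w[i - 7]).wrapping_add(s1)
}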

View file

@ -18,8 +18,8 @@ def isWaldo(field a, field p, field q) -> (field):
// define all
def main(field a0, field a1, field a2, field a3, private field index, private field p, private field q) -> (field):
// prover provides the index of Waldo
field waldo = if index == 0 then a0 else 0 fi
waldo = waldo + if index == 1 then a1 else 0 fi
waldo = waldo + if index == 2 then a2 else 0 fi
field waldo = if index == 0 then a0 else 0 fi
waldo = waldo + if index == 1 then a1 else 0 fi
waldo = waldo + if index == 2 then a2 else 0 fi
waldo = waldo + if index == 3 then a3 else 0 fi
return isWaldo(waldo, p, q)
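
The if/else chain is the standard trick for reading an array at a private index: each branch contributes either the element or 0, so the sum equals a[index] without ever indexing by a secret value. The same multiplexer natively, as a sketch:

// 4-way select: exactly one term of the sum is non-zero.
fn select(a: [u64; 4], index: u64) -> u64 {
    (0..4u64).map(|i| if index == i { a[i as usize] } else { 0 }).sum()
}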

View file

@ -4,19 +4,22 @@
// @author Dennis Kuhnert <dennis.kuhnert@campus.tu-berlin.de>
// @date 2017
use bincode::{serialize_into, Infinite};
use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
use serde_json::Value;
use serde_json::{from_reader, to_writer_pretty, Value};
use std::env;
use std::fs::File;
use std::io::{stdin, BufReader, BufWriter, Read, Write};
use std::path::{Path, PathBuf};
use std::string::String;
use std::{env, io};
use zokrates_abi::Encode;
use zokrates_core::compile::compile;
use zokrates_core::compile::{compile, CompilationArtifacts, CompileError};
use zokrates_core::ir::{self, ProgEnum};
use zokrates_core::proof_system::*;
use zokrates_core::typed_absy::abi::Abi;
use zokrates_core::typed_absy::{types::Signature, Type};
use zokrates_field::{Bls12Field, Bn128Field, Field};
use zokrates_fs_resolver::resolve as fs_resolve;
use zokrates_fs_resolver::FileSystemResolver;
const CURVES: &[&str] = &["bn128", "bls12_381"];
#[cfg(feature = "libsnark")]
@ -31,19 +34,6 @@ fn main() {
})
}
fn resolve<'a>(
location: Option<String>,
source: &'a str,
) -> Result<(BufReader<File>, String, &'a str), io::Error> {
#[cfg(feature = "github")]
{
if is_github_import(source) {
return github_resolve(location, source);
};
}
fs_resolve(location, source)
}
fn cli_generate_proof<T: Field, P: ProofSystem<T>>(
program: ir::Prog<T>,
sub_matches: &ArgMatches,
@ -60,29 +50,48 @@ fn cli_generate_proof<T: Field, P: ProofSystem<T>>(
let witness = ir::Witness::read(witness_file)
.map_err(|why| format!("could not load witness: {:?}", why))?;
let pk_path = sub_matches.value_of("provingkey").unwrap();
let proof_path = sub_matches.value_of("proofpath").unwrap();
let pk_path = Path::new(sub_matches.value_of("provingkey").unwrap());
let proof_path = Path::new(sub_matches.value_of("proofpath").unwrap());
let pk_file = File::open(&pk_path)
.map_err(|why| format!("couldn't open {}: {}", pk_path.display(), why))?;
let mut pk: Vec<u8> = Vec::new();
let mut pk_reader = BufReader::new(pk_file);
pk_reader
.read_to_end(&mut pk)
.map_err(|why| format!("couldn't read {}: {}", pk_path.display(), why))?;
let proof = P::generate_proof(program, witness, pk);
let mut proof_file = File::create(proof_path).unwrap();
proof_file
.write(proof.as_ref())
.map_err(|why| format!("couldn't write to {}: {}", proof_path.display(), why))?;
println!("generate-proof successful: {}", format!("{}", proof));
println!(
"generate-proof successful: {:?}",
P::generate_proof(program, witness, pk_path, proof_path)
);
Ok(())
}
fn cli_export_verifier<T: Field, P: ProofSystem<T>>(
sub_matches: &ArgMatches,
) -> Result<(), String> {
let is_abiv2 = sub_matches.value_of("abi").unwrap() == "v2";
let is_abiv2 = sub_matches.value_of("solidity-abi").unwrap() == "v2";
println!("Exporting verifier...");
// read vk file
let input_path = Path::new(sub_matches.value_of("input").unwrap());
let input_file = File::open(&input_path)
.map_err(|why| format!("couldn't open {}: {}", input_path.display(), why))?;
let reader = BufReader::new(input_file);
let mut reader = BufReader::new(input_file);
let verifier = P::export_solidity_verifier(reader, is_abiv2);
let mut vk = String::new();
reader
.read_to_string(&mut vk)
.map_err(|why| format!("couldn't read {}: {}", input_path.display(), why))?;
let verifier = P::export_solidity_verifier(vk, is_abiv2);
//write output file
let output_path = Path::new(sub_matches.value_of("output").unwrap());
@ -110,11 +119,27 @@ fn cli_setup<T: Field, P: ProofSystem<T>>(
}
// get paths for proving and verification keys
let pk_path = sub_matches.value_of("proving-key-path").unwrap();
let vk_path = sub_matches.value_of("verification-key-path").unwrap();
let pk_path = Path::new(sub_matches.value_of("proving-key-path").unwrap());
let vk_path = Path::new(sub_matches.value_of("verification-key-path").unwrap());
// run setup phase
P::setup(program, pk_path, vk_path);
let keypair = P::setup(program);
// write verification key
let mut vk_file = File::create(vk_path)
.map_err(|why| format!("couldn't create {}: {}", vk_path.display(), why))?;
vk_file
.write(keypair.vk.as_ref())
.map_err(|why| format!("couldn't write to {}: {}", vk_path.display(), why))?;
// write proving key
let mut pk_file = File::create(pk_path)
.map_err(|why| format!("couldn't create {}: {}", pk_path.display(), why))?;
pk_file
.write(keypair.pk.as_ref())
.map_err(|why| format!("couldn't write to {}: {}", pk_path.display(), why))?;
println!("Setup completed.");
Ok(())
}
@ -127,8 +152,6 @@ fn cli_compute<T: Field>(ir_prog: ir::Prog<T>, sub_matches: &ArgMatches) -> Resu
println!("{}", ir_prog);
}
let signature = ir_prog.signature.clone();
let is_stdin = sub_matches.is_present("stdin");
let is_abi = sub_matches.is_present("abi");
@ -136,6 +159,22 @@ fn cli_compute<T: Field>(ir_prog: ir::Prog<T>, sub_matches: &ArgMatches) -> Resu
return Err("ABI input as inline argument is not supported. Please use `--stdin`.".into());
}
let signature = match is_abi {
true => {
let path = Path::new(sub_matches.value_of("abi_spec").unwrap());
let file = File::open(&path)
.map_err(|why| format!("couldn't open {}: {}", path.display(), why))?;
let mut reader = BufReader::new(file);
let abi: Abi = from_reader(&mut reader).map_err(|why| why.to_string())?;
abi.signature()
}
false => Signature::new()
.inputs(vec![Type::FieldElement; ir_prog.main.arguments.len()])
.outputs(vec![Type::FieldElement; ir_prog.main.returns.len()]),
};
use zokrates_abi::Inputs;
// get arguments
@ -195,7 +234,7 @@ fn cli_compute<T: Field>(ir_prog: ir::Prog<T>, sub_matches: &ArgMatches) -> Resu
let results_json_value: serde_json::Value =
zokrates_abi::CheckedValues::decode(witness.return_values(), signature.outputs).into();
println!("\nWitness: \n\n{}", results_json_value.to_string());
println!("\nWitness: \n\n{}", results_json_value);
// write witness to file
let output_path = Path::new(sub_matches.value_of("output").unwrap());
@ -215,37 +254,69 @@ fn cli_compile<T: Field>(sub_matches: &ArgMatches) -> Result<(), String> {
println!("Compiling {}\n", sub_matches.value_of("input").unwrap());
let path = PathBuf::from(sub_matches.value_of("input").unwrap());
let location = path
.parent()
.unwrap()
.to_path_buf()
.into_os_string()
.into_string()
.unwrap();
let light = sub_matches.occurrences_of("light") > 0;
let bin_output_path = Path::new(sub_matches.value_of("output").unwrap());
let abi_spec_path = Path::new(sub_matches.value_of("abi_spec").unwrap());
let hr_output_path = bin_output_path.to_path_buf().with_extension("ztf");
let file = File::open(path.clone()).unwrap();
let file = File::open(path.clone())
.map_err(|why| format!("Couldn't open input file {}: {}", path.display(), why))?;
let mut reader = BufReader::new(file);
let mut source = String::new();
reader.read_to_string(&mut source).unwrap();
let program_flattened: ir::Prog<T> = compile(&mut reader, Some(location), Some(resolve))
.map_err(|e| format!("Compilation failed:\n\n {}", e))?;
let fmt_error = |e: &CompileError| {
format!(
"{}:{}",
e.file()
.canonicalize()
.unwrap()
.strip_prefix(std::env::current_dir().unwrap())
.unwrap()
.display(),
e.value()
)
};
let resolver = FileSystemResolver::new();
let artifacts: CompilationArtifacts<T> =
compile(source, path, Some(&resolver)).map_err(|e| {
format!(
"Compilation failed:\n\n{}",
e.0.iter()
.map(|e| fmt_error(e))
.collect::<Vec<_>>()
.join("\n\n")
)
})?;
let program_flattened = artifacts.prog();
// number of constraints the flattened program will translate to.
let num_constraints = program_flattened.constraint_count();
// serialize flattened program and write to binary file
let bin_output_file = File::create(&bin_output_path)
.map_err(|why| format!("couldn't create {}: {}", bin_output_path.display(), why))?;
.map_err(|why| format!("Couldn't create {}: {}", bin_output_path.display(), why))?;
let mut writer = BufWriter::new(bin_output_file);
program_flattened.serialize(&mut writer);
serialize_into(&mut writer, &program_flattened, Infinite)
.map_err(|_| "Unable to write data to file.".to_string())?;
// serialize ABI spec and write to JSON file
let abi_spec_file = File::create(&abi_spec_path)
.map_err(|why| format!("Couldn't create {}: {}", abi_spec_path.display(), why))?;
let abi = artifacts.abi();
let mut writer = BufWriter::new(abi_spec_file);
to_writer_pretty(&mut writer, &abi).map_err(|_| "Unable to write data to file.".to_string())?;
if !light {
// write human-readable output file
@ -277,6 +348,7 @@ fn cli_compile<T: Field>(sub_matches: &ArgMatches) -> Result<(), String> {
fn cli() -> Result<(), String> {
const FLATTENED_CODE_DEFAULT_PATH: &str = "out";
const ABI_SPEC_DEFAULT_PATH: &str = "abi.json";
const VERIFICATION_KEY_DEFAULT_PATH: &str = "verification.key";
const PROVING_KEY_DEFAULT_PATH: &str = "proving.key";
const VERIFICATION_CONTRACT_DEFAULT_PATH: &str = "verifier.sol";
@ -301,10 +373,18 @@ fn cli() -> Result<(), String> {
.value_name("FILE")
.takes_value(true)
.required(true)
).arg(Arg::with_name("abi_spec")
.short("s")
.long("abi_spec")
.help("Path of the ABI specification")
.value_name("FILE")
.takes_value(true)
.required(false)
.default_value(ABI_SPEC_DEFAULT_PATH)
).arg(Arg::with_name("output")
.short("o")
.long("output")
.help("Path of the output file")
.help("Path of the output binary")
.value_name("FILE")
.takes_value(true)
.required(false)
@ -328,7 +408,7 @@ fn cli() -> Result<(), String> {
.arg(Arg::with_name("input")
.short("i")
.long("input")
.help("Path of compiled code")
.help("Path of the binary")
.value_name("FILE")
.takes_value(true)
.required(false)
@ -399,9 +479,9 @@ fn cli() -> Result<(), String> {
.required(false)
.possible_values(SCHEMES)
.default_value(&default_scheme)
).arg(Arg::with_name("abi")
).arg(Arg::with_name("solidity-abi")
.short("a")
.long("abi")
.long("solidity-abi")
.help("Flag for setting the version of the ABI Encoder used in the contract")
.takes_value(true)
.possible_values(&["v1", "v2"])
@ -414,11 +494,19 @@ fn cli() -> Result<(), String> {
.arg(Arg::with_name("input")
.short("i")
.long("input")
.help("Path of compiled code")
.help("Path of the binary")
.value_name("FILE")
.takes_value(true)
.required(false)
.default_value(FLATTENED_CODE_DEFAULT_PATH)
).arg(Arg::with_name("abi_spec")
.short("s")
.long("abi_spec")
.help("Path of the ABI specification")
.value_name("FILE")
.takes_value(true)
.required(false)
.default_value(ABI_SPEC_DEFAULT_PATH)
).arg(Arg::with_name("output")
.short("o")
.long("output")
@ -478,7 +566,7 @@ fn cli() -> Result<(), String> {
).arg(Arg::with_name("input")
.short("i")
.long("input")
.help("Path of compiled code")
.help("Path of the binary")
.value_name("FILE")
.takes_value(true)
.required(false)
@ -595,12 +683,11 @@ fn cli() -> Result<(), String> {
("generate-proof", Some(sub_matches)) => {
let proof_system = sub_matches.value_of("proving-scheme").unwrap();
// read compiled program
let path = Path::new(sub_matches.value_of("input").unwrap());
let file = File::open(&path)
.map_err(|why| format!("couldn't open {}: {}", path.display(), why))?;
let program_path = Path::new(sub_matches.value_of("input").unwrap());
let program_file = File::open(&program_path)
.map_err(|why| format!("couldn't open {}: {}", program_path.display(), why))?;
let mut reader = BufReader::new(file);
let mut reader = BufReader::new(program_file);
let prog = ProgEnum::deserialize(&mut reader).map_err(|_| "wrong file".to_string())?;
@ -672,12 +759,18 @@ mod tests {
#[test]
fn examples() {
for p in glob("./examples/**/*.zok").expect("Failed to read glob pattern") {
for p in glob("./examples/**/*").expect("Failed to read glob pattern") {
let path = match p {
Ok(x) => x,
Err(why) => panic!("Error: {:?}", why),
};
if !path.is_file() {
continue;
}
assert!(path.extension().expect("extension expected") == "zok");
if path.to_str().unwrap().contains("error") {
continue;
}
@ -687,23 +780,20 @@ mod tests {
let file = File::open(path.clone()).unwrap();
let mut reader = BufReader::new(file);
let location = path
.parent()
.unwrap()
.to_path_buf()
.into_os_string()
.into_string()
.unwrap();
let _: ir::Prog<Bn128Field> =
compile(&mut reader, Some(location), Some(resolve)).unwrap();
let mut source = String::new();
reader.read_to_string(&mut source).unwrap();
let resolver = FileSystemResolver::new();
let _: CompilationArtifacts<Bn128Field> =
compile(source, path, Some(&resolver)).unwrap();
}
}
#[test]
fn examples_with_input_success() {
//these examples should compile and run
for p in glob("./examples/test*.zok").expect("Failed to read glob pattern") {
for p in glob("./examples/test*").expect("Failed to read glob pattern") {
let path = match p {
Ok(x) => x,
Err(why) => panic!("Error: {:?}", why),
@ -712,20 +802,16 @@ mod tests {
let file = File::open(path.clone()).unwrap();
let location = path
.parent()
.unwrap()
.to_path_buf()
.into_os_string()
.into_string()
.unwrap();
let mut reader = BufReader::new(file);
let mut source = String::new();
reader.read_to_string(&mut source).unwrap();
let program_flattened: ir::Prog<Bn128Field> =
compile(&mut reader, Some(location), Some(resolve)).unwrap();
let resolver = FileSystemResolver::new();
let artifacts: CompilationArtifacts<Bn128Field> =
compile(source, path, Some(&resolver)).unwrap();
let _ = program_flattened
let _ = artifacts
.prog()
.execute(&vec![Bn128Field::from(0)])
.unwrap();
}
@ -734,29 +820,26 @@ mod tests {
#[test]
#[should_panic]
fn examples_with_input_failure() {
println!("something");
//these examples should compile but not run
for p in glob("./examples/runtime_errors/*.zok").expect("Failed to read glob pattern") {
let path = p.unwrap();
for p in glob("./examples/runtime_errors/*").expect("Failed to read glob pattern") {
let path = match p {
Ok(x) => x,
Err(why) => panic!("Error: {:?}", why),
};
println!("Testing {:?}", path);
let file = File::open(path.clone()).unwrap();
let location = path
.parent()
.unwrap()
.to_path_buf()
.into_os_string()
.into_string()
.unwrap();
let mut reader = BufReader::new(file);
let mut source = String::new();
reader.read_to_string(&mut source).unwrap();
let program_flattened: ir::Prog<Bn128Field> =
compile(&mut reader, Some(location), Some(resolve)).unwrap();
let resolver = FileSystemResolver::new();
let artifacts: CompilationArtifacts<Bn128Field> =
compile(source, path, Some(&resolver)).unwrap();
let _ = program_flattened
let _ = artifacts
.prog()
.execute(&vec![Bn128Field::from(0)])
.unwrap();
}

View file

@ -5,7 +5,7 @@ def main(private field[256] expected) -> (field):
field[256] a = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
field[256] b = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1]
field[256] IV = [0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1]
expected == sha256([a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7], a[8], a[9], a[10], a[11], a[12], a[13], a[14], a[15], a[16], a[17], a[18], a[19], a[20], a[21], a[22], a[23], a[24], a[25], a[26], a[27], a[28], a[29], a[30], a[31], a[32], a[33], a[34], a[35], a[36], a[37], a[38], a[39], a[40], a[41], a[42], a[43], a[44], a[45], a[46], a[47], a[48], a[49], a[50], a[51], a[52], a[53], a[54], a[55], a[56], a[57], a[58], a[59], a[60], a[61], a[62], a[63], a[64], a[65], a[66], a[67], a[68], a[69], a[70], a[71], a[72], a[73], a[74], a[75], a[76], a[77], a[78], a[79], a[80], a[81], a[82], a[83], a[84], a[85], a[86], a[87], a[88], a[89], a[90], a[91], a[92], a[93], a[94], a[95], a[96], a[97], a[98], a[99], a[100], a[101], a[102], a[103], a[104], a[105], a[106], a[107], a[108], a[109], a[110], a[111], a[112], a[113], a[114], a[115], a[116], a[117], a[118], a[119], a[120], a[121], a[122], a[123], a[124], a[125], a[126], a[127], a[128], a[129], a[130], a[131], a[132], a[133], a[134], a[135], a[136], a[137], a[138], a[139], a[140], a[141], a[142], a[143], a[144], a[145], a[146], a[147], a[148], a[149], a[150], a[151], a[152], a[153], a[154], a[155], a[156], a[157], a[158], a[159], a[160], a[161], a[162], a[163], a[164], a[165], a[166], a[167], a[168], a[169], a[170], a[171], a[172], a[173], a[174], a[175], a[176], a[177], a[178], a[179], a[180], a[181], a[182], a[183], a[184], a[185], a[186], a[187], a[188], a[189], a[190], a[191], a[192], a[193], a[194], a[195], a[196], a[197], a[198], a[199], a[200], a[201], a[202], a[203], a[204], a[205], a[206], a[207], a[208], a[209], a[210], a[211], a[212], a[213], a[214], a[215], a[216], a[217], a[218], a[219], a[220], a[221], a[222], a[223], a[224], a[225], a[226], a[227], a[228], a[229], a[230], a[231], a[232], a[233], a[234], a[235], a[236], a[237], a[238], a[239], a[240], a[241], a[242], a[243], a[244], a[245], a[246], a[247], a[248], a[249], a[250], a[251], a[252], a[253], a[254], a[255], b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7], b[8], b[9], b[10], b[11], b[12], b[13], b[14], b[15], b[16], b[17], b[18], b[19], b[20], b[21], b[22], b[23], b[24], b[25], b[26], b[27], b[28], b[29], b[30], b[31], b[32], b[33], b[34], b[35], b[36], b[37], b[38], b[39], b[40], b[41], b[42], b[43], b[44], b[45], b[46], b[47], b[48], b[49], b[50], b[51], b[52], b[53], b[54], b[55], b[56], b[57], b[58], b[59], b[60], b[61], b[62], b[63], b[64], b[65], b[66], b[67], b[68], b[69], b[70], b[71], b[72], b[73], b[74], b[75], b[76], b[77], b[78], b[79], b[80], b[81], b[82], b[83], b[84], b[85], b[86], b[87], b[88], b[89], b[90], b[91], b[92], b[93], b[94], b[95], b[96], b[97], b[98], b[99], b[100], b[101], b[102], b[103], b[104], b[105], b[106], b[107], b[108], b[109], b[110], b[111], b[112], b[113], b[114], b[115], b[116], b[117], b[118], b[119], b[120], b[121], b[122], b[123], b[124], b[125], b[126], b[127], b[128], b[129], b[130], b[131], b[132], b[133], b[134], b[135], b[136], b[137], b[138], b[139], b[140], b[141], b[142], b[143], b[144], b[145], b[146], b[147], b[148], b[149], b[150], b[151], b[152], b[153], b[154], b[155], b[156], b[157], b[158], b[159], b[160], b[161], b[162], b[163], b[164], b[165], b[166], b[167], b[168], b[169], b[170], b[171], b[172], b[173], b[174], b[175], b[176], b[177], b[178], b[179], b[180], b[181], b[182], b[183], b[184], b[185], b[186], b[187], b[188], b[189], b[190], b[191], b[192], b[193], b[194], b[195], b[196], b[197], b[198], b[199], b[200], b[201], b[202], b[203], b[204], b[205], b[206], b[207], b[208], b[209], b[210], b[211], b[212], 
b[213], b[214], b[215], b[216], b[217], b[218], b[219], b[220], b[221], b[222], b[223], b[224], b[225], b[226], b[227], b[228], b[229], b[230], b[231], b[232], b[233], b[234], b[235], b[236], b[237], b[238], b[239], b[240], b[241], b[242], b[243], b[244], b[245], b[246], b[247], b[248], b[249], b[250], b[251], b[252], b[253], b[254], b[255]], IV)
return 1

View file

@ -9,7 +9,7 @@
"author": "Paul Etscheit",
"license": "LGPL-3.0-only",
"dependencies": {
"solc": "^0.5.9",
"solc": "^0.6.1",
"web3": "^1.0.0"
}
}

View file

@ -31,8 +31,8 @@ let jsonInterface = JSON.parse(solc.compile(jsonContractSource));
let abi = jsonInterface.contracts[contractPath]["Verifier"].abi
let bytecode = jsonInterface.contracts[contractPath]["Verifier"].evm.bytecode
//There is a solc issue that for unknown reasons won't link the BN256G2 Library automatically for gm17 v1 and v2 contracts. It is unclear why this happens:
//the contracts compile and deploy without any issue on Remix. To fix this, the BN256G2 Library must be compiled and deployed by itself; after that,
//the library placeholder must be replaced with the library address in the contract's bytecode
if (format == "gm17") {
let library = await deployLibrary();

View file

@ -4,6 +4,7 @@ extern crate serde_json;
#[cfg(test)]
mod integration {
use assert_cli;
use serde_json::from_reader;
use std::fs;
use std::fs::File;
use std::io::{BufReader, Read};
@ -11,8 +12,7 @@ mod integration {
use std::path::Path;
use tempdir::TempDir;
use zokrates_abi::{parse_strict, Encode};
use zokrates_core::ir;
use zokrates_field::Bn128Field;
use zokrates_core::typed_absy::abi::Abi;
#[test]
#[ignore]
@ -60,6 +60,7 @@ mod integration {
let tmp_base = tmp_dir.path();
let test_case_path = tmp_base.join(program_name);
let flattened_path = tmp_base.join(program_name).join("out");
let abi_spec_path = tmp_base.join(program_name).join("abi.json");
let witness_path = tmp_base.join(program_name).join("witness");
let inline_witness_path = tmp_base.join(program_name).join("inline_witness");
let proof_path = tmp_base.join(program_name).join("proof.json");
@ -85,6 +86,8 @@ mod integration {
"compile",
"-i",
program_path.to_str().unwrap(),
"-s",
abi_spec_path.to_str().unwrap(),
"-o",
flattened_path.to_str().unwrap(),
"--light",
@ -95,40 +98,42 @@ mod integration {
// COMPUTE_WITNESS
// derive program signature from IR program representation
let file = File::open(&flattened_path)
.map_err(|why| format!("couldn't open {}: {}", flattened_path.display(), why))
.unwrap();
let mut reader = BufReader::new(file);
let ir_prog: ir::Prog<Bn128Field> = match ir::ProgEnum::deserialize(&mut reader).unwrap() {
ir::ProgEnum::Bn128Program(p) => p,
_ => unreachable!(),
};
let signature = ir_prog.signature.clone();
// run witness-computation for ABI-encoded inputs through stdin
let json_input_str = fs::read_to_string(inputs_path).unwrap();
let compute = vec![
"../target/release/zokrates",
"compute-witness",
"-i",
flattened_path.to_str().unwrap(),
"-s",
abi_spec_path.to_str().unwrap(),
"-o",
witness_path.to_str().unwrap(),
"--stdin",
"--abi",
];
// run witness-computation for ABI-encoded inputs through stdin
let json_input_str = fs::read_to_string(inputs_path).unwrap();
assert_cli::Assert::command(&compute)
.stdin(&json_input_str)
.succeeds()
.unwrap();
// run witness-computation for raw-encoded inputs (converted) with `-a <arguments>`
// First we need to convert our test input into raw field elements. We need the ABI spec for that
let file = File::open(&abi_spec_path)
.map_err(|why| format!("couldn't open {}: {}", abi_spec_path.display(), why))
.unwrap();
let mut reader = BufReader::new(file);
let abi: Abi = from_reader(&mut reader)
.map_err(|why| why.to_string())
.unwrap();
let signature = abi.signature().clone();
let inputs_abi: zokrates_abi::Inputs<zokrates_field::Bn128Field> =
parse_strict(&json_input_str, signature.inputs)
.map(|parsed| zokrates_abi::Inputs::Abi(parsed))

View file

@ -0,0 +1,9 @@
[package]
name = "zokrates_common"
version = "0.1.0"
authors = ["dark64 <darem966@gmail.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View file

@ -0,0 +1,9 @@
use std::path::PathBuf;
pub trait Resolver<E> {
fn resolve(
&self,
current_location: PathBuf,
import_location: PathBuf,
) -> Result<(String, PathBuf), E>;
}
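
For illustration, one way this trait might be implemented by a plain file-system resolver (a sketch only; the real FileSystemResolver lives in zokrates_fs_resolver and differs in details such as stdlib handling):

use std::fs;
use std::io;
use std::path::PathBuf;

struct SimpleFsResolver;

impl Resolver<io::Error> for SimpleFsResolver {
    fn resolve(
        &self,
        current_location: PathBuf,
        import_location: PathBuf,
    ) -> Result<(String, PathBuf), io::Error> {
        // Resolve the import relative to the directory of the importing file.
        let base = current_location
            .parent()
            .map(|p| p.to_path_buf())
            .unwrap_or_default();
        let path = base.join(import_location);
        let source = fs::read_to_string(&path)?;
        Ok((source, path))
    }
}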

View file

@ -1,6 +1,6 @@
[package]
name = "zokrates_core"
version = "0.4.0"
version = "0.4.2"
authors = ["Jacob Eberhardt <jacob.eberhardt@tu-berlin.de>", "Dennis Kuhnert <mail@kyroy.com>"]
repository = "https://github.com/JacobEberhardt/ZoKrates"
readme = "README.md"
@ -9,10 +9,10 @@ build = "build.rs"
[features]
default = []
libsnark = ["cc", "cmake", "git2"]
wasm = ["wasmi", "parity-wasm", "rustc-hex"]
wasm = ["bellman_ce/wasm", "zokrates_embed/wasm"]
multicore = ["bellman_ce/multicore"]
[dependencies]
libc = "0.2.0"
num = {version = "0.1.36", default-features = false}
num-bigint = {version = "0.1.36", default-features = false}
lazy_static = "1.4"
@ -25,21 +25,20 @@ serde_json = "1.0"
serde_bytes = "0.10"
bincode = "0.8.0"
regex = "0.2"
bellman_ce = "0.3"
pairing_ce = "0.18"
ff_ce = "0.7"
pairing_ce = "0.20"
ff_ce = "0.9"
zokrates_field = { version = "0.3.0", path = "../zokrates_field" }
zokrates_pest_ast = { version = "0.1.0", path = "../zokrates_pest_ast" }
zokrates_embed = { path = "../zokrates_embed" }
zokrates_common = { path = "../zokrates_common" }
rand = "0.4"
wasmi = { version = "=0.4.5", optional = true }
parity-wasm = { version = "0.35.3", optional = true }
rustc-hex = { version = "1.0", optional = true }
csv = "1"
bellman_ce = { version = "0.3.3", default-features = false}
[dev-dependencies]
glob = "0.2.11"
assert_cli = "0.5"
wasm-bindgen-test = "0.3.0"
[build-dependencies]
cc = { version = "1.0", features = ["parallel"], optional = true }

View file

@ -14,11 +14,11 @@ fn main() {
use std::path::PathBuf;
// fetch libsnark source
const LIBSNARK_URL: &'static str = "https://github.com/scipr-lab/libsnark.git";
const LIBSNARK_COMMIT: &'static str = "f7c87b88744ecfd008126d415494d9b34c4c1b20";
let libsnark_source_path = &PathBuf::from(env::var("OUT_DIR").unwrap()).join("LIBSNARK");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
let libsnark_source_path = &out_path.join("libsnark");
let repo = Repository::open(libsnark_source_path).unwrap_or_else(|_| {
remove_dir(libsnark_source_path).ok();
@ -36,9 +36,10 @@ fn main() {
}
// build libsnark
let libsnark = cmake::Config::new(libsnark_source_path)
.define("WITH_SUPERCOP", "OFF")
.define("WITH_PROCPS", "OFF")
.define("WITH_SUPERCOP", "OFF")
.define("CURVE", "ALT_BN128")
.define("USE_PT_COMPRESSION", "OFF")
.define("MONTGOMERY_OUTPUT", "ON")
@ -46,7 +47,6 @@ fn main() {
.build();
// build backends
cc::Build::new()
.cpp(true)
.debug(cfg!(debug_assertions))
@ -55,10 +55,11 @@ fn main() {
.include(libsnark_source_path.join("depends/libff"))
.include(libsnark_source_path.join("depends/libfqfft"))
.define("CURVE_ALT_BN128", None)
.file("lib/ffi.cpp")
.file("lib/util.cpp")
.file("lib/gm17.cpp")
.file("lib/pghr13.cpp")
.compile("libwraplibsnark.a");
.compile("libsnark_wrapper.a");
println!(
"cargo:rustc-link-search=native={}",

View file

@ -0,0 +1,6 @@
#include "ffi.hpp"
void __free(uint8_t* ptr)
{
free(ptr);
}

zokrates_core/lib/ffi.hpp Normal file
View file

@ -0,0 +1,43 @@
#pragma once
#include <cstdlib>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
struct buffer_t {
uint8_t* data;
int32_t length;
};
struct setup_result_t {
buffer_t vk;
buffer_t pk;
setup_result_t(buffer_t& vk_buf, buffer_t& pk_buf)
: vk(vk_buf)
, pk(pk_buf)
{
}
};
struct proof_result_t {
buffer_t proof;
proof_result_t(buffer_t& proof_buf)
: proof(proof_buf)
{
}
};
void __free(uint8_t* ptr);
#ifdef __cplusplus
} // extern "C"
#endif
static inline void __alloc(buffer_t* buffer, size_t length)
{
buffer->data = (uint8_t*)malloc(length);
buffer->length = length;
}
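
On the Rust side, these structs are mirrored with #[repr(C)] types (an assumed sketch, not the actual bindings); whatever __alloc hands out must eventually go back through __free, or the buffer leaks:

// Assumed Rust mirror of buffer_t; field order and types must match the
// C struct exactly for the layout to line up.
#[repr(C)]
pub struct Buffer {
    pub data: *mut u8,
    pub length: i32,
}

extern "C" {
    // Frees a buffer allocated by the C++ side with __alloc.
    pub fn __free(ptr: *mut u8);
}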

View file

@ -1,210 +1,210 @@
/**
* @file wraplibsnark.cpp
* @file gm17.cpp
* @author Jacob Eberhardt <jacob.eberhardt@tu-berlin.de>
* @author Dennis Kuhnert <dennis.kuhnert@campus.tu-berlin.de>
* @date 2017
*/
#include "util.hpp"
#include "gm17.hpp"
#include <fstream>
#include <iostream>
#include "util.hpp"
#include <cassert>
#include <iomanip>
#include <sstream>
#include <string>
// contains definition of alt_bn128 ec public parameters
#include "libff/algebra/curves/alt_bn128/alt_bn128_pp.hpp"
// contains required interfaces and types (keypair, proof, generator, prover, verifier)
#include <libsnark/zk_proof_systems/ppzksnark/r1cs_se_ppzksnark/r1cs_se_ppzksnark.hpp>
typedef long integer_coeff_t;
using namespace std;
using namespace libsnark;
using std::cout;
using std::endl;
namespace gm17 {
//takes input and puts it into constraint system
r1cs_se_ppzksnark_constraint_system<libff::alt_bn128_pp> createConstraintSystem(const uint8_t* A, const uint8_t* B, const uint8_t* C, int A_len, int B_len, int C_len, int constraints, int variables, int inputs)
r1cs_se_ppzksnark_constraint_system<libff::alt_bn128_pp> createConstraintSystem(const uint8_t* A, const uint8_t* B, const uint8_t* C, int32_t A_len, int32_t B_len, int32_t C_len, int32_t constraints, int32_t variables, int32_t inputs)
{
r1cs_se_ppzksnark_constraint_system<libff::alt_bn128_pp> cs;
cs.primary_input_size = inputs;
cs.auxiliary_input_size = variables - inputs - 1; // ~one not included
r1cs_se_ppzksnark_constraint_system<libff::alt_bn128_pp> cs;
cs.primary_input_size = inputs;
cs.auxiliary_input_size = variables - inputs - 1; // ~one not included
cout << "num variables: " << variables <<endl;
cout << "num constraints: " << constraints <<endl;
cout << "num inputs: " << inputs <<endl;
cout << "num variables: " << variables << endl;
cout << "num constraints: " << constraints << endl;
cout << "num inputs: " << inputs << endl;
struct VariableValueMapping {
int constraint_id;
int variable_id;
uint8_t variable_value[32];
};
const VariableValueMapping* A_vvmap = (VariableValueMapping*) A;
const VariableValueMapping* B_vvmap = (VariableValueMapping*) B;
const VariableValueMapping* C_vvmap = (VariableValueMapping*) C;
struct VariableValueMapping {
int constraint_id;
int variable_id;
uint8_t variable_value[32];
};
int A_id = 0;
int B_id = 0;
int C_id = 0;
const VariableValueMapping* A_vvmap = (VariableValueMapping*)A;
const VariableValueMapping* B_vvmap = (VariableValueMapping*)B;
const VariableValueMapping* C_vvmap = (VariableValueMapping*)C;
libff::alt_bn128_pp::init_public_params();
int A_id = 0;
int B_id = 0;
int C_id = 0;
for (int row = 0; row < constraints; row++) {
linear_combination<libff::Fr<libff::alt_bn128_pp> > lin_comb_A, lin_comb_B, lin_comb_C;
// initialize curve parameters
libff::alt_bn128_pp::init_public_params();
while (A_id < A_len && A_vvmap[A_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(A_vvmap[A_id].variable_value);
if (!value.is_zero())
lin_comb_A.add_term(A_vvmap[A_id].variable_id, value);
A_id++;
}
while (B_id < B_len && B_vvmap[B_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(B_vvmap[B_id].variable_value);
if (!value.is_zero())
lin_comb_B.add_term(B_vvmap[B_id].variable_id, value);
B_id++;
}
while (C_id < C_len && C_vvmap[C_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(C_vvmap[C_id].variable_value);
if (!value.is_zero())
lin_comb_C.add_term(C_vvmap[C_id].variable_id, value);
C_id++;
}
cs.add_constraint(r1cs_constraint<libff::Fr<libff::alt_bn128_pp> >(lin_comb_A, lin_comb_B, lin_comb_C));
}
return cs;
}
// keypair generateKeypair(constraints)
r1cs_se_ppzksnark_keypair<libff::alt_bn128_pp> generateKeypair(const r1cs_se_ppzksnark_constraint_system<libff::alt_bn128_pp> &cs){
// from r1cs_se_ppzksnark.hpp
return r1cs_se_ppzksnark_generator<libff::alt_bn128_pp>(cs);
}
void serializeProvingKeyToFile(r1cs_se_ppzksnark_proving_key<libff::alt_bn128_pp> pk, const char* pk_path){
writeToFile(pk_path, pk);
}
r1cs_se_ppzksnark_proving_key<libff::alt_bn128_pp> deserializeProvingKeyFromFile(const char* pk_path){
return loadFromFile<r1cs_se_ppzksnark_proving_key<libff::alt_bn128_pp>>(pk_path);
}
void serializeVerificationKeyToFile(r1cs_se_ppzksnark_verification_key<libff::alt_bn128_pp> vk, const char* vk_path){
std::stringstream ss;
unsigned queryLength = vk.query.size();
ss << "\t\tvk.h = " << outputPointG2AffineAsHex(vk.H) << endl;
ss << "\t\tvk.g_alpha = " << outputPointG1AffineAsHex(vk.G_alpha) << endl;
ss << "\t\tvk.h_beta = " << outputPointG2AffineAsHex(vk.H_beta) << endl;
ss << "\t\tvk.g_gamma = " << outputPointG1AffineAsHex(vk.G_gamma) << endl;
ss << "\t\tvk.h_gamma = " << outputPointG2AffineAsHex(vk.H_gamma) << endl;
ss << "\t\tvk.query.len() = " << queryLength << endl;
for (size_t i = 0; i < queryLength; ++i)
{
auto vkqueryi = outputPointG1AffineAsHex(vk.query[i]);
ss << "\t\tvk.query[" << i << "] = " << vkqueryi << endl;
}
std::ofstream fh;
fh.open(vk_path, std::ios::binary);
ss.rdbuf()->pubseekpos(0, std::ios_base::out);
fh << ss.rdbuf();
fh.flush();
fh.close();
}
void exportProof(r1cs_se_ppzksnark_proof<libff::alt_bn128_pp> proof, const char* proof_path, const uint8_t* public_inputs,
int public_inputs_length){
//create JSON file
std::stringstream ss;
ss << "{" << "\n";
ss << "\t\"proof\":" << "\n";
ss << "\t{" << "\n";
ss << "\t\t\"a\":" <<outputPointG1AffineAsHexJson(proof.A) << ",\n";
ss << "\t\t\"b\":" << "\n";
ss << "\t\t\t" << outputPointG2AffineAsHexJson(proof.B) << ",\n";
ss << "\t\t\n";
ss << "\t\t\"c\":" <<outputPointG1AffineAsHexJson(proof.C) << "\n";
ss << "\t}," << "\n";
//add input to json
ss << "\t\"inputs\":" << "[";
for (int i = 1; i < public_inputs_length; i++) {
if(i!=1){
ss << ",";
for (int row = 0; row < constraints; row++) {
linear_combination<libff::Fr<libff::alt_bn128_pp>> lin_comb_A, lin_comb_B, lin_comb_C;
while (A_id < A_len && A_vvmap[A_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(A_vvmap[A_id].variable_value);
if (!value.is_zero()) {
lin_comb_A.add_term(A_vvmap[A_id].variable_id, value);
}
A_id++;
}
ss << outputInputAsHex(libsnarkBigintFromBytes(public_inputs + i*32));
}
ss << "]" << "\n";
ss << "}" << "\n";
std::string s = ss.str();
//write json string to proof_path
writeToFile(proof_path, s);
while (B_id < B_len && B_vvmap[B_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(B_vvmap[B_id].variable_value);
if (!value.is_zero()) {
lin_comb_B.add_term(B_vvmap[B_id].variable_id, value);
}
B_id++;
}
while (C_id < C_len && C_vvmap[C_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(C_vvmap[C_id].variable_value);
if (!value.is_zero()) {
lin_comb_C.add_term(C_vvmap[C_id].variable_id, value);
}
C_id++;
}
cs.add_constraint(r1cs_constraint<libff::Fr<libff::alt_bn128_pp>>(lin_comb_A, lin_comb_B, lin_comb_C));
}
return cs;
}
}
bool _gm17_setup(const uint8_t* A, const uint8_t* B, const uint8_t* C, int A_len, int B_len, int C_len, int constraints, int variables, int inputs, const char* pk_path, const char* vk_path)
r1cs_se_ppzksnark_keypair<libff::alt_bn128_pp> generateKeypair(const r1cs_se_ppzksnark_constraint_system<libff::alt_bn128_pp>& cs)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
//initialize curve parameters
libff::alt_bn128_pp::init_public_params();
auto cs = gm17::createConstraintSystem(A, B, C, A_len, B_len, C_len, constraints, variables, inputs);
assert(cs.num_variables() >= (unsigned)inputs);
assert(cs.num_inputs() == (unsigned)inputs);
assert(cs.num_constraints() == (unsigned)constraints);
// create keypair
auto keypair = r1cs_se_ppzksnark_generator<libff::alt_bn128_pp>(cs);
// Export vk and pk to files
gm17::serializeProvingKeyToFile(keypair.pk, pk_path);
gm17::serializeVerificationKeyToFile(keypair.vk, vk_path);
return true;
return r1cs_se_ppzksnark_generator<libff::alt_bn128_pp>(cs); //from r1cs_se_ppzksnark.hpp
}
bool _gm17_generate_proof(const char* pk_path, const char* proof_path, const uint8_t* public_inputs, int public_inputs_length, const uint8_t* private_inputs, int private_inputs_length)
std::string serializeVerificationKey(r1cs_se_ppzksnark_verification_key<libff::alt_bn128_pp>* vk)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
std::stringstream ss;
unsigned queryLength = vk->query.size();
//initialize curve parameters
libff::alt_bn128_pp::init_public_params();
auto pk = gm17::deserializeProvingKeyFromFile(pk_path);
// assign variables based on witness values, excludes ~one
r1cs_variable_assignment<libff::Fr<libff::alt_bn128_pp> > full_variable_assignment;
for (int i = 1; i < public_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(public_inputs + i*32)));
}
for (int i = 0; i < private_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(private_inputs + i*32)));
}
// split up variables into primary and auxiliary inputs. Does *NOT* include the constant 1
// Public variables belong to primary input, private variables are auxiliary input.
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> primary_input(full_variable_assignment.begin(), full_variable_assignment.begin() + public_inputs_length-1);
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> auxiliary_input(full_variable_assignment.begin() + public_inputs_length-1, full_variable_assignment.end());
// for debugging
// cout << "full variable assignment:"<< endl << full_variable_assignment;
// cout << "primary input:"<< endl << primary_input;
// cout << "auxiliary input:"<< endl << auxiliary_input;
// Proof Generation
auto proof = r1cs_se_ppzksnark_prover<libff::alt_bn128_pp>(pk, primary_input, auxiliary_input);
gm17::exportProof(proof, proof_path, public_inputs, public_inputs_length);
return true;
ss << "vk.h = " << outputPointG2AffineAsHex(vk->H) << endl;
ss << "vk.g_alpha = " << outputPointG1AffineAsHex(vk->G_alpha) << endl;
ss << "vk.h_beta = " << outputPointG2AffineAsHex(vk->H_beta) << endl;
ss << "vk.g_gamma = " << outputPointG1AffineAsHex(vk->G_gamma) << endl;
ss << "vk.h_gamma = " << outputPointG2AffineAsHex(vk->H_gamma) << endl;
ss << "vk.query.len() = " << queryLength << endl;
for (size_t i = 0; i < queryLength; ++i) {
auto vk_query_i = outputPointG1AffineAsHex(vk->query[i]);
ss << "vk.query[" << i << "] = " << vk_query_i << endl;
}
return ss.str();
}
std::string serializeProof(r1cs_se_ppzksnark_proof<libff::alt_bn128_pp>* proof, const uint8_t* public_inputs, int32_t public_inputs_length)
{
std::stringstream ss;
ss << "{"
<< "\n";
ss << "\t\"proof\": {"
<< "\n";
ss << "\t\t\"a\": " << outputPointG1AffineAsHexJson(proof->A) << ",\n";
ss << "\t\t\"b\": " << outputPointG2AffineAsHexJson(proof->B) << ",\n";
ss << "\t\t\"c\": " << outputPointG1AffineAsHexJson(proof->C) << "\n";
ss << "\t},"
<< "\n";
ss << "\t\"inputs\": "
<< "[";
for (int i = 1; i < public_inputs_length; i++) {
if (i != 1) {
ss << ",";
}
ss << outputInputAsHex(libsnarkBigintFromBytes(public_inputs + i * 32));
}
ss << "]"
<< "\n";
ss << "}"
<< "\n";
std::string str = ss.str();
return str;
}
}
setup_result_t gm17_setup(const uint8_t* A, const uint8_t* B, const uint8_t* C, int32_t A_len, int32_t B_len, int32_t C_len, int32_t constraints, int32_t variables, int32_t inputs)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
// initialize curve parameters
libff::alt_bn128_pp::init_public_params();
auto cs = gm17::createConstraintSystem(A, B, C, A_len, B_len, C_len, constraints, variables, inputs);
assert(cs.num_variables() >= (unsigned)inputs);
assert(cs.num_inputs() == (unsigned)inputs);
assert(cs.num_constraints() == (unsigned)constraints);
// create keypair
auto keypair = r1cs_se_ppzksnark_generator<libff::alt_bn128_pp>(cs);
auto vk = gm17::serializeVerificationKey(&keypair.vk);
std::stringstream ss;
ss << keypair.pk;
std::string pk = ss.str();
buffer_t vk_buf, pk_buf;
__alloc(&vk_buf, vk.size());
__alloc(&pk_buf, pk.size());
vk.copy(reinterpret_cast<char*>(vk_buf.data), vk_buf.length);
pk.copy(reinterpret_cast<char*>(pk_buf.data), pk_buf.length);
setup_result_t result(vk_buf, pk_buf);
return result;
}
proof_result_t gm17_generate_proof(buffer_t* pk_buf, const uint8_t* public_inputs, int32_t public_inputs_length, const uint8_t* private_inputs, int32_t private_inputs_length)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
//initialize curve parameters
libff::alt_bn128_pp::init_public_params();
r1cs_se_ppzksnark_proving_key<libff::alt_bn128_pp> proving_key;
std::stringstream ss;
ss.write(reinterpret_cast<const char*>(pk_buf->data), pk_buf->length);
ss.rdbuf()->pubseekpos(0, std::ios_base::in);
ss >> proving_key;
// assign variables based on witness values, excludes ~one
r1cs_variable_assignment<libff::Fr<libff::alt_bn128_pp>> full_variable_assignment;
for (int i = 1; i < public_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(public_inputs + i * 32)));
}
for (int i = 0; i < private_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(private_inputs + i * 32)));
}
// split up variables into primary and auxiliary inputs. Does *NOT* include the constant 1
// Public variables belong to primary input, private variables are auxiliary input.
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> primary_input(full_variable_assignment.begin(), full_variable_assignment.begin() + public_inputs_length - 1);
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> auxiliary_input(full_variable_assignment.begin() + public_inputs_length - 1, full_variable_assignment.end());
// for debugging
// cout << "full variable assignment:" << endl << full_variable_assignment;
// cout << "primary input:" << endl << primary_input;
// cout << "auxiliary input:" << endl << auxiliary_input;
// Proof Generation
auto proof = r1cs_se_ppzksnark_prover<libff::alt_bn128_pp>(proving_key, primary_input, auxiliary_input);
auto proof_json = gm17::serializeProof(&proof, public_inputs, public_inputs_length);
buffer_t proof_buf;
__alloc(&proof_buf, proof_json.size());
proof_json.copy(reinterpret_cast<char*>(proof_buf.data), proof_buf.length);
proof_result_t result(proof_buf);
return result;
}

View file

@ -11,29 +11,25 @@
extern "C" {
#endif
#include <stdbool.h>
#include <stdint.h>
#include "ffi.hpp"
bool _gm17_setup(const uint8_t* A,
const uint8_t* B,
const uint8_t* C,
int A_len,
int B_len,
int C_len,
int constraints,
int variables,
int inputs,
const char* pk_path,
const char* vk_path
);
setup_result_t gm17_setup(
const uint8_t* A,
const uint8_t* B,
const uint8_t* C,
int32_t A_len,
int32_t B_len,
int32_t C_len,
int32_t constraints,
int32_t variables,
int32_t inputs);
bool _gm17_generate_proof(const char* pk_path,
const char* proof_path,
const uint8_t* public_inputs,
int public_inputs_length,
const uint8_t* private_inputs,
int private_inputs_length
);
proof_result_t gm17_generate_proof(
buffer_t* pk_buf,
const uint8_t* public_inputs,
int32_t public_inputs_length,
const uint8_t* private_inputs,
int32_t private_inputs_length);
#ifdef __cplusplus
} // extern "C"
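
A hypothetical Rust-side declaration of these entry points, reusing the Buffer sketch from above (illustrative only; the real bindings live in zokrates_core's proof_system module):

#[repr(C)]
pub struct SetupResult {
    pub vk: Buffer,
    pub pk: Buffer,
}

#[repr(C)]
pub struct ProofResult {
    pub proof: Buffer,
}

extern "C" {
    fn gm17_setup(
        a: *const u8, b: *const u8, c: *const u8,
        a_len: i32, b_len: i32, c_len: i32,
        constraints: i32, variables: i32, inputs: i32,
    ) -> SetupResult;

    fn gm17_generate_proof(
        pk_buf: *mut Buffer,
        public_inputs: *const u8, public_inputs_length: i32,
        private_inputs: *const u8, private_inputs_length: i32,
    ) -> ProofResult;
}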

View file

@ -1,217 +1,220 @@
/**
* @file wraplibsnark.cpp
* @file pghr13.cpp
* @author Jacob Eberhardt <jacob.eberhardt@tu-berlin.de>
* @author Dennis Kuhnert <dennis.kuhnert@campus.tu-berlin.de>
* @date 2017
*/
#include "util.hpp"
#include "pghr13.hpp"
#include <fstream>
#include <iostream>
#include "util.hpp"
#include <cassert>
#include <iomanip>
#include <sstream>
#include <string>
// contains definition of alt_bn128 ec public parameters
// contains definitions of alt_bn128 ec public parameters
#include "libff/algebra/curves/alt_bn128/alt_bn128_pp.hpp"
// contains required interfaces and types (keypair, proof, generator, prover, verifier)
#include <libsnark/zk_proof_systems/ppzksnark/r1cs_ppzksnark/r1cs_ppzksnark.hpp>
typedef long integer_coeff_t;
using namespace std;
using namespace libsnark;
using std::cout;
using std::endl;
namespace pghr13 {
//takes input and puts it into constraint system
r1cs_ppzksnark_constraint_system<libff::alt_bn128_pp> createConstraintSystem(const uint8_t* A, const uint8_t* B, const uint8_t* C, int A_len, int B_len, int C_len, int constraints, int variables, int inputs)
{
r1cs_ppzksnark_constraint_system<libff::alt_bn128_pp> cs;
cs.primary_input_size = inputs;
cs.auxiliary_input_size = variables - inputs - 1; // ~one not included
r1cs_ppzksnark_constraint_system<libff::alt_bn128_pp> cs;
cs.primary_input_size = inputs;
cs.auxiliary_input_size = variables - inputs - 1; // ~one not included
cout << "num variables: " << variables <<endl;
cout << "num constraints: " << constraints <<endl;
cout << "num inputs: " << inputs <<endl;
cout << "num variables: " << variables << endl;
cout << "num constraints: " << constraints << endl;
cout << "num inputs: " << inputs << endl;
struct VariableValueMapping {
int constraint_id;
int variable_id;
uint8_t variable_value[32];
};
const VariableValueMapping* A_vvmap = (VariableValueMapping*) A;
const VariableValueMapping* B_vvmap = (VariableValueMapping*) B;
const VariableValueMapping* C_vvmap = (VariableValueMapping*) C;
struct VariableValueMapping {
int constraint_id;
int variable_id;
uint8_t variable_value[32];
};
int A_id = 0;
int B_id = 0;
int C_id = 0;
const VariableValueMapping* A_vvmap = (VariableValueMapping*)A;
const VariableValueMapping* B_vvmap = (VariableValueMapping*)B;
const VariableValueMapping* C_vvmap = (VariableValueMapping*)C;
libff::alt_bn128_pp::init_public_params();
int A_id = 0;
int B_id = 0;
int C_id = 0;
for (int row = 0; row < constraints; row++) {
linear_combination<libff::Fr<libff::alt_bn128_pp> > lin_comb_A, lin_comb_B, lin_comb_C;
// initialize curve parameters
libff::alt_bn128_pp::init_public_params();
while (A_id < A_len && A_vvmap[A_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(A_vvmap[A_id].variable_value);
if (!value.is_zero())
lin_comb_A.add_term(A_vvmap[A_id].variable_id, value);
A_id++;
for (int row = 0; row < constraints; row++) {
linear_combination<libff::Fr<libff::alt_bn128_pp>> lin_comb_A, lin_comb_B, lin_comb_C;
while (A_id < A_len && A_vvmap[A_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(A_vvmap[A_id].variable_value);
if (!value.is_zero()) {
lin_comb_A.add_term(A_vvmap[A_id].variable_id, value);
}
A_id++;
}
while (B_id < B_len && B_vvmap[B_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(B_vvmap[B_id].variable_value);
if (!value.is_zero()) {
lin_comb_B.add_term(B_vvmap[B_id].variable_id, value);
}
B_id++;
}
while (C_id < C_len && C_vvmap[C_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(C_vvmap[C_id].variable_value);
if (!value.is_zero()) {
lin_comb_C.add_term(C_vvmap[C_id].variable_id, value);
}
C_id++;
}
cs.add_constraint(r1cs_constraint<libff::Fr<libff::alt_bn128_pp>>(lin_comb_A, lin_comb_B, lin_comb_C));
}
while (B_id < B_len && B_vvmap[B_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(B_vvmap[B_id].variable_value);
if (!value.is_zero())
lin_comb_B.add_term(B_vvmap[B_id].variable_id, value);
B_id++;
}
while (C_id < C_len && C_vvmap[C_id].constraint_id == row) {
libff::bigint<libff::alt_bn128_r_limbs> value = libsnarkBigintFromBytes(C_vvmap[C_id].variable_value);
if (!value.is_zero())
lin_comb_C.add_term(C_vvmap[C_id].variable_id, value);
C_id++;
}
cs.add_constraint(r1cs_constraint<libff::Fr<libff::alt_bn128_pp> >(lin_comb_A, lin_comb_B, lin_comb_C));
}
return cs;
return cs;
}
// keypair generateKeypair(constraints)
r1cs_ppzksnark_keypair<libff::alt_bn128_pp> generateKeypair(const r1cs_ppzksnark_constraint_system<libff::alt_bn128_pp> &cs){
// from r1cs_ppzksnark.hpp
return r1cs_ppzksnark_generator<libff::alt_bn128_pp>(cs);
}
void serializeProvingKeyToFile(r1cs_ppzksnark_proving_key<libff::alt_bn128_pp> pk, const char* pk_path){
writeToFile(pk_path, pk);
}
r1cs_ppzksnark_proving_key<libff::alt_bn128_pp> deserializeProvingKeyFromFile(const char* pk_path){
return loadFromFile<r1cs_ppzksnark_proving_key<libff::alt_bn128_pp>>(pk_path);
}
void serializeVerificationKeyToFile(r1cs_ppzksnark_verification_key<libff::alt_bn128_pp> vk, const char* vk_path){
std::stringstream ss;
unsigned icLength = vk.encoded_IC_query.rest.indices.size() + 1;
ss << "\t\tvk.a = " << outputPointG2AffineAsHex(vk.alphaA_g2) << endl;
ss << "\t\tvk.b = " << outputPointG1AffineAsHex(vk.alphaB_g1) << endl;
ss << "\t\tvk.c = " << outputPointG2AffineAsHex(vk.alphaC_g2) << endl;
ss << "\t\tvk.gamma = " << outputPointG2AffineAsHex(vk.gamma_g2) << endl;
ss << "\t\tvk.gamma_beta_1 = " << outputPointG1AffineAsHex(vk.gamma_beta_g1) << endl;
ss << "\t\tvk.gamma_beta_2 = " << outputPointG2AffineAsHex(vk.gamma_beta_g2) << endl;
ss << "\t\tvk.z = " << outputPointG2AffineAsHex(vk.rC_Z_g2) << endl;
ss << "\t\tvk.ic.len() = " << icLength << endl;
ss << "\t\tvk.ic[0] = " << outputPointG1AffineAsHex(vk.encoded_IC_query.first) << endl;
for (size_t i = 1; i < icLength; ++i)
{
auto vkICi = outputPointG1AffineAsHex(vk.encoded_IC_query.rest.values[i - 1]);
ss << "\t\tvk.IC[" << i << "] = " << vkICi << endl;
}
std::ofstream fh;
fh.open(vk_path, std::ios::binary);
ss.rdbuf()->pubseekpos(0, std::ios_base::out);
fh << ss.rdbuf();
fh.flush();
fh.close();
}
void exportProof(r1cs_ppzksnark_proof<libff::alt_bn128_pp> proof, const char* proof_path, const uint8_t* public_inputs,
int public_inputs_length){
//create JSON file
std::stringstream ss;
ss << "{" << "\n";
ss << "\t\"proof\":" << "\n";
ss << "\t{" << "\n";
ss << "\t\t\"a\":" <<outputPointG1AffineAsHexJson(proof.g_A.g) << ",\n";
ss << "\t\t\"a_p\":" <<outputPointG1AffineAsHexJson(proof.g_A.h) << ",\n";
ss << "\t\t\"b\":" << "\n";
ss << "\t\t\t" << outputPointG2AffineAsHexJson(proof.g_B.g) << ",\n";
ss << "\t\t\n";
ss << "\t\t\"b_p\":" <<outputPointG1AffineAsHexJson(proof.g_B.h) << ",\n";
ss << "\t\t\"c\":" <<outputPointG1AffineAsHexJson(proof.g_C.g) << ",\n";
ss << "\t\t\"c_p\":" <<outputPointG1AffineAsHexJson(proof.g_C.h) << ",\n";
ss << "\t\t\"h\":" <<outputPointG1AffineAsHexJson(proof.g_H) << ",\n";
ss << "\t\t\"k\":" <<outputPointG1AffineAsHexJson(proof.g_K) << "\n";
ss << "\t}," << "\n";
//add input to json
ss << "\t\"inputs\":" << "[";
for (int i = 1; i < public_inputs_length; i++) {
if(i!=1){
ss << ",";
}
ss << outputInputAsHex(libsnarkBigintFromBytes(public_inputs + i*32));
}
ss << "]" << "\n";
ss << "}" << "\n";
std::string s = ss.str();
//write json string to proof_path
writeToFile(proof_path, s);
}
}
bool _pghr13_setup(const uint8_t* A, const uint8_t* B, const uint8_t* C, int A_len, int B_len, int C_len, int constraints, int variables, int inputs, const char* pk_path, const char* vk_path)
r1cs_ppzksnark_keypair<libff::alt_bn128_pp> generateKeypair(const r1cs_ppzksnark_constraint_system<libff::alt_bn128_pp>& cs)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
//initialize curve parameters
libff::alt_bn128_pp::init_public_params();
auto cs = pghr13::createConstraintSystem(A, B, C, A_len, B_len, C_len, constraints, variables, inputs);
assert(cs.num_variables() >= (unsigned)inputs);
assert(cs.num_inputs() == (unsigned)inputs);
assert(cs.num_constraints() == (unsigned)constraints);
// create keypair
auto keypair = r1cs_ppzksnark_generator<libff::alt_bn128_pp>(cs);
// Export vk and pk to files
pghr13::serializeProvingKeyToFile(keypair.pk, pk_path);
pghr13::serializeVerificationKeyToFile(keypair.vk, vk_path);
return true;
return r1cs_ppzksnark_generator<libff::alt_bn128_pp>(cs); // from r1cs_ppzksnark.hpp
}
bool _pghr13_generate_proof(const char* pk_path, const char* proof_path, const uint8_t* public_inputs, int public_inputs_length, const uint8_t* private_inputs, int private_inputs_length)
std::string serializeVerificationKey(r1cs_ppzksnark_verification_key<libff::alt_bn128_pp>* vk)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
std::stringstream ss;
unsigned icLength = vk->encoded_IC_query.rest.indices.size() + 1;
//initialize curve parameters
libff::alt_bn128_pp::init_public_params();
auto pk = pghr13::deserializeProvingKeyFromFile(pk_path);
// assign variables based on witness values, excludes ~one
r1cs_variable_assignment<libff::Fr<libff::alt_bn128_pp> > full_variable_assignment;
for (int i = 1; i < public_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(public_inputs + i*32)));
}
for (int i = 0; i < private_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(private_inputs + i*32)));
}
// split up variables into primary and auxiliary inputs. Does *NOT* include the constant 1
// Public variables belong to primary input, private variables are auxiliary input.
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> primary_input(full_variable_assignment.begin(), full_variable_assignment.begin() + public_inputs_length-1);
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> auxiliary_input(full_variable_assignment.begin() + public_inputs_length-1, full_variable_assignment.end());
// for debugging
// cout << "full variable assignment:"<< endl << full_variable_assignment;
// cout << "primary input:"<< endl << primary_input;
// cout << "auxiliary input:"<< endl << auxiliary_input;
// Proof Generation
auto proof = r1cs_ppzksnark_prover<libff::alt_bn128_pp>(pk, primary_input, auxiliary_input);
pghr13::exportProof(proof, proof_path, public_inputs, public_inputs_length);
return true;
ss << "vk.a = " << outputPointG2AffineAsHex(vk->alphaA_g2) << endl;
ss << "vk.b = " << outputPointG1AffineAsHex(vk->alphaB_g1) << endl;
ss << "vk.c = " << outputPointG2AffineAsHex(vk->alphaC_g2) << endl;
ss << "vk.gamma = " << outputPointG2AffineAsHex(vk->gamma_g2) << endl;
ss << "vk.gamma_beta_1 = " << outputPointG1AffineAsHex(vk->gamma_beta_g1) << endl;
ss << "vk.gamma_beta_2 = " << outputPointG2AffineAsHex(vk->gamma_beta_g2) << endl;
ss << "vk.z = " << outputPointG2AffineAsHex(vk->rC_Z_g2) << endl;
ss << "vk.ic.len() = " << icLength << endl;
ss << "vk.ic[0] = " << outputPointG1AffineAsHex(vk->encoded_IC_query.first) << endl;
for (size_t i = 1; i < icLength; ++i) {
auto vk_ic_i = outputPointG1AffineAsHex(vk->encoded_IC_query.rest.values[i - 1]);
ss << "vk.ic[" << i << "] = " << vk_ic_i << endl;
}
std::string str = ss.str();
return str;
}
std::string serializeProof(r1cs_ppzksnark_proof<libff::alt_bn128_pp>* proof, const uint8_t* public_inputs, int public_inputs_length)
{
std::stringstream ss;
ss << "{"
<< "\n";
ss << "\t\"proof\": {"
<< "\n";
ss << "\t\t\"a\": " << outputPointG1AffineAsHexJson(proof->g_A.g) << ",\n";
ss << "\t\t\"a_p\": " << outputPointG1AffineAsHexJson(proof->g_A.h) << ",\n";
ss << "\t\t\"b\": " << outputPointG2AffineAsHexJson(proof->g_B.g) << ",\n";
ss << "\t\t\"b_p\": " << outputPointG1AffineAsHexJson(proof->g_B.h) << ",\n";
ss << "\t\t\"c\": " << outputPointG1AffineAsHexJson(proof->g_C.g) << ",\n";
ss << "\t\t\"c_p\": " << outputPointG1AffineAsHexJson(proof->g_C.h) << ",\n";
ss << "\t\t\"h\": " << outputPointG1AffineAsHexJson(proof->g_H) << ",\n";
ss << "\t\t\"k\": " << outputPointG1AffineAsHexJson(proof->g_K) << "\n";
ss << "\t},"
<< "\n";
ss << "\t\"inputs\": "
<< "[";
for (int i = 1; i < public_inputs_length; i++) {
if (i != 1) {
ss << ",";
}
ss << outputInputAsHex(libsnarkBigintFromBytes(public_inputs + i * 32));
}
ss << "]"
<< "\n";
ss << "}"
<< "\n";
std::string str = ss.str();
return str;
}
}
setup_result_t pghr13_setup(const uint8_t* A, const uint8_t* B, const uint8_t* C, int32_t A_len, int32_t B_len, int32_t C_len, int32_t constraints, int32_t variables, int32_t inputs)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
// initialize curve parameters
libff::alt_bn128_pp::init_public_params();
auto cs = pghr13::createConstraintSystem(A, B, C, A_len, B_len, C_len, constraints, variables, inputs);
assert(cs.num_variables() >= (unsigned)inputs);
assert(cs.num_inputs() == (unsigned)inputs);
assert(cs.num_constraints() == (unsigned)constraints);
// create keypair
auto keypair = r1cs_ppzksnark_generator<libff::alt_bn128_pp>(cs);
auto vk = pghr13::serializeVerificationKey(&keypair.vk);
std::stringstream ss;
ss << keypair.pk;
std::string pk = ss.str();
buffer_t vk_buf, pk_buf;
__alloc(&vk_buf, vk.size());
__alloc(&pk_buf, pk.size());
vk.copy(reinterpret_cast<char*>(vk_buf.data), vk_buf.length);
pk.copy(reinterpret_cast<char*>(pk_buf.data), pk_buf.length);
setup_result_t result(vk_buf, pk_buf);
return result;
}
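
The setup now hands the keys back through raw `buffer_t`s instead of writing files. A minimal sketch of what a receiving FFI side might look like, assuming a `{ data, length }` layout for `buffer_t` — the exact field types are an assumption, not confirmed by this diff:

```rust
// Hypothetical Rust mirror of the C `buffer_t` used above, assuming the
// layout { uint8_t* data; uint64_t length; }. Field types are an assumption.
#[repr(C)]
pub struct Buffer {
    pub data: *mut u8,
    pub length: u64,
}

impl Buffer {
    /// Copy the buffer contents into an owned Vec on the Rust side.
    /// Safety: `data` must point to `length` valid, initialized bytes.
    pub unsafe fn to_vec(&self) -> Vec<u8> {
        std::slice::from_raw_parts(self.data, self.length as usize).to_vec()
    }
}
```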
proof_result_t pghr13_generate_proof(buffer_t* pk_buf, const uint8_t* public_inputs, int32_t public_inputs_length, const uint8_t* private_inputs, int32_t private_inputs_length)
{
libff::inhibit_profiling_info = true;
libff::inhibit_profiling_counters = true;
// initialize curve parameters
libff::alt_bn128_pp::init_public_params();
r1cs_ppzksnark_proving_key<libff::alt_bn128_pp> proving_key;
std::stringstream ss;
ss.write(reinterpret_cast<const char*>(pk_buf->data), pk_buf->length);
ss.rdbuf()->pubseekpos(0, std::ios_base::in);
ss >> proving_key;
// assign variables based on witness values, excludes ~one
r1cs_variable_assignment<libff::Fr<libff::alt_bn128_pp>> full_variable_assignment;
for (int i = 1; i < public_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(public_inputs + i * 32)));
}
for (int i = 0; i < private_inputs_length; i++) {
full_variable_assignment.push_back(libff::Fr<libff::alt_bn128_pp>(libsnarkBigintFromBytes(private_inputs + i * 32)));
}
// split up variables into primary and auxiliary inputs. Does *NOT* include the constant 1
// Public variables belong to primary input, private variables are auxiliary input.
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> primary_input(full_variable_assignment.begin(), full_variable_assignment.begin() + public_inputs_length - 1);
r1cs_primary_input<libff::Fr<libff::alt_bn128_pp>> auxiliary_input(full_variable_assignment.begin() + public_inputs_length - 1, full_variable_assignment.end());
// for debugging
// cout << "full variable assignment:" << endl << full_variable_assignment;
// cout << "primary input:" << endl << primary_input;
// cout << "auxiliary input:" << endl << auxiliary_input;
// Proof Generation
auto proof = r1cs_ppzksnark_prover<libff::alt_bn128_pp>(proving_key, primary_input, auxiliary_input);
auto proof_json = pghr13::serializeProof(&proof, public_inputs, public_inputs_length);
buffer_t proof_buf;
__alloc(&proof_buf, proof_json.size());
proof_json.copy(reinterpret_cast<char*>(proof_buf.data), proof_buf.length);
proof_result_t result(proof_buf);
return result;
}
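
For clarity, a minimal sketch of the primary/auxiliary split performed above, assuming the witness is laid out as public inputs (without the constant `~one`) followed by private inputs; names are illustrative only:

```rust
// Public variables form the primary input, private variables the auxiliary
// input. The constant ~one is excluded, which is why the C++ above splits at
// `public_inputs_length - 1`.
fn split_witness<F: Clone>(full: &[F], num_public: usize) -> (Vec<F>, Vec<F>) {
    let primary = full[..num_public].to_vec();
    let auxiliary = full[num_public..].to_vec();
    (primary, auxiliary)
}
```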

View file

@ -7,36 +7,30 @@
#pragma once
#include "util.hpp"
#ifdef __cplusplus
extern "C" {
#endif
#include <stdbool.h>
#include <stdint.h>
#include "ffi.hpp"
bool _pghr13_setup(const uint8_t* A,
const uint8_t* B,
const uint8_t* C,
int A_len,
int B_len,
int C_len,
int constraints,
int variables,
int inputs,
const char* pk_path,
const char* vk_path
);
setup_result_t pghr13_setup(
const uint8_t* A,
const uint8_t* B,
const uint8_t* C,
int32_t A_len,
int32_t B_len,
int32_t C_len,
int32_t constraints,
int32_t variables,
int32_t inputs);
bool _pghr13_generate_proof(const char* pk_path,
const char* proof_path,
const uint8_t* public_inputs,
int public_inputs_length,
const uint8_t* private_inputs,
int private_inputs_length
);
proof_result_t pghr13_generate_proof(
buffer_t* pk_buf,
const uint8_t* public_inputs,
int32_t public_inputs_length,
const uint8_t* private_inputs,
int32_t private_inputs_length);
#ifdef __cplusplus
} // extern "C"
#endif
#endif

View file

@ -1,5 +1,5 @@
/**
* @file wraplibsnark.cpp
* @file util.cpp
* @author Jacob Eberhardt <jacob.eberhardt@tu-berlin.de>
* @author Dennis Kuhnert <dennis.kuhnert@campus.tu-berlin.de>
* @date 2017
@ -7,83 +7,64 @@
#include "util.hpp"
using namespace std;
// conversion byte[32] <-> libsnark bigint.
libff::bigint<libff::alt_bn128_r_limbs> libsnarkBigintFromBytes(const uint8_t* _x)
{
libff::bigint<libff::alt_bn128_r_limbs> x;
for (unsigned i = 0; i < 4; i++) {
for (unsigned j = 0; j < 8; j++) {
x.data[3 - i] |= uint64_t(_x[i * 8 + j]) << (8 * (7-j));
libff::bigint<libff::alt_bn128_r_limbs> x;
for (unsigned i = 0; i < 4; i++) {
for (unsigned j = 0; j < 8; j++) {
x.data[3 - i] |= uint64_t(_x[i * 8 + j]) << (8 * (7 - j));
}
}
}
return x;
return x;
}
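
The conversion maps 32 big-endian bytes onto four 64-bit limbs stored least-significant limb first, so byte block `i` fills limb `3 - i`. A hypothetical Rust transcription of the same loop, for illustration:

```rust
// 32 big-endian bytes -> 4 x u64 limbs, least significant limb first,
// mirroring libsnarkBigintFromBytes above.
fn limbs_from_be_bytes(bytes: &[u8; 32]) -> [u64; 4] {
    let mut limbs = [0u64; 4];
    for i in 0..4 {
        for j in 0..8 {
            limbs[3 - i] |= u64::from(bytes[i * 8 + j]) << (8 * (7 - j));
        }
    }
    limbs
}
```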
std::string HexStringFromLibsnarkBigint(libff::bigint<libff::alt_bn128_r_limbs> _x){
std::string HexStringFromLibsnarkBigint(libff::bigint<libff::alt_bn128_r_limbs> _x)
{
uint8_t x[32];
for (unsigned i = 0; i < 4; i++)
for (unsigned j = 0; j < 8; j++)
x[i * 8 + j] = uint8_t(uint64_t(_x.data[3 - i]) >> (8 * (7 - j)));
for (unsigned i = 0; i < 4; i++) {
for (unsigned j = 0; j < 8; j++) {
x[i * 8 + j] = uint8_t(uint64_t(_x.data[3 - i]) >> (8 * (7 - j)));
}
}
std::stringstream ss;
ss << std::setfill('0');
for (unsigned i = 0; i<32; i++) {
ss << std::hex << std::setw(2) << (int)x[i];
for (unsigned i = 0; i < 32; i++) {
ss << std::hex << std::setw(2) << (int)x[i];
}
return ss.str();
}
std::string outputInputAsHex(libff::bigint<libff::alt_bn128_r_limbs> _x){
return "\"0x" + HexStringFromLibsnarkBigint(_x) + "\"";
std::string outputInputAsHex(libff::bigint<libff::alt_bn128_r_limbs> _x)
{
return "\"0x" + HexStringFromLibsnarkBigint(_x) + "\"";
}
std::string outputPointG1AffineAsHex(libff::alt_bn128_G1 _p)
{
libff::alt_bn128_G1 aff = _p;
aff.to_affine_coordinates();
return
"0x" +
HexStringFromLibsnarkBigint(aff.X.as_bigint()) +
", 0x" +
HexStringFromLibsnarkBigint(aff.Y.as_bigint());
libff::alt_bn128_G1 aff = _p;
aff.to_affine_coordinates();
return "0x" + HexStringFromLibsnarkBigint(aff.X.as_bigint()) + ", 0x" + HexStringFromLibsnarkBigint(aff.Y.as_bigint());
}
std::string outputPointG1AffineAsHexJson(libff::alt_bn128_G1 _p)
{
libff::alt_bn128_G1 aff = _p;
aff.to_affine_coordinates();
return
"[\"0x" +
HexStringFromLibsnarkBigint(aff.X.as_bigint()) +
"\", \"0x" +
HexStringFromLibsnarkBigint(aff.Y.as_bigint())+"\"]";
libff::alt_bn128_G1 aff = _p;
aff.to_affine_coordinates();
return "[\"0x" + HexStringFromLibsnarkBigint(aff.X.as_bigint()) + "\", \"0x" + HexStringFromLibsnarkBigint(aff.Y.as_bigint()) + "\"]";
}
std::string outputPointG2AffineAsHex(libff::alt_bn128_G2 _p)
{
libff::alt_bn128_G2 aff = _p;
aff.to_affine_coordinates();
return
"[0x" +
HexStringFromLibsnarkBigint(aff.X.c1.as_bigint()) + ", 0x" +
HexStringFromLibsnarkBigint(aff.X.c0.as_bigint()) + "], [0x" +
HexStringFromLibsnarkBigint(aff.Y.c1.as_bigint()) + ", 0x" +
HexStringFromLibsnarkBigint(aff.Y.c0.as_bigint()) + "]";
libff::alt_bn128_G2 aff = _p;
aff.to_affine_coordinates();
return "[0x" + HexStringFromLibsnarkBigint(aff.X.c1.as_bigint()) + ", 0x" + HexStringFromLibsnarkBigint(aff.X.c0.as_bigint()) + "], [0x" + HexStringFromLibsnarkBigint(aff.Y.c1.as_bigint()) + ", 0x" + HexStringFromLibsnarkBigint(aff.Y.c0.as_bigint()) + "]";
}
std::string outputPointG2AffineAsHexJson(libff::alt_bn128_G2 _p)
{
libff::alt_bn128_G2 aff = _p;
aff.to_affine_coordinates();
return
"[[\"0x" +
HexStringFromLibsnarkBigint(aff.X.c1.as_bigint()) + "\", \"0x" +
HexStringFromLibsnarkBigint(aff.X.c0.as_bigint()) + "\"], [\"0x" +
HexStringFromLibsnarkBigint(aff.Y.c1.as_bigint()) + "\", \"0x" +
HexStringFromLibsnarkBigint(aff.Y.c0.as_bigint()) + "\"]]";
libff::alt_bn128_G2 aff = _p;
aff.to_affine_coordinates();
return "[[\"0x" + HexStringFromLibsnarkBigint(aff.X.c1.as_bigint()) + "\", \"0x" + HexStringFromLibsnarkBigint(aff.X.c0.as_bigint()) + "\"], [\"0x" + HexStringFromLibsnarkBigint(aff.Y.c1.as_bigint()) + "\", \"0x" + HexStringFromLibsnarkBigint(aff.Y.c0.as_bigint()) + "\"]]";
}

View file

@ -1,52 +1,18 @@
#pragma once
// contains definition of alt_bn128 ec public parameters
// contains definitions of alt_bn128 ec public parameters
#include "libff/algebra/curves/alt_bn128/alt_bn128_pp.hpp"
#include <fstream>
#include <iostream>
#include <cassert>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
libff::bigint<libff::alt_bn128_r_limbs> libsnarkBigintFromBytes(const uint8_t* _x);
std::string HexStringFromLibsnarkBigint(libff::bigint<libff::alt_bn128_r_limbs> _x);
std::string outputInputAsHex(libff::bigint<libff::alt_bn128_r_limbs> _x);
std::string outputPointG1AffineAsHex(libff::alt_bn128_G1 _p);
std::string outputPointG1AffineAsHexJson(libff::alt_bn128_G1 _p);
std::string outputPointG2AffineAsHex(libff::alt_bn128_G2 _p);
std::string outputPointG2AffineAsHexJson(libff::alt_bn128_G2 _p);
template<typename T>
void writeToFile(std::string path, T& obj) {
std::stringstream ss;
ss << obj;
std::ofstream fh;
fh.open(path, std::ios::binary);
ss.rdbuf()->pubseekpos(0, std::ios_base::out);
fh << ss.rdbuf();
fh.flush();
fh.close();
}
template<typename T>
T loadFromFile(std::string path) {
std::stringstream ss;
std::ifstream fh(path, std::ios::binary);
assert(fh.is_open());
ss << fh.rdbuf();
fh.close();
ss.rdbuf()->pubseekpos(0, std::ios_base::in);
T obj;
ss >> obj;
return obj;
}
std::string outputPointG2AffineAsHexJson(libff::alt_bn128_G2 _p);

View file

@ -25,13 +25,13 @@ impl<'ast> From<pest::ImportDirective<'ast>> for absy::ImportNode<'ast> {
match import {
pest::ImportDirective::Main(import) => {
imports::Import::new(None, import.source.span.as_str())
imports::Import::new(None, std::path::Path::new(import.source.span.as_str()))
.alias(import.alias.map(|a| a.span.as_str()))
.span(import.span)
}
pest::ImportDirective::From(import) => imports::Import::new(
Some(import.symbol.span.as_str()),
import.source.span.as_str(),
std::path::Path::new(import.source.span.as_str()),
)
.alias(import.alias.map(|a| a.span.as_str()))
.span(import.span),
@ -156,76 +156,83 @@ fn statements_from_statement<'ast, T: Field>(
pest::Statement::Definition(s) => statements_from_definition(s),
pest::Statement::Iteration(s) => vec![absy::StatementNode::from(s)],
pest::Statement::Assertion(s) => vec![absy::StatementNode::from(s)],
pest::Statement::Assignment(s) => vec![absy::StatementNode::from(s)],
pest::Statement::Return(s) => vec![absy::StatementNode::from(s)],
pest::Statement::MultiAssignment(s) => statements_from_multi_assignment(s),
}
}
fn statements_from_multi_assignment<'ast, T: Field>(
assignment: pest::MultiAssignmentStatement<'ast>,
) -> Vec<absy::StatementNode<T>> {
use absy::NodeValue;
let declarations = assignment
.lhs
.clone()
.into_iter()
.filter(|i| i.ty.is_some())
.map(|i| {
absy::Statement::Declaration(
absy::Variable::new(
i.id.span.as_str(),
absy::UnresolvedTypeNode::from(i.ty.unwrap()),
)
.span(i.id.span),
)
.span(i.span)
});
let lhs = assignment
.lhs
.into_iter()
.map(|i| absy::Assignee::Identifier(i.id.span.as_str()).span(i.id.span))
.collect();
let multi_def = absy::Statement::MultipleDefinition(
lhs,
absy::Expression::FunctionCall(
&assignment.function_id.span.as_str(),
assignment
.arguments
.into_iter()
.map(|e| absy::ExpressionNode::from(e))
.collect(),
)
.span(assignment.function_id.span),
)
.span(assignment.span);
declarations.chain(std::iter::once(multi_def)).collect()
}
fn statements_from_definition<'ast, T: Field>(
definition: pest::DefinitionStatement<'ast>,
) -> Vec<absy::StatementNode<T>> {
use absy::NodeValue;
vec![
absy::Statement::Declaration(
absy::Variable::new(
definition.id.span.as_str(),
absy::UnresolvedTypeNode::from(definition.ty),
let lhs = definition.lhs;
match lhs.len() {
1 => {
// Definition or assignment
let a = lhs[0].clone();
let e: absy::ExpressionNode<T> = absy::ExpressionNode::from(definition.expression);
let s = match e.value {
absy::Expression::FunctionCall(..) => absy::Statement::MultipleDefinition(
vec![absy::AssigneeNode::from(a.a.clone())],
e,
),
_ => absy::Statement::Definition(absy::AssigneeNode::from(a.a.clone()), e),
};
match a.ty {
Some(ty) => {
assert_eq!(a.a.accesses.len(), 0);
let declaration = absy::Statement::Declaration(
absy::Variable::new(
a.a.id.span.as_str(),
absy::UnresolvedTypeNode::from(ty),
)
.span(a.a.id.span.clone()),
)
.span(definition.span.clone());
vec![declaration, s.span(definition.span)]
}
None => {
// Assignment
vec![s.span(definition.span)]
}
}
}
_ => {
// Multidefinition
let declarations = lhs.clone().into_iter().filter(|i| i.ty.is_some()).map(|a| {
let ty = a.ty;
let a = a.a;
assert_eq!(a.accesses.len(), 0);
absy::Statement::Declaration(
absy::Variable::new(
a.id.span.as_str(),
absy::UnresolvedTypeNode::from(ty.unwrap()),
)
.span(a.id.span),
)
.span(a.span)
});
let lhs = lhs
.into_iter()
.map(|i| absy::Assignee::Identifier(i.a.id.span.as_str()).span(i.a.id.span))
.collect();
let multi_def = absy::Statement::MultipleDefinition(
lhs,
absy::ExpressionNode::from(definition.expression),
)
.span(definition.id.span.clone()),
)
.span(definition.span.clone()),
absy::Statement::Definition(
absy::AssigneeNode::from(definition.id),
absy::ExpressionNode::from(definition.expression),
)
.span(definition.span),
]
.span(definition.span);
declarations.chain(std::iter::once(multi_def)).collect()
}
}
}
impl<'ast, T: Field> From<pest::ReturnStatement<'ast>> for absy::StatementNode<'ast, T> {
@ -283,34 +290,12 @@ impl<'ast, T: Field> From<pest::IterationStatement<'ast>> for absy::StatementNod
.flat_map(|s| statements_from_statement(s))
.collect();
let from = match from.value {
absy::Expression::FieldConstant(n) => n,
e => unimplemented!("For loop bounds should be constants, found {}", e),
};
let to = match to.value {
absy::Expression::FieldConstant(n) => n,
e => unimplemented!("For loop bounds should be constants, found {}", e),
};
let var = absy::Variable::new(index, ty).span(statement.index.span);
absy::Statement::For(var, from, to, statements).span(statement.span)
}
}
impl<'ast, T: Field> From<pest::AssignmentStatement<'ast>> for absy::StatementNode<'ast, T> {
fn from(statement: pest::AssignmentStatement<'ast>) -> absy::StatementNode<T> {
use absy::NodeValue;
absy::Statement::Definition(
absy::AssigneeNode::from(statement.assignee),
absy::ExpressionNode::from(statement.expression),
)
.span(statement.span)
}
}
impl<'ast, T: Field> From<pest::Expression<'ast>> for absy::ExpressionNode<'ast, T> {
fn from(expression: pest::Expression<'ast>) -> absy::ExpressionNode<'ast, T> {
match expression {
@ -981,4 +966,147 @@ mod tests {
absy::Module::<Bn128Field>::from(ast);
}
}
#[test]
fn declarations() {
use self::pest::Span;
let span = Span::new(&"", 0, 0).unwrap();
// For different definitions, we generate declarations
// Case 1: `id = expr` where `expr` is not a function call
// This is a simple assignment; it doesn't implicitly declare a variable
// A `Definition` is generated, and no `Declaration`s
let definition = pest::DefinitionStatement {
lhs: vec![pest::OptionallyTypedAssignee {
ty: None,
a: pest::Assignee {
id: pest::IdentifierExpression {
value: String::from("a"),
span: span.clone(),
},
accesses: vec![],
span: span.clone(),
},
span: span.clone(),
}],
expression: pest::Expression::Constant(pest::ConstantExpression::DecimalNumber(
pest::DecimalNumberExpression {
value: String::from("42"),
span: span.clone(),
},
)),
span: span.clone(),
};
let statements: Vec<absy::StatementNode<Bn128Field>> =
statements_from_definition(definition);
assert_eq!(statements.len(), 1);
match &statements[0].value {
absy::Statement::Definition(..) => {}
s => {
panic!("should be a Definition, found {}", s);
}
};
// Case 2: `id = expr` where `expr` is a function call
// A MultiDef is generated
let definition = pest::DefinitionStatement {
lhs: vec![pest::OptionallyTypedAssignee {
ty: None,
a: pest::Assignee {
id: pest::IdentifierExpression {
value: String::from("a"),
span: span.clone(),
},
accesses: vec![],
span: span.clone(),
},
span: span.clone(),
}],
expression: pest::Expression::Postfix(pest::PostfixExpression {
id: pest::IdentifierExpression {
value: String::from("foo"),
span: span.clone(),
},
accesses: vec![pest::Access::Call(pest::CallAccess {
expressions: vec![],
span: span.clone(),
})],
span: span.clone(),
}),
span: span.clone(),
};
let statements: Vec<absy::StatementNode<Bn128Field>> =
statements_from_definition(definition);
assert_eq!(statements.len(), 1);
match &statements[0].value {
absy::Statement::MultipleDefinition(..) => {}
s => {
panic!("should be a Definition, found {}", s);
}
};
// Case 3: `ids = expr` where `expr` is a function call
// This implicitly declares all variables that are type-annotated
// `field a, b = foo()`
let definition = pest::DefinitionStatement {
lhs: vec![
pest::OptionallyTypedAssignee {
ty: Some(pest::Type::Basic(pest::BasicType::Field(pest::FieldType {
span: span.clone(),
}))),
a: pest::Assignee {
id: pest::IdentifierExpression {
value: String::from("a"),
span: span.clone(),
},
accesses: vec![],
span: span.clone(),
},
span: span.clone(),
},
pest::OptionallyTypedAssignee {
ty: None,
a: pest::Assignee {
id: pest::IdentifierExpression {
value: String::from("b"),
span: span.clone(),
},
accesses: vec![],
span: span.clone(),
},
span: span.clone(),
},
],
expression: pest::Expression::Postfix(pest::PostfixExpression {
id: pest::IdentifierExpression {
value: String::from("foo"),
span: span.clone(),
},
accesses: vec![pest::Access::Call(pest::CallAccess {
expressions: vec![],
span: span.clone(),
})],
span: span.clone(),
}),
span: span.clone(),
};
let statements: Vec<absy::StatementNode<Bn128Field>> =
statements_from_definition(definition);
assert_eq!(statements.len(), 2);
match &statements[1].value {
absy::Statement::MultipleDefinition(..) => {}
s => {
panic!("should be a Definition, found {}", s);
}
};
}
}

View file

@ -16,6 +16,7 @@ pub use crate::absy::parameter::{Parameter, ParameterNode};
use crate::absy::types::{FunctionIdentifier, UnresolvedSignature, UnresolvedType, UserTypeId};
pub use crate::absy::variable::{Variable, VariableNode};
use embed::FlatEmbed;
use std::path::PathBuf;
use crate::imports::ImportNode;
use std::fmt;
@ -27,7 +28,7 @@ use std::collections::HashMap;
pub type Identifier<'ast> = &'ast str;
/// The identifier of a `Module`, typically a path or uri
pub type ModuleId = String;
pub type ModuleId = PathBuf;
/// A collection of `Module`s
pub type Modules<'ast, T> = HashMap<ModuleId, Module<'ast, T>>;
@ -171,7 +172,12 @@ impl<'ast> SymbolImport<'ast> {
impl<'ast> fmt::Display for SymbolImport<'ast> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} from {}", self.symbol_id, self.module_id)
write!(
f,
"{} from {}",
self.symbol_id,
self.module_id.display().to_string()
)
}
}
@ -297,7 +303,12 @@ pub enum Statement<'ast, T> {
Declaration(VariableNode<'ast>),
Definition(AssigneeNode<'ast, T>, ExpressionNode<'ast, T>),
Condition(ExpressionNode<'ast, T>, ExpressionNode<'ast, T>),
For(VariableNode<'ast>, T, T, Vec<StatementNode<'ast, T>>),
For(
VariableNode<'ast>,
ExpressionNode<'ast, T>,
ExpressionNode<'ast, T>,
Vec<StatementNode<'ast, T>>,
),
MultipleDefinition(Vec<AssigneeNode<'ast, T>>, ExpressionNode<'ast, T>),
}

View file

@ -13,13 +13,31 @@ use static_analysis::Analyse;
use std::collections::HashMap;
use std::fmt;
use std::io;
use std::io::BufRead;
use std::path::PathBuf;
use typed_absy::abi::Abi;
use typed_arena::Arena;
use zokrates_common::Resolver;
use zokrates_field::Field;
use zokrates_pest_ast as pest;
#[derive(Debug)]
pub struct CompileErrors(Vec<CompileError>);
pub struct CompilationArtifacts<T: Field> {
prog: ir::Prog<T>,
abi: Abi,
}
impl<T: Field> CompilationArtifacts<T> {
pub fn prog(&self) -> &ir::Prog<T> {
&self.prog
}
pub fn abi(&self) -> &Abi {
&self.abi
}
}
#[derive(Debug)]
pub struct CompileErrors(pub Vec<CompileError>);
impl From<CompileError> for CompileErrors {
fn from(e: CompileError) -> CompileErrors {
@ -27,50 +45,46 @@ impl From<CompileError> for CompileErrors {
}
}
impl fmt::Display for CompileErrors {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
self.0
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<_>>()
.join("\n\n")
)
}
}
#[derive(Debug)]
pub enum CompileErrorInner {
ParserError(pest::Error),
ImportError(imports::Error),
SemanticError(semantics::Error),
SemanticError(semantics::ErrorInner),
ReadError(io::Error),
}
impl CompileErrorInner {
pub fn with_context(self, context: &Option<String>) -> CompileError {
pub fn in_file(self, context: &PathBuf) -> CompileError {
CompileError {
value: self,
context: context.clone(),
file: context.clone(),
}
}
}
#[derive(Debug)]
pub struct CompileError {
context: Option<String>,
file: PathBuf,
value: CompileErrorInner,
}
impl CompileError {
pub fn file(&self) -> &PathBuf {
&self.file
}
pub fn value(&self) -> &CompileErrorInner {
&self.value
}
}
impl CompileErrors {
pub fn with_context(self, context: Option<String>) -> Self {
pub fn with_context(self, file: PathBuf) -> Self {
CompileErrors(
self.0
.into_iter()
.map(|e| CompileError {
context: context.clone(),
file: file.clone(),
..e
})
.collect(),
@ -78,16 +92,6 @@ impl CompileErrors {
}
}
impl fmt::Display for CompileError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let context = match self.context {
Some(ref x) => x.clone(),
None => "???".to_string(),
};
write!(f, "{}:{}", context, self.value)
}
}
impl From<pest::Error> for CompileErrorInner {
fn from(error: pest::Error) -> Self {
CompileErrorInner::ParserError(error)
@ -106,50 +110,45 @@ impl From<io::Error> for CompileErrorInner {
}
}
impl From<semantics::Error> for CompileErrorInner {
impl From<semantics::Error> for CompileError {
fn from(error: semantics::Error) -> Self {
CompileErrorInner::SemanticError(error)
CompileError {
value: CompileErrorInner::SemanticError(error.inner),
file: error.module_id,
}
}
}
impl fmt::Display for CompileErrorInner {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let res = match *self {
CompileErrorInner::ParserError(ref e) => format!("{}", e),
CompileErrorInner::SemanticError(ref e) => format!("{}", e),
CompileErrorInner::ReadError(ref e) => format!("{}", e),
CompileErrorInner::ImportError(ref e) => format!("{}", e),
};
write!(f, "{}", res)
match *self {
CompileErrorInner::ParserError(ref e) => write!(f, "{}", e),
CompileErrorInner::SemanticError(ref e) => write!(f, "{}", e),
CompileErrorInner::ReadError(ref e) => write!(f, "{}", e),
CompileErrorInner::ImportError(ref e) => write!(f, "{}", e),
}
}
}
pub type Resolve<S, E> = fn(Option<String>, &str) -> Result<(S, String, &str), E>;
type FilePath = PathBuf;
pub fn compile<T: Field, R: BufRead, S: BufRead, E: Into<imports::Error>>(
reader: &mut R,
location: Option<String>,
resolve_option: Option<Resolve<S, E>>,
) -> Result<ir::Prog<T>, CompileErrors> {
pub fn compile<T: Field, E: Into<imports::Error>>(
source: String,
location: FilePath,
resolver: Option<&dyn Resolver<E>>,
) -> Result<CompilationArtifacts<T>, CompileErrors> {
let arena = Arena::new();
let mut source = String::new();
reader.read_to_string(&mut source).unwrap();
let source = arena.alloc(source);
let compiled = compile_program(source, location.clone(), resolve_option, &arena)?;
let compiled = compile_program(source, location.clone(), resolver, &arena)?;
// check semantics
let typed_ast = Checker::check(compiled).map_err(|errors| {
CompileErrors(
errors
.into_iter()
.map(|e| CompileErrorInner::from(e).with_context(&location))
.collect(),
)
CompileErrors(errors.into_iter().map(|e| CompileError::from(e)).collect())
})?;
let abi = typed_ast.abi();
// analyse (unroll and constant propagation)
let typed_ast = typed_ast.analyse();
@ -165,26 +164,24 @@ pub fn compile<T: Field, R: BufRead, S: BufRead, E: Into<imports::Error>>(
// optimize
let optimized_ir_prog = ir_prog.optimize();
Ok(optimized_ir_prog)
// analyse (check for unused constraints)
let optimized_ir_prog = optimized_ir_prog.analyse();
Ok(CompilationArtifacts {
prog: optimized_ir_prog,
abi,
})
}
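
A hedged usage sketch of the new API: `compile` now takes the source as a `String` plus a `PathBuf` location and yields `CompilationArtifacts` carrying both the optimized IR program and the ABI. The source snippet and path are illustrative:

```rust
// Illustrative only: exercises the new `compile` signature from this module.
use zokrates_field::Bn128Field;

fn compile_example() {
    let source = r#"
def main() -> (field):
  return 1
"#
    .to_string();

    let artifacts: CompilationArtifacts<Bn128Field> =
        compile(source, "main.zok".into(), None::<&dyn Resolver<std::io::Error>>)
            .expect("compilation should succeed");

    let _prog = artifacts.prog(); // optimized ir::Prog<Bn128Field>
    let _abi = artifacts.abi(); // ABI derived from the typed AST
}
```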
pub fn compile_program<'ast, T: Field, S: BufRead, E: Into<imports::Error>>(
pub fn compile_program<'ast, T: Field, E: Into<imports::Error>>(
source: &'ast str,
location: Option<String>,
resolve_option: Option<Resolve<S, E>>,
location: FilePath,
resolver: Option<&dyn Resolver<E>>,
arena: &'ast Arena<String>,
) -> Result<Program<'ast, T>, CompileErrors> {
let mut modules = HashMap::new();
let main = compile_module(
&source,
location.clone(),
resolve_option,
&mut modules,
&arena,
)?;
let location = location.unwrap_or("???".to_string());
let main = compile_module(&source, location.clone(), resolver, &mut modules, &arena)?;
modules.insert(location.clone(), main);
@ -194,21 +191,21 @@ pub fn compile_program<'ast, T: Field, S: BufRead, E: Into<imports::Error>>(
})
}
pub fn compile_module<'ast, T: Field, S: BufRead, E: Into<imports::Error>>(
pub fn compile_module<'ast, T: Field, E: Into<imports::Error>>(
source: &'ast str,
location: Option<String>,
resolve_option: Option<Resolve<S, E>>,
location: FilePath,
resolver: Option<&dyn Resolver<E>>,
modules: &mut HashMap<ModuleId, Module<'ast, T>>,
arena: &'ast Arena<String>,
) -> Result<Module<'ast, T>, CompileErrors> {
let ast = pest::generate_ast(&source)
.map_err(|e| CompileErrors::from(CompileErrorInner::from(e).with_context(&location)))?;
.map_err(|e| CompileErrors::from(CompileErrorInner::from(e).in_file(&location)))?;
let module_without_imports: Module<T> = Module::from(ast);
Importer::new().apply_imports(
module_without_imports,
location.clone(),
resolve_option,
resolver,
modules,
&arena,
)
@ -217,44 +214,38 @@ pub fn compile_module<'ast, T: Field, S: BufRead, E: Into<imports::Error>>(
#[cfg(test)]
mod test {
use super::*;
use std::io::{BufReader, Empty};
use zokrates_field::Bn128Field;
#[test]
fn no_resolver_with_imports() {
let mut r = BufReader::new(
r#"
let source = r#"
import "./path/to/file" as foo
def main() -> (field):
return foo()
"#
.as_bytes(),
.to_string();
let res: Result<CompilationArtifacts<Bn128Field>, CompileErrors> = compile(
source,
"./path/to/file".into(),
None::<&dyn Resolver<io::Error>>,
);
let res: Result<ir::Prog<Bn128Field>, CompileErrors> = compile(
&mut r,
Some(String::from("./path/to/file")),
None::<Resolve<BufReader<Empty>, io::Error>>,
);
assert!(res
.unwrap_err()
assert!(res.unwrap_err().0[0]
.value()
.to_string()
.contains(&"Can't resolve import without a resolver"));
}
#[test]
fn no_resolver_without_imports() {
let mut r = BufReader::new(
r#"
let source = r#"
def main() -> (field):
return 1
"#
.as_bytes(),
);
let res: Result<ir::Prog<Bn128Field>, CompileErrors> = compile(
&mut r,
Some(String::from("./path/to/file")),
None::<Resolve<BufReader<Empty>, io::Error>>,
.to_string();
let res: Result<CompilationArtifacts<Bn128Field>, CompileErrors> = compile(
source,
"./path/to/file".into(),
None::<&dyn Resolver<io::Error>>,
);
assert!(res.is_ok());
}

View file

@ -1,17 +1,17 @@
use crate::helpers::{DirectiveStatement, Helper, RustHelper};
use crate::solvers::Solver;
use bellman::pairing::ff::ScalarEngine;
use flat_absy::{
FlatExpression, FlatExpressionList, FlatFunction, FlatParameter, FlatStatement, FlatVariable,
FlatDirective, FlatExpression, FlatExpressionList, FlatFunction, FlatParameter, FlatStatement,
FlatVariable,
};
use reduce::Reduce;
use std::collections::HashMap;
use typed_absy::types::{FunctionKey, Signature, Type};
use zokrates_embed::{generate_sha256_round_constraints, BellmanConstraint};
use zokrates_field::Field;
/// A low level function that contains non-deterministic introduction of variables. It is carried as is until
/// A low level function that contains non-deterministic introduction of variables. It is carried out as is until
/// the flattening step when it can be inlined.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Hash)]
pub enum FlatEmbed {
Sha256Round,
Unpack,
@ -56,32 +56,34 @@ impl FlatEmbed {
}
// util to convert a vector of `(variable_id, coefficient)` to a flat_expression
// we build a binary tree of additions by splitting the vector recursively
fn flat_expression_from_vec<T: Field>(
v: Vec<(usize, <<T as Field>::BellmanEngine as ScalarEngine>::Fr)>,
v: &[(usize, <<T as Field>::BellmanEngine as ScalarEngine>::Fr)],
) -> FlatExpression<T> {
match v
.into_iter()
.map(|(key, val)| {
match v.len() {
0 => FlatExpression::Number(T::zero()),
1 => {
let (key, val) = v[0].clone();
FlatExpression::Mult(
box FlatExpression::Number(T::from_bellman(val)),
box FlatExpression::Identifier(FlatVariable::new(key)),
)
})
.reduce(|acc, e| FlatExpression::Add(box acc, box e))
{
Some(e @ FlatExpression::Mult(..)) => {
FlatExpression::Add(box FlatExpression::Number(T::zero()), box e)
} // the R1CS serializer only recognizes Add
Some(e) => e,
None => FlatExpression::Number(T::zero()),
}
n => {
let (u, v) = v.split_at(n / 2);
FlatExpression::Add(
box flat_expression_from_vec(u),
box flat_expression_from_vec(v),
)
}
}
}
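
The rewrite replaces the left-leaning fold with a recursive split, so the sum of `n` terms becomes a balanced binary tree of additions of depth `O(log n)` rather than `O(n)`. A self-contained sketch of the same shape, with an illustrative expression type:

```rust
#[derive(Debug)]
enum Expr {
    Num(u64),
    Add(Box<Expr>, Box<Expr>),
}

// Sum a slice of terms as a balanced binary tree, mirroring the
// `split_at(n / 2)` recursion in `flat_expression_from_vec`.
fn sum_tree(terms: &[u64]) -> Expr {
    match terms.len() {
        0 => Expr::Num(0),
        1 => Expr::Num(terms[0]),
        n => {
            let (left, right) = terms.split_at(n / 2);
            Expr::Add(Box::new(sum_tree(left)), Box::new(sum_tree(right)))
        }
    }
}
```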
impl<T: Field> From<BellmanConstraint<T::BellmanEngine>> for FlatStatement<T> {
fn from(c: zokrates_embed::BellmanConstraint<T::BellmanEngine>) -> FlatStatement<T> {
let rhs_a = flat_expression_from_vec(c.a);
let rhs_b = flat_expression_from_vec(c.b);
let lhs = flat_expression_from_vec(c.c);
let rhs_a = flat_expression_from_vec(&c.a);
let rhs_b = flat_expression_from_vec(&c.b);
let lhs = flat_expression_from_vec(&c.c);
FlatStatement::Condition(lhs, FlatExpression::Mult(box rhs_a, box rhs_b))
}
@ -123,15 +125,6 @@ pub fn sha256_round<T: Field>() -> FlatFunction<T> {
.into_iter()
.map(|i| i + variable_count);
// define the signature of the resulting function
let signature = Signature {
inputs: vec![
Type::array(Type::FieldElement, input_indices.len()),
Type::array(Type::FieldElement, current_hash_indices.len()),
],
outputs: vec![Type::array(Type::FieldElement, output_indices.len())],
};
// define parameters to the function based on the variables
let arguments = input_argument_indices
.clone()
@ -166,13 +159,13 @@ pub fn sha256_round<T: Field>() -> FlatFunction<T> {
.collect();
// insert a directive to set the witness based on the bellman gadget and inputs
let directive_statement = FlatStatement::Directive(DirectiveStatement {
let directive_statement = FlatStatement::Directive(FlatDirective {
outputs: cs_indices.map(|i| FlatVariable::new(i)).collect(),
inputs: input_argument_indices
.chain(current_hash_argument_indices)
.map(|i| FlatVariable::new(i).into())
.collect(),
helper: Helper::Rust(RustHelper::Sha256Round),
solver: Solver::Sha256Round,
});
// insert a statement to return the subset of the witness
@ -190,7 +183,6 @@ pub fn sha256_round<T: Field>() -> FlatFunction<T> {
FlatFunction {
arguments,
statements,
signature,
}
}
@ -211,7 +203,7 @@ fn use_variable(
/// * the return value of the `FlatFunction` is not deterministic: as we decompose over log_2(p) + 1 bits, some
/// elements can have multiple representations: For example, `unpack(0)` is `[0, ..., 0]` but also `unpack(p)`
pub fn unpack<T: Field>() -> FlatFunction<T> {
let nbits = T::get_required_bits();
let bit_width = T::get_required_bits();
let mut counter = 0;
@ -229,28 +221,23 @@ pub fn unpack<T: Field>() -> FlatFunction<T> {
format!("i0"),
&mut counter,
))];
let directive_outputs: Vec<FlatVariable> = (0..T::get_required_bits())
let directive_outputs: Vec<FlatVariable> = (0..bit_width)
.map(|index| use_variable(&mut layout, format!("o{}", index), &mut counter))
.collect();
let helper = Helper::bits();
let signature = Signature {
inputs: vec![Type::FieldElement],
outputs: vec![Type::array(Type::FieldElement, nbits)],
};
let solver = Solver::bits(bit_width);
let outputs = directive_outputs
.iter()
.enumerate()
.filter(|(index, _)| *index >= T::get_required_bits() - nbits)
.filter(|(index, _)| *index >= T::get_required_bits() - bit_width)
.map(|(_, o)| FlatExpression::Identifier(o.clone()))
.collect();
// o253, o252, ... o{253 - (nbits - 1)} are bits
let mut statements: Vec<FlatStatement<T>> = (0..nbits)
// o253, o252, ... o{253 - (bit_width - 1)} are bits
let mut statements: Vec<FlatStatement<T>> = (0..bit_width)
.map(|index| {
let bit = FlatExpression::Identifier(FlatVariable::new(T::get_required_bits() - index));
let bit = FlatExpression::Identifier(FlatVariable::new(bit_width - index));
FlatStatement::Condition(
bit.clone(),
FlatExpression::Mult(box bit.clone(), box bit.clone()),
@ -258,14 +245,14 @@ pub fn unpack<T: Field>() -> FlatFunction<T> {
})
.collect();
// sum check: o253 + o252 * 2 + ... + o{253 - (nbits - 1)} * 2**(nbits - 1)
// sum check: o253 + o252 * 2 + ... + o{253 - (bit_width - 1)} * 2**(bit_width - 1)
let mut lhs_sum = FlatExpression::Number(T::from(0));
for i in 0..nbits {
for i in 0..bit_width {
lhs_sum = FlatExpression::Add(
box lhs_sum,
box FlatExpression::Mult(
box FlatExpression::Identifier(FlatVariable::new(T::get_required_bits() - i)),
box FlatExpression::Identifier(FlatVariable::new(bit_width - i)),
box FlatExpression::Number(T::from(2).pow(i)),
),
);
@ -281,10 +268,10 @@ pub fn unpack<T: Field>() -> FlatFunction<T> {
statements.insert(
0,
FlatStatement::Directive(DirectiveStatement {
FlatStatement::Directive(FlatDirective {
inputs: directive_inputs,
outputs: directive_outputs,
helper: helper,
solver: solver,
}),
);
@ -295,7 +282,6 @@ pub fn unpack<T: Field>() -> FlatFunction<T> {
FlatFunction {
arguments,
statements,
signature,
}
}
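
In equations, with $w$ = `T::get_required_bits()` and big-endian directive outputs $b_0, \dots, b_{w-1}$, `unpack` enforces the bitness checks and the sum check:

$$b_j \cdot b_j = b_j \quad \text{for } 0 \le j < w, \qquad x = \sum_{i=0}^{w-1} b_{w-1-i} \cdot 2^{i}$$

consistent with the non-determinism noted in the doc comment: for $x = 0$, both the all-zero vector and the bit pattern of $p$ satisfy these constraints.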
@ -322,11 +308,11 @@ mod tests {
); // 128 bit checks, 1 directive, 1 sum check, 1 return
assert_eq!(
unpack.statements[0],
FlatStatement::Directive(DirectiveStatement::new(
FlatStatement::Directive(FlatDirective::new(
(0..Bn128Field::get_required_bits())
.map(|i| FlatVariable::new(i + 1))
.collect(),
Helper::bits(),
Solver::bits(Bn128Field::get_required_bits()),
vec![FlatVariable::new(0)]
))
);
@ -349,17 +335,6 @@ mod tests {
fn generate_sha256_constraints() {
let compiled = sha256_round();
// function should have a signature of 768 inputs and 256 outputs
assert_eq!(
compiled.signature,
Signature::new()
.inputs(vec![
Type::array(Type::FieldElement, 512),
Type::array(Type::FieldElement, 256)
])
.outputs(vec![Type::array(Type::FieldElement, 256)])
);
// function should have 768 inputs
assert_eq!(compiled.arguments.len(), 768,);
@ -419,12 +394,6 @@ mod tests {
let prog = crate::ir::Prog {
main: f,
private: vec![true; 768],
signature: Signature::new()
.inputs(vec![
Type::array(Type::FieldElement, 512),
Type::array(Type::FieldElement, 256),
])
.outputs(vec![Type::array(Type::FieldElement, 256)]),
};
let input = (0..512)

View file

@ -11,8 +11,7 @@ pub mod flat_variable;
pub use self::flat_parameter::FlatParameter;
pub use self::flat_variable::FlatVariable;
use crate::helpers::DirectiveStatement;
use crate::typed_absy::types::Signature;
use solvers::{Signed, Solver};
use std::collections::HashMap;
use std::fmt;
use zokrates_field::Field;
@ -41,8 +40,6 @@ pub struct FlatFunction<T: Field> {
pub arguments: Vec<FlatParameter>,
/// Vector of statements that are executed when running the function
pub statements: Vec<FlatStatement<T>>,
/// Typed signature
pub signature: Signature,
}
impl<T: Field> fmt::Display for FlatFunction<T> {
@ -68,9 +65,8 @@ impl<T: Field> fmt::Debug for FlatFunction<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"FlatFunction(arguments: {:?}, signature: {:?}):\n{}",
"FlatFunction(arguments: {:?}):\n{}",
self.arguments,
self.signature,
self.statements
.iter()
.map(|x| format!("\t{:?}", x))
@ -95,7 +91,7 @@ pub enum FlatStatement<T: Field> {
Return(FlatExpressionList<T>),
Condition(FlatExpression<T>, FlatExpression<T>),
Definition(FlatVariable, FlatExpression<T>),
Directive(DirectiveStatement<T>),
Directive(FlatDirective<T>),
}
impl<T: Field> fmt::Display for FlatStatement<T> {
@ -149,7 +145,7 @@ impl<T: Field> FlatStatement<T> {
.map(|i| i.apply_substitution(substitution))
.collect();
FlatStatement::Directive(DirectiveStatement {
FlatStatement::Directive(FlatDirective {
outputs,
inputs,
..d
@ -159,7 +155,51 @@ impl<T: Field> FlatStatement<T> {
}
}
#[derive(Clone, PartialEq)]
impl<T: Field> FlatDirective<T> {
pub fn new<E: Into<FlatExpression<T>>>(
outputs: Vec<FlatVariable>,
solver: Solver,
inputs: Vec<E>,
) -> Self {
let (in_len, out_len) = solver.get_signature();
assert_eq!(in_len, inputs.len());
assert_eq!(out_len, outputs.len());
FlatDirective {
solver,
inputs: inputs.into_iter().map(|i| i.into()).collect(),
outputs,
}
}
}
impl<T: Field> fmt::Display for FlatDirective<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"# {} = {}({})",
self.outputs
.iter()
.map(|o| o.to_string())
.collect::<Vec<String>>()
.join(", "),
self.solver,
self.inputs
.iter()
.map(|i| i.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
}
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
pub struct FlatDirective<T> {
pub inputs: Vec<FlatExpression<T>>,
pub outputs: Vec<FlatVariable>,
pub solver: Solver,
}
#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub enum FlatExpression<T> {
Number(T),
Identifier(FlatVariable),
@ -221,10 +261,10 @@ impl<T: Field> fmt::Display for FlatExpression<T> {
}
}
impl<T: Field> fmt::Debug for FlatExpression<T> {
impl<T: fmt::Debug> fmt::Debug for FlatExpression<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
FlatExpression::Number(ref i) => write!(f, "Num({})", i),
FlatExpression::Number(ref i) => write!(f, "Num({:?})", i),
FlatExpression::Identifier(ref var) => write!(f, "Ide({})", var),
FlatExpression::Add(ref lhs, ref rhs) => write!(f, "Add({:?}, {:?})", lhs, rhs),
FlatExpression::Sub(ref lhs, ref rhs) => write!(f, "Sub({:?}, {:?})", lhs, rhs),

View file

@ -1,4 +1,4 @@
//! Module containing the `Flattener` to process a program that it is R1CS-able.
//! Module containing the `Flattener` to process a program that is R1CS-able.
//!
//! @file flatten.rs
//! @author Dennis Kuhnert <dennis.kuhnert@campus.tu-berlin.de>
@ -6,7 +6,7 @@
//! @date 2017
use crate::flat_absy::*;
use crate::helpers::{DirectiveStatement, Helper, RustHelper};
use crate::solvers::Solver;
use crate::typed_absy::types::{FunctionIdentifier, FunctionKey, MemberId, Signature, Type};
use crate::typed_absy::*;
use std::collections::HashMap;
@ -122,7 +122,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `condition` - the condition as a `BooleanExpression`.
/// * `consequence` - the consequence of type U.
@ -218,9 +218,9 @@ impl<'ast, T: Field> Flattener<'ast, T> {
let members = s.ty().clone();
let expected_output_size = members
.iter()
.find(|(id, _)| *id == member_id)
.find(|member| *member.id == member_id)
.unwrap()
.1
.ty
.get_primitive_count();
let res =
@ -231,8 +231,8 @@ impl<'ast, T: Field> Flattener<'ast, T> {
values
.into_iter()
.zip(members.into_iter())
.filter(|(_, (id, _))| *id == member_id)
.flat_map(|(v, (_, t))| match t {
.filter(|(_, member)| *member.id == member_id)
.flat_map(|(v, member)| match *member.ty {
Type::FieldElement => FieldElementExpression::try_from(v)
.unwrap()
.flatten(self, symbols, statements_flattened),
@ -259,16 +259,16 @@ impl<'ast, T: Field> Flattener<'ast, T> {
// the struct is encoded as a sequence, so we need to identify the offset at which this member starts
let offset = members
.iter()
.take_while(|(id, _)| *id != member_id)
.map(|(_, ty)| ty.get_primitive_count())
.take_while(|member| *member.id != member_id)
.map(|member| member.ty.get_primitive_count())
.sum();
// we also need the size of this member
let size = members
.iter()
.find(|(id, _)| *id == member_id)
.find(|member| *member.id == member_id)
.unwrap()
.1
.ty
.get_primitive_count();
self.layout.get(&id).unwrap()[offset..(offset + size)]
.into_iter()
@ -278,16 +278,16 @@ impl<'ast, T: Field> Flattener<'ast, T> {
StructExpressionInner::Select(box array, box index) => {
let offset = members
.iter()
.take_while(|(id, _)| *id != member_id)
.map(|(_, ty)| ty.get_primitive_count())
.take_while(|member| *member.id != member_id)
.map(|member| member.ty.get_primitive_count())
.sum();
// we also need the size of this member
let size = members
.iter()
.find(|(id, _)| *id == member_id)
.find(|member| *member.id == member_id)
.unwrap()
.1
.ty
.get_primitive_count();
self.flatten_select_expression::<StructExpression<'ast, T>>(
@ -302,12 +302,12 @@ impl<'ast, T: Field> Flattener<'ast, T> {
StructExpressionInner::IfElse(box condition, box consequence, box alternative) => {
// if the struct is `(if c then a else b)`, we want to access `(if c then a else b).member`
// we reduce to `if c then a.member else b.member`
let ty = members
let ty = *members
.clone()
.into_iter()
.find(|(id, _)| *id == member_id)
.find(|member| *member.id == member_id)
.unwrap()
.1;
.ty;
match ty {
Type::FieldElement => self.flatten_if_else_expression(
@ -345,16 +345,16 @@ impl<'ast, T: Field> Flattener<'ast, T> {
let offset = members
.iter()
.take_while(|(id, _)| *id != member_id)
.map(|(_, ty)| ty.get_primitive_count())
.take_while(|member| *member.id != member_id)
.map(|member| member.ty.get_primitive_count())
.sum();
// we also need the size of this member
let size = members
.iter()
.find(|(id, _)| *id == member_id)
.find(|member| *member.id == member_id)
.unwrap()
.1
.ty
.get_primitive_count();
e[offset..(offset + size)].into()
@ -466,7 +466,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
e => {
// we have array[e] with e an arbitrary expression
// first we check that e is in 0..array.len(), so we check that sum(if e == i then 1 else 0) == 1
// here depending on the size, we could use a proper range check based on bits
// here, depending on the size, we could use a proper range check based on bits
let range_check = (0..size)
.map(|i| {
FieldElementExpression::IfElse(
@ -546,7 +546,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `expression` - `BooleanExpression` that will be flattened.
///
@ -566,8 +566,9 @@ impl<'ast, T: Field> Flattener<'ast, T> {
FlatExpression::Identifier(self.layout.get(&x).unwrap().clone()[0])
}
BooleanExpression::Lt(box lhs, box rhs) => {
// Get the bitwidth to know the size of the binary decompsitions for this Field
let bitwidth = T::get_required_bits();
// Get the bit width to know the size of the binary decompositions for this Field
let bit_width = T::get_required_bits();
let safe_width = bit_width - 2; // making sure we don't overflow, assert here?
// We know from semantic checking that lhs and rhs have the same type
// What the expression will flatten to depends on that type
@ -581,28 +582,28 @@ impl<'ast, T: Field> Flattener<'ast, T> {
let lhs_id = self.use_sym();
statements_flattened.push(FlatStatement::Definition(lhs_id, lhs_flattened));
// check that lhs and rhs are within the right range, ie, their higher two bits are zero. We use big-endian so they are at positions 0 and 1
// check that lhs and rhs are within the right range, i.e., their higher two bits are zero. We use big-endian so they are at positions 0 and 1
// lhs
{
// define variables for the bits
let lhs_bits_be: Vec<FlatVariable> =
(0..bitwidth).map(|_| self.use_sym()).collect();
(0..safe_width).map(|_| self.use_sym()).collect();
// add a directive to get the bits
statements_flattened.push(FlatStatement::Directive(DirectiveStatement::new(
statements_flattened.push(FlatStatement::Directive(FlatDirective::new(
lhs_bits_be.clone(),
Helper::bits(),
Solver::bits(safe_width),
vec![lhs_id],
)));
// bitness checks
for i in 0..bitwidth - 2 {
for i in 0..safe_width {
statements_flattened.push(FlatStatement::Condition(
FlatExpression::Identifier(lhs_bits_be[i + 2]),
FlatExpression::Identifier(lhs_bits_be[i]),
FlatExpression::Mult(
box FlatExpression::Identifier(lhs_bits_be[i + 2]),
box FlatExpression::Identifier(lhs_bits_be[i + 2]),
box FlatExpression::Identifier(lhs_bits_be[i]),
box FlatExpression::Identifier(lhs_bits_be[i]),
),
));
}
@ -610,12 +611,12 @@ impl<'ast, T: Field> Flattener<'ast, T> {
// bit decomposition check
let mut lhs_sum = FlatExpression::Number(T::from(0));
for i in 0..bitwidth - 2 {
for i in 0..safe_width {
lhs_sum = FlatExpression::Add(
box lhs_sum,
box FlatExpression::Mult(
box FlatExpression::Identifier(lhs_bits_be[i + 2]),
box FlatExpression::Number(T::from(2).pow(bitwidth - 2 - i - 1)),
box FlatExpression::Identifier(lhs_bits_be[i]),
box FlatExpression::Number(T::from(2).pow(safe_width - i - 1)),
),
);
}
@ -634,22 +635,22 @@ impl<'ast, T: Field> Flattener<'ast, T> {
{
// define variables for the bits
let rhs_bits_be: Vec<FlatVariable> =
(0..bitwidth).map(|_| self.use_sym()).collect();
(0..safe_width).map(|_| self.use_sym()).collect();
// add a directive to get the bits
statements_flattened.push(FlatStatement::Directive(DirectiveStatement::new(
statements_flattened.push(FlatStatement::Directive(FlatDirective::new(
rhs_bits_be.clone(),
Helper::bits(),
Solver::bits(safe_width),
vec![rhs_id],
)));
// bitness checks
for i in 0..bitwidth - 2 {
for i in 0..safe_width {
statements_flattened.push(FlatStatement::Condition(
FlatExpression::Identifier(rhs_bits_be[i + 2]),
FlatExpression::Identifier(rhs_bits_be[i]),
FlatExpression::Mult(
box FlatExpression::Identifier(rhs_bits_be[i + 2]),
box FlatExpression::Identifier(rhs_bits_be[i + 2]),
box FlatExpression::Identifier(rhs_bits_be[i]),
box FlatExpression::Identifier(rhs_bits_be[i]),
),
));
}
@ -657,12 +658,12 @@ impl<'ast, T: Field> Flattener<'ast, T> {
// bit decomposition check
let mut rhs_sum = FlatExpression::Number(T::from(0));
for i in 0..bitwidth - 2 {
for i in 0..safe_width {
rhs_sum = FlatExpression::Add(
box rhs_sum,
box FlatExpression::Mult(
box FlatExpression::Identifier(rhs_bits_be[i + 2]),
box FlatExpression::Number(T::from(2).pow(bitwidth - 2 - i - 1)),
box FlatExpression::Identifier(rhs_bits_be[i]),
box FlatExpression::Number(T::from(2).pow(safe_width - i - 1)),
),
);
}
@ -687,17 +688,17 @@ impl<'ast, T: Field> Flattener<'ast, T> {
// define variables for the bits
let sub_bits_be: Vec<FlatVariable> =
(0..bitwidth).map(|_| self.use_sym()).collect();
(0..bit_width).map(|_| self.use_sym()).collect();
// add a directive to get the bits
statements_flattened.push(FlatStatement::Directive(DirectiveStatement::new(
statements_flattened.push(FlatStatement::Directive(FlatDirective::new(
sub_bits_be.clone(),
Helper::bits(),
Solver::bits(bit_width),
vec![subtraction_result.clone()],
)));
// bitness checks
for i in 0..bitwidth {
for i in 0..bit_width {
statements_flattened.push(FlatStatement::Condition(
FlatExpression::Identifier(sub_bits_be[i]),
FlatExpression::Mult(
@ -710,19 +711,19 @@ impl<'ast, T: Field> Flattener<'ast, T> {
// sum(sym_b{i} * 2**i)
let mut expr = FlatExpression::Number(T::from(0));
for i in 0..bitwidth {
for i in 0..bit_width {
expr = FlatExpression::Add(
box expr,
box FlatExpression::Mult(
box FlatExpression::Identifier(sub_bits_be[i]),
box FlatExpression::Number(T::from(2).pow(bitwidth - i - 1)),
box FlatExpression::Number(T::from(2).pow(bit_width - i - 1)),
),
);
}
statements_flattened.push(FlatStatement::Condition(subtraction_result, expr));
FlatExpression::Identifier(sub_bits_be[bitwidth - 1])
FlatExpression::Identifier(sub_bits_be[bit_width - 1])
}
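
A sketch of why the last big-endian bit is the comparison result, assuming `subtraction_result` is the doubled difference $2(\mathit{lhs} - \mathit{rhs})$ — its definition falls outside this hunk, so that is an assumption. With $p$ odd and both operands range-checked to `safe_width` $= w - 2$ bits, parity encodes the sign:

$$2(a - b) \bmod p = \begin{cases} 2(a - b), & a \ge b \quad (\text{even}) \\ p - 2(b - a), & a < b \quad (\text{odd}) \end{cases}$$

so the least-significant bit of the $w$-bit decomposition, `sub_bits_be[bit_width - 1]` in big-endian order, equals $[a < b]$.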
BooleanExpression::BoolEq(box lhs, box rhs) => {
// lhs and rhs are booleans, they flatten to 0 or 1
@ -776,9 +777,9 @@ impl<'ast, T: Field> Flattener<'ast, T> {
FieldElementExpression::Sub(box lhs, box rhs),
);
statements_flattened.push(FlatStatement::Directive(DirectiveStatement::new(
statements_flattened.push(FlatStatement::Directive(FlatDirective::new(
vec![name_y, name_m],
Helper::Rust(RustHelper::ConditionEq),
Solver::ConditionEq,
vec![x.clone()],
)));
statements_flattened.push(FlatStatement::Condition(
@ -856,6 +857,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
true => T::from(1),
false => T::from(0),
}),
BooleanExpression::FunctionCall(..) => unreachable!(),
BooleanExpression::IfElse(box condition, box consequence, box alternative) => self
.flatten_if_else_expression(
symbols,
@ -923,8 +925,8 @@ impl<'ast, T: Field> Flattener<'ast, T> {
replacement_map.insert(formal_argument.id, new_var);
}
// Ensure Renaming and correct returns:
// add all flattened statements, adapt return statement
// Ensure renaming and correct returns:
// add all flattened statements, adapt return statements
let (mut return_statements, statements): (Vec<_>, Vec<_>) =
funct.statements.into_iter().partition(|s| match s {
@ -963,9 +965,9 @@ impl<'ast, T: Field> Flattener<'ast, T> {
.into_iter()
.map(|i| i.apply_substitution(&replacement_map))
.collect();
FlatStatement::Directive(DirectiveStatement {
FlatStatement::Directive(FlatDirective {
outputs: new_outputs,
helper: d.helper,
solver: d.solver,
inputs: new_inputs,
})
}
@ -990,7 +992,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `expr` - `TypedExpression` that will be flattened.
fn flatten_expression(
@ -1039,7 +1041,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `expr` - `FieldElementExpression` that will be flattened.
fn flatten_field_expression(
@ -1138,9 +1140,9 @@ impl<'ast, T: Field> Flattener<'ast, T> {
let inverse = self.use_sym();
// # invb = 1/b
statements_flattened.push(FlatStatement::Directive(DirectiveStatement::new(
statements_flattened.push(FlatStatement::Directive(FlatDirective::new(
vec![invb],
Helper::Rust(RustHelper::Div),
Solver::Div,
vec![FlatExpression::Number(T::one()), new_right.clone()],
)));
@ -1151,9 +1153,9 @@ impl<'ast, T: Field> Flattener<'ast, T> {
));
// # c = a/b
statements_flattened.push(FlatStatement::Directive(DirectiveStatement::new(
statements_flattened.push(FlatStatement::Directive(FlatDirective::new(
vec![inverse],
Helper::Rust(RustHelper::Div),
Solver::Div,
vec![new_left.clone(), new_right.clone()],
)));
@ -1226,7 +1228,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
// construct the result iterating through the bits, multiplying by the associated power iff the bit is true
ebits_le.into_iter().zip(powers).fold(
FlatExpression::Number(T::from(1)), // initialise the result at 1. If we have no bits to iterate through, we're computing x**0 == 1
FlatExpression::Number(T::from(1)), // initialise the result at 1. If we have no bits to iterate through, we're computing x**0 == 1
|acc, (bit, power)| match bit {
true => {
// update the result by introducing a new variable
@ -1282,7 +1284,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `expr` - `StructExpression` that will be flattened.
fn flatten_struct_expression(
@ -1320,29 +1322,29 @@ impl<'ast, T: Field> Flattener<'ast, T> {
StructExpressionInner::IfElse(box condition, box consequence, box alternative) => {
members
.into_iter()
.flat_map(|(id, ty)| match ty {
.flat_map(|member| match ty {
Type::FieldElement => FieldElementExpression::if_else(
condition.clone(),
FieldElementExpression::member(consequence.clone(), id.clone()),
FieldElementExpression::member(alternative.clone(), id.clone()),
FieldElementExpression::member(consequence.clone(), member.id.clone()),
FieldElementExpression::member(alternative.clone(), member.id.clone()),
)
.flatten(self, symbols, statements_flattened),
Type::Boolean => BooleanExpression::if_else(
condition.clone(),
BooleanExpression::member(consequence.clone(), id.clone()),
BooleanExpression::member(alternative.clone(), id.clone()),
BooleanExpression::member(consequence.clone(), member.id.clone()),
BooleanExpression::member(alternative.clone(), member.id.clone()),
)
.flatten(self, symbols, statements_flattened),
Type::Struct(..) => StructExpression::if_else(
condition.clone(),
StructExpression::member(consequence.clone(), id.clone()),
StructExpression::member(alternative.clone(), id.clone()),
StructExpression::member(consequence.clone(), member.id.clone()),
StructExpression::member(alternative.clone(), member.id.clone()),
)
.flatten(self, symbols, statements_flattened),
Type::Array(..) => ArrayExpression::if_else(
condition.clone(),
ArrayExpression::member(consequence.clone(), id.clone()),
ArrayExpression::member(alternative.clone(), id.clone()),
ArrayExpression::member(consequence.clone(), member.id.clone()),
ArrayExpression::member(alternative.clone(), member.id.clone()),
)
.flatten(self, symbols, statements_flattened),
})
@ -1368,7 +1370,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `expr` - `ArrayExpression` that will be flattened.
/// # Remarks
@ -1447,7 +1449,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `statements_flattened` - Vector where new flattened statements can be added.
/// * `stat` - `TypedStatement` that will be flattened.
fn flatten_statement(
@ -1564,7 +1566,7 @@ impl<'ast, T: Field> Flattener<'ast, T> {
///
/// # Arguments
///
/// * `symbols` - Available functions in in this context
/// * `symbols` - Available functions in this context
/// * `funct` - `TypedFunction` that will be flattened
fn flatten_function(
&mut self,
@ -1591,7 +1593,6 @@ impl<'ast, T: Field> Flattener<'ast, T> {
FlatFunction {
arguments: arguments_flattened,
statements: statements_flattened,
signature: funct.signature,
}
}
@ -1735,7 +1736,6 @@ mod tests {
expressions: vec![FlatExpression::Identifier(FlatVariable::new(1))],
}),
],
signature: Signature::new().outputs(vec![Type::FieldElement]),
};
let flattened = flattener.flatten_function(&mut HashMap::new(), function);
@ -1802,7 +1802,6 @@ mod tests {
expressions: vec![FlatExpression::Identifier(FlatVariable::new(2))],
}),
],
signature: Signature::new().outputs(vec![Type::FieldElement]),
};
let flattened = flattener.flatten_function(&mut HashMap::new(), function);
@ -1922,7 +1921,6 @@ mod tests {
expressions: vec![FlatExpression::Identifier(FlatVariable::new(7))],
}),
],
signature: Signature::new().outputs(vec![Type::FieldElement]),
};
let flattened = flattener.flatten_function(&mut HashMap::new(), function);
@ -2041,9 +2039,9 @@ mod tests {
FlatStatement::Definition(five, FlatExpression::Number(Bn128Field::from(5))),
FlatStatement::Definition(b0, b.into()),
// check div by 0
FlatStatement::Directive(DirectiveStatement::new(
FlatStatement::Directive(FlatDirective::new(
vec![invb0],
Helper::Rust(RustHelper::Div),
Solver::Div,
vec![FlatExpression::Number(Bn128Field::from(1)), b0.into()]
)),
FlatStatement::Condition(
@ -2051,9 +2049,9 @@ mod tests {
FlatExpression::Mult(box invb0.into(), box b0.into()),
),
// execute div
FlatStatement::Directive(DirectiveStatement::new(
FlatStatement::Directive(FlatDirective::new(
vec![sym_0],
Helper::Rust(RustHelper::Div),
Solver::Div,
vec![five, b0]
)),
FlatStatement::Condition(
@ -2064,9 +2062,9 @@ mod tests {
FlatStatement::Definition(sym_1, sym_0.into()),
FlatStatement::Definition(b1, b.into()),
// check div by 0
FlatStatement::Directive(DirectiveStatement::new(
FlatStatement::Directive(FlatDirective::new(
vec![invb1],
Helper::Rust(RustHelper::Div),
Solver::Div,
vec![FlatExpression::Number(Bn128Field::from(1)), b1.into()]
)),
FlatStatement::Condition(
@ -2074,9 +2072,9 @@ mod tests {
FlatExpression::Mult(box invb1.into(), box b1.into()),
),
// execute div
FlatStatement::Directive(DirectiveStatement::new(
FlatStatement::Directive(FlatDirective::new(
vec![sym_2],
Helper::Rust(RustHelper::Div),
Solver::Div,
vec![sym_1, b1]
)),
FlatStatement::Condition(

zokrates_core/src/imports.rs

@ -6,14 +6,16 @@
use crate::absy::*;
use crate::compile::compile_module;
use crate::compile::{CompileErrorInner, CompileErrors, Resolve};
use crate::compile::{CompileErrorInner, CompileErrors};
use crate::embed::FlatEmbed;
use crate::parser::Position;
use std::collections::HashMap;
use std::fmt;
use std::io;
use std::io::BufRead;
use std::path::{Path, PathBuf};
use typed_arena::Arena;
use zokrates_common::Resolver;
use zokrates_field::Field;
#[derive(PartialEq, Debug)]
@ -54,9 +56,11 @@ impl From<io::Error> for Error {
}
}
type ImportPath<'ast> = &'ast Path;
#[derive(PartialEq, Clone)]
pub struct Import<'ast> {
source: Identifier<'ast>,
source: ImportPath<'ast>,
symbol: Option<Identifier<'ast>>,
alias: Option<Identifier<'ast>>,
}
@ -64,7 +68,7 @@ pub struct Import<'ast> {
pub type ImportNode<'ast> = Node<Import<'ast>>;
impl<'ast> Import<'ast> {
pub fn new(symbol: Option<Identifier<'ast>>, source: Identifier<'ast>) -> Import<'ast> {
pub fn new(symbol: Option<Identifier<'ast>>, source: ImportPath<'ast>) -> Import<'ast> {
Import {
symbol,
source,
@ -78,7 +82,7 @@ impl<'ast> Import<'ast> {
pub fn new_with_alias(
symbol: Option<Identifier<'ast>>,
source: Identifier<'ast>,
source: ImportPath<'ast>,
alias: Identifier<'ast>,
) -> Import<'ast> {
Import {
@ -93,7 +97,7 @@ impl<'ast> Import<'ast> {
self
}
pub fn get_source(&self) -> &Identifier<'ast> {
pub fn get_source(&self) -> &ImportPath<'ast> {
&self.source
}
}
@ -101,8 +105,8 @@ impl<'ast> Import<'ast> {
impl<'ast> fmt::Display for Import<'ast> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.alias {
Some(ref alias) => write!(f, "import {} as {}", self.source, alias),
None => write!(f, "import {}", self.source),
Some(ref alias) => write!(f, "import {} as {}", self.source.display(), alias),
None => write!(f, "import {}", self.source.display()),
}
}
}
@ -110,8 +114,13 @@ impl<'ast> fmt::Display for Import<'ast> {
impl<'ast> fmt::Debug for Import<'ast> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.alias {
Some(ref alias) => write!(f, "import(source: {}, alias: {})", self.source, alias),
None => write!(f, "import(source: {})", self.source),
Some(ref alias) => write!(
f,
"import(source: {}, alias: {})",
self.source.display(),
alias
),
None => write!(f, "import(source: {})", self.source.display()),
}
}
}
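With the path-based source, these impls render as below; a small check mirroring the `write!` format strings above (the concrete path is just the one used in the tests at the end of this file):

```rust
use std::path::Path;

fn main() {
    // mirrors `write!(f, "import {} as {}", self.source.display(), alias)`
    let source = Path::new("./foo/bar/baz.zok");
    let rendered = format!("import {} as {}", source.display(), "myalias");
    assert_eq!(rendered, "import ./foo/bar/baz.zok as myalias");
}
```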
@ -123,11 +132,11 @@ impl Importer {
Importer {}
}
pub fn apply_imports<'ast, T: Field, S: BufRead, E: Into<Error>>(
pub fn apply_imports<'ast, T: Field, E: Into<Error>>(
&self,
destination: Module<'ast, T>,
location: Option<String>,
resolve_option: Option<Resolve<S, E>>,
location: PathBuf,
resolver: Option<&dyn Resolver<E>>,
modules: &mut HashMap<ModuleId, Module<'ast, T>>,
arena: &'ast Arena<String>,
) -> Result<Module<'ast, T>, CompileErrors> {
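`apply_imports` now takes a trait object instead of the closure-based `Resolve` alias it previously received. Judging purely from the call site a few hunks down, `res.resolve(location.clone(), import.source.to_path_buf())` returning a `(source, new_location)` pair, a filesystem-backed resolver might look like the sketch below; the trait shape is inferred from this diff, not copied from `zokrates_common`.

```rust
// Hypothetical sketch of a filesystem resolver; the trait shape is inferred
// from the call sites in this diff, not quoted from zokrates_common.
use std::fs;
use std::io;
use std::path::PathBuf;

trait Resolver<E> {
    // current_location: file doing the importing; import_location: the
    // (possibly relative) path written in the import statement.
    fn resolve(
        &self,
        current_location: PathBuf,
        import_location: PathBuf,
    ) -> Result<(String, PathBuf), E>;
}

struct FsResolver;

impl Resolver<io::Error> for FsResolver {
    fn resolve(
        &self,
        current_location: PathBuf,
        import_location: PathBuf,
    ) -> Result<(String, PathBuf), io::Error> {
        // resolve the import relative to the importing file's directory
        let base = current_location.parent().unwrap_or(&current_location);
        let path = base.join(import_location).canonicalize()?;
        let source = fs::read_to_string(&path)?;
        Ok((source, path)) // (file contents, normalized location)
    }
}
```

Keying the module cache on the resolved `new_location`, as the code below does with `modules.get(&new_location)`, means two import paths that canonicalize to the same file are compiled only once.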
@ -139,7 +148,7 @@ impl Importer {
let alias = import.alias;
// handle the case of special bellman and packing imports
if import.source.starts_with("EMBED") {
match import.source.as_ref() {
match import.source.to_str().unwrap() {
"EMBED/sha256round" => {
let alias = alias.unwrap_or("sha256round");
@ -166,31 +175,48 @@ impl Importer {
return Err(CompileErrorInner::ImportError(
Error::new(format!("Embed {} not found. Options are \"EMBED/sha256round\", \"EMBED/unpack\"", s)).with_pos(Some(pos)),
)
.with_context(&location)
.in_file(&location)
.into());
}
}
} else {
// to resolve imports, we need a resolver
match resolve_option {
Some(resolve) => match resolve(location.clone(), &import.source) {
Ok((mut reader, location, alias)) => {
let mut source = String::new();
reader.read_to_string(&mut source).unwrap();
match resolver {
Some(res) => match res.resolve(location.clone(), import.source.to_path_buf()) {
Ok((source, new_location)) => {
// generate an alias from the imported path if none was given explicitly
let alias = import.alias.unwrap_or(
std::path::Path::new(import.source)
.file_stem()
.ok_or(CompileErrors::from(
CompileErrorInner::ImportError(Error::new(format!(
"Could not determine alias for import {}",
import.source.display()
)))
.in_file(&location),
))?
.to_str()
.unwrap(),
);
let source = arena.alloc(source);
match modules.get(&new_location) {
Some(_) => {}
None => {
let source = arena.alloc(source);
let compiled = compile_module(
source,
Some(location),
resolve_option,
modules,
&arena,
)
.map_err(|e| e.with_context(Some(import.source.to_string())))?;
let alias = import.alias.clone().unwrap_or(alias);
let compiled = compile_module(
source,
new_location.clone(),
resolver,
modules,
&arena,
)?;
modules.insert(import.source.to_string(), compiled);
assert!(modules
.insert(new_location.clone(), compiled)
.is_none());
}
};
symbols.push(
SymbolDeclaration {
@ -198,7 +224,7 @@ impl Importer {
symbol: Symbol::There(
SymbolImport::with_id_in_module(
import.symbol.unwrap_or("main"),
import.source.clone(),
new_location.display().to_string(),
)
.start_end(pos.0, pos.1),
),
@ -210,7 +236,7 @@ impl Importer {
return Err(CompileErrorInner::ImportError(
err.into().with_pos(Some(pos)),
)
.with_context(&location)
.in_file(&location)
.into());
}
},
@ -218,7 +244,7 @@ impl Importer {
return Err(CompileErrorInner::from(Error::new(
"Can't resolve import without a resolver",
))
.with_context(&location)
.in_file(&location)
.into());
}
}
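One consequence of the alias derivation above: an import without an explicit alias binds the source file's stem, and the error branch only fires for paths that have no stem at all. This is plain `std::path` behaviour:

```rust
use std::path::Path;

fn main() {
    // `import "./foo/bar/baz.zok"` with no alias binds the symbol `baz`
    assert_eq!("baz", Path::new("./foo/bar/baz.zok").file_stem().unwrap());
}
```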
@ -243,10 +269,10 @@ mod tests {
#[test]
fn create_with_no_alias() {
assert_eq!(
Import::new(None, "./foo/bar/baz.zok"),
Import::new(None, Path::new("./foo/bar/baz.zok")),
Import {
symbol: None,
source: "./foo/bar/baz.zok",
source: Path::new("./foo/bar/baz.zok"),
alias: None,
}
);
@ -255,10 +281,10 @@ mod tests {
#[test]
fn create_with_alias() {
assert_eq!(
Import::new_with_alias(None, "./foo/bar/baz.zok", &"myalias"),
Import::new_with_alias(None, Path::new("./foo/bar/baz.zok"), &"myalias"),
Import {
symbol: None,
source: "./foo/bar/baz.zok",
source: Path::new("./foo/bar/baz.zok"),
alias: Some("myalias"),
}
);

Some files were not shown because too many files have changed in this diff.