Merge pull request #16543 from MinaProtocol/georgeee/merge-compatible-to-develop-28jan2025

Merge compatible to develop (28 Jan 2025)
mrmr1993 authored Jan 28, 2025
2 parents bbebf48 + 9e6401d commit 05fd7d3
Showing 35 changed files with 663 additions and 82 deletions.
2 changes: 1 addition & 1 deletion buildkite/scripts/bench/run.sh
@@ -10,7 +10,7 @@ EXTRA_ARGS=""

source buildkite/scripts/bench/install.sh

MAINLINE_BRANCHES="-m develop -m compatile -m master -m dkijania/build_performance_tooling_in_ci"
MAINLINE_BRANCHES="-m develop -m compatible -m master -m dkijania/fix_benchmark_upload"
while [[ "$#" -gt 0 ]]; do case $1 in
heap-usage) BENCHMARK="heap-usage"; ;;
mina-base) BENCHMARK="mina-base"; ;;
2 changes: 1 addition & 1 deletion nix/impure-shell.nix
@@ -12,7 +12,7 @@ pkgs.mkShell {
postgresql.out
sodium-static.out
sodium-static.dev
go_1_21
go_1_19
capnproto
zlib.dev
bzip2.dev
3 changes: 2 additions & 1 deletion scripts/benchmarks/__main__.py
@@ -49,6 +49,7 @@

upload_bench = subparsers.add_parser('upload')
upload_bench.add_argument("--infile")
upload_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark to upload")

test_bench = subparsers.add_parser('test', help="Performs entire cycle of operations from run till upload")
test_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark to test")
@@ -72,7 +73,7 @@
test_bench.add_argument("--min-num-updates", default=2, type=int)


upload_bench = subparsers.add_parser('ls')
ls_bench = subparsers.add_parser('ls')

args = parser.parse_args()

17 changes: 12 additions & 5 deletions scripts/benchmarks/lib/influx.py
@@ -88,11 +88,16 @@ def check_envs():
if Influx.bucket not in os.environ:
raise RuntimeError(f"{Influx.bucket} env var not defined")

@staticmethod
def influx_host():
url = os.environ[Influx.host]
return url if url.startswith('https://') else 'https://' + url


def client(self):
Influx.check_envs()
url = os.environ[Influx.host]
return influxdb_client.InfluxDBClient(
url= url if url.startswith('https://') else 'https://' + url,
url=Influx.influx_host(),
token=os.environ[Influx.token],
org=os.environ[Influx.org],
bucket=os.environ[Influx.bucket])
@@ -152,13 +157,15 @@ def upload_csv(self, file):
)

process = subprocess.Popen([
"influx", "write", "--http-debug", "--format=csv", f"--file={file}"
"influx", "write", f"--host={Influx.influx_host()}", "--http-debug", "--format=csv", f"--file={file}"
],
stderr=subprocess.PIPE)

stderr=subprocess.PIPE )
timeout = time.time() + 60 # 1 minute
while True:
line = process.stderr.readline()
logger.info(f"influx write output - stderr: {line}")

if b"HTTP/2.0 204 No Content" in line or time.time() > timeout:
process.kill()
break
5 changes: 2 additions & 3 deletions src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
@@ -228,9 +228,8 @@ let setup_daemon logger ~itn_features =
and snark_work_fee =
flag "--snark-worker-fee" ~aliases:[ "snark-worker-fee" ]
~doc:
(sprintf
"FEE Amount a worker wants to get compensated for generating a \
snark proof" )
"FEE Amount a worker wants to get compensated for generating a snark \
proof"
(optional txn_fee)
and work_reassignment_wait =
flag "--work-reassignment-wait"
2 changes: 0 additions & 2 deletions src/app/cli/src/init/client.ml
@@ -567,7 +567,6 @@ let send_payment_graphql =
graphql_endpoint
({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver, amount)
->
let open Deferred.Let_syntax in
let fee = Option.value ~default:default_transaction_fee fee in
let%map response =
let input =
@@ -596,7 +595,6 @@ let delegate_stake_graphql =
graphql_endpoint
({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver)
->
let open Deferred.Let_syntax in
let fee = Option.value ~default:default_transaction_fee fee in
let%map response =
Graphql_client.query_exn
5 changes: 4 additions & 1 deletion src/dune-project
@@ -32,6 +32,7 @@
(package (name data_hash_lib))
(package (name debug_assert))
(package (name direction))
(package (name disk_cache))
(package (name downloader))
(package (name dummy_values))
(package (name empty_hashes))
@@ -103,6 +104,7 @@
(package (name mina_ledger))
(package (name mina_lib))
(package (name mina_lib_tests))
(package (name mina_lmdb_storage))
(package (name mina_metrics))
(package (name mina_net2))
(package (name mina_networking))
@@ -151,6 +153,7 @@
(package (name ppx_version))
(package (name precomputed_values))
(package (name promise))
(package (name proof_cache_tag))
(package (name proof_carrying_data))
(package (name protocol_version))
(package (name prover))
@@ -228,4 +231,4 @@
(package (name work_selector))
(package (name zkapp_command_builder))
(package (name zkapps_examples))
(package (name zkapp_limits))
(package (name zkapp_limits))
10 changes: 10 additions & 0 deletions src/lib/crypto/kimchi_bindings/js/bindings/srs.js
@@ -39,6 +39,16 @@ var caml_fp_srs_read = function (offset, path) {
}
};

// Provides: caml_fp_srs_lagrange_commitments_whole_domain
// Requires: tsSrs
var caml_fp_srs_lagrange_commitments_whole_domain =
tsSrs.fp.lagrangeCommitmentsWholeDomain;

// Provides: caml_fq_srs_lagrange_commitments_whole_domain
// Requires: tsSrs
var caml_fq_srs_lagrange_commitments_whole_domain =
tsSrs.fq.lagrangeCommitmentsWholeDomain;

// Provides: caml_fp_srs_lagrange_commitment
// Requires: tsSrs
var caml_fp_srs_lagrange_commitment = tsSrs.fp.lagrangeCommitment;
4 changes: 2 additions & 2 deletions src/lib/crypto/kimchi_bindings/stubs/src/srs.rs
@@ -79,8 +79,8 @@ macro_rules! impl_srs {
pub fn [<$name:snake _lagrange_commitments_whole_domain>](
srs: $name,
domain_size: ocaml::Int,
) -> Result<Vec<CamlPolyComm<$CamlG>>, ocaml::Error> {
Ok(srs.get_lagrange_basis_from_domain_size(domain_size as usize).clone().into_iter().map(|x| x.into()).collect())
) -> Vec<CamlPolyComm<$CamlG>> {
srs.get_lagrange_basis_from_domain_size(domain_size as usize).clone().into_iter().map(|x| x.into()).collect()
}


6 changes: 6 additions & 0 deletions src/lib/crypto/kimchi_bindings/wasm/src/projective.rs
@@ -75,12 +75,18 @@ macro_rules! impl_projective {
proj.into()
}

// improper_ctypes_definitions is allowed here because the CamlBase/ScalarField struct
// already has #[repr(C)] in its definition
#[allow(improper_ctypes_definitions)]
#[wasm_bindgen]
pub extern "C" fn [<caml_ $name:snake _endo_base>]() -> $CamlBaseField {
let (endo_q, _endo_r) = poly_commitment::srs::endos::<GAffine>();
endo_q.into()
}

// improper_ctypes_definitions is allowed here because the CamlBase/ScalarField struct
// already has #[repr(C)] in its definition
#[allow(improper_ctypes_definitions)]
#[wasm_bindgen]
pub extern "C" fn [<caml_ $name:snake _endo_scalar>]() -> $CamlScalarField {
let (_endo_q, endo_r) = poly_commitment::srs::endos::<GAffine>();
50 changes: 38 additions & 12 deletions src/lib/crypto/kimchi_bindings/wasm/src/srs.rs
@@ -4,7 +4,7 @@ use ark_poly::DenseUVPolynomial;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations};
use paste::paste;
use poly_commitment::SRS as ISRS;
use poly_commitment::{commitment::b_poly_coefficients, srs::SRS, hash_map_cache::HashMapCache};
use poly_commitment::{commitment::b_poly_coefficients, hash_map_cache::HashMapCache, srs::SRS};
use serde::{Deserialize, Serialize};
use std::ops::Deref;
use std::{
@@ -124,6 +124,33 @@ macro_rules! impl_srs {
Ok(Some(Arc::new(srs).into()))
}

#[wasm_bindgen]
pub fn [<$name:snake _lagrange_commitments_whole_domain_ptr>](
srs: &[<Wasm $field_name:camel Srs>],
domain_size: i32,
) -> *mut WasmVector<$WasmPolyComm> {
// this is the best workaround we have, for now
// returns a pointer to the commitment
// later, we read the commitment from the pointer
let comm = srs
.get_lagrange_basis_from_domain_size(domain_size as usize)
.clone()
.into_iter()
.map(|x| x.into())
.collect();
let boxed_comm = Box::<WasmVector<WasmPolyComm>>::new(comm);
Box::into_raw(boxed_comm)
}

#[wasm_bindgen]
pub fn [<$name:snake _lagrange_commitments_whole_domain_read_from_ptr>](
ptr: *mut WasmVector<$WasmPolyComm>,
) -> WasmVector<$WasmPolyComm> {
// read the commitment at the pointers address, hack for the web worker implementation (see o1js web worker impl for details)
let b = unsafe { Box::from_raw(ptr) };
b.as_ref().clone()
}

#[wasm_bindgen]
pub fn [<$name:snake _lagrange_commitment>](
srs: &[<Wasm $field_name:camel Srs>],
@@ -216,7 +243,6 @@ pub mod fp {
use mina_curves::pasta::{Fp, Vesta as G};

impl_srs!(caml_fp_srs, WasmPastaFp, WasmG, Fp, G, WasmPolyComm, Fp);

#[wasm_bindgen]
pub fn caml_fp_srs_create_parallel(depth: i32) -> WasmFpSrs {
crate::rayon::run_in_pool(|| Arc::new(SRS::<G>::create_parallel(depth as usize)).into())
@@ -267,9 +293,10 @@ pub mod fp {
domain_size: i32,
input_bases: WasmVector<WasmPolyComm>,
) {
srs.lagrange_bases.get_or_generate(domain_size as usize, || {
input_bases.into_iter().map(Into::into).collect()
});
srs.lagrange_bases
.get_or_generate(domain_size as usize, || {
input_bases.into_iter().map(Into::into).collect()
});
}

// compute & add lagrange basis internally, return the entire basis
@@ -279,8 +306,7 @@
domain_size: i32,
) -> WasmVector<WasmPolyComm> {
// compute lagrange basis
let basis =
crate::rayon::run_in_pool(|| {
let basis = crate::rayon::run_in_pool(|| {
let domain =
EvaluationDomain::<Fp>::new(domain_size as usize).expect("invalid domain size");
srs.get_lagrange_basis(domain)
@@ -347,9 +373,10 @@ pub mod fq {
domain_size: i32,
input_bases: WasmVector<WasmPolyComm>,
) {
srs.lagrange_bases.get_or_generate(domain_size as usize, || {
input_bases.into_iter().map(Into::into).collect()
});
srs.lagrange_bases
.get_or_generate(domain_size as usize, || {
input_bases.into_iter().map(Into::into).collect()
});
}

// compute & add lagrange basis internally, return the entire basis
@@ -359,8 +386,7 @@
domain_size: i32,
) -> WasmVector<WasmPolyComm> {
// compute lagrange basis
let basis =
crate::rayon::run_in_pool(|| {
let basis = crate::rayon::run_in_pool(|| {
let domain =
EvaluationDomain::<Fq>::new(domain_size as usize).expect("invalid domain size");
srs.get_lagrange_basis(domain)
20 changes: 20 additions & 0 deletions src/lib/disk_cache/disk_cache.mli
@@ -0,0 +1,20 @@
open Core_kernel
open Async

module Make : functor (T : Binable.S) -> sig
type t

(** Initialize the on-disk cache explicitly before interactions with it take place. *)
val initialize :
string
-> logger:Logger.t
-> (t, [> `Initialization_error of Error.t ]) Deferred.Result.t

type id

(** Put the value to disk, return an identifier that is associated with a special handler in GC. *)
val put : t -> T.t -> id

(** Read from the cache, crashing if the value cannot be found. *)
val get : t -> id -> T.t
end
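
The interface above is small, and a short usage sketch helps orient readers. The sketch below is illustrative only: it assumes a Binable proof module named Proof and a scratch directory path, neither of which comes from this change.

open Core_kernel
open Async

(* Hypothetical instantiation of the functor over some [Proof : Binable.S]. *)
module Proof_cache = Disk_cache.Make (Proof)

let roundtrip ~logger ~(proof : Proof.t) =
  match%bind Proof_cache.initialize "/tmp/proof-cache" ~logger with
  | Error (`Initialization_error err) ->
      failwithf "could not initialize proof cache: %s"
        (Error.to_string_hum err) ()
  | Ok cache ->
      (* [put] writes the value to disk and returns a lightweight id; the
         backing file is cleaned up when the id is garbage-collected. *)
      let id = Proof_cache.put cache proof in
      (* [get] reads the value back, crashing if the entry cannot be found. *)
      let (_restored : Proof.t) = Proof_cache.get cache id in
      Deferred.unit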
14 changes: 14 additions & 0 deletions src/lib/disk_cache/dune
@@ -0,0 +1,14 @@
(library
(public_name disk_cache)
(virtual_modules disk_cache)
(default_implementation disk_cache.filesystem)
(libraries
;; opam libraries
core_kernel
async
;; local libraries
logger
)
(preprocess
(pps ppx_mina ppx_version))
(instrumentation (backend bisect_ppx)))
60 changes: 60 additions & 0 deletions src/lib/disk_cache/filesystem/disk_cache.ml
@@ -0,0 +1,60 @@
(* Cache proofs using the filesystem, one file per proof. *)

open Core
open Async

module Make (B : sig
include Binable.S
end) =
struct
type t = string * int ref

type id = { idx : int }

let failed_to_get_cache_folder_status ~logger ~(error_msg : string) ~path =
[%log error] "%s" error_msg ~metadata:[ ("path", `String path) ] ;
failwithf "%s (%s)" error_msg path ()

let initialize path ~logger =
let open Deferred.Let_syntax in
match%bind Sys.is_directory path with
| `Yes ->
let%bind () = File_system.clear_dir path in
Deferred.Result.return (path, ref 0)
| `No -> (
match%bind Sys.is_file path with
| `Yes ->
failed_to_get_cache_folder_status ~logger
~error_msg:
"Invalid path to proof cache folder. Path points to a file"
~path
| `No ->
let%bind () = File_system.create_dir path in
Deferred.Result.return (path, ref 0)
| `Unknown ->
failed_to_get_cache_folder_status ~logger
~error_msg:"Cannot evaluate existence of cache folder" ~path )
| `Unknown ->
failed_to_get_cache_folder_status ~logger
~error_msg:"Cannot evaluate existence of cache folder" ~path

let path root i = root ^ Filename.dir_sep ^ Int.to_string i

let get ((root, _counter) : t) (id : id) : B.t =
(* Read from the file. *)
In_channel.with_file ~binary:true (path root id.idx) ~f:(fun chan ->
let str = In_channel.input_all chan in
Binable.of_string (module B) str )

let put ((root, counter) : t) x : id =
let new_counter = !counter in
incr counter ;
let res = { idx = new_counter } in
(* When this reference is GC'd, delete the file. *)
Core.Gc.Expert.add_finalizer_last_exn res (fun () ->
Core.Unix.unlink (path root new_counter) ) ;
(* Write the proof to the file. *)
Out_channel.with_file ~binary:true (path root new_counter) ~f:(fun chan ->
Out_channel.output_string chan @@ Binable.to_string (module B) x ) ;
res
end
16 changes: 16 additions & 0 deletions src/lib/disk_cache/filesystem/dune
@@ -0,0 +1,16 @@
(library
(public_name disk_cache.filesystem)
(name disk_cache_filesystem)
(implements disk_cache)
(libraries
;; opam libraries
core
async
;; local libraries
logger
file_system
)
(preprocess
(pps ppx_mina ppx_version ppx_jane))
(instrumentation (backend bisect_ppx))
)
