diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..51851fd --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,30 @@ +name: Lint + +on: + pull_request: + branches: [main, dev] + push: + branches: [main] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + version: "latest" + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync + + - name: Run ruff check + run: uv run --with ruff ruff check . + + - name: Run ruff format check + run: uv run --with ruff ruff format --check . diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 76dbc60..76e3370 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,8 +1,10 @@ name: Build & Release on: + pull_request: + branches: [main] push: - branches: [main, fix/build-*] + branches: [fix/build-*] tags: - "v*.*.*" - "*.*.*" @@ -11,6 +13,7 @@ jobs: build_wheels: name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} + if: startsWith(github.ref, 'refs/tags/') || matrix.os == 'macos-latest' strategy: matrix: os: [ubuntu-latest, macos-latest] @@ -26,7 +29,7 @@ jobs: uses: pypa/cibuildwheel@v2.21 env: # Python versions to build - CIBW_SKIP: "pp* cp38-* cp39-* cp310-* cp311-* *-musllinux_*" + CIBW_SKIP: "pp* cp38-* cp39-* cp310-* *-musllinux_*" # Use manylinux_2_28 (glibc 2.28+) CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28 @@ -42,7 +45,7 @@ jobs: CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=11.0 # Install Python build dependencies - CIBW_BEFORE_BUILD: pip install Cython setuptools wheel + CIBW_BEFORE_BUILD: pip install Cython setuptools wheel "packaging>=24.2" # Architectures to build CIBW_ARCHS_LINUX: x86_64 aarch64 @@ -56,6 +59,7 @@ jobs: build_sdist: name: Build source distribution runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/') steps: - uses: 
actions/checkout@v4 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..879fd79 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,21 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.0 + hooks: + - id: ruff + args: [ --fix ] + - id: ruff-format + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.8.0 + hooks: + - id: mypy + additional_dependencies: [types-setuptools] diff --git a/dot_ring/curve/curve.py b/dot_ring/curve/curve.py index 9b709b3..80db784 100644 --- a/dot_ring/curve/curve.py +++ b/dot_ring/curve/curve.py @@ -105,15 +105,9 @@ def _validate_parameters(self) -> bool: # Original scalar field validation # Allow int or custom Scalar types if isinstance(self.GENERATOR_X, int) and isinstance(self.GENERATOR_Y, int): - if not ( - 0 <= self.GENERATOR_X < self.PRIME_FIELD - and 0 <= self.GENERATOR_Y < self.PRIME_FIELD - ): + if not (0 <= self.GENERATOR_X < self.PRIME_FIELD and 0 <= self.GENERATOR_Y < self.PRIME_FIELD): return False - elif not ( - isinstance(self.GENERATOR_X, (tuple, list)) - or isinstance(self.GENERATOR_Y, (tuple, list)) - ): + elif not (isinstance(self.GENERATOR_X, (tuple, list)) or isinstance(self.GENERATOR_Y, (tuple, list))): # Assume custom field elements (like Scalar) # We can't easily check bounds against int PRIME_FIELD if they are opaque, # but we assume they are valid if they are passed. 
@@ -124,12 +118,7 @@ def _validate_parameters(self) -> bool: # if not self.is_on_curve(self.GENERATOR_X, self.GENERATOR_Y): #already given in point class # return False - return ( - self.PRIME_FIELD > 2 - and self.ORDER > 2 - and self.COFACTOR > 0 - and self.PRIME_FIELD != self.ORDER - ) + return self.PRIME_FIELD > 2 and self.ORDER > 2 and self.COFACTOR > 0 and self.PRIME_FIELD != self.ORDER def hash_to_field(self, msg: bytes, count: int) -> list[int]: """ @@ -181,11 +170,7 @@ def expand_message_xmd(self, msg: bytes, len_in_bytes: int) -> bytes: b_in_bytes = self.H_A().digest_size ell = math.ceil(len_in_bytes / b_in_bytes) - if ( - (ell > 255 and self.PRIME_FIELD.bit_length() < 384) - or len_in_bytes > 65535 - or len(self.DST) > 255 - ): + if (ell > 255 and self.PRIME_FIELD.bit_length() < 384) or len_in_bytes > 65535 or len(self.DST) > 255: # Relax ell check for large curves like P-521 where len_in_bytes might be large # But strictly, RFC 9380 says ell <= 255. # If len_in_bytes is huge, maybe we should just allow it if the curve is large? @@ -214,9 +199,7 @@ def expand_message_xmd(self, msg: bytes, len_in_bytes: int) -> bytes: # but 26KB seems wrong. # Let's assume the user knows what they are doing if they request such length. # But XMD structure relies on 1 byte for 'i' in loop. So ell cannot exceed 255. 
- raise ValueError( - f"Invalid input size parameters: ell={ell}, len={len_in_bytes}, dst_len={len(self.DST)}" - ) + raise ValueError(f"Invalid input size parameters: ell={ell}, len={len_in_bytes}, dst_len={len(self.DST)}") DST_prime = self.DST + self.I2OSP(len(self.DST), 1) Z_pad = self.I2OSP(0, cast(int, self.S_in_bytes)) @@ -231,9 +214,7 @@ def expand_message_xmd(self, msg: bytes, len_in_bytes: int) -> bytes: b_values = [b_1] for i in range(2, ell + 1): - b_i = self.H_A( - self.strxor(b_0, b_values[-1]) + self.I2OSP(i, 1) + DST_prime - ).digest() + b_i = self.H_A(self.strxor(b_0, b_values[-1]) + self.I2OSP(i, 1) + DST_prime).digest() b_values.append(b_i) uniform_bytes = b"".join(b_values) diff --git a/dot_ring/curve/field_element.py b/dot_ring/curve/field_element.py index 061ecbe..d4de42d 100644 --- a/dot_ring/curve/field_element.py +++ b/dot_ring/curve/field_element.py @@ -34,9 +34,7 @@ def __add__(self, other: FieldElement | int) -> FieldElement: if isinstance(other, FieldElement): if self.p != other.p: raise ValueError("Cannot add elements from different fields") - return FieldElement( - (self.re + other.re) % self.p, (self.im + other.im) % self.p, self.p - ) + return FieldElement((self.re + other.re) % self.p, (self.im + other.im) % self.p, self.p) return FieldElement((self.re + other) % self.p, self.im, self.p) def __sub__(self, other: FieldElement | int) -> FieldElement: @@ -44,9 +42,7 @@ def __sub__(self, other: FieldElement | int) -> FieldElement: if isinstance(other, FieldElement): if self.p != other.p: raise ValueError("Cannot subtract elements from different fields") - return FieldElement( - (self.re - other.re) % self.p, (self.im - other.im) % self.p, self.p - ) + return FieldElement((self.re - other.re) % self.p, (self.im - other.im) % self.p, self.p) return FieldElement((self.re - other) % self.p, self.im, self.p) def __mul__(self, other: FieldElement | int) -> FieldElement: @@ -58,9 +54,7 @@ def __mul__(self, other: FieldElement | int) -> 
FieldElement: re = (self.re * other.re - self.im * other.im) % self.p im = (self.re * other.im + self.im * other.re) % self.p return FieldElement(re, im, self.p) - return FieldElement( - (self.re * other) % self.p, (self.im * other) % self.p, self.p - ) + return FieldElement((self.re * other) % self.p, (self.im * other) % self.p, self.p) def __truediv__(self, other: FieldElement | int) -> FieldElement: """Divide two field elements or a field element by an integer.""" @@ -74,9 +68,7 @@ def inv(self) -> FieldElement: # For Fp2, the inverse of (a + bi) is (a - bi)/(a² + b²) denom = (self.re * self.re + self.im * self.im) % self.p inv_denom = pow(denom, -1, self.p) - return FieldElement( - (self.re * inv_denom) % self.p, (-self.im * inv_denom) % self.p, self.p - ) + return FieldElement((self.re * inv_denom) % self.p, (-self.im * inv_denom) % self.p, self.p) def __neg__(self) -> FieldElement: """Negate the field element.""" diff --git a/dot_ring/curve/glv.py b/dot_ring/curve/glv.py index e74ee09..b1bdc48 100644 --- a/dot_ring/curve/glv.py +++ b/dot_ring/curve/glv.py @@ -52,9 +52,7 @@ def _validate_parameters(self) -> bool: """ return self.lambda_param != 0 and self.constant_b != 0 and self.constant_c != 0 - def extended_euclidean_algorithm( - self, n: int, lam: int - ) -> list[tuple[int, int, int]]: + def extended_euclidean_algorithm(self, n: int, lam: int) -> list[tuple[int, int, int]]: """ Compute extended Euclidean algorithm sequence. @@ -84,9 +82,7 @@ def extended_euclidean_algorithm( return sequence[:-1] @lru_cache(maxsize=1024) # noqa: B019 - def find_short_vectors( - self, n: int, lam: int - ) -> tuple[tuple[int, int], tuple[int, int]]: + def find_short_vectors(self, n: int, lam: int) -> tuple[tuple[int, int], tuple[int, int]]: """ Find short vectors for scalar decomposition. 
@@ -187,9 +183,7 @@ def compute_endomorphism(self, point: AffinePointT) -> AffinePointT: return point.__class__(x_a, y_a) - def windowed_simultaneous_mult( - self, k1: int, k2: int, P1: AffinePointT, P2: AffinePointT, w: int = 2 - ) -> AffinePointT: + def windowed_simultaneous_mult(self, k1: int, k2: int, P1: AffinePointT, P2: AffinePointT, w: int = 2) -> AffinePointT: """ Compute k1 * P1 + k2 * P2 using windowed simultaneous multi-scalar multiplication. @@ -235,9 +229,7 @@ def windowed_simultaneous_mult( assert projective_to_affine is not None # Use compiled MSM - rx, ry, rz, rt = _compiled_msm( - k1, k2, P1.x, P1.y, 1, p1_t, P2.x, P2.y, 1, p2_t, a_coeff, d_coeff, p, w - ) + rx, ry, rz, rt = _compiled_msm(k1, k2, P1.x, P1.y, 1, p1_t, P2.x, P2.y, 1, p2_t, a_coeff, d_coeff, p, w) # Convert back to affine ax, ay = projective_to_affine(rx, ry, rz, p) @@ -288,16 +280,7 @@ def multi_scalar_mult_4( d_coeff = P1.curve.EdwardsD # Convert to projective coordinates - if ( - P1.x is None - or P1.y is None - or P2.x is None - or P2.y is None - or P3.x is None - or P3.y is None - or P4.x is None - or P4.y is None - ): + if P1.x is None or P1.y is None or P2.x is None or P2.y is None or P3.x is None or P3.y is None or P4.x is None or P4.y is None: # Fallback to simple addition for identity points res = P1 * k1 + P2 * k2 # type: ignore[operator] res = res + P3 * k3 # type: ignore[operator] diff --git a/dot_ring/curve/montgomery/mg_affine_point.py b/dot_ring/curve/montgomery/mg_affine_point.py index fc8674d..14b177c 100644 --- a/dot_ring/curve/montgomery/mg_affine_point.py +++ b/dot_ring/curve/montgomery/mg_affine_point.py @@ -75,9 +75,7 @@ def __add__(self, other: MGAffinePoint[C]) -> MGAffinePoint[C]: # if y == 0 then slope denominator = 0 => result is identity if y1 % p == 0: return self.__class__(None, None) - numerator = ( - 3 * cast(int, x1) * cast(int, x1) + 2 * cast(int, A) * cast(int, x1) + 1 - ) % p + numerator = (3 * cast(int, x1) * cast(int, x1) + 2 * cast(int, A) * 
cast(int, x1) + 1) % p denominator = (2 * cast(int, B) * cast(int, y1)) % p # Check if denominator is zero before computing inverse if denominator == 0: @@ -99,9 +97,7 @@ def __add__(self, other: MGAffinePoint[C]) -> MGAffinePoint[C]: raise ValueError("Unexpected zero denominator in point addition") lam = (numerator * pow(denominator, -1, p)) % p # Corrected formula for x3 in point addition - x3 = ( - cast(int, B) * lam * lam - cast(int, A) - cast(int, x1) - cast(int, x2) - ) % p + x3 = (cast(int, B) * lam * lam - cast(int, A) - cast(int, x1) - cast(int, x2)) % p # Corrected formula for y3 y3 = (lam * (cast(int, x1) - x3) - cast(int, y1)) % p return self.__class__(x3, y3) @@ -266,18 +262,14 @@ def encode_to_curve( # Check if it's an ELL2 variant (ELL2 or ELL2_NU) if cls.curve.E2C in (E2C_Variant.ELL2, E2C_Variant.ELL2_NU): if cls.curve.E2C.value.endswith("_NU_"): - return cls.encode_to_curve_hash2_suite_nu( - alpha_string, salt, General_Check - ) + return cls.encode_to_curve_hash2_suite_nu(alpha_string, salt, General_Check) return cls.encode_to_curve_hash2_suite_ro(alpha_string, salt, General_Check) else: raise ValueError(f"Unexpected E2C Variant: {cls.curve.E2C}") @classmethod - def encode_to_curve_hash2_suite_nu( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> MGAffinePoint[C] | Any: + def encode_to_curve_hash2_suite_nu(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> MGAffinePoint[C] | Any: """ Encode a string to a curve point using Elligator 2. @@ -297,9 +289,7 @@ def encode_to_curve_hash2_suite_nu( return R.clear_cofactor() @classmethod - def encode_to_curve_hash2_suite_ro( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> MGAffinePoint[C] | Any: + def encode_to_curve_hash2_suite_ro(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> MGAffinePoint[C] | Any: """ Encode a string to a curve point using Elligator 2. 
@@ -390,12 +380,8 @@ def point_to_string(self) -> bytes: # Encode u and v coordinates as little-endian bytes if self.x is None or self.y is None: raise ValueError("Cannot serialize identity point") - x_bytes = int(cast(int, self.x)).to_bytes( - field_byte_len, cast(Literal["little", "big"], self.curve.ENDIAN) - ) - y_bytes = int(cast(int, self.y)).to_bytes( - field_byte_len, cast(Literal["little", "big"], self.curve.ENDIAN) - ) + x_bytes = int(cast(int, self.x)).to_bytes(field_byte_len, cast(Literal["little", "big"], self.curve.ENDIAN)) + y_bytes = int(cast(int, self.y)).to_bytes(field_byte_len, cast(Literal["little", "big"], self.curve.ENDIAN)) return x_bytes + y_bytes else: raise NotImplementedError("Compressed encoding not implemented") diff --git a/dot_ring/curve/montgomery/mg_curve.py b/dot_ring/curve/montgomery/mg_curve.py index 64b29d3..ef9af39 100644 --- a/dot_ring/curve/montgomery/mg_curve.py +++ b/dot_ring/curve/montgomery/mg_curve.py @@ -126,11 +126,7 @@ def __eq__(self, other: object) -> bool: """Check if two curves are equal.""" if not isinstance(other, MGCurve): return False - return ( - self.PRIME_FIELD == other.PRIME_FIELD - and self.A == other.A - and self.B == other.B - ) + return self.PRIME_FIELD == other.PRIME_FIELD and self.A == other.A and self.B == other.B def __hash__(self) -> int: """Hash for use as dictionary keys.""" @@ -142,7 +138,4 @@ def __str__(self) -> str: def __repr__(self) -> str: """Detailed string representation.""" - return ( - f"MGCurve(PRIME_FIELD={self.PRIME_FIELD}, A={self.A}, B={self.B}, " - f"equation: {self.B}*v² = u³ + {self.A}*u² + u)" - ) + return f"MGCurve(PRIME_FIELD={self.PRIME_FIELD}, A={self.A}, B={self.B}, equation: {self.B}*v² = u³ + {self.A}*u² + u)" diff --git a/dot_ring/curve/point.py b/dot_ring/curve/point.py index d2e5735..4a93f28 100644 --- a/dot_ring/curve/point.py +++ b/dot_ring/curve/point.py @@ -35,20 +35,15 @@ class PointProtocol(Protocol[C]): y: int curve: C - def __add__(self, other: 
PointProtocol[C]) -> PointProtocol[C]: - ... + def __add__(self, other: PointProtocol[C]) -> PointProtocol[C]: ... - def __mul__(self, scalar: int) -> PointProtocol[C]: - ... + def __mul__(self, scalar: int) -> PointProtocol[C]: ... - def __rmul__(self, scalar: int) -> PointProtocol[C]: - ... + def __rmul__(self, scalar: int) -> PointProtocol[C]: ... - def is_on_curve(self) -> bool: - ... + def is_on_curve(self) -> bool: ... - def is_identity(self) -> bool: - ... + def is_identity(self) -> bool: ... class CurvePoint(Generic[C]): @@ -117,10 +112,7 @@ def _validate_coordinates(self) -> bool: """ if self.is_identity(): return True - return ( - 0 <= cast(int, self.x) < self.curve.PRIME_FIELD - and 0 <= cast(int, self.y) < self.curve.PRIME_FIELD - ) + return 0 <= cast(int, self.x) < self.curve.PRIME_FIELD and 0 <= cast(int, self.y) < self.curve.PRIME_FIELD @classmethod def msm(cls, points: list[Self], scalars: list[int]) -> Self: @@ -203,11 +195,7 @@ def point_to_string(self) -> bytes: p = self.curve.PRIME_FIELD n_bytes = (p.bit_length() + 7) // 8 - y_bytes = bytearray( - cast(int, self.y).to_bytes( - n_bytes, cast(Literal["little", "big"], self.curve.ENDIAN) - ) - ) + y_bytes = bytearray(cast(int, self.y).to_bytes(n_bytes, cast(Literal["little", "big"], self.curve.ENDIAN))) # Compute x sign bit x_sign_bit = 1 if cast(int, self.x) > (-cast(int, self.x) % p) else 0 @@ -262,12 +250,8 @@ def uncompressed_p2s(self) -> bytes: if endian != "little" and endian != "big": raise ValueError("Invalid endianness") # Encode u and v coordinates as little-endian bytes - x_bytes = cast(int, self.x).to_bytes( - byte_length, cast(Literal["little", "big"], endian) - ) - y_bytes = cast(int, self.y).to_bytes( - byte_length, cast(Literal["little", "big"], endian) - ) + x_bytes = cast(int, self.x).to_bytes(byte_length, cast(Literal["little", "big"], endian)) + y_bytes = cast(int, self.y).to_bytes(byte_length, cast(Literal["little", "big"], endian)) return x_bytes + y_bytes @classmethod @@ 
-339,9 +323,7 @@ def encode_to_curve_tai(cls, alpha_string: bytes | str, salt: bytes = b"") -> Se H: Self | str = "INVALID" front = b"\x01" back = b"\x00" - alpha_string = ( - alpha_string.encode() if isinstance(alpha_string, str) else alpha_string - ) + alpha_string = alpha_string.encode() if isinstance(alpha_string, str) else alpha_string salt = salt.encode() if isinstance(salt, str) else salt suite_string = cls.curve.SUITE_STRING while H == "INVALID" or H == cast(Any, cls).identity_point(): diff --git a/dot_ring/curve/short_weierstrass/sw_affine_point.py b/dot_ring/curve/short_weierstrass/sw_affine_point.py index c87c699..18e47ac 100644 --- a/dot_ring/curve/short_weierstrass/sw_affine_point.py +++ b/dot_ring/curve/short_weierstrass/sw_affine_point.py @@ -205,9 +205,7 @@ def string_to_point(cls, octet_string: str | bytes) -> SWAffinePoint | str: if prefix in (0x02, 0x03): expected_len = 1 + field_byte_len if len(octet_string) != expected_len: - raise ValueError( - f"Invalid compressed point length: expected {expected_len}, got {len(octet_string)}" - ) + raise ValueError(f"Invalid compressed point length: expected {expected_len}, got {len(octet_string)}") # Extract x-coordinate x_bytes = octet_string[1:] @@ -246,9 +244,7 @@ def string_to_point(cls, octet_string: str | bytes) -> SWAffinePoint | str: elif prefix == 0x04: expected_len = 1 + 2 * field_byte_len if len(octet_string) != expected_len: - raise ValueError( - f"Invalid uncompressed point length: expected {expected_len}, got {len(octet_string)}" - ) + raise ValueError(f"Invalid uncompressed point length: expected {expected_len}, got {len(octet_string)}") # Extract x and y coordinates x_bytes = octet_string[1 : 1 + field_byte_len] @@ -276,9 +272,7 @@ def string_to_point(cls, octet_string: str | bytes) -> SWAffinePoint | str: # Hybrid format: prefix + x + y, where prefix encodes y parity redundantly expected_len = 1 + 2 * field_byte_len if len(octet_string) != expected_len: - raise ValueError( - f"Invalid 
hybrid point length: expected {expected_len}, got {len(octet_string)}" - ) + raise ValueError(f"Invalid hybrid point length: expected {expected_len}, got {len(octet_string)}") x_bytes = octet_string[1 : 1 + field_byte_len] y_bytes = octet_string[1 + field_byte_len :] @@ -319,12 +313,7 @@ def _validate_coordinates(self) -> bool: return True # Handle FieldElement points (for Fp2) - if ( - self.x is not None - and self.y is not None - and hasattr(self.x, "re") - and hasattr(self.y, "re") - ): + if self.x is not None and self.y is not None and hasattr(self.x, "re") and hasattr(self.y, "re"): # For FieldElement, check that the prime field matches x_fe: Any = self.x y_fe: Any = self.y @@ -341,17 +330,11 @@ def _validate_coordinates(self) -> bool: if isinstance(self.x, (tuple, list)) and isinstance(self.y, (tuple, list)): if len(self.x) != 2 or len(self.y) != 2: return False - return all( - isinstance(coord, int) and 0 <= coord < self.curve.PRIME_FIELD - for coord in (*self.x, *self.y) - ) + return all(isinstance(coord, int) and 0 <= coord < self.curve.PRIME_FIELD for coord in (*self.x, *self.y)) # Handle Fp points (integers) if isinstance(self.x, int) and isinstance(self.y, int): - return bool( - 0 <= self.x < self.curve.PRIME_FIELD - and 0 <= self.y < self.curve.PRIME_FIELD - ) + return bool(0 <= self.x < self.curve.PRIME_FIELD and 0 <= self.y < self.curve.PRIME_FIELD) return False @@ -400,11 +383,7 @@ def is_on_curve(self) -> bool: try: x_int, y_int = cast(int, self.x), cast(int, self.y) left = pow(y_int, 2, self.curve.PRIME_FIELD) - right = ( - pow(x_int, 3, self.curve.PRIME_FIELD) - + self.curve.WeierstrassA * x_int - + self.curve.WeierstrassB - ) % self.curve.PRIME_FIELD + right = (pow(x_int, 3, self.curve.PRIME_FIELD) + self.curve.WeierstrassA * x_int + self.curve.WeierstrassB) % self.curve.PRIME_FIELD return bool(left == right) except (TypeError, AttributeError): return False @@ -587,9 +566,7 @@ def encode_to_curve( raise ValueError(f"Unexpected E2C Variant: 
{cls.curve.E2C}") @classmethod - def sswu_hash2_curve_ro( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> SWAffinePoint | Any: + def sswu_hash2_curve_ro(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> SWAffinePoint | Any: """ Encode a string to a curve point using Elligator 2. @@ -613,9 +590,7 @@ def sswu_hash2_curve_ro( return R.clear_cofactor() @classmethod - def sswu_hash2_curve_nu( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> SWAffinePoint | Any: + def sswu_hash2_curve_nu(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> SWAffinePoint | Any: """ Encode a string to a curve point using Elligator 2. @@ -643,27 +618,12 @@ def apply_isogeny(cls, x_p: int, y_p: int) -> Self: """ p = cls.curve.PRIME_FIELD coeffs = cls.curve.Isogeny_Coeffs - x_num = ( - coeffs["x_num"][0] * pow(x_p, 3, p) - + coeffs["x_num"][1] * pow(x_p, 2, p) - + coeffs["x_num"][2] * x_p - + coeffs["x_num"][3] - ) % p + x_num = (coeffs["x_num"][0] * pow(x_p, 3, p) + coeffs["x_num"][1] * pow(x_p, 2, p) + coeffs["x_num"][2] * x_p + coeffs["x_num"][3]) % p x_den = (pow(x_p, 2, p) + coeffs["x_den"][1] * x_p + coeffs["x_den"][2]) % p - y_num = ( - coeffs["y_num"][0] * pow(x_p, 3, p) - + coeffs["y_num"][1] * pow(x_p, 2, p) - + coeffs["y_num"][2] * x_p - + coeffs["y_num"][3] - ) % p - - y_den = ( - pow(x_p, 3, p) - + coeffs["y_den"][1] * pow(x_p, 2, p) - + coeffs["y_den"][2] * x_p - + coeffs["y_den"][3] - ) % p + y_num = (coeffs["y_num"][0] * pow(x_p, 3, p) + coeffs["y_num"][1] * pow(x_p, 2, p) + coeffs["y_num"][2] * x_p + coeffs["y_num"][3]) % p + + y_den = (pow(x_p, 3, p) + coeffs["y_den"][1] * pow(x_p, 2, p) + coeffs["y_den"][2] * x_p + coeffs["y_den"][3]) % p x_den_inv = pow(x_den, -1, p) y_den_inv = pow(y_den, -1, p) diff --git a/dot_ring/curve/short_weierstrass/sw_curve.py b/dot_ring/curve/short_weierstrass/sw_curve.py index 225ce11..5c23c5a 100644 --- 
a/dot_ring/curve/short_weierstrass/sw_curve.py +++ b/dot_ring/curve/short_weierstrass/sw_curve.py @@ -51,12 +51,8 @@ def is_fp2(value: Any) -> bool: if is_fp2(A) or is_fp2(B): # For Fp2, we'll just check that the parameters are not both zero # A more thorough check would involve Fp2 arithmetic - a_is_zero = ( - all(x == 0 for x in A) if isinstance(A, (tuple, list)) else A == 0 - ) - b_is_zero = ( - all(x == 0 for x in B) if isinstance(B, (tuple, list)) else B == 0 - ) + a_is_zero = all(x == 0 for x in A) if isinstance(A, (tuple, list)) else A == 0 + b_is_zero = all(x == 0 for x in B) if isinstance(B, (tuple, list)) else B == 0 return not (a_is_zero and b_is_zero) # Original Fp validation diff --git a/dot_ring/curve/specs/baby_jubjub.py b/dot_ring/curve/specs/baby_jubjub.py index 4a4f0b9..165cbe9 100644 --- a/dot_ring/curve/specs/baby_jubjub.py +++ b/dot_ring/curve/specs/baby_jubjub.py @@ -23,35 +23,23 @@ class BabyJubJubParams: DST = b"" # Curve parameters - PRIME_FIELD: Final[ - int - ] = 21888242871839275222246405745257275088548364400416034343698204186575808495617 - ORDER: Final[ - int - ] = 2736030358979909402780800718157159386076813972158567259200215660948447373041 + PRIME_FIELD: Final[int] = 21888242871839275222246405745257275088548364400416034343698204186575808495617 + ORDER: Final[int] = 2736030358979909402780800718157159386076813972158567259200215660948447373041 COFACTOR: Final[int] = 8 # Generator point - GENERATOR_X: Final[ - int - ] = 19698561148652590122159747500897617769866003486955115824547446575314762165298 - GENERATOR_Y: Final[ - int - ] = 19298250018296453272277890825869354524455968081175474282777126169995084727839 + GENERATOR_X: Final[int] = 19698561148652590122159747500897617769866003486955115824547446575314762165298 + GENERATOR_Y: Final[int] = 19298250018296453272277890825869354524455968081175474282777126169995084727839 # Edwards curve parameters EDWARDS_A: Final[int] = 1 - EDWARDS_D: Final[ - int - ] = 
9706598848417545097372247223557719406784115219466060233080913168975159366771 + EDWARDS_D: Final[int] = 9706598848417545097372247223557719406784115219466060233080913168975159366771 # Z Z: Final[int] = 5 M: Final[int] = 1 K: Final[int] = 128 L: Final[int] = 32 # can define func as well - S_in_bytes: Final[ - int - ] = 128 # can be taken as hsh_fn.block_size #not sure as its supposed to be 128 for sha512 + S_in_bytes: Final[int] = 128 # can be taken as hsh_fn.block_size #not sure as its supposed to be 128 for sha512 H_A = hashlib.sha512 ENDIAN = "little" Requires_Isogeny: Final[bool] = False @@ -60,12 +48,8 @@ class BabyJubJubParams: CHALLENGE_LENGTH: Final[int] = 32 # 128 bits # Blinding Base For Pedersen - BBx: Final[ - int - ] = 8170247200255741810297410022472365370979789984587637609570347196251706043122 - BBy: Final[ - int - ] = 16313972569917201570489077828713531620741538540099917729994937953803219324220 + BBx: Final[int] = 8170247200255741810297410022472365370979789984587637609570347196251706043122 + BBy: Final[int] = 16313972569917201570489077828713531620741538540099917729994937953803219324220 UNCOMPRESSED = False POINT_LEN: Final[int] = 32 @@ -109,6 +93,4 @@ class BabyJubJubPoint(TEAffinePoint): curve: TECurve = BabyJubJub_TE_Curve -BabyJubJub = CurveVariant( - name="BabyJubJub", curve=BabyJubJub_TE_Curve, point=BabyJubJubPoint -) +BabyJubJub = CurveVariant(name="BabyJubJub", curve=BabyJubJub_TE_Curve, point=BabyJubJubPoint) diff --git a/dot_ring/curve/specs/bandersnatch.py b/dot_ring/curve/specs/bandersnatch.py index c47e237..f6a5b2e 100644 --- a/dot_ring/curve/specs/bandersnatch.py +++ b/dot_ring/curve/specs/bandersnatch.py @@ -26,38 +26,22 @@ class BandersnatchParams: DST = b"ECVRF_Bandersnatch_XMD:SHA-512_ELL2_RO_Bandersnatch_SHA-512_ELL2" # Curve parameters - PRIME_FIELD: Final[ - int - ] = 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 - ORDER: Final[ - int - ] = 0x1CFB69D4CA675F520CCE760202687600FF8F87007419047174FD06B52876E7E1 + 
PRIME_FIELD: Final[int] = 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 + ORDER: Final[int] = 0x1CFB69D4CA675F520CCE760202687600FF8F87007419047174FD06B52876E7E1 COFACTOR: Final[int] = 4 # Generator point - GENERATOR_X: ClassVar[ - Scalar - ] = Scalar(18886178867200960497001835917649091219057080094937609519140440539760939937304) - GENERATOR_Y: ClassVar[ - Scalar - ] = Scalar(19188667384257783945677642223292697773471335439753913231509108946878080696678) + GENERATOR_X: ClassVar[Scalar] = Scalar(18886178867200960497001835917649091219057080094937609519140440539760939937304) + GENERATOR_Y: ClassVar[Scalar] = Scalar(19188667384257783945677642223292697773471335439753913231509108946878080696678) # Edwards curve parameters EDWARDS_A: ClassVar[Scalar] = Scalar(-5) - EDWARDS_D: ClassVar[ - Scalar - ] = Scalar(0x6389C12633C267CBC66E3BF86BE3B6D8CB66677177E54F92B369F2F5188D58E7) + EDWARDS_D: ClassVar[Scalar] = Scalar(0x6389C12633C267CBC66E3BF86BE3B6D8CB66677177E54F92B369F2F5188D58E7) # GLV parameters - GLV_LAMBDA: Final[ - int - ] = 0x13B4F3DC4A39A493EDF849562B38C72BCFC49DB970A5056ED13D21408783DF05 - GLV_B: Final[ - int - ] = 0x52C9F28B828426A561F00D3A63511A882EA712770D9AF4D6EE0F014D172510B4 - GLV_C: Final[ - int - ] = 0x6CC624CF865457C3A97C6EFD6C17D1078456ABCFFF36F4E9515C806CDF650B3D + GLV_LAMBDA: Final[int] = 0x13B4F3DC4A39A493EDF849562B38C72BCFC49DB970A5056ED13D21408783DF05 + GLV_B: Final[int] = 0x52C9F28B828426A561F00D3A63511A882EA712770D9AF4D6EE0F014D172510B4 + GLV_C: Final[int] = 0x6CC624CF865457C3A97C6EFD6C17D1078456ABCFFF36F4E9515C806CDF650B3D # Challenge length in bytes for VRF (aligned with 256-bit security level) CHALLENGE_LENGTH: Final[int] = 32 # 256 bits @@ -67,20 +51,14 @@ class BandersnatchParams: M: Final[int] = 1 K: Final[int] = 128 L: Final[int] = 48 # can define func as well - S_in_bytes: Final[ - int - ] = 48 # can be taken as hsh_fn.block_size #not sure as its supposed to be 128 for sha512 + S_in_bytes: Final[int] = 48 # can be taken as 
hsh_fn.block_size #not sure as its supposed to be 128 for sha512 H_A = hashlib.sha512 ENDIAN = "little" Requires_Isogeny: Final[bool] = False Isogeny_Coeffs = None - BBx: Final[ - int - ] = 6150229251051246713677296363717454238956877613358614224171740096471278798312 - BBy: Final[ - int - ] = 28442734166467795856797249030329035618871580593056783094884474814923353898473 + BBx: Final[int] = 6150229251051246713677296363717454238956877613358614224171740096471278798312 + BBy: Final[int] = 28442734166467795856797249030329035618871580593056783094884474814923353898473 UNCOMPRESSED = False POINT_LEN: Final[int] = 32 @@ -144,9 +122,7 @@ def __mul__(self, scalar: int) -> Self: k1, k2 = BandersnatchGLV.decompose_scalar(scalar % n, n) phi = BandersnatchGLV.compute_endomorphism(self) - return cast( - Self, BandersnatchGLV.windowed_simultaneous_mult(k1, k2, self, phi, w=2) - ) + return cast(Self, BandersnatchGLV.windowed_simultaneous_mult(k1, k2, self, phi, w=2)) def __add__(self, other: Any) -> Self: return cast(Self, super().__add__(other)) @@ -176,9 +152,7 @@ def msm(cls, points: list[Self], scalars: list[int]) -> Self: return cast( Self, - BandersnatchGLV.multi_scalar_mult_4( - k1_1, k1_2, k2_1, k2_2, points[0], phi_P1, points[1], phi_P2 - ), + BandersnatchGLV.multi_scalar_mult_4(k1_1, k1_2, k2_1, k2_2, points[0], phi_P1, points[1], phi_P2), ) if len(points) == 4: diff --git a/dot_ring/curve/specs/bandersnatch_sw.py b/dot_ring/curve/specs/bandersnatch_sw.py index 31b3b30..c2d8744 100644 --- a/dot_ring/curve/specs/bandersnatch_sw.py +++ b/dot_ring/curve/specs/bandersnatch_sw.py @@ -16,27 +16,15 @@ class BandersnatchSWParams: SUITE_STRING = b"Bandersnatch_SW_SHA-512_TAI" DST = b"ECVRF_Bandersnatch_XMD:SHA-512_TAI_RO_Bandersnatch_SW_SHA-512_TAI" - PRIME_FIELD: Final[ - int - ] = 52435875175126190479447740508185965837690552500527637822603658699938581184513 - ORDER: Final[ - int - ] = 0x1CFB69D4CA675F520CCE760202687600FF8F87007419047174FD06B52876E7E1 + PRIME_FIELD: Final[int] = 
52435875175126190479447740508185965837690552500527637822603658699938581184513 + ORDER: Final[int] = 0x1CFB69D4CA675F520CCE760202687600FF8F87007419047174FD06B52876E7E1 COFACTOR: Final[int] = 4 - WEIERSTRASS_A: Final[ - int - ] = 10773120815616481058602537765553212789256758185246796157495669123169359657269 - WEIERSTRASS_B: Final[ - int - ] = 29569587568322301171008055308580903175558631321415017492731745847794083609535 + WEIERSTRASS_A: Final[int] = 10773120815616481058602537765553212789256758185246796157495669123169359657269 + WEIERSTRASS_B: Final[int] = 29569587568322301171008055308580903175558631321415017492731745847794083609535 - GENERATOR_X: Final[ - int - ] = 30900340493481298850216505686589334086208278925799850409469406976849338430199 - GENERATOR_Y: Final[ - int - ] = 12663882780877899054958035777720958383845500985908634476792678820121468453298 + GENERATOR_X: Final[int] = 30900340493481298850216505686589334086208278925799850409469406976849338430199 + GENERATOR_Y: Final[int] = 12663882780877899054958035777720958383845500985908634476792678820121468453298 Z: Final[int] = -11 M: Final[int] = 1 @@ -45,12 +33,8 @@ class BandersnatchSWParams: S_in_bytes: Final[int] = 64 H_A = hashlib.sha512 ENDIAN = "little" - BBx: Final[ - int - ] = 43295201540795761503961631609120105078472641399392666499799525033203881929458 - BBy: Final[ - int - ] = 47295792057744344182638225978402781315571475472700428341116949953237551542374 + BBx: Final[int] = 43295201540795761503961631609120105078472641399392666499799525033203881929458 + BBy: Final[int] = 47295792057744344182638225978402781315571475472700428341116949953237551542374 CHALLENGE_LENGTH: Final[int] = 32 Requires_Isogeny: Final[bool] = False @@ -128,9 +112,7 @@ def point_to_string(self, compressed: bool = False) -> bytes: # Serialize x-coordinate if self.x is None: raise ValueError("Cannot serialize identity point") - x_bytes = int(cast(int, self.x)).to_bytes( - (field_bit_len + 7) // 8, cast(Literal["little", "big"], 
self.curve.ENDIAN) - ) + x_bytes = int(cast(int, self.x)).to_bytes((field_bit_len + 7) // 8, cast(Literal["little", "big"], self.curve.ENDIAN)) # Copy x_bytes into buffer of total length result = bytearray(output_byte_len) @@ -214,9 +196,7 @@ def encode_to_curve_tai(cls, alpha_string: bytes | str, salt: bytes = b"") -> Se H: Self | None = None front = b"\x01" back = b"\x00" - alpha_string = ( - alpha_string.encode() if isinstance(alpha_string, str) else alpha_string - ) + alpha_string = alpha_string.encode() if isinstance(alpha_string, str) else alpha_string salt = salt.encode() if isinstance(salt, str) else salt suite_string = cls.curve.SUITE_STRING diff --git a/dot_ring/curve/specs/bls12_381_G1.py b/dot_ring/curve/specs/bls12_381_G1.py index 54cf28a..c28221d 100644 --- a/dot_ring/curve/specs/bls12_381_G1.py +++ b/dot_ring/curve/specs/bls12_381_G1.py @@ -19,30 +19,20 @@ class BLS12_381_G1Params: # Domain separation / hash-to-curve strings (RFC drafts / implementations) SUITE_STRING: Final[bytes] = b"BLS12381G1_XMD:SHA-256_SSWU_RO_" - DST: Final[ - bytes - ] = b"QUUX-V01-CS02-with-BLS12381G1_XMD:SHA-256_SSWU_RO_" # common default DST + DST: Final[bytes] = b"QUUX-V01-CS02-with-BLS12381G1_XMD:SHA-256_SSWU_RO_" # common default DST # Prime field p (F_q) for BLS12-381 - PRIME_FIELD: Final[ - int - ] = 0x1A0111EA397FE69A4B1BA7B6434BACD7_64774B84F38512BF6730D2A0F6B0F624_1EABFFFEB153FFFFB9FEFFFFFFFFAAAB + PRIME_FIELD: Final[int] = 0x1A0111EA397FE69A4B1BA7B6434BACD7_64774B84F38512BF6730D2A0F6B0F624_1EABFFFEB153FFFFB9FEFFFFFFFFAAAB # Order r of the prime-order subgroup (G1 and G2 share the same r) - ORDER: Final[ - int - ] = 0x73EDA753299D7D483339D80809A1D805_53BDA402FFFE5BFEFFFFFFFF00000001 + ORDER: Final[int] = 0x73EDA753299D7D483339D80809A1D805_53BDA402FFFE5BFEFFFFFFFF00000001 # Cofactor for G1 (h1) COFACTOR: Final[int] = 0xD201000000010001 # Generator point (affine coordinates) for G1 (from standard definitions) - GENERATOR_X: Final[ - int - ] = 
0x17F1D3A73197D7942695638C4FA9AC0F_C3688C4F9774B905A14E3A3F171BAC58_6C55E83FF97A1AEFFB3AF00ADB22C6BB - GENERATOR_Y: Final[ - int - ] = 0x08B3F481E3AAA0F1A09E30ED741D8AE4_FCF5E095D5D00AF600DB18CB2C04B3ED_D03CC744A2888AE40CAA232946C5E7E1 + GENERATOR_X: Final[int] = 0x17F1D3A73197D7942695638C4FA9AC0F_C3688C4F9774B905A14E3A3F171BAC58_6C55E83FF97A1AEFFB3AF00ADB22C6BB + GENERATOR_Y: Final[int] = 0x08B3F481E3AAA0F1A09E30ED741D8AE4_FCF5E095D5D00AF600DB18CB2C04B3ED_D03CC744A2888AE40CAA232946C5E7E1 # Short Weierstrass parameters for y^2 = x^3 + a*x + b WEIERSTRASS_A: Final[int] = 0x00 @@ -108,9 +98,7 @@ def __init__(self, e2c_variant: E2C_Variant = E2C_Variant.SSWU) -> None: S_in_bytes=BLS12_381_G1Params.S_IN_BYTES, H_A=BLS12_381_G1Params.H_A, Requires_Isogeny=BLS12_381_G1Params.Requires_Isogeny, - Isogeny_Coeffs=cast( - dict[str, list[int]] | None, BLS12_381_G1Params.Isogeny_Coeffs - ), + Isogeny_Coeffs=cast(dict[str, list[int]] | None, BLS12_381_G1Params.Isogeny_Coeffs), UNCOMPRESSED=BLS12_381_G1Params.UNCOMPRESSED, ENDIAN=BLS12_381_G1Params.ENDIAN, POINT_LEN=BLS12_381_G1Params.POINT_LEN, diff --git a/dot_ring/curve/specs/bls12_381_G2.py b/dot_ring/curve/specs/bls12_381_G2.py index 5b3bc18..b7973d1 100644 --- a/dot_ring/curve/specs/bls12_381_G2.py +++ b/dot_ring/curve/specs/bls12_381_G2.py @@ -29,19 +29,15 @@ class BLS12_381_G2Params: SUITE_STRING = b"BLS12381G2_XMD:SHA-256_SSWU_RO_" DST = b"QUUX-V01-CS02-with-BLS12381G2_XMD:SHA-256_SSWU_RO_" # Use the suite string as DST by default per RFC 9380 # Base field characteristic (modulus) - PRIME_FIELD: Final[ - int - ] = 0x1A0111EA397FE69A4B1BA7B6434BACD764774B84F38512BF6730D2A0F6B0F6241EABFFFEB153FFFFB9FEFFFFFFFFAAAB + PRIME_FIELD: Final[int] = 0x1A0111EA397FE69A4B1BA7B6434BACD764774B84F38512BF6730D2A0F6B0F6241EABFFFEB153FFFFB9FEFFFFFFFFAAAB # Subgroup order (r) - ORDER: Final[ - int - ] = 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 + ORDER: Final[int] = 
0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 # Cofactor (h) - COFACTOR: Final[ - int - ] = 0xBC69F08F2EE75B3584C6A0EA91B352888E2A8E9145AD7689986FF031508FFE1329C2F178731DB956D82BF015D1212B02EC0EC69D7477C1AE954CBC06689F6A359894C0ADEBBF6B4E8020005AAA95551 # noqa: E501 + COFACTOR: Final[int] = ( + 0xBC69F08F2EE75B3584C6A0EA91B352888E2A8E9145AD7689986FF031508FFE1329C2F178731DB956D82BF015D1212B02EC0EC69D7477C1AE954CBC06689F6A359894C0ADEBBF6B4E8020005AAA95551 # noqa: E501 + ) # Generator point (G2) GENERATOR_X: Final[Fp2] = ( @@ -301,9 +297,7 @@ def __mul__(self, scalar: int) -> BLS12_381_G2Point: return self.__class__(x, y) @classmethod - def sswu_hash2_curve_ro( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> dict | Self: + def sswu_hash2_curve_ro(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> dict | Self: """ Encode a string to a curve point using SSWU map with 3-isogeny (Random Oracle variant). @@ -333,9 +327,7 @@ def sswu_hash2_curve_ro( return cast(Self, R) @classmethod - def sswu_hash2_curve_nu( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> Self | Any: + def sswu_hash2_curve_nu(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> Self | Any: """ Encode a string to a curve point using Elligator 2. 
@@ -448,9 +440,7 @@ def _sswu_map_to_e_prime(cls, u: FieldElement) -> tuple[FieldElement, FieldEleme return x, cast(FieldElement, y) @classmethod - def _apply_3_isogeny( - cls, point: tuple[FieldElement, FieldElement] - ) -> tuple[FieldElement, FieldElement]: + def _apply_3_isogeny(cls, point: tuple[FieldElement, FieldElement]) -> tuple[FieldElement, FieldElement]: x_prime, y_prime = point p = cls.curve.PRIME_FIELD @@ -524,16 +514,12 @@ def _apply_3_isogeny( p, ) - x_num = ( - k_1_3 * (x_prime**3) + k_1_2 * (x_prime**2) + k_1_1 * x_prime + k_1_0 - ) + x_num = k_1_3 * (x_prime**3) + k_1_2 * (x_prime**2) + k_1_1 * x_prime + k_1_0 x_den = x_prime**2 + k_2_1 * x_prime + k_2_0 x = x_num / x_den # can use inv as well # Calculate y numerator and denominator - y_num = ( - k_3_3 * (x_prime**3) + k_3_2 * (x_prime**2) + k_3_1 * x_prime + k_3_0 - ) + y_num = k_3_3 * (x_prime**3) + k_3_2 * (x_prime**2) + k_3_1 * x_prime + k_3_0 y_den = x_prime**3 + k_4_2 * (x_prime**2) + k_4_1 * x_prime + k_4_0 y = y_prime * (y_num / y_den) # can u inv() as well diff --git a/dot_ring/curve/specs/curve25519.py b/dot_ring/curve/specs/curve25519.py index 6484ee2..b076bba 100644 --- a/dot_ring/curve/specs/curve25519.py +++ b/dot_ring/curve/specs/curve25519.py @@ -30,9 +30,7 @@ class Curve25519Params: COFACTOR: Final[int] = 8 # Generator point (u, v) - corresponds to the base point of edwards25519 GENERATOR_U: Final[int] = 9 - GENERATOR_V: Final[ - int - ] = 14781619447589544791020593568409986887264606134616475288964881837755586237401 + GENERATOR_V: Final[int] = 14781619447589544791020593568409986887264606134616475288964881837755586237401 # Montgomery curve parameters: v² = u³ + Au² + u A: Final[int] = 486662 @@ -53,12 +51,8 @@ class Curve25519Params: CHALLENGE_LENGTH: Final[int] = 16 # Blinding base for Pedersen VRF (project-specific: keep if you need them) - BBu: Final[ - int - ] = GENERATOR_U # 0x2a4f9ef57d59ee131c7c4e1d9b4e3a1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1 - BBv: Final[ - int - ] = 
GENERATOR_V # 0x1a8d1d5a5f9e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8 + BBu: Final[int] = GENERATOR_U # 0x2a4f9ef57d59ee131c7c4e1d9b4e3a1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1e1 + BBv: Final[int] = GENERATOR_V # 0x1a8d1d5a5f9e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8e8 UNCOMPRESSED = True POINT_LEN: Final[int] = 32 diff --git a/dot_ring/curve/specs/curve448.py b/dot_ring/curve/specs/curve448.py index a523706..4e8d34b 100644 --- a/dot_ring/curve/specs/curve448.py +++ b/dot_ring/curve/specs/curve448.py @@ -25,9 +25,7 @@ class Curve448Params: # Curve parameters PRIME_FIELD: Final[int] = 2**448 - 2**224 - 1 - ORDER: Final[int] = ( - 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D - ) + ORDER: Final[int] = 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D COFACTOR: Final[int] = 4 # Generator point (u, v) - corresponds to the base point of edwards448 @@ -36,9 +34,9 @@ class Curve448Params: # v-coordinate is derived from the curve equation v^2 = u^3 + A*u^2 + u mod p # Using the positive square root that has even least significant bit (LSB) - GENERATOR_V: Final[ - int - ] = 355293926785568175264127502063783334808976399387714271831880898435169088786967410002932673765864550910142774147268105838985595290606362 + GENERATOR_V: Final[int] = ( + 355293926785568175264127502063783334808976399387714271831880898435169088786967410002932673765864550910142774147268105838985595290606362 + ) # Montgomery curve parameters: v² = u³ + Au² + u A: Final[int] = 156326 diff --git a/dot_ring/curve/specs/ed25519.py b/dot_ring/curve/specs/ed25519.py index 97ad4a6..3547421 100644 --- a/dot_ring/curve/specs/ed25519.py +++ b/dot_ring/curve/specs/ed25519.py @@ -23,25 +23,15 @@ class Ed25519Params: DST = b"QUUX-V01-CS02-with-edwards25519_XMD:SHA-512_ELL2_RO_" # Curve parameters - PRIME_FIELD: Final[ - int - ] = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED + PRIME_FIELD: Final[int] = 
0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED ORDER: Final[int] = 2**252 + 0x14DEF9DEA2F79CD65812631A5CF5D3ED COFACTOR: Final[int] = 8 # Generator point - GENERATOR_X: Final[ - int - ] = 0x216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A - GENERATOR_Y: Final[ - int - ] = 0x6666666666666666666666666666666666666666666666666666666666666658 + GENERATOR_X: Final[int] = 0x216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A + GENERATOR_Y: Final[int] = 0x6666666666666666666666666666666666666666666666666666666666666658 # Edwards curve parameters - EDWARDS_A: Final[ - int - ] = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEC - EDWARDS_D: Final[ - int - ] = 0x52036CEE2B6FFE738CC740797779E89800700A4D4141D8AB75EB4DCA135978A3 + EDWARDS_A: Final[int] = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEC + EDWARDS_D: Final[int] = 0x52036CEE2B6FFE738CC740797779E89800700A4D4141D8AB75EB4DCA135978A3 # Z parameter for Elligator 2 mapping (from RFC 9380 Section 4.1) Z: Final[int] = 2 # Curve25519 uses Z = 2 for Elligator 2 mapping @@ -56,12 +46,8 @@ class Ed25519Params: # Challenge length in bytes for VRF (from RFC 9381) CHALLENGE_LENGTH: Final[int] = 16 # 128 bits - BBx: Final[ - int - ] = 52417091031015867055192825304177001039906336859819158874861527659737645967040 - BBy: Final[ - int - ] = 24364467899048426341436922427697710961180476432856951893648702734568269272170 + BBx: Final[int] = 52417091031015867055192825304177001039906336859819158874861527659737645967040 + BBy: Final[int] = 24364467899048426341436922427697710961180476432856951893648702734568269272170 UNCOMPRESSED = False POINT_LEN: Final[int] = 32 diff --git a/dot_ring/curve/specs/ed448.py b/dot_ring/curve/specs/ed448.py index 3baaa43..e33acea 100644 --- a/dot_ring/curve/specs/ed448.py +++ b/dot_ring/curve/specs/ed448.py @@ -26,9 +26,7 @@ class Ed448Params: # Curve parameters from RFC 8032 PRIME_FIELD: Final[int] = 2**448 - 2**224 - 1 - ORDER: 
Final[int] = ( - 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D - ) + ORDER: Final[int] = 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D COFACTOR: Final[int] = 4 # Generator point (x, y) - Valid Ed448 base point that satisfies the curve equation @@ -38,9 +36,9 @@ class Ed448Params: # ) # GENERATOR_Y: Final[int] = ( # 60824814231572554857908961302747 - GENERATOR_X: Final[ - int - ] = 117812161263436946737282484343310064665180535357016373416879082147939404277809514858788439644911793978499419995990477371552926308078495 + GENERATOR_X: Final[int] = ( + 117812161263436946737282484343310064665180535357016373416879082147939404277809514858788439644911793978499419995990477371552926308078495 + ) GENERATOR_Y: Final[int] = 19 # Twisted Edwards parameters: ax² + y² = 1 + dx²y² (mod p) @@ -211,13 +209,7 @@ def mont_to_ed448(cls, u: int, v: int) -> Self: y_num = -(pow(u, 5, p) - 2 * pow(u, 3, p) - 4 * u * pow(v, 2, p) + u) % p # y denominator: u^5 - 2u^2v^2 - 2u^3 - 2v^2 + u - y_den = ( - pow(u, 5, p) - - 2 * pow(u, 2, p) * pow(v, 2, p) - - 2 * pow(u, 3, p) - - 2 * pow(v, 2, p) - + u - ) % p + y_den = (pow(u, 5, p) - 2 * pow(u, 2, p) * pow(v, 2, p) - 2 * pow(u, 3, p) - 2 * pow(v, 2, p) + u) % p y = (y_num * cls.curve.inv(y_den)) % p return cls(x, y) diff --git a/dot_ring/curve/specs/jubjub.py b/dot_ring/curve/specs/jubjub.py index 6be9387..6d5a6da 100644 --- a/dot_ring/curve/specs/jubjub.py +++ b/dot_ring/curve/specs/jubjub.py @@ -23,36 +23,24 @@ class JubJubParams: DST = b"" # f_len=q_len=32 # Curve parameters - PRIME_FIELD: Final[ - int - ] = 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 - ORDER: Final[ - int - ] = 0x0E7DB4EA6533AFA906673B0101343B00A6682093CCC81082D0970E5ED6F72CB7 + PRIME_FIELD: Final[int] = 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001 + ORDER: Final[int] = 0x0E7DB4EA6533AFA906673B0101343B00A6682093CCC81082D0970E5ED6F72CB7 COFACTOR: Final[int] = 8 # Generator point - 
GENERATOR_X: Final[ - int - ] = 8076246640662884909881801758704306714034609987455869804520522091855516602923 - GENERATOR_Y: Final[ - int - ] = 13262374693698910701929044844600465831413122818447359594527400194675274060458 + GENERATOR_X: Final[int] = 8076246640662884909881801758704306714034609987455869804520522091855516602923 + GENERATOR_Y: Final[int] = 13262374693698910701929044844600465831413122818447359594527400194675274060458 # Edwards curve parameters EDWARDS_A: Final[int] = -1 - EDWARDS_D: Final[ - int - ] = 19257038036680949359750312669786877991949435402254120286184196891950884077233 + EDWARDS_D: Final[int] = 19257038036680949359750312669786877991949435402254120286184196891950884077233 # Z Z: Final[int] = 5 M: Final[int] = 1 K: Final[int] = 128 L: Final[int] = 48 # can define func as well - S_in_bytes: Final[ - int - ] = 48 # can be taken as hsh_fn.block_size #not sure as its supposed to be 128 for sha512 + S_in_bytes: Final[int] = 48 # can be taken as hsh_fn.block_size #not sure as its supposed to be 128 for sha512 H_A = hashlib.sha512 ENDIAN = "little" Requires_Isogeny: Final[bool] = False @@ -60,12 +48,8 @@ class JubJubParams: CHALLENGE_LENGTH: Final[int] = 32 # Blinding Base For Pedersen - BBx: Final[ - int - ] = 42257337814662035284373945156525735092765968053982822992704750832078779438788 - BBy: Final[ - int - ] = 47476395315228831116309413527962830333178159651930104661512857647213254194102 + BBx: Final[int] = 42257337814662035284373945156525735092765968053982822992704750832078779438788 + BBy: Final[int] = 47476395315228831116309413527962830333178159651930104661512857647213254194102 UNCOMPRESSED = False POINT_LEN: Final[int] = 32 diff --git a/dot_ring/curve/specs/p256.py b/dot_ring/curve/specs/p256.py index 5a04612..c3f748d 100644 --- a/dot_ring/curve/specs/p256.py +++ b/dot_ring/curve/specs/p256.py @@ -25,27 +25,17 @@ class P256Params: DST = b"QUUX-V01-CS02-with-P256_XMD:SHA-256_SSWU_RO_" # Default DST is the same as SUITE_STRING # Curve parameters for 
y² = x³ - 3x + b - PRIME_FIELD: Final[ - int - ] = 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF - ORDER: Final[ - int - ] = 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 + PRIME_FIELD: Final[int] = 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF + ORDER: Final[int] = 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 COFACTOR: Final[int] = 1 # Generator point - GENERATOR_X: Final[ - int - ] = 0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296 - GENERATOR_Y: Final[ - int - ] = 0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5 + GENERATOR_X: Final[int] = 0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296 + GENERATOR_Y: Final[int] = 0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5 # Short Weierstrass parameters: y² = x³ + ax + b WEIERSTRASS_A: Final[int] = -3 # a = -3 - WEIERSTRASS_B: Final[ - int - ] = 0x5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B + WEIERSTRASS_B: Final[int] = 0x5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B # Z parameter for SSWU mapping (from RFC 9380 Section 8.1) Z: Final[int] = -10 # P-256 uses Z = -10 for SSWU mapping @@ -58,12 +48,8 @@ class P256Params: ENDIAN = "big" # Blinding Base For Pedersen VRF # These are arbitrary points on the curve for blinding - BBx: Final[ - int - ] = 55516455597544811540149985232155473070193196202193483189274003004283034832642 - BBy: Final[ - int - ] = 48580550536742846740990228707183741745344724157532839324866819111997786854582 + BBx: Final[int] = 55516455597544811540149985232155473070193196202193483189274003004283034832642 + BBy: Final[int] = 48580550536742846740990228707183741745344724157532839324866819111997786854582 # Challenge length in bytes for VRF (from RFC 9381) CHALLENGE_LENGTH: Final[int] = 16 # 128 bits Requires_Isogeny: Final[bool] = False diff --git a/dot_ring/curve/specs/p384.py b/dot_ring/curve/specs/p384.py index 
0490292..5ec4415 100644 --- a/dot_ring/curve/specs/p384.py +++ b/dot_ring/curve/specs/p384.py @@ -25,27 +25,17 @@ class P384Params: DST = b"QUUX-V01-CS02-with-P384_XMD:SHA-384_SSWU_RO_" # Default DST is the same as SUITE_STRING # Curve parameters for y² = x³ - 3x + b - PRIME_FIELD: Final[ - int - ] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF - ORDER: Final[ - int - ] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 + PRIME_FIELD: Final[int] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF + ORDER: Final[int] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 COFACTOR: Final[int] = 1 # Generator point - GENERATOR_X: Final[ - int - ] = 0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7 - GENERATOR_Y: Final[ - int - ] = 0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F + GENERATOR_X: Final[int] = 0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7 + GENERATOR_Y: Final[int] = 0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F # Short Weierstrass parameters: y² = x³ + ax + b WEIERSTRASS_A: Final[int] = -3 # a = -3 - WEIERSTRASS_B: Final[ - int - ] = 0xB3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875AC656398D8A2ED19D2A85C8EDD3EC2AEF + WEIERSTRASS_B: Final[int] = 0xB3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875AC656398D8A2ED19D2A85C8EDD3EC2AEF # Z parameter for SSWU mapping (from RFC 9380 Section 8.3) Z: Final[int] = -12 # P-384 uses Z = -12 for SSWU mapping @@ -62,13 +52,9 @@ class P384Params: # Blinding Base For Pedersen VRF # These are arbitrary points on the curve for blinding # sample blinding base - BBx: Final[ - int - ] = 
0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7 # noqa: E501 + BBx: Final[int] = 0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7 # noqa: E501 ELL2_C1 = 0x1C9B74C1A04954B78B4B6035E97A5E078A5A0F28EC96D547BFEE9ACE803AC012345678901234567890123456789012 # noqa: E501 - BBy: Final[ - int - ] = 0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F # noqa: E501 + BBy: Final[int] = 0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F # noqa: E501 ELL2_C2 = 0x2D3C6863973926E049E637CB1B5F40A36DAC28AF1766968C30C2313F3A38945678901234567890123456789012345 # noqa: E501 Isogeny_Coeffs = None UNCOMPRESSED = False diff --git a/dot_ring/curve/specs/p521.py b/dot_ring/curve/specs/p521.py index daf669e..040bff5 100644 --- a/dot_ring/curve/specs/p521.py +++ b/dot_ring/curve/specs/p521.py @@ -25,27 +25,27 @@ class P521Params: DST = b"QUUX-V01-CS02-with-P521_XMD:SHA-512_SSWU_RO_" # Default DST is the same as SUITE_STRING # Curve parameters for y² = x³ - 3x + b - PRIME_FIELD: Final[ - int - ] = 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF - ORDER: Final[ - int - ] = 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 + PRIME_FIELD: Final[int] = ( + 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF + ) + ORDER: Final[int] = ( + 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 + ) COFACTOR: Final[int] = 1 # Generator point - GENERATOR_X: Final[ - int - ] = 
0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66 - GENERATOR_Y: Final[ - int - ] = 0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650 + GENERATOR_X: Final[int] = ( + 0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66 + ) + GENERATOR_Y: Final[int] = ( + 0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650 + ) # Short Weierstrass parameters: y² = x³ + ax + b WEIERSTRASS_A: Final[int] = -3 # a = -3 - WEIERSTRASS_B: Final[ - int - ] = 0x0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00 + WEIERSTRASS_B: Final[int] = ( + 0x0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00 + ) # Z parameter for SSWU mapping (from RFC 9380 Section 8.4) Z: Final[int] = -4 # P-521 uses Z = -4 for SSWU mapping @@ -61,13 +61,13 @@ class P521Params: Requires_Isogeny: Final[bool] = False # Blinding Base For Pedersen VRF # These are arbitrary points on the curve for blinding - BBx: Final[ - int - ] = 0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66 # noqa: E501 + BBx: Final[int] = ( + 0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66 # noqa: E501 + ) ELL2_C1 = 0x01C9B74C1A04954B78B4B6035E97A5E078A5A0F28EC96D547BFEE9ACE803AC012345678901234567890123456789012345678901234567890123456789012345678 # noqa: E501 - BBy: Final[ - int - ] = 
0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650 # noqa: E501 + BBy: Final[int] = ( + 0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650 # noqa: E501 + ) ELL2_C2 = 0x02D3C6863973926E049E637CB1B5F40A36DAC28AF1766968C30C2313F3A38945678901234567890123456789012345678901234567890123456789012345678901 # noqa: E501 Isogeny_Coeffs = None UNCOMPRESSED = False diff --git a/dot_ring/curve/specs/secp256k1.py b/dot_ring/curve/specs/secp256k1.py index 7590f1a..ee5d07b 100644 --- a/dot_ring/curve/specs/secp256k1.py +++ b/dot_ring/curve/specs/secp256k1.py @@ -26,29 +26,19 @@ class Secp256k1Params: # Curve parameters for y² = x³ + 7 - PRIME_FIELD: Final[ - int - ] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F - ORDER: Final[ - int - ] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + PRIME_FIELD: Final[int] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F + ORDER: Final[int] = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 COFACTOR: Final[int] = 1 # Generator point - GENERATOR_X: Final[ - int - ] = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 - GENERATOR_Y: Final[ - int - ] = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 + GENERATOR_X: Final[int] = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 + GENERATOR_Y: Final[int] = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 # Short Weierstrass parameters: y² = x³ + ax + b WEIERSTRASS_A: Final[int] = 0 # a = 0 WEIERSTRASS_B: Final[int] = 7 # b = 7 # GLV parameters for secp256k1 - GLV_LAMBDA: Final[ - int - ] = 0x5363AD4CC05C30E0A5261C028812645A122E22EA20816678DF02967C1B23BD72 + GLV_LAMBDA: Final[int] = 0x5363AD4CC05C30E0A5261C028812645A122E22EA20816678DF02967C1B23BD72 GLV_B: Final[int] = 
0x3086D221A7D46BCDE86C90E49284EB15 GLV_C: Final[int] = 0xE4437ED6010E88286F547FA90ABFE4C3 diff --git a/dot_ring/curve/twisted_edwards/te_affine_point.py b/dot_ring/curve/twisted_edwards/te_affine_point.py index bf1d4a6..e8f7f70 100644 --- a/dot_ring/curve/twisted_edwards/te_affine_point.py +++ b/dot_ring/curve/twisted_edwards/te_affine_point.py @@ -107,10 +107,7 @@ def __add__(self, other: CurvePoint[C]) -> Self: # Compute result coordinates x3 = ((x1y2 + x2y1) * pow(1 + dx1x2y1y2, -1, self.curve.PRIME_FIELD)) % p - y3 = ( - (y1y2 - self.curve.EdwardsA * x1x2) - * pow(1 - dx1x2y1y2, -1, self.curve.PRIME_FIELD) - ) % p + y3 = ((y1y2 - self.curve.EdwardsA * x1x2) * pow(1 - dx1x2y1y2, -1, self.curve.PRIME_FIELD)) % p return self.__class__(x3, y3) @@ -158,10 +155,7 @@ def double(self) -> Self: # Calculate new coordinates x3 = (2 * x1 * y1 * pow(denom_x, -1, self.curve.PRIME_FIELD)) % p - y3 = ( - (y1**2 - self.curve.EdwardsA * x1**2) - * pow(denom_y, -1, self.curve.PRIME_FIELD) - ) % p + y3 = ((y1**2 - self.curve.EdwardsA * x1**2) * pow(denom_y, -1, self.curve.PRIME_FIELD)) % p return self.__class__(x3, y3) @@ -221,9 +215,7 @@ def encode_to_curve( if cls.curve.E2C in [E2C_Variant.ELL2, E2C_Variant.ELL2_NU]: if cls.curve.E2C.value.endswith("_NU_"): - return cls.encode_to_curve_hash2_suite_nu( - alpha_string, salt, General_Check - ) + return cls.encode_to_curve_hash2_suite_nu(alpha_string, salt, General_Check) return cls.encode_to_curve_hash2_suite_ro(alpha_string, salt, General_Check) elif cls.curve.E2C == E2C_Variant.TAI: return cls.encode_to_curve_tai(alpha_string, salt) @@ -231,9 +223,7 @@ def encode_to_curve( raise ValueError("Unexpected E2C Variant") @classmethod - def encode_to_curve_hash2_suite_ro( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> Self | Any: + def encode_to_curve_hash2_suite_ro(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> Self | Any: """ Encode a string to a curve point using 
Elligator 2. @@ -255,9 +245,7 @@ def encode_to_curve_hash2_suite_ro( return R.clear_cofactor() @classmethod - def encode_to_curve_hash2_suite_nu( - cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False - ) -> Self | Any: + def encode_to_curve_hash2_suite_nu(cls, alpha_string: bytes, salt: bytes = b"", General_Check: bool = False) -> Self | Any: """ Encode a string to a curve point using Elligator 2. @@ -292,9 +280,7 @@ def encode_to_curve_tai(cls, alpha_string: bytes | str, salt: bytes = b"") -> Se H: Self | str = "INVALID" front = b"\x01" back = b"\x00" - alpha_string = ( - alpha_string.encode() if isinstance(alpha_string, str) else alpha_string - ) + alpha_string = alpha_string.encode() if isinstance(alpha_string, str) else alpha_string salt = salt.encode() if isinstance(salt, str) else salt suite_string = cls.curve.SUITE_STRING while H == "INVALID" or H == cls.identity_point(): diff --git a/dot_ring/ring_proof/columns/columns.py b/dot_ring/ring_proof/columns/columns.py index 3840617..ffc9020 100644 --- a/dot_ring/ring_proof/columns/columns.py +++ b/dot_ring/ring_proof/columns/columns.py @@ -52,9 +52,7 @@ class PublicColumnBuilder: prime: int = S_PRIME omega: int = OMEGA - def _pad_ring_with_padding_point( - self, pk_ring: list[tuple[int, int]], size: int = MAX_RING_SIZE - ) -> list[tuple[int, int]]: + def _pad_ring_with_padding_point(self, pk_ring: list[tuple[int, int]], size: int = MAX_RING_SIZE) -> list[tuple[int, int]]: """Pad ring in‑place with the special padding point until size.""" # padding_sw = sw.from_twisted_edwards(PaddingPoint) padding_sw = PaddingPoint @@ -62,17 +60,12 @@ def _pad_ring_with_padding_point( pk_ring.append(padding_sw) return pk_ring - def _h_vector( - self, blinding_base: tuple[int, int] = Blinding_Base - ) -> list[tuple[int, int]]: + def _h_vector(self, blinding_base: tuple[int, int] = Blinding_Base) -> list[tuple[int, int]]: """Return `[2⁰·H, 2¹·H, …]` in short‑Weierstrass coords.""" # sw_bb = 
sw.from_twisted_edwards(blinding_base) sw_bb = blinding_base # print("Blinding Base:",sw_bb) - res = [ - cast(tuple[int, int], TE.mul(pow(2, i, S_PRIME), sw_bb)) - for i in range(self.size) - ] + res = [cast(tuple[int, int], TE.mul(pow(2, i, S_PRIME), sw_bb)) for i in range(self.size)] return res # B_Neck def build(self, ring_pk: list[tuple[int, int]]) -> tuple[Column, Column, Column]: @@ -119,18 +112,12 @@ def _bits_vector(self) -> list[int]: bv.append(0) # padding bit return bv - def _conditional_sum_accumulator( - self, b_vector: list[int] - ) -> tuple[list[int], list[int]]: + def _conditional_sum_accumulator(self, b_vector: list[int]) -> tuple[list[int], list[int]]: seed_sw = SeedPoint acc = [seed_sw] for i in range(1, self.size - 3): - next_pt = ( - acc[i - 1] - if b_vector[i - 1] == 0 - else cast(tuple[int, int], TE.add(acc[i - 1], self.ring_pk[i - 1])) - ) + next_pt = acc[i - 1] if b_vector[i - 1] == 0 else cast(tuple[int, int], TE.add(acc[i - 1], self.ring_pk[i - 1])) acc.append(next_pt) return H.unzip(acc) diff --git a/dot_ring/ring_proof/constants.py b/dot_ring/ring_proof/constants.py index 2775362..b905c50 100644 --- a/dot_ring/ring_proof/constants.py +++ b/dot_ring/ring_proof/constants.py @@ -38,15 +38,11 @@ S_B: int = 29569587568322301171008055308580903175558631321415017492731745847794083609535 -OMEGA_2048: int = ( - 49307615728544765012166121802278658070711169839041683575071795236746050763237 -) +OMEGA_2048: int = 49307615728544765012166121802278658070711169839041683575071795236746050763237 # 512‑th root -OMEGA_USED: int = ( - 4214636447306890335450803789410475782380792963881561516561680164772024173390 -) +OMEGA_USED: int = 4214636447306890335450803789410475782380792963881561516561680164772024173390 # Compute the 512‑th root ourselves to cross‑check SIZE: int = 512 # FFT domain size for witness polynomials diff --git a/dot_ring/ring_proof/constraints/aggregation.py b/dot_ring/ring_proof/constraints/aggregation.py index c869b83..9e73993 100644 --- 
a/dot_ring/ring_proof/constraints/aggregation.py +++ b/dot_ring/ring_proof/constraints/aggregation.py @@ -2,6 +2,7 @@ from collections.abc import Sequence +from dot_ring.curve.native_field.vector_ops import vect_add from dot_ring.ring_proof.constants import D_512 as D from dot_ring.ring_proof.constants import S_PRIME from dot_ring.ring_proof.polynomial.interpolation import ( @@ -10,7 +11,6 @@ ) from dot_ring.ring_proof.polynomial.ops import ( poly_multiply, - vect_add, vect_scalar_mul, ) diff --git a/dot_ring/ring_proof/constraints/constraints.py b/dot_ring/ring_proof/constraints/constraints.py index 3d7705a..2254fe0 100644 --- a/dot_ring/ring_proof/constraints/constraints.py +++ b/dot_ring/ring_proof/constraints/constraints.py @@ -4,6 +4,8 @@ from dataclasses import dataclass, field from typing import Any, cast +from dot_ring.curve.native_field.vector_ops import vect_add, vect_mul, vect_sub + # from concurrent.futures import ProcessPoolExecutor from dot_ring.curve.specs.bandersnatch import BandersnatchParams from dot_ring.ring_proof.constants import ( @@ -18,11 +20,7 @@ SIZE, SeedPoint, ) -from dot_ring.ring_proof.polynomial.ops import ( - lagrange_basis_polynomial, - poly_evaluate -) -from dot_ring.curve.native_field.vector_ops import vect_add, vect_sub, vect_mul +from dot_ring.ring_proof.polynomial.ops import lagrange_basis_polynomial, poly_evaluate def _to_radix4(vec: Sequence[int]) -> list[int]: @@ -86,9 +84,7 @@ def __post_init__(self) -> None: # convenient classmethod for Column builders @classmethod - def from_columns( - cls, columns: Mapping[str, Sequence[int]], Result_plus_Seed: Any - ) -> RingConstraintBuilder: + def from_columns(cls, columns: Mapping[str, Sequence[int]], Result_plus_Seed: Any) -> RingConstraintBuilder: return cls( Result_plus_Seed=Result_plus_Seed, acc_ip=columns["accip"], @@ -128,9 +124,7 @@ def _c2(self) -> list[list[int]]: accx_w = _shift(self._accx4) accy_w = _shift(self._accy4) - te_coeff_a = ( - BandersnatchParams.EDWARDS_A - ) # 
BandersnatchParams.EDWARDS_A % S_PRIME + te_coeff_a = BandersnatchParams.EDWARDS_A # BandersnatchParams.EDWARDS_A % S_PRIME b = bx x1, x2, x3 = self._accx4, self._px4, accx_w y1, y2, y3 = self._accy4, self._py4, accy_w @@ -145,9 +139,7 @@ def _c2(self) -> list[list[int]]: x3_m_x1 = vect_sub(x3, x1, S_PRIME) term1 = vect_mul(x3, vect_add(y1_y2, a_x1_x2, S_PRIME), S_PRIME) - term2 = vect_mul( - b, vect_sub(term1, vect_add(x1_y1, y2_x2, S_PRIME), S_PRIME), S_PRIME - ) + term2 = vect_mul(b, vect_sub(term1, vect_add(x1_y1, y2_x2, S_PRIME), S_PRIME), S_PRIME) term3 = vect_add(term2, vect_mul(one_m_b, x3_m_x1, S_PRIME), S_PRIME) c2x = vect_mul(term3, _NOT_LAST, S_PRIME) @@ -193,9 +185,7 @@ def _c2(self) -> list[list[int]]: y3_m_y1 = vect_sub(y3, y1, S_PRIME) term1 = vect_mul(y3, vect_sub(x1_y2, x2_y1, S_PRIME), S_PRIME) - term2 = vect_mul( - b, vect_sub(term1, vect_sub(x1_y1, y2_x2, S_PRIME), S_PRIME), S_PRIME - ) + term2 = vect_mul(b, vect_sub(term1, vect_sub(x1_y1, y2_x2, S_PRIME), S_PRIME), S_PRIME) term3 = vect_add(term2, vect_mul(one_m_b, y3_m_y1, S_PRIME), S_PRIME) c3x = vect_mul(term3, _NOT_LAST, S_PRIME) return [c2x, c3x] diff --git a/dot_ring/ring_proof/helpers.py b/dot_ring/ring_proof/helpers.py index 14e3d22..6b2f09f 100644 --- a/dot_ring/ring_proof/helpers.py +++ b/dot_ring/ring_proof/helpers.py @@ -92,10 +92,7 @@ def bls_g2_compress(g2_point: tuple) -> str: # compress the point # compress the point compressed = point_compression.compress_G2(point) - return str( - compressed[0].to_bytes(48, "big").hex() - + compressed[1].to_bytes(48, "big").hex() - ) + return str(compressed[0].to_bytes(48, "big").hex() + compressed[1].to_bytes(48, "big").hex()) @staticmethod # for fiat_shamir @@ -123,9 +120,7 @@ def to_scalar_int(string: str | bytes) -> int: return int.from_bytes(byts, "little") @staticmethod - def bls_projective_2_affine( - points_3d: list[tuple[Any, Any, Any]] - ) -> list[tuple[Any, Any]]: + def bls_projective_2_affine(points_3d: list[tuple[Any, Any, Any]]) 
-> list[tuple[Any, Any]]: """ Convert a list of 3D coordinate points to 2D by removing the z-coordinate. """ @@ -151,9 +146,7 @@ def str_to_int(byte_array: bytes, order: Literal["little", "big"]) -> int: return int.from_bytes(byte_array, order) @staticmethod - def int_to_str( - val: int, order: Literal["little", "big"], n_bytes: int = 32 - ) -> bytes: + def int_to_str(val: int, order: Literal["little", "big"], n_bytes: int = 32) -> bytes: return val.to_bytes(n_bytes, order) @staticmethod diff --git a/dot_ring/ring_proof/pcs/kzg.py b/dot_ring/ring_proof/pcs/kzg.py index 80b6099..d668020 100644 --- a/dot_ring/ring_proof/pcs/kzg.py +++ b/dot_ring/ring_proof/pcs/kzg.py @@ -81,9 +81,7 @@ def commit(cls, coeffs: CoeffVector) -> G1Point: result = blst.P1() # point at infinity else: # Use Pippenger multi-scalar multiplication - result = blst.P1_Affines.mult_pippenger( - blst.P1_Affines.as_memory(blst_points), active_scalars - ) + result = blst.P1_Affines.mult_pippenger(blst.P1_Affines.as_memory(blst_points), active_scalars) return blst_p1_to_fq_tuple(result) @classmethod @@ -193,28 +191,26 @@ def batch_verify( lhs_points = [] lhs_scalars = [] - + rhs_points = [] rhs_scalars = [] sum_v = 0 - - for coeff, (commitment, proof, point, value) in zip( - coeffs, verifications, strict=False - ): + + for coeff, (commitment, proof, point, value) in zip(coeffs, verifications, strict=False): comm_blst = commitment proof_blst = proof # LHS terms lhs_points.append(comm_blst) lhs_scalars.append(coeff) - + sum_v = (sum_v + coeff * value) % order - + coeff_z = (coeff * point) % order lhs_points.append(proof_blst) lhs_scalars.append(coeff_z) - + # RHS terms rhs_points.append(proof_blst) rhs_scalars.append(coeff) @@ -223,13 +219,9 @@ def batch_verify( lhs_points.append(g1_gen) lhs_scalars.append((-sum_v) % order) - lhs_point = blst.P1_Affines.mult_pippenger( - blst.P1_Affines.as_memory(lhs_points), lhs_scalars - ) - - rhs_point = blst.P1_Affines.mult_pippenger( - 
blst.P1_Affines.as_memory(rhs_points), rhs_scalars - ) + lhs_point = blst.P1_Affines.mult_pippenger(blst.P1_Affines.as_memory(lhs_points), lhs_scalars) + + rhs_point = blst.P1_Affines.mult_pippenger(blst.P1_Affines.as_memory(rhs_points), rhs_scalars) lhs = blst_miller_loop(lhs_point, g2_gen) rhs = blst_miller_loop(rhs_point, g2_tau) diff --git a/dot_ring/ring_proof/pcs/pairing.py b/dot_ring/ring_proof/pcs/pairing.py index 6b477c7..38f7e82 100644 --- a/dot_ring/ring_proof/pcs/pairing.py +++ b/dot_ring/ring_proof/pcs/pairing.py @@ -21,9 +21,7 @@ def _ensure_blst_p2_affine(point: blst.P2 | blst.P2_Affine) -> blst.P2_Affine: raise TypeError(f"Unsupported G2 point type: {type(point)}") -def blst_miller_loop( - p1: blst.P1 | blst.P1_Affine, p2: blst.P2 | blst.P2_Affine -) -> blst.PT: +def blst_miller_loop(p1: blst.P1 | blst.P1_Affine, p2: blst.P2 | blst.P2_Affine) -> blst.PT: """Compute Miller loop for pairing.""" return blst.PT(_ensure_blst_p2_affine(p2), _ensure_blst_p1_affine(p1)) diff --git a/dot_ring/ring_proof/pcs/srs.py b/dot_ring/ring_proof/pcs/srs.py index 1f8b8db..05356bd 100644 --- a/dot_ring/ring_proof/pcs/srs.py +++ b/dot_ring/ring_proof/pcs/srs.py @@ -13,16 +13,9 @@ from .utils import g1_to_blst, g2_to_blst -def read_srs_file() -> ( - tuple[list[tuple[int, int]], list[tuple[tuple[int, int], tuple[int, int]]]] -): +def read_srs_file() -> tuple[list[tuple[int, int]], list[tuple[tuple[int, int], tuple[int, int]]]]: base_dir = Path(__file__).resolve().parent - filename = ( - base_dir.parent.parent - / "vrf" - / "data" - / "bls12-381-srs-2-11-uncompressed-zcash.bin" - ) + filename = base_dir.parent.parent / "vrf" / "data" / "bls12-381-srs-2-11-uncompressed-zcash.bin" with open(filename, "rb") as f: data = f.read() @@ -81,9 +74,7 @@ class SRS: blst_g1: Sequence[blst.P1] blst_g2: Sequence[blst.P2] - def __init__( - self, g1_raw: list, g2_raw: list, g1_points: list, g2_points: list - ) -> None: + def __init__(self, g1_raw: list, g2_raw: list, g1_points: list, 
g2_points: list) -> None: self.g1 = [self._to_jacobian_g1(p) for p in g1_raw] self.g2 = [self._to_jacobian_g2(p) for p in g2_raw[:2]] self.g1_points = g1_points diff --git a/dot_ring/ring_proof/polynomial/fft.py b/dot_ring/ring_proof/polynomial/fft.py index 7f9e534..f8f6d66 100644 --- a/dot_ring/ring_proof/polynomial/fft.py +++ b/dot_ring/ring_proof/polynomial/fft.py @@ -106,9 +106,7 @@ def inverse_fft(values: list[int], omega: int, prime: int) -> list[int]: return [(c * inv_n) % prime for c in coeffs] -def evaluate_poly_over_domain( - poly: list[int], domain: list[int], omega: int, prime: int -) -> list[int]: +def evaluate_poly_over_domain(poly: list[int], domain: list[int], omega: int, prime: int) -> list[int]: """Evaluate polynomial over a structured domain using FFT. Assumes domain = [omega^0, omega^1, ..., omega^(n-1)] mod prime. @@ -144,9 +142,7 @@ def evaluate_poly_over_domain( return coeffs -def evaluate_poly_fft( - poly: list[int], domain_size: int, omega: int, prime: int, coset_offset: int = 1 -) -> list[int]: +def evaluate_poly_fft(poly: list[int], domain_size: int, omega: int, prime: int, coset_offset: int = 1) -> list[int]: """Evaluate polynomial over a coset domain using FFT. 
Args: diff --git a/dot_ring/ring_proof/polynomial/interpolation.py b/dot_ring/ring_proof/polynomial/interpolation.py index a011c39..d3e231c 100644 --- a/dot_ring/ring_proof/polynomial/interpolation.py +++ b/dot_ring/ring_proof/polynomial/interpolation.py @@ -24,14 +24,10 @@ def fft(a: list[int], omega: int, p: int) -> list[int]: # coeffs to evaluation return coeffs -def poly_interpolate_fft( - a: list[int], omega: int, p: int -) -> list[int]: # funcs like inverse_fft from evals to poly coeffs +def poly_interpolate_fft(a: list[int], omega: int, p: int) -> list[int]: # funcs like inverse_fft from evals to poly coeffs n = len(a) N = next_power_of_two(n) - omega_2048 = ( - 49307615728544765012166121802278658070711169839041683575071795236746050763237 - ) + omega_2048 = 49307615728544765012166121802278658070711169839041683575071795236746050763237 if N > SIZE: omega = pow(omega_2048, (2048 // N), p) @@ -48,14 +44,10 @@ def next_power_of_two(n: int) -> int: def poly_mul_fft(a: list[int], b: list[int], prime: int) -> list[int]: target_len = 2048 N = next_power_of_two(target_len) - omega = ( - 49307615728544765012166121802278658070711169839041683575071795236746050763237 - ) + omega = 49307615728544765012166121802278658070711169839041683575071795236746050763237 # Scale root of unity if needed root_order = N - omega_N = pow( - omega, (2048 // root_order), prime - ) # If omega is for 2048, scale down + omega_N = pow(omega, (2048 // root_order), prime) # If omega is for 2048, scale down # Pad inputs A = a + [0] * (N - len(a)) diff --git a/dot_ring/ring_proof/polynomial/ops.py b/dot_ring/ring_proof/polynomial/ops.py index 548541c..64dad45 100644 --- a/dot_ring/ring_proof/polynomial/ops.py +++ b/dot_ring/ring_proof/polynomial/ops.py @@ -2,7 +2,6 @@ from dot_ring.ring_proof.constants import D_512, D_2048, OMEGA, OMEGA_2048 from dot_ring.ring_proof.polynomial.fft import evaluate_poly_fft, inverse_fft -from dot_ring.curve.native_field.vector_ops import vect_add, vect_sub, vect_mul 
def mod_inverse(val: int, prime: int) -> int: @@ -12,9 +11,7 @@ def mod_inverse(val: int, prime: int) -> int: return pow(val, prime - 2, prime) -def poly_add( - poly1: list | Sequence[int], poly2: list | Sequence[int], prime: int -) -> list[int]: +def poly_add(poly1: list | Sequence[int], poly2: list | Sequence[int], prime: int) -> list[int]: """Add two polynomials in a prime field.""" # Make them the same length result_len = max(len(poly1), len(poly2)) @@ -142,9 +139,7 @@ def poly_evaluate_single(poly: list | Sequence[int], x: int, prime: int) -> int: return result -def poly_evaluate( - poly: list | Sequence[int], xs: list | int | Sequence[int], prime: int -) -> list[int] | int: +def poly_evaluate(poly: list | Sequence[int], xs: list | int | Sequence[int], prime: int) -> list[int] | int: """Evaluate polynomial at points xs. Uses FFT when xs is one of the predefined evaluation domains (D_512, D_2048). @@ -204,12 +199,7 @@ def lagrange_basis_polynomial(x_coords: list[int], i: int, prime: int) -> list[i L_i(x) = (x - x_j) / (x_i - x_j) """ # Optimization for roots of unity domains - if ( - x_coords is D_512 - or (len(x_coords) == 512 and x_coords == D_512) - or x_coords is D_2048 - or (len(x_coords) == 2048 and x_coords == D_2048) - ): + if x_coords is D_512 or (len(x_coords) == 512 and x_coords == D_512) or x_coords is D_2048 or (len(x_coords) == 2048 and x_coords == D_2048): n = len(x_coords) x_i = x_coords[i] @@ -253,8 +243,6 @@ def lagrange_basis_polynomial(x_coords: list[int], i: int, prime: int) -> list[i return basis_poly -def vect_scalar_mul( - vec: list[int] | Sequence[int], scalar: int, mod: int | None = None -) -> list[int]: +def vect_scalar_mul(vec: list[int] | Sequence[int], scalar: int, mod: int | None = None) -> list[int]: """Multiply each element in the vector by the scalar""" return [(x * scalar) % mod if mod else x * scalar for x in vec] diff --git a/dot_ring/ring_proof/proof/aggregation_poly.py b/dot_ring/ring_proof/proof/aggregation_poly.py index 
4f0361d..3fbd39e 100644 --- a/dot_ring/ring_proof/proof/aggregation_poly.py +++ b/dot_ring/ring_proof/proof/aggregation_poly.py @@ -6,9 +6,7 @@ class AggPoly: # get the aggregated poly @classmethod - def aggregated_poly( - cls, fixed_cols: list, witness_cols: list, Q_p: list[int], cf_vectors: list[int] - ) -> list[int]: + def aggregated_poly(cls, fixed_cols: list, witness_cols: list, Q_p: list[int], cf_vectors: list[int]) -> list[int]: poly_I = [ fixed_cols[0].coeffs, fixed_cols[1].coeffs, @@ -22,9 +20,7 @@ def aggregated_poly( V_list = cf_vectors agg_poly = [0] for i in range(len(poly_I)): - agg_poly = poly_add( - agg_poly, poly_scalar(poly_I[i], V_list[i], S_PRIME), S_PRIME - ) + agg_poly = poly_add(agg_poly, poly_scalar(poly_I[i], V_list[i], S_PRIME), S_PRIME) return agg_poly # two proof openings diff --git a/dot_ring/ring_proof/proof/linearization_poly.py b/dot_ring/ring_proof/proof/linearization_poly.py index b9fa341..ad1cb8e 100644 --- a/dot_ring/ring_proof/proof/linearization_poly.py +++ b/dot_ring/ring_proof/proof/linearization_poly.py @@ -1,5 +1,6 @@ from typing import cast +from dot_ring.curve.native_field.vector_ops import vect_mul from dot_ring.curve.specs.bandersnatch import BandersnatchParams from dot_ring.ring_proof.columns.columns import Column from dot_ring.ring_proof.constants import D_512 as D @@ -9,7 +10,6 @@ poly_add, poly_evaluate, poly_scalar, - vect_mul, ) from dot_ring.ring_proof.transcript.phases import phase2_eval_point from dot_ring.ring_proof.transcript.transcript import Transcript @@ -33,41 +33,18 @@ def __init__( self.a = BandersnatchParams.EDWARDS_A def evaluate_polys_at_zeta(self) -> None: - if ( - self.fs[0].coeffs is None - or self.fs[1].coeffs is None - or self.fs[2].coeffs is None - ): + if self.fs[0].coeffs is None or self.fs[1].coeffs is None or self.fs[2].coeffs is None: raise ValueError("Fixed columns not interpolated") - if ( - self.wts[0].coeffs is None - or self.wts[1].coeffs is None - or self.wts[2].coeffs is None - or 
self.wts[3].coeffs is None - ): + if self.wts[0].coeffs is None or self.wts[1].coeffs is None or self.wts[2].coeffs is None or self.wts[3].coeffs is None: raise ValueError("Witness columns not interpolated") - self.P_x_zeta = cast( - list[int], poly_evaluate(self.fs[0].coeffs, [self.zeta], S_PRIME) - )[0] - self.P_y_zeta = cast( - list[int], poly_evaluate(self.fs[1].coeffs, [self.zeta], S_PRIME) - )[0] - self.s_zeta = cast( - list[int], poly_evaluate(self.fs[2].coeffs, [self.zeta], S_PRIME) - )[0] - self.b_zeta = cast( - list[int], poly_evaluate(self.wts[0].coeffs, [self.zeta], S_PRIME) - )[0] - self.acc_ip_zeta = cast( - list[int], poly_evaluate(self.wts[3].coeffs, [self.zeta], S_PRIME) - )[0] - self.acc_x_zeta = cast( - list[int], poly_evaluate(self.wts[1].coeffs, [self.zeta], S_PRIME) - )[0] - self.acc_y_zeta = cast( - list[int], poly_evaluate(self.wts[2].coeffs, [self.zeta], S_PRIME) - )[0] + self.P_x_zeta = cast(list[int], poly_evaluate(self.fs[0].coeffs, [self.zeta], S_PRIME))[0] + self.P_y_zeta = cast(list[int], poly_evaluate(self.fs[1].coeffs, [self.zeta], S_PRIME))[0] + self.s_zeta = cast(list[int], poly_evaluate(self.fs[2].coeffs, [self.zeta], S_PRIME))[0] + self.b_zeta = cast(list[int], poly_evaluate(self.wts[0].coeffs, [self.zeta], S_PRIME))[0] + self.acc_ip_zeta = cast(list[int], poly_evaluate(self.wts[3].coeffs, [self.zeta], S_PRIME))[0] + self.acc_x_zeta = cast(list[int], poly_evaluate(self.wts[1].coeffs, [self.zeta], S_PRIME))[0] + self.acc_y_zeta = cast(list[int], poly_evaluate(self.wts[2].coeffs, [self.zeta], S_PRIME))[0] def compute_l1(self) -> list[int]: if self.wts[3].coeffs is None: @@ -80,9 +57,7 @@ def compute_l2(self) -> list[int]: b = self.b_zeta coeff_a = self.a - C_acc_x = ( - b * (y1 * y2 + (coeff_a * x1 * x2)) % S_PRIME + (1 - b) % S_PRIME - ) % S_PRIME + C_acc_x = (b * (y1 * y2 + (coeff_a * x1 * x2)) % S_PRIME + (1 - b) % S_PRIME) % S_PRIME C_acc_y = 0 C_acc_x_f = C_acc_x * self.scalar_term C_acc_y_f = C_acc_y * self.scalar_term @@ 
-134,9 +109,7 @@ def l_agg_poly(self) -> tuple[Transcript, int, dict[str, int], list[int], int, i l3 = self.compute_l3() l_agg = self.linearize(l1, l2, l3) - l_agg_zeta_omega = cast( - list[int], poly_evaluate(l_agg, [self.zeta_omega], S_PRIME) - )[0] + l_agg_zeta_omega = cast(list[int], poly_evaluate(l_agg, [self.zeta_omega], S_PRIME))[0] return ( self.t, self.zeta, diff --git a/dot_ring/ring_proof/transcript/phases.py b/dot_ring/ring_proof/transcript/phases.py index bcd329b..dda1a79 100644 --- a/dot_ring/ring_proof/transcript/phases.py +++ b/dot_ring/ring_proof/transcript/phases.py @@ -35,7 +35,7 @@ def phase2_eval_point(t: Transcript, C_q_commitment: Any) -> Any: def phase3_nu_vector( t: Transcript, - rel_poly_evals: list[int], + rel_poly_evals: list[int] | bytes, agg_poly_eval: int, ) -> list[int]: """Append evaluation bundle and linearisation eval, return 8 ν‑challenges.""" diff --git a/dot_ring/ring_proof/transcript/serialize.py b/dot_ring/ring_proof/transcript/serialize.py index ea12f89..4499dba 100644 --- a/dot_ring/ring_proof/transcript/serialize.py +++ b/dot_ring/ring_proof/transcript/serialize.py @@ -11,7 +11,7 @@ def serialize(obj: Any) -> bytes: """Serialize objects into bytes format exactly as in the original implementation.""" # Handle Scalar types by converting to int - if type(obj).__name__ == 'Scalar': + if type(obj).__name__ == "Scalar": return serialize(int(obj)) if isinstance(obj, int): diff --git a/dot_ring/ring_proof/verify.py b/dot_ring/ring_proof/verify.py index c568b45..87eab5e 100644 --- a/dot_ring/ring_proof/verify.py +++ b/dot_ring/ring_proof/verify.py @@ -1,14 +1,18 @@ +from functools import cache from typing import Any, cast -from functools import lru_cache -from py_ecc.optimized_bls12_381 import curve_order # type: ignore[import-untyped] -from py_ecc.optimized_bls12_381 import normalize as nm # type: ignore[import-untyped] +from py_ecc.optimized_bls12_381 import ( # type: ignore[import-untyped] + curve_order, +) +from 
py_ecc.optimized_bls12_381 import ( + normalize as nm, +) -from dot_ring import blst as _blst # type: ignore[import-untyped] +from dot_ring import blst as _blst from dot_ring.curve.native_field.scalar import Scalar from dot_ring.curve.specs.bandersnatch import BandersnatchParams from dot_ring.ring_proof.constants import D_512 as D -from dot_ring.ring_proof.constants import OMEGA, S_PRIME, SIZE +from dot_ring.ring_proof.constants import OMEGA, OMEGA_2048, S_PRIME, SIZE from dot_ring.ring_proof.helpers import Helpers as H from dot_ring.ring_proof.pcs.kzg import KZG from dot_ring.ring_proof.pcs.utils import g1_to_blst @@ -28,7 +32,7 @@ ONE_S = Scalar(1) ZERO_S = Scalar(0) EDWARDS_A_S = Scalar(BandersnatchParams.EDWARDS_A) -INV_SIZE_S = SIZE_S ** -1 +INV_SIZE_S = SIZE_S**-1 OMEGA_POW_SIZE_MINUS_4 = OMEGA_S ** (SIZE - 4) @@ -42,10 +46,9 @@ def blst_msm(points: list, scalars: list) -> Any: return blst.P1_Affines.mult_pippenger(blst.P1_Affines.as_memory(points), scalars) -@lru_cache(maxsize=None) -def lagrange_at_zeta( - domain_size: int, index: int, zeta: int, omega: int, prime: int -) -> Scalar: + +@cache +def lagrange_at_zeta(domain_size: int, index: int, zeta: int, omega: int, prime: int) -> Scalar: """ Compute L_i(zeta) using closed-form formula for roots of unity domain. @@ -54,18 +57,16 @@ def lagrange_at_zeta( This is O(1) instead of O(n) polynomial evaluation! 
""" # Use Scalar for optimized arithmetic - # n_s = Scalar(domain_size) # Use SIZE_S global - # omega_s = Scalar(omega) # Use OMEGA_S global zeta_s = Scalar(zeta) - + # omega^i if index == 0: omega_i = ONE_S elif index == SIZE - 4: omega_i = OMEGA_POW_SIZE_MINUS_4 else: - omega_i = OMEGA_S ** index - + omega_i = OMEGA_S**index + # zeta - omega^i zeta_minus_omega_i = zeta_s - omega_i @@ -74,14 +75,13 @@ def lagrange_at_zeta( return ONE_S # L_i(omega^i) = 1 # zeta^n - 1 - zeta_n_minus_1 = (zeta_s ** domain_size) - ONE_S + zeta_n_minus_1 = (zeta_s**domain_size) - ONE_S # omega^i / n - # inv_n = SIZE_S ** -1 # Use INV_SIZE_S global omega_i_over_n = omega_i * INV_SIZE_S # Final: (omega^i / n) * (zeta^n - 1) / (zeta - omega^i) - result = omega_i_over_n * zeta_n_minus_1 * (zeta_minus_omega_i ** -1) + result = omega_i_over_n * zeta_n_minus_1 * (zeta_minus_omega_i**-1) return result @@ -90,12 +90,14 @@ class Verify: def __init__( self, proof: tuple, - vk: dict, + vk: dict | bytes, fixed_cols: list, - rl_to_proove: tuple, + rl_to_proove: tuple | bytes, rps: tuple, seed_point: tuple, Domain: list, + raw_proof_bytes: dict | None = None, + transcript_challenge: bytes = b"Bandersnatch_SHA-512_ELL2", ) -> None: ( self.Cb, @@ -133,56 +135,64 @@ def __init__( self.Cpy_blst = g1_to_blst(self.Cpy) self.Cs_blst = g1_to_blst(self.Cs) - # can even put as separate function - self.t = Transcript(S_PRIME, b"Bandersnatch_SHA-512_ELL2") - self.cur_t, self.alpha_list = phase1_alphas( + self.t = Transcript(S_PRIME, transcript_challenge) + + # Absorb into transcript + self.t, self.alpha_list = phase1_alphas( self.t, self.verifier_key, self.relation_to_proove, list(H.to_int(nm(cmt)) for cmt in self.proof_ptr[:4]), - ) # cb, caccip, caccx, caccy - - self.cur_t, self.zeta_p = phase2_eval_point( - self.cur_t, H.to_int(nm(self.proof_ptr[-4])) - ) - self.V_list = phase3_nu_vector( - self.cur_t, list(self.proof_ptr[4:11]), self.proof_ptr[-3] ) + # Add quotient and get zeta + self.t, self.zeta_p = 
phase2_eval_point(self.t, H.to_int(nm(self.proof_ptr[-4]))) + + # Phase 3: Add evaluations and get ν challenges + evals_bytes = b"".join(v.to_bytes(32, "little") for v in self.proof_ptr[4:11]) + lin_eval_bytes = self.proof_ptr[-3].to_bytes(32, "little") + + self.V_list = phase3_nu_vector(self.t, evals_bytes, lin_eval_bytes) + + # Save transcript + self.cur_t = self.t + def contributions_to_constraints_eval_at_zeta( self, ) -> tuple[Scalar, Scalar, Scalar, Scalar, Scalar, Scalar, Scalar]: # Convert to Scalar for optimized arithmetic zeta = Scalar(self.zeta_p) sx, sy = Scalar(self.sp[0]), Scalar(self.sp[1]) - + # Precompute common values - zeta_minus_d4 = zeta - D_S[-4] + zeta_minus_d4 = zeta - Scalar(self.D[-4]) # Inline lagrange_at_zeta for index=0 and index=SIZE-4 # L_i(zeta) = (omega^i / n) * (zeta^n - 1) / (zeta - omega^i) - + # Shared term: zeta^n - 1 - zeta_n_minus_1 = (zeta ** SIZE) - ONE_S - + domain_size = len(self.D) + zeta_n_minus_1 = (zeta**domain_size) - ONE_S + # L_0: index=0, omega^0 = 1 # omega^0 / n = 1/n = INV_SIZE_S # zeta - omega^0 = zeta - 1 + inv_size = Scalar(domain_size) ** -1 zeta_minus_1 = zeta - ONE_S if zeta_minus_1 == ZERO_S: L_0_zeta = ONE_S else: - L_0_zeta = INV_SIZE_S * zeta_n_minus_1 * (zeta_minus_1 ** -1) - - # L_N_4: index=SIZE-4, omega^(SIZE-4) = OMEGA_POW_SIZE_MINUS_4 - # omega^(SIZE-4) / n = OMEGA_POW_SIZE_MINUS_4 * INV_SIZE_S - omega_i_N_4 = OMEGA_POW_SIZE_MINUS_4 - omega_i_over_n_N_4 = omega_i_N_4 * INV_SIZE_S + L_0_zeta = inv_size * zeta_n_minus_1 * (zeta_minus_1**-1) + + # L_N_4: index=SIZE-4, omega^(SIZE-4) from the domain + # omega^(SIZE-4) / n + omega_i_N_4 = Scalar(self.D[-4]) + omega_i_over_n_N_4 = omega_i_N_4 * inv_size zeta_minus_omega_i_N_4 = zeta - omega_i_N_4 if zeta_minus_omega_i_N_4 == ZERO_S: L_N_4_zeta = ONE_S else: - L_N_4_zeta = omega_i_over_n_N_4 * zeta_n_minus_1 * (zeta_minus_omega_i_N_4 ** -1) + L_N_4_zeta = omega_i_over_n_N_4 * zeta_n_minus_1 * (zeta_minus_omega_i_N_4**-1) # Pre-fetch instance variables 
and convert to Scalar b = Scalar(self.b_zeta) @@ -202,10 +212,10 @@ def contributions_to_constraints_eval_at_zeta( x1_y1 = accx * accy x2_y2 = px * py one_minus_b = ONE_S - b - + c2 = b * -(x1_y1 + x2_y2) + one_minus_b * -accx c2_zeta = c2 * zeta_minus_d4 - + c3 = b * -(x1_y1 - x2_y2) + one_minus_b * -accy c3_zeta = c3 * zeta_minus_d4 @@ -216,10 +226,15 @@ def contributions_to_constraints_eval_at_zeta( c5_zeta = (accx - sx) * L_0_zeta + (accx - rps0) * L_N_4_zeta c6_zeta = (accy - sy) * L_0_zeta + (accy - rps1) * L_N_4_zeta c7_zeta = accip * L_0_zeta + (accip - ONE_S) * L_N_4_zeta - + return ( - c1_zeta, c2_zeta, c3_zeta, c4_zeta, - c5_zeta, c6_zeta, c7_zeta + c1_zeta, + c2_zeta, + c3_zeta, + c4_zeta, + c5_zeta, + c6_zeta, + c7_zeta, ) def divide(self, numr: int, denom: int) -> int: @@ -237,32 +252,27 @@ def _prepare_quotient_poly_verification(self) -> tuple[Any, Any, int, int]: alphas_list = [Scalar(a) for a in self.alpha_list] zeta = Scalar(self.zeta_p) v_list = [Scalar(v) for v in self.V_list] - + # cs are now Scalars cs = self.contributions_to_constraints_eval_at_zeta() # Precompute vanishing polynomial evaluation - combine pow operations prod_sum = ONE_S for k in range(1, 4): - prod_sum = prod_sum * (zeta - D_S[-k]) + prod_sum = prod_sum * (zeta - Scalar(self.D[-k])) # Calculate numerator efficiently linear_combination = ZERO_S for alpha, c in zip(alphas_list, cs, strict=False): linear_combination = linear_combination + alpha * c - numerator = linear_combination - # Re-calculating based on original quotient definition, but with optimized s_sum s_sum = linear_combination + Scalar(self.l_zeta_omega) - zeta_pow_size_minus_1 = (zeta ** SIZE) - ONE_S - + domain_size = len(self.D) + zeta_pow_size_minus_1 = (zeta**domain_size) - ONE_S + # q_zeta = (s_sum * prod_sum) / zeta_pow_size_minus_1 - q_zeta = (s_sum * prod_sum) * (zeta_pow_size_minus_1 ** -1) - - # Convert q_zeta to int for blst_msm if needed, or keep as Scalar if blst_msm handles it? 
- # blst_msm expects ints. - q_zeta_int = int(q_zeta) + q_zeta = (s_sum * prod_sum) * (zeta_pow_size_minus_1**-1) C_a_blst = [ self.Cpx_blst, @@ -287,7 +297,7 @@ def _prepare_quotient_poly_verification(self) -> tuple[Any, Any, int, int]: accip = Scalar(self.accip_zeta) accx = Scalar(self.accx_zeta) accy = Scalar(self.accy_zeta) - q = q_zeta # Scalar + q = q_zeta # Scalar terms = [ v_list[0] * px, @@ -310,8 +320,8 @@ def _prepare_linearization_poly_verification(self) -> tuple[Any, Any, int, int]: """Prepare KZG verification data for linearization polynomial""" alphas_list = [Scalar(a) for a in self.alpha_list] zeta = Scalar(self.zeta_p) - - zeta_minus_d4 = zeta - D_S[-4] + + zeta_minus_d4 = zeta - Scalar(self.D[-4]) # Cl1 scalar scalar_cl1 = zeta_minus_d4 @@ -321,18 +331,14 @@ def _prepare_linearization_poly_verification(self) -> tuple[Any, Any, int, int]: x2, y2 = Scalar(self.px_zeta), Scalar(self.py_zeta) b = Scalar(self.b_zeta) coeff_a = EDWARDS_A_S - + # S_PRIME is scalar field modulus, which Scalar handles implicitly - - C_acc_x_cl2 = ( - b * (y1 * y2 + (coeff_a * x1 * x2)) + (ONE_S - b) - ) + + C_acc_x_cl2 = b * (y1 * y2 + (coeff_a * x1 * x2)) + (ONE_S - b) C_acc_x_f_cl2 = C_acc_x_cl2 * zeta_minus_d4 # Cl3 scalars - C_acc_y_cl3 = ( - (b * (x1 * y2 - x2 * y1)) + (ONE_S - b) - ) + C_acc_y_cl3 = (b * (x1 * y2 - x2 * y1)) + (ONE_S - b) C_acc_y_f_cl3 = C_acc_y_cl3 * zeta_minus_d4 # Combined scalars @@ -345,15 +351,27 @@ def _prepare_linearization_poly_verification(self) -> tuple[Any, Any, int, int]: scalars = [int(scalar_accip), int(scalar_accx), int(scalar_accy)] Cl = blst_msm(points, scalars) - zeta_omega = zeta * Scalar(OMEGA) + # Compute omega for the actual domain size + domain_size = len(self.D) + if domain_size == 512: + omega = OMEGA + elif domain_size == 1024: + omega = pow(OMEGA_2048, 2048 // 1024, S_PRIME) + elif domain_size == 2048: + omega = OMEGA_2048 + else: + # Fallback: compute from OMEGA_2048 + omega = pow(OMEGA_2048, 2048 // domain_size, S_PRIME) 
+ + zeta_omega = zeta * Scalar(omega) return (Cl, self.Phi_zeta_omega_blst, int(zeta_omega), int(Scalar(self.l_zeta_omega))) # Legacy methods for backwards compatibility def evaluation_of_quotient_poly_at_zeta(self) -> bool: """Legacy method - use is_valid() with batch verification instead""" + raise NotImplementedError("Use is_valid() with batch verification instead") def evaluation_of_linearization_poly_at_zeta_omega(self) -> bool: """Legacy method - use is_valid() with batch verification instead""" - verification = self._prepare_linearization_poly_verification() - return KZG.verify(*verification) \ No newline at end of file + raise NotImplementedError("Use is_valid() with batch verification instead") diff --git a/dot_ring/vrf/ietf/ietf.py b/dot_ring/vrf/ietf/ietf.py index 55ce782..c9a87ab 100644 --- a/dot_ring/vrf/ietf/ietf.py +++ b/dot_ring/vrf/ietf/ietf.py @@ -52,9 +52,7 @@ def from_bytes(cls, proof_bytes: bytes) -> IETF_VRF: proof_bytes[output_point_end:c_end], cast(Literal["little", "big"], cls.cv.curve.ENDIAN), ) - s = Helpers.str_to_int( - proof_bytes[c_end:], cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) + s = Helpers.str_to_int(proof_bytes[c_end:], cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) if s >= cls.cv.curve.ORDER: raise ValueError("Response scalar s is not less than the curve order") return cls(output_point, c, s) @@ -74,9 +72,7 @@ def to_bytes(self) -> bytes: cast(Literal["little", "big"], self.cv.curve.ENDIAN), self.cv.curve.CHALLENGE_LENGTH, ) - + Helpers.int_to_str( - self.s, cast(Literal["little", "big"], self.cv.curve.ENDIAN), scalar_len - ) + + Helpers.int_to_str(self.s, cast(Literal["little", "big"], self.cv.curve.ENDIAN), scalar_len) ) return proof @@ -100,12 +96,7 @@ def prove( Returns: Tuple[BandersnatchPoint, Tuple[int, int]]: (output_point, (c, s)) """ - secret_key_int = ( - Helpers.str_to_int( - secret_key, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) - % cls.cv.curve.ORDER - ) + secret_key_int = 
Helpers.str_to_int(secret_key, cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) % cls.cv.curve.ORDER # Create generator point generator = cls.cv.point.generator_point() @@ -126,15 +117,11 @@ def prove( V = input_point * nonce # Generate challenge - c = cls.challenge( - [public_key, input_point, output_point, U, V], additional_data - ) + c = cls.challenge([public_key, input_point, output_point, U, V], additional_data) s = (nonce + (c * secret_key_int)) % cls.cv.curve.ORDER return cls(output_point, c, s) - def verify( - self, public_key: bytes, input: bytes, additional_data: bytes, salt: bytes = b"" - ) -> bool: + def verify(self, public_key: bytes, input: bytes, additional_data: bytes, salt: bytes = b"") -> bool: """ Verify IETF VRF proof. @@ -161,8 +148,6 @@ def verify( V = self.cv.point.msm([input_point, self.output_point], [self.s, neg_c]) # Verify challenge - expected_c = self.challenge( - [public_key_pt, input_point, self.output_point, U, V], additional_data - ) + expected_c = self.challenge([public_key_pt, input_point, self.output_point, U, V], additional_data) return self.c == expected_c diff --git a/dot_ring/vrf/pedersen/pedersen.py b/dot_ring/vrf/pedersen/pedersen.py index 442f0de..40bef83 100644 --- a/dot_ring/vrf/pedersen/pedersen.py +++ b/dot_ring/vrf/pedersen/pedersen.py @@ -45,29 +45,18 @@ def from_bytes(cls, proof: bytes) -> PedersenVRF: if cls.cv.curve.UNCOMPRESSED: point_length *= 2 - output_point = cls.cv.point.string_to_point( - proof[point_length * 0 : point_length * 1] - ) + output_point = cls.cv.point.string_to_point(proof[point_length * 0 : point_length * 1]) - public_key_cp = cls.cv.point.string_to_point( - proof[point_length * 1 : point_length * 2] - ) + public_key_cp = cls.cv.point.string_to_point(proof[point_length * 1 : point_length * 2]) R = cls.cv.point.string_to_point(proof[point_length * 2 : point_length * 3]) Ok = cls.cv.point.string_to_point(proof[point_length * 3 : point_length * 4]) s = Helpers.str_to_int( proof[-scalar_len * 
2 : -scalar_len], cast(Literal["little", "big"], cls.cv.curve.ENDIAN), ) - Sb = Helpers.str_to_int( - proof[-scalar_len:], cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) + Sb = Helpers.str_to_int(proof[-scalar_len:], cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) - if ( - isinstance(output_point, str) - or isinstance(public_key_cp, str) - or isinstance(R, str) - or isinstance(Ok, str) - ): + if isinstance(output_point, str) or isinstance(public_key_cp, str) or isinstance(R, str) or isinstance(Ok, str): raise ValueError("Invalid point in proof") return cls( @@ -97,9 +86,7 @@ def to_bytes(self) -> bytes: + self.blinded_pk.point_to_string() + self.result_point.point_to_string() + self.ok.point_to_string() - + Helpers.int_to_str( - self.s, cast(Literal["little", "big"], self.cv.curve.ENDIAN), scalar_len - ) + + Helpers.int_to_str(self.s, cast(Literal["little", "big"], self.cv.curve.ENDIAN), scalar_len) + Helpers.int_to_str( self.sb, cast(Literal["little", "big"], self.cv.curve.ENDIAN), @@ -131,12 +118,7 @@ def prove( """ scalar_len = (cls.cv.curve.PRIME_FIELD.bit_length() + 7) // 8 - secret_key_int = ( - Helpers.str_to_int( - secret_key, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) - % cls.cv.curve.ORDER - ) + secret_key_int = Helpers.str_to_int(secret_key, cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) % cls.cv.curve.ORDER # Create generator point generator = cls.cv.point.generator_point() @@ -144,12 +126,8 @@ def prove( b_base = cls.cv.point(cast(int, cls.cv.curve.BBx), cast(int, cls.cv.curve.BBy)) input_point = cast(Any, cls.cv.point).encode_to_curve(alpha, salt) # Use curve's endianness for secret key serialization - secret_key_bytes = secret_key_int.to_bytes( - scalar_len, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) - blinding_factor = cls.blinding( - secret_key_bytes, input_point.point_to_string(), additional_data - ) + secret_key_bytes = secret_key_int.to_bytes(scalar_len, cast(Literal["little", "big"], 
cls.cv.curve.ENDIAN)) + blinding_factor = cls.blinding(secret_key_bytes, input_point.point_to_string(), additional_data) output_point = input_point * secret_key_int @@ -164,9 +142,7 @@ def prove( public_key_cp = cast(Any, generator) * secret_key_int + b_base * blinding_factor R = cast(Any, generator) * k + b_base * Kb Ok = input_point * k - c = cls.challenge( - [public_key_cp, input_point, output_point, R, Ok], additional_data - ) + c = cls.challenge([public_key_cp, input_point, output_point, R, Ok], additional_data) s = (k + c * secret_key_int) % cls.cv.curve.ORDER Sb = (Kb + c * blinding_factor) % cls.cv.curve.ORDER @@ -193,9 +169,7 @@ def verify(self, input: bytes, additional_data: bytes) -> bool: """ generator = self.cv.point.generator_point() input_point = cast(Any, self.cv.point).encode_to_curve(input) - b_base = self.cv.point( - cast(int, self.cv.curve.BBx), cast(int, self.cv.curve.BBy) - ) + b_base = self.cv.point(cast(int, self.cv.curve.BBx), cast(int, self.cv.curve.BBy)) c = self.challenge( [ @@ -210,9 +184,7 @@ def verify(self, input: bytes, additional_data: bytes) -> bool: # Check 1: ok + c * output_point - s * input_point == 0 # 1*ok + c*output_point + (-s)*input_point == identity - check1 = self.cv.point.msm( - [self.ok, self.output_point, input_point], [1, c, -self.s] - ) + check1 = self.cv.point.msm([self.ok, self.output_point, input_point], [1, c, -self.s]) Theta0 = check1.is_identity() # Check 2: result_point + c * blinded_pk - s * generator - sb * b_base == 0 diff --git a/dot_ring/vrf/ring/ring_root.py b/dot_ring/vrf/ring/ring_root.py index 06c8d1f..5f0dbb9 100644 --- a/dot_ring/vrf/ring/ring_root.py +++ b/dot_ring/vrf/ring/ring_root.py @@ -18,17 +18,11 @@ def to_bytes(self) -> bytes: # or require a specific conversion method not shown here. # This implementation assumes 'ring_root' refers to the commitments themselves. 
comm_keys = ( - H.bls_g1_compress( - cast(Any, self.px.commitment) - ), # Cast to Any or a more specific tuple type if known + H.bls_g1_compress(cast(Any, self.px.commitment)), # Cast to Any or a more specific tuple type if known H.bls_g1_compress(cast(Any, self.py.commitment)), H.bls_g1_compress(cast(Any, self.s.commitment)), ) - return ( - bytes.fromhex(comm_keys[0]) - + bytes.fromhex(comm_keys[1]) - + bytes.fromhex(comm_keys[2]) - ) + return bytes.fromhex(comm_keys[0]) + bytes.fromhex(comm_keys[1]) + bytes.fromhex(comm_keys[2]) @classmethod def from_bytes(cls, data: bytes) -> "RingRoot": diff --git a/dot_ring/vrf/ring/ring_vrf.py b/dot_ring/vrf/ring/ring_vrf.py index ad2a2a9..be4f56b 100644 --- a/dot_ring/vrf/ring/ring_vrf.py +++ b/dot_ring/vrf/ring/ring_vrf.py @@ -119,21 +119,13 @@ def from_bytes(cls, proof: bytes, skip_pedersen: bool = False) -> "RingVRF": commitment_size = 48 # Size of compressed G1 point - c_b_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + c_b_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size - c_accip_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + c_accip_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size - c_accx_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + c_accx_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size - c_accy_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + c_accy_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size px_zeta = H.to_scalar_int(proof[offset : offset + 32]) @@ -151,21 +143,15 @@ def from_bytes(cls, proof: bytes, skip_pedersen: bool = False) -> "RingVRF": accy_zeta = H.to_scalar_int(proof[offset : offset + 32]) offset += 32 - 
c_q_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + c_q_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size l_zeta_omega = H.to_scalar_int(proof[offset : offset + 32]) offset += 32 - open_agg_zeta_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + open_agg_zeta_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size - open_l_zeta_omega_commitment = H.bls_g1_decompress( - proof[offset : offset + commitment_size].hex() - ) + open_l_zeta_omega_commitment = H.bls_g1_decompress(proof[offset : offset + commitment_size].hex()) offset += commitment_size return cls( pedersen_proof=pedersen_proof, @@ -183,12 +169,8 @@ def from_bytes(cls, proof: bytes, skip_pedersen: bool = False) -> "RingVRF": c_q=Column(name="c_q", evals=[], commitment=c_q_commitment), l_zeta_omega=l_zeta_omega, # TODO: Fix Opening initialization; unsafe scalar 0 used temporarily - open_agg_zeta=Opening( - proof=open_agg_zeta_commitment, y=0 - ), # We only need opening proof to verify - open_l_zeta_omega=Opening( - proof=open_l_zeta_omega_commitment, y=0 - ), # We only need opening proof to verify + open_agg_zeta=Opening(proof=open_agg_zeta_commitment, y=0), # We only need opening proof to verify + open_l_zeta_omega=Opening(proof=open_l_zeta_omega_commitment, y=0), # We only need opening proof to verify ) @classmethod @@ -243,9 +225,7 @@ def generate_bls_signature( fixed_cols = ring_root.build(keys_as_bs_points) s_v = fixed_cols[-1].evals producer_index = keys_as_bs_points.index(producer_key_pt) - witness_obj = WitnessColumnBuilder( - keys_as_bs_points, s_v, producer_index, blinding_factor - ) + witness_obj = WitnessColumnBuilder(keys_as_bs_points, s_v, producer_index, blinding_factor) witness_res = witness_obj.build() witness_relation_res = witness_obj.result(Blinding_Base) Result_plus_Seed = 
witness_obj.result_p_seed(witness_relation_res) @@ -360,9 +340,7 @@ def verify_ring_proof( for c in fixed_cols_cmts: assert c is not None fixed_cols_cmts_safe.append(c) - comm_keys_affine = H.bls_projective_2_affine( - cast(list[Any], fixed_cols_cmts_safe) - ) + comm_keys_affine = H.bls_projective_2_affine(cast(list[Any], fixed_cols_cmts_safe)) comm_keys_int = [H.to_int(pt) for pt in comm_keys_affine] verifier_key: dict[str, Any] = { "g1": srs.g1_points[0], @@ -437,9 +415,7 @@ def prove( pedersen_proof = PedersenVRF[cast(Any, cls).cv].prove(alpha, secret_key, ad) # type: ignore[misc] # ring_proof - ring_proof = cls.generate_bls_signature( - pedersen_proof._blinding_factor, producer_key, keys - ) + ring_proof = cls.generate_bls_signature(pedersen_proof._blinding_factor, producer_key, keys) return cls(pedersen_proof, *ring_proof) @@ -472,8 +448,6 @@ def verify( if self.pedersen_proof is None: raise ValueError("Pedersen proof is missing") p_proof_valid = self.pedersen_proof.verify(input, ad_data) - ring_proof_valid = self.verify_ring_proof( - self.pedersen_proof.blinded_pk, ring_root - ) + ring_proof_valid = self.verify_ring_proof(self.pedersen_proof.blinded_pk, ring_root) return p_proof_valid and ring_proof_valid diff --git a/dot_ring/vrf/vrf.py b/dot_ring/vrf/vrf.py index c68d771..f668e38 100644 --- a/dot_ring/vrf/vrf.py +++ b/dot_ring/vrf/vrf.py @@ -20,9 +20,7 @@ class VRFProtocol(Protocol[C, P]): point_type: type[P] @abstractmethod - def proof( - self, alpha: bytes, secret_key: int, additional_data: bytes - ) -> tuple[P, tuple[int, int]]: + def proof(self, alpha: bytes, secret_key: int, additional_data: bytes) -> tuple[P, tuple[int, int]]: """Generate VRF proof.""" ... 
@@ -66,9 +64,7 @@ def __class_getitem__(cls, curve_variant: CurveVariant | Any) -> type[Self] | An """ if not isinstance(curve_variant, CurveVariant): return cls - new_class = type( - f"{cls.__name__}[{curve_variant.name}]", (cls,), {"cv": curve_variant} - ) + new_class = type(f"{cls.__name__}[{curve_variant.name}]", (cls,), {"cv": curve_variant}) return new_class @classmethod @@ -102,9 +98,7 @@ def generate_nonce(cls, secret_key: int, input_point: CurvePoint) -> int: data = sk_hash + input_point_octet # Generate final nonce nonce_hash = cls.cv.curve.hash(data, hash_len) - nonce = Helpers.str_to_int( - nonce_hash, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) + nonce = Helpers.str_to_int(nonce_hash, cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) # Calculate k = nonce % order k = nonce % cls.cv.curve.ORDER @@ -145,9 +139,7 @@ def challenge(cls, points: list[CurvePoint], additional_data: bytes) -> int: return Helpers.b_endian_2_int(challenge_hash) % cls.cv.curve.ORDER @classmethod - def ecvrf_nonce_rfc6979( - cls, secret_scalar: int, h_string: bytes, hash_func: str = "sha256" - ) -> int: + def ecvrf_nonce_rfc6979(cls, secret_scalar: int, h_string: bytes, hash_func: str = "sha256") -> int: """ nonce generation as per rfc_6979 Deterministically derives a nonce from secret scalar and input bytes. 
@@ -193,12 +185,8 @@ def ecvrf_decode_proof(cls, pi_string: bytes | str) -> tuple[CurvePoint, int, in pi_string = bytes.fromhex(pi_string) # Get lengths from curve parameters - challenge_len = ( - cls.cv.curve.CHALLENGE_LENGTH - ) # Dynamic challenge length from curve - scalar_len = ( - cls.cv.curve.ORDER.bit_length() + 7 - ) // 8 # Scalar length based on curve order + challenge_len = cls.cv.curve.CHALLENGE_LENGTH # Dynamic challenge length from curve + scalar_len = (cls.cv.curve.ORDER.bit_length() + 7) // 8 # Scalar length based on curve order # Calculate positions in the proof gamma_end = cls.cv.curve.POINT_LEN @@ -218,12 +206,8 @@ def ecvrf_decode_proof(cls, pi_string: bytes | str) -> tuple[CurvePoint, int, in gamma = cls.cv.point.string_to_point(gamma_string) if isinstance(gamma, str): raise ValueError("Invalid gamma point") - C = Helpers.str_to_int( - c_string, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) - S = Helpers.str_to_int( - s_string, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) + C = Helpers.str_to_int(c_string, cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) + S = Helpers.str_to_int(s_string, cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) if S >= cls.cv.curve.ORDER: raise ValueError("Response scalar S is not less than the curve order") @@ -274,12 +258,7 @@ def proof_to_hash(cls, gamma: CurvePoint, mul_cofactor: bool = False) -> bytes: @classmethod def get_public_key(cls, secret_key: bytes) -> bytes: """Take the Secret_Key and return Public Key""" - secret_key_int = ( - Helpers.str_to_int( - secret_key, cast(Literal["little", "big"], cls.cv.curve.ENDIAN) - ) - % cls.cv.curve.ORDER - ) + secret_key_int = Helpers.str_to_int(secret_key, cast(Literal["little", "big"], cls.cv.curve.ENDIAN)) % cls.cv.curve.ORDER # Create generator point generator = cls.cv.point.generator_point() public_key: CurvePoint = cast(Any, generator) * secret_key_int diff --git a/pyproject.toml b/pyproject.toml index 4b4fc69..23257c8 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,13 @@ [project] name = "dot-ring" -version = "0.1.4" +dynamic = ["version"] description = "Serves as a library to generate and verify a signature using IETF, Pedersen and Ring VRF-AD Schemes" authors = [ {name = "prasad-kumkar", email = "prasad@chainscore.finance"} ] readme = "README.md" -license = "MIT" -requires-python = ">=3.12" +license = {text = "MIT"} +requires-python = ">=3.11" keywords = [ "VRF", "VRF-AD", "IETF VRF", "Pedersen VRF", "Ring Proof", "Ring VRF", "Signature", "Proof", @@ -23,6 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3 :: Only", @@ -59,6 +60,9 @@ dot_ring = [ "blst/*.pyd", ] +[tool.setuptools_scm] + + [project.optional-dependencies] dev = [ "pytest>=8.0.0", @@ -94,7 +98,7 @@ Issues = "https://github.com/chainscore/dot-ring/issues" Documentation = "https://github.com/chainscore/dot-ring#readme" [build-system] -requires = ["setuptools>=65.0.0", "wheel", "Cython>=3.0.0"] +requires = ["setuptools>=65.0.0,<77.0.0", "wheel", "Cython>=3.0.0", "setuptools_scm>=8.0"] build-backend = "setuptools.build_meta" [dependency-groups] diff --git a/scripts/generate_test_vectors.py b/scripts/generate_test_vectors.py index 028d3b7..074808d 100644 --- a/scripts/generate_test_vectors.py +++ b/scripts/generate_test_vectors.py @@ -133,9 +133,7 @@ def _get_point_len(self) -> int: point_len *= 2 return cast(int, point_len) - def generate_ietf_vector( - self, comment: str, seed: int, alpha: bytes, salt: bytes, ad: bytes - ) -> dict[str, Any]: + def generate_ietf_vector(self, comment: str, seed: int, alpha: bytes, salt: bytes, ad: bytes) -> dict[str, Any]: """Generate a single IETF VRF test vector.""" sk = self._secret_from_seed(seed) curve = self.curve @@ -175,9 +173,7 
@@ def generate_ietf_vector( "proof_s": s_bytes.hex(), } - def generate_pedersen_vector( - self, comment: str, seed: int, alpha: bytes, salt: bytes, ad: bytes - ) -> dict[str, Any]: + def generate_pedersen_vector(self, comment: str, seed: int, alpha: bytes, salt: bytes, ad: bytes) -> dict[str, Any]: """Generate a single Pedersen VRF test vector.""" sk = self._secret_from_seed(seed) curve = self.curve @@ -195,14 +191,10 @@ def generate_pedersen_vector( scalar_len = self._get_scalar_len() # Get blinding factor - sk_scalar = ( - Helpers.str_to_int(sk, self.curve.curve.ENDIAN) % self.curve.curve.ORDER - ) + sk_scalar = Helpers.str_to_int(sk, self.curve.curve.ENDIAN) % self.curve.curve.ORDER sk_bytes = sk_scalar.to_bytes(scalar_len, self.curve.curve.ENDIAN) blinding = PedersenVRF[curve].blinding(sk_bytes, h.point_to_string(), ad) # type: ignore[valid-type, misc] - blinding_bytes = Helpers.int_to_str( - blinding, self.curve.curve.ENDIAN, scalar_len - ) + blinding_bytes = Helpers.int_to_str(blinding, self.curve.curve.ENDIAN, scalar_len) s_bytes = Helpers.int_to_str(proof.s, self.curve.curve.ENDIAN, scalar_len) sb_bytes = Helpers.int_to_str(proof.sb, self.curve.curve.ENDIAN, scalar_len) @@ -267,23 +259,13 @@ def generate_ring_vector( scalar_len = self._get_scalar_len() # Get blinding factor - sk_scalar = ( - Helpers.str_to_int(sk, self.curve.curve.ENDIAN) % self.curve.curve.ORDER - ) + sk_scalar = Helpers.str_to_int(sk, self.curve.curve.ENDIAN) % self.curve.curve.ORDER sk_bytes = sk_scalar.to_bytes(scalar_len, self.curve.curve.ENDIAN) - blinding = PedersenVRF.__class_getitem__(curve).blinding( - sk_bytes, h.point_to_string(), ad - ) - blinding_bytes = Helpers.int_to_str( - blinding, self.curve.curve.ENDIAN, scalar_len - ) + blinding = PedersenVRF.__class_getitem__(curve).blinding(sk_bytes, h.point_to_string(), ad) + blinding_bytes = Helpers.int_to_str(blinding, self.curve.curve.ENDIAN, scalar_len) - s_bytes = Helpers.int_to_str( - ring_proof.pedersen_proof.s, 
self.curve.curve.ENDIAN, scalar_len - ) - sb_bytes = Helpers.int_to_str( - ring_proof.pedersen_proof.sb, self.curve.curve.ENDIAN, scalar_len - ) + s_bytes = Helpers.int_to_str(ring_proof.pedersen_proof.s, self.curve.curve.ENDIAN, scalar_len) + sb_bytes = Helpers.int_to_str(ring_proof.pedersen_proof.sb, self.curve.curve.ENDIAN, scalar_len) # Construct ring root # Construct ring root @@ -506,9 +488,7 @@ def generate_ring_vectors(generator: TestVectorGenerator) -> list[dict]: for i, (seed, alpha, ad, idx, _desc) in enumerate(test_cases, 1): comment = f"{suite} - vector-{i}" try: - vector = generator.generate_ring_vector( - comment, seed, alpha, b"", ad, prover_idx=idx - ) + vector = generator.generate_ring_vector(comment, seed, alpha, b"", ad, prover_idx=idx) vectors.append(vector) except Exception as e: print(f"Warning: Failed to generate ring vector {i}: {e}") @@ -537,9 +517,7 @@ def generate_ring_edge_case_vectors(generator: TestVectorGenerator) -> list[dict for i, (seed, alpha, ad, idx, desc) in enumerate(edge_cases, 1): comment = f"{suite} - edge-{i} - {desc}" try: - vector = generator.generate_ring_vector( - comment, seed, alpha, b"", ad, prover_idx=idx - ) + vector = generator.generate_ring_vector(comment, seed, alpha, b"", ad, prover_idx=idx) vectors.append(vector) except Exception as e: print(f"Warning: Failed to generate ring edge case {i} ({desc}): {e}") diff --git a/scripts/setup_cython.py b/scripts/setup_cython.py index 3530f2b..8155b68 100644 --- a/scripts/setup_cython.py +++ b/scripts/setup_cython.py @@ -5,11 +5,12 @@ python scripts/setup_cython.py build_ext --inplace """ -from Cython.Build import cythonize -from setuptools import Extension, setup import sys from pathlib import Path +from Cython.Build import cythonize +from setuptools import setup + # Add project root to sys.path sys.path.append(str(Path(__file__).parent.parent)) diff --git a/scripts/setup_env.py b/scripts/setup_env.py index 51d04e1..2f55ad5 100644 --- a/scripts/setup_env.py +++ 
b/scripts/setup_env.py @@ -25,9 +25,7 @@ def install_blst() -> None: if should_clone: print("Cloning blst...") - subprocess.check_call( - ["git", "clone", "https://github.com/supranational/blst.git", str(blst_dir)] - ) + subprocess.check_call(["git", "clone", "https://github.com/supranational/blst.git", str(blst_dir)]) # 2. Build python bindings print("Building blst python bindings...") @@ -50,12 +48,7 @@ def install_blst() -> None: if sys.prefix != sys.base_prefix: # We are in a venv if os.name == "posix": - site_packages = ( - Path(sys.prefix) - / "lib" - / f"python{sys.version_info.major}.{sys.version_info.minor}" - / "site-packages" - ) + site_packages = Path(sys.prefix) / "lib" / f"python{sys.version_info.major}.{sys.version_info.minor}" / "site-packages" elif os.name == "nt": site_packages = Path(sys.prefix) / "Lib" / "site-packages" @@ -112,9 +105,7 @@ def build_cython_extensions() -> None: print("Error: setup_cython.py not found in scripts/ or root.") sys.exit(1) - subprocess.check_call( - [sys.executable, str(setup_script), "build_ext", "--inplace"], cwd=root_dir - ) + subprocess.check_call([sys.executable, str(setup_script), "build_ext", "--inplace"], cwd=root_dir) print("Cython extensions built successfully.") diff --git a/scripts/vectors_print.py b/scripts/vectors_print.py index 92e58a1..fc63818 100644 --- a/scripts/vectors_print.py +++ b/scripts/vectors_print.py @@ -37,9 +37,7 @@ } -def print_entry( - entry: dict, key: str, max_length: int = 64, continuation_prefix: str = ".." -) -> None: +def print_entry(entry: dict, key: str, max_length: int = 64, continuation_prefix: str = "..") -> None: """Print a single entry, wrapping long values.""" value = entry.get(key, "-") if value is None: @@ -116,9 +114,7 @@ def compare_vectors(file1: str, file2: str) -> None: val1 = v1.get(key) val2 = v2.get(key) if val1 != val2: - diffs.append( - f" {key}: {val1[:20] if val1 else '-'}... != {val2[:20] if val2 else '-'}..." 
- ) + diffs.append(f" {key}: {val1[:20] if val1 else '-'}... != {val2[:20] if val2 else '-'}...") if diffs: print(f"Vector {i + 1}: DIFFERS") diff --git a/setup.py b/setup.py index beb8146..741594e 100644 --- a/setup.py +++ b/setup.py @@ -23,6 +23,9 @@ def get_compile_args() -> list[str]: + if sys.platform == "win32": + return ["/O2", "/arch:AVX2", "/D_CRT_SECURE_NO_WARNINGS"] + args = ["-O3", "-ffast-math", "-flto"] if sys.platform != "darwin": args.append("-march=native") @@ -59,7 +62,7 @@ def build_cython_extensions() -> list[Extension]: ], include_dirs=["dot_ring/curve/native_field"], extra_compile_args=compile_args, - extra_link_args=["-flto"], + extra_link_args=[] if sys.platform == "win32" else ["-flto"], ), Extension( "dot_ring.curve.native_field.vector_ops", @@ -69,7 +72,7 @@ def build_cython_extensions() -> list[Extension]: ], include_dirs=["dot_ring/curve/native_field"], extra_compile_args=compile_args, - extra_link_args=["-flto"], + extra_link_args=[] if sys.platform == "win32" else ["-flto"], ), ] @@ -139,7 +142,7 @@ def build_blst(self) -> None: os.chmod(run_me, 0o755) if sys.platform == "win32": - subprocess.check_call(["sh", str(run_me)], cwd=bindings_dir) + subprocess.check_call([sys.executable, str(run_me)], cwd=bindings_dir) else: subprocess.check_call([str(run_me)], cwd=bindings_dir) diff --git a/tests/benchmark/bench_ietf.py b/tests/benchmark/bench_ietf.py index 4b38d60..b272cb8 100644 --- a/tests/benchmark/bench_ietf.py +++ b/tests/benchmark/bench_ietf.py @@ -21,12 +21,7 @@ def load_test_data(): """Load test vector data.""" - vector_path = ( - Path(__file__).parent - / "vectors" - / "ark-vrf" - / "bandersnatch_ed_sha512_ell2_ietf.json" - ) + vector_path = Path(__file__).parent / "vectors" / "ark-vrf" / "bandersnatch_ed_sha512_ell2_ietf.json" with open(vector_path) as f: return json.load(f)[0] diff --git a/tests/benchmark/bench_pedersen.py b/tests/benchmark/bench_pedersen.py index ade70c2..661f223 100644 --- a/tests/benchmark/bench_pedersen.py 
+++ b/tests/benchmark/bench_pedersen.py @@ -21,12 +21,7 @@ def load_test_data(): """Load test vector data.""" - vector_path = ( - Path(__file__).parent - / "vectors" - / "ark-vrf" - / "bandersnatch_ed_sha512_ell2_pedersen.json" - ) + vector_path = Path(__file__).parent / "vectors" / "ark-vrf" / "bandersnatch_ed_sha512_ell2_pedersen.json" with open(vector_path) as f: return json.load(f)[0] diff --git a/tests/benchmark/bench_ring_proof.py b/tests/benchmark/bench_ring_proof.py index 5ab3747..e45c422 100644 --- a/tests/benchmark/bench_ring_proof.py +++ b/tests/benchmark/bench_ring_proof.py @@ -25,12 +25,7 @@ def load_test_data(): """Load test vector data.""" - vector_path = ( - Path(__file__).parent - / "vectors" - / "ark-vrf" - / "bandersnatch_ed_sha512_ell2_ring.json" - ) + vector_path = Path(__file__).parent / "vectors" / "ark-vrf" / "bandersnatch_ed_sha512_ell2_ring.json" with open(vector_path) as f: return json.load(f)[0] diff --git a/tests/benchmark/test_bench_ietf.py b/tests/benchmark/test_bench_ietf.py index 9a283a8..4cf6e6b 100644 --- a/tests/benchmark/test_bench_ietf.py +++ b/tests/benchmark/test_bench_ietf.py @@ -17,9 +17,7 @@ def load_test_data(): """Load test vectors from JSON file - returns only first test case""" - file_path = os.path.join( - HERE, "../vectors", "ark-vrf/bandersnatch_ed_sha512_ell2_ietf.json" - ) + file_path = os.path.join(HERE, "../vectors", "ark-vrf/bandersnatch_ed_sha512_ell2_ietf.json") with open(file_path) as f: data = json.load(f) return [data[0]] # Return only first test case diff --git a/tests/benchmark/test_bench_pedersen.py b/tests/benchmark/test_bench_pedersen.py index 3c7c7d3..f35826e 100644 --- a/tests/benchmark/test_bench_pedersen.py +++ b/tests/benchmark/test_bench_pedersen.py @@ -17,9 +17,7 @@ def load_test_data(): """Load test vectors from JSON file - returns only first test case""" - file_path = os.path.join( - HERE, "../vectors", "ark-vrf/bandersnatch_ed_sha512_ell2_pedersen.json" - ) + file_path = os.path.join(HERE, 
"../vectors", "ark-vrf/bandersnatch_ed_sha512_ell2_pedersen.json") with open(file_path) as f: data = json.load(f) return [data[0]] # Return only first test case diff --git a/tests/benchmark/test_bench_ring.py b/tests/benchmark/test_bench_ring.py index f66edbb..2bc64e9 100644 --- a/tests/benchmark/test_bench_ring.py +++ b/tests/benchmark/test_bench_ring.py @@ -49,7 +49,7 @@ def test_bench_ring_prove(): sort_by="cumulative", limit=25, ): - ring_vrf_proof = RingVRF[Bandersnatch].prove(alpha, ad, s_k, p_k, keys) + _ = RingVRF[Bandersnatch].prove(alpha, ad, s_k, p_k, keys) # Verify correctness assert p_k.hex() == item["pk"], "Invalid Public Key" @@ -82,13 +82,7 @@ def test_bench_ring_verify(): ring_root_bytes = bytes.fromhex(item["ring_pks_com"]) proof_hex = ( - item["gamma"] - + item["proof_pk_com"] - + item["proof_r"] - + item["proof_ok"] - + item["proof_s"] - + item["proof_sb"] - + item["ring_proof"] + item["gamma"] + item["proof_pk_com"] + item["proof_r"] + item["proof_ok"] + item["proof_s"] + item["proof_sb"] + item["ring_proof"] ) proof_bytes = bytes.fromhex(proof_hex) diff --git a/tests/benchmark/test_bench_ring_proof.py b/tests/benchmark/test_bench_ring_proof.py index 8c70fc6..a8a9cc3 100644 --- a/tests/benchmark/test_bench_ring_proof.py +++ b/tests/benchmark/test_bench_ring_proof.py @@ -1,60 +1,44 @@ -import pytest import sys from pathlib import Path +import pytest + # Add blst to path if needed sys.path.insert(0, str(Path(__file__).parent.parent / "dot_ring" / "blst" / "bindings" / "python")) from dot_ring import Bandersnatch from dot_ring.vrf.ring.ring_vrf import RingVRF + # MAX_RING_SIZE is 255 per the constraint system @pytest.fixture(scope="module", params=[255]) def ring_data(request): ring_size = request.param - + s_k = b"secret_key_seed" * 2 s_k = s_k[:32] p_k = RingVRF[Bandersnatch].get_public_key(s_k) - + keys = [p_k] for i in range(ring_size - 1): - sk_i = (int.from_bytes(s_k, 'little') + i + 1).to_bytes(32, 'little') + sk_i = (int.from_bytes(s_k, 
"little") + i + 1).to_bytes(32, "little") pk_i = RingVRF[Bandersnatch].get_public_key(sk_i) keys.append(pk_i) - + alpha = b"test_message" ad = b"test_ad" - + ring_root = RingVRF[Bandersnatch].construct_ring_root(keys) - + # Pre-calculate a proof for verification benchmark proof = RingVRF[Bandersnatch].prove(alpha, ad, s_k, p_k, keys) - - return { - "s_k": s_k, - "p_k": p_k, - "keys": keys, - "alpha": alpha, - "ad": ad, - "ring_root": ring_root, - "proof": proof - } + + return {"s_k": s_k, "p_k": p_k, "keys": keys, "alpha": alpha, "ad": ad, "ring_root": ring_root, "proof": proof} + def test_prove(benchmark, ring_data): - benchmark( - RingVRF[Bandersnatch].prove, - ring_data["alpha"], - ring_data["ad"], - ring_data["s_k"], - ring_data["p_k"], - ring_data["keys"] - ) + benchmark(RingVRF[Bandersnatch].prove, ring_data["alpha"], ring_data["ad"], ring_data["s_k"], ring_data["p_k"], ring_data["keys"]) + def test_verify(benchmark, ring_data): - benchmark( - ring_data["proof"].verify, - ring_data["alpha"], - ring_data["ad"], - ring_data["ring_root"] - ) + benchmark(ring_data["proof"].verify, ring_data["alpha"], ring_data["ad"], ring_data["ring_root"]) diff --git a/tests/test_bandersnatch_ark.py b/tests/test_bandersnatch_ark.py index 24c1dc2..d4218e4 100644 --- a/tests/test_bandersnatch_ark.py +++ b/tests/test_bandersnatch_ark.py @@ -13,9 +13,7 @@ def test_ietf_ark_bandersnatch(): - data_dir = os.path.join( - HERE, "vectors/ark-vrf/bandersnatch_ed_sha512_ell2_ietf.json" - ) + data_dir = os.path.join(HERE, "vectors/ark-vrf/bandersnatch_ed_sha512_ell2_ietf.json") data_dir = os.path.abspath(data_dir) gamma_len = 32 @@ -53,10 +51,7 @@ def test_ietf_ark_bandersnatch(): assert proof_s.hex() == vector["proof_s"] if "beta" in vector: - assert ( - IETF_VRF[Bandersnatch].ecvrf_proof_to_hash(proof_bytes).hex() - == vector["beta"] - ) + assert IETF_VRF[Bandersnatch].ecvrf_proof_to_hash(proof_bytes).hex() == vector["beta"] assert proof.verify(pk_bytes, alpha, additional_data) assert 
proof_rt.to_bytes() == proof_bytes @@ -64,9 +59,7 @@ def test_ietf_ark_bandersnatch(): def test_pedersen_ark_bandersnatch(): - data_dir = os.path.join( - HERE, "vectors/ark-vrf/bandersnatch_ed_sha512_ell2_pedersen.json" - ) + data_dir = os.path.join(HERE, "vectors/ark-vrf/bandersnatch_ed_sha512_ell2_pedersen.json") data_dir = os.path.abspath(data_dir) with open(data_dir) as f: @@ -86,9 +79,7 @@ def test_pedersen_ark_bandersnatch(): if "h" in vector: assert input_point.point_to_string().hex() == vector["h"] - proof = PedersenVRF[Bandersnatch].prove( - alpha, secret_scalar, additional_data - ) + proof = PedersenVRF[Bandersnatch].prove(alpha, secret_scalar, additional_data) proof_bytes = proof.to_bytes() proof_rt = PedersenVRF[Bandersnatch].from_bytes(proof_bytes) @@ -105,20 +96,10 @@ def test_pedersen_ark_bandersnatch(): (Bandersnatch.curve.PRIME_FIELD.bit_length() + 7) // 8, Bandersnatch.curve.ENDIAN, ) == bytes.fromhex(vector["proof_sb"]) - assert ( - PedersenVRF[Bandersnatch] - .ecvrf_proof_to_hash(proof.output_point.point_to_string()) - .hex() - == vector["beta"] - ) + assert PedersenVRF[Bandersnatch].ecvrf_proof_to_hash(proof.output_point.point_to_string()).hex() == vector["beta"] if "beta" in vector: - assert ( - PedersenVRF[Bandersnatch] - .ecvrf_proof_to_hash(proof.output_point.point_to_string()) - .hex() - == vector["beta"] - ) + assert PedersenVRF[Bandersnatch].ecvrf_proof_to_hash(proof.output_point.point_to_string()).hex() == vector["beta"] assert proof.verify(alpha, additional_data) assert proof_rt.to_bytes() == proof_bytes @@ -126,9 +107,7 @@ def test_pedersen_ark_bandersnatch(): def test_ring_proof(): - file_path = os.path.join( - HERE, "vectors/ark-vrf/bandersnatch_ed_sha512_ell2_ring.json" - ) + file_path = os.path.join(HERE, "vectors/ark-vrf/bandersnatch_ed_sha512_ell2_ring.json") with open(file_path) as f: data = json.load(f) for index in range(len(data)): @@ -142,15 +121,11 @@ def test_ring_proof(): start = time() ring_root = 
RingVRF[Bandersnatch].construct_ring_root(keys) ring_time = time() - print( - f"\nTime taken for Ring Root Construction: \t\t {1000 * (ring_time - start):.2f} ms" - ) + print(f"\nTime taken for Ring Root Construction: \t\t {1000 * (ring_time - start):.2f} ms") p_k = RingVRF[Bandersnatch].get_public_key(s_k) ring_vrf_proof = RingVRF[Bandersnatch].prove(alpha, ad, s_k, p_k, keys) pk_time = time() - print( - f"Time taken for Proof Generation: \t {1000 * (pk_time - ring_time):.2f} ms" - ) + print(f"Time taken for Proof Generation: \t {1000 * (pk_time - ring_time):.2f} ms") proof_bytes = ring_vrf_proof.to_bytes() proof_rt = RingVRF[Bandersnatch].from_bytes(proof_bytes) @@ -158,13 +133,7 @@ def test_ring_proof(): assert ring_root.to_bytes().hex() == item["ring_pks_com"], "Invalid Ring Root" assert ( ring_vrf_proof.to_bytes().hex() - == item["gamma"] - + item["proof_pk_com"] - + item["proof_r"] - + item["proof_ok"] - + item["proof_s"] - + item["proof_sb"] - + item["ring_proof"] + == item["gamma"] + item["proof_pk_com"] + item["proof_r"] + item["proof_ok"] + item["proof_s"] + item["proof_sb"] + item["ring_proof"] ), "Unexpected Proof" assert ring_vrf_proof.verify(alpha, ad, ring_root), "Verification Failed" @@ -172,7 +141,5 @@ def test_ring_proof(): start = time() assert proof_rt.verify(alpha, ad, ring_root) verify_time = time() - print( - f"Time taken for Proof Verification: \t {1000 * (verify_time - start):.2f} ms" - ) + print(f"Time taken for Proof Verification: \t {1000 * (verify_time - start):.2f} ms") print(f"✅ Testcase {index + 1} of {os.path.basename(file_path)}") diff --git a/tests/test_coverage/test_bls12_381_g2.py b/tests/test_coverage/test_bls12_381_g2.py index 2436626..283c30f 100644 --- a/tests/test_coverage/test_bls12_381_g2.py +++ b/tests/test_coverage/test_bls12_381_g2.py @@ -1,12 +1,9 @@ """Additional tests for BLS12-381 G2 module.""" -import pytest - from dot_ring.curve.specs.bls12_381_G2 import ( + BLS12_381_G2_RO, BLS12_381_G2Curve, BLS12_381_G2Params, 
- BLS12_381_G2Point, - BLS12_381_G2_RO, ) @@ -16,7 +13,7 @@ class TestBLS12381G2Curve: def test_curve_parameters(self): """Test curve parameters are set.""" curve = BLS12_381_G2Curve() - + assert curve.PRIME_FIELD is not None assert curve.ORDER is not None assert curve.COFACTOR is not None @@ -24,7 +21,7 @@ def test_curve_parameters(self): def test_curve_generator(self): """Test generator point.""" curve = BLS12_381_G2Curve() - + assert curve.GENERATOR_X is not None assert curve.GENERATOR_Y is not None diff --git a/tests/test_coverage/test_blst.py b/tests/test_coverage/test_blst.py index 9ad8ca9..bb0e91a 100644 --- a/tests/test_coverage/test_blst.py +++ b/tests/test_coverage/test_blst.py @@ -1,7 +1,5 @@ """Additional tests for blst module to improve coverage.""" -import pytest - from dot_ring import blst diff --git a/tests/test_coverage/test_curve_base.py b/tests/test_coverage/test_curve_base.py index 0c84848..1a589a4 100644 --- a/tests/test_coverage/test_curve_base.py +++ b/tests/test_coverage/test_curve_base.py @@ -1,10 +1,7 @@ """Additional tests for curve.py module to improve coverage.""" -import pytest - -from dot_ring.curve.curve import CurveVariant -from dot_ring.curve.specs.ed25519 import Ed25519_RO, Ed25519Curve from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve +from dot_ring.curve.specs.ed25519 import Ed25519_RO, Ed25519Curve class TestCurveVariant: @@ -17,7 +14,7 @@ def test_variant_has_curve(self): def test_bandersnatch_te_curve_params(self): """Test Bandersnatch TE curve parameters.""" curve = Bandersnatch_TE_Curve - + assert curve.PRIME_FIELD is not None assert curve.ORDER is not None @@ -28,7 +25,7 @@ class TestEdwardsCurve: def test_ed25519_curve_params(self): """Test Ed25519 curve parameters.""" curve = Ed25519Curve() - + assert curve.PRIME_FIELD is not None assert curve.ORDER is not None assert curve.GENERATOR_X is not None @@ -37,7 +34,7 @@ def test_ed25519_curve_params(self): def test_ed25519_curve_cofactor(self): """Test 
Ed25519 cofactor.""" curve = Ed25519Curve() - + assert curve.COFACTOR == 8 def test_ed25519_curve_name(self): diff --git a/tests/test_coverage/test_fft.py b/tests/test_coverage/test_fft.py index 71c6888..b2e45f3 100644 --- a/tests/test_coverage/test_fft.py +++ b/tests/test_coverage/test_fft.py @@ -1,7 +1,6 @@ """Tests for FFT module to improve coverage.""" -import pytest - +from dot_ring.ring_proof.constants import OMEGA, S_PRIME from dot_ring.ring_proof.polynomial.fft import ( _fft_in_place, _get_bit_reverse, @@ -11,7 +10,6 @@ evaluate_poly_over_domain, inverse_fft, ) -from dot_ring.ring_proof.constants import OMEGA, S_PRIME class TestFFT: @@ -20,7 +18,7 @@ class TestFFT: # Test constants - using a smaller prime for easier testing SMALL_PRIME = 17 SMALL_OMEGA = 3 # primitive 4th root of unity mod 17 (3^4 = 81 = 4*17 + 13... let's use a proper one) - + # Actually for mod 17: 17-1 = 16 = 2^4, so primitive roots exist # 3^4 mod 17 = 81 mod 17 = 13, 3^8 mod 17 = 16, 3^16 mod 17 = 1 # So 3 is a primitive 16th root of unity @@ -58,16 +56,16 @@ def test_get_twiddle_factors_basic(self): n = 4 omega = 4 # 4th root of unity mod 17 prime = 17 - + twiddles = _get_twiddle_factors(n, omega, prime) - + # Should have log2(4) = 2 stages assert len(twiddles) == 2 - + # Stage 0 (m=2): half_m=1, stride=2, w_step = omega^2 = 16 # twiddles[0] = [1] assert twiddles[0] == [1] - + # Stage 1 (m=4): half_m=2, stride=1, w_step = omega^1 = 4 # twiddles[1] = [1, 4] assert twiddles[1] == [1, 4] @@ -77,14 +75,14 @@ def test_get_roots(self): n = 8 omega = 4 prime = 17 - + # omega^4 mod 17 = 256 mod 17 = 1, but we need 8th root # Let's find 8th root: need omega^8 = 1 mod 17 # 2^8 = 256 mod 17 = 1. Yes! 
2 is 8th root omega = 2 - + roots = _get_roots(n, omega, prime) - + # Should have n/2 = 4 roots assert len(roots) == 4 # roots[i] = omega^i @@ -107,7 +105,7 @@ def test_fft_in_place_size_2(self): # Use larger prime field for proper FFT omega_2 = pow(OMEGA, 512 // 2, S_PRIME) # Scale down from 512-th root coeffs = [3, 5] - + _fft_in_place(coeffs, omega_2, S_PRIME) # Just verify the result has same length assert len(coeffs) == 2 @@ -116,19 +114,18 @@ def test_inverse_fft(self): """Test inverse FFT recovers original coefficients.""" # Use the actual module constants original = [1, 2, 3, 4, 5, 6, 7, 8] - n = len(original) - + # Need to find proper omega for size 8 in S_PRIME # Using module omega scaled down omega_8 = pow(OMEGA, 512 // 8, S_PRIME) # Scale down from 512-th root - + # Compute FFT values = original[:] _fft_in_place(values, omega_8, S_PRIME) - + # Compute inverse FFT recovered = inverse_fft(values, omega_8, S_PRIME) - + assert recovered == original def test_evaluate_poly_over_domain_basic(self): @@ -139,9 +136,9 @@ def test_evaluate_poly_over_domain_basic(self): omega = 4 # 4th root of unity mod 17 prime = 17 domain = [pow(omega, i, prime) for i in range(n)] # [1, 4, 16, 13] - + result = evaluate_poly_over_domain(poly, domain, omega, prime) - + # Should evaluate at each domain point # At x=1: 1 + 2*1 = 3 # At x=4: 1 + 2*4 = 9 @@ -156,9 +153,9 @@ def test_evaluate_poly_over_domain_with_padding(self): omega = 4 prime = 17 domain = [pow(omega, i, prime) for i in range(n)] - + result = evaluate_poly_over_domain(poly, domain, omega, prime) - + # Constant polynomial evaluates to same value everywhere assert len(result) == 4 @@ -170,9 +167,9 @@ def test_evaluate_poly_over_domain_with_folding(self): omega = 4 prime = 17 domain = [pow(omega, i, prime) for i in range(n)] - + result = evaluate_poly_over_domain(poly, domain, omega, prime) - + # Result should fold coefficients assert len(result) == 4 @@ -182,9 +179,9 @@ def test_evaluate_poly_fft_standard(self): 
domain_size = 4 omega = 4 prime = 17 - + result = evaluate_poly_fft(poly, domain_size, omega, prime, coset_offset=1) - + assert len(result) == domain_size def test_evaluate_poly_fft_with_coset(self): @@ -194,9 +191,9 @@ def test_evaluate_poly_fft_with_coset(self): omega = 4 prime = 17 coset_offset = 2 # non-trivial coset - + result = evaluate_poly_fft(poly, domain_size, omega, prime, coset_offset=coset_offset) - + assert len(result) == domain_size def test_evaluate_poly_fft_larger_poly(self): @@ -206,9 +203,9 @@ def test_evaluate_poly_fft_larger_poly(self): domain_size = 4 omega = 4 prime = 17 - + result = evaluate_poly_fft(poly, domain_size, omega, prime, coset_offset=1) - + assert len(result) == domain_size def test_evaluate_poly_fft_larger_poly_with_coset(self): @@ -218,9 +215,9 @@ def test_evaluate_poly_fft_larger_poly_with_coset(self): omega = 4 prime = 17 coset_offset = 3 - + result = evaluate_poly_fft(poly, domain_size, omega, prime, coset_offset=coset_offset) - + assert len(result) == domain_size def test_fft_roundtrip_with_real_constants(self): @@ -228,16 +225,16 @@ def test_fft_roundtrip_with_real_constants(self): # Use actual constants from the module n = 8 omega_n = pow(OMEGA, 512 // n, S_PRIME) - + original = [i * 12345 for i in range(n)] # Some test values - + # Forward FFT values = original[:] _fft_in_place(values, omega_n, S_PRIME) - + # Inverse FFT recovered = inverse_fft(values, omega_n, S_PRIME) - + assert recovered == original def test_caching_bit_reverse(self): @@ -246,7 +243,7 @@ def test_caching_bit_reverse(self): result1 = _get_bit_reverse(16) # Second call should return cached result result2 = _get_bit_reverse(16) - + assert result1 == result2 # Check cache is being used (same object) assert result1 is result2 @@ -255,7 +252,7 @@ def test_caching_twiddle_factors(self): """Test that twiddle factor caching works correctly.""" result1 = _get_twiddle_factors(8, OMEGA, S_PRIME) result2 = _get_twiddle_factors(8, OMEGA, S_PRIME) - + assert result1 
== result2 assert result1 is result2 # Same cached object @@ -263,6 +260,6 @@ def test_caching_roots(self): """Test that roots caching works correctly.""" result1 = _get_roots(16, OMEGA, S_PRIME) result2 = _get_roots(16, OMEGA, S_PRIME) - + assert result1 == result2 assert result1 is result2 diff --git a/tests/test_coverage/test_field_element.py b/tests/test_coverage/test_field_element.py index b99d6b6..b264f4f 100644 --- a/tests/test_coverage/test_field_element.py +++ b/tests/test_coverage/test_field_element.py @@ -111,7 +111,7 @@ def test_mul_fp2_elements(self): b = FieldElement(2, 4, 17) result = a * b assert result.re == 15 # (10 - 12) mod 17 = -2 mod 17 = 15 - assert result.im == 9 # (20 + 6) mod 17 = 26 mod 17 = 9 + assert result.im == 9 # (20 + 6) mod 17 = 26 mod 17 = 9 def test_mul_with_int(self): """Test multiplication with integer.""" @@ -193,13 +193,13 @@ class TestFieldElementPower: def test_pow_positive(self): """Test positive exponent.""" a = FieldElement(2, 0, 17) - result = a ** 4 + result = a**4 assert result.re == 16 # 2^4 def test_pow_zero(self): """Test zero exponent.""" a = FieldElement(5, 0, 17) - result = a ** 0 + result = a**0 assert result.re == 1 assert result.im == 0 @@ -222,7 +222,7 @@ def test_pow_negative_larger(self): def test_pow_fp2(self): """Test power of Fp2 element.""" a = FieldElement(2, 3, 17) - result = a ** 3 + result = a**3 # Just verify it computes without error assert result is not None @@ -230,7 +230,7 @@ def test_pow_invalid_type_raises(self): """Test that non-integer exponent raises TypeError.""" a = FieldElement(2, 0, 17) with pytest.raises(TypeError, match="Exponent must be an integer"): - _ = a ** 2.5 # type: ignore + _ = a**2.5 # type: ignore class TestFieldElementComparison: @@ -242,7 +242,7 @@ def test_eq_field_elements(self): b = FieldElement(5, 3, 17) c = FieldElement(5, 4, 17) d = FieldElement(6, 3, 17) - + assert a == b assert not (a == c) assert not (a == d) @@ -282,7 +282,7 @@ def test_is_zero_false(self): 
"""Test is_zero returns False for non-zero element.""" a = FieldElement(5, 0, 17) assert a.is_zero() is False - + b = FieldElement(0, 3, 17) assert b.is_zero() is False diff --git a/tests/test_coverage/test_glv.py b/tests/test_coverage/test_glv.py index a3adb82..40f0568 100644 --- a/tests/test_coverage/test_glv.py +++ b/tests/test_coverage/test_glv.py @@ -1,7 +1,5 @@ """Additional tests for GLV module to improve coverage.""" -import pytest - from dot_ring.curve.glv import GLV @@ -11,18 +9,18 @@ class TestGLVScalarDecomposition: def test_decompose_scalar_small(self): """Test scalar decomposition with small scalar.""" from dot_ring.curve.specs.bandersnatch import BandersnatchParams - + glv = GLV( lambda_param=BandersnatchParams.GLV_LAMBDA, constant_b=BandersnatchParams.GLV_B, constant_c=BandersnatchParams.GLV_C, ) - + scalar = 12345 n = BandersnatchParams.ORDER - + k1, k2 = glv.decompose_scalar(scalar, n) - + # k1 + k2 * lambda should equal scalar mod n lambda_param = BandersnatchParams.GLV_LAMBDA reconstructed = (k1 + k2 * lambda_param) % n @@ -31,19 +29,19 @@ def test_decompose_scalar_small(self): def test_decompose_scalar_large(self): """Test scalar decomposition with large scalar.""" from dot_ring.curve.specs.bandersnatch import BandersnatchParams - + glv = GLV( lambda_param=BandersnatchParams.GLV_LAMBDA, constant_b=BandersnatchParams.GLV_B, constant_c=BandersnatchParams.GLV_C, ) - + # Use a large scalar scalar = 2**200 + 12345 n = BandersnatchParams.ORDER - + k1, k2 = glv.decompose_scalar(scalar % n, n) - + # k1 + k2 * lambda should equal scalar mod n lambda_param = BandersnatchParams.GLV_LAMBDA reconstructed = (k1 + k2 * lambda_param) % n @@ -52,18 +50,18 @@ def test_decompose_scalar_large(self): def test_decompose_scalar_zero(self): """Test scalar decomposition with zero scalar.""" from dot_ring.curve.specs.bandersnatch import BandersnatchParams - + glv = GLV( lambda_param=BandersnatchParams.GLV_LAMBDA, constant_b=BandersnatchParams.GLV_B, 
constant_c=BandersnatchParams.GLV_C, ) - + scalar = 0 n = BandersnatchParams.ORDER - + k1, k2 = glv.decompose_scalar(scalar, n) - + # 0 = k1 + k2 * lambda mod n lambda_param = BandersnatchParams.GLV_LAMBDA reconstructed = (k1 + k2 * lambda_param) % n @@ -75,29 +73,24 @@ class TestGLVEndomorphism: def test_compute_endomorphism(self): """Test computing endomorphism.""" - from dot_ring.curve.specs.bandersnatch import BandersnatchPoint, Bandersnatch_TE_Curve, BandersnatchGLV - + from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve, BandersnatchGLV, BandersnatchPoint + G = BandersnatchPoint(Bandersnatch_TE_Curve.GENERATOR_X, Bandersnatch_TE_Curve.GENERATOR_Y) - + phi_G = BandersnatchGLV.compute_endomorphism(G) - + assert phi_G is not None assert not phi_G.is_identity() def test_endomorphism_lambda_times_generator(self): """Test that phi(P) = lambda * P for the endomorphism.""" - from dot_ring.curve.specs.bandersnatch import ( - BandersnatchPoint, - Bandersnatch_TE_Curve, - BandersnatchGLV, - BandersnatchParams - ) - + from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve, BandersnatchGLV, BandersnatchParams, BandersnatchPoint + G = BandersnatchPoint(Bandersnatch_TE_Curve.GENERATOR_X, Bandersnatch_TE_Curve.GENERATOR_Y) - + phi_G = BandersnatchGLV.compute_endomorphism(G) lambda_G = G * BandersnatchParams.GLV_LAMBDA - + # phi(G) should equal lambda * G assert phi_G == lambda_G @@ -107,60 +100,60 @@ class TestGLVWindowedMult: def test_windowed_simultaneous_mult_basic(self): """Test windowed simultaneous multiplication.""" - from dot_ring.curve.specs.bandersnatch import BandersnatchPoint, Bandersnatch_TE_Curve, BandersnatchGLV - + from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve, BandersnatchGLV, BandersnatchPoint + G = BandersnatchPoint(Bandersnatch_TE_Curve.GENERATOR_X, Bandersnatch_TE_Curve.GENERATOR_Y) phi_G = BandersnatchGLV.compute_endomorphism(G) - + k1 = 100 k2 = 200 - + result = 
BandersnatchGLV.windowed_simultaneous_mult(k1, k2, G, phi_G, w=2) - + expected = G * k1 + phi_G * k2 assert result == expected def test_windowed_mult_with_negative_k1(self): """Test windowed mult with negative k1.""" - from dot_ring.curve.specs.bandersnatch import BandersnatchPoint, Bandersnatch_TE_Curve, BandersnatchGLV - + from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve, BandersnatchGLV, BandersnatchPoint + G = BandersnatchPoint(Bandersnatch_TE_Curve.GENERATOR_X, Bandersnatch_TE_Curve.GENERATOR_Y) phi_G = BandersnatchGLV.compute_endomorphism(G) - + k1 = -50 k2 = 100 - + result = BandersnatchGLV.windowed_simultaneous_mult(k1, k2, G, phi_G, w=2) - + expected = G * k1 + phi_G * k2 assert result == expected def test_windowed_mult_with_negative_k2(self): """Test windowed mult with negative k2.""" - from dot_ring.curve.specs.bandersnatch import BandersnatchPoint, Bandersnatch_TE_Curve, BandersnatchGLV - + from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve, BandersnatchGLV, BandersnatchPoint + G = BandersnatchPoint(Bandersnatch_TE_Curve.GENERATOR_X, Bandersnatch_TE_Curve.GENERATOR_Y) phi_G = BandersnatchGLV.compute_endomorphism(G) - + k1 = 100 k2 = -75 - + result = BandersnatchGLV.windowed_simultaneous_mult(k1, k2, G, phi_G, w=2) - + expected = G * k1 + phi_G * k2 assert result == expected def test_windowed_mult_both_negative(self): """Test windowed mult with both k1 and k2 negative.""" - from dot_ring.curve.specs.bandersnatch import BandersnatchPoint, Bandersnatch_TE_Curve, BandersnatchGLV - + from dot_ring.curve.specs.bandersnatch import Bandersnatch_TE_Curve, BandersnatchGLV, BandersnatchPoint + G = BandersnatchPoint(Bandersnatch_TE_Curve.GENERATOR_X, Bandersnatch_TE_Curve.GENERATOR_Y) phi_G = BandersnatchGLV.compute_endomorphism(G) - + k1 = -30 k2 = -40 - + result = BandersnatchGLV.windowed_simultaneous_mult(k1, k2, G, phi_G, w=2) - + expected = G * k1 + phi_G * k2 assert result == expected diff --git 
a/tests/test_coverage/test_helpers.py b/tests/test_coverage/test_helpers.py index 46c4bc9..8c103eb 100644 --- a/tests/test_coverage/test_helpers.py +++ b/tests/test_coverage/test_helpers.py @@ -1,7 +1,6 @@ """Tests for helpers module to improve coverage.""" -import pytest -from py_ecc.optimized_bls12_381 import FQ, FQ2 +from py_ecc.optimized_bls12_381 import FQ from dot_ring.ring_proof.helpers import Helpers as H @@ -222,10 +221,10 @@ def test_bls_g1_compress_affine(self): # This is the generator point of BLS12-381 G1 x = 3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507 y = 1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569 - + point = (x, y) result = H.bls_g1_compress(point) - + assert isinstance(result, str) assert len(result) == 96 # 48 bytes in hex @@ -234,10 +233,10 @@ def test_bls_g1_compress_projective(self): x = 3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507 y = 1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569 z = 1 - + point = (x, y, z) result = H.bls_g1_compress(point) - + assert isinstance(result, str) assert len(result) == 96 @@ -246,13 +245,13 @@ def test_bls_g1_decompress_bytes(self): # Compress a point first, then decompress x = 3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507 y = 1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569 - + point = (x, y) compressed_hex = H.bls_g1_compress(point) compressed_bytes = bytes.fromhex(compressed_hex) - + result = H.bls_g1_decompress(compressed_bytes) - + assert isinstance(result, tuple) assert len(result) == 3 # (FQ, FQ, FQ) @@ -260,12 +259,12 @@ def test_bls_g1_decompress_hex_string(self): """Test G1 point 
decompression from hex string.""" x = 3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507 y = 1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569 - + point = (x, y) compressed_hex = H.bls_g1_compress(point) - + result = H.bls_g1_decompress(compressed_hex) - + assert isinstance(result, tuple) assert len(result) == 3 @@ -274,23 +273,23 @@ def test_bls_g2_compress_projective(self): # Use a simple G2 point structure # G2 points have FQ2 coordinates from py_ecc.optimized_bls12_381 import G2 - + result = H.bls_g2_compress(G2) - + assert isinstance(result, str) assert len(result) == 192 # 96 bytes in hex (2x 48 bytes) def test_bls_g2_compress_affine(self): """Test G2 point compression with affine coordinates.""" from py_ecc.optimized_bls12_381 import G2, normalize - + # Normalize to get affine coordinates g2_affine = normalize(G2) x, y = g2_affine - + # Create 2D point point = (x, y) result = H.bls_g2_compress(point) - + assert isinstance(result, str) assert len(result) == 192 diff --git a/tests/test_coverage/test_interpolation.py b/tests/test_coverage/test_interpolation.py index c9289fa..26116dd 100644 --- a/tests/test_coverage/test_interpolation.py +++ b/tests/test_coverage/test_interpolation.py @@ -1,9 +1,7 @@ """Additional tests for interpolation module to improve coverage.""" -import pytest - -from dot_ring.ring_proof.polynomial.interpolation import poly_interpolate_fft from dot_ring.ring_proof.constants import OMEGA, S_PRIME +from dot_ring.ring_proof.polynomial.interpolation import poly_interpolate_fft class TestInterpolation: @@ -13,26 +11,26 @@ def test_poly_interpolate_fft_power_of_two(self): """Test FFT interpolation with power of 2 size.""" # 8 values y_values = [1, 2, 3, 4, 5, 6, 7, 8] - + # Use real constants coeffs = poly_interpolate_fft(y_values, OMEGA, S_PRIME) - + assert coeffs is not None def 
test_poly_interpolate_fft_single_value(self): """Test FFT interpolation with single value.""" y_values = [42] - + coeffs = poly_interpolate_fft(y_values, OMEGA, S_PRIME) - + assert coeffs is not None def test_poly_interpolate_fft_zeros(self): """Test FFT interpolation with all zeros.""" y_values = [0, 0, 0, 0] - + coeffs = poly_interpolate_fft(y_values, OMEGA, S_PRIME) - + assert coeffs is not None # All zero input should give all zero coeffs assert all(c == 0 for c in coeffs[:4]) diff --git a/tests/test_coverage/test_kzg.py b/tests/test_coverage/test_kzg.py index 4c41124..45328d5 100644 --- a/tests/test_coverage/test_kzg.py +++ b/tests/test_coverage/test_kzg.py @@ -13,7 +13,6 @@ p2_scalar_mul, ) from dot_ring.ring_proof.pcs.srs import srs -from dot_ring import blst class TestKZGHelpers: @@ -23,9 +22,9 @@ def test_p1_scalar_mul(self): """Test P1 scalar multiplication.""" p = srs.blst_g1[0] # Generator scalar = 5 - + result = p1_scalar_mul(p, scalar) - + assert result is not None # Result should be different from input (unless scalar is 1) assert not result.is_equal(p) @@ -34,17 +33,17 @@ def test_p1_add(self): """Test P1 point addition.""" p1 = srs.blst_g1[0] p2 = srs.blst_g1[1] - + result = p1_add(p1, p2) - + assert result is not None def test_p1_neg(self): """Test P1 point negation.""" p = srs.blst_g1[0] - + neg_p = p1_neg(p) - + # p + (-p) should be the point at infinity result = p1_add(p, neg_p) assert result.is_inf() @@ -53,26 +52,26 @@ def test_p2_scalar_mul(self): """Test P2 scalar multiplication.""" p = srs.blst_g2[0] scalar = 5 - + result = p2_scalar_mul(p, scalar) - + assert result is not None def test_p2_add(self): """Test P2 point addition.""" p1 = srs.blst_g2[0] p2 = srs.blst_g2[1] - + result = p2_add(p1, p2) - + assert result is not None def test_p2_neg(self): """Test P2 point negation.""" p = srs.blst_g2[0] - + neg_p = p2_neg(p) - + # p + (-p) should be the point at infinity result = p2_add(p, neg_p) assert result.is_inf() @@ -84,9 +83,9 @@ class 
TestKZGCommit: def test_commit_simple_poly(self): """Test commitment to a simple polynomial.""" coeffs = [1, 2, 3] - + commitment = KZG.commit(coeffs) - + assert commitment is not None assert isinstance(commitment, tuple) # Commitment is (x, y, z) in projective coordinates @@ -95,33 +94,33 @@ def test_commit_simple_poly(self): def test_commit_zero_poly(self): """Test commitment to zero polynomial.""" coeffs = [0, 0, 0] - + commitment = KZG.commit(coeffs) - + # Should be the point at infinity assert commitment is not None def test_commit_single_coeff(self): """Test commitment to constant polynomial.""" coeffs = [5] - + commitment = KZG.commit(coeffs) - + assert commitment is not None def test_commit_sparse_poly(self): """Test commitment to sparse polynomial (many zeros).""" coeffs = [1, 0, 0, 0, 0, 5, 0, 0, 0, 3] - + commitment = KZG.commit(coeffs) - + assert commitment is not None def test_commit_exceeds_srs_raises(self): """Test that committing to too large polynomial raises.""" # Create polynomial larger than SRS coeffs = [1] * (len(srs.g1) + 100) - + with pytest.raises(ValueError, match="polynomial degree exceeds SRS size"): KZG.commit(coeffs) @@ -133,9 +132,9 @@ def test_open_simple(self): """Test opening a polynomial at a point.""" coeffs = [1, 2, 3] # 1 + 2x + 3x^2 x = 5 - + opening = KZG.open(coeffs, x) - + assert isinstance(opening, Opening) assert opening.proof is not None assert opening.y is not None @@ -147,9 +146,9 @@ def test_open_at_zero(self): """Test opening at x=0.""" coeffs = [7, 2, 3] # 7 + 2x + 3x^2 x = 0 - + opening = KZG.open(coeffs, x) - + # y should be the constant term assert opening.y == 7 @@ -161,46 +160,46 @@ def test_verify_valid_opening(self): """Test verification of a valid opening.""" coeffs = [1, 2, 3] x = 5 - + commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + result = KZG.verify(commitment, opening.proof, x, opening.y) - + assert result is True def test_verify_invalid_value(self): """Test verification fails with 
wrong value.""" coeffs = [1, 2, 3] x = 5 - + commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + # Use wrong value wrong_value = (opening.y + 1) % (2**256) - + result = KZG.verify(commitment, opening.proof, x, wrong_value) - + assert result is False def test_verify_blst_point_inputs(self): """Test verification with blst.P1 inputs directly.""" coeffs = [1, 2, 3] x = 5 - + # Get blst.P1 commitment directly from dot_ring.ring_proof.pcs.utils import g1_to_blst - + commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + # Convert to blst.P1 comm_blst = g1_to_blst(commitment) proof_blst = g1_to_blst(opening.proof) - + result = KZG.verify(comm_blst, proof_blst, x, opening.y) - + assert result is True @@ -216,77 +215,77 @@ def test_batch_verify_single(self): """Test batch verification with single verification.""" coeffs = [1, 2, 3] x = 5 - + commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + # Convert to blst.P1 for batch verification from dot_ring.ring_proof.pcs.utils import g1_to_blst - + comm_blst = g1_to_blst(commitment) proof_blst = g1_to_blst(opening.proof) - + verifications = [(comm_blst, proof_blst, x, opening.y)] - + result = KZG.batch_verify(verifications) - + assert result is True def test_batch_verify_multiple_valid(self): """Test batch verification with multiple valid openings.""" from dot_ring.ring_proof.pcs.utils import g1_to_blst - + verifications = [] - + # Create multiple polynomial openings for i in range(3): coeffs = [1 + i, 2 + i, 3 + i] x = 5 + i - + commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + comm_blst = g1_to_blst(commitment) proof_blst = g1_to_blst(opening.proof) - + verifications.append((comm_blst, proof_blst, x, opening.y)) - + result = KZG.batch_verify(verifications) - + assert result is True def test_batch_verify_one_invalid(self): """Test batch verification fails if one is invalid.""" from dot_ring.ring_proof.pcs.utils import g1_to_blst - + verifications = [] - + # Create valid 
openings for i in range(2): coeffs = [1 + i, 2 + i, 3 + i] x = 5 + i - + commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + comm_blst = g1_to_blst(commitment) proof_blst = g1_to_blst(opening.proof) - + verifications.append((comm_blst, proof_blst, x, opening.y)) - + # Add one invalid opening (wrong value) coeffs = [1, 2, 3] x = 10 commitment = KZG.commit(coeffs) opening = KZG.open(coeffs, x) - + comm_blst = g1_to_blst(commitment) proof_blst = g1_to_blst(opening.proof) - + # Use wrong value wrong_value = (opening.y + 1000) % (2**256) verifications.append((comm_blst, proof_blst, x, wrong_value)) - + result = KZG.batch_verify(verifications) - + assert result is False diff --git a/tests/test_coverage/test_ops.py b/tests/test_coverage/test_ops.py index a423717..681141b 100644 --- a/tests/test_coverage/test_ops.py +++ b/tests/test_coverage/test_ops.py @@ -2,21 +2,21 @@ import pytest +from dot_ring.ring_proof.constants import D_512, D_2048, S_PRIME from dot_ring.ring_proof.polynomial.ops import ( + get_root_of_unity, + lagrange_basis_polynomial, mod_inverse, poly_add, - poly_subtract, - poly_multiply, - poly_scalar, - poly_evaluate_single, - poly_evaluate, poly_division_general, + poly_evaluate, + poly_evaluate_single, poly_mul_linear, - lagrange_basis_polynomial, - get_root_of_unity, + poly_multiply, + poly_scalar, + poly_subtract, vect_scalar_mul, ) -from dot_ring.ring_proof.constants import D_512, D_2048, S_PRIME, OMEGA class TestPolynomialOps: @@ -99,9 +99,9 @@ def test_poly_multiply_larger_fft(self): # Create polynomials large enough to trigger FFT p1 = list(range(1, 65)) # 64 coefficients p2 = list(range(1, 65)) - + result = poly_multiply(p1, p2, S_PRIME) - + # Result should have len(p1) + len(p2) - 1 = 127 coefficients assert len(result) == 127 @@ -231,15 +231,15 @@ def test_lagrange_basis_polynomial_basic(self): # L_0(x) at points [0, 1, 2] # L_0(x) = (x-1)(x-2) / (0-1)(0-2) = (x-1)(x-2) / 2 basis = lagrange_basis_polynomial(x_coords, 0, 17) - + # 
Verify L_0(0) = 1 val_at_0 = poly_evaluate_single(basis, 0, 17) assert val_at_0 == 1 - + # Verify L_0(1) = 0 val_at_1 = poly_evaluate_single(basis, 1, 17) assert val_at_1 == 0 - + # Verify L_0(2) = 0 val_at_2 = poly_evaluate_single(basis, 2, 17) assert val_at_2 == 0 @@ -262,7 +262,7 @@ def test_get_root_of_unity_caching(self): root1 = get_root_of_unity(8, S_PRIME) # Second call should be cached root2 = get_root_of_unity(8, S_PRIME) - + assert root1 == root2 # Verify it's actually a root of unity assert pow(root1, 8, S_PRIME) == 1 diff --git a/tests/test_coverage/test_pairing.py b/tests/test_coverage/test_pairing.py index e395301..13a98f0 100644 --- a/tests/test_coverage/test_pairing.py +++ b/tests/test_coverage/test_pairing.py @@ -6,8 +6,8 @@ from dot_ring.ring_proof.pcs.pairing import ( _ensure_blst_p1_affine, _ensure_blst_p2_affine, - blst_miller_loop, blst_final_verify, + blst_miller_loop, ) from dot_ring.ring_proof.pcs.srs import srs @@ -22,18 +22,18 @@ def test_ensure_p1_affine_from_p1(self): # Create P1 from bytes (serialize then deserialize as P1) p1_bytes = p1_affine.serialize() p1_proj = blst.P1(p1_bytes) - + result = _ensure_blst_p1_affine(p1_proj) - + assert isinstance(result, blst.P1_Affine) def test_ensure_p1_affine_from_p1_affine(self): """Test that P1_Affine is converted (not returned as-is since SRS stores P1).""" # The SRS actually stores P1 objects, not P1_Affine p1 = srs.blst_g1[0] - + result = _ensure_blst_p1_affine(p1) - + # Should return a P1_Affine assert isinstance(result, blst.P1_Affine) @@ -49,18 +49,18 @@ def test_ensure_p2_affine_from_p2(self): # Create P2 from bytes p2_bytes = p2_affine.serialize() p2_proj = blst.P2(p2_bytes) - + result = _ensure_blst_p2_affine(p2_proj) - + assert isinstance(result, blst.P2_Affine) def test_ensure_p2_affine_from_p2_affine(self): """Test that P2_Affine is converted.""" # The SRS stores P2 objects p2 = srs.blst_g2[0] - + result = _ensure_blst_p2_affine(p2) - + # Should return a P2_Affine assert 
isinstance(result, blst.P2_Affine) @@ -77,25 +77,25 @@ def test_miller_loop_affine_points(self): """Test Miller loop with affine points.""" p1 = srs.blst_g1[0] p2 = srs.blst_g2[0] - + result = blst_miller_loop(p1, p2) - + assert isinstance(result, blst.PT) def test_miller_loop_projective_points(self): """Test Miller loop with projective points.""" p1 = srs.blst_g1[0] p2 = srs.blst_g2[0] - + # Create projective points from bytes - p1_bytes = p1.serialize() if hasattr(p1, 'serialize') else p1.to_affine().serialize() - p2_bytes = p2.serialize() if hasattr(p2, 'serialize') else p2.to_affine().serialize() - + p1_bytes = p1.serialize() if hasattr(p1, "serialize") else p1.to_affine().serialize() + p2_bytes = p2.serialize() if hasattr(p2, "serialize") else p2.to_affine().serialize() + p1_proj = blst.P1(p1_bytes) p2_proj = blst.P2(p2_bytes) - + result = blst_miller_loop(p1_proj, p2_proj) - + assert isinstance(result, blst.PT) @@ -106,12 +106,12 @@ def test_final_verify_equal_pairings(self): """Test final verify with equal pairings returns True.""" p1 = srs.blst_g1[0] p2 = srs.blst_g2[0] - + pt1 = blst_miller_loop(p1, p2) pt2 = blst_miller_loop(p1, p2) - + result = blst_final_verify(pt1, pt2) - + assert result is True def test_final_verify_different_pairings(self): @@ -119,10 +119,10 @@ def test_final_verify_different_pairings(self): p1_1 = srs.blst_g1[0] p1_2 = srs.blst_g1[1] # Different G1 point p2 = srs.blst_g2[0] - + pt1 = blst_miller_loop(p1_1, p2) pt2 = blst_miller_loop(p1_2, p2) - + result = blst_final_verify(pt1, pt2) - + assert result is False diff --git a/tests/test_coverage/test_pcs_utils.py b/tests/test_coverage/test_pcs_utils.py index d366a67..31591a8 100644 --- a/tests/test_coverage/test_pcs_utils.py +++ b/tests/test_coverage/test_pcs_utils.py @@ -1,9 +1,7 @@ """Additional tests for utils module to improve coverage.""" -import pytest - -from dot_ring.ring_proof.pcs.utils import synthetic_div from dot_ring.ring_proof.constants import S_PRIME +from 
dot_ring.ring_proof.pcs.utils import synthetic_div class TestPCSUtils: @@ -15,9 +13,9 @@ def test_synthetic_div_basic(self): poly = [1, 2, 1] # 1 + 2x + x^2 x = 1 y = 4 # f(1) = 1 + 2 + 1 = 4 - + quotient = synthetic_div(poly, x, y) - + assert quotient is not None def test_synthetic_div_with_modulo(self): @@ -26,9 +24,9 @@ def test_synthetic_div_with_modulo(self): x = 5 # Compute y = f(x) y = sum(c * pow(x, i, S_PRIME) for i, c in enumerate(poly)) % S_PRIME - + quotient = synthetic_div(poly, x, y) - + assert quotient is not None def test_synthetic_div_simple_linear(self): @@ -38,9 +36,9 @@ def test_synthetic_div_simple_linear(self): poly = [2, 3] # 2 + 3x x = 2 y = 8 - + quotient = synthetic_div(poly, x, y) - + assert quotient is not None # Quotient should be just [3] since f(x) = 8 + 3(x - 2) = 8 + 3x - 6 = 2 + 3x @@ -50,8 +48,8 @@ def test_synthetic_div_larger_poly(self): x = 3 # f(3) = 1 + 3 + 9 + 27 + 81 = 121 y = 121 - + quotient = synthetic_div(poly, x, y) - + assert quotient is not None assert len(quotient) == len(poly) - 1 diff --git a/tests/test_coverage/test_serialize.py b/tests/test_coverage/test_serialize.py index 2e38f82..b8b6ae5 100644 --- a/tests/test_coverage/test_serialize.py +++ b/tests/test_coverage/test_serialize.py @@ -1,7 +1,5 @@ """Additional tests for serialize module to improve coverage.""" -import pytest - from dot_ring.ring_proof.transcript.serialize import serialize @@ -11,53 +9,53 @@ class TestSerialize: def test_serialize_int(self): """Test serializing an integer.""" scalar = 12345 - + result = serialize(scalar) - + assert result is not None assert isinstance(result, bytes) def test_serialize_large_int(self): """Test serializing a large integer.""" scalar = 2**200 - + result = serialize(scalar) - + assert result is not None assert isinstance(result, bytes) def test_serialize_zero(self): """Test serializing zero.""" scalar = 0 - + result = serialize(scalar) - + assert result is not None assert isinstance(result, bytes) def 
test_serialize_list_of_ints(self): """Test serializing a list of ints.""" data = [1, 2, 3] - + result = serialize(data) - + assert result is not None assert isinstance(result, bytes) def test_serialize_nested_list(self): """Test serializing a nested list.""" data = [[1, 2], [3, 4]] - + result = serialize(data) - + assert result is not None assert isinstance(result, bytes) def test_serialize_tuple_of_two(self): """Test serializing a tuple of two integers (like coordinates).""" data = (1, 2) - + result = serialize(data) - + assert result is not None assert isinstance(result, bytes) diff --git a/tests/test_coverage/test_srs.py b/tests/test_coverage/test_srs.py index d27da84..e587da9 100644 --- a/tests/test_coverage/test_srs.py +++ b/tests/test_coverage/test_srs.py @@ -1,8 +1,6 @@ """Additional tests for SRS module to improve coverage.""" -import pytest - -from dot_ring.ring_proof.pcs.srs import srs, SRS +from dot_ring.ring_proof.pcs.srs import SRS, srs class TestSRS: diff --git a/tests/test_coverage/test_sw_curve.py b/tests/test_coverage/test_sw_curve.py index 11402b1..3586574 100644 --- a/tests/test_coverage/test_sw_curve.py +++ b/tests/test_coverage/test_sw_curve.py @@ -1,12 +1,10 @@ """Tests for SW curve module to improve coverage using existing curve specs.""" -import pytest - -from dot_ring.curve.specs.secp256k1 import Secp256k1Curve, Secp256k1Params, Secp256k1_SW_Curve -from dot_ring.curve.specs.p256 import P256Curve, P256Params -from dot_ring.curve.specs.p384 import P384Curve, P384Params -from dot_ring.curve.specs.p521 import P521Curve, P521Params -from dot_ring.curve.specs.bls12_381_G1 import BLS12_381_G1Curve, BLS12_381_G1Params +from dot_ring.curve.specs.bls12_381_G1 import BLS12_381_G1Curve +from dot_ring.curve.specs.p256 import P256Curve +from dot_ring.curve.specs.p384 import P384Curve +from dot_ring.curve.specs.p521 import P521Curve +from dot_ring.curve.specs.secp256k1 import Secp256k1_SW_Curve class TestSWCurveIsOnCurve: diff --git 
a/tests/test_coverage/test_transcript.py b/tests/test_coverage/test_transcript.py index 5f2f319..e7504da 100644 --- a/tests/test_coverage/test_transcript.py +++ b/tests/test_coverage/test_transcript.py @@ -1,9 +1,7 @@ """Additional tests for transcript module to improve coverage.""" -import pytest - -from dot_ring.ring_proof.transcript.transcript import Transcript from dot_ring.ring_proof.constants import S_PRIME +from dot_ring.ring_proof.transcript.transcript import Transcript class TestTranscriptAdditional: @@ -48,9 +46,9 @@ def test_transcript_challenge(self): """Test getting challenge.""" transcript = Transcript(S_PRIME, b"test") transcript.append(b"some_data") - + challenge = transcript.challenge(b"challenge") - + assert isinstance(challenge, int) assert 0 <= challenge < S_PRIME @@ -58,9 +56,9 @@ def test_transcript_read_reduce(self): """Test read_reduce method.""" transcript = Transcript(S_PRIME, b"test") transcript.write(b"data") - + result = transcript.read_reduce() - + assert isinstance(result, int) assert 0 <= result < S_PRIME @@ -75,9 +73,9 @@ def test_transcript_separate(self): def test_transcript_get_constraints_aggregation_coeffs(self): """Test getting constraint aggregation coefficients.""" transcript = Transcript(S_PRIME, b"test") - + coeffs = transcript.get_constraints_aggregation_coeffs(3) - + assert len(coeffs) == 3 for c in coeffs: assert isinstance(c, int) @@ -86,18 +84,18 @@ def test_transcript_get_constraints_aggregation_coeffs(self): def test_transcript_get_evaluation_point(self): """Test getting evaluation point.""" transcript = Transcript(S_PRIME, b"test") - + points = transcript.get_evaluation_point(1) - + assert len(points) == 1 assert isinstance(points[0], int) def test_transcript_get_kzg_aggregation_challenges(self): """Test getting KZG aggregation challenges.""" transcript = Transcript(S_PRIME, b"test") - + challenges = transcript.get_kzg_aggregation_challenges(5) - + assert len(challenges) == 5 for c in challenges: assert 
isinstance(c, int) diff --git a/tests/test_coverage/test_verify.py b/tests/test_coverage/test_verify.py index f3db685..aaff3ca 100644 --- a/tests/test_coverage/test_verify.py +++ b/tests/test_coverage/test_verify.py @@ -1,11 +1,12 @@ """Additional tests to improve coverage for verify module.""" import pytest +from dot_ring.curve.native_field.scalar import Scalar +from py_ecc.optimized_bls12_381 import curve_order -from dot_ring.ring_proof.verify import lagrange_at_zeta, blst_msm -from dot_ring.ring_proof.constants import SIZE, OMEGA, S_PRIME +from dot_ring.ring_proof.constants import OMEGA, OMEGA_2048, S_PRIME, SIZE from dot_ring.ring_proof.pcs.srs import srs -from dot_ring import blst +from dot_ring.ring_proof.verify import Verify, blst_msm, lagrange_at_zeta class TestVerifyHelpers: @@ -19,9 +20,9 @@ def test_lagrange_at_zeta_basic(self): zeta = 12345 # arbitrary point omega = OMEGA prime = S_PRIME - + result = lagrange_at_zeta(domain_size, index, zeta, omega, prime) - + # Just verify it returns a scalar-like value assert result is not None @@ -32,12 +33,12 @@ def test_lagrange_at_zeta_at_omega_power(self): index = 5 omega = OMEGA prime = S_PRIME - + # zeta = omega^index zeta = pow(omega, index, prime) - + result = lagrange_at_zeta(domain_size, index, zeta, omega, prime) - + # Should be 1 # The Scalar class has an internal value assert int(result) == 1 @@ -49,9 +50,9 @@ def test_lagrange_at_zeta_index_zero(self): zeta = 99999 omega = OMEGA prime = S_PRIME - + result = lagrange_at_zeta(domain_size, index, zeta, omega, prime) - + assert result is not None def test_lagrange_at_zeta_index_size_minus_4(self): @@ -61,9 +62,9 @@ def test_lagrange_at_zeta_index_size_minus_4(self): zeta = 12345 omega = OMEGA prime = S_PRIME - + result = lagrange_at_zeta(domain_size, index, zeta, omega, prime) - + assert result is not None def test_lagrange_at_zeta_caching(self): @@ -73,12 +74,136 @@ def test_lagrange_at_zeta_caching(self): zeta = 54321 omega = OMEGA prime = S_PRIME - + 
result1 = lagrange_at_zeta(domain_size, index, zeta, omega, prime) result2 = lagrange_at_zeta(domain_size, index, zeta, omega, prime) - + assert result1 == result2 + def test_divide_modular_inverse(self): + """Test modular division helper.""" + verifier = Verify.__new__(Verify) + + numerator = 10 + denominator = 3 + expected = (numerator * pow(denominator, -1, curve_order)) % curve_order + + assert verifier.divide(numerator, denominator) == expected + + def test_legacy_methods_raise(self): + """Legacy methods should raise NotImplementedError.""" + verifier = Verify.__new__(Verify) + + with pytest.raises(NotImplementedError): + verifier.evaluation_of_quotient_poly_at_zeta() + + with pytest.raises(NotImplementedError): + verifier.evaluation_of_linearization_poly_at_zeta_omega() + + def test_contributions_handles_zeta_equal_domain_point(self): + """Zeta equal to a domain point should hit the zero-difference branches.""" + verifier = Verify.__new__(Verify) + + verifier.zeta_p = 1 + verifier.sp = (2, 3) + verifier.D = [1, 2, 3, 4] + verifier.b_zeta = 1 + verifier.accx_zeta = 5 + verifier.accy_zeta = 7 + verifier.accip_zeta = 11 + verifier.px_zeta = 13 + verifier.py_zeta = 17 + verifier.s_zeta = 19 + verifier.Result_plus_Seed = (23, 29) + + result = verifier.contributions_to_constraints_eval_at_zeta() + + assert len(result) == 7 + assert all(isinstance(value, Scalar) for value in result) + + def test_linearization_uses_expected_omega(self): + """Verify omega selection for non-512 domains.""" + verifier = Verify.__new__(Verify) + + verifier.alpha_list = [1, 2, 3] + verifier.zeta_p = 7 + verifier.D = list(range(1024)) + verifier.accx_zeta = 2 + verifier.accy_zeta = 3 + verifier.px_zeta = 5 + verifier.py_zeta = 7 + verifier.b_zeta = 11 + verifier.Caccip_blst = srs.blst_g1[0] + verifier.Caccx_blst = srs.blst_g1[1] + verifier.Caccy_blst = srs.blst_g1[2] + verifier.Phi_zeta_omega_blst = srs.blst_g1[3] + verifier.l_zeta_omega = 13 + + _, _, zeta_omega, _ = 
verifier._prepare_linearization_poly_verification() + expected_omega = pow(OMEGA_2048, 2048 // 1024, S_PRIME) + expected_zeta_omega = (verifier.zeta_p * expected_omega) % S_PRIME + + assert zeta_omega == expected_zeta_omega + + def test_linearization_uses_fallback_omega(self): + """Verify omega selection for non-standard domain sizes.""" + verifier = Verify.__new__(Verify) + + verifier.alpha_list = [1, 2, 3] + verifier.zeta_p = 7 + verifier.D = list(range(16)) + verifier.accx_zeta = 2 + verifier.accy_zeta = 3 + verifier.px_zeta = 5 + verifier.py_zeta = 7 + verifier.b_zeta = 11 + verifier.Caccip_blst = srs.blst_g1[0] + verifier.Caccx_blst = srs.blst_g1[1] + verifier.Caccy_blst = srs.blst_g1[2] + verifier.Phi_zeta_omega_blst = srs.blst_g1[3] + verifier.l_zeta_omega = 13 + + _, _, zeta_omega, _ = verifier._prepare_linearization_poly_verification() + expected_omega = pow(OMEGA_2048, 2048 // 16, S_PRIME) + expected_zeta_omega = (verifier.zeta_p * expected_omega) % S_PRIME + + assert zeta_omega == expected_zeta_omega + + def test_prepare_quotient_poly_verification_smoke(self): + """Smoke test for quotient verification preparation.""" + verifier = Verify.__new__(Verify) + + verifier.alpha_list = [1, 2, 3, 4, 5, 6, 7] + verifier.zeta_p = 7 + verifier.V_list = [1, 2, 3, 4, 5, 6, 7, 8] + verifier.D = [1, 2, 3, 4] + verifier.sp = (2, 3) + verifier.Result_plus_Seed = (23, 29) + verifier.b_zeta = 1 + verifier.accx_zeta = 5 + verifier.accy_zeta = 7 + verifier.accip_zeta = 11 + verifier.px_zeta = 13 + verifier.py_zeta = 17 + verifier.s_zeta = 19 + verifier.l_zeta_omega = 31 + + verifier.Cpx_blst = srs.blst_g1[0] + verifier.Cpy_blst = srs.blst_g1[1] + verifier.Cs_blst = srs.blst_g1[2] + verifier.Cb_blst = srs.blst_g1[3] + verifier.Caccip_blst = srs.blst_g1[4] + verifier.Caccx_blst = srs.blst_g1[5] + verifier.Caccy_blst = srs.blst_g1[6] + verifier.Cq_blst = srs.blst_g1[7] + verifier.Phi_zeta_blst = srs.blst_g1[8] + + _, phi, zeta, agg = 
verifier._prepare_quotient_poly_verification() + + assert phi is verifier.Phi_zeta_blst + assert zeta == verifier.zeta_p + assert isinstance(agg, int) + class TestBLSTMSM: """Test blst multi-scalar multiplication.""" @@ -86,7 +211,7 @@ class TestBLSTMSM: def test_blst_msm_empty(self): """Test MSM with empty inputs.""" result = blst_msm([], []) - + # Should return point at infinity assert result.is_inf() @@ -94,9 +219,9 @@ def test_blst_msm_single_point(self): """Test MSM with single point.""" points = [srs.blst_g1[0]] scalars = [5] - + result = blst_msm(points, scalars) - + assert result is not None assert not result.is_inf() @@ -104,7 +229,7 @@ def test_blst_msm_multiple_points(self): """Test MSM with multiple points.""" points = [srs.blst_g1[0], srs.blst_g1[1], srs.blst_g1[2]] scalars = [1, 2, 3] - + result = blst_msm(points, scalars) - + assert result is not None diff --git a/tests/test_curve_ops/test_curve.py b/tests/test_curve_ops/test_curve.py index 94fc347..7101df2 100644 --- a/tests/test_curve_ops/test_curve.py +++ b/tests/test_curve_ops/test_curve.py @@ -1,11 +1,11 @@ - import pytest -from dot_ring.curve.twisted_edwards.te_affine_point import TEAffinePoint -from dot_ring.curve.specs.bandersnatch_sw import Bandersnatch_SW_Point, BandersnatchSWParams + from dot_ring.curve.specs.ed25519 import Ed25519_RO +from dot_ring.curve.twisted_edwards.te_affine_point import TEAffinePoint from dot_ring.ring_proof.proof.quotient_poly import QuotientPoly from dot_ring.ring_proof.transcript.serialize import serialize + class TestCoverageCurve: def test_te_affine_double_identity(self): """Test TEAffinePoint.double with identity point.""" @@ -42,14 +42,14 @@ def test_serialize_bytes_bytearray(self): """Test serialize with bytes and bytearray.""" b = b"test" assert serialize(b) == b - + ba = bytearray(b"test") assert serialize(ba) == b def test_serialize_invalid_type(self): """Test serialize with invalid type.""" with pytest.raises(TypeError, match="Unsupported object type"): 
- serialize(1.5) # type: ignore + serialize(1.5) # type: ignore def test_te_affine_double_coverage(self): """Test TEAffinePoint.double with a valid point.""" @@ -63,18 +63,19 @@ def test_te_affine_double_coverage(self): def test_mg_affine_ops(self): """Test MGAffinePoint operations.""" from dot_ring.curve.specs.curve448 import Curve448_RO + PointClass = Curve448_RO.point g = PointClass.generator_point() - + # Identity identity = PointClass.identity() assert (g + identity) == g assert (identity + g) == g - + # Negation neg_g = -g assert (g + neg_g) == identity - + # Subtraction assert (g - g) == identity assert (g - neg_g) == (g + g) @@ -82,35 +83,37 @@ def test_mg_affine_ops(self): def test_mg_affine_encode_map(self): """Test MGAffinePoint encode and map to curve.""" from dot_ring.curve.specs.curve448 import Curve448_RO + PointClass = Curve448_RO.point - + # Encode to curve msg = b"test message" p = PointClass.encode_to_curve(msg, b"DST") assert p.is_on_curve() - + # Map to curve (if applicable directly, usually via encode) # map_to_curve is often internal or specific - + # --- SWAffinePoint Tests (P256) --- def test_sw_affine_ops(self): """Test SWAffinePoint operations.""" from dot_ring.curve.specs.p256 import P256_RO + PointClass = P256_RO.point g = PointClass.generator_point() - + # Identity identity = PointClass.identity() assert identity.is_identity() - + # Add with identity assert (g + identity) == g assert (identity + g) == g - + # Negation neg_g = -g assert (g + neg_g).is_identity() - + # Multiplication p2 = g * 2 assert p2 == (g + g) @@ -118,14 +121,15 @@ def test_sw_affine_ops(self): def test_sw_affine_from_bytes(self): """Test SWAffinePoint from_bytes (compressed/uncompressed).""" from dot_ring.curve.specs.p256 import P256_RO + PointClass = P256_RO.point g = PointClass.generator_point() - + # Uncompressed b_uncomp = g.point_to_string(compressed=False) p_uncomp = PointClass.string_to_point(b_uncomp) assert p_uncomp == g - + # Compressed b_comp = 
g.point_to_string(compressed=True) p_comp = PointClass.string_to_point(b_comp) @@ -134,8 +138,9 @@ def test_sw_affine_from_bytes(self): def test_sw_affine_encode(self): """Test SWAffinePoint encode_to_curve.""" from dot_ring.curve.specs.p256 import P256_RO + PointClass = P256_RO.point - + msg = b"test message" p = PointClass.encode_to_curve(msg, b"DST") assert p.is_on_curve() @@ -143,20 +148,21 @@ def test_sw_affine_encode(self): def test_sw_affine_string_to_point_errors(self): """Test SWAffinePoint string_to_point error cases.""" from dot_ring.curve.specs.p256 import P256_RO + PointClass = P256_RO.point - + # Empty string with pytest.raises(ValueError, match="Empty octet string"): PointClass.string_to_point(b"") - + # Invalid prefix with pytest.raises(ValueError, match="Invalid point encoding prefix"): - PointClass.string_to_point(b"\x05" + b"\x00"*32) - + PointClass.string_to_point(b"\x05" + b"\x00" * 32) + # Invalid length for compressed with pytest.raises(ValueError, match="Invalid compressed point length"): PointClass.string_to_point(b"\x02" + b"\x00") - + # Invalid length for uncompressed with pytest.raises(ValueError, match="Invalid uncompressed point length"): PointClass.string_to_point(b"\x04" + b"\x00") @@ -164,38 +170,55 @@ def test_sw_affine_string_to_point_errors(self): def test_mg_affine_string_to_point_errors(self): """Test MGAffinePoint string_to_point error cases.""" from dot_ring.curve.specs.curve448 import Curve448_RO + PointClass = Curve448_RO.point - + # Invalid point (not on curve) # u=0, v=1 => 0 != 1 (invalid) # 56 bytes for u (0), 56 bytes for v (1) u_bytes = b"\x00" * 56 - v_bytes = b"\x01" + b"\x00" * 55 # Little endian 1 + v_bytes = b"\x01" + b"\x00" * 55 # Little endian 1 with pytest.raises(ValueError, match="Point is not on the curve"): PointClass.string_to_point(u_bytes + v_bytes) def test_tonelli_shanks_coverage(self): """Test Tonelli-Shanks algorithm with a mock curve (p % 8 == 1).""" + from dot_ring.curve.e2c import E2C_Variant 
from dot_ring.curve.montgomery.mg_affine_point import MGAffinePoint from dot_ring.curve.montgomery.mg_curve import MGCurve - from dot_ring.curve.e2c import E2C_Variant - + class MockMGCurve(MGCurve): def __init__(self): # p = 17 (1 mod 8) super().__init__( PRIME_FIELD=17, - ORDER=17, # Dummy - GENERATOR_X=0, GENERATOR_Y=0, # Dummy + ORDER=17, # Dummy + GENERATOR_X=0, + GENERATOR_Y=0, # Dummy COFACTOR=1, - Z=1, A=0, B=1, # Dummy - SUITE_STRING=b"", DST=b"", + Z=1, + A=0, + B=1, # Dummy + SUITE_STRING=b"", + DST=b"", E2C=E2C_Variant.ELL2, - BBx=0, BBy=0, L=0, M=1, K=0, H_A=None, S_in_bytes=0, - Requires_Isogeny=False, Isogeny_Coeffs=None, - UNCOMPRESSED=True, ENDIAN="little", POINT_LEN=2, CHALLENGE_LENGTH=0 + BBx=0, + BBy=0, + L=0, + M=1, + K=0, + H_A=None, + S_in_bytes=0, + Requires_Isogeny=False, + Isogeny_Coeffs=None, + UNCOMPRESSED=True, + ENDIAN="little", + POINT_LEN=2, + CHALLENGE_LENGTH=0, ) - def __post_init__(self): pass + + def __post_init__(self): + pass class MockMGPoint(MGAffinePoint): curve = MockMGCurve() @@ -204,45 +227,65 @@ class MockMGPoint(MGAffinePoint): # Squares mod 17: 0, 1, 4, 9, 16, 8, 2, 15, 13 # 2 is a square (6^2 = 36 = 2 mod 17) # 3 is NOT a square - + p = MockMGPoint(None, None) - + # Test square root = p._sqrt_mod_p(2) assert root is not None assert (root * root) % 17 == 2 - + # Test non-square root_non = p._sqrt_mod_p(3) assert root_non is None def test_mg_affine_unimplemented_errors(self): """Test MGAffinePoint unimplemented methods.""" + from dot_ring.curve.e2c import E2C_Variant from dot_ring.curve.montgomery.mg_affine_point import MGAffinePoint from dot_ring.curve.montgomery.mg_curve import MGCurve - from dot_ring.curve.e2c import E2C_Variant - + class MockMGCurve(MGCurve): def __init__(self): super().__init__( - PRIME_FIELD=17, ORDER=17, GENERATOR_X=0, GENERATOR_Y=0, COFACTOR=1, - Z=1, A=0, B=1, SUITE_STRING=b"", DST=b"", E2C=E2C_Variant.ELL2, - BBx=0, BBy=0, L=0, M=1, K=0, H_A=None, S_in_bytes=0, - Requires_Isogeny=False, 
Isogeny_Coeffs=None, - UNCOMPRESSED=False, # Set to False to test error - ENDIAN="little", POINT_LEN=2, CHALLENGE_LENGTH=0 + PRIME_FIELD=17, + ORDER=17, + GENERATOR_X=0, + GENERATOR_Y=0, + COFACTOR=1, + Z=1, + A=0, + B=1, + SUITE_STRING=b"", + DST=b"", + E2C=E2C_Variant.ELL2, + BBx=0, + BBy=0, + L=0, + M=1, + K=0, + H_A=None, + S_in_bytes=0, + Requires_Isogeny=False, + Isogeny_Coeffs=None, + UNCOMPRESSED=False, # Set to False to test error + ENDIAN="little", + POINT_LEN=2, + CHALLENGE_LENGTH=0, ) - def __post_init__(self): pass + + def __post_init__(self): + pass class MockMGPoint(MGAffinePoint): curve = MockMGCurve() p = MockMGPoint(0, 0) - + # Test point_to_string compressed error with pytest.raises(NotImplementedError, match="Compressed encoding not implemented"): p.point_to_string() - + # Test _x_recover NotImplementedError with pytest.raises(NotImplementedError): MockMGPoint._x_recover(1) @@ -250,19 +293,19 @@ class MockMGPoint(MGAffinePoint): def test_sw_tonelli_shanks_coverage(self): """Test SWAffinePoint.tonelli_shanks with p % 8 == 1.""" from dot_ring.curve.short_weierstrass.sw_affine_point import SWAffinePoint - + # p = 17 (1 mod 8) # Squares: 0, 1, 4, 9, 16, 8, 2, 15, 13 - + # Test square root = SWAffinePoint.tonelli_shanks(2, 17) assert root is not None assert (root * root) % 17 == 2 - + # Test non-square root_non = SWAffinePoint.tonelli_shanks(3, 17) assert root_non is None - + # Test p % 4 == 3 case (e.g., p=7) # Squares mod 7: 0, 1, 4, 2 root_fast = SWAffinePoint.tonelli_shanks(2, 7) @@ -272,26 +315,27 @@ def test_sw_tonelli_shanks_coverage(self): def test_sw_hybrid_format(self): """Test SWAffinePoint hybrid format (0x06/0x07).""" from dot_ring.curve.specs.p256 import P256_RO + PointClass = P256_RO.point g = PointClass.generator_point() - + # Construct hybrid bytes manually # Prefix 0x06 if y is even, 0x07 if y is odd - y_int = int(g.y) # type: ignore + y_int = int(g.y) # type: ignore prefix = b"\x06" if y_int % 2 == 0 else b"\x07" - - x_bytes = 
int(g.x).to_bytes(32, "big") # type: ignore - y_bytes = int(g.y).to_bytes(32, "big") # type: ignore - + + x_bytes = int(g.x).to_bytes(32, "big") # type: ignore + y_bytes = int(g.y).to_bytes(32, "big") # type: ignore + hybrid_bytes = prefix + x_bytes + y_bytes - + p = PointClass.string_to_point(hybrid_bytes) assert p == g - + # Test invalid hybrid length with pytest.raises(ValueError, match="Invalid hybrid point length"): PointClass.string_to_point(b"\x06" + b"\x00") - + # Test invalid hybrid parity wrong_prefix = b"\x07" if y_int % 2 == 0 else b"\x06" wrong_hybrid = wrong_prefix + x_bytes + y_bytes @@ -300,31 +344,59 @@ def test_sw_hybrid_format(self): def test_curve_point_base_coverage(self): """Test base CurvePoint methods.""" - from dot_ring.curve.point import CurvePoint - from dot_ring.curve.montgomery.mg_curve import MGCurve from dot_ring.curve.e2c import E2C_Variant - + from dot_ring.curve.montgomery.mg_curve import MGCurve + from dot_ring.curve.point import CurvePoint + class MockCurve(MGCurve): def __init__(self): super().__init__( - PRIME_FIELD=17, ORDER=17, GENERATOR_X=0, GENERATOR_Y=0, COFACTOR=1, - Z=1, A=0, B=1, SUITE_STRING=b"", DST=b"", E2C=E2C_Variant.ELL2, - BBx=0, BBy=0, L=0, M=1, K=0, H_A=None, S_in_bytes=0, - Requires_Isogeny=False, Isogeny_Coeffs=None, - UNCOMPRESSED=True, ENDIAN="little", POINT_LEN=2, CHALLENGE_LENGTH=0 + PRIME_FIELD=17, + ORDER=17, + GENERATOR_X=0, + GENERATOR_Y=0, + COFACTOR=1, + Z=1, + A=0, + B=1, + SUITE_STRING=b"", + DST=b"", + E2C=E2C_Variant.ELL2, + BBx=0, + BBy=0, + L=0, + M=1, + K=0, + H_A=None, + S_in_bytes=0, + Requires_Isogeny=False, + Isogeny_Coeffs=None, + UNCOMPRESSED=True, + ENDIAN="little", + POINT_LEN=2, + CHALLENGE_LENGTH=0, ) - def __post_init__(self): pass + + def __post_init__(self): + pass class MockPoint(CurvePoint): - def is_on_curve(self): return True - def is_identity(self): return self.x is None and self.y is None - def _validate_coordinates(self): return True + def is_on_curve(self): + return True 
+ + def is_identity(self): + return self.x is None and self.y is None + + def _validate_coordinates(self): + return True + @classmethod - def identity(cls): return cls(None, None) + def identity(cls): + return cls(None, None) c = MockCurve() p = MockPoint(0, 0, curve=c) - + # Test NotImplementedError with pytest.raises(NotImplementedError): p + p @@ -332,20 +404,21 @@ def identity(cls): return cls(None, None) p - p with pytest.raises(NotImplementedError): p * 2 - + # Test msm errors with pytest.raises(ValueError, match="Points and scalars must have same length"): MockPoint.msm([p], [1, 2]) - + # Test msm empty # Should return identity res = MockPoint.msm([], []) assert res.x is None and res.y is None - + # Test __hash__ with complex types class ComplexCoord: - def __init__(self, re, im): self.re, self.im = re, im - + def __init__(self, re, im): + self.re, self.im = re, im + p_complex = MockPoint(ComplexCoord(1, 2), ComplexCoord(3, 4), curve=c) h = hash(p_complex) assert isinstance(h, int) @@ -353,97 +426,142 @@ def __init__(self, re, im): self.re, self.im = re, im def test_te_affine_errors(self): """Test TEAffinePoint error cases.""" from dot_ring.curve.specs.ed25519 import Ed25519_RO + PointClass = Ed25519_RO.point g = PointClass.generator_point() - + # __add__ invalid type with pytest.raises(TypeError, match="Can only add TEAffinePoints"): - g + 1 # type: ignore - + g + 1 # type: ignore + # __mul__ 0 p0 = g * 0 assert p0.is_identity() - + # __mul__ negative p_neg = g * -1 assert p_neg == -g - + # encode_to_curve invalid variant - from dot_ring.curve.twisted_edwards.te_curve import TECurve - from dot_ring.curve.twisted_edwards.te_affine_point import TEAffinePoint from dot_ring.curve.e2c import E2C_Variant - + from dot_ring.curve.twisted_edwards.te_curve import TECurve + class MockTECurve(TECurve): def __init__(self): super().__init__( - PRIME_FIELD=17, ORDER=18, GENERATOR_X=0, GENERATOR_Y=1, COFACTOR=1, - Z=1, EdwardsA=1, EdwardsD=2, SUITE_STRING=b"", DST=b"", - 
E2C=E2C_Variant.SSWU, # SSWU not supported for TE + PRIME_FIELD=17, + ORDER=18, + GENERATOR_X=0, + GENERATOR_Y=1, + COFACTOR=1, + Z=1, + EdwardsA=1, + EdwardsD=2, + SUITE_STRING=b"", + DST=b"", + E2C=E2C_Variant.SSWU, # SSWU not supported for TE # Wait, TEAffinePoint.encode_to_curve supports TAI if implemented? # Line 277 raises ValueError "Unexpected E2C Variant" - BBx=0, BBy=0, L=0, M=1, K=0, H_A=None, S_in_bytes=0, - Requires_Isogeny=False, Isogeny_Coeffs=None, - UNCOMPRESSED=True, ENDIAN="little", POINT_LEN=2, CHALLENGE_LENGTH=0 + BBx=0, + BBy=0, + L=0, + M=1, + K=0, + H_A=None, + S_in_bytes=0, + Requires_Isogeny=False, + Isogeny_Coeffs=None, + UNCOMPRESSED=True, + ENDIAN="little", + POINT_LEN=2, + CHALLENGE_LENGTH=0, ) - def calculate_j_k(self): return 0, 0 - + + def calculate_j_k(self): + return 0, 0 + class MockTEPoint(TEAffinePoint): curve = MockTECurve() - + # Test encode_to_curve with TAI (which might raise if not handled in base) # TEAffinePoint.encode_to_curve checks for ELL2/ELL2_NU. # If E2C is TAI, it raises ValueError. 
with pytest.raises(ValueError, match="Unexpected E2C Variant"): MockTEPoint.encode_to_curve(b"test") - + # Test point_to_string uncompressed # MockTECurve has UNCOMPRESSED=True - p = MockTEPoint(0, 1) # Identity + p = MockTEPoint(0, 1) # Identity # Identity serialization should succeed for TE (0, 1) s = p.point_to_string() - assert len(s) == 2 # 1 byte x, 1 byte y (p=17 -> 5 bits -> 1 byte) - + assert len(s) == 2 # 1 byte x, 1 byte y (p=17 -> 5 bits -> 1 byte) + # Valid point serialization (uncompressed) - p_valid = MockTEPoint(0, 16) # 16 = -1 mod 17 + p_valid = MockTEPoint(0, 16) # 16 = -1 mod 17 s = p_valid.point_to_string() - assert len(s) == 2 # 1 byte x, 1 byte y + assert len(s) == 2 # 1 byte x, 1 byte y def test_te_affine_hash_complex(self): """Test TEAffinePoint.__hash__ with complex types.""" - from dot_ring.curve.twisted_edwards.te_affine_point import TEAffinePoint - from dot_ring.curve.twisted_edwards.te_curve import TECurve from dot_ring.curve.e2c import E2C_Variant - + from dot_ring.curve.twisted_edwards.te_curve import TECurve + class MockTECurve(TECurve): def __init__(self): super().__init__( - PRIME_FIELD=17, ORDER=18, GENERATOR_X=0, GENERATOR_Y=1, COFACTOR=1, - Z=1, EdwardsA=1, EdwardsD=2, SUITE_STRING=b"", DST=b"", + PRIME_FIELD=17, + ORDER=18, + GENERATOR_X=0, + GENERATOR_Y=1, + COFACTOR=1, + Z=1, + EdwardsA=1, + EdwardsD=2, + SUITE_STRING=b"", + DST=b"", E2C=E2C_Variant.SSWU, - BBx=0, BBy=0, L=0, M=1, K=0, H_A=None, S_in_bytes=0, - Requires_Isogeny=False, Isogeny_Coeffs=None, - UNCOMPRESSED=True, ENDIAN="little", POINT_LEN=2, CHALLENGE_LENGTH=0 + BBx=0, + BBy=0, + L=0, + M=1, + K=0, + H_A=None, + S_in_bytes=0, + Requires_Isogeny=False, + Isogeny_Coeffs=None, + UNCOMPRESSED=True, + ENDIAN="little", + POINT_LEN=2, + CHALLENGE_LENGTH=0, ) - def calculate_j_k(self): return 0, 0 - + + def calculate_j_k(self): + return 0, 0 + class MockTEPoint(TEAffinePoint): curve = MockTECurve() - def is_on_curve(self): return True # Skip validation for complex 
types - def _validate_coordinates(self): return True + + def is_on_curve(self): + return True # Skip validation for complex types + + def _validate_coordinates(self): + return True # Test with re/im (FieldElement-like) class ComplexCoord: - def __init__(self, re, im): self.re, self.im = re, im - + def __init__(self, re, im): + self.re, self.im = re, im + p1 = MockTEPoint(ComplexCoord(1, 2), ComplexCoord(3, 4)) h1 = hash(p1) # x_val = 1+2=3, y_val = 3+4=7. sum=10. 10 % 18 = 10. assert h1 == 10 - + # Test with coeffs (FQ2-like) class CoeffsCoord: - def __init__(self, coeffs): self.coeffs = coeffs - + def __init__(self, coeffs): + self.coeffs = coeffs + p2 = MockTEPoint(CoeffsCoord([1, 2]), CoeffsCoord([3, 4])) h2 = hash(p2) # x_val = 3, y_val = 7. sum=10. diff --git a/tests/test_curve_ops/test_gaps.py b/tests/test_curve_ops/test_gaps.py index 32731c0..44fd491 100644 --- a/tests/test_curve_ops/test_gaps.py +++ b/tests/test_curve_ops/test_gaps.py @@ -65,9 +65,7 @@ def test_ecvrf_decode_proof_invalid_s(self): from dot_ring.ring_proof.helpers import Helpers - invalid_s_bytes = Helpers.int_to_str( - invalid_s, "little", s_len - ) # Bandersnatch is little endian + invalid_s_bytes = Helpers.int_to_str(invalid_s, "little", s_len) # Bandersnatch is little endian # Note: Helpers.int_to_str might mask the overflow if not careful, but here we want to inject bytes that decode to >= order. # If s_len is fixed, we might not be able to fit order+1 if order is max for that length. 
@@ -76,9 +74,7 @@ def test_ecvrf_decode_proof_invalid_s(self): invalid_proof = gamma_bytes + c_bytes + invalid_s_bytes - with pytest.raises( - ValueError, match="Response scalar S is not less than the curve order" - ): + with pytest.raises(ValueError, match="Response scalar S is not less than the curve order"): IETF_VRF[Bandersnatch].ecvrf_decode_proof(invalid_proof) def test_ecvrf_proof_to_hash_string_input(self): @@ -130,9 +126,7 @@ def test_ietf_from_bytes_invalid_s(self): invalid_proof = gamma_bytes + c_bytes + invalid_s_bytes - with pytest.raises( - ValueError, match="Response scalar s is not less than the curve order" - ): + with pytest.raises(ValueError, match="Response scalar s is not less than the curve order"): IETF_VRF[Bandersnatch].from_bytes(invalid_proof) def test_ietf_verify_invalid_public_key(self): @@ -164,9 +158,7 @@ def test_pedersen_from_bytes_invalid_point(self): invalid_proof = b"\xff" * (point_len * 4 + scalar_len * 2) # It might raise ValueError from string_to_point - with pytest.raises( - ValueError - ): # Message might vary depending on which point fails first + with pytest.raises(ValueError): # Message might vary depending on which point fails first PedersenVRF[Bandersnatch].from_bytes(invalid_proof) def test_pedersen_proof_to_hash_string(self): diff --git a/tests/test_curve_ops/test_glv.py b/tests/test_curve_ops/test_glv.py index 5221e47..ab8889e 100644 --- a/tests/test_curve_ops/test_glv.py +++ b/tests/test_curve_ops/test_glv.py @@ -1,7 +1,8 @@ - import pytest -from dot_ring.curve.specs.ed25519 import Ed25519_RO + from dot_ring.curve.glv import GLV +from dot_ring.curve.specs.ed25519 import Ed25519_RO + class TestCoverageGLV: def test_glv_identity_handling(self): @@ -10,31 +11,31 @@ def test_glv_identity_handling(self): # We need access to the GLV instance. # It's usually attached to the curve or used by the point. # But here we can instantiate GLV directly and pass points. 
- + # Create a dummy GLV instance glv = GLV(lambda_param=1, constant_b=1, constant_c=1) - + g = PointClass.generator_point() identity = PointClass.identity_point() - + # Test windowed_simultaneous_mult with identity # P1 identity res1 = glv.windowed_simultaneous_mult(1, 1, identity, g) assert res1 == g - + # P2 identity res2 = glv.windowed_simultaneous_mult(1, 1, g, identity) assert res2 == g - + # Both identity res3 = glv.windowed_simultaneous_mult(1, 1, identity, identity) assert res3.is_identity() - + # Test multi_scalar_mult_4 with identity # One identity res4 = glv.multi_scalar_mult_4(1, 1, 1, 1, identity, g, g, g) assert res4 == g + g + g - + # All identity res5 = glv.multi_scalar_mult_4(1, 1, 1, 1, identity, identity, identity, identity) assert res5.is_identity() @@ -42,11 +43,11 @@ def test_glv_identity_handling(self): def test_glv_errors(self): """Test GLV error conditions.""" glv = GLV(lambda_param=1, constant_b=1, constant_c=1) - + # Invalid parameters with pytest.raises(ValueError, match="Invalid GLV parameters"): GLV(lambda_param=0, constant_b=1, constant_c=1).__post_init__() - + # Extended Euclidean Algorithm invalid inputs with pytest.raises(ValueError, match="Inputs must be positive"): glv.extended_euclidean_algorithm(0, 1) diff --git a/tests/test_curve_ops/test_native_field.py b/tests/test_curve_ops/test_native_field.py index 950a749..e30378e 100644 --- a/tests/test_curve_ops/test_native_field.py +++ b/tests/test_curve_ops/test_native_field.py @@ -1,50 +1,56 @@ -import pytest import random + from dot_ring.curve.native_field.scalar import Scalar + from dot_ring.curve.specs.bandersnatch import BandersnatchParams MODULUS = BandersnatchParams.PRIME_FIELD + def test_scalar_init(): s = Scalar(123) assert s.to_int() == 123 - + s = Scalar(MODULUS - 1) assert s.to_int() == MODULUS - 1 - + s = Scalar(0) assert s.to_int() == 0 + def test_scalar_add(): for _ in range(100): a = random.randint(0, MODULUS - 1) b = random.randint(0, MODULUS - 1) - + expected = (a 
+ b) % MODULUS res = Scalar(a) + Scalar(b) - + assert res.to_int() == expected, f"Failed add: {a} + {b}" + def test_scalar_sub(): for _ in range(100): a = random.randint(0, MODULUS - 1) b = random.randint(0, MODULUS - 1) - + expected = (a - b) % MODULUS res = Scalar(a) - Scalar(b) - + assert res.to_int() == expected, f"Failed sub: {a} - {b}" + def test_scalar_mul(): for _ in range(100): a = random.randint(0, MODULUS - 1) b = random.randint(0, MODULUS - 1) - + expected = (a * b) % MODULUS res = Scalar(a) * Scalar(b) - + assert res.to_int() == expected, f"Failed mul: {a} * {b}" + def test_scalar_mul_edge_cases(): # Test 0 * x assert (Scalar(0) * Scalar(123)).to_int() == 0 @@ -55,22 +61,25 @@ def test_scalar_mul_edge_cases(): expected = (max_val * max_val) % MODULUS assert (Scalar(max_val) * Scalar(max_val)).to_int() == expected + def test_scalar_pow(): a = Scalar(2) res = pow(a, 3) assert res.to_int() == 8 - + # Test modular inverse a = Scalar(12345) inv = pow(a, -1) res = a * inv assert res.to_int() == 1 + def test_scalar_neg(): a = Scalar(1) res = -a assert res.to_int() == MODULUS - 1 + def test_scalar_eq(): a = Scalar(123) b = Scalar(123) @@ -80,6 +89,7 @@ def test_scalar_eq(): assert a == 123 assert a != 456 + def test_scalar_mod(): a = Scalar(123) assert a % MODULUS == 123 diff --git a/tests/test_curve_ops/test_vector_ops.py b/tests/test_curve_ops/test_vector_ops.py index 44480da..cb5e65b 100644 --- a/tests/test_curve_ops/test_vector_ops.py +++ b/tests/test_curve_ops/test_vector_ops.py @@ -1,67 +1,73 @@ - -import pytest import random + from dot_ring.curve.native_field.scalar import Scalar -from dot_ring.curve.native_field.vector_ops import vect_add, vect_sub, vect_mul +from dot_ring.curve.native_field.vector_ops import vect_add, vect_mul, vect_sub + from dot_ring.curve.specs.bandersnatch import BandersnatchParams MODULUS = BandersnatchParams.PRIME_FIELD + def test_vect_add(): a = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] b = 
[Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] - + res = vect_add(a, b, MODULUS) - + for i in range(10): expected = a[i] + b[i] assert res[i] == expected, f"Mismatch at index {i}: {res[i]} != {expected}" + def test_vect_sub(): a = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] b = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] - + res = vect_sub(a, b, MODULUS) - + for i in range(10): expected = a[i] - b[i] assert res[i] == expected, f"Mismatch at index {i}: {res[i]} != {expected}" + def test_vect_mul(): a = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] b = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] - + res = vect_mul(a, b, MODULUS) - + for i in range(10): expected = a[i] * b[i] assert res[i] == expected, f"Mismatch at index {i}: {res[i]} != {expected}" + def test_vect_add_scalar(): a = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] b = Scalar(random.randint(0, MODULUS - 1)) - + res = vect_add(a, b, MODULUS) - + for i in range(10): expected = a[i] + b assert res[i] == expected, f"Mismatch at index {i}: {res[i]} != {expected}" + def test_vect_mul_scalar(): a = [Scalar(random.randint(0, MODULUS - 1)) for _ in range(10)] b = Scalar(random.randint(0, MODULUS - 1)) - + res = vect_mul(a, b, MODULUS) - + for i in range(10): expected = a[i] * b assert res[i] == expected, f"Mismatch at index {i}: {res[i]} != {expected}" + def test_perf_vect_add(benchmark): a = [Scalar(i) for i in range(1000)] b = [Scalar(i) for i in range(1000)] - + def run(): vect_add(a, b, MODULUS) - + benchmark(run) diff --git a/tests/test_h2c_suites/test_bls12_381_G2_ssw_nu.py b/tests/test_h2c_suites/test_bls12_381_G2_ssw_nu.py index 80ff4a4..fe18006 100644 --- a/tests/test_h2c_suites/test_bls12_381_G2_ssw_nu.py +++ b/tests/test_h2c_suites/test_bls12_381_G2_ssw_nu.py @@ -13,9 +13,7 @@ class TestBLS12_381_G2_SSWU_RO(unittest.TestCase): @classmethod def setUpClass(cls): # Load test vectors - test_vectors_path = os.path.join( 
- os.path.dirname(__file__), "../vectors/h2c", "bls12_381_G2_nu.json" - ) + test_vectors_path = os.path.join(os.path.dirname(__file__), "../vectors/h2c", "bls12_381_G2_nu.json") with open(test_vectors_path) as f: cls.test_vectors = json.load(f) @@ -45,9 +43,7 @@ def test_sswu_hash2_curve(self): ) # Encode message to curve - result = BLS12_381_G2Point.encode_to_curve( - msg.encode("utf-8"), b"", True - ) + result = BLS12_381_G2Point.encode_to_curve(msg.encode("utf-8"), b"", True) # Extract computed values computed_P = result["R"] diff --git a/tests/test_h2c_suites/test_bls12_381_G2_ssw_ro.py b/tests/test_h2c_suites/test_bls12_381_G2_ssw_ro.py index f4ab72e..df545b1 100644 --- a/tests/test_h2c_suites/test_bls12_381_G2_ssw_ro.py +++ b/tests/test_h2c_suites/test_bls12_381_G2_ssw_ro.py @@ -10,9 +10,7 @@ class TestBLS12_381_G2_SSWU_RO(unittest.TestCase): @classmethod def setUpClass(cls): # Load test vectors - test_vectors_path = os.path.join( - os.path.dirname(__file__), "../vectors/h2c", "bls12_381_G2_ro.json" - ) + test_vectors_path = os.path.join(os.path.dirname(__file__), "../vectors/h2c", "bls12_381_G2_ro.json") with open(test_vectors_path) as f: cls.test_vectors = json.load(f) @@ -49,9 +47,7 @@ def test_sswu_hash2_curve(self): ) # Encode message to curve - result = BLS12_381_G2_RO.point.encode_to_curve( - msg.encode("utf-8"), b"", True - ) + result = BLS12_381_G2_RO.point.encode_to_curve(msg.encode("utf-8"), b"", True) # Extract computed values computed_P = result["R"] diff --git a/tests/test_h2c_suites/test_e2c_bandersnatch.py b/tests/test_h2c_suites/test_e2c_bandersnatch.py index f52be1a..3c24d0e 100644 --- a/tests/test_h2c_suites/test_e2c_bandersnatch.py +++ b/tests/test_h2c_suites/test_e2c_bandersnatch.py @@ -54,22 +54,10 @@ def test_m2c(): p1 = BandersnatchPoint.map_to_curve(u[1]) # Test vector from specification - assert ( - p0.x - == 45311200032263316917859627542467284358670199398458214934254495151428460867180 - ) - assert ( - p0.y - == 
12776320642587906524824617948027275973876805685686439823724827627303230293583 - ) - assert ( - p1.x - == 4062918070531615925962241074596089620660059154890696073867928698119996156623 - ) - assert ( - p1.y - == 28091649524129975855673249115644895380082395569265826631567705939331162643040 - ) + assert p0.x == 45311200032263316917859627542467284358670199398458214934254495151428460867180 + assert p0.y == 12776320642587906524824617948027275973876805685686439823724827627303230293583 + assert p1.x == 4062918070531615925962241074596089620660059154890696073867928698119996156623 + assert p1.y == 28091649524129975855673249115644895380082395569265826631567705939331162643040 def test_e2c(): @@ -78,11 +66,5 @@ def test_e2c(): u = BandersnatchPoint.encode_to_curve(data) # Test vector from specification - assert ( - u.x - == 26037012954893424526367048031037997009889535281273781660989300420960588198291 - ) - assert ( - u.y - == 2904166584983200306316763312322681981821413355244066354672834649878949825050 - ) + assert u.x == 26037012954893424526367048031037997009889535281273781660989300420960588198291 + assert u.y == 2904166584983200306316763312322681981821413355244066354672834649878949825050 diff --git a/tests/test_ietf/test_ietf_ark.py b/tests/test_ietf/test_ietf_ark.py index fbf4e59..78674ed 100644 --- a/tests/test_ietf/test_ietf_ark.py +++ b/tests/test_ietf/test_ietf_ark.py @@ -57,9 +57,7 @@ def test_ietf_ark(curve_variant, file_prefix, subdir, gamma_len): if "h" in vector: assert input_point.point_to_string().hex() == vector["h"] - proof = IETF_VRF[curve_variant].prove( - alpha, secret_scalar, additional_data, salt - ) + proof = IETF_VRF[curve_variant].prove(alpha, secret_scalar, additional_data, salt) proof_bytes = proof.to_bytes() proof_rt = IETF_VRF[curve_variant].from_bytes(proof_bytes) @@ -75,10 +73,7 @@ def test_ietf_ark(curve_variant, file_prefix, subdir, gamma_len): assert int(proof_s.hex(), 16) == int(vector["proof_s"], 16) if "beta" in vector: - assert ( - 
IETF_VRF[curve_variant].ecvrf_proof_to_hash(proof_bytes).hex() - == vector["beta"] - ) + assert IETF_VRF[curve_variant].ecvrf_proof_to_hash(proof_bytes).hex() == vector["beta"] assert proof.verify(pk_bytes, alpha, additional_data, salt) assert proof_rt.to_bytes() == proof_bytes diff --git a/tests/test_ietf/test_ietf_base.py b/tests/test_ietf/test_ietf_base.py index 95f88e8..0a3d15b 100644 --- a/tests/test_ietf/test_ietf_base.py +++ b/tests/test_ietf/test_ietf_base.py @@ -52,9 +52,7 @@ def test_ietf_base(curve_variant, file_prefix): # Public Key pk_bytes = IETF_VRF[curve_variant].get_public_key(secret_scalar) - proof = IETF_VRF[curve_variant].prove( - alpha, secret_scalar, additional_data - ) + proof = IETF_VRF[curve_variant].prove(alpha, secret_scalar, additional_data) proof_bytes = proof.to_bytes() proof_rt = IETF_VRF[curve_variant].from_bytes(proof_bytes) diff --git a/tests/test_pedersen/test_pedersen_ark.py b/tests/test_pedersen/test_pedersen_ark.py index 30a3b02..cfcd9e2 100644 --- a/tests/test_pedersen/test_pedersen_ark.py +++ b/tests/test_pedersen/test_pedersen_ark.py @@ -52,17 +52,13 @@ def test_pedersen_ietf(curve_variant, file_prefix, subdir): if "h" in vector: assert input_point.point_to_string().hex() == vector["h"] - proof = PedersenVRF[curve_variant].prove( - alpha, secret_scalar, additional_data - ) + proof = PedersenVRF[curve_variant].prove(alpha, secret_scalar, additional_data) proof_bytes = proof.to_bytes() proof_rt = PedersenVRF[curve_variant].from_bytes(proof_bytes) assert pk_bytes.hex() == vector["pk"], "Invalid Public Key" assert proof.output_point.point_to_string().hex() == vector["gamma"] - assert ( - proof.blinded_pk.point_to_string().hex() == vector["proof_pk_com"] - ) + assert proof.blinded_pk.point_to_string().hex() == vector["proof_pk_com"] assert proof.result_point.point_to_string().hex() == vector["proof_r"] assert proof.ok.point_to_string().hex() == vector["proof_ok"] assert proof.s.to_bytes( @@ -73,20 +69,10 @@ def 
test_pedersen_ietf(curve_variant, file_prefix, subdir): (curve_variant.curve.PRIME_FIELD.bit_length() + 7) // 8, curve_variant.curve.ENDIAN, ) == bytes.fromhex(vector["proof_sb"]) - assert ( - PedersenVRF[curve_variant] - .ecvrf_proof_to_hash(proof.output_point.point_to_string()) - .hex() - == vector["beta"] - ) + assert PedersenVRF[curve_variant].ecvrf_proof_to_hash(proof.output_point.point_to_string()).hex() == vector["beta"] if "beta" in vector: - assert ( - PedersenVRF[curve_variant] - .ecvrf_proof_to_hash(proof.output_point.point_to_string()) - .hex() - == vector["beta"] - ) + assert PedersenVRF[curve_variant].ecvrf_proof_to_hash(proof.output_point.point_to_string()).hex() == vector["beta"] assert proof.verify(alpha, additional_data) assert proof_rt.to_bytes() == proof_bytes diff --git a/tests/test_pedersen/test_pedersen_base.py b/tests/test_pedersen/test_pedersen_base.py index 0122c48..6968d3d 100644 --- a/tests/test_pedersen/test_pedersen_base.py +++ b/tests/test_pedersen/test_pedersen_base.py @@ -53,9 +53,7 @@ def test_pedersen_base(curve_variant, file_prefix, slice_end): curve_variant.point.encode_to_curve(alpha) - proof = PedersenVRF[curve_variant].prove( - alpha, secret_scalar, additional_data - ) + proof = PedersenVRF[curve_variant].prove(alpha, secret_scalar, additional_data) proof_bytes = proof.to_bytes() proof_rt = PedersenVRF[curve_variant].from_bytes(proof_bytes) diff --git a/tests/test_ring_vrf/test_ring_vrf.py b/tests/test_ring_vrf/test_ring_vrf.py index 732ce3b..df2b946 100644 --- a/tests/test_ring_vrf/test_ring_vrf.py +++ b/tests/test_ring_vrf/test_ring_vrf.py @@ -24,9 +24,7 @@ def test_ring_proof(): start_time = time.time() ring_root = RingVRF[Bandersnatch].construct_ring_root(keys) ring_time = time.time() - print( - f"\nTime taken for Ring Root Construction: \t\t {ring_time - start_time} seconds" - ) + print(f"\nTime taken for Ring Root Construction: \t\t {ring_time - start_time} seconds") p_k = RingVRF[Bandersnatch].get_public_key(s_k) 
ring_vrf_proof = RingVRF[Bandersnatch].prove(alpha, ad, s_k, p_k, keys) @@ -34,21 +32,13 @@ def test_ring_proof(): proof_rt = RingVRF[Bandersnatch].from_bytes(proof_bytes) end_time = time.time() - print( - f"Time taken for Ring VRF Proof Generation: \t {end_time - ring_time} seconds" - ) + print(f"Time taken for Ring VRF Proof Generation: \t {end_time - ring_time} seconds") assert p_k.hex() == item["pk"], "Invalid Public Key" assert ring_root.to_bytes().hex() == item["ring_pks_com"], "Invalid Ring Root" assert ( ring_vrf_proof.to_bytes().hex() - == item["gamma"] - + item["proof_pk_com"] - + item["proof_r"] - + item["proof_ok"] - + item["proof_s"] - + item["proof_sb"] - + item["ring_proof"] + == item["gamma"] + item["proof_pk_com"] + item["proof_r"] + item["proof_ok"] + item["proof_s"] + item["proof_sb"] + item["ring_proof"] ), "Unexpected Proof" start = time.time() assert ring_vrf_proof.verify(alpha, ad, ring_root), "Verification Failed" diff --git a/tests/test_vectors.py b/tests/test_vectors.py index c518d61..639eb0a 100644 --- a/tests/test_vectors.py +++ b/tests/test_vectors.py @@ -60,32 +60,24 @@ def verify_ietf_vector(vector: dict[str, Any], curve) -> None: # Verify output point matches gamma_bytes = proof.output_point.point_to_string() - assert ( - gamma_bytes == expected_gamma - ), f"gamma mismatch: expected {expected_gamma.hex()}, got {gamma_bytes.hex()}" + assert gamma_bytes == expected_gamma, f"gamma mismatch: expected {expected_gamma.hex()}, got {gamma_bytes.hex()}" # Verify proof challenge challenge_len = curve.curve.CHALLENGE_LENGTH c_bytes = Helpers.int_to_str(proof.c, curve.curve.ENDIAN, challenge_len) - assert ( - c_bytes == expected_c - ), f"challenge mismatch: expected {expected_c.hex()}, got {c_bytes.hex()}" + assert c_bytes == expected_c, f"challenge mismatch: expected {expected_c.hex()}, got {c_bytes.hex()}" # Verify proof response scalar_len = (curve.curve.PRIME_FIELD.bit_length() + 7) // 8 s_bytes = Helpers.int_to_str(proof.s, 
curve.curve.ENDIAN, scalar_len) - assert ( - s_bytes == expected_s - ), f"response mismatch: expected {expected_s.hex()}, got {s_bytes.hex()}" + assert s_bytes == expected_s, f"response mismatch: expected {expected_s.hex()}, got {s_bytes.hex()}" # Verify the proof assert proof.verify(pk, alpha, ad, salt), "Proof verification failed" # Verify output hash beta = IETF_VRF[curve].proof_to_hash(proof.output_point) - assert ( - beta == expected_beta - ), f"beta mismatch: expected {expected_beta.hex()}, got {beta.hex()}" + assert beta == expected_beta, f"beta mismatch: expected {expected_beta.hex()}, got {beta.hex()}" # Bandersnatch IETF vectors @@ -172,9 +164,7 @@ def verify_pedersen_vector(vector: dict[str, Any], curve) -> None: # Verify output point matches gamma_bytes = proof.output_point.point_to_string() - assert ( - gamma_bytes == expected_gamma - ), f"gamma mismatch: expected {expected_gamma.hex()}, got {gamma_bytes.hex()}" + assert gamma_bytes == expected_gamma, f"gamma mismatch: expected {expected_gamma.hex()}, got {gamma_bytes.hex()}" # Verify proof components assert proof.blinded_pk.point_to_string() == expected_pk_com, "pk_com mismatch" @@ -185,23 +175,17 @@ def verify_pedersen_vector(vector: dict[str, Any], curve) -> None: scalar_len = (curve.curve.PRIME_FIELD.bit_length() + 7) // 8 s_bytes = Helpers.int_to_str(proof.s, curve.curve.ENDIAN, scalar_len) - assert ( - s_bytes == expected_s - ), f"s mismatch: expected {expected_s.hex()}, got {s_bytes.hex()}" + assert s_bytes == expected_s, f"s mismatch: expected {expected_s.hex()}, got {s_bytes.hex()}" sb_bytes = Helpers.int_to_str(proof.sb, curve.curve.ENDIAN, scalar_len) - assert ( - sb_bytes == expected_sb - ), f"sb mismatch: expected {expected_sb.hex()}, got {sb_bytes.hex()}" + assert sb_bytes == expected_sb, f"sb mismatch: expected {expected_sb.hex()}, got {sb_bytes.hex()}" # Verify the proof assert proof.verify(alpha, ad), "Proof verification failed" # Verify output hash beta = 
PedersenVRF[curve].proof_to_hash(proof.output_point) - assert ( - beta == expected_beta - ), f"beta mismatch: expected {expected_beta.hex()}, got {beta.hex()}" + assert beta == expected_beta, f"beta mismatch: expected {expected_beta.hex()}, got {beta.hex()}" # Bandersnatch Pedersen vectors @@ -290,9 +274,7 @@ def verify_ring_vector(vector: dict[str, Any], curve) -> None: # Verify output point matches gamma_bytes = proof.pedersen_proof.output_point.point_to_string() - assert ( - gamma_bytes == expected_gamma - ), f"gamma mismatch: expected {expected_gamma.hex()}, got {gamma_bytes.hex()}" + assert gamma_bytes == expected_gamma, f"gamma mismatch: expected {expected_gamma.hex()}, got {gamma_bytes.hex()}" # Construct ring root and verify ring_root = RingVRF[curve].construct_ring_root(ring_pks) @@ -300,9 +282,7 @@ def verify_ring_vector(vector: dict[str, Any], curve) -> None: # Verify output hash beta = RingVRF[curve].proof_to_hash(proof.pedersen_proof.output_point) - assert ( - beta == expected_beta - ), f"beta mismatch: expected {expected_beta.hex()}, got {beta.hex()}" + assert beta == expected_beta, f"beta mismatch: expected {expected_beta.hex()}, got {beta.hex()}" # Bandersnatch Ring vectors @@ -341,12 +321,8 @@ class TestNegativeCases: def test_wrong_public_key_ietf(self): """IETF VRF verification should fail with wrong public key.""" # Generate a valid proof - sk1 = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) - sk2 = bytes.fromhex( - "0202020202020202020202020202020202020202020202020202020202020202" - ) + sk1 = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") + sk2 = bytes.fromhex("0202020202020202020202020202020202020202020202020202020202020202") pk1 = IETF_VRF[Bandersnatch].get_public_key(sk1) pk2 = IETF_VRF[Bandersnatch].get_public_key(sk2) @@ -365,9 +341,7 @@ def test_wrong_public_key_ietf(self): def test_wrong_input_ietf(self): """IETF VRF verification should fail with wrong input.""" 
- sk = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) + sk = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") pk = IETF_VRF[Bandersnatch].get_public_key(sk) alpha1 = b"correct_input" @@ -385,9 +359,7 @@ def test_wrong_input_ietf(self): def test_wrong_ad_ietf(self): """IETF VRF verification should fail with wrong additional data.""" - sk = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) + sk = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") pk = IETF_VRF[Bandersnatch].get_public_key(sk) alpha = b"test_input" @@ -405,9 +377,7 @@ def test_wrong_ad_ietf(self): def test_wrong_input_pedersen(self): """Pedersen VRF verification should fail with wrong input.""" - sk = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) + sk = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") alpha1 = b"correct_input" alpha2 = b"wrong_input" @@ -424,9 +394,7 @@ def test_wrong_input_pedersen(self): def test_wrong_ring_root(self): """Ring VRF verification should fail with wrong ring root.""" - sk = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) + sk = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") pk = RingVRF[Bandersnatch].get_public_key(sk) # Create two different rings @@ -465,44 +433,29 @@ class TestDeterminism: def test_ietf_deterministic(self): """IETF VRF proofs should be deterministic.""" - sk = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) + sk = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") alpha = b"deterministic_test" ad = b"test_ad" proof1 = IETF_VRF[Bandersnatch].prove(alpha, sk, ad) proof2 = IETF_VRF[Bandersnatch].prove(alpha, sk, ad) - assert ( - proof1.output_point.point_to_string() - == 
proof2.output_point.point_to_string() - ) + assert proof1.output_point.point_to_string() == proof2.output_point.point_to_string() assert proof1.c == proof2.c assert proof1.s == proof2.s def test_pedersen_deterministic(self): """Pedersen VRF proofs should be deterministic.""" - sk = bytes.fromhex( - "0101010101010101010101010101010101010101010101010101010101010101" - ) + sk = bytes.fromhex("0101010101010101010101010101010101010101010101010101010101010101") alpha = b"deterministic_test" ad = b"test_ad" proof1 = PedersenVRF[Bandersnatch].prove(alpha, sk, ad) proof2 = PedersenVRF[Bandersnatch].prove(alpha, sk, ad) - assert ( - proof1.output_point.point_to_string() - == proof2.output_point.point_to_string() - ) - assert ( - proof1.blinded_pk.point_to_string() == proof2.blinded_pk.point_to_string() - ) - assert ( - proof1.result_point.point_to_string() - == proof2.result_point.point_to_string() - ) + assert proof1.output_point.point_to_string() == proof2.output_point.point_to_string() + assert proof1.blinded_pk.point_to_string() == proof2.blinded_pk.point_to_string() + assert proof1.result_point.point_to_string() == proof2.result_point.point_to_string() assert proof1.ok.point_to_string() == proof2.ok.point_to_string() assert proof1.s == proof2.s assert proof1.sb == proof2.sb diff --git a/tests/test_verify_rust_proof.py b/tests/test_verify_rust_proof.py new file mode 100644 index 0000000..37d6326 --- /dev/null +++ b/tests/test_verify_rust_proof.py @@ -0,0 +1,257 @@ +import json +import sys +from pathlib import Path + +import pytest + +# Add project root to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from dot_ring import blst +from dot_ring.ring_proof.constants import D_512, D_2048, OMEGA_2048, S_PRIME +from dot_ring.ring_proof.curve.bandersnatch import TwistedEdwardCurve +from dot_ring.ring_proof.pcs import srs +from dot_ring.ring_proof.verify import Verify + +# Import serialization utilities +from tests.utils.arkworks_serde import ( + 
compressed_g1_to_uncompressed_bytes, + compressed_g2_to_uncompressed_bytes, + deserialize_bandersnatch_point, + deserialize_bls12_381_g1, + deserialize_fq_field_element, +) + + +def load_test_parameters() -> dict: + """ + Load deterministic test parameters from JSON file. + + These parameters were extracted once from Rust's test_rng() and saved + for deterministic test execution without needing to rebuild Rust code. + + Returns: + Dictionary with h, seed, result, domain_size, etc. + """ + params_file = Path(__file__).parent / "vectors" / "others" / "test_parameters.json" + with open(params_file) as f: + return json.load(f) + + +def parse_proof_from_json(proof_json: dict) -> tuple: + """ + Parse proof from Rust-generated JSON. + + Returns (proof_tuple, raw_bytes_dict) where: + - proof_tuple: deserialized proof compatible with Python Verify class + - raw_bytes_dict: raw arkworks-serialized bytes for transcript + """ + proof = proof_json["proof"] + + # Parse column commitments (4 × 48-byte G1 points) + col_cmts_hex = proof["column_commitments"] + col_cmts_bytes = bytes.fromhex(col_cmts_hex) + + c_b = deserialize_bls12_381_g1(col_cmts_bytes[0:48]) + c_accip = deserialize_bls12_381_g1(col_cmts_bytes[48:96]) + c_accx = deserialize_bls12_381_g1(col_cmts_bytes[96:144]) + c_accy = deserialize_bls12_381_g1(col_cmts_bytes[144:192]) + + # Parse evaluations at zeta (7 × 32-byte Fq field elements) + cols_at_zeta_hex = proof["columns_at_zeta"] + cols_at_zeta_bytes = bytes.fromhex(cols_at_zeta_hex) + + px_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[0:32]) + py_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[32:64]) + s_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[64:96]) + b_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[96:128]) + accip_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[128:160]) + accx_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[160:192]) + accy_zeta = deserialize_fq_field_element(cols_at_zeta_bytes[192:224]) + 
+ # Parse other proof components + cq_bytes = bytes.fromhex(proof["quotient_commitment"]) + c_q = deserialize_bls12_381_g1(cq_bytes) + l_zeta_omega = deserialize_fq_field_element(bytes.fromhex(proof["lin_at_zeta_omega"])) + + # KZG opening proofs (48-byte G1 points) + phi_zeta = deserialize_bls12_381_g1(bytes.fromhex(proof["agg_at_zeta_proof"])) + phi_zeta_omega = deserialize_bls12_381_g1(bytes.fromhex(proof["lin_at_zeta_omega_proof"])) + + proof_tuple = ( + c_b, + c_accip, + c_accx, + c_accy, + px_zeta, + py_zeta, + s_zeta, + b_zeta, + accip_zeta, + accx_zeta, + accy_zeta, + c_q, + l_zeta_omega, + phi_zeta, + phi_zeta_omega, + ) + + # Store raw bytes for transcript (matching arkworks serialization) + raw_bytes = { + "col_commitments": [ + col_cmts_bytes[0:48], + col_cmts_bytes[48:96], + col_cmts_bytes[96:144], + col_cmts_bytes[144:192], + ], + "quotient_commitment": cq_bytes, + } + + return (proof_tuple, raw_bytes) + + +def parse_verifier_key(vk_json: dict, use_global_srs: bool = True) -> dict: + """ + Parse verifier key from JSON. + + VerifierKey serialization order (384 bytes): + 1. pcs_raw_vk (RawKzgVerifierKey, 240 bytes): + - g1: 48 bytes G1 + - g2: 96 bytes G2 + - tau_g2: 96 bytes G2 + 2. fixed_columns_committed (FixedColumnsCommitted, 144 bytes): + - points[0]: C_px (48 bytes G1) + - points[1]: C_py (48 bytes G1) + - ring_selector: C_s (48 bytes G1) + + If use_global_srs is True, uses the global SRS instead of parsing G1/G2 from vk. 
+ """ + vk_hex = vk_json["verifier_key"]["verification_key"] + vk_bytes = bytes.fromhex(vk_hex) + + # Fixed columns commitments (offsets 240-384) + c_px = deserialize_bls12_381_g1(vk_bytes[240:288]) + c_py = deserialize_bls12_381_g1(vk_bytes[288:336]) + c_s = deserialize_bls12_381_g1(vk_bytes[336:384]) + + # Convert to format expected by Python verifier + from py_ecc.optimized_bls12_381 import normalize as nm + + from dot_ring.ring_proof.helpers import Helpers as H + + if use_global_srs: + # Use the global SRS which has already been updated with Rust values + g1_int = H.to_int(srs.srs.g1_points[0]) + g2_altered = H.altered_points(srs.srs.g2_points) + else: + # Parse from verifier key bytes + kzg_g1 = deserialize_bls12_381_g1(vk_bytes[0:48]) + g1_normalized = nm(kzg_g1) + g1_int = H.to_int(g1_normalized) + + # G2 parsing would go here but is complex + # For now, fall back to using global SRS + g2_altered = H.altered_points(srs.srs.g2_points) + + # Create verifier_key dict in the format expected by Verify class + verifier_key_dict = {"g1": g1_int, "g2": g2_altered, "commitments": [H.to_int(nm(c_px)), H.to_int(nm(c_py)), H.to_int(nm(c_s))]} + + return {"fixed_cols": [c_px, c_py, c_s], "verifier_key": verifier_key_dict} + + +@pytest.fixture(scope="module") +def rust_parameters(): + """Load deterministic test parameters from JSON.""" + return load_test_parameters() + + +@pytest.fixture(scope="module") +def proof_data(): + """Load Rust-generated proof.""" + proof_path = Path(__file__).parent / "vectors" / "others" / "ring_proof_rust_generated.json" + with open(proof_path) as f: + return json.load(f) + + +def test_verify_rust_generated_proof(rust_parameters, proof_data): + """ + Test verification of Rust-generated ring proof. + + This test demonstrates that the Python verifier can verify + proofs generated by the Rust reference implementation. 
+ """ + params = rust_parameters + + # Extract and update SRS from verifier key + vk_compressed = bytes.fromhex(proof_data["verifier_key"]["verification_key"]) + g1_0_bytes = vk_compressed[0:48] + g2_0_bytes = vk_compressed[48:144] + g2_1_bytes = vk_compressed[144:240] + + g1_0_blst = blst.P1(blst.P1_Affine(g1_0_bytes)) + g2_0_blst = blst.P2(blst.P2_Affine(g2_0_bytes)) + g2_1_blst = blst.P2(blst.P2_Affine(g2_1_bytes)) + + original_g1 = srs.srs.blst_g1 + original_g2 = srs.srs.blst_g2 + + srs.srs.blst_g1 = [g1_0_blst] + list(original_g1[1:]) + srs.srs.blst_g2 = [g2_0_blst, g2_1_blst] + list(original_g2[2:]) + + # Compute domain based on size + domain_size = params["domain_size"] + if domain_size == 512: + domain = D_512 + elif domain_size == 1024: + omega_1024 = pow(OMEGA_2048, 2048 // 1024, S_PRIME) + domain = [pow(omega_1024, i, S_PRIME) for i in range(1024)] + elif domain_size == 2048: + domain = D_2048 + else: + raise ValueError(f"Unsupported domain size: {domain_size}") + + # Deserialize Bandersnatch points + seed_x_bytes = bytes.fromhex(params["seed"]["x"]) + seed_y_bytes = bytes.fromhex(params["seed"]["y"]) + seed_point = deserialize_bandersnatch_point(seed_x_bytes, seed_y_bytes) + + result_x_bytes = bytes.fromhex(params["result"]["x"]) + result_y_bytes = bytes.fromhex(params["result"]["y"]) + result_point = deserialize_bandersnatch_point(result_x_bytes, result_y_bytes) + + result_ark_bytes = result_x_bytes + result_y_bytes + result_plus_seed = TwistedEdwardCurve.add(result_point, seed_point) + + # Parse proof and verifier key + proof_tuple, raw_bytes = parse_proof_from_json(proof_data) + vk_dict = parse_verifier_key(proof_data) + + # Prepare raw bytes for transcript + vk_uncompressed = ( + compressed_g1_to_uncompressed_bytes(vk_compressed[0:48]) + + compressed_g2_to_uncompressed_bytes(vk_compressed[48:144]) + + compressed_g2_to_uncompressed_bytes(vk_compressed[144:240]) + + compressed_g1_to_uncompressed_bytes(vk_compressed[240:288]) + + 
"""
Arkworks serialization/deserialization utilities.

Functions to convert between arkworks compressed format and Python types
for BLS12-381 (G1, G2) and Bandersnatch points.
"""

from py_ecc.optimized_bls12_381 import FQ, FQ2


def _sqrt_mod_p(n: int, p: int) -> int | None:
    """Return a square root of ``n`` modulo the odd prime ``p``, or None.

    Uses the fast ``(p + 1) // 4`` exponentiation when p ≡ 3 (mod 4) and the
    general Tonelli-Shanks algorithm otherwise.  The general path is required
    for the Bandersnatch base field (the BLS12-381 scalar field), which is
    ≡ 1 (mod 4), so the shortcut exponent does NOT yield a valid root there.

    Returns:
        A root r with r*r ≡ n (mod p), or None if n is a quadratic non-residue.
    """
    n %= p
    if n == 0:
        return 0
    # Euler's criterion: bail out early on non-residues so callers can
    # distinguish "invalid encoding" from a wrong root.
    if pow(n, (p - 1) // 2, p) != 1:
        return None
    if p % 4 == 3:
        return pow(n, (p + 1) // 4, p)

    # Tonelli-Shanks: write p - 1 = q * 2^s with q odd.
    q, s = p - 1, 0
    while q % 2 == 0:
        q //= 2
        s += 1

    # Find any quadratic non-residue z (needed as the loop's correction base).
    z = 2
    while pow(z, (p - 1) // 2, p) != p - 1:
        z += 1

    m = s
    c = pow(z, q, p)
    t = pow(n, q, p)
    r = pow(n, (q + 1) // 2, p)
    while t != 1:
        # Find least i (0 < i < m) such that t^(2^i) == 1.
        i, t2 = 0, t
        while t2 != 1:
            t2 = (t2 * t2) % p
            i += 1
        b = pow(c, 1 << (m - i - 1), p)
        m = i
        c = (b * b) % p
        t = (t * c) % p
        r = (r * b) % p
    return r


def deserialize_fq_field_element(data: bytes) -> int:
    """
    Deserialize arkworks Fq field element (32 bytes, little-endian).

    Arkworks uses Montgomery form internally but serializes as regular integers.
    """
    return int.from_bytes(data, byteorder="little")


def deserialize_bandersnatch_point(x_bytes: bytes, y_bytes: bytes) -> tuple[int, int]:
    """
    Deserialize Bandersnatch point from arkworks compressed format.

    Args:
        x_bytes: x-coordinate (32 bytes, little-endian Fq)
        y_bytes: y-coordinate (32 bytes, little-endian Fq)

    Returns:
        (x, y) tuple as Python integers
    """
    x = deserialize_fq_field_element(x_bytes)
    y = deserialize_fq_field_element(y_bytes)
    return (x, y)


def compressed_bandersnatch_to_uncompressed_bytes(compressed: bytes) -> bytes:
    """
    Convert compressed Bandersnatch point (32 bytes) to uncompressed (64 bytes) for transcript.

    Arkworks compressed format for Bandersnatch:
    - 32 bytes: x-coordinate (little-endian) with flags in high bits
    - y-coordinate is recovered from curve equation

    Arkworks uncompressed format:
    - 32 bytes: x-coordinate (little-endian, no flags)
    - 32 bytes: y-coordinate (little-endian)

    Args:
        compressed: 32-byte compressed point

    Returns:
        64-byte uncompressed point (x || y)

    Raises:
        ValueError: if the input length is wrong or x is not on the curve.
    """
    if len(compressed) != 32:
        raise ValueError(f"Expected 32 bytes for compressed Bandersnatch point, got {len(compressed)}")

    # Check flags in last byte (little-endian, so flags are at the end)
    flags = compressed[-1]
    is_infinity = (flags & 0x40) != 0
    is_positive = (flags & 0x80) == 0  # Sign bit

    if is_infinity:
        # Point at infinity
        return b"\x00" * 64

    # Extract x-coordinate (remove flag bits from last byte)
    x_bytes = compressed[:-1] + bytes([compressed[-1] & 0x3F])
    x = int.from_bytes(x_bytes, "little")

    # Recover y from Twisted Edwards curve equation: a*x^2 + y^2 = 1 + d*x^2*y^2
    # Rearranged: y^2 = (1 - a*x^2) / (1 - d*x^2)
    # Bandersnatch parameters from the curve specification
    from dot_ring.curve.specs.bandersnatch import BandersnatchParams

    a = BandersnatchParams.EDWARDS_A
    d = BandersnatchParams.EDWARDS_D
    p = BandersnatchParams.MODULUS

    x_squared = (x * x) % p
    numerator = (1 - a * x_squared) % p
    denominator = (1 - d * x_squared) % p
    denominator_inv = pow(denominator, p - 2, p)  # Fermat's little theorem
    y_squared = (numerator * denominator_inv) % p

    # The Bandersnatch base field is the BLS12-381 scalar field, which is
    # ≡ 1 (mod 4), so the common (p+1)//4 shortcut is NOT a valid square
    # root here; use the general Tonelli-Shanks path instead.
    y = _sqrt_mod_p(y_squared, p)
    if y is None:
        raise ValueError("x-coordinate is not on the Bandersnatch curve (y^2 is a non-residue)")

    # Check which square root to use based on sign bit
    # The sign bit indicates whether y is positive or negative
    if (y > (p - 1) // 2) != is_positive:
        y = p - y

    # Serialize as uncompressed: x (32 bytes) || y (32 bytes), little-endian, no flags
    x_uncompressed = x.to_bytes(32, "little")
    y_bytes = y.to_bytes(32, "little")

    return x_uncompressed + y_bytes


def deserialize_bls12_381_g1(data: bytes) -> tuple:
    """
    Deserialize BLS12-381 G1 point from arkworks compressed format.

    Layout implemented here (48 bytes, big-endian x):
    - Bit 7 (MSB of first byte): compression flag (1 = compressed)
    - Bit 6: infinity flag (1 = point at infinity)
    - Bit 5: y-coordinate sign (lexicographically-largest flag)
    - Remaining bits: x-coordinate

    Returns:
        py_ecc Jacobian tuple (FQ, FQ, FQ)

    Raises:
        ValueError: on wrong length, uncompressed input, or an x that is
        not on the curve (previously this returned a garbage point silently).
    """
    if len(data) != 48:
        raise ValueError(f"Expected 48 bytes for G1 point, got {len(data)}")

    # Check flags in first byte
    flags = data[0]
    is_compressed = (flags & 0x80) != 0
    is_infinity = (flags & 0x40) != 0
    y_parity = (flags & 0x20) != 0

    if is_infinity:
        # Point at infinity - return identity element
        return (FQ(0), FQ(1), FQ(0))

    if not is_compressed:
        # Uncompressed format (not typically used by arkworks for G1)
        raise ValueError("Uncompressed G1 points not supported")

    # Extract x-coordinate (remove flag bits from first byte)
    data_clean = bytes([data[0] & 0x1F]) + data[1:]
    x = int.from_bytes(data_clean, "big")

    # Recover y from curve equation: y^2 = x^3 + 4
    from py_ecc.bls12_381 import bls12_381_pairing as pairing

    field_modulus = pairing.field_modulus

    x_fq = FQ(x)
    y_squared = x_fq * x_fq * x_fq + FQ(4)

    # BLS12-381 base field satisfies p ≡ 3 (mod 4), so y = y^2 ^ ((p+1)/4)
    y = y_squared ** ((field_modulus + 1) // 4)

    # Reject invalid encodings: if y^2 was a non-residue the exponentiation
    # above yields a value that is NOT a square root.
    if y * y != y_squared:
        raise ValueError("x-coordinate is not on the BLS12-381 G1 curve")

    # Choose correct square root based on sign bit.
    # The sign bit indicates if y > (p-1)/2 (lexicographically largest), not if y is odd!
    # This follows the ZCash BLS12-381 spec and matches arkworks serialization.
    y_is_lexicographically_largest = int(y) > (field_modulus - 1) // 2

    # If the computed y doesn't match the sign bit, use the other square root
    if y_is_lexicographically_largest != y_parity:
        y = FQ(field_modulus) - y

    return (x_fq, y, FQ(1))


def compressed_g1_to_uncompressed_bytes(compressed: bytes) -> bytes:
    """
    Convert compressed BLS12-381 G1 point to uncompressed bytes for transcript.

    Args:
        compressed: 48-byte compressed point

    Returns:
        96-byte uncompressed point (x || y, no flags)
    """
    # Deserialize to get (x, y) coordinates
    point = deserialize_bls12_381_g1(compressed)
    x_fq, y_fq, z_fq = point

    # Handle point at infinity
    if z_fq == FQ(0):
        return b"\x00" * 96

    # Convert to affine coordinates (already in affine if z=1)
    x = int(x_fq)
    y = int(y_fq)

    # Serialize as uncompressed: x (48 bytes) || y (48 bytes), big-endian, no flags
    x_bytes = x.to_bytes(48, "big")
    y_bytes = y.to_bytes(48, "big")

    return x_bytes + y_bytes


def compressed_g2_to_uncompressed_bytes(compressed: bytes) -> bytes:
    """
    Convert compressed BLS12-381 G2 point to uncompressed bytes for transcript.

    Args:
        compressed: 96-byte compressed G2 point (arkworks format: c1 || c0)

    Returns:
        192-byte uncompressed G2 point (arkworks format: x_c1 || x_c0 || y_c1 || y_c0, no flags)
    """
    if len(compressed) != 96:
        raise ValueError(f"Expected 96 bytes for compressed G2 point, got {len(compressed)}")

    # Deserialize the compressed G2 point
    g2_point = deserialize_bls12_381_g2(compressed)

    # g2_point is (x, y, z) in Jacobian coordinates where x, y are FQ2 elements.
    # Convert to affine (x, y) first.
    from py_ecc.optimized_bls12_381 import normalize as nm

    affine = nm(g2_point)
    x_fq2, y_fq2 = affine

    # Each FQ2 element has two coefficients (c0, c1)
    x_c0, x_c1 = x_fq2.coeffs
    y_c0, y_c1 = y_fq2.coeffs

    # Serialize as uncompressed: arkworks uses c1 || c0 || c1 || c0 format.
    # Each coefficient is 48 bytes, big-endian, no flags.
    x_c0_bytes = int(x_c0).to_bytes(48, "big")
    x_c1_bytes = int(x_c1).to_bytes(48, "big")
    y_c0_bytes = int(y_c0).to_bytes(48, "big")
    y_c1_bytes = int(y_c1).to_bytes(48, "big")

    return x_c1_bytes + x_c0_bytes + y_c1_bytes + y_c0_bytes


def legendre_fq(a: int, p: int) -> int:
    """Compute Legendre symbol (a/p) = a^((p-1)/2) mod p.

    Returns:
        1 if a is a quadratic residue
        -1 if a is a quadratic non-residue
        0 if a is 0
    """
    if a == 0:
        return 0
    result = pow(a, (p - 1) // 2, p)
    return -1 if result == p - 1 else result


def sqrt_fq2(a) -> tuple[int, int] | None:
    """
    Compute square root of FQ2 element using arkworks algorithm.

    Implements Algorithm 8 from https://eprint.iacr.org/2012/685.pdf (page 15)
    This matches arkworks' implementation exactly.

    Args:
        a: FQ2 element (can be either regular or optimized FQ2)

    Returns:
        Tuple (c0, c1) representing the square root, or None if no root exists.
    """
    from py_ecc.bls12_381 import bls12_381_pairing as pairing

    field_modulus = pairing.field_modulus

    # Extract coefficients (works for both FQ2 types)
    c0, c1 = a.coeffs
    c0_int = int(c0) % field_modulus
    c1_int = int(c1) % field_modulus

    # Special case: a = 0
    if c0_int == 0 and c1_int == 0:
        return (0, 0)

    # If c1 is zero, return sqrt(c0) + 0*i
    if c1_int == 0:
        # Use Fp square root: since p ≡ 3 mod 4, sqrt(x) = x^((p+1)/4)
        sqrt_c0 = pow(c0_int, (field_modulus + 1) // 4, field_modulus)
        if pow(sqrt_c0, 2, field_modulus) == c0_int:
            return (sqrt_c0, 0)
        return None

    # Compute alpha = norm(a) = c0^2 - β*c1^2 where β is the non-residue
    # For BLS12-381, Fp2 = Fp[X]/(X^2 + 1), so β = -1
    # Therefore: norm = c0^2 - (-1)*c1^2 = c0^2 + c1^2
    alpha = (c0_int * c0_int + c1_int * c1_int) % field_modulus

    # Compute sqrt(alpha)
    sqrt_alpha = pow(alpha, (field_modulus + 1) // 4, field_modulus)

    # Verify sqrt(alpha) is correct
    if pow(sqrt_alpha, 2, field_modulus) != alpha:
        return None

    # Compute two_inv = 1/2
    two_inv = pow(2, field_modulus - 2, field_modulus)

    # Compute delta = (sqrt_alpha + c0) / 2
    delta = ((sqrt_alpha + c0_int) * two_inv) % field_modulus

    # Check if delta is a quadratic non-residue
    if legendre_fq(delta, field_modulus) == -1:
        # delta = (c0 - sqrt_alpha) / 2
        delta = ((c0_int - sqrt_alpha) * two_inv) % field_modulus

    # Compute c0_result = sqrt(delta)
    c0_result = pow(delta, (field_modulus + 1) // 4, field_modulus)

    # Verify c0_result
    if pow(c0_result, 2, field_modulus) != delta:
        return None

    # Compute c0_inv = 1/c0_result
    if c0_result == 0:
        return None
    c0_inv = pow(c0_result, field_modulus - 2, field_modulus)

    # Compute c1_result = c1 / (2 * c0_result) = c1 * two_inv * c0_inv
    c1_result = (c1_int * two_inv * c0_inv) % field_modulus

    # Verify the candidate is actually the square root by checking the coefficients
    # (c0_result + c1_result*X)^2 = c0_result^2 + c1_result^2 * X^2 + 2*c0_result*c1_result*X
    # Using X^2 = -1: = c0_result^2 - c1_result^2 + 2*c0_result*c1_result*X
    sqrt_c0_squared = (c0_result * c0_result - c1_result * c1_result) % field_modulus
    sqrt_c1_squared = (2 * c0_result * c1_result) % field_modulus

    if sqrt_c0_squared == c0_int and sqrt_c1_squared == c1_int:
        return (c0_result, c1_result)

    return None


def deserialize_bls12_381_g2(data: bytes) -> tuple:
    """
    Deserialize BLS12-381 G2 point from arkworks compressed format.

    G2 points are 96 bytes (2 × 48-byte Fp2 elements).
    Similar compression scheme as G1 but for Fp2 field.

    Raises:
        ValueError: on wrong length, uncompressed input, or an x with no
        valid y on the curve.
    """
    if len(data) != 96:
        raise ValueError(f"Expected 96 bytes for G2 point, got {len(data)}")

    # G2 uses Fp2 = Fp[u]/(u^2 + 1)
    # Each coordinate is (c0, c1) where coordinate = c0 + c1*u

    flags = data[0]
    is_compressed = (flags & 0x80) != 0
    is_infinity = (flags & 0x40) != 0
    y_parity = (flags & 0x20) != 0

    if is_infinity:
        return (FQ2([0, 0]), FQ2([1, 0]), FQ2([0, 0]))

    if not is_compressed:
        raise ValueError("Uncompressed G2 points not supported")

    # Extract x-coordinate: arkworks serializes Fp2 as c1 || c0 (c1 first, c0 second)
    # Each coefficient is 48 bytes, big-endian
    x_c1_bytes = bytes([data[0] & 0x1F]) + data[1:48]  # c1 in bytes 0-47
    x_c0_bytes = data[48:96]  # c0 in bytes 48-95

    x_c0 = int.from_bytes(x_c0_bytes, "big")
    x_c1 = int.from_bytes(x_c1_bytes, "big")

    x_fq2 = FQ2([x_c0, x_c1])

    # Recover y from curve equation: y^2 = x^3 + 4(1 + u)
    from py_ecc.bls12_381 import bls12_381_pairing as pairing

    field_modulus = pairing.field_modulus

    # G2 curve equation: y^2 = x^3 + 4(1 + u)
    b = FQ2([4, 4])  # 4(1 + u) = 4 + 4u
    y_squared = x_fq2 * x_fq2 * x_fq2 + b

    # Compute square root in Fp2
    y_sqrt = sqrt_fq2(y_squared)

    if y_sqrt is None:
        raise ValueError("No square root exists for y^2")

    # y_sqrt is (c0, c1) as integers
    c0, c1 = y_sqrt

    # Choose correct root based on the sign flag.
    # "Lexicographically largest" on Fp2 compares c1 first, then falls back
    # to c0 when c1 == 0 (this matches arkworks' Fp2 ordering; the previous
    # version ignored the c1 == 0 tie-break and picked an arbitrary root).
    half = (field_modulus - 1) // 2
    if c1 != 0:
        y_is_lexicographically_largest = c1 > half
    else:
        y_is_lexicographically_largest = c0 > half

    if y_is_lexicographically_largest != y_parity:
        # Negate: -y = (p - c0, p - c1)
        c0 = (field_modulus - c0) % field_modulus
        c1 = (field_modulus - c1) % field_modulus

    y_fq2 = FQ2([c0, c1])
    return (x_fq2, y_fq2, FQ2([1, 0]))
Profiler( - func.__name__, save_stats=save_stats, sort_by=sort_by, limit=limit - ): + with Profiler(func.__name__, save_stats=save_stats, sort_by=sort_by, limit=limit): return func(*args, **kwargs) return wrapper diff --git a/tests/vectors/others/test_parameters.json b/tests/vectors/others/test_parameters.json new file mode 100644 index 0000000..5f7b9e8 --- /dev/null +++ b/tests/vectors/others/test_parameters.json @@ -0,0 +1,16 @@ +{ + "domain_size": 1024, + "h": { + "x": "fc08f51fded9706747d310226bacbe992452f1c2248a82813e768f0199fbc824", + "y": "d64dcb4cca8d299d9248582b3c473094c6784f7b60be06d4a754ee6e3b73a241" + }, + "result": { + "x": "d3cc691ca41245cd5f2b0f296fa9e92160c0c1487a0284dd2b0ee0c5f38cc962", + "y": "34ece6c3d54f571e5e0129673b49b3ac6df406cf84551f2c400111123ecec941" + }, + "ring_size": 1024, + "seed": { + "x": "99b606929820569a878cb565828cfb458540987bf1d0f32e2397b09d6c652d4e", + "y": "1f4f86a282b593962e4b48ecce56f77adb3ed6fdf3effbf75adb06d6b19ce462" + } +} diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..a575f02 --- /dev/null +++ b/tox.ini @@ -0,0 +1,16 @@ +[tox] +envlist = py311, py312, py313, py314 +isolated_build = True + +[testenv] +description = Run unit tests +deps = + pytest>=8.0.0 + pytest-cov>=4.0.0 +commands = + pytest {posargs:tests} + +[testenv:py314] +description = Run unit tests on Python 3.14 (allowed to fail) +allowlist_externals = * +ignore_outcome = True diff --git a/uv.lock b/uv.lock index 9280d2d..43471e4 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ version = 1 revision = 3 -requires-python = ">=3.12" +requires-python = ">=3.11" [[package]] name = "annotated-types" @@ -40,6 +40,19 @@ version = "7.12.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = 
"2025-11-18T13:34:20.766Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/0c/0dfe7f0487477d96432e4815537263363fb6dd7289743a796e8e51eabdf2/coverage-7.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa124a3683d2af98bd9d9c2bfa7a5076ca7e5ab09fdb96b81fa7d89376ae928f", size = 217535, upload-time = "2025-11-18T13:32:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f5/f9a4a053a5bbff023d3bec259faac8f11a1e5a6479c2ccf586f910d8dac7/coverage-7.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d93fbf446c31c0140208dcd07c5d882029832e8ed7891a39d6d44bd65f2316c3", size = 218044, upload-time = "2025-11-18T13:32:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/95/c5/84fc3697c1fa10cd8571919bf9693f693b7373278daaf3b73e328d502bc8/coverage-7.12.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:52ca620260bd8cd6027317bdd8b8ba929be1d741764ee765b42c4d79a408601e", size = 248440, upload-time = "2025-11-18T13:32:12.536Z" }, + { url = "https://files.pythonhosted.org/packages/f4/36/2d93fbf6a04670f3874aed397d5a5371948a076e3249244a9e84fb0e02d6/coverage-7.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f3433ffd541380f3a0e423cff0f4926d55b0cc8c1d160fdc3be24a4c03aa65f7", size = 250361, upload-time = "2025-11-18T13:32:13.852Z" }, + { url = "https://files.pythonhosted.org/packages/5d/49/66dc65cc456a6bfc41ea3d0758c4afeaa4068a2b2931bf83be6894cf1058/coverage-7.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f7bbb321d4adc9f65e402c677cd1c8e4c2d0105d3ce285b51b4d87f1d5db5245", size = 252472, upload-time = "2025-11-18T13:32:15.068Z" }, + { url = "https://files.pythonhosted.org/packages/35/1f/ebb8a18dffd406db9fcd4b3ae42254aedcaf612470e8712f12041325930f/coverage-7.12.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:22a7aade354a72dff3b59c577bfd18d6945c61f97393bc5fb7bd293a4237024b", size = 248592, upload-time = "2025-11-18T13:32:16.328Z" }, + { url = "https://files.pythonhosted.org/packages/da/a8/67f213c06e5ea3b3d4980df7dc344d7fea88240b5fe878a5dcbdfe0e2315/coverage-7.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3ff651dcd36d2fea66877cd4a82de478004c59b849945446acb5baf9379a1b64", size = 250167, upload-time = "2025-11-18T13:32:17.687Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/e52aef68154164ea40cc8389c120c314c747fe63a04b013a5782e989b77f/coverage-7.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:31b8b2e38391a56e3cea39d22a23faaa7c3fc911751756ef6d2621d2a9daf742", size = 248238, upload-time = "2025-11-18T13:32:19.2Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a4/4d88750bcf9d6d66f77865e5a05a20e14db44074c25fd22519777cb69025/coverage-7.12.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:297bc2da28440f5ae51c845a47c8175a4db0553a53827886e4fb25c66633000c", size = 247964, upload-time = "2025-11-18T13:32:21.027Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6b/b74693158899d5b47b0bf6238d2c6722e20ba749f86b74454fac0696bb00/coverage-7.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ff7651cc01a246908eac162a6a86fc0dbab6de1ad165dfb9a1e2ec660b44984", size = 248862, upload-time = "2025-11-18T13:32:22.304Z" }, + { url = "https://files.pythonhosted.org/packages/18/de/6af6730227ce0e8ade307b1cc4a08e7f51b419a78d02083a86c04ccceb29/coverage-7.12.0-cp311-cp311-win32.whl", hash = "sha256:313672140638b6ddb2c6455ddeda41c6a0b208298034544cfca138978c6baed6", size = 220033, upload-time = "2025-11-18T13:32:23.714Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/e7f63021a7c4fe20994359fcdeae43cbef4a4d0ca36a5a1639feeea5d9e1/coverage-7.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a1783ed5bd0d5938d4435014626568dc7f93e3cb99bc59188cc18857c47aa3c4", size = 220966, upload-time = "2025-11-18T13:32:25.599Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/e8/deae26453f37c20c3aa0c4433a1e32cdc169bf415cce223a693117aa3ddd/coverage-7.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:4648158fd8dd9381b5847622df1c90ff314efbfc1df4550092ab6013c238a5fc", size = 219637, upload-time = "2025-11-18T13:32:27.265Z" }, { url = "https://files.pythonhosted.org/packages/02/bf/638c0427c0f0d47638242e2438127f3c8ee3cfc06c7fdeb16778ed47f836/coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647", size = 217704, upload-time = "2025-11-18T13:32:28.906Z" }, { url = "https://files.pythonhosted.org/packages/08/e1/706fae6692a66c2d6b871a608bbde0da6281903fa0e9f53a39ed441da36a/coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736", size = 218064, upload-time = "2025-11-18T13:32:30.161Z" }, { url = "https://files.pythonhosted.org/packages/a9/8b/eb0231d0540f8af3ffda39720ff43cb91926489d01524e68f60e961366e4/coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60", size = 249560, upload-time = "2025-11-18T13:32:31.835Z" }, @@ -108,12 +121,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" }, ] +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + [[package]] name = "cython" version = "3.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/83/36/cce2972e13e83ffe58bc73bfd9d37340b5e5113e8243841a57511c7ae1c2/cython-3.2.1.tar.gz", hash = 
"sha256:2be1e4d0cbdf7f4cd4d9b8284a034e1989b59fd060f6bd4d24bf3729394d2ed8", size = 3270455, upload-time = "2025-11-12T19:02:59.847Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/48/d4/ba7b9f341ec168de78bd659600e04bb7de3b2d069bf98b2178a135e88ea4/cython-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cb32c650e7f4476941d1f735cae75a2067d5e3279576273bb8802e8ea907222", size = 2949720, upload-time = "2025-11-12T19:03:17.492Z" }, + { url = "https://files.pythonhosted.org/packages/ad/47/c42417f424c0b928361f48d7dd0ae72716ee21f647b73ceb16f66b98663e/cython-3.2.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a2b306813d7f28aa0a2c3e4e63ada1427a8109917532df942cd5429db228252", size = 3242127, upload-time = "2025-11-12T19:03:19.227Z" }, + { url = "https://files.pythonhosted.org/packages/e6/fc/1040460889129551649ec35be45e05169871fbcf71bd8e13c533e86f9468/cython-3.2.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0959d9a36d4f004ce63acc1474b3c606745af98b65e8ae709efd0c10988e9d6b", size = 3377094, upload-time = "2025-11-12T19:03:21.25Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f2/8c754298eefa40e21af0ae3592837c6e71254900d5aea1c8859e96b11de5/cython-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:60c62e734421365135cc2842013d883136054a26c617c001be494235edfc447a", size = 2767824, upload-time = "2025-11-12T19:03:23.317Z" }, { url = "https://files.pythonhosted.org/packages/ee/0e/19d5041b87f98ed19c94c388607cd27c1f7458078c3bad5de2dead55b2e1/cython-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ea5097d97afd2ab14e98637b7033eba5146de29a5dedf89f5e946076396ab891", size = 2966736, upload-time = "2025-11-12T19:03:25.064Z" }, { url = "https://files.pythonhosted.org/packages/84/b8/bcc36d9d2464348106984956608a52a42a01ab44ea64031207dffdebc078/cython-3.2.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:a4bf12de0475bb6a21e2336a4a04dc4a2b4dd0507a2a3c703e045f3484266605", size = 3221633, upload-time = "2025-11-12T19:03:26.754Z" }, { url = "https://files.pythonhosted.org/packages/79/20/7d4807fe4ebcef9f20f2e5f93312d0f5d02f9f76524fd4e37706d04e83f7/cython-3.2.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18c64a0f69a1b8164de70ec7efc72250c589fec21519170de21582300f6aaed9", size = 3389542, upload-time = "2025-11-12T19:03:28.656Z" }, @@ -147,6 +169,26 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/bd/d4/16916f3dc20a3f5455b63c35dcb260b3716f59ce27a93586804e70e431d5/cytoolz-1.1.0.tar.gz", hash = "sha256:13a7bf254c3c0d28b12e2290b82aed0f0977a4c2a2bf84854fcdc7796a29f3b0", size = 642510, upload-time = "2025-10-19T00:44:56.174Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/69/82/edf1d0c32b6222f2c22e5618d6db855d44eb59f9b6f22436ff963c5d0a5c/cytoolz-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dba8e5a8c6e3c789d27b0eb5e7ce5ed7d032a7a9aae17ca4ba5147b871f6e327", size = 1314345, upload-time = "2025-10-19T00:40:13.273Z" }, + { url = "https://files.pythonhosted.org/packages/2d/b5/0e3c1edaa26c2bd9db90cba0ac62c85bbca84224c7ae1c2e0072c4ea64c5/cytoolz-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:44b31c05addb0889167a720123b3b497b28dd86f8a0aeaf3ae4ffa11e2c85d55", size = 989259, upload-time = "2025-10-19T00:40:15.196Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/e2b2ee9fc684867e817640764ea5807f9d25aa1e7bdba02dd4b249aab0f7/cytoolz-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:653cb18c4fc5d8a8cfce2bce650aabcbe82957cd0536827367d10810566d5294", size = 986551, upload-time = "2025-10-19T00:40:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/39/9f/4e8ee41acf6674f10a9c2c9117b2f219429a5a0f09bba6135f34ca4f08a6/cytoolz-1.1.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:853a5b4806915020c890e1ce70cc056bbc1dd8bc44f2d74d555cccfd7aefba7d", size = 2688378, upload-time = "2025-10-19T00:40:18.552Z" }, + { url = "https://files.pythonhosted.org/packages/78/94/ef006f3412bc22444d855a0fc9ecb81424237fb4e5c1a1f8f5fb79ac978f/cytoolz-1.1.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7b44e9de86bea013fe84fd8c399d6016bbb96c37c5290769e5c99460b9c53e5", size = 2798299, upload-time = "2025-10-19T00:40:20.191Z" }, + { url = "https://files.pythonhosted.org/packages/df/aa/365953926ee8b4f2e07df7200c0d73632155908c8867af14b2d19cc9f1f7/cytoolz-1.1.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:098d628a801dc142e9740126be5624eb7aef1d732bc7a5719f60a2095547b485", size = 2639311, upload-time = "2025-10-19T00:40:22.289Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ee/62beaaee7df208f22590ad07ef8875519af49c52ca39d99460b14a00f15a/cytoolz-1.1.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:779ee4096ed7a82cffab89372ffc339631c285079dbf33dbe7aff1f6174985df", size = 2979532, upload-time = "2025-10-19T00:40:24.006Z" }, + { url = "https://files.pythonhosted.org/packages/c5/04/2211251e450bed111ada1194dc42c461da9aea441de62a01e4085ea6de9f/cytoolz-1.1.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f2ce18dd99533d077e9712f9faa852f389f560351b1efd2f2bdb193a95eddde2", size = 3018632, upload-time = "2025-10-19T00:40:26.175Z" }, + { url = "https://files.pythonhosted.org/packages/ed/a2/4a3400e4d07d3916172bf74fede08020d7b4df01595d8a97f1e9507af5ae/cytoolz-1.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac266a34437812cf841cecbfe19f355ab9c3dd1ef231afc60415d40ff12a76e4", size = 2788579, upload-time = "2025-10-19T00:40:27.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/82/bb88caa53a41f600e7763c517d50e2efbbe6427ea395716a92b83f44882a/cytoolz-1.1.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1920b9b9c13d60d0bb6cd14594b3bce0870022eccb430618c37156da5f2b7a55", size = 2593024, upload-time = "2025-10-19T00:40:29.601Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/8b25e59570da16c7a0f173b8c6ec0aa6f3abd47fd385c007485acb459896/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47caa376dafd2bdc29f8a250acf59c810ec9105cd6f7680b9a9d070aae8490ec", size = 2715304, upload-time = "2025-10-19T00:40:31.151Z" }, + { url = "https://files.pythonhosted.org/packages/d4/56/faec7696f235521b926ffdf92c102f5b029f072d28e1020364e55b084820/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5ab2c97d8aaa522b038cca9187b1153347af22309e7c998b14750c6fdec7b1cb", size = 2654461, upload-time = "2025-10-19T00:40:32.884Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/f790ed167c04b8d2a33bed30770a9b7066fc4f573321d797190e5f05685f/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4bce006121b120e8b359244ee140bb0b1093908efc8b739db8dbaa3f8fb42139", size = 2672077, upload-time = "2025-10-19T00:40:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b3/80b8183e7eee44f45bfa3cdd3ebdadf3dd43ffc686f96d442a6c4dded45d/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fc0f1e4e9bb384d26e73c6657bbc26abdae4ff66a95933c00f3d578be89181b", size = 2881589, upload-time = "2025-10-19T00:40:36.315Z" }, + { url = "https://files.pythonhosted.org/packages/8f/05/ac5ba5ddb88a3ba7ecea4bf192194a838af564d22ea7a4812cbb6bd106ce/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:dd3f894ff972da1994d06ac6157d74e40dda19eb31fe5e9b7863ca4278c3a167", size = 2589924, upload-time = "2025-10-19T00:40:38.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/cd/100483cae3849d24351c8333a815dc6adaf3f04912486e59386d86d9db9a/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0846f49cf8a4496bd42659040e68bd0484ce6af819709cae234938e039203ba0", size = 2868059, upload-time = "2025-10-19T00:40:40.025Z" }, + { url = "https://files.pythonhosted.org/packages/34/6e/3a7c56b325772d39397fc3aafb4dc054273982097178b6c3917c6dad48de/cytoolz-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:16a3af394ade1973226d64bb2f9eb3336adbdea03ed5b134c1bbec5a3b20028e", size = 2721692, upload-time = "2025-10-19T00:40:41.621Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ca/9fdaee32c3bc769dfb7e7991d9499136afccea67e423d097b8fb3c5acbc1/cytoolz-1.1.0-cp311-cp311-win32.whl", hash = "sha256:b786c9c8aeab76cc2f76011e986f7321a23a56d985b77d14f155d5e5514ea781", size = 899349, upload-time = "2025-10-19T00:40:43.183Z" }, + { url = "https://files.pythonhosted.org/packages/fd/04/2ab98edeea90311e4029e1643e43d2027b54da61453292d9ea51a103ee87/cytoolz-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:ebf06d1c5344fb22fee71bf664234733e55db72d74988f2ecb7294b05e4db30c", size = 945831, upload-time = "2025-10-19T00:40:44.693Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8d/777d86ea6bcc68b0fc926b0ef8ab51819e2176b37aadea072aac949d5231/cytoolz-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:b63f5f025fac893393b186e132e3e242de8ee7265d0cd3f5bdd4dda93f6616c9", size = 904076, upload-time = "2025-10-19T00:40:46.678Z" }, { url = "https://files.pythonhosted.org/packages/c6/ec/01426224f7acf60183d3921b25e1a8e71713d3d39cb464d64ac7aace6ea6/cytoolz-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:99f8e134c9be11649342853ec8c90837af4089fc8ff1e8f9a024a57d1fa08514", size = 1327800, upload-time = "2025-10-19T00:40:48.674Z" }, { url = "https://files.pythonhosted.org/packages/b4/07/e07e8fedd332ac9626ad58bea31416dda19bfd14310731fa38b16a97e15f/cytoolz-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:0a6f44cf9319c30feb9a50aa513d777ef51efec16f31c404409e7deb8063df64", size = 997118, upload-time = "2025-10-19T00:40:50.919Z" }, { url = "https://files.pythonhosted.org/packages/ab/72/c0f766d63ed2f9ea8dc8e1628d385d99b41fb834ce17ac3669e3f91e115d/cytoolz-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:945580dc158c557172fca899a35a99a16fbcebf6db0c77cb6621084bc82189f9", size = 991169, upload-time = "2025-10-19T00:40:52.887Z" }, @@ -253,11 +295,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/66/35/0fec2769660ca6472bbf3317ab634675827bb706d193e3240aaf20eab961/cytoolz-1.1.0-cp314-cp314t-win32.whl", hash = "sha256:3d407140f5604a89578285d4aac7b18b8eafa055cf776e781aabb89c48738fad", size = 960842, upload-time = "2025-10-19T00:44:01.143Z" }, { url = "https://files.pythonhosted.org/packages/46/b4/b7ce3d3cd20337becfec978ecfa6d0ef64884d0cf32d44edfed8700914b9/cytoolz-1.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56e5afb69eb6e1b3ffc34716ee5f92ffbdb5cb003b3a5ca4d4b0fe700e217162", size = 1020835, upload-time = "2025-10-19T00:44:03.246Z" }, { url = "https://files.pythonhosted.org/packages/2c/1f/0498009aa563a9c5d04f520aadc6e1c0942434d089d0b2f51ea986470f55/cytoolz-1.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:27b19b4a286b3ff52040efa42dbe403730aebe5fdfd2def704eb285e2125c63e", size = 927963, upload-time = "2025-10-19T00:44:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/84/32/0522207170294cf691112a93c70a8ef942f60fa9ff8e793b63b1f09cedc0/cytoolz-1.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f32e93a55681d782fc6af939f6df36509d65122423cbc930be39b141064adff8", size = 922014, upload-time = "2025-10-19T00:44:44.911Z" }, + { url = "https://files.pythonhosted.org/packages/4c/49/9be2d24adaa18fa307ff14e3e43f02b2ae4b69c4ce51cee6889eb2114990/cytoolz-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5d9bc596751cbda8073e65be02ca11706f00029768fbbbc81e11a8c290bb41aa", size = 918134, upload-time = 
"2025-10-19T00:44:47.122Z" }, + { url = "https://files.pythonhosted.org/packages/5c/b3/6a76c3b94c6c87c72ea822e7e67405be6b649c2e37778eeac7c0c0c69de8/cytoolz-1.1.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b16660d01c3931951fab49db422c627897c38c1a1f0393a97582004019a4887", size = 981970, upload-time = "2025-10-19T00:44:48.906Z" }, + { url = "https://files.pythonhosted.org/packages/f6/8a/606e4c7ed14aa6a86aee6ca84a2cb804754dc6c4905b8f94e09e49f1ce60/cytoolz-1.1.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b7de5718e2113d4efccea3f06055758cdbc17388ecc3341ba4d1d812837d7c1a", size = 978877, upload-time = "2025-10-19T00:44:50.819Z" }, + { url = "https://files.pythonhosted.org/packages/97/ec/ad474dcb1f6c1ebfdda3c2ad2edbb1af122a0e79c9ff2cb901ffb5f59662/cytoolz-1.1.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a12a2a1a6bc44099491c05a12039efa08cc33a3d0f8c7b0566185e085e139283", size = 964279, upload-time = "2025-10-19T00:44:52.476Z" }, + { url = "https://files.pythonhosted.org/packages/68/8c/d245fd416c69d27d51f14d5ad62acc4ee5971088ee31c40ffe1cc109af68/cytoolz-1.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:047defa7f5f9a32f82373dbc3957289562e8a3fa58ae02ec8e4dca4f43a33a21", size = 916630, upload-time = "2025-10-19T00:44:54.059Z" }, ] [[package]] name = "dot-ring" -version = "0.1.4" source = { editable = "." 
} dependencies = [ { name = "gmpy2" }, @@ -352,6 +399,13 @@ version = "2.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/07/bd/c6c154ce734a3e6187871b323297d8e5f3bdf9feaafc5212381538bc19e4/gmpy2-2.2.1.tar.gz", hash = "sha256:e83e07567441b78cb87544910cb3cc4fe94e7da987e93ef7622e76fb96650432", size = 234228, upload-time = "2024-07-21T05:33:00.715Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/ec/ab67751ac0c4088ed21cf9a2a7f9966bf702ca8ebfc3204879cf58c90179/gmpy2-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98e947491c67523d3147a500f377bb64d0b115e4ab8a12d628fb324bb0e142bf", size = 880346, upload-time = "2024-07-21T05:31:25.531Z" }, + { url = "https://files.pythonhosted.org/packages/97/7c/bdc4a7a2b0e543787a9354e80fdcf846c4e9945685218cef4ca938d25594/gmpy2-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ccd319a3a87529484167ae1391f937ac4a8724169fd5822bbb541d1eab612b0", size = 694518, upload-time = "2024-07-21T05:31:27.78Z" }, + { url = "https://files.pythonhosted.org/packages/fc/44/ea903003bb4c3af004912fb0d6488e346bd76968f11a7472a1e60dee7dd7/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827bcd433e5d62f1b732f45e6949419da4a53915d6c80a3c7a5a03d5a783a03a", size = 1653491, upload-time = "2024-07-21T05:31:29.968Z" }, + { url = "https://files.pythonhosted.org/packages/c9/70/5bce281b7cd664c04f1c9d47a37087db37b2be887bce738340e912ad86c8/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7131231fc96f57272066295c81cbf11b3233a9471659bca29ddc90a7bde9bfa", size = 1706487, upload-time = "2024-07-21T05:31:32.476Z" }, + { url = "https://files.pythonhosted.org/packages/2a/52/1f773571f21cf0319fc33218a1b384f29de43053965c05ed32f7e6729115/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1cc6f2bb68ee00c20aae554e111dc781a76140e00c31e4eda5c8f2d4168ed06c", size = 1637415, upload-time = 
"2024-07-21T05:31:34.591Z" }, + { url = "https://files.pythonhosted.org/packages/99/4c/390daf67c221b3f4f10b5b7d9293e61e4dbd48956a38947679c5a701af27/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae388fe46e3d20af4675451a4b6c12fc1bb08e6e0e69ee47072638be21bf42d8", size = 1657781, upload-time = "2024-07-21T05:31:36.81Z" }, + { url = "https://files.pythonhosted.org/packages/61/cd/86e47bccb3636389e29c4654a0e5ac52926d832897f2f64632639b63ffc1/gmpy2-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:8b472ee3c123b77979374da2293ebf2c170b88212e173d64213104956d4678fb", size = 1203346, upload-time = "2024-07-21T05:31:39.344Z" }, { url = "https://files.pythonhosted.org/packages/9a/ee/8f9f65e2bac334cfe13b3fc3f8962d5fc2858ebcf4517690d2d24afa6d0e/gmpy2-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d03a1be1b1ad3944013fae5250316c3f4e6aec45ecdf189a5c7422d640004d", size = 885231, upload-time = "2024-07-21T05:31:41.471Z" }, { url = "https://files.pythonhosted.org/packages/07/1c/bf29f6bf8acd72c3cf85d04e7db1bb26dd5507ee2387770bb787bc54e2a5/gmpy2-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd09dd43d199908c1d1d501c5de842b3bf754f99b94af5b5ef0e26e3b716d2d5", size = 696569, upload-time = "2024-07-21T05:31:43.768Z" }, { url = "https://files.pythonhosted.org/packages/7c/cc/38d33eadeccd81b604a95b67d43c71b246793b7c441f1d7c3b41978cd1cf/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3232859fda3e96fd1aecd6235ae20476ed4506562bcdef6796a629b78bb96acd", size = 1655776, upload-time = "2024-07-21T05:31:46.272Z" }, @@ -392,6 +446,17 @@ version = "2.3.5" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/43/77/84dd1d2e34d7e2792a236ba180b5e8fcc1e3e414e761ce0253f63d7f572e/numpy-2.3.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de5672f4a7b200c15a4127042170a694d4df43c992948f5e1af57f0174beed10", size = 17034641, upload-time = "2025-11-16T22:49:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ea/25e26fa5837106cde46ae7d0b667e20f69cbbc0efd64cba8221411ab26ae/numpy-2.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acfd89508504a19ed06ef963ad544ec6664518c863436306153e13e94605c218", size = 12528324, upload-time = "2025-11-16T22:49:22.582Z" }, + { url = "https://files.pythonhosted.org/packages/4d/1a/e85f0eea4cf03d6a0228f5c0256b53f2df4bc794706e7df019fc622e47f1/numpy-2.3.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:ffe22d2b05504f786c867c8395de703937f934272eb67586817b46188b4ded6d", size = 5356872, upload-time = "2025-11-16T22:49:25.408Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bb/35ef04afd567f4c989c2060cde39211e4ac5357155c1833bcd1166055c61/numpy-2.3.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:872a5cf366aec6bb1147336480fef14c9164b154aeb6542327de4970282cd2f5", size = 6893148, upload-time = "2025-11-16T22:49:27.549Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2b/05bbeb06e2dff5eab512dfc678b1cc5ee94d8ac5956a0885c64b6b26252b/numpy-2.3.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3095bdb8dd297e5920b010e96134ed91d852d81d490e787beca7e35ae1d89cf7", size = 14557282, upload-time = "2025-11-16T22:49:30.964Z" }, + { url = "https://files.pythonhosted.org/packages/65/fb/2b23769462b34398d9326081fad5655198fcf18966fcb1f1e49db44fbf31/numpy-2.3.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cba086a43d54ca804ce711b2a940b16e452807acebe7852ff327f1ecd49b0d4", size = 16897903, upload-time = "2025-11-16T22:49:34.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/14/085f4cf05fc3f1e8aa95e85404e984ffca9b2275a5dc2b1aae18a67538b8/numpy-2.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6cf9b429b21df6b99f4dee7a1218b8b7ffbbe7df8764dc0bd60ce8a0708fed1e", size = 16341672, upload-time = "2025-11-16T22:49:37.2Z" }, + { url = "https://files.pythonhosted.org/packages/6f/3b/1f73994904142b2aa290449b3bb99772477b5fd94d787093e4f24f5af763/numpy-2.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:396084a36abdb603546b119d96528c2f6263921c50df3c8fd7cb28873a237748", size = 18838896, upload-time = "2025-11-16T22:49:39.727Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b9/cf6649b2124f288309ffc353070792caf42ad69047dcc60da85ee85fea58/numpy-2.3.5-cp311-cp311-win32.whl", hash = "sha256:b0c7088a73aef3d687c4deef8452a3ac7c1be4e29ed8bf3b366c8111128ac60c", size = 6563608, upload-time = "2025-11-16T22:49:42.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/44/9fe81ae1dcc29c531843852e2874080dc441338574ccc4306b39e2ff6e59/numpy-2.3.5-cp311-cp311-win_amd64.whl", hash = "sha256:a414504bef8945eae5f2d7cb7be2d4af77c5d1cb5e20b296c2c25b61dff2900c", size = 13078442, upload-time = "2025-11-16T22:49:43.99Z" }, + { url = "https://files.pythonhosted.org/packages/6d/a7/f99a41553d2da82a20a2f22e93c94f928e4490bb447c9ff3c4ff230581d3/numpy-2.3.5-cp311-cp311-win_arm64.whl", hash = "sha256:0cd00b7b36e35398fa2d16af7b907b65304ef8bb4817a550e06e5012929830fa", size = 10458555, upload-time = "2025-11-16T22:49:47.092Z" }, { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" }, { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" }, { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" }, @@ -447,6 +512,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806, upload-time = "2025-11-16T22:52:14.641Z" }, { url = "https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760, upload-time = "2025-11-16T22:52:17.975Z" }, { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459, upload-time = "2025-11-16T22:52:20.55Z" }, + { url = "https://files.pythonhosted.org/packages/c6/65/f9dea8e109371ade9c782b4e4756a82edf9d3366bca495d84d79859a0b79/numpy-2.3.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f0963b55cdd70fad460fa4c1341f12f976bb26cb66021a5580329bd498988310", size = 16910689, upload-time = "2025-11-16T22:52:23.247Z" }, + { url = "https://files.pythonhosted.org/packages/00/4f/edb00032a8fb92ec0a679d3830368355da91a69cab6f3e9c21b64d0bb986/numpy-2.3.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f4255143f5160d0de972d28c8f9665d882b5f61309d8362fdd3e103cf7bf010c", size = 12457053, upload-time = "2025-11-16T22:52:26.367Z" 
}, + { url = "https://files.pythonhosted.org/packages/16/a4/e8a53b5abd500a63836a29ebe145fc1ab1f2eefe1cfe59276020373ae0aa/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:a4b9159734b326535f4dd01d947f919c6eefd2d9827466a696c44ced82dfbc18", size = 5285635, upload-time = "2025-11-16T22:52:29.266Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2f/37eeb9014d9c8b3e9c55bc599c68263ca44fdbc12a93e45a21d1d56df737/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2feae0d2c91d46e59fcd62784a3a83b3fb677fead592ce51b5a6fbb4f95965ff", size = 6801770, upload-time = "2025-11-16T22:52:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/7d/e4/68d2f474df2cb671b2b6c2986a02e520671295647dad82484cde80ca427b/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffac52f28a7849ad7576293c0cb7b9f08304e8f7d738a8cb8a90ec4c55a998eb", size = 14391768, upload-time = "2025-11-16T22:52:33.593Z" }, + { url = "https://files.pythonhosted.org/packages/b8/50/94ccd8a2b141cb50651fddd4f6a48874acb3c91c8f0842b08a6afc4b0b21/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63c0e9e7eea69588479ebf4a8a270d5ac22763cc5854e9a7eae952a3908103f7", size = 16729263, upload-time = "2025-11-16T22:52:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ee/346fa473e666fe14c52fcdd19ec2424157290a032d4c41f98127bfb31ac7/numpy-2.3.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f16417ec91f12f814b10bafe79ef77e70113a2f5f7018640e7425ff979253425", size = 12967213, upload-time = "2025-11-16T22:52:39.38Z" }, ] [[package]] @@ -513,6 +585,20 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { 
url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, @@ -569,10 +655,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = 
"2025-11-04T13:42:47.156Z" }, { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] [[package]] @@ -627,7 +725,7 @@ name = "pytest-cov" version = "7.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "coverage" }, + { name = "coverage", extra = ["toml"] }, { name = "pluggy" }, { name = "pytest" }, ] @@ -657,6 +755,55 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = 
"2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + [[package]] name = "toolz" version = "1.1.0"