18 changes: 18 additions & 0 deletions .github/workflows/elliptic-curve.yml
@@ -88,3 +88,21 @@ jobs:
- run: cargo test --no-default-features
- run: cargo test
- run: cargo test --all-features

test-careful:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- run: cargo install cargo-careful
- run: cargo careful test --all-features

test-miri:
runs-on: ubuntu-latest
env:
MIRIFLAGS: "-Zmiri-symbolic-alignment-check -Zmiri-strict-provenance"
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- run: rustup component add miri && cargo miri setup
- run: cargo miri test --all-features
23 changes: 22 additions & 1 deletion elliptic-curve/src/dev.rs
@@ -4,7 +4,7 @@
//! the traits in this crate.

use crate::{
Curve, CurveArithmetic, FieldBytesEncoding, PrimeCurve,
BatchNormalize, Curve, CurveArithmetic, FieldBytesEncoding, PrimeCurve,
array::typenum::U32,
bigint::{Limb, U256},
error::{Error, Result},
@@ -17,13 +17,17 @@ use crate::{
zeroize::DefaultIsZeroes,
};
use core::{
array,
iter::{Product, Sum},
ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign},
};
use ff::{Field, PrimeField};
use hex_literal::hex;
use pkcs8::AssociatedOid;

#[cfg(feature = "alloc")]
use alloc::vec::Vec;

#[cfg(feature = "bits")]
use ff::PrimeFieldBits;

@@ -584,6 +588,23 @@ pub enum ProjectivePoint {
Other(AffinePoint),
}

impl<const N: usize> BatchNormalize<[ProjectivePoint; N]> for ProjectivePoint {
type Output = [AffinePoint; N];

fn batch_normalize(points: &[ProjectivePoint; N]) -> [AffinePoint; N] {
array::from_fn(|index| points[index].into())
}
}

#[cfg(feature = "alloc")]
impl BatchNormalize<[ProjectivePoint]> for ProjectivePoint {
type Output = Vec<AffinePoint>;

fn batch_normalize(points: &[ProjectivePoint]) -> Vec<AffinePoint> {
points.iter().copied().map(AffinePoint::from).collect()
}
}

impl ConstantTimeEq for ProjectivePoint {
fn ct_eq(&self, other: &Self) -> Choice {
match (self, other) {
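A minimal usage sketch of the two impls above (not part of the diff), assuming the dev and alloc features and reusing the test point from the non_identity.rs tests further down:

use elliptic_curve::BatchNormalize;
use elliptic_curve::dev::{AffinePoint, ProjectivePoint};
use group::GroupEncoding;
use hex_literal::hex;

#[test]
fn batch_normalize_dev_sketch() {
    let point = ProjectivePoint::from_bytes(
        &hex!("02c9afa9d845ba75166b5c215767b1d6934e50c3db36e89b127b8a622b120f6721").into(),
    )
    .unwrap();

    // Array input: the output is a fixed-size array, no allocation involved.
    let affine: [AffinePoint; 2] = ProjectivePoint::batch_normalize(&[point, point]);

    // Slice input (gated on the `alloc` feature): the output is a `Vec`.
    let affine_vec: Vec<AffinePoint> = ProjectivePoint::batch_normalize([point, point].as_slice());

    assert_eq!(affine.as_slice(), affine_vec.as_slice());
}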
3 changes: 2 additions & 1 deletion elliptic-curve/src/lib.rs
@@ -5,7 +5,8 @@
html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/8f1a9894/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/8f1a9894/logo.svg"
)]
#![forbid(unsafe_code)]
// Only allowed for newtype casts.
#![deny(unsafe_code)]
#![warn(
clippy::cast_lossless,
clippy::cast_possible_truncation,
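For context (a standalone sketch with a hypothetical Wrapper type, not code from this crate): a later #[allow(unsafe_code)] is rejected under forbid but accepted under deny, which is what the newtype casts added in non_identity.rs below rely on.

// Crate root: unsafe code is still rejected everywhere by default.
#![deny(unsafe_code)]

/// Hypothetical newtype; `repr(transparent)` guarantees the same layout as `T`.
#[repr(transparent)]
pub struct Wrapper<T>(T);

// Under `#![forbid(unsafe_code)]` this `allow` would itself be a compile error.
#[allow(unsafe_code)]
pub fn cast_slice<T>(values: &[Wrapper<T>]) -> &[T] {
    // SAFETY: `Wrapper<T>` has the same layout as `T` due to `repr(transparent)`.
    unsafe { &*(values as *const [Wrapper<T>] as *const [T]) }
}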
4 changes: 2 additions & 2 deletions elliptic-curve/src/point.rs
@@ -40,9 +40,9 @@ pub trait AffineCoordinates {

/// Normalize point(s) in projective representation by converting them to their affine ones.
#[cfg(feature = "arithmetic")]
pub trait BatchNormalize<Points: ?Sized>: group::Curve {
pub trait BatchNormalize<Points: ?Sized> {
/// The output of the batch normalization; a container of affine points.
type Output: AsRef<[Self::AffineRepr]>;
type Output;

/// Perform a batched conversion to affine representation on a sequence of projective points
/// at an amortized cost that should be practically as efficient as a single conversion.
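Dropping the group::Curve supertrait and the AsRef bound on Output is what lets wrapper types such as NonIdentity (next file) implement the trait. A sketch of generic code against the relaxed trait, with an illustrative helper name:

use elliptic_curve::BatchNormalize;

/// Normalize exactly two points for any implementor of the relaxed trait,
/// including `NonIdentity<P>`, which is not a `group::Curve`.
fn normalize_pair<P>(points: &[P; 2]) -> <P as BatchNormalize<[P; 2]>>::Output
where
    P: BatchNormalize<[P; 2]>,
{
    P::batch_normalize(points)
}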
107 changes: 106 additions & 1 deletion elliptic-curve/src/point/non_identity.rs
@@ -6,11 +6,14 @@ use group::{Curve, Group, GroupEncoding, prime::PrimeCurveAffine};
use rand_core::CryptoRng;
use subtle::{Choice, ConditionallySelectable, ConstantTimeEq, CtOption};

#[cfg(feature = "alloc")]
use alloc::vec::Vec;

#[cfg(feature = "serde")]
use serdect::serde::{Deserialize, Serialize, de, ser};
use zeroize::Zeroize;

use crate::{CurveArithmetic, NonZeroScalar, Scalar};
use crate::{BatchNormalize, CurveArithmetic, NonZeroScalar, Scalar};

/// Non-identity point type.
///
@@ -19,6 +22,7 @@ use crate::{CurveArithmetic, NonZeroScalar, Scalar};
/// In the context of ECC, it's useful for ensuring that certain arithmetic
/// cannot result in the identity point.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(transparent)]
pub struct NonIdentity<P> {
point: P,
}
@@ -103,6 +107,72 @@ impl<P> AsRef<P> for NonIdentity<P> {
}
}

impl<const N: usize, P> BatchNormalize<[Self; N]> for NonIdentity<P>
where
P: Curve + BatchNormalize<[P; N], Output = [P::AffineRepr; N]>,
{
type Output = [NonIdentity<P::AffineRepr>; N];

fn batch_normalize(points: &[Self; N]) -> [NonIdentity<P::AffineRepr>; N] {
// Ensure casting is safe.
// This always succeeds because `NonIdentity` is `repr(transparent)`.
debug_assert_eq!(size_of::<P>(), size_of::<NonIdentity<P>>());
debug_assert_eq!(align_of::<P>(), align_of::<NonIdentity<P>>());

#[allow(unsafe_code)]
// SAFETY: `NonIdentity` is `repr(transparent)`.
let points: &[P; N] = unsafe { &*points.as_ptr().cast() };
Member:
Curious if you could extract an AsRef impl here, but perhaps I can experiment with that myself

Contributor Author:
Apparently not, because array is a foreign type. Same applies to slices.

I guess we could add it as a method?
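One possible shape for such a method, sketched here against the repr(transparent) layout rather than taken from this PR:

impl<P> NonIdentity<P> {
    /// Cast a slice of wrapped points to a slice of the inner point type.
    #[allow(unsafe_code)]
    fn cast_slice(points: &[Self]) -> &[P] {
        // SAFETY: `NonIdentity<P>` is `repr(transparent)`, so the two slice
        // types have identical layout.
        unsafe { &*(points as *const [NonIdentity<P>] as *const [P]) }
    }
}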

let affine_points = <P as BatchNormalize<_>>::batch_normalize(points);

// Ensure `array::map()` can be optimized to a `memcpy`.
debug_assert_eq!(
size_of::<P::AffineRepr>(),
size_of::<NonIdentity<P::AffineRepr>>()
);
debug_assert_eq!(
align_of::<P::AffineRepr>(),
align_of::<NonIdentity<P::AffineRepr>>()
);

affine_points.map(|point| NonIdentity { point })
}
}

#[cfg(feature = "alloc")]
impl<P> BatchNormalize<[Self]> for NonIdentity<P>
where
P: Curve + BatchNormalize<[P], Output = Vec<P::AffineRepr>>,
{
type Output = Vec<NonIdentity<P::AffineRepr>>;

fn batch_normalize(points: &[Self]) -> Vec<NonIdentity<P::AffineRepr>> {
// Ensure casting is safe.
// This always succeeds because `NonIdentity` is `repr(transparent)`.
debug_assert_eq!(size_of::<P>(), size_of::<NonIdentity<P>>());
debug_assert_eq!(align_of::<P>(), align_of::<NonIdentity<P>>());

#[allow(unsafe_code)]
// SAFETY: `NonIdentity` is `repr(transparent)`.
let points: &[P] = unsafe { &*(points as *const [NonIdentity<P>] as *const [P]) };
let affine_points = <P as BatchNormalize<_>>::batch_normalize(points);

// Ensure `into_iter()` + `collect()` can be optimized away.
debug_assert_eq!(
size_of::<P::AffineRepr>(),
size_of::<NonIdentity<P::AffineRepr>>()
);
debug_assert_eq!(
align_of::<P::AffineRepr>(),
align_of::<NonIdentity<P::AffineRepr>>()
);

affine_points
.into_iter()
.map(|point| NonIdentity { point })
.collect()
}
}

impl<P> ConditionallySelectable for NonIdentity<P>
where
P: ConditionallySelectable,
Expand Down Expand Up @@ -238,6 +308,7 @@ impl<P: Group> Zeroize for NonIdentity<P> {
#[cfg(all(test, feature = "dev"))]
mod tests {
use super::NonIdentity;
use crate::BatchNormalize;
use crate::dev::{AffinePoint, NonZeroScalar, ProjectivePoint, SecretKey};
use group::GroupEncoding;
use hex_literal::hex;
@@ -303,4 +374,38 @@ mod tests {

assert_eq!(point.to_point(), pk.to_projective());
}

#[test]
fn batch_normalize() {
let point = ProjectivePoint::from_bytes(
&hex!("02c9afa9d845ba75166b5c215767b1d6934e50c3db36e89b127b8a622b120f6721").into(),
)
.unwrap();
let point = NonIdentity::new(point).unwrap();
let points = [point, point];

for (point, affine_point) in points
.into_iter()
.zip(NonIdentity::batch_normalize(&points))
{
assert_eq!(point.to_affine(), affine_point);
}
}

#[test]
#[cfg(feature = "alloc")]
fn batch_normalize_alloc() {
let point = ProjectivePoint::from_bytes(
&hex!("02c9afa9d845ba75166b5c215767b1d6934e50c3db36e89b127b8a622b120f6721").into(),
)
.unwrap();
let point = NonIdentity::new(point).unwrap();
let points = vec![point, point];

let affine_points = NonIdentity::batch_normalize(points.as_slice());

for (point, affine_point) in points.into_iter().zip(affine_points) {
assert_eq!(point.to_affine(), affine_point);
}
}
}