ring/ec/curve25519/scalar.rs

use crate::{
    arithmetic::limbs_from_hex,
    digest, error, limb,
    polyfill::slice::{self, AsChunks},
};
use core::array;

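/// A Curve25519 scalar, stored as 32 little-endian bytes.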
#[repr(transparent)]
pub struct Scalar([u8; SCALAR_LEN]);

pub const SCALAR_LEN: usize = 32;

impl Scalar {
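    /// Constructs a `Scalar` from `bytes`, failing if `bytes`, read as a
    /// little-endian integer, is not less than the group order `n`.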
    pub fn from_bytes_checked(bytes: [u8; SCALAR_LEN]) -> Result<Self, error::Unspecified> {
        const ORDER: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            limbs_from_hex("1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed");
        let order = ORDER.map(limb::Limb::from);

        let (limbs_as_bytes, _empty): (AsChunks<u8, { limb::LIMB_BYTES }>, _) =
            slice::as_chunks(&bytes);
        debug_assert!(_empty.is_empty());
        let limbs: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            array::from_fn(|i| limb::Limb::from_le_bytes(limbs_as_bytes[i]));
        limb::verify_limbs_less_than_limbs_leak_bit(&limbs, &order)?;

        Ok(Self(bytes))
    }

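    /// Constructs a `Scalar` from a SHA-512 digest, reduced modulo the group
    /// order by the `x25519_sc_reduce` routine.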
    pub fn from_sha512_digest_reduced(digest: digest::Digest) -> Self {
        prefixed_extern! {
            fn x25519_sc_reduce(s: &mut UnreducedScalar);
        }
        let mut unreduced = [0u8; digest::SHA512_OUTPUT_LEN];
        unreduced.copy_from_slice(digest.as_ref());
        unsafe { x25519_sc_reduce(&mut unreduced) };
        Self((&unreduced[..SCALAR_LEN]).try_into().unwrap())
    }
}

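/// A scalar whose bytes have been masked ("clamped") by the `x25519_sc_mask`
/// routine.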
#[repr(transparent)]
pub struct MaskedScalar([u8; SCALAR_LEN]);

impl MaskedScalar {
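    /// Constructs a `MaskedScalar` by applying `x25519_sc_mask` to a copy of
    /// `bytes`.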
    pub fn from_bytes_masked(bytes: [u8; SCALAR_LEN]) -> Self {
        prefixed_extern! {
            fn x25519_sc_mask(a: &mut [u8; SCALAR_LEN]);
        }
        let mut r = Self(bytes);
        unsafe { x25519_sc_mask(&mut r.0) };
        r
    }
}

impl From<MaskedScalar> for Scalar {
    fn from(MaskedScalar(scalar): MaskedScalar) -> Self {
        Self(scalar)
    }
}

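/// A 64-byte value (e.g. a SHA-512 digest) that has not yet been reduced
/// modulo the group order.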
type UnreducedScalar = [u8; UNREDUCED_SCALAR_LEN];
const UNREDUCED_SCALAR_LEN: usize = SCALAR_LEN * 2;
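
// A minimal sketch (not part of the original file) of how the constructors
// above compose: `x25519_sc_reduce` reduces modulo the group order, so a
// scalar built by `from_sha512_digest_reduced` should always pass the range
// check in `from_bytes_checked`. The test name and input bytes are
// illustrative assumptions, not existing ring tests.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::digest;

    #[test]
    fn reduced_digest_passes_range_check() {
        // Hash an arbitrary message, reduce it to a scalar, and confirm the
        // reduced bytes are accepted by the range-checked constructor.
        let d = digest::digest(&digest::SHA512, b"illustrative input");
        let Scalar(bytes) = Scalar::from_sha512_digest_reduced(d);
        assert!(Scalar::from_bytes_checked(bytes).is_ok());
    }
}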