Make Blake3Hash always encode as Hex in JSON #3049

Merged · 6 commits · Sep 26, 2024
2 changes: 1 addition & 1 deletion crates/pallet-subspace/src/mock.rs
@@ -469,7 +469,7 @@ pub fn create_signed_vote(
);
let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
let masked_chunk = (Simd::from(solution.chunk.to_bytes())
^ Simd::from(solution.proof_of_space.hash()))
^ Simd::from(*solution.proof_of_space.hash()))
.to_array();

// Check that solution quality is not too high
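With `Blake3Hash` now a newtype rather than a bare `[u8; 32]` alias, call sites that feed the hash to `Simd::from` need one extra deref, which is what the added `*` above does. A minimal, self-contained sketch of the pattern (a simplified stand-in type, not the crate's real `Blake3Hash`; assumes a nightly toolchain since `core::simd` is unstable):

```rust
#![feature(portable_simd)]

use core::ops::Deref;
use core::simd::Simd;

/// Simplified stand-in for the new `Blake3Hash` newtype.
struct Hash([u8; 32]);

impl Deref for Hash {
    type Target = [u8; 32];

    fn deref(&self) -> &[u8; 32] {
        &self.0
    }
}

fn main() {
    let hash = Hash([7; 32]);
    // `Simd::from` wants the inner `[u8; 32]`; `*hash` reaches it through `Deref`.
    let lanes = Simd::from(*hash);
    let masked = lanes ^ Simd::from([0xff; 32]);
    println!("{:?}", masked.to_array());
}
```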
12 changes: 6 additions & 6 deletions crates/sc-consensus-subspace-rpc/src/lib.rs
@@ -56,8 +56,8 @@ use subspace_archiving::archiver::NewArchivedSegment;
use subspace_core_primitives::crypto::kzg::Kzg;
use subspace_core_primitives::objects::GlobalObjectMapping;
use subspace_core_primitives::{
Blake3Hash, Blake3HashHex, BlockHash, HistorySize, Piece, PieceIndex, PublicKey, SegmentHeader,
SegmentIndex, SlotNumber, Solution,
Blake3Hash, BlockHash, HistorySize, Piece, PieceIndex, PublicKey, SegmentHeader, SegmentIndex,
SlotNumber, Solution,
};
use subspace_erasure_coding::ErasureCoding;
use subspace_farmer_components::FarmerProtocolInfo;
@@ -181,7 +181,7 @@ pub trait SubspaceRpcApi {
unsubscribe = "subspace_unsubscribeFilteredObjectMappings",
item = GlobalObjectMapping,
)]
fn subscribe_filtered_object_mappings(&self, hashes: Vec<Blake3HashHex>);
fn subscribe_filtered_object_mappings(&self, hashes: Vec<Blake3Hash>);
}

#[derive(Default)]
@@ -850,7 +850,7 @@ where
fn subscribe_filtered_object_mappings(
&self,
pending: PendingSubscriptionSink,
hashes: Vec<Blake3HashHex>,
hashes: Vec<Blake3Hash>,
) {
// TODO: deny unsafe subscriptions?

@@ -876,7 +876,7 @@ where
return;
};

let mut hashes = HashSet::<Blake3Hash>::from_iter(hashes.into_iter().map(|hash| *hash));
let mut hashes = HashSet::<Blake3Hash>::from_iter(hashes);
let hash_count = hashes.len();

// The genesis segment isn't included in this stream, see
@@ -888,7 +888,7 @@ where
let objects = archived_segment_notification
.archived_segment
.global_object_mappings()
.filter(|object| hashes.remove(&*object.hash))
.filter(|object| hashes.remove(&object.hash))
.collect::<Vec<_>>();

stream::iter(objects)
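Because the reworked `Blake3Hash` serializes transparently as hex (see the `lib.rs` changes below), the RPC can take plain `Vec<Blake3Hash>` and still receive hex strings over JSON. A rough sketch of the deserialization side with a simplified stand-in type (assumes `serde`, `serde_json`, and the `hex` crate with its `serde` feature):

```rust
use std::collections::HashSet;

use serde::Deserialize;

/// Simplified stand-in for the hex-serializing `Blake3Hash` newtype.
#[derive(Debug, Deserialize, PartialEq, Eq, Hash)]
#[serde(transparent)]
struct Blake3Hash(#[serde(with = "hex")] [u8; 32]);

fn main() {
    // Hashes arrive as hex strings rather than arrays of 32 numbers.
    let params = format!(r#"["{}", "{}"]"#, "00".repeat(32), "ff".repeat(32));
    let hashes: Vec<Blake3Hash> = serde_json::from_str(&params).unwrap();

    // The handler can then build its `HashSet` directly from the decoded values.
    let filter: HashSet<Blake3Hash> = hashes.into_iter().collect();
    assert_eq!(filter.len(), 2);
}
```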
3 changes: 1 addition & 2 deletions crates/sc-proof-of-time/src/verifier/tests.rs
@@ -1,7 +1,6 @@
use crate::verifier::PotVerifier;
use sp_consensus_slots::Slot;
use sp_consensus_subspace::{PotNextSlotInput, PotParametersChange};
use std::mem;
use std::num::NonZeroU32;
use subspace_core_primitives::{Blake3Hash, PotSeed};

@@ -128,7 +127,7 @@ fn test_basic() {
fn parameters_change() {
let genesis_seed = PotSeed::from(SEED);
let slot_iterations_1 = NonZeroU32::new(512).unwrap();
let entropy = [1; mem::size_of::<Blake3Hash>()];
let entropy = Blake3Hash::from([1; Blake3Hash::SIZE]);
let checkpoints_1 = subspace_proof_of_time::prove(genesis_seed, slot_iterations_1).unwrap();
let slot_iterations_2 = slot_iterations_1.saturating_mul(NonZeroU32::new(2).unwrap());
let checkpoints_2 = subspace_proof_of_time::prove(
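The test now builds its entropy through the newtype's own constant and conversion instead of `mem::size_of`. A tiny sketch of the two pieces it relies on, `Blake3Hash::SIZE` and `From<[u8; 32]>`, again with a simplified stand-in type:

```rust
/// Simplified stand-in for the crate's `Blake3Hash` newtype.
#[derive(Debug, PartialEq, Eq)]
struct Blake3Hash([u8; 32]);

impl Blake3Hash {
    /// Size of BLAKE3 hash output in bytes (replaces `mem::size_of::<Blake3Hash>()`).
    const SIZE: usize = 32;
}

impl From<[u8; Blake3Hash::SIZE]> for Blake3Hash {
    fn from(value: [u8; Blake3Hash::SIZE]) -> Self {
        Self(value)
    }
}

fn main() {
    let entropy = Blake3Hash::from([1; Blake3Hash::SIZE]);
    assert_eq!(entropy, Blake3Hash([1; 32]));
}
```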
2 changes: 1 addition & 1 deletion crates/sp-domains-fraud-proof/src/host_functions.rs
@@ -345,7 +345,7 @@ where

let bundle = bundles.get(bundle_index as usize)?;
let bundle_vrf_hash =
U256::from_be_bytes(bundle.sealed_header.header.proof_of_election.vrf_hash());
U256::from_be_bytes(*bundle.sealed_header.header.proof_of_election.vrf_hash());

self.domain_runtime_call(
runtime_code,
2 changes: 1 addition & 1 deletion crates/sp-domains-fraud-proof/src/verification.rs
@@ -608,7 +608,7 @@ where

let domain_tx_range = U256::MAX / INITIAL_DOMAIN_TX_RANGE;
let bundle_vrf_hash =
U256::from_be_bytes(bundle.sealed_header.header.proof_of_election.vrf_hash());
U256::from_be_bytes(*bundle.sealed_header.header.proof_of_election.vrf_hash());

let is_tx_in_range = fraud_proof_runtime_interface::domain_runtime_call(
domain_runtime_code,
5 changes: 2 additions & 3 deletions crates/sp-domains/src/merkle_tree.rs
@@ -10,7 +10,6 @@ use parity_scale_codec::{Decode, Encode};
use rs_merkle::Hasher;
use scale_info::TypeInfo;
use sp_runtime::traits::{BlakeTwo256, Hash};
use subspace_core_primitives::Blake3Hash;

/// Merkle tree using [`Blake2b256Algorithm`].
pub type MerkleTree = rs_merkle::MerkleTree<Blake2b256Algorithm>;
@@ -40,9 +39,9 @@ impl Default for Blake2b256Algorithm {
}

impl Hasher for Blake2b256Algorithm {
type Hash = Blake3Hash;
type Hash = [u8; 32];

fn hash(data: &[u8]) -> Blake3Hash {
fn hash(data: &[u8]) -> Self::Hash {
let mut hasher = Blake2b::new();
hasher.update(data);
hasher.finalize_fixed().into()
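This hasher produces a Blake2b-256 digest, so typing its output as `Blake3Hash` was misleading; the impl now returns a plain `[u8; 32]`. For reference, a self-contained sketch of the same hashing shape using the `blake2`/`digest` crates (a simplified stand-in, not the crate's `Blake2b256Algorithm`):

```rust
use blake2::digest::consts::U32;
use blake2::digest::FixedOutput;
use blake2::{Blake2b, Digest};

/// 256-bit Blake2b, mirroring the module's `Blake2b` usage.
type Blake2b256 = Blake2b<U32>;

fn blake2b_256(data: &[u8]) -> [u8; 32] {
    let mut hasher = Blake2b256::new();
    hasher.update(data);
    // `finalize_fixed` yields a 32-byte generic array that converts into `[u8; 32]`.
    hasher.finalize_fixed().into()
}

fn main() {
    let leaf = blake2b_256(b"merkle leaf");
    assert_eq!(leaf.len(), 32);
}
```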
7 changes: 3 additions & 4 deletions crates/subspace-core-primitives/src/checksum.rs
@@ -4,7 +4,6 @@
mod tests;

use crate::Blake3Hash;
use core::mem;
use parity_scale_codec::{Decode, Encode, EncodeLike, Error, Input, Output};

/// Output wrapper for SCALE codec that will write Blake3 checksum at the end of the encoding
@@ -91,7 +90,7 @@ where
fn finish(self) -> (Blake3Hash, &'a mut I) {
// Compute checksum at the very end of decoding
let hash = *self.hasher.finalize().as_bytes();
(hash, self.input)
(hash.into(), self.input)
}
}

@@ -105,7 +104,7 @@
{
#[inline]
fn size_hint(&self) -> usize {
self.0.size_hint() + mem::size_of::<Blake3Hash>()
self.0.size_hint() + Blake3Hash::SIZE
}

#[inline]
@@ -118,7 +117,7 @@

#[inline]
fn encoded_size(&self) -> usize {
self.0.encoded_size() + mem::size_of::<Blake3Hash>()
self.0.encoded_size() + Blake3Hash::SIZE
}
}

3 changes: 1 addition & 2 deletions crates/subspace-core-primitives/src/checksum/tests.rs
@@ -2,7 +2,6 @@ use super::Blake3Checksummed;
use crate::Blake3Hash;
use parity_scale_codec::{Decode, Encode};
use rand::prelude::*;
use std::mem;

#[test]
fn basic() {
@@ -13,7 +12,7 @@ fn basic() {

// Encoding is extended with checksum
assert_eq!(
plain_encoding.len() + mem::size_of::<Blake3Hash>(),
plain_encoding.len() + Blake3Hash::SIZE,
checksummed_encoding.len()
);

10 changes: 5 additions & 5 deletions crates/subspace-core-primitives/src/crypto.rs
@@ -38,20 +38,20 @@ use scale_info::{Type, TypeInfo};

/// BLAKE3 hashing of a single value.
pub fn blake3_hash(data: &[u8]) -> Blake3Hash {
*blake3::hash(data).as_bytes()
blake3::hash(data).as_bytes().into()
}

/// BLAKE3 hashing of a single value in parallel (only useful for large values well above 128kiB).
#[cfg(feature = "parallel")]
pub fn blake3_hash_parallel(data: &[u8]) -> Blake3Hash {
let mut state = blake3::Hasher::new();
state.update_rayon(data);
*state.finalize().as_bytes()
state.finalize().as_bytes().into()
}

/// BLAKE3 keyed hashing of a single value.
pub fn blake3_hash_with_key(key: &[u8; 32], data: &[u8]) -> Blake3Hash {
*blake3::keyed_hash(key, data).as_bytes()
blake3::keyed_hash(key, data).as_bytes().into()
}

/// BLAKE3 hashing of a list of values.
@@ -60,15 +60,15 @@ pub fn blake3_hash_list(data: &[&[u8]]) -> Blake3Hash {
for d in data {
state.update(d);
}
*state.finalize().as_bytes()
state.finalize().as_bytes().into()
}

/// BLAKE3 hashing of a single value truncated to 254 bits as Scalar for usage with KZG.
pub fn blake3_254_hash_to_scalar(data: &[u8]) -> Scalar {
let mut hash = blake3_hash(data);
// Erase first 2 bits to effectively truncate the hash (number is interpreted as big-endian)
hash[0] &= 0b00111111;
Scalar::try_from(hash)
Scalar::try_from(*hash)
.expect("Last bit erased, thus hash is guaranteed to fit into scalar; qed")
}

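The hashing helpers no longer copy the digest out with `*...as_bytes()`; they lean on the new `From<&[u8; 32]>` impl instead. A small sketch of that conversion with a stand-in newtype (assumes the `blake3` and `hex` crates):

```rust
/// Simplified stand-in for the crate's `Blake3Hash` newtype.
struct Blake3Hash([u8; 32]);

impl From<&[u8; 32]> for Blake3Hash {
    fn from(value: &[u8; 32]) -> Self {
        Self(*value)
    }
}

/// BLAKE3 hashing of a single value, mirroring the shape of the updated helper.
fn blake3_hash(data: &[u8]) -> Blake3Hash {
    // `as_bytes` returns `&[u8; 32]`; `.into()` wraps it via the impl above.
    blake3::hash(data).as_bytes().into()
}

fn main() {
    let hash = blake3_hash(b"hello");
    println!("{}", hex::encode(hash.0));
}
```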
92 changes: 71 additions & 21 deletions crates/subspace-core-primitives/src/lib.rs
@@ -50,11 +50,13 @@ use alloc::boxed::Box;
use alloc::vec;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::array::TryFromSliceError;
use core::fmt;
use core::num::{NonZeroU64, NonZeroU8};
use core::simd::Simd;
use core::str::FromStr;
use derive_more::{Add, AsMut, AsRef, Deref, DerefMut, Display, Div, From, Into, Mul, Rem, Sub};
use hex::FromHex;
use num_traits::{WrappingAdd, WrappingSub};
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
pub use pieces::{
@@ -76,14 +78,7 @@ pub const REWARD_SIGNING_CONTEXT: &[u8] = b"subspace_reward";
/// Byte length of a randomness type.
pub const RANDOMNESS_LENGTH: usize = 32;

/// Size of BLAKE3 hash output (in bytes).
pub const BLAKE3_HASH_SIZE: usize = 32;

/// BLAKE3 hash output
pub type Blake3Hash = [u8; BLAKE3_HASH_SIZE];

/// BLAKE3 hash output wrapper, which serializes it as a hex string
// TODO: rename this type to Blake3Hash into a newtype, after checking for any breaking changes
/// BLAKE3 hash output transparent wrapper
#[derive(
Default,
Copy,
@@ -94,7 +89,8 @@ pub type Blake3Hash = [u8; BLAKE3_HASH_SIZE];
PartialOrd,
Hash,
From,
Into,
AsRef,
AsMut,
Deref,
DerefMut,
Encode,
@@ -104,14 +100,68 @@ pub type Blake3Hash = [u8; BLAKE3_HASH_SIZE];
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct Blake3HashHex(#[cfg_attr(feature = "serde", serde(with = "hex"))] Blake3Hash);
pub struct Blake3Hash(#[cfg_attr(feature = "serde", serde(with = "hex"))] [u8; Self::SIZE]);

impl fmt::Debug for Blake3HashHex {
impl fmt::Debug for Blake3Hash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", hex::encode(self.0))
}
}

impl AsRef<[u8]> for Blake3Hash {
#[inline]
fn as_ref(&self) -> &[u8] {
&self.0
}
}

impl AsMut<[u8]> for Blake3Hash {
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
&mut self.0
}
}

impl FromHex for Blake3Hash {
type Error = hex::FromHexError;

fn from_hex<T: AsRef<[u8]>>(hex: T) -> Result<Self, Self::Error> {
let data = hex::decode(hex)?
.try_into()
.map_err(|_| hex::FromHexError::InvalidStringLength)?;

Ok(Self(data))
}
}

impl From<&[u8; Self::SIZE]> for Blake3Hash {
#[inline]
fn from(value: &[u8; Self::SIZE]) -> Self {
Self(*value)
}
}

impl TryFrom<&[u8]> for Blake3Hash {
type Error = TryFromSliceError;

#[inline]
fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
Ok(Self(value.try_into()?))
}
}

impl From<Blake3Hash> for [u8; Blake3Hash::SIZE] {
#[inline]
fn from(value: Blake3Hash) -> Self {
value.0
}
}

impl Blake3Hash {
/// Size of BLAKE3 hash output (in bytes).
pub const SIZE: usize = 32;
}

/// Type of randomness.
#[derive(
Debug,
@@ -411,7 +461,7 @@ impl PotOutput {
/// Derives the global randomness from the output
#[inline]
pub fn derive_global_randomness(&self) -> Randomness {
Randomness::from(blake3_hash(&self.0))
Randomness::from(*blake3_hash(&self.0))
}

/// Derive seed from proof of time in case entropy injection is not needed
Expand All @@ -423,7 +473,7 @@ impl PotOutput {
/// Derive seed from proof of time with entropy injection
#[inline]
pub fn seed_with_entropy(&self, entropy: &Blake3Hash) -> PotSeed {
let hash = blake3_hash_list(&[entropy, &self.0]);
let hash = blake3_hash_list(&[entropy.as_ref(), &self.0]);
let mut seed = PotSeed::default();
seed.copy_from_slice(&hash[..Self::SIZE]);
seed
@@ -1009,7 +1059,7 @@ pub struct SectorId(#[cfg_attr(feature = "serde", serde(with = "hex"))] Blake3Ha
impl AsRef<[u8]> for SectorId {
#[inline]
fn as_ref(&self) -> &[u8] {
&self.0
self.0.as_ref()
}
}

@@ -1042,7 +1092,7 @@ impl SectorId {
let piece_offset_bytes = piece_offset.to_bytes();
let mut key = [0; 32];
key[..piece_offset_bytes.len()].copy_from_slice(&piece_offset_bytes);
U256::from_le_bytes(blake3_hash_with_key(&key, &self.0))
U256::from_le_bytes(*blake3_hash_with_key(&key, self.as_ref()))
};
let history_size_in_pieces = history_size.in_pieces().get();
let num_interleaved_pieces = 1.max(
@@ -1073,8 +1123,8 @@ impl SectorId {
&self,
global_challenge: &Blake3Hash,
) -> SectorSlotChallenge {
let sector_slot_challenge = Simd::from(self.0) ^ Simd::from(*global_challenge);
SectorSlotChallenge(sector_slot_challenge.to_array())
let sector_slot_challenge = Simd::from(*self.0) ^ Simd::from(**global_challenge);
SectorSlotChallenge(sector_slot_challenge.to_array().into())
}

/// Derive evaluation seed
@@ -1084,12 +1134,12 @@ impl SectorId {
history_size: HistorySize,
) -> PosSeed {
let evaluation_seed = blake3_hash_list(&[
&self.0,
self.as_ref(),
&piece_offset.to_bytes(),
&history_size.get().to_le_bytes(),
]);

PosSeed::from(evaluation_seed)
PosSeed::from(*evaluation_seed)
}

/// Derive history size when sector created at `history_size` expires.
@@ -1104,8 +1154,8 @@ impl SectorId {
let sector_expiration_check_history_size =
history_size.sector_expiration_check(min_sector_lifetime)?;

let input_hash = U256::from_le_bytes(blake3_hash_list(&[
&self.0,
let input_hash = U256::from_le_bytes(*blake3_hash_list(&[
self.as_ref(),
sector_expiration_check_segment_commitment.as_ref(),
]));

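Putting the lib.rs changes together: the newtype keeps the old call-site ergonomics (deref to `[u8; 32]`, conversions from arrays and slices) while gaining the hex JSON encoding the PR title asks for. A condensed, self-contained sketch of that behaviour with a simplified stand-in (assumes `serde`, `serde_json`, and the `hex` crate's `serde` feature):

```rust
use core::array::TryFromSliceError;
use core::ops::Deref;

use serde::{Deserialize, Serialize};

/// Simplified stand-in for the reworked `Blake3Hash` newtype.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
struct Blake3Hash(#[serde(with = "hex")] [u8; 32]);

impl Blake3Hash {
    /// Size of BLAKE3 hash output (in bytes).
    const SIZE: usize = 32;
}

impl Deref for Blake3Hash {
    type Target = [u8; Blake3Hash::SIZE];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl TryFrom<&[u8]> for Blake3Hash {
    type Error = TryFromSliceError;

    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        Ok(Self(value.try_into()?))
    }
}

fn main() {
    let hash = Blake3Hash([0xab; Blake3Hash::SIZE]);

    // JSON form is now a single 64-character hex string, not an array of numbers.
    let json = serde_json::to_string(&hash).unwrap();
    assert_eq!(json, format!("\"{}\"", "ab".repeat(32)));
    assert_eq!(serde_json::from_str::<Blake3Hash>(&json).unwrap(), hash);

    // Call sites still get at the raw bytes with a single deref.
    let bytes: [u8; 32] = *hash;
    assert_eq!(Blake3Hash::try_from(&bytes[..]).unwrap(), hash);
}
```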