Skip to content

Commit

Permalink
Merge pull request #99 from availproject/miguel/halborn_audit/hal_0x
Browse files Browse the repository at this point in the history
Audit: Minor/Info items of audit
  • Loading branch information
ToufeeqP authored Jun 28, 2024
2 parents a1df40a + 445cfa6 commit f10b958
Show file tree
Hide file tree
Showing 20 changed files with 93 additions and 100 deletions.
6 changes: 3 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion core/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "avail-core"
version = "0.6.1"
version = "0.6.2"
authors = []
edition = "2021"
license = "Apache-2.0"
Expand Down
7 changes: 3 additions & 4 deletions core/src/asdr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ use sp_std::alloc::format;
/// the decoding fails.
pub const EXTRINSIC_FORMAT_VERSION: u8 = 4;

/// The `SingaturePayload` of `UncheckedExtrinsic`.
/// The `SignaturePayload` of `UncheckedExtrinsic`.
type SignaturePayload<Address, Signature, Extra> = (Address, Signature, Extra);

/// An extrinsic right from the external world. This is unchecked and so can contain a signature.
Expand Down Expand Up @@ -735,13 +735,12 @@ mod tests {
use core::mem::transmute;

let op = unsigned_to_opaque();
let new_op = unsafe {
unsafe {
// Using `transmute` because `OpaqueExtrinsic.0` is not public.
let mut raw = transmute::<OpaqueExtrinsic, Vec<u8>>(op);
raw.pop();
transmute::<Vec<u8>, OpaqueExtrinsic>(raw)
};
new_op
}
}

#[test_case( unsigned_to_opaque() => true ; "Unsigned Ex")]
Expand Down
17 changes: 13 additions & 4 deletions core/src/constants.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use core::num::NonZeroU32;
use sp_arithmetic::Perbill;
use static_assertions::const_assert;
use static_assertions::{const_assert, const_assert_eq};

/// We allow `Normal` extrinsics to fill up the block up to 85%, the rest can be used
/// by Operational extrinsics.
Expand All @@ -14,16 +14,14 @@ const_assert!(BLOCK_CHUNK_SIZE.get() > 0);
pub const BLOCK_CHUNK_SIZE: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) };

/// Money matters.
// TODO: evaluate whether we should consider moving this into avail
pub mod currency {

pub type Balance = u128;

/// `AVAIL` has 18 decimal positions.
pub const AVAIL: Balance = 1_000_000_000_000_000_000;

/// Cents of AVAIL has 16 decimal positions (100 Cents = $1)
/// 1 Cents = `10_000_000_000_000_000`
/// Cents of AVAIL has 16 decimal positions (100 Cents = 1)
pub const CENTS: Balance = AVAIL / 100;

/// Millicent of AVAIL has 13 decimal positions( 1000 mCents = 1 cent).
Expand All @@ -41,3 +39,14 @@ pub mod currency {
/// `PICO_AVAIL` has 6 decimal positions
pub const PICO_AVAIL: Balance = NANO_AVAIL / 1_000;
}

/// Constants shared by the Kate commitment / data-availability matrix code.
pub mod kate {
use super::*;

/// Erasure-coding extension factor: the data matrix is extended to
/// `EXTENSION_FACTOR` times its original number of rows.
pub const EXTENSION_FACTOR: u32 = 2;
/// Size in bytes of one serialized commitment
/// (presumably a compressed BLS12-381 G1 point — 48 bytes; confirm against the KZG backend).
pub const COMMITMENT_SIZE: usize = 48;
/// Usable payload bytes per chunk: one byte fewer than `CHUNK_SIZE`
/// (the spare byte keeps the chunk representable as a field scalar — TODO confirm).
pub const DATA_CHUNK_SIZE: usize = 31;
/// Total size in bytes of one matrix chunk (the scalar serialization width).
pub const CHUNK_SIZE: usize = 32;

// Compile-time invariants: the payload size is exactly one byte short of the
// chunk size, and the chunk size must agree with the block-level chunk size
// declared alongside these constants (`BLOCK_CHUNK_SIZE`).
const_assert_eq!(DATA_CHUNK_SIZE, CHUNK_SIZE - 1);
const_assert_eq!(CHUNK_SIZE, BLOCK_CHUNK_SIZE.get() as usize);
}
4 changes: 2 additions & 2 deletions core/src/data_lookup/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ impl TryFrom<CompactDataLookup> for DataLookup {

let last_range = offset..compacted.size;
if !last_range.is_empty() {
index.push((prev_id, offset..compacted.size));
index.push((prev_id, last_range));
}

let lookup = DataLookup { index };
Expand Down Expand Up @@ -204,7 +204,7 @@ mod test {
#[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(vec![(0,0..15),(1, 15..35), (2, 35..185)]); "Valid case")]
#[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")]
#[test_case( vec![(0, (u32::MAX -1) as usize), (1, 2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")]
#[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")]
#[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsorted data")]
#[test_case( vec![] => Ok(vec![]); "Empty data")]
fn from_id_and_len(
id_len_data: Vec<(u32, usize)>,
Expand Down
2 changes: 1 addition & 1 deletion core/src/data_proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "runtime")]
use sp_io::hashing::keccak_256;

/// Max data supported on bidge (Ethereum calldata limits)
/// Max data supported on bridge (Ethereum calldata limits)
pub const BOUNDED_DATA_MAX_LENGTH: u32 = 102_400;

/// Maximum size of data allowed in the bridge
Expand Down
2 changes: 1 addition & 1 deletion core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ macro_rules! keccak256_concat_update {
}};
}

/// Calculates the Kecck 256 of arguments with NO extra allocations to join inputs.
/// Calculates the Keccak 256 of arguments with NO extra allocations to join inputs.
#[macro_export]
macro_rules! keccak256_concat{
($($arg:tt)*) => {{
Expand Down
2 changes: 1 addition & 1 deletion kate/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "kate"
version = "0.9.0"
version = "0.9.1"
authors = ["Denis Ermolin <[email protected]>"]
edition = "2021"
license = "Apache-2.0"
Expand Down
2 changes: 1 addition & 1 deletion kate/recovery/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "kate-recovery"
version = "0.9.3"
version = "0.10.0"
authors = ["Denis Ermolin <[email protected]>"]
edition = "2018"
license = "Apache-2.0"
Expand Down
49 changes: 23 additions & 26 deletions kate/recovery/src/com.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
use crate::matrix;
#[cfg(feature = "std")]
use crate::{data, sparse_slice_read::SparseSliceRead};
use core::{num::TryFromIntError, ops::Range};

#[cfg(feature = "std")]
use avail_core::{
constants::kate::{CHUNK_SIZE, DATA_CHUNK_SIZE},
ensure,
};
use avail_core::{data_lookup::Error as DataLookupError, AppId, DataLookup};

use sp_std::prelude::*;
use thiserror_no_std::Error;

#[cfg(feature = "std")]
use crate::data;
#[cfg(feature = "std")]
use crate::{config, sparse_slice_read::SparseSliceRead};
#[cfg(feature = "std")]
use avail_core::ensure;
#[cfg(feature = "std")]
use codec::{Decode, IoReader};
#[cfg(feature = "std")]
Expand Down Expand Up @@ -177,9 +178,9 @@ pub fn reconstruct_app_extrinsics(
app_id: AppId,
) -> Result<AppData, ReconstructionError> {
let data = reconstruct_available(dimensions, cells)?;
const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64);
const_assert!(CHUNK_SIZE as u64 <= u32::MAX as u64);
let range = index
.projected_range_of(app_id, config::CHUNK_SIZE as u32)
.projected_range_of(app_id, CHUNK_SIZE as u32)
.ok_or(ReconstructionError::MissingId(app_id))?;

Ok(unflatten_padded_data(vec![(app_id, range)], data)?
Expand All @@ -203,8 +204,8 @@ pub fn reconstruct_extrinsics(
) -> Result<Vec<(AppId, AppData)>, ReconstructionError> {
let data = reconstruct_available(dimensions, cells)?;

const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64);
let ranges = lookup.projected_ranges(config::CHUNK_SIZE as u32)?;
const_assert!(CHUNK_SIZE as u64 <= u32::MAX as u64);
let ranges = lookup.projected_ranges(CHUNK_SIZE as u32)?;
unflatten_padded_data(ranges, data).map_err(ReconstructionError::DataDecodingError)
}

Expand All @@ -218,7 +219,7 @@ pub fn reconstruct_extrinsics(
pub fn reconstruct_columns(
dimensions: matrix::Dimensions,
cells: &[data::Cell],
) -> Result<HashMap<u16, Vec<[u8; config::CHUNK_SIZE]>>, ReconstructionError> {
) -> Result<HashMap<u16, Vec<[u8; CHUNK_SIZE]>>, ReconstructionError> {
let cells: Vec<data::DataCell> = cells.iter().cloned().map(Into::into).collect::<Vec<_>>();
let columns = map_cells(dimensions, cells)?;

Expand All @@ -235,7 +236,7 @@ pub fn reconstruct_columns(
let column = reconstruct_column(dimensions.extended_rows(), &cells)?
.iter()
.map(BlsScalar::to_bytes)
.collect::<Vec<[u8; config::CHUNK_SIZE]>>();
.collect::<Vec<[u8; CHUNK_SIZE]>>();

Ok((col, column))
})
Expand Down Expand Up @@ -266,7 +267,7 @@ fn reconstruct_available(
})
.collect::<Result<Vec<Vec<_>>, ReconstructionError>>()?;

let mut result: Vec<u8> = Vec::with_capacity(scalars.len() * config::CHUNK_SIZE);
let mut result: Vec<u8> = Vec::with_capacity(scalars.len() * CHUNK_SIZE);

for (row, col) in dimensions.iter_data() {
let bytes = scalars
Expand All @@ -275,7 +276,7 @@ fn reconstruct_available(
.map(Option::as_ref)
.unwrap_or(None)
.map(BlsScalar::to_bytes)
.unwrap_or_else(|| [0; config::CHUNK_SIZE]);
.unwrap_or_else(|| [0; CHUNK_SIZE]);
result.extend(bytes);
}
Ok(result)
Expand Down Expand Up @@ -318,14 +319,14 @@ pub fn decode_app_extrinsics(
.and_then(|column| column.get(&row_number))
.filter(|cell| !cell.data.is_empty())
{
None => app_data.extend(vec![0; config::CHUNK_SIZE]),
None => app_data.extend(vec![0; CHUNK_SIZE]),
Some(cell) => app_data.extend(cell.data),
}
}

const_assert!((config::CHUNK_SIZE as u64) <= (u32::MAX as u64));
const_assert!((CHUNK_SIZE as u64) <= (u32::MAX as u64));
let ranges = index
.projected_range_of(app_id, config::CHUNK_SIZE as u32)
.projected_range_of(app_id, CHUNK_SIZE as u32)
.map(|range| vec![(app_id, range)])
.unwrap_or_default();

Expand Down Expand Up @@ -363,20 +364,17 @@ pub fn unflatten_padded_data(
ranges: Vec<(AppId, AppDataRange)>,
data: Vec<u8>,
) -> Result<Vec<(AppId, AppData)>, UnflattenError> {
ensure!(
data.len() % config::CHUNK_SIZE == 0,
UnflattenError::InvalidLen
);
ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen);

fn extract_encoded_extrinsic(range_data: &[u8]) -> SparseSliceRead {
const_assert_ne!(config::CHUNK_SIZE, 0);
const_assert_ne!(config::DATA_CHUNK_SIZE, 0);
const_assert_ne!(CHUNK_SIZE, 0);
const_assert_ne!(DATA_CHUNK_SIZE, 0);

// INTERNAL: Chunk into 32 bytes (CHUNK_SIZE), then remove padding (0..30 bytes).
SparseSliceRead::from_iter(
range_data
.chunks_exact(config::CHUNK_SIZE)
.map(|chunk| &chunk[0..config::DATA_CHUNK_SIZE]),
.chunks_exact(CHUNK_SIZE)
.map(|chunk| &chunk[0..DATA_CHUNK_SIZE]),
)
}

Expand Down Expand Up @@ -577,7 +575,6 @@ pub fn reconstruct_column(
let mut subset: Vec<Option<BlsScalar>> = Vec::with_capacity(row_count_sz);

// fill up vector in ordered fashion
// @note the way it's done should be improved
for i in 0..row_count {
subset.push(find_row_by_index(i, cells));
}
Expand Down
9 changes: 4 additions & 5 deletions kate/recovery/src/commitments.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
use crate::config::COMMITMENT_SIZE;
use avail_core::constants::kate::{CHUNK_SIZE, COMMITMENT_SIZE};
use core::{array::TryFromSliceError, convert::TryInto, num::TryFromIntError};
use sp_std::prelude::*;
use thiserror_no_std::Error;

#[cfg(feature = "std")]
use crate::{com, config, matrix};
use crate::{com, matrix};
#[cfg(feature = "std")]
use avail_core::{ensure, AppId, DataLookup};
#[cfg(feature = "std")]
Expand Down Expand Up @@ -69,13 +69,13 @@ impl From<dusk_plonk::error::Error> for Error {

#[cfg(feature = "std")]
/// Converts a single `CHUNK_SIZE`-byte slice into a `BlsScalar`.
///
/// Fails with a length error if `chunk` is not exactly `CHUNK_SIZE` bytes,
/// or with a scalar-decoding error if the bytes are not a valid scalar.
fn try_into_scalar(chunk: &[u8]) -> Result<BlsScalar, Error> {
	// Fixed-size conversion first (length check), then scalar deserialization.
	let bytes: [u8; CHUNK_SIZE] = chunk.try_into()?;
	BlsScalar::from_bytes(&bytes).map_err(From::from)
}

#[cfg(feature = "std")]
fn try_into_scalars(data: &[u8]) -> Result<Vec<BlsScalar>, Error> {
let chunks = data.chunks_exact(config::CHUNK_SIZE);
let chunks = data.chunks_exact(CHUNK_SIZE);
ensure!(chunks.remainder().is_empty(), Error::BadLen);
chunks
.map(try_into_scalar)
Expand Down Expand Up @@ -113,7 +113,6 @@ pub fn verify_equality(
}

let dim_cols = dimensions.width();
// @TODO Opening Key here???
let (prover_key, _) = public_params.trim(dim_cols)?;
let domain = EvaluationDomain::new(dim_cols)?;

Expand Down
5 changes: 0 additions & 5 deletions kate/recovery/src/config.rs

This file was deleted.

2 changes: 1 addition & 1 deletion kate/recovery/src/couscous.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@ use dusk_plonk::commitment_scheme::kzg10::PublicParameters;
/// Returns the KZG public parameters bundled with this crate.
///
/// The raw parameter bytes are embedded at compile time from
/// `../../src/pp_1024.data` and deserialized on every call.
///
/// # Panics
/// Panics if the embedded bytes fail to deserialize, which would indicate a
/// corrupted or mismatched `pp_1024.data` file.
pub fn public_params() -> PublicParameters {
	// Embedded at compile time; the path is relative to this source file.
	const RAW_PP: &[u8] = include_bytes!("../../src/pp_1024.data");
	PublicParameters::from_slice(RAW_PP)
		.expect("Deserializing of public parameters should work for serialized pp")
}
1 change: 0 additions & 1 deletion kate/recovery/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

pub mod com;
pub mod commitments;
pub mod config;
pub mod data;
pub mod matrix;
pub mod proof;
Expand Down
Loading

0 comments on commit f10b958

Please sign in to comment.