1
0
Fork 0
mirror of https://github.com/BLAKE3-team/BLAKE3 synced 2024-04-25 14:55:10 +02:00

more cleanup of undocumented API

This commit is contained in:
Jack O'Connor 2021-03-28 20:01:18 -04:00
parent dab97de401
commit 4b7babbe99
5 changed files with 12 additions and 14 deletions

View File

@ -308,7 +308,7 @@ fn maybe_memmap_file(file: &File) -> Result<Option<memmap::Mmap>> {
fn write_hex_output(mut output: blake3::OutputReader, args: &Args) -> Result<()> {
// Encoding multiples of the block size is most efficient.
let mut len = args.len()?;
let mut block = [0; blake3::BLOCK_LEN];
let mut block = [0; blake3::guts::BLOCK_LEN];
while len > 0 {
output.fill(&mut block);
let hex_str = hex::encode(&block[..]);

View File

@ -4,8 +4,9 @@ extern crate test;
use arrayref::array_ref;
use arrayvec::ArrayVec;
use blake3::guts::{BLOCK_LEN, CHUNK_LEN};
use blake3::platform::{Platform, MAX_SIMD_DEGREE};
use blake3::{BLOCK_LEN, CHUNK_LEN, OUT_LEN};
use blake3::OUT_LEN;
use rand::prelude::*;
use test::Bencher;

View File

@ -6,6 +6,9 @@
//! We could stabilize something like this module in the future. If you have a
//! use case for it, please let us know by filing a GitHub issue.
pub const BLOCK_LEN: usize = 64;
pub const CHUNK_LEN: usize = 1024;
#[derive(Clone, Debug)]
pub struct ChunkState(crate::ChunkState);

View File

@ -79,7 +79,7 @@ mod test;
#[doc(hidden)]
pub mod guts;
// The platform module is pub for benchmarks only. It is not stable.
/// Undocumented and unstable, for benchmarks only.
#[doc(hidden)]
pub mod platform;
@ -128,14 +128,8 @@ pub const OUT_LEN: usize = 32;
/// The number of bytes in a key, 32.
pub const KEY_LEN: usize = 32;
// These constants are pub for incremental use cases like `bao`, as well as
// tests and benchmarks. Most callers should not need them.
#[doc(hidden)]
pub const BLOCK_LEN: usize = 64;
#[doc(hidden)]
pub const CHUNK_LEN: usize = 1024;
#[doc(hidden)]
pub const MAX_DEPTH: usize = 54; // 2^54 * CHUNK_LEN = 2^64
const MAX_DEPTH: usize = 54; // 2^54 * CHUNK_LEN = 2^64
use guts::{BLOCK_LEN, CHUNK_LEN};
// While iterating the compression function within a chunk, the CV is
// represented as words, to avoid doing two extra endianness conversions for
@ -540,7 +534,7 @@ impl fmt::Debug for ChunkState {
// use full-width SIMD vectors for parent hashing. Without parallel parent
// hashing, we lose about 10% of overall throughput on AVX2 and AVX-512.
// pub for benchmarks
/// Undocumented and unstable, for benchmarks only.
#[doc(hidden)]
#[derive(Clone, Copy)]
pub enum IncrementCounter {

View File

@ -1,9 +1,9 @@
use blake3::{BLOCK_LEN, CHUNK_LEN};
use blake3::guts::{BLOCK_LEN, CHUNK_LEN};
use serde::{Deserialize, Serialize};
// A non-multiple of 4 is important, since one possible bug is to fail to emit
// partial words.
pub const OUTPUT_LEN: usize = 2 * blake3::BLOCK_LEN + 3;
pub const OUTPUT_LEN: usize = 2 * BLOCK_LEN + 3;
pub const TEST_CASES: &[usize] = &[
0,