
some comment typos

Jack O'Connor 2020-02-27 09:52:46 -05:00
parent c197a773ac
commit 0432f9c7a3
2 changed files with 4 additions and 4 deletions


@@ -425,8 +425,8 @@ INLINE void hasher_merge_cv_stack(blake3_hasher *self, uint64_t total_len) {
 // compress_subtree_to_parent_node(). That function always returns the top
 // *two* chaining values of the subtree it's compressing. We then do lazy
 // merging with each of them separately, so that the second CV will always
-// remain unmerged. (The compress_subtree_to_parent_node also helps us support
-// extendable output when we're hashing an input all-at-once.)
+// remain unmerged. (That also helps us support extendable output when we're
+// hashing an input all-at-once.)
 INLINE void hasher_push_cv(blake3_hasher *self, uint8_t new_cv[BLAKE3_OUT_LEN],
                            uint64_t chunk_counter) {
   hasher_merge_cv_stack(self, chunk_counter);


@@ -921,8 +921,8 @@ impl Hasher {
     // compress_subtree_to_parent_node(). That function always returns the top
     // *two* chaining values of the subtree it's compressing. We then do lazy
    // merging with each of them separately, so that the second CV will always
-    // remain unmerged. (The compress_subtree_to_parent_node also helps us
-    // support extendable output when we're hashing an input all-at-once.)
+    // remain unmerged. (That also helps us support extendable output when
+    // we're hashing an input all-at-once.)
     fn push_cv(&mut self, new_cv: &CVBytes, chunk_counter: u64) {
         self.merge_cv_stack(chunk_counter);
         self.cv_stack.push(*new_cv);
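
For context, the comments touched by this commit describe the lazy chaining-value (CV) stack: `merge_cv_stack` first reduces the stack to `popcount(total_chunks)` entries, and only then does `push_cv` add the new CV, so the most recently pushed CV is never merged until a later push forces it. The sketch below is a simplified illustration of that ordering, not the real implementation; `CvBytes`, `CvStack`, and `toy_parent_cv` are placeholder names, and `toy_parent_cv` is a stand-in for BLAKE3's actual parent-node compression.

```rust
// Simplified sketch of the lazy CV-stack merge described in the comments above.
type CvBytes = [u8; 32];

// Placeholder combine step, just for illustration. NOT the BLAKE3 compression.
fn toy_parent_cv(left: &CvBytes, right: &CvBytes) -> CvBytes {
    let mut out = [0u8; 32];
    for i in 0..32 {
        out[i] = left[i] ^ right[i].rotate_left(1);
    }
    out
}

struct CvStack {
    stack: Vec<CvBytes>,
}

impl CvStack {
    // Merge completed subtrees until the stack holds exactly
    // total_chunks.count_ones() CVs, mirroring the role of merge_cv_stack().
    fn merge_cv_stack(&mut self, total_chunks: u64) {
        let target = total_chunks.count_ones() as usize;
        while self.stack.len() > target {
            let right = self.stack.pop().unwrap();
            let left = self.stack.pop().unwrap();
            self.stack.push(toy_parent_cv(&left, &right));
        }
    }

    // Lazy merging: merge the existing stack *before* pushing the new CV, so
    // the CV pushed most recently always remains unmerged. If it turns out to
    // be the root, it can still be finalized on its own, which is what keeps
    // extendable output available when hashing an input all-at-once.
    fn push_cv(&mut self, new_cv: CvBytes, chunk_counter: u64) {
        self.merge_cv_stack(chunk_counter);
        self.stack.push(new_cv);
    }
}

fn main() {
    let mut cvs = CvStack { stack: Vec::new() };
    // Push CVs for chunks 0, 1, and 2.
    for counter in 0..3u64 {
        cvs.push_cv([counter as u8; 32], counter);
    }
    // Chunks 0 and 1 were merged lazily when chunk 2 arrived; chunk 2's CV
    // stays on top, unmerged.
    assert_eq!(cvs.stack.len(), 2);
    println!("stack depth after 3 chunks: {}", cvs.stack.len());
}
```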