chore(scanner): always return values to the pools when hashing blocks (#10377)

There are some early return statements in between, but returning the values
to the pools on those paths isn't my motivation (it hardly ever happens); I
just find the deferred cleanup more readable. Same with moving `hashLength`:
placed next to the pool, its connection with `sha256.New()` is clearer.
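
A minimal sketch of the pattern, for illustration only (hashDataSketch and its
package layout are hypothetical, not the actual scanner code):

	// Sketch of the defer-based pool cleanup described above.
	// hashDataSketch is a hypothetical helper, not the actual Blocks code.
	package main

	import (
		"crypto/sha256"
		"fmt"
		"hash"
		"sync"
	)

	const hashLength = sha256.Size

	var hashPool = sync.Pool{
		New: func() any { return sha256.New() },
	}

	func hashDataSketch(data []byte) []byte {
		hf := hashPool.Get().(hash.Hash) //nolint:forcetypeassert
		// Deferring the cleanup means every return path, including early
		// returns, resets the hasher and hands it back to the pool.
		defer func() {
			hf.Reset()
			hashPool.Put(hf)
		}()

		if len(data) == 0 {
			return nil // early return; hf still goes back to the pool
		}
		hf.Write(data)
		return hf.Sum(make([]byte, 0, hashLength))
	}

	func main() {
		fmt.Printf("%x\n", hashDataSketch([]byte("hello")))
	}

The defer mirrors what the diff below does for both the hasher and the copy
buffer.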

Followup to:
chore(scanner): reduce memory pressure by using pools inside hasher #10222
6e26fab3a0

Signed-off-by: Simon Frei <freisim93@gmail.com>
Author: Simon Frei
Date: 2025-09-07 17:00:19 +02:00
Committed by: GitHub
Parent: 4b8d07d91c
Commit: f0328abeaa

@@ -31,6 +31,8 @@ var bufPool = sync.Pool{
 	},
 }
+const hashLength = sha256.Size
 var hashPool = sync.Pool{
 	New: func() any {
 		return sha256.New()
@@ -43,9 +45,6 @@ func Blocks(ctx context.Context, r io.Reader, blocksize int, sizehint int64, cou
 		counter = &noopCounter{}
 	}
-	hf := hashPool.Get().(hash.Hash) //nolint:forcetypeassert
-	const hashLength = sha256.Size
 	var blocks []protocol.BlockInfo
 	var hashes, thisHash []byte
@@ -62,8 +61,14 @@ func Blocks(ctx context.Context, r io.Reader, blocksize int, sizehint int64, cou
 		hashes = make([]byte, 0, hashLength*numBlocks)
 	}
+	hf := hashPool.Get().(hash.Hash) //nolint:forcetypeassert
 	// A 32k buffer is used for copying into the hash function.
 	buf := bufPool.Get().(*[bufSize]byte)[:] //nolint:forcetypeassert
+	defer func() {
+		bufPool.Put((*[bufSize]byte)(buf))
+		hf.Reset()
+		hashPool.Put(hf)
+	}()
 	var offset int64
 	lr := io.LimitReader(r, int64(blocksize)).(*io.LimitedReader)
@@ -102,9 +107,6 @@ func Blocks(ctx context.Context, r io.Reader, blocksize int, sizehint int64, cou
 		hf.Reset()
 	}
-	bufPool.Put((*[bufSize]byte)(buf))
-	hf.Reset()
-	hashPool.Put(hf)
 	if len(blocks) == 0 {
 		// Empty file