summary refs log tree commit diff
path: root/util/bloom/bloom_test.go
diff options
context:
space:
mode:
Diffstat (limited to 'util/bloom/bloom_test.go')
-rw-r--r--  util/bloom/bloom_test.go  28
1 files changed, 11 insertions, 17 deletions
diff --git a/util/bloom/bloom_test.go b/util/bloom/bloom_test.go
index 12799e6a3..2380b8d87 100644
--- a/util/bloom/bloom_test.go
+++ b/util/bloom/bloom_test.go
@@ -56,7 +56,7 @@ func TestOptimal(t *testing.T) {
numFP := []int{100, 25, 5}
if testing.Short() {
- numElementsCases = []int{2000, 20000}
+ numElementsCases = []int{2000, 200000}
fpRateCases = []float64{0.001, 0.00001}
numFP = []int{100, 25}
}
@@ -99,7 +99,6 @@ func closeEnough(a, b, maxerr float64) (bool, float64) {
}
// based on "github.com/willf/bloom"
-// Changes here might need to be replicated to xor_test.go estimateFalsePositiveRateXor()
func (f *Filter) estimateFalsePositiveRate(numAdded uint32, numFP int) float64 {
x := make([]byte, 4)
for i := uint32(0); i < numAdded; i++ {
@@ -183,21 +182,18 @@ func TestMarshalJSON(t *testing.T) {
}
}
-const largeFilterElements = 150000
-
-// BenchmarkCreateLargeBloomFilter should have the same structure as xor_test.go BenchmarkCreateLargeXorFilter
-func BenchmarkCreateLargeBloomFilter(b *testing.B) {
+func BenchmarkCreateLargeFilter(b *testing.B) {
// dialing mu=25000; 3 servers; so each mailbox is 75000 real and 75000 noise
// for a total of 150000 elements in the dialing bloom filter
+ numElements := 150000
for i := 0; i < b.N; i++ {
- numBits, numHashes := Optimal(largeFilterElements, 1e-10)
+ numBits, numHashes := Optimal(numElements, 1e-10)
f := New(numBits, numHashes, 1234)
x := make([]byte, 4)
- for i := uint32(0); i < uint32(largeFilterElements); i++ {
+ for i := uint32(0); i < uint32(numElements); i++ {
binary.BigEndian.PutUint32(x, i)
f.Set(x)
}
- f.MarshalBinary()
}
}
@@ -355,26 +351,24 @@ func BenchmarkBloomFilterSet(b *testing.B) {
}
}
-const filterTestElements = 1000000
-
-// See also BenchmarkXorFilterTest
func BenchmarkBloomFilterTest(b *testing.B) {
- sizeBits, numHashes := Optimal(filterTestElements, 0.01)
+ bfElements := 1000000
+ sizeBits, numHashes := Optimal(bfElements, 0.01)
prefix := uint32(0)
bf := New(sizeBits, numHashes, prefix)
- dataset := make([][]byte, filterTestElements)
- for n := 0; n < filterTestElements; n++ {
+ dataset := make([][]byte, bfElements)
+ for n := 0; n < bfElements; n++ {
hash := crypto.Hash([]byte{byte(n), byte(n >> 8), byte(n >> 16), byte(n >> 24)})
dataset[n] = hash[:]
}
// set half of them.
- for n := 0; n < filterTestElements/2; n++ {
+ for n := 0; n < bfElements/2; n++ {
bf.Set(dataset[n])
}
b.ResetTimer()
for x := 0; x < b.N; x++ {
- bf.Test(dataset[x%filterTestElements])
+ bf.Test(dataset[x%bfElements])
}
}