HistogramTest.cpp
/*
 * Copyright 2011-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <folly/stats/Histogram.h>

#include <folly/portability/GTest.h> // TEST() and EXPECT_*() macros

using folly::Histogram;

// Insert 100 evenly distributed values into a histogram with 100 buckets
TEST(Histogram, Test100) {
  Histogram<int64_t> h(1, 0, 100);

  for (unsigned int n = 0; n < 100; ++n) {
    h.addValue(n);
  }

  // 100 buckets, plus 1 for below min, and 1 for above max
  EXPECT_EQ(h.getNumBuckets(), 102);

  double epsilon = 1e-6;
  for (unsigned int n = 0; n <= 100; ++n) {
    double pct = n / 100.0;

    // Floating point arithmetic isn't 100% accurate, and the percentile
    // (n / 100) lands exactly on a bucket boundary. Add epsilon to ensure
    // we fall in the upper bucket.
    if (n < 100) {
      double lowPct = -1.0;
      double highPct = -1.0;
      unsigned int bucketIdx =
          h.getPercentileBucketIdx(pct + epsilon, &lowPct, &highPct);
      EXPECT_EQ(n + 1, bucketIdx);
      EXPECT_FLOAT_EQ(n / 100.0, lowPct);
      EXPECT_FLOAT_EQ((n + 1) / 100.0, highPct);
    }

    // Also test n - epsilon, to test falling in the lower bucket.
    if (n > 0) {
      double lowPct = -1.0;
      double highPct = -1.0;
      unsigned int bucketIdx =
          h.getPercentileBucketIdx(pct - epsilon, &lowPct, &highPct);
      EXPECT_EQ(n, bucketIdx);
      EXPECT_FLOAT_EQ((n - 1) / 100.0, lowPct);
      EXPECT_FLOAT_EQ(n / 100.0, highPct);
    }

    // Check getPercentileEstimate()
    EXPECT_EQ(n, h.getPercentileEstimate(pct));
  }
}
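
// A minimal sketch of the bucket layout the tests above rely on (assuming
// the same value-to-bucket mapping exercised by Test100): with bucketSize 1,
// min 0, and max 100, index 0 is the "below min" bucket, indices 1..100 are
// the unit-wide buckets, and index 101 is the "above max" bucket.
TEST(Histogram, BucketLayoutSketch) {
  Histogram<int64_t> h(1, 0, 100);
  h.addValue(-5); // below min -> bucket index 0
  h.addValue(50); // in range -> bucket index 51
  h.addValue(500); // at or above max -> bucket index 101
  EXPECT_EQ(1, h.getBucketByIndex(0).count);
  EXPECT_EQ(1, h.getBucketByIndex(51).count);
  EXPECT_EQ(1, h.getBucketByIndex(101).count);
}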

// Test calling getPercentileBucketIdx() and getPercentileEstimate() on an
// empty histogram
TEST(Histogram, TestEmpty) {
  Histogram<int64_t> h(1, 0, 100);

  for (unsigned int n = 0; n <= 100; ++n) {
    double pct = n / 100.0;

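    // With no samples, every percentile resolves to the first normal bucket
    // (index 1), and both boundary percentages are reported as 0.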
    double lowPct = -1.0;
    double highPct = -1.0;
    unsigned int bucketIdx = h.getPercentileBucketIdx(pct, &lowPct, &highPct);
    EXPECT_EQ(1, bucketIdx);
    EXPECT_FLOAT_EQ(0.0, lowPct);
    EXPECT_FLOAT_EQ(0.0, highPct);

    EXPECT_EQ(0, h.getPercentileEstimate(pct));
  }
}

// Test calling getPercentileBucketIdx() and getPercentileEstimate() on a
// histogram with just a single value.
TEST(Histogram, Test1) {
  Histogram<int64_t> h(1, 0, 100);
  h.addValue(42);
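  // The lone value 42 lands in bucket index 43: the normal buckets start at
  // index 1, since index 0 is the "below min" bucket.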

  for (unsigned int n = 0; n < 100; ++n) {
    double pct = n / 100.0;

    double lowPct = -1.0;
    double highPct = -1.0;
    unsigned int bucketIdx = h.getPercentileBucketIdx(pct, &lowPct, &highPct);
    EXPECT_EQ(43, bucketIdx);
    EXPECT_FLOAT_EQ(0.0, lowPct);
    EXPECT_FLOAT_EQ(1.0, highPct);

    EXPECT_EQ(42, h.getPercentileEstimate(pct));
  }
}

// Test adding enough numbers to make the sum value overflow in the
// "below min" bucket
TEST(Histogram, TestOverflowMin) {
  Histogram<int64_t> h(1, 0, 100);

  for (unsigned int n = 0; n < 9; ++n) {
    h.addValue(-0x0fffffffffffffff);
  }
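  // Each value has magnitude just under 2^60, so nine of them sum to roughly
  // 9 * 2^60, which exceeds the int64_t range and wraps the bucket's running
  // sum.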

  // Compute a percentile estimate. We only added values to the "below min"
  // bucket, so this should check that bucket. We're mainly verifying that the
  // code doesn't crash here when the bucket average is larger than the max
  // value that is supposed to be in the bucket.
  int64_t estimate = h.getPercentileEstimate(0.05);
  // The code will return the smallest possible value when it detects an
  // overflow beyond the minimum value.
  EXPECT_EQ(std::numeric_limits<int64_t>::min(), estimate);
}

// Test adding enough numbers to make the sum value overflow in the
// "above max" bucket
TEST(Histogram, TestOverflowMax) {
  Histogram<int64_t> h(1, 0, 100);

  for (unsigned int n = 0; n < 9; ++n) {
    h.addValue(0x0fffffffffffffff);
  }

  // The code will return the maximum possible value when it detects an
  // overflow beyond the max value.
  int64_t estimate = h.getPercentileEstimate(0.95);
  EXPECT_EQ(std::numeric_limits<int64_t>::max(), estimate);
}

// Test adding enough numbers to make the sum value overflow in one of the
// normal buckets
TEST(Histogram, TestOverflowBucket) {
  Histogram<int64_t> h(0x0100000000000000, 0, 0x1000000000000000);

  for (unsigned int n = 0; n < 9; ++n) {
    h.addValue(0x0fffffffffffffff);
  }

  // The histogram code should return the bucket midpoint
  // when it detects overflow.
  int64_t estimate = h.getPercentileEstimate(0.95);
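  // All nine values fall in the bucket covering [0x0f00000000000000,
  // 0x1000000000000000), whose midpoint is 0x0f80000000000000.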
  EXPECT_EQ(0x0f80000000000000, estimate);
}

TEST(Histogram, TestDouble) {
  // Insert 50 evenly spaced values (100 apart) into the histogram
  Histogram<double> h(100.0, 0.0, 5000.0);
  for (double n = 50; n < 5000; n += 100) {
    h.addValue(n);
  }
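  // 5000.0 / 100.0 = 50 normal buckets, plus the below-min and above-max
  // buckets.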
  EXPECT_EQ(52, h.getNumBuckets());
  EXPECT_EQ(2500.0, h.getPercentileEstimate(0.5));
  EXPECT_EQ(4500.0, h.getPercentileEstimate(0.9));
}

// Test where the bucket width is not an even multiple of the histogram range
TEST(Histogram, TestDoubleInexactWidth) {
  Histogram<double> h(100.0, 0.0, 4970.0);
  for (double n = 50; n < 5000; n += 100) {
    h.addValue(n);
  }
  EXPECT_EQ(52, h.getNumBuckets());
  EXPECT_EQ(2500.0, h.getPercentileEstimate(0.5));
  EXPECT_EQ(4500.0, h.getPercentileEstimate(0.9));

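  // The 100-unit bucket width doesn't divide the 4970 range evenly: the last
  // normal bucket (index 50) is narrower than the rest, and values at or
  // above 4970 land in the "above max" bucket at index 51.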
  EXPECT_EQ(0, h.getBucketByIndex(51).count);
  h.addValue(4990);
  h.addValue(5100);
  EXPECT_EQ(2, h.getBucketByIndex(51).count);
  EXPECT_EQ(2600.0, h.getPercentileEstimate(0.5));
}

// Test where the bucket width is larger than the histogram range
// (There isn't really much point to defining a histogram this way,
// but we want to ensure that it still works just in case.)
TEST(Histogram, TestDoubleWidthTooBig) {
  Histogram<double> h(100.0, 0.0, 7.0);
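  // Just one normal bucket covers the entire [0, 7) range; the other two are
  // the below-min and above-max buckets.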
  EXPECT_EQ(3, h.getNumBuckets());

  for (double n = 0; n < 7; n += 1) {
    h.addValue(n);
  }
  EXPECT_EQ(0, h.getBucketByIndex(0).count);
  EXPECT_EQ(7, h.getBucketByIndex(1).count);
  EXPECT_EQ(0, h.getBucketByIndex(2).count);
  EXPECT_EQ(3.0, h.getPercentileEstimate(0.5));

  h.addValue(-1.0);
  EXPECT_EQ(1, h.getBucketByIndex(0).count);
  h.addValue(7.5);
  EXPECT_EQ(1, h.getBucketByIndex(2).count);
  EXPECT_NEAR(3.0, h.getPercentileEstimate(0.5), 1e-14);
}

// Test that we get counts right
TEST(Histogram, Counts) {
  Histogram<int32_t> h(1, 0, 10);
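  // 10 unit-wide buckets plus the below-min and above-max buckets.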
  EXPECT_EQ(12, h.getNumBuckets());

  // Add one to each bucket, make sure the counts match
  for (int32_t i = 0; i < 10; i++) {
    h.addValue(i);
    EXPECT_EQ(i + 1, h.computeTotalCount());
  }

  // Add a lot to one bucket, make sure the counts still make sense
  for (int32_t i = 0; i < 100; i++) {
    h.addValue(0);
  }
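  // 10 values from the first loop plus 100 more zeros.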
  EXPECT_EQ(110, h.computeTotalCount());
}