1/*
2 * Copyright 2013-present Facebook, Inc.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <folly/stats/TimeseriesHistogram.h>
18
19#include <random>
20
21#include <folly/portability/GTest.h>
22#include <folly/stats/TimeseriesHistogram-defs.h>
23
24using namespace std;
25using namespace folly;
26using std::chrono::seconds;
27
28namespace {
29namespace IntMTMHTS {
30enum Levels {
31 MINUTE,
32 TEN_MINUTE,
33 HOUR,
34 ALLTIME,
35 NUM_LEVELS,
36};
37
38const seconds kDurations[] = {
39 seconds(60),
40 seconds(600),
41 seconds(3600),
42 seconds(0),
43};
44} // namespace IntMTMHTS
45
46namespace IntMHTS {
47enum Levels {
48 MINUTE,
49 HOUR,
50 ALLTIME,
51 NUM_LEVELS,
52};
53
54const seconds kDurations[] = {
55 seconds(60),
56 seconds(3600),
57 seconds(0),
58};
59} // namespace IntMHTS
60
61typedef std::mt19937 RandomInt32;
62
63using StatsClock = folly::LegacyStatsClock<std::chrono::seconds>;
64StatsClock::time_point mkTimePoint(int value) {
65 return StatsClock::time_point(StatsClock::duration(value));
66}
67} // namespace
68
TEST(TimeseriesHistogram, Percentile) {
  RandomInt32 random(5);
  // [10, 109], 12 buckets including above and below
  {
    TimeseriesHistogram<int> h(
        10,
        10,
        110,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    // An empty histogram reports 0 for a percentile estimate.
    EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));

    // 10 regular buckets of width 10 plus the underflow and overflow buckets.
    EXPECT_EQ(12, h.getNumBuckets());
    EXPECT_EQ(10, h.getBucketSize());
    EXPECT_EQ(10, h.getMin());
    EXPECT_EQ(110, h.getMax());

    // Every bucket carries one time series per configured level (4 here).
    for (size_t i = 0; i < h.getNumBuckets(); ++i) {
      EXPECT_EQ(4, h.getBucket(i).numLevels());
    }

    // Add 100 samples at t=0: the extremes 0 and 120 plus 98 seeded-random
    // values in [0, 120).  The fixed seed makes the sequence — and therefore
    // the exact percentile expectations below — deterministic.
    int maxVal = 120;
    h.addValue(mkTimePoint(0), 0);
    h.addValue(mkTimePoint(0), maxVal);
    for (int i = 0; i < 98; i++) {
      h.addValue(mkTimePoint(0), random() % maxVal);
    }

    h.update(mkTimePoint(1500000000));
    // bucket 0 stores everything below min, so its minimum
    // is the lowest possible number
    EXPECT_EQ(
        std::numeric_limits<int>::min(),
        h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
    // The 99th percentile lands in the overflow bucket (values >= max).
    EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));

    // Low-percentile estimates fall below the histogram minimum (-2, -1),
    // apparently extrapolated within the underflow bucket — the high end
    // tops out at the largest sample, 120.
    EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
    EXPECT_EQ(-1, h.getPercentileEstimate(1, IntMTMHTS::ALLTIME));
    EXPECT_EQ(119, h.getPercentileEstimate(99, IntMTMHTS::ALLTIME));
    EXPECT_EQ(120, h.getPercentileEstimate(100, IntMTMHTS::ALLTIME));
  }
}
112
113TEST(TimeseriesHistogram, String) {
114 RandomInt32 random(5);
115 // [10, 109], 12 buckets including above and below
116 {
117 TimeseriesHistogram<int> hist(
118 10,
119 10,
120 110,
121 MultiLevelTimeSeries<int>(
122 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
123
124 int maxVal = 120;
125 hist.addValue(mkTimePoint(0), 0);
126 hist.addValue(mkTimePoint(0), maxVal);
127 for (int i = 0; i < 98; i++) {
128 hist.addValue(mkTimePoint(0), random() % maxVal);
129 }
130
131 hist.update(mkTimePoint(0));
132
133 const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
134 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
135 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
136 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
137 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
138 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
139 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
140 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
141 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
142 };
143
144 CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
145
146 for (size_t level = 0; level < hist.getNumLevels(); ++level) {
147 EXPECT_EQ(kStringValues1[level], hist.getString(level));
148 }
149
150 const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
151 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
152 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
153 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
154 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
155 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
156 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
157 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
158 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
159 };
160
161 CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
162
163 for (size_t level = 0; level < hist.getNumLevels(); ++level) {
164 EXPECT_EQ(kStringValues2[level], hist.getString(level));
165 }
166 }
167}
168
169TEST(TimeseriesHistogram, Clear) {
170 {
171 TimeseriesHistogram<int> hist(
172 10,
173 0,
174 100,
175 MultiLevelTimeSeries<int>(
176 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
177
178 for (int now = 0; now < 3600; now++) {
179 for (int i = 0; i < 100; i++) {
180 hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
181 }
182 }
183
184 // check clearing
185 hist.clear();
186
187 for (size_t b = 0; b < hist.getNumBuckets(); ++b) {
188 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::MINUTE));
189 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
190 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::HOUR));
191 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
192 }
193
194 for (int pct = 0; pct <= 100; pct++) {
195 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
196 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
197 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
198 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
199
200 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::MINUTE));
201 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::TEN_MINUTE));
202 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::HOUR));
203 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::ALLTIME));
204 }
205 }
206}
207
TEST(TimeseriesHistogram, Basic) {
  // Scope 1: one hour of data, each second adding values 0..99 once.
  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i);
      }
    }

    hist.update(mkTimePoint(3599));
    // The distribution is uniform over [0, 100), so each percentile pct
    // maps to the decade bucket starting at ((pct - 1) / 10) * 10 — at
    // every level, since every second holds identical data.
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(
          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    // Each interior bucket holds 10 of the 100 values per second:
    // 10*60=600 per minute, 6000 per ten minutes, 36000 per hour
    // (which equals all-time, as only one hour was added).
    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }
    // No sample was below 0 or at/above 100, so the underflow (bucket 0)
    // and overflow (last) buckets stay empty.
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));

    // Total sample counts: 100 values/second times each level's duration.
    EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
    EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(360000, hist.count(IntMTMHTS::HOUR));
    EXPECT_EQ(360000, hist.count(IntMTMHTS::ALLTIME));

    // Each second we added 4950 total over 100 data points
    EXPECT_EQ(297000, hist.sum(IntMTMHTS::MINUTE));
    EXPECT_EQ(2970000, hist.sum(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(17820000, hist.sum(IntMTMHTS::HOUR));
    EXPECT_EQ(17820000, hist.sum(IntMTMHTS::ALLTIME));

    // Average is 49.5; the integer overload truncates to 49.
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::MINUTE));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::HOUR));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::ALLTIME));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::MINUTE));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::HOUR));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::ALLTIME));

    // Rate is sum/duration: 4950 per second at every level.
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::MINUTE));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::HOUR));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::ALLTIME));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::MINUTE));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::HOUR));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::ALLTIME));

    // Arbitrary-interval queries: 10 seconds within the recorded hour.
    EXPECT_EQ(1000, hist.count(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(49500, hist.sum(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(4950, hist.rate(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(10), mkTimePoint(20)));

    // A 2-second interval near the end of the recorded data.
    EXPECT_EQ(200, hist.count(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(9900, hist.sum(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(4950, hist.rate(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(3550), mkTimePoint(3552)));

    // An interval entirely after the recorded data reports all zeros.
    EXPECT_EQ(0, hist.count(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.sum(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.rate(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.avg<double>(mkTimePoint(4550), mkTimePoint(4552)));
  }

  // -----------------

  // Scope 2: same data but every value added with weight 2 — bucket
  // counts double; percentile bucket minima are unchanged.
  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
      }
    }

    hist.update(mkTimePoint(3599));
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(
          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
  }

  // -----------------

  // Scope 3: even values 0..98 added twice each — still 100 samples and
  // 10 samples per decade bucket per second, so counts match scope 1.
  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 50; i++) {
        hist.addValue(mkTimePoint(now), i * 2, 2); // adds each item 2 times
      }
    }

    hist.update(mkTimePoint(3599));
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(
          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    // Underflow and overflow buckets are empty at every level.
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(
        0,
        hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::HOUR));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));

    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }

    // 100 values at/above the max (100) land in the overflow bucket.
    for (int i = 0; i < 100; ++i) {
      hist.addValue(mkTimePoint(3599), 200 + i);
    }
    hist.update(mkTimePoint(3599));
    EXPECT_EQ(
        100,
        hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
  }
}
381
TEST(TimeseriesHistogram, QueryByInterval) {
  // Histogram over [8, 120) with bucket size 8, on a 3-level
  // (minute / hour / all-time) time series.
  TimeseriesHistogram<int> mhts(
      8,
      8,
      120,
      MultiLevelTimeSeries<int>(60, IntMHTS::NUM_LEVELS, IntMHTS::kDurations));

  mhts.update(mkTimePoint(0));

  // Three phases of data, one value per second:
  //   [0, 7200):        value 1 (below the histogram min of 8)
  //   [7200, 10740):    value 10
  //   [10740, 10800):   value 100
  int curTime;
  for (curTime = 0; curTime < 7200; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 1);
  }
  for (curTime = 7200; curTime < 7200 + 3540; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 10);
  }
  for (curTime = 7200 + 3540; curTime < 7200 + 3600; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 100);
  }

  mhts.update(mkTimePoint(7200 + 3600 - 1));

  // Convenience pair of time points; curTime is now 10800.
  struct TimeInterval {
    TimeInterval(int s, int e) : start(mkTimePoint(s)), end(mkTimePoint(e)) {}

    StatsClock::time_point start;
    StatsClock::time_point end;
  };
  TimeInterval intervals[12] = {
      {curTime - 60, curTime},
      {curTime - 3600, curTime},
      {curTime - 7200, curTime},
      {curTime - 3600, curTime - 60},
      {curTime - 7200, curTime - 60},
      {curTime - 7200, curTime - 3600},
      {curTime - 50, curTime - 20},
      {curTime - 3020, curTime - 20},
      {curTime - 7200, curTime - 20},
      {curTime - 3000, curTime - 1000},
      {curTime - 7200, curTime - 1000},
      {curTime - 7200, curTime - 3600},
  };

  // Expected sums/counts per interval.  Intervals that reach back past the
  // minute/hour levels are answered (partly) from coarser levels, so some
  // are estimates rather than exact totals (e.g. 32129, 7139).
  int expectedSums[12] = {
      6000,
      41400,
      32400,
      35400,
      32129,
      16200,
      3000,
      33600,
      32308,
      20000,
      27899,
      16200,
  };

  int expectedCounts[12] = {
      60,
      3600,
      7200,
      3540,
      7139,
      3600,
      30,
      3000,
      7178,
      2000,
      6199,
      3600,
  };

  // The first 7200 values added all fell below the histogram minimum,
  // and went into the bucket that tracks all of the too-small values.
  // This bucket reports a minimum value of the smallest possible integer.
  int belowMinBucket = std::numeric_limits<int>::min();

  // Expected getPercentileBucketMin for p1 / p50 / p99 on each interval.
  int expectedValues[12][3] = {
      {96, 96, 96},
      {8, 8, 96},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {8, 8, 8},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {96, 96, 96},
      {8, 8, 96},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {8, 8, 8},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {belowMinBucket, belowMinBucket, 8} // alltime
  };

  // Verify interval-based sum() and count().
  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    int s = mhts.sum(itv.start, itv.end);
    EXPECT_EQ(expectedSums[i], s);

    int c = mhts.count(itv.start, itv.end);
    EXPECT_EQ(expectedCounts[i], c);
  }

  // 3 levels
  // Level 0 (last minute) contains only the value 100 -> bucket min 96.
  for (int i = 1; i <= 100; i++) {
    EXPECT_EQ(96, mhts.getPercentileBucketMin(i, 0));
    EXPECT_EQ(
        96,
        mhts.getPercentileBucketMin(
            i, mkTimePoint(curTime - 60), mkTimePoint(curTime)));
    EXPECT_EQ(
        8,
        mhts.getPercentileBucketMin(
            i, mkTimePoint(curTime - 3540), mkTimePoint(curTime - 60)));
  }

  // Level 1 (last hour): 3540s of value 10 and 60s of value 100, so the
  // p99 boundary sits between the 8- and 96-min buckets.
  EXPECT_EQ(8, mhts.getPercentileBucketMin(1, 1));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(98, 1));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(99, 1));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 1));

  // Level 2 (all-time): 2/3 of all samples fell below the histogram min,
  // so percentiles up to 66 resolve to the below-min bucket (INT_MIN).
  EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(1, 2));
  EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(66, 2));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(67, 2));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(99, 2));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 2));

  // Interval-based percentile queries; intervals reaching into the
  // all-time level resolve low percentiles to the below-min bucket,
  // which reports INT_MIN (see belowMinBucket above).
  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    int v = mhts.getPercentileBucketMin(1, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][0], v);

    v = mhts.getPercentileBucketMin(50, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][1], v);

    v = mhts.getPercentileBucketMin(99, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][2], v);
  }

  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    // Some of the older intervals that fall in the alltime bucket
    // are off by 1 or 2 in their estimated counts.
    size_t tolerance = 0;
    if (itv.start <= mkTimePoint(curTime - 7200)) {
      tolerance = 2;
    } else if (itv.start <= mkTimePoint(curTime - 3000)) {
      tolerance = 1;
    }
    size_t actualCount = (itv.end - itv.start).count();
    size_t estimatedCount = mhts.count(itv.start, itv.end);
    EXPECT_GE(actualCount, estimatedCount);
    EXPECT_LE(actualCount - tolerance, estimatedCount);
  }
}
537
538TEST(TimeseriesHistogram, SingleUniqueValue) {
539 int values[] = {-1, 0, 500, 1000, 1500};
540 for (int ii = 0; ii < 5; ++ii) {
541 int value = values[ii];
542 TimeseriesHistogram<int> h(
543 10,
544 0,
545 1000,
546 MultiLevelTimeSeries<int>(
547 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
548
549 const int kNumIters = 1000;
550 for (int jj = 0; jj < kNumIters; ++jj) {
551 h.addValue(mkTimePoint(1), value);
552 }
553 h.update(mkTimePoint(1));
554 // since we've only added one unique value, all percentiles should
555 // be that value
556 EXPECT_EQ(h.getPercentileEstimate(10, 0), value);
557 EXPECT_EQ(h.getPercentileEstimate(50, 0), value);
558 EXPECT_EQ(h.getPercentileEstimate(99, 0), value);
559
560 // Things get trickier if there are multiple unique values.
561 const int kNewValue = 750;
562 for (int kk = 0; kk < 2 * kNumIters; ++kk) {
563 h.addValue(mkTimePoint(1), kNewValue);
564 }
565 h.update(mkTimePoint(1));
566 EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue + 5, 5);
567 if (value >= 0 && value <= 1000) {
568 // only do further testing if value is within our bucket range,
569 // else estimates can be wildly off
570 if (kNewValue > value) {
571 EXPECT_NEAR(h.getPercentileEstimate(10, 0), value + 5, 5);
572 EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue + 5, 5);
573 } else {
574 EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue + 5, 5);
575 EXPECT_NEAR(h.getPercentileEstimate(99, 0), value + 5, 5);
576 }
577 }
578 }
579}
580