aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/clickhouse/src/Common/EventRateMeter.h
blob: 3a21a80ce8b14304b608d52ae72c27eb89c5ec28 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#pragma once

#include <base/defines.h>

#include <Common/ExponentiallySmoothedCounter.h>

#include <numbers>


namespace DB
{

/// Event count measurement with exponential smoothing intended for computing time derivatives
/// Measures event rate with exponential smoothing; intended for computing time derivatives.
class EventRateMeter
{
public:
    explicit EventRateMeter(double now, double period_)
        : period(period_)
        , half_decay_time(period * std::numbers::ln2) // chosen so that `ExponentiallySmoothedAverage::sumWeights()` equals `1/period`
    {
        reset(now);
    }

    /// Register `count` events that happened at instant `now`.
    /// Events older than `period` before `now` are gradually forgotten via
    /// exponential smoothing, in a way that keeps the average event rate consistent.
    /// NOTE: adding events further than `period` into the past must be avoided.
    void add(double now, double count)
    {
        // Drop data gathered during the initial heating stage (e.g. at the beginning of a query).
        // Keeping it would make the average ramp up gradually and the meter unreactive.
        if (count != 0.0)
        {
            ++data_points;
            if (data_points < 5)
            {
                start = events.time;
                events = ExponentiallySmoothedAverage();
            }
        }

        const bool precise_mode = now - period <= start;
        if (precise_mode)
            events = ExponentiallySmoothedAverage(events.value + count, now);
        else
            events.add(count, now, half_decay_time); // exponential smoothing mode
    }

    /// Average event rate throughout the `[now - period, now]` window.
    /// While measurements have run for less than `period` (`now - period < start`),
    /// the shorter window `[start, now]` is used instead, to avoid an artificial linear ramp-up.
    double rate(double now)
    {
        add(now, 0);
        if (unlikely(now <= start))
            return 0;
        if (now - period <= start)
            return events.value / (now - start); // precise counting mode
        return events.get(half_decay_time); // smoothing mode; equals `events.value / period`
    }

    /// Forget all accumulated history and restart measurement at `now`.
    void reset(double now)
    {
        start = now;
        events = ExponentiallySmoothedAverage();
        data_points = 0;
    }

private:
    const double period;
    const double half_decay_time;
    double start; // Instant in the past with no events before it; set when measurement started or was reset
    ExponentiallySmoothedAverage events; // Estimated number of events over the last `period`
    size_t data_points = 0; // Count of non-zero additions, used to detect the heating stage
};

}