author    Alexey Efimov <34044711+adameat@users.noreply.github.com>    2024-01-03 16:44:08 +0100
committer GitHub <noreply@github.com>    2024-01-03 18:44:08 +0300
commit    2ae46a88a9042f1b3bee6f9b7a523a1b0fecd285 (patch)
tree      ed150fb65be33a858269eaf703e2bc12f6d56e2a
parent    3c0bb781afe9473bb694a7e64f5066d128329744 (diff)
download  ydb-2ae46a88a9042f1b3bee6f9b7a523a1b0fecd285.tar.gz
improve normalization (#827)
-rw-r--r--  ydb/core/graph/service/service_impl.cpp   19
-rw-r--r--  ydb/core/graph/shard/backends.cpp         185
-rw-r--r--  ydb/core/graph/shard/backends.h           23
-rw-r--r--  ydb/core/graph/shard/tx_monitoring.cpp    3
-rw-r--r--  ydb/core/graph/shard/ut/shard_ut.cpp      257
5 files changed, 334 insertions, 153 deletions
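In brief, the diff below replaces the per-vector TMemoryBackend::Downsample<T> helpers with a shared TBaseBackend::NormalizeAndDownsample/FillResult pair and adds a fallback ("pumpkin") state to the graph service for databases without a graph shard. A minimal sketch of the new call pattern follows; the type and function names are taken from the diff, while the wrapper function, the request object, and the include path are assumptions:

// Sketch only; assumes backends.h transitively provides TInstant and the NKikimrGraph protobuf types it references.
#include <ydb/core/graph/shard/backends.h>

using namespace NKikimr::NGraph;

// Hypothetical helper: `get` is a client-filled NKikimrGraph::TEvGetMetrics request.
void BuildReply(const NKikimrGraph::TEvGetMetrics& get, NKikimrGraph::TEvMetricsResult& result) {
    TBaseBackend::TMetricsValues values;
    values.Timestamps = {TInstant::Seconds(100), TInstant::Seconds(200), TInstant::Seconds(300)};
    values.Values.push_back({1.0, 2.0, 3.0}); // one row of samples per requested metric
    // FillResult downsamples only when MaxPoints is set and exceeded, then copies the
    // timestamps (as seconds) and each value row into the protobuf result.
    TBaseBackend::FillResult(values, get, result);
}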
diff --git a/ydb/core/graph/service/service_impl.cpp b/ydb/core/graph/service/service_impl.cpp
index 5ecd8f983d..89746613f5 100644
--- a/ydb/core/graph/service/service_impl.cpp
+++ b/ydb/core/graph/service/service_impl.cpp
@@ -162,9 +162,12 @@ public:
BLOG_D("Database " << Database << " resolved to shard " << GraphShardId);
ConnectShard();
return;
+ } else {
+ BLOG_D("Error resolving database " << Database << " - no graph shard (switching to pumpkin mode)");
+ return Become(&TGraphService::StatePumpkin);
}
}
- BLOG_W("Error resolving database " << Database << " incomplete response / no graph shard");
+ BLOG_W("Error resolving database " << Database << " incomplete response");
} else {
if (!request->ResultSet.empty()) {
BLOG_W("Error resolving database " << Database << " error " << request->ResultSet.front().Status);
@@ -213,6 +216,13 @@ public:
}
}
+ void HandlePumpkin(TEvGraph::TEvGetMetrics::TPtr& ev) {
+ BLOG_TRACE("TEvGetMetrics(Pumpkin)");
+ TEvGraph::TEvMetricsResult* response = new TEvGraph::TEvMetricsResult();
+ response->Record.SetError("GraphShard is not enabled on the database");
+ Send(ev->Sender, response, 0, ev->Cookie);
+ }
+
STATEFN(StateWork) {
switch (ev->GetTypeRewrite()) {
hFunc(TEvGraph::TEvSendMetrics, Handle);
@@ -223,6 +233,13 @@ public:
cFunc(TEvents::TSystem::Wakeup, HandleTimeout);
}
}
+
+ STATEFN(StatePumpkin) {
+ switch (ev->GetTypeRewrite()) {
+ hFunc(TEvGraph::TEvGetMetrics, HandlePumpkin);
+ cFunc(TEvents::TSystem::Wakeup, HandleTimeout);
+ }
+ }
};
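For reference, a hedged sketch of what a caller sees when the service is in the new pumpkin state. Only TEvGraph::TEvMetricsResult and the error string come from the diff; the handler name, ReportError, and the HasError()/GetError() accessors (assumed to be generated for the Error field set via SetError above) are illustrative:

void Handle(TEvGraph::TEvMetricsResult::TPtr& ev) {
    const auto& record = ev->Get()->Record;
    if (record.HasError()) {
        // In pumpkin mode this carries "GraphShard is not enabled on the database".
        ReportError(record.GetError()); // placeholder for the caller's own error path
        return;
    }
    // otherwise consume the Time/Data rows that FillResult produced
}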
diff --git a/ydb/core/graph/shard/backends.cpp b/ydb/core/graph/shard/backends.cpp
index 7573c6e04c..e7393bac05 100644
--- a/ydb/core/graph/shard/backends.cpp
+++ b/ydb/core/graph/shard/backends.cpp
@@ -5,54 +5,99 @@
namespace NKikimr {
namespace NGraph {
-template<>
-std::vector<TInstant> TMemoryBackend::Downsample<TInstant>(const std::vector<TInstant>& data, size_t maxPoints) {
- if (data.size() <= maxPoints) {
- return data;
- }
- std::vector<TInstant> result;
- double coeff = (double)maxPoints / data.size();
- result.resize(maxPoints);
- size_t ltrg = maxPoints;
- for (size_t src = 0; src < data.size(); ++src) {
- size_t trg = floor(coeff * src);
- if (trg != ltrg) {
- result[trg] = data[src]; // we expect sorted data so we practically use min() here
- ltrg = trg;
- }
+void TBaseBackend::NormalizeAndDownsample(TMetricsValues& values, size_t maxPoints) {
+ if (values.Timestamps.size() <= maxPoints) {
+ return;
+ }
+
+ TMetricsValues result;
+ result.Timestamps.resize(maxPoints);
+ result.Values.resize(values.Values.size());
+ for (auto& values : result.Values) {
+ values.resize(maxPoints);
}
- return result;
-}
-template<>
-std::vector<double> TMemoryBackend::Downsample<double>(const std::vector<double>& data, size_t maxPoints) {
- if (data.size() <= maxPoints) {
- return data;
- }
- std::vector<double> result;
- double coeff = (double)maxPoints / data.size();
- result.resize(maxPoints);
- size_t ltrg = 0;
- long cnt = 0;
- for (size_t src = 0; src < data.size(); ++src) {
- if (isnan(data[src])) {
- continue;
+ size_t srcSize = values.Timestamps.size();
+ size_t trgSize = result.Timestamps.size();
+ TInstant frontTs = values.Timestamps.front();
+ TInstant backTs = values.Timestamps.back();
+ TDuration distanceTs = backTs - frontTs;
+ // normalize timestamps
+ size_t src = 0;
+ size_t trg = 0;
+ size_t trgSrc = 0;
+ size_t trgRest = trgSize - 1;
+ while (src < srcSize && trg < trgSize) {
+ TInstant expected = frontTs + TDuration::Seconds((trg - trgSrc) * distanceTs.Seconds() / trgRest);
+ if (expected < values.Timestamps[src]) {
+ result.Timestamps[trg] = values.Timestamps[src];
+ frontTs = values.Timestamps[src];
+ distanceTs = backTs - frontTs;
+ trgSrc = trg;
+ trgRest = trgSize - trg - 1;
+ ++src;
+ ++trg;
+ } else if (expected == values.Timestamps[src]) {
+ result.Timestamps[trg] = values.Timestamps[src];
+ ++src;
+ ++trg;
+ } else if (expected > values.Timestamps[src]) {
+ result.Timestamps[trg] = expected;
+ ++trg;
+ do {
+ ++src;
+ if (src >= srcSize) {
+ break;
+ }
+ } while (values.Timestamps[src] < expected);
}
- size_t trg = floor(coeff * src);
- if (trg != ltrg && cnt > 0) {
- if (cnt > 1) {
- result[ltrg] /= cnt;
+ }
+ // aggregate values
+ for (size_t numVal = 0; numVal < result.Values.size(); ++numVal) {
+ double accm = NAN; // avg impl
+ long cnt = 0;
+ const std::vector<double>& srcValues(values.Values[numVal]);
+ std::vector<double>& trgValues(result.Values[numVal]);
+ size_t trgPos = 0;
+ for (size_t srcPos = 0; srcPos < srcValues.size(); ++srcPos) {
+ double srcValue = srcValues[srcPos];
+ if (!isnan(srcValue)) {
+ if (isnan(accm)) { // avg impl
+ accm = srcValue;
+ cnt = 1;
+ } else {
+ accm += srcValue;
+ cnt += 1;
+ }
+ }
+ if (values.Timestamps[srcPos] >= result.Timestamps[trgPos]) {
+ if (isnan(accm)) { // avg impl
+ trgValues[trgPos] = NAN;
+ } else {
+ trgValues[trgPos] = accm / cnt;
+ }
+ ++trgPos;
+ accm = NAN;
+ cnt = 0;
}
- cnt = 0;
}
- result[trg] += data[src];
- ++cnt;
- ltrg = trg;
}
- if (cnt > 1) {
- result[ltrg] /= cnt;
+ values = std::move(result);
+}
+
+void TBaseBackend::FillResult(TMetricsValues& values, const NKikimrGraph::TEvGetMetrics& get, NKikimrGraph::TEvMetricsResult& result) {
+ if (get.HasMaxPoints() && values.Timestamps.size() > get.GetMaxPoints()) {
+ NormalizeAndDownsample(values, get.GetMaxPoints());
+ }
+ result.Clear();
+ auto time = result.MutableTime();
+ time->Reserve(values.Timestamps.size());
+ for (const TInstant t : values.Timestamps) {
+ time->Add(t.Seconds());
+ }
+ for (std::vector<double>& values : values.Values) {
+ result.AddData()->MutableValues()->Add(values.begin(), values.end());
}
- return result;
}
void TMemoryBackend::StoreMetrics(TMetricsData&& data) {
@@ -93,37 +138,20 @@ void TMemoryBackend::GetMetrics(const NKikimrGraph::TEvGetMetrics& get, NKikimrG
}
indexes.push_back(idx);
}
- std::vector<TInstant> timestamps;
- std::vector<std::vector<double>> values;
- values.resize(indexes.size());
+ TMetricsValues metricValues;
+ metricValues.Values.resize(indexes.size());
for (auto it = itLeft; it != itRight; ++it) {
- timestamps.push_back(it->Timestamp);
+ metricValues.Timestamps.push_back(it->Timestamp);
for (size_t num = 0; num < indexes.size(); ++num) {
size_t idx = indexes[num];
if (idx < it->Values.size()) {
- values[num].push_back(it->Values[idx]);
+ metricValues.Values[num].push_back(it->Values[idx]);
} else {
- values[num].push_back(NAN);
+ metricValues.Values[num].push_back(NAN);
}
}
}
- if (get.HasMaxPoints() && timestamps.size() > get.GetMaxPoints()) {
- timestamps = Downsample(timestamps, get.GetMaxPoints());
- BLOG_TRACE("GetMetrics timestamps=" << timestamps.size());
- for (std::vector<double>& values : values) {
- values = Downsample(values, get.GetMaxPoints());
- BLOG_TRACE("GetMetrics values=" << values.size());
- }
- }
- result.Clear();
- auto time = result.MutableTime();
- time->Reserve(timestamps.size());
- for (const TInstant t : timestamps) {
- time->Add(t.Seconds());
- }
- for (std::vector<double>& values : values) {
- result.AddData()->MutableValues()->Add(values.begin(), values.end());
- }
+ FillResult(metricValues, get, result);
}
void TMemoryBackend::ClearData(TInstant cutline, TInstant& newStartTimestamp) {
@@ -173,49 +201,32 @@ bool TLocalBackend::GetMetrics(NTabletFlatExecutor::TTransactionContext& txc, co
metricIdx[itMetricIdx->second] = nMetric;
}
}
- std::vector<TInstant> timestamps;
- std::vector<std::vector<double>> values;
+ TMetricsValues metricValues;
auto rowset = db.Table<Schema::MetricsValues>().GreaterOrEqual(minTime).LessOrEqual(maxTime).Select();
if (!rowset.IsReady()) {
return false;
}
ui64 lastTime = 0;
- values.resize(get.MetricsSize());
+ metricValues.Values.resize(get.MetricsSize());
while (!rowset.EndOfSet()) {
ui64 time = rowset.GetValue<Schema::MetricsValues::Timestamp>();
if (time != lastTime) {
lastTime = time;
- timestamps.push_back(TInstant::Seconds(time));
- for (auto& vals : values) {
+ metricValues.Timestamps.push_back(TInstant::Seconds(time));
+ for (auto& vals : metricValues.Values) {
vals.emplace_back(NAN);
}
}
ui64 id = rowset.GetValue<Schema::MetricsValues::Id>();
auto itIdx = metricIdx.find(id);
if (itIdx != metricIdx.end()) {
- values.back()[itIdx->second] = rowset.GetValue<Schema::MetricsValues::Value>();
+ metricValues.Values.back()[itIdx->second] = rowset.GetValue<Schema::MetricsValues::Value>();
}
if (!rowset.Next()) {
return false;
}
}
- if (get.HasMaxPoints() && timestamps.size() > get.GetMaxPoints()) {
- timestamps = TMemoryBackend::Downsample(timestamps, get.GetMaxPoints());
- BLOG_TRACE("GetMetrics timestamps=" << timestamps.size());
- for (std::vector<double>& values : values) {
- values = TMemoryBackend::Downsample(values, get.GetMaxPoints());
- BLOG_TRACE("GetMetrics values=" << values.size());
- }
- }
- result.Clear();
- auto time = result.MutableTime();
- time->Reserve(timestamps.size());
- for (const TInstant t : timestamps) {
- time->Add(t.Seconds());
- }
- for (std::vector<double>& values : values) {
- result.AddData()->MutableValues()->Add(values.begin(), values.end());
- }
+ FillResult(metricValues, get, result);
return true;
}
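A worked instance of the grid that NormalizeAndDownsample builds, using the data of the NormalizeAndDownsample2 test further down (15 samples at 100..500 step 100 then 510..600 step 10, values 1..15, maxPoints = 10):

// expected = frontTs + (trg - trgSrc) * distanceTs.Seconds() / trgRest
// While the source is sparser than the even grid (100..500), each source timestamp arrives
// after its expected slot, so the grid re-anchors on it: targets 100, 200, 300, 400, 500.
// After 500 the anchor becomes frontTs = 500, trgSrc = 4, trgRest = 5, distanceTs = 100s,
// so the remaining slots land at 520, 540, 560, 580, 600.
// The aggregation pass then averages the source values falling into each slot:
//   slot 520 <- (6 + 7) / 2 = 6.5, slot 540 <- (8 + 9) / 2 = 8.5, ..., slot 600 <- 14.5,
// which matches the canon data asserted in that test.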
diff --git a/ydb/core/graph/shard/backends.h b/ydb/core/graph/shard/backends.h
index 0c9e82e0ba..c2767e31ed 100644
--- a/ydb/core/graph/shard/backends.h
+++ b/ydb/core/graph/shard/backends.h
@@ -17,15 +17,25 @@ struct TMetricsData {
std::unordered_map<TString, double> Values;
};
-class TMemoryBackend {
+class TBaseBackend {
+public:
+ struct TMetricsValues {
+ std::vector<TInstant> Timestamps;
+ std::vector<std::vector<double>> Values;
+ };
+
+ static void NormalizeAndDownsample(TMetricsValues& values, size_t maxPoints);
+ static void FillResult(TMetricsValues& values, const NKikimrGraph::TEvGetMetrics& get, NKikimrGraph::TEvMetricsResult& result);
+
+ std::unordered_map<TString, ui64> MetricsIndex; // mapping name -> id
+};
+
+class TMemoryBackend : public TBaseBackend {
public:
void StoreMetrics(TMetricsData&& data);
void GetMetrics(const NKikimrGraph::TEvGetMetrics& get, NKikimrGraph::TEvMetricsResult& result) const;
void ClearData(TInstant cutline, TInstant& newStartTimestamp);
- template<typename ValueType>
- static std::vector<ValueType> Downsample(const std::vector<ValueType>& data, size_t maxPoints);
-
TString GetLogPrefix() const;
struct TMetricsRecord {
@@ -41,11 +51,10 @@ public:
}
};
- std::unordered_map<TString, size_t> MetricsIndex; // mapping name -> id
std::deque<TMetricsRecord> MetricsValues;
};
-class TLocalBackend {
+class TLocalBackend : public TBaseBackend {
public:
static constexpr ui64 MAX_ROWS_TO_DELETE = 1000;
@@ -54,8 +63,6 @@ public:
bool ClearData(NTabletFlatExecutor::TTransactionContext& txc, TInstant cutline, TInstant& newStartTimestamp);
TString GetLogPrefix() const;
-
- std::unordered_map<TString, ui64> MetricsIndex; // mapping name -> id
};
} // NGraph
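Illustrative only: a hypothetical third backend reusing the helpers the header now hoists into TBaseBackend (not part of this commit):

class TMyBackend : public NKikimr::NGraph::TBaseBackend {
public:
    void GetMetrics(const NKikimrGraph::TEvGetMetrics& get, NKikimrGraph::TEvMetricsResult& result) const {
        TMetricsValues values;
        // ... fill values.Timestamps and one values.Values row per metric named in `get`,
        //     using the inherited MetricsIndex (name -> id) to resolve columns ...
        FillResult(values, get, result); // shared normalization, downsampling and protobuf fill
    }
};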
diff --git a/ydb/core/graph/shard/tx_monitoring.cpp b/ydb/core/graph/shard/tx_monitoring.cpp
index 9629f84a60..1ecc8c111c 100644
--- a/ydb/core/graph/shard/tx_monitoring.cpp
+++ b/ydb/core/graph/shard/tx_monitoring.cpp
@@ -51,7 +51,8 @@ public:
html << "<tr><td>Memory.RecordsSize</td><td>" << Self->MemoryBackend.MetricsValues.size() << "</td></tr>";
html << "<tr><td>Local.MetricsSize</td><td>" << Self->LocalBackend.MetricsIndex.size() << "</td></tr>";
- html << "<tr><td>Local.StartTimestamp</td><td>" << Self->StartTimestamp << "</td></tr>";
+ html << "<tr><td>StartTimestamp</td><td>" << Self->StartTimestamp << "</td></tr>";
+ html << "<tr><td>ClearTimestamp</td><td>" << Self->ClearTimestamp << "</td></tr>";
html << "</table>";
html << "</html>";
diff --git a/ydb/core/graph/shard/ut/shard_ut.cpp b/ydb/core/graph/shard/ut/shard_ut.cpp
index c700c2f141..a4e86ba894 100644
--- a/ydb/core/graph/shard/ut/shard_ut.cpp
+++ b/ydb/core/graph/shard/ut/shard_ut.cpp
@@ -18,7 +18,7 @@ Y_DECLARE_OUT_SPEC(, std::vector<TInstant>, stream, value) {
if (it != value.begin()) {
stream << ',';
}
- stream << it->GetValue();
+ stream << it->Seconds();
}
stream << ']';
}
@@ -34,81 +34,226 @@ Y_DECLARE_OUT_SPEC(, std::vector<double>, stream, value) {
stream << ']';
}
+Y_DECLARE_OUT_SPEC(, std::vector<std::vector<double>>, stream, value) {
+ stream << '[';
+ for (auto it = value.begin(); it != value.end(); ++it) {
+ if (it != value.begin()) {
+ stream << ',';
+ }
+ stream << *it;
+ }
+ stream << ']';
+}
+
namespace NKikimr {
using namespace Tests;
using namespace NSchemeShardUT_Private;
Y_UNIT_TEST_SUITE(GraphShard) {
- Y_UNIT_TEST(DownsampleFixed) {
- std::vector<TInstant> sourceData = {
- TInstant::FromValue( 1 ),
- TInstant::FromValue( 2 ),
- TInstant::FromValue( 3 ),
- TInstant::FromValue( 4 ),
- TInstant::FromValue( 5 ),
- TInstant::FromValue( 6 ),
- TInstant::FromValue( 7 ),
- TInstant::FromValue( 8 ),
- TInstant::FromValue( 9 ),
- TInstant::FromValue( 10 )
+ Y_UNIT_TEST(NormalizeAndDownsample1) {
+ NGraph::TBaseBackend::TMetricsValues values;
+ values.Timestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 600 ),
+ TInstant::Seconds( 700 ),
+ TInstant::Seconds( 800 ),
+ TInstant::Seconds( 900 ),
+ TInstant::Seconds( 1000 )
};
+ values.Values.push_back({1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
+ values.Values.push_back({1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
+
{
- std::vector<TInstant> targetData = NGraph::TMemoryBackend::Downsample(sourceData, 10);
- Ctest << targetData << Endl;
- std::vector<TInstant> canonData = {
- TInstant::FromValue( 1 ),
- TInstant::FromValue( 2 ),
- TInstant::FromValue( 3 ),
- TInstant::FromValue( 4 ),
- TInstant::FromValue( 5 ),
- TInstant::FromValue( 6 ),
- TInstant::FromValue( 7 ),
- TInstant::FromValue( 8 ),
- TInstant::FromValue( 9 ),
- TInstant::FromValue( 10 )
+ NGraph::TBaseBackend::NormalizeAndDownsample(values, 10);
+ Ctest << values.Timestamps << Endl;
+ Ctest << values.Values << Endl;
+ std::vector<TInstant> canonTimestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 600 ),
+ TInstant::Seconds( 700 ),
+ TInstant::Seconds( 800 ),
+ TInstant::Seconds( 900 ),
+ TInstant::Seconds( 1000 )
};
- UNIT_ASSERT(targetData == canonData);
+ std::vector<double> canonValues = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+ UNIT_ASSERT(values.Timestamps == canonTimestamps);
+ UNIT_ASSERT(values.Values.size() == 2);
+ UNIT_ASSERT(values.Values[0] == canonValues);
+ UNIT_ASSERT(values.Values[1] == canonValues);
}
+ }
+
+ Y_UNIT_TEST(NormalizeAndDownsample2) {
+ NGraph::TBaseBackend::TMetricsValues values;
+ values.Timestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 510 ),
+ TInstant::Seconds( 520 ),
+ TInstant::Seconds( 530 ),
+ TInstant::Seconds( 540 ),
+ TInstant::Seconds( 550 ),
+ TInstant::Seconds( 560 ),
+ TInstant::Seconds( 570 ),
+ TInstant::Seconds( 580 ),
+ TInstant::Seconds( 590 ),
+ TInstant::Seconds( 600 )
+ };
+ values.Values.push_back({1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15});
+
{
- std::vector<TInstant> targetData = NGraph::TMemoryBackend::Downsample(sourceData, 5);
- Ctest << targetData << Endl;
- std::vector<TInstant> canonData = {
- TInstant::FromValue( 1 ),
- TInstant::FromValue( 3 ),
- TInstant::FromValue( 5 ),
- TInstant::FromValue( 7 ),
- TInstant::FromValue( 9 )
+ NGraph::TBaseBackend::NormalizeAndDownsample(values, 10);
+ Ctest << values.Timestamps << Endl;
+ Ctest << values.Values << Endl;
+ std::vector<TInstant> canonTimestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 520 ),
+ TInstant::Seconds( 540 ),
+ TInstant::Seconds( 560 ),
+ TInstant::Seconds( 580 ),
+ TInstant::Seconds( 600 )
};
- UNIT_ASSERT(targetData == canonData);
- }
- {
- std::vector<TInstant> targetData = NGraph::TMemoryBackend::Downsample(sourceData, 1);
- Ctest << targetData << Endl;
- std::vector<TInstant> canonData = { TInstant::FromValue( 1 ) };
- UNIT_ASSERT(targetData == canonData);
+ std::vector<double> canonValues = {1, 2, 3, 4, 5, 6.5, 8.5, 10.5, 12.5, 14.5};
+ UNIT_ASSERT(values.Timestamps == canonTimestamps);
+ UNIT_ASSERT(values.Values.size() == 1);
+ UNIT_ASSERT(values.Values[0] == canonValues);
}
}
- Y_UNIT_TEST(DownsampleFloat) {
- std::vector<double> sourceData = {1,2,3,4,5, 6, 7, 8, 9, 10};
+ Y_UNIT_TEST(NormalizeAndDownsample3) {
+ NGraph::TBaseBackend::TMetricsValues values;
+ values.Timestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 510 ),
+ TInstant::Seconds( 520 ),
+ TInstant::Seconds( 530 ),
+ TInstant::Seconds( 540 ),
+ TInstant::Seconds( 550 ),
+ TInstant::Seconds( 560 ),
+ TInstant::Seconds( 570 ),
+ TInstant::Seconds( 580 ),
+ TInstant::Seconds( 590 ),
+ TInstant::Seconds( 600 )
+ };
+ values.Values.push_back({1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15});
+
{
- std::vector<double> targetData = NGraph::TMemoryBackend::Downsample(sourceData, 10);
- Ctest << targetData << Endl;
- std::vector<double> canonData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
- UNIT_ASSERT(targetData == canonData);
+ NGraph::TBaseBackend::NormalizeAndDownsample(values, 6);
+ Ctest << values.Timestamps << Endl;
+ Ctest << values.Values << Endl;
+ std::vector<TInstant> canonTimestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 600 )
+ };
+ std::vector<double> canonValues = {1, 2, 3, 4, 5, 10.5};
+ UNIT_ASSERT(values.Timestamps == canonTimestamps);
+ UNIT_ASSERT(values.Values.size() == 1);
+ UNIT_ASSERT(values.Values[0] == canonValues);
}
+ }
+
+ Y_UNIT_TEST(NormalizeAndDownsample4) {
+ NGraph::TBaseBackend::TMetricsValues values;
+ values.Timestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 510 ),
+ TInstant::Seconds( 520 ),
+ TInstant::Seconds( 530 ),
+ TInstant::Seconds( 540 ),
+ TInstant::Seconds( 550 ),
+ TInstant::Seconds( 560 ),
+ TInstant::Seconds( 570 ),
+ TInstant::Seconds( 580 ),
+ TInstant::Seconds( 590 ),
+ TInstant::Seconds( 600 )
+ };
+ values.Values.push_back({1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15});
+
{
- std::vector<double> targetData = NGraph::TMemoryBackend::Downsample(sourceData, 5);
- Ctest << targetData << Endl;
- std::vector<double> canonData = {1.5, 3.5, 5.5, 7.5, 9.5};
- UNIT_ASSERT(targetData == canonData);
+ NGraph::TBaseBackend::NormalizeAndDownsample(values, 3);
+ Ctest << values.Timestamps << Endl;
+ Ctest << values.Values << Endl;
+ std::vector<TInstant> canonTimestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 350 ),
+ TInstant::Seconds( 600 )
+ };
+ std::vector<double> canonValues = {1, 3, 10};
+ UNIT_ASSERT(values.Timestamps == canonTimestamps);
+ UNIT_ASSERT(values.Values.size() == 1);
+ UNIT_ASSERT(values.Values[0] == canonValues);
}
+ }
+
+ Y_UNIT_TEST(NormalizeAndDownsample5) {
+ NGraph::TBaseBackend::TMetricsValues values;
+ values.Timestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 510 ),
+ TInstant::Seconds( 520 ),
+ TInstant::Seconds( 530 ),
+ TInstant::Seconds( 540 ),
+ TInstant::Seconds( 550 ),
+ TInstant::Seconds( 560 ),
+ TInstant::Seconds( 570 ),
+ TInstant::Seconds( 580 ),
+ TInstant::Seconds( 590 ),
+ TInstant::Seconds( 600 )
+ };
+ values.Values.push_back({1, 2, 3, 4, 5, 6, 7, 8, 9, 10, NAN, 12, NAN, 14, 15});
+
{
- std::vector<double> targetData = NGraph::TMemoryBackend::Downsample(sourceData, 1);
- Ctest << targetData << Endl;
- std::vector<double> canonData = {5.5};
- UNIT_ASSERT(targetData == canonData);
+ NGraph::TBaseBackend::NormalizeAndDownsample(values, 10);
+ Ctest << values.Timestamps << Endl;
+ Ctest << values.Values << Endl;
+ std::vector<TInstant> canonTimestamps = {
+ TInstant::Seconds( 100 ),
+ TInstant::Seconds( 200 ),
+ TInstant::Seconds( 300 ),
+ TInstant::Seconds( 400 ),
+ TInstant::Seconds( 500 ),
+ TInstant::Seconds( 520 ),
+ TInstant::Seconds( 540 ),
+ TInstant::Seconds( 560 ),
+ TInstant::Seconds( 580 ),
+ TInstant::Seconds( 600 )
+ };
+ std::vector<double> canonValues = {1, 2, 3, 4, 5, 6.5, 8.5, 10, 12, 14.5};
+ UNIT_ASSERT(values.Timestamps == canonTimestamps);
+ UNIT_ASSERT(values.Values.size() == 1);
+ UNIT_ASSERT(values.Values[0] == canonValues);
}
}
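A note on the NAN handling exercised by this last test, grounded in the isnan checks inside NormalizeAndDownsample above:

// NAN source samples never enter the accumulator, so the 560 and 580 slots average only
// their finite samples (10 and 12 respectively), and a slot with no finite samples at all
// is emitted as NAN; hence the canon row {1, 2, 3, 4, 5, 6.5, 8.5, 10, 12, 14.5}.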