// Source: component/otelcol/receiver/prometheus/internal/metricfamily_test.go
// Copyright The OpenTelemetry Authors1//2// Licensed under the Apache License, Version 2.0 (the "License");3// you may not use this file except in compliance with the License.4// You may obtain a copy of the License at5//6// http://www.apache.org/licenses/LICENSE-2.07//8// Unless required by applicable law or agreed to in writing, software9// distributed under the License is distributed on an "AS IS" BASIS,10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.11// See the License for the specific language governing permissions and12// limitations under the License.1314package internal1516import (17"math"18"testing"19"time"2021"github.com/prometheus/prometheus/model/labels"22"github.com/prometheus/prometheus/model/textparse"23"github.com/prometheus/prometheus/model/value"24"github.com/prometheus/prometheus/scrape"25"github.com/stretchr/testify/require"26"go.opentelemetry.io/collector/pdata/pcommon"27"go.opentelemetry.io/collector/pdata/pmetric"28"go.uber.org/zap"29)3031type testMetadataStore map[string]scrape.MetricMetadata3233func (tmc testMetadataStore) GetMetadata(familyName string) (scrape.MetricMetadata, bool) {34lookup, ok := tmc[familyName]35return lookup, ok36}3738func (tmc testMetadataStore) ListMetadata() []scrape.MetricMetadata { return nil }3940func (tmc testMetadataStore) SizeMetadata() int { return 0 }4142func (tmc testMetadataStore) LengthMetadata() int {43return len(tmc)44}4546var mc = testMetadataStore{47"counter": scrape.MetricMetadata{48Metric: "cr",49Type: textparse.MetricTypeCounter,50Help: "This is some help for a counter",51Unit: "By",52},53"gauge": scrape.MetricMetadata{54Metric: "ge",55Type: textparse.MetricTypeGauge,56Help: "This is some help for a gauge",57Unit: "1",58},59"gaugehistogram": scrape.MetricMetadata{60Metric: "gh",61Type: textparse.MetricTypeGaugeHistogram,62Help: "This is some help for a gauge histogram",63Unit: "?",64},65"histogram": scrape.MetricMetadata{66Metric: "hg",67Type: 
textparse.MetricTypeHistogram,68Help: "This is some help for a histogram",69Unit: "ms",70},71"histogram_stale": scrape.MetricMetadata{72Metric: "hg_stale",73Type: textparse.MetricTypeHistogram,74Help: "This is some help for a histogram",75Unit: "ms",76},77"summary": scrape.MetricMetadata{78Metric: "s",79Type: textparse.MetricTypeSummary,80Help: "This is some help for a summary",81Unit: "ms",82},83"summary_stale": scrape.MetricMetadata{84Metric: "s_stale",85Type: textparse.MetricTypeSummary,86Help: "This is some help for a summary",87Unit: "ms",88},89"unknown": scrape.MetricMetadata{90Metric: "u",91Type: textparse.MetricTypeUnknown,92Help: "This is some help for an unknown metric",93Unit: "?",94},95}9697func TestMetricGroupData_toDistributionUnitTest(t *testing.T) {98type scrape struct {99at int64100value float64101metric string102extraLabel labels.Label103}104tests := []struct {105name string106metricName string107labels labels.Labels108scrapes []*scrape109want func() pmetric.HistogramDataPoint110wantErr bool111intervalStartTimeMs int64112}{113{114name: "histogram with startTimestamp",115metricName: "histogram",116intervalStartTimeMs: 11,117labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),118scrapes: []*scrape{119{at: 11, value: 66, metric: "histogram_count"},120{at: 11, value: 1004.78, metric: "histogram_sum"},121{at: 11, value: 33, metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "0.75"}},122{at: 11, value: 55, metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "2.75"}},123{at: 11, value: 66, metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "+Inf"}},124},125want: func() pmetric.HistogramDataPoint {126point := pmetric.NewHistogramDataPoint()127point.SetCount(66)128point.SetSum(1004.78)129point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.130point.ExplicitBounds().FromRaw([]float64{0.75, 
2.75})131point.BucketCounts().FromRaw([]uint64{33, 22, 11})132point.SetStartTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.133attributes := point.Attributes()134attributes.PutStr("a", "A")135attributes.PutStr("b", "B")136return point137},138},139{140name: "histogram that is stale",141metricName: "histogram_stale",142intervalStartTimeMs: 11,143labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),144scrapes: []*scrape{145{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_count"},146{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_sum"},147{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "0.75"}},148{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "2.75"}},149{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "+Inf"}},150},151want: func() pmetric.HistogramDataPoint {152point := pmetric.NewHistogramDataPoint()153point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.154point.SetFlags(pmetric.DefaultDataPointFlags.WithNoRecordedValue(true))155point.ExplicitBounds().FromRaw([]float64{0.75, 2.75})156point.BucketCounts().FromRaw([]uint64{0, 0, 0})157point.SetStartTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.158attributes := point.Attributes()159attributes.PutStr("a", "A")160attributes.PutStr("b", "B")161return point162},163},164{165name: "histogram with inconsistent timestamps",166metricName: "histogram_inconsistent_ts",167intervalStartTimeMs: 11,168labels: labels.FromMap(map[string]string{"a": "A", "le": "0.75", "b": "B"}),169scrapes: []*scrape{170{at: 11, value: math.Float64frombits(value.StaleNaN), metric: 
"histogram_stale_count"},171{at: 12, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_sum"},172{at: 13, value: math.Float64frombits(value.StaleNaN), metric: "value"},173},174wantErr: true,175},176}177178for _, tt := range tests {179tt := tt180t.Run(tt.name, func(t *testing.T) {181mp := newMetricFamily(tt.metricName, mc, zap.NewNop())182for i, tv := range tt.scrapes {183var lbls labels.Labels184if tv.extraLabel.Name != "" {185lbls = labels.NewBuilder(tt.labels).Set(tv.extraLabel.Name, tv.extraLabel.Value).Labels(lbls)186} else {187lbls = tt.labels.Copy()188}189err := mp.Add(tv.metric, lbls, tv.at, tv.value)190if tt.wantErr {191if i != 0 {192require.Error(t, err)193}194} else {195require.NoError(t, err)196}197}198if tt.wantErr {199// Don't check the result if we got an error200return201}202203require.Len(t, mp.groups, 1)204groupKey := mp.getGroupKey(tt.labels.Copy())205require.NotNil(t, mp.groups[groupKey])206207sl := pmetric.NewMetricSlice()208mp.appendMetric(sl)209210require.Equal(t, 1, sl.Len(), "Exactly one metric expected")211metric := sl.At(0)212require.Equal(t, mc[tt.metricName].Help, metric.Description(), "Expected help metadata in metric description")213require.Equal(t, mc[tt.metricName].Unit, metric.Unit(), "Expected unit metadata in metric")214215hdpL := metric.Histogram().DataPoints()216require.Equal(t, 1, hdpL.Len(), "Exactly one point expected")217got := hdpL.At(0)218want := tt.want()219require.Equal(t, want, got, "Expected the points to be equal")220})221}222}223224func TestMetricGroupData_toSummaryUnitTest(t *testing.T) {225type scrape struct {226at int64227value float64228metric string229}230231type labelsScrapes struct {232labels labels.Labels233scrapes []*scrape234}235tests := []struct {236name string237labelsScrapes []*labelsScrapes238want func() pmetric.SummaryDataPoint239wantErr bool240}{241{242name: "summary",243labelsScrapes: []*labelsScrapes{244{245labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),246scrapes: 
[]*scrape{247{at: 14, value: 10, metric: "summary_count"},248{at: 14, value: 15, metric: "summary_sum"},249},250},251{252labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.0", "b": "B"}),253scrapes: []*scrape{254{at: 14, value: 8, metric: "value"},255},256},257{258labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.75", "b": "B"}),259scrapes: []*scrape{260{at: 14, value: 33.7, metric: "value"},261},262},263{264labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.50", "b": "B"}),265scrapes: []*scrape{266{at: 14, value: 27, metric: "value"},267},268},269{270labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.90", "b": "B"}),271scrapes: []*scrape{272{at: 14, value: 56, metric: "value"},273},274},275{276labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.99", "b": "B"}),277scrapes: []*scrape{278{at: 14, value: 82, metric: "value"},279},280},281},282want: func() pmetric.SummaryDataPoint {283point := pmetric.NewSummaryDataPoint()284point.SetCount(10)285point.SetSum(15)286qtL := point.QuantileValues()287qn0 := qtL.AppendEmpty()288qn0.SetQuantile(0)289qn0.SetValue(8)290qn50 := qtL.AppendEmpty()291qn50.SetQuantile(.5)292qn50.SetValue(27)293qn75 := qtL.AppendEmpty()294qn75.SetQuantile(.75)295qn75.SetValue(33.7)296qn90 := qtL.AppendEmpty()297qn90.SetQuantile(.9)298qn90.SetValue(56)299qn99 := qtL.AppendEmpty()300qn99.SetQuantile(.99)301qn99.SetValue(82)302point.SetTimestamp(pcommon.Timestamp(14 * time.Millisecond)) // the time in milliseconds -> nanoseconds.303point.SetStartTimestamp(pcommon.Timestamp(14 * time.Millisecond)) // the time in milliseconds -> nanoseconds304attributes := point.Attributes()305attributes.PutStr("a", "A")306attributes.PutStr("b", "B")307return point308},309},310{311name: "summary_stale",312labelsScrapes: []*labelsScrapes{313{314labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.0", "b": "B"}),315scrapes: []*scrape{316{at: 14, value: 10, metric: 
"summary_stale_count"},317{at: 14, value: 12, metric: "summary_stale_sum"},318{at: 14, value: 8, metric: "value"},319},320},321{322labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.75", "b": "B"}),323scrapes: []*scrape{324{at: 14, value: 10, metric: "summary_stale_count"},325{at: 14, value: 1004.78, metric: "summary_stale_sum"},326{at: 14, value: 33.7, metric: "value"},327},328},329{330labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.50", "b": "B"}),331scrapes: []*scrape{332{at: 14, value: 10, metric: "summary_stale_count"},333{at: 14, value: 13, metric: "summary_stale_sum"},334{at: 14, value: 27, metric: "value"},335},336},337{338labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.90", "b": "B"}),339scrapes: []*scrape{340{at: 14, value: 10, metric: "summary_stale_count"},341{at: 14, value: 14, metric: "summary_stale_sum"},342{at: 14, value: 56, metric: "value"},343},344},345{346labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.99", "b": "B"}),347scrapes: []*scrape{348{at: 14, value: math.Float64frombits(value.StaleNaN), metric: "summary_stale_count"},349{at: 14, value: math.Float64frombits(value.StaleNaN), metric: "summary_stale_sum"},350{at: 14, value: math.Float64frombits(value.StaleNaN), metric: "value"},351},352},353},354want: func() pmetric.SummaryDataPoint {355point := pmetric.NewSummaryDataPoint()356qtL := point.QuantileValues()357qn0 := qtL.AppendEmpty()358point.SetFlags(pmetric.DefaultDataPointFlags.WithNoRecordedValue(true))359qn0.SetQuantile(0)360qn0.SetValue(0)361qn50 := qtL.AppendEmpty()362qn50.SetQuantile(.5)363qn50.SetValue(0)364qn75 := qtL.AppendEmpty()365qn75.SetQuantile(.75)366qn75.SetValue(0)367qn90 := qtL.AppendEmpty()368qn90.SetQuantile(.9)369qn90.SetValue(0)370qn99 := qtL.AppendEmpty()371qn99.SetQuantile(.99)372qn99.SetValue(0)373point.SetTimestamp(pcommon.Timestamp(14 * time.Millisecond)) // the time in milliseconds -> nanoseconds.374point.SetStartTimestamp(pcommon.Timestamp(14 * 
time.Millisecond)) // the time in milliseconds -> nanoseconds375attributes := point.Attributes()376attributes.PutStr("a", "A")377attributes.PutStr("b", "B")378return point379},380},381{382name: "summary with inconsistent timestamps",383labelsScrapes: []*labelsScrapes{384{385labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),386scrapes: []*scrape{387{at: 11, value: 10, metric: "summary_count"},388{at: 14, value: 15, metric: "summary_sum"},389},390},391},392wantErr: true,393},394}395396for _, tt := range tests {397tt := tt398t.Run(tt.name, func(t *testing.T) {399mp := newMetricFamily(tt.name, mc, zap.NewNop())400for _, lbs := range tt.labelsScrapes {401for i, scrape := range lbs.scrapes {402err := mp.Add(scrape.metric, lbs.labels.Copy(), scrape.at, scrape.value)403if tt.wantErr {404// The first scrape won't have an error405if i != 0 {406require.Error(t, err)407}408} else {409require.NoError(t, err)410}411}412}413if tt.wantErr {414// Don't check the result if we got an error415return416}417418require.Len(t, mp.groups, 1)419groupKey := mp.getGroupKey(tt.labelsScrapes[0].labels.Copy())420require.NotNil(t, mp.groups[groupKey])421422sl := pmetric.NewMetricSlice()423mp.appendMetric(sl)424425require.Equal(t, 1, sl.Len(), "Exactly one metric expected")426metric := sl.At(0)427require.Equal(t, mc[tt.name].Help, metric.Description(), "Expected help metadata in metric description")428require.Equal(t, mc[tt.name].Unit, metric.Unit(), "Expected unit metadata in metric")429430sdpL := metric.Summary().DataPoints()431require.Equal(t, 1, sdpL.Len(), "Exactly one point expected")432got := sdpL.At(0)433want := tt.want()434require.Equal(t, want, got, "Expected the points to be equal")435})436}437}438439func TestMetricGroupData_toNumberDataUnitTest(t *testing.T) {440type scrape struct {441at int64442value float64443metric string444}445tests := []struct {446name string447metricKind string448labels labels.Labels449scrapes []*scrape450intervalStartTimestampMs int64451want func() 
pmetric.NumberDataPoint452}{453{454metricKind: "counter",455name: "counter:: startTimestampMs of 11",456intervalStartTimestampMs: 11,457labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),458scrapes: []*scrape{459{at: 13, value: 33.7, metric: "value"},460},461want: func() pmetric.NumberDataPoint {462point := pmetric.NewNumberDataPoint()463point.SetDoubleValue(33.7)464point.SetTimestamp(pcommon.Timestamp(13 * time.Millisecond)) // the time in milliseconds -> nanoseconds.465point.SetStartTimestamp(pcommon.Timestamp(13 * time.Millisecond)) // the time in milliseconds -> nanoseconds.466attributes := point.Attributes()467attributes.PutStr("a", "A")468attributes.PutStr("b", "B")469return point470},471},472{473name: "counter:: startTimestampMs of 0",474metricKind: "counter",475intervalStartTimestampMs: 0,476labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),477scrapes: []*scrape{478{at: 28, value: 99.9, metric: "value"},479},480want: func() pmetric.NumberDataPoint {481point := pmetric.NewNumberDataPoint()482point.SetDoubleValue(99.9)483point.SetTimestamp(pcommon.Timestamp(28 * time.Millisecond)) // the time in milliseconds -> nanoseconds.484point.SetStartTimestamp(pcommon.Timestamp(28 * time.Millisecond)) // the time in milliseconds -> nanoseconds.485attributes := point.Attributes()486attributes.PutStr("a", "A")487attributes.PutStr("b", "B")488return point489},490},491}492493for _, tt := range tests {494tt := tt495t.Run(tt.name, func(t *testing.T) {496mp := newMetricFamily(tt.metricKind, mc, zap.NewNop())497for _, tv := range tt.scrapes {498require.NoError(t, mp.Add(tv.metric, tt.labels.Copy(), tv.at, tv.value))499}500501require.Len(t, mp.groups, 1)502groupKey := mp.getGroupKey(tt.labels.Copy())503require.NotNil(t, mp.groups[groupKey])504505sl := pmetric.NewMetricSlice()506mp.appendMetric(sl)507508require.Equal(t, 1, sl.Len(), "Exactly one metric expected")509metric := sl.At(0)510require.Equal(t, mc[tt.metricKind].Help, metric.Description(), "Expected 
help metadata in metric description")511require.Equal(t, mc[tt.metricKind].Unit, metric.Unit(), "Expected unit metadata in metric")512513ndpL := metric.Sum().DataPoints()514require.Equal(t, 1, ndpL.Len(), "Exactly one point expected")515got := ndpL.At(0)516want := tt.want()517require.Equal(t, want, got, "Expected the points to be equal")518})519}520}521522523