Path: blob/main/component/otelcol/receiver/prometheus/internal/transaction_test.go
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package internal

import (
    "context"
    "errors"
    "testing"
    "time"

    "github.com/prometheus/common/model"
    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/model/metadata"
    "github.com/prometheus/prometheus/scrape"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "go.opentelemetry.io/collector/component/componenttest"
    "go.opentelemetry.io/collector/config"
    "go.opentelemetry.io/collector/consumer/consumertest"
    "go.opentelemetry.io/collector/obsreport"
    "go.opentelemetry.io/collector/pdata/pcommon"
    "go.opentelemetry.io/collector/pdata/pmetric"
)

const (
    startTimestamp      = pcommon.Timestamp(1555366608340000000)
    ts                  = int64(1555366610000)
    interval            = int64(15 * 1000)
    tsNanos             = pcommon.Timestamp(ts * 1e6)
    tsPlusIntervalNanos = pcommon.Timestamp((ts + interval) * 1e6)
)

var (
    target = scrape.NewTarget(
        // processedLabels contain label values after processing (e.g. relabeling)
        labels.FromMap(map[string]string{
            model.InstanceLabel: "localhost:8080",
        }),
        // discoveredLabels contain labels prior to any processing
        labels.FromMap(map[string]string{
            model.AddressLabel: "address:8080",
            model.SchemeLabel:  "http",
        }),
        nil)

    scrapeCtx = scrape.ContextWithMetricMetadataStore(
        scrape.ContextWithTarget(context.Background(), target),
        testMetadataStore(testMetadata))
)

func TestTransactionCommitWithoutAdding(t *testing.T) {
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    assert.NoError(t, tr.Commit())
}

func TestTransactionRollbackDoesNothing(t *testing.T) {
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    assert.NoError(t, tr.Rollback())
}

func TestTransactionUpdateMetadataDoesNothing(t *testing.T) {
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    _, err := tr.UpdateMetadata(0, labels.New(), metadata.Metadata{})
    assert.NoError(t, err)
}

func TestTransactionAppendNoTarget(t *testing.T) {
    badLabels := labels.FromStrings(model.MetricNameLabel, "counter_test")
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    _, err := tr.Append(0, badLabels, time.Now().Unix()*1000, 1.0)
    assert.Error(t, err)
}

func TestTransactionAppendNoMetricName(t *testing.T) {
    jobNotFoundLb := labels.FromMap(map[string]string{
        model.InstanceLabel: "localhost:8080",
        model.JobLabel:      "test2",
    })
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    _, err := tr.Append(0, jobNotFoundLb, time.Now().Unix()*1000, 1.0)
    assert.ErrorIs(t, err, errMetricNameNotFound)

    assert.ErrorIs(t, tr.Commit(), errNoDataToBuild)
}

func TestTransactionAppendEmptyMetricName(t *testing.T) {
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    _, err := tr.Append(0, labels.FromMap(map[string]string{
        model.InstanceLabel:   "localhost:8080",
        model.JobLabel:        "test2",
        model.MetricNameLabel: "",
    }), time.Now().Unix()*1000, 1.0)
    assert.ErrorIs(t, err, errMetricNameNotFound)
}

func TestTransactionAppendResource(t *testing.T) {
    sink := new(consumertest.MetricsSink)
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    _, err := tr.Append(0, labels.FromMap(map[string]string{
        model.InstanceLabel:   "localhost:8080",
        model.JobLabel:        "test",
        model.MetricNameLabel: "counter_test",
    }), time.Now().Unix()*1000, 1.0)
    assert.NoError(t, err)
    _, err = tr.Append(0, labels.FromMap(map[string]string{
        model.InstanceLabel:   "localhost:8080",
        model.JobLabel:        "test",
        model.MetricNameLabel: startTimeMetricName,
    }), time.Now().UnixMilli(), 1.0)
    assert.NoError(t, err)
    assert.NoError(t, tr.Commit())
    expectedResource := CreateResource("test", "localhost:8080", labels.FromStrings(model.SchemeLabel, "http"))
    mds := sink.AllMetrics()
    require.Len(t, mds, 1)
    gotResource := mds[0].ResourceMetrics().At(0).Resource()
    require.Equal(t, expectedResource, gotResource)
}

func TestTransactionCommitErrorWhenAdjusterError(t *testing.T) {
    goodLabels := labels.FromMap(map[string]string{
        model.InstanceLabel:   "localhost:8080",
        model.JobLabel:        "test",
        model.MetricNameLabel: "counter_test",
    })
    sink := new(consumertest.MetricsSink)
    adjusterErr := errors.New("adjuster error")
    tr := newTransaction(scrapeCtx, &errorAdjuster{err: adjusterErr}, sink, nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
    _, err := tr.Append(0, goodLabels, time.Now().Unix()*1000, 1.0)
    assert.NoError(t, err)
    assert.ErrorIs(t, tr.Commit(), adjusterErr)
}

// Ensure that we reject duplicate label keys. See https://github.com/open-telemetry/wg-prometheus/issues/44.
func TestTransactionAppendDuplicateLabels(t *testing.T) {
    sink := new(consumertest.MetricsSink)
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())

    dupLabels := labels.FromStrings(
        model.InstanceLabel, "0.0.0.0:8855",
        model.JobLabel, "test",
        model.MetricNameLabel, "counter_test",
        "a", "1",
        "a", "6",
        "z", "9",
    )

    _, err := tr.Append(0, dupLabels, 1917, 1.0)
    require.Error(t, err)
    assert.Contains(t, err.Error(), `invalid sample: non-unique label names: "a"`)
}

func TestTransactionAppendHistogramNoLe(t *testing.T) {
    sink := new(consumertest.MetricsSink)
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())

    goodLabels := labels.FromStrings(
        model.InstanceLabel, "0.0.0.0:8855",
        model.JobLabel, "test",
        model.MetricNameLabel, "hist_test_bucket",
    )

    _, err := tr.Append(0, goodLabels, 1917, 1.0)
    require.ErrorIs(t, err, errEmptyLeLabel)
}

func TestTransactionAppendSummaryNoQuantile(t *testing.T) {
    sink := new(consumertest.MetricsSink)
    tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())

    goodLabels := labels.FromStrings(
        model.InstanceLabel, "0.0.0.0:8855",
        model.JobLabel, "test",
        model.MetricNameLabel, "summary_test",
    )

    _, err := tr.Append(0, goodLabels, 1917, 1.0)
    require.ErrorIs(t, err, errEmptyQuantileLabel)
}

func nopObsRecv() *obsreport.Receiver {
    return obsreport.NewReceiver(obsreport.ReceiverSettings{
        ReceiverID:             config.NewComponentID("prometheus"),
        Transport:              transport,
        ReceiverCreateSettings: componenttest.NewNopReceiverCreateSettings(),
    })
}

func TestMetricBuilderCounters(t *testing.T) {
    tests := []buildTestData{
        {
            name: "single-item",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("counter_test", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("counter_test")
                sum := m0.SetEmptySum()
                sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                sum.SetIsMonotonic(true)
                pt0 := sum.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "two-items",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("counter_test", 150, "foo", "bar"),
                        createDataPoint("counter_test", 25, "foo", "other"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("counter_test")
                sum := m0.SetEmptySum()
                sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                sum.SetIsMonotonic(true)
                pt0 := sum.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(150.0)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                pt1 := sum.DataPoints().AppendEmpty()
                pt1.SetDoubleValue(25.0)
                pt1.SetStartTimestamp(startTimestamp)
                pt1.SetTimestamp(tsNanos)
                pt1.Attributes().PutStr("foo", "other")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "two-metrics",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("counter_test", 150, "foo", "bar"),
                        createDataPoint("counter_test", 25, "foo", "other"),
                        createDataPoint("counter_test2", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("counter_test")
                sum0 := m0.SetEmptySum()
                sum0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                sum0.SetIsMonotonic(true)
                pt0 := sum0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(150.0)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                pt1 := sum0.DataPoints().AppendEmpty()
                pt1.SetDoubleValue(25.0)
                pt1.SetStartTimestamp(startTimestamp)
                pt1.SetTimestamp(tsNanos)
                pt1.Attributes().PutStr("foo", "other")

                m1 := mL0.AppendEmpty()
                m1.SetName("counter_test2")
                sum1 := m1.SetEmptySum()
                sum1.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                sum1.SetIsMonotonic(true)
                pt2 := sum1.DataPoints().AppendEmpty()
                pt2.SetDoubleValue(100.0)
                pt2.SetStartTimestamp(startTimestamp)
                pt2.SetTimestamp(tsNanos)
                pt2.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "metrics-with-poor-names",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("poor_name_count", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("poor_name_count")
                sum := m0.SetEmptySum()
                sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                sum.SetIsMonotonic(true)
                pt0 := sum.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tt.run(t)
        })
    }
}

func TestMetricBuilderGauges(t *testing.T) {
    tests := []buildTestData{
        {
            name: "one-gauge",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("gauge_test", 100, "foo", "bar"),
                    },
                },
                {
                    pts: []*testDataPoint{
                        createDataPoint("gauge_test", 90, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("gauge_test")
                gauge0 := m0.SetEmptyGauge()
                pt0 := gauge0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetStartTimestamp(0)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                md1 := pmetric.NewMetrics()
                mL1 := md1.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m1 := mL1.AppendEmpty()
                m1.SetName("gauge_test")
                gauge1 := m1.SetEmptyGauge()
                pt1 := gauge1.DataPoints().AppendEmpty()
                pt1.SetDoubleValue(90.0)
                pt1.SetStartTimestamp(0)
                pt1.SetTimestamp(tsPlusIntervalNanos)
                pt1.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0, md1}
            },
        },
        {
            name: "gauge-with-different-tags",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("gauge_test", 100, "foo", "bar"),
                        createDataPoint("gauge_test", 200, "bar", "foo"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("gauge_test")
                gauge0 := m0.SetEmptyGauge()
                pt0 := gauge0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetStartTimestamp(0)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                pt1 := gauge0.DataPoints().AppendEmpty()
                pt1.SetDoubleValue(200.0)
                pt1.SetStartTimestamp(0)
                pt1.SetTimestamp(tsNanos)
                pt1.Attributes().PutStr("bar", "foo")

                return []pmetric.Metrics{md0}
            },
        },
        {
            // TODO: A decision need to be made. If we want to have the behavior which can generate different tag key
            // sets because metrics come and go
            name: "gauge-comes-and-go-with-different-tagset",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("gauge_test", 100, "foo", "bar"),
                        createDataPoint("gauge_test", 200, "bar", "foo"),
                    },
                },
                {
                    pts: []*testDataPoint{
                        createDataPoint("gauge_test", 20, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("gauge_test")
                gauge0 := m0.SetEmptyGauge()
                pt0 := gauge0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetStartTimestamp(0)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                pt1 := gauge0.DataPoints().AppendEmpty()
                pt1.SetDoubleValue(200.0)
                pt1.SetStartTimestamp(0)
                pt1.SetTimestamp(tsNanos)
                pt1.Attributes().PutStr("bar", "foo")

                md1 := pmetric.NewMetrics()
                mL1 := md1.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m1 := mL1.AppendEmpty()
                m1.SetName("gauge_test")
                gauge1 := m1.SetEmptyGauge()
                pt2 := gauge1.DataPoints().AppendEmpty()
                pt2.SetDoubleValue(20.0)
                pt2.SetStartTimestamp(0)
                pt2.SetTimestamp(tsPlusIntervalNanos)
                pt2.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0, md1}
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tt.run(t)
        })
    }
}

func TestMetricBuilderUntyped(t *testing.T) {
    tests := []buildTestData{
        {
            name: "one-unknown",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("unknown_test", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("unknown_test")
                gauge0 := m0.SetEmptyGauge()
                pt0 := gauge0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetStartTimestamp(0)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "no-type-hint",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("something_not_exists", 100, "foo", "bar"),
                        createDataPoint("theother_not_exists", 200, "foo", "bar"),
                        createDataPoint("theother_not_exists", 300, "bar", "foo"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("something_not_exists")
                gauge0 := m0.SetEmptyGauge()
                pt0 := gauge0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                m1 := mL0.AppendEmpty()
                m1.SetName("theother_not_exists")
                gauge1 := m1.SetEmptyGauge()
                pt1 := gauge1.DataPoints().AppendEmpty()
                pt1.SetDoubleValue(200.0)
                pt1.SetTimestamp(tsNanos)
                pt1.Attributes().PutStr("foo", "bar")

                pt2 := gauge1.DataPoints().AppendEmpty()
                pt2.SetDoubleValue(300.0)
                pt2.SetTimestamp(tsNanos)
                pt2.Attributes().PutStr("bar", "foo")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "untype-metric-poor-names",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("some_count", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("some_count")
                gauge0 := m0.SetEmptyGauge()
                pt0 := gauge0.DataPoints().AppendEmpty()
                pt0.SetDoubleValue(100.0)
                pt0.SetTimestamp(tsNanos)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tt.run(t)
        })
    }
}

func TestMetricBuilderHistogram(t *testing.T) {
    tests := []buildTestData{
        {
            name: "single item",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_bucket", 10, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_sum", 99, "foo", "bar"),
                        createDataPoint("hist_test_count", 10, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(10)
                pt0.SetSum(99)
                pt0.ExplicitBounds().FromRaw([]float64{10, 20})
                pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "multi-groups",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_bucket", 10, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_sum", 99, "foo", "bar"),
                        createDataPoint("hist_test_count", 10, "foo", "bar"),
                        createDataPoint("hist_test_bucket", 1, "key2", "v2", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "key2", "v2", "le", "20"),
                        createDataPoint("hist_test_bucket", 3, "key2", "v2", "le", "+inf"),
                        createDataPoint("hist_test_sum", 50, "key2", "v2"),
                        createDataPoint("hist_test_count", 3, "key2", "v2"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(10)
                pt0.SetSum(99)
                pt0.ExplicitBounds().FromRaw([]float64{10, 20})
                pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                pt1 := hist0.DataPoints().AppendEmpty()
                pt1.SetCount(3)
                pt1.SetSum(50)
                pt1.ExplicitBounds().FromRaw([]float64{10, 20})
                pt1.BucketCounts().FromRaw([]uint64{1, 1, 1})
                pt1.SetTimestamp(tsNanos)
                pt1.SetStartTimestamp(startTimestamp)
                pt1.Attributes().PutStr("key2", "v2")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "multi-groups-and-families",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_bucket", 10, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_sum", 99, "foo", "bar"),
                        createDataPoint("hist_test_count", 10, "foo", "bar"),
                        createDataPoint("hist_test_bucket", 1, "key2", "v2", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "key2", "v2", "le", "20"),
                        createDataPoint("hist_test_bucket", 3, "key2", "v2", "le", "+inf"),
                        createDataPoint("hist_test_sum", 50, "key2", "v2"),
                        createDataPoint("hist_test_count", 3, "key2", "v2"),
                        createDataPoint("hist_test2_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test2_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test2_bucket", 3, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test2_sum", 50, "foo", "bar"),
                        createDataPoint("hist_test2_count", 3, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(10)
                pt0.SetSum(99)
                pt0.ExplicitBounds().FromRaw([]float64{10, 20})
                pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                pt1 := hist0.DataPoints().AppendEmpty()
                pt1.SetCount(3)
                pt1.SetSum(50)
                pt1.ExplicitBounds().FromRaw([]float64{10, 20})
                pt1.BucketCounts().FromRaw([]uint64{1, 1, 1})
                pt1.SetTimestamp(tsNanos)
                pt1.SetStartTimestamp(startTimestamp)
                pt1.Attributes().PutStr("key2", "v2")

                m1 := mL0.AppendEmpty()
                m1.SetName("hist_test2")
                hist1 := m1.SetEmptyHistogram()
                hist1.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt2 := hist1.DataPoints().AppendEmpty()
                pt2.SetCount(3)
                pt2.SetSum(50)
                pt2.ExplicitBounds().FromRaw([]float64{10, 20})
                pt2.BucketCounts().FromRaw([]uint64{1, 1, 1})
                pt2.SetTimestamp(tsNanos)
                pt2.SetStartTimestamp(startTimestamp)
                pt2.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "unordered-buckets",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 10, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_sum", 99, "foo", "bar"),
                        createDataPoint("hist_test_count", 10, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(10)
                pt0.SetSum(99)
                pt0.ExplicitBounds().FromRaw([]float64{10, 20})
                pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            // this won't likely happen in real env, as prometheus won't generate histogram with less than 3 buckets
            name: "only-one-bucket",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 3, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_count", 3, "foo", "bar"),
                        createDataPoint("hist_test_sum", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(3)
                pt0.SetSum(100)
                pt0.BucketCounts().FromRaw([]uint64{3})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            // this won't likely happen in real env, as prometheus won't generate histogram with less than 3 buckets
            name: "only-one-bucket-noninf",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 3, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_count", 3, "foo", "bar"),
                        createDataPoint("hist_test_sum", 100, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(3)
                pt0.SetSum(100)
                pt0.BucketCounts().FromRaw([]uint64{3})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "no-sum",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_bucket", 3, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_count", 3, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("hist_test")
                hist0 := m0.SetEmptyHistogram()
                hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
                pt0 := hist0.DataPoints().AppendEmpty()
                pt0.SetCount(3)
                pt0.ExplicitBounds().FromRaw([]float64{10, 20})
                pt0.BucketCounts().FromRaw([]uint64{1, 1, 1})
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "corrupted-no-buckets",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_sum", 99),
                        createDataPoint("hist_test_count", 10),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                return []pmetric.Metrics{pmetric.NewMetrics()}
            },
        },
        {
            name: "corrupted-no-count",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("hist_test_bucket", 1, "foo", "bar", "le", "10"),
                        createDataPoint("hist_test_bucket", 2, "foo", "bar", "le", "20"),
                        createDataPoint("hist_test_bucket", 3, "foo", "bar", "le", "+inf"),
                        createDataPoint("hist_test_sum", 99, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                return []pmetric.Metrics{pmetric.NewMetrics()}
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tt.run(t)
        })
    }
}

func TestMetricBuilderSummary(t *testing.T) {
    tests := []buildTestData{
        {
            name: "no-sum-and-count",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("summary_test", 5, "foo", "bar", "quantile", "1"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                return []pmetric.Metrics{pmetric.NewMetrics()}
            },
        },
        {
            name: "no-count",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("summary_test", 1, "foo", "bar", "quantile", "0.5"),
                        createDataPoint("summary_test", 2, "foo", "bar", "quantile", "0.75"),
                        createDataPoint("summary_test", 5, "foo", "bar", "quantile", "1"),
                        createDataPoint("summary_test_sum", 500, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                return []pmetric.Metrics{pmetric.NewMetrics()}
            },
        },
        {
            name: "no-sum",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("summary_test", 1, "foo", "bar", "quantile", "0.5"),
                        createDataPoint("summary_test", 2, "foo", "bar", "quantile", "0.75"),
                        createDataPoint("summary_test", 5, "foo", "bar", "quantile", "1"),
                        createDataPoint("summary_test_count", 500, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("summary_test")
                sum0 := m0.SetEmptySummary()
                pt0 := sum0.DataPoints().AppendEmpty()
                pt0.SetTimestamp(tsNanos)
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetCount(500)
                pt0.SetSum(0.0)
                pt0.Attributes().PutStr("foo", "bar")
                qvL := pt0.QuantileValues()
                q50 := qvL.AppendEmpty()
                q50.SetQuantile(.50)
                q50.SetValue(1.0)
                q75 := qvL.AppendEmpty()
                q75.SetQuantile(.75)
                q75.SetValue(2.0)
                q100 := qvL.AppendEmpty()
                q100.SetQuantile(1)
                q100.SetValue(5.0)

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "empty-quantiles",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("summary_test_sum", 100, "foo", "bar"),
                        createDataPoint("summary_test_count", 500, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("summary_test")
                sum0 := m0.SetEmptySummary()
                pt0 := sum0.DataPoints().AppendEmpty()
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetTimestamp(tsNanos)
                pt0.SetCount(500)
                pt0.SetSum(100.0)
                pt0.Attributes().PutStr("foo", "bar")

                return []pmetric.Metrics{md0}
            },
        },
        {
            name: "regular-summary",
            inputs: []*testScrapedPage{
                {
                    pts: []*testDataPoint{
                        createDataPoint("summary_test", 1, "foo", "bar", "quantile", "0.5"),
                        createDataPoint("summary_test", 2, "foo", "bar", "quantile", "0.75"),
                        createDataPoint("summary_test", 5, "foo", "bar", "quantile", "1"),
                        createDataPoint("summary_test_sum", 100, "foo", "bar"),
                        createDataPoint("summary_test_count", 500, "foo", "bar"),
                    },
                },
            },
            wants: func() []pmetric.Metrics {
                md0 := pmetric.NewMetrics()
                mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
                m0 := mL0.AppendEmpty()
                m0.SetName("summary_test")
                sum0 := m0.SetEmptySummary()
                pt0 := sum0.DataPoints().AppendEmpty()
                pt0.SetStartTimestamp(startTimestamp)
                pt0.SetTimestamp(tsNanos)
                pt0.SetCount(500)
                pt0.SetSum(100.0)
                pt0.Attributes().PutStr("foo", "bar")
                qvL := pt0.QuantileValues()
                q50 := qvL.AppendEmpty()
                q50.SetQuantile(.50)
                q50.SetValue(1.0)
                q75 := qvL.AppendEmpty()
                q75.SetQuantile(.75)
                q75.SetValue(2.0)
                q100 := qvL.AppendEmpty()
                q100.SetQuantile(1)
                q100.SetValue(5.0)

                return []pmetric.Metrics{md0}
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tt.run(t)
        })
    }
}

type buildTestData struct {
    name   string
    inputs []*testScrapedPage
    wants  func() []pmetric.Metrics
}

func (tt buildTestData) run(t *testing.T) {
    wants := tt.wants()
    assert.EqualValues(t, len(wants), len(tt.inputs))
    st := ts
    for i, page := range tt.inputs {
        sink := new(consumertest.MetricsSink)
        tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, componenttest.NewNopReceiverCreateSettings(), nopObsRecv())
        for _, pt := range page.pts {
            // set ts for testing
            pt.t = st
            _, err := tr.Append(0, pt.lb, pt.t, pt.v)
            assert.NoError(t, err)
        }
        assert.NoError(t, tr.Commit())
        mds := sink.AllMetrics()
        if wants[i].ResourceMetrics().Len() == 0 {
            // Receiver does not emit empty metrics, so will not have anything in the sink.
            require.Len(t, mds, 0)
            st += interval
            continue
        }
        require.Len(t, mds, 1)
        assertEquivalentMetrics(t, wants[i], mds[0])
        st += interval
    }
}

type errorAdjuster struct {
    err error
}

func (ea *errorAdjuster) AdjustMetrics(pmetric.Metrics) error {
    return ea.err
}

type startTimeAdjuster struct {
    startTime pcommon.Timestamp
}

func (s *startTimeAdjuster) AdjustMetrics(metrics pmetric.Metrics) error {
    for i := 0; i < metrics.ResourceMetrics().Len(); i++ {
        rm := metrics.ResourceMetrics().At(i)
        for j := 0; j < rm.ScopeMetrics().Len(); j++ {
            ilm := rm.ScopeMetrics().At(j)
            for k := 0; k < ilm.Metrics().Len(); k++ {
                metric := ilm.Metrics().At(k)
                switch metric.Type() {
                case pmetric.MetricTypeSum:
                    dps := metric.Sum().DataPoints()
                    for l := 0; l < dps.Len(); l++ {
                        dps.At(l).SetStartTimestamp(s.startTime)
                    }
                case pmetric.MetricTypeSummary:
                    dps := metric.Summary().DataPoints()
                    for l := 0; l < dps.Len(); l++ {
                        dps.At(l).SetStartTimestamp(s.startTime)
                    }
                case pmetric.MetricTypeHistogram:
                    dps := metric.Histogram().DataPoints()
                    for l := 0; l < dps.Len(); l++ {
                        dps.At(l).SetStartTimestamp(s.startTime)
                    }
                }
            }
        }
    }
    return nil
}

type testDataPoint struct {
    lb labels.Labels
    t  int64
    v  float64
}

type testScrapedPage struct {
    pts []*testDataPoint
}

func createDataPoint(mname string, value float64, tagPairs ...string) *testDataPoint {
    var lbls []string
    lbls = append(lbls, tagPairs...)
    lbls = append(lbls, model.MetricNameLabel, mname)
    lbls = append(lbls, model.JobLabel, "job")
    lbls = append(lbls, model.InstanceLabel, "instance")

    return &testDataPoint{
        lb: labels.FromStrings(lbls...),
        t:  ts,
        v:  value,
    }
}

func assertEquivalentMetrics(t *testing.T, want, got pmetric.Metrics) {
    require.Equal(t, want.ResourceMetrics().Len(), got.ResourceMetrics().Len())
    if want.ResourceMetrics().Len() == 0 {
        return
    }
    for i := 0; i < want.ResourceMetrics().Len(); i++ {
        wantSm := want.ResourceMetrics().At(i).ScopeMetrics()
        gotSm := got.ResourceMetrics().At(i).ScopeMetrics()
        require.Equal(t, wantSm.Len(), gotSm.Len())
        if wantSm.Len() == 0 {
            return
        }

        for j := 0; j < wantSm.Len(); j++ {
            wantMs := wantSm.At(j).Metrics()
            gotMs := gotSm.At(j).Metrics()
            require.Equal(t, wantMs.Len(), gotMs.Len())

            wmap := map[string]pmetric.Metric{}
            gmap := map[string]pmetric.Metric{}

            for k := 0; k < wantMs.Len(); k++ {
                wi := wantMs.At(k)
                wmap[wi.Name()] = wi
                gi := gotMs.At(k)
                gmap[gi.Name()] = gi
            }
            assert.EqualValues(t, wmap, gmap)
        }
    }
}