Path: blob/main/component/phlare/scrape/scrape_loop_test.go
package scrape

import (
	"context"
	"net/http"
	"net/http/httptest"
	"net/url"
	"sort"
	"strings"
	"testing"
	"time"

	"github.com/grafana/agent/component/discovery"
	"github.com/grafana/agent/component/phlare"
	"github.com/grafana/agent/pkg/util"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/stretchr/testify/require"
	"go.uber.org/atomic"
	"go.uber.org/goleak"
)

func TestScrapePool(t *testing.T) {
	defer goleak.VerifyNone(t, goleak.IgnoreTopFunction("go.opencensus.io/stats/view.(*worker).start"))

	args := NewDefaultArguments()
	args.Targets = []discovery.Target{
		{"instance": "foo"},
	}
	args.ProfilingConfig.Block.Enabled = false
	args.ProfilingConfig.Goroutine.Enabled = false
	args.ProfilingConfig.Memory.Enabled = false

	p, err := newScrapePool(args, phlare.AppendableFunc(
		func(ctx context.Context, labels labels.Labels, samples []*phlare.RawSample) error {
			return nil
		}),
		util.TestLogger(t))
	require.NoError(t, err)

	defer p.stop()

	for _, tt := range []struct {
		name     string
		groups   []*targetgroup.Group
		expected []*Target
	}{
		{
			name:     "no targets",
			groups:   []*targetgroup.Group{},
			expected: []*Target{},
		},
		{
			name: "targets",
			groups: []*targetgroup.Group{
				{
					Targets: []model.LabelSet{
						{model.AddressLabel: "localhost:9090"},
						{model.AddressLabel: "localhost:8080"},
					},
					Labels: model.LabelSet{"foo": "bar"},
				},
			},
			expected: []*Target{
				NewTarget(
					labels.FromStrings("instance", "localhost:8080", "foo", "bar", model.AddressLabel, "localhost:8080", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					labels.FromStrings("foo", "bar", model.AddressLabel, "localhost:8080", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					url.Values{},
				),
				NewTarget(
					labels.FromStrings("instance", "localhost:8080", "foo", "bar", model.AddressLabel, "localhost:8080", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					labels.FromStrings("foo", "bar", model.AddressLabel, "localhost:8080", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					url.Values{"seconds": []string{"14"}},
				),
				NewTarget(
					labels.FromStrings("instance", "localhost:9090", "foo", "bar", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					labels.FromStrings("foo", "bar", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					url.Values{},
				),
				NewTarget(
					labels.FromStrings("instance", "localhost:9090", "foo", "bar", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					labels.FromStrings("foo", "bar", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					url.Values{"seconds": []string{"14"}},
				),
			},
		},
		{
			name: "Remove targets",
			groups: []*targetgroup.Group{
				{
					Targets: []model.LabelSet{
						{model.AddressLabel: "localhost:9090"},
					},
				},
			},
			expected: []*Target{
				NewTarget(
					labels.FromStrings("instance", "localhost:9090", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					labels.FromStrings(model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					url.Values{},
				),
				NewTarget(
					labels.FromStrings("instance", "localhost:9090", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					labels.FromStrings(model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					url.Values{"seconds": []string{"14"}},
				),
			},
		},
		{
			name: "Sync targets",
			groups: []*targetgroup.Group{
				{
					Targets: []model.LabelSet{
						{model.AddressLabel: "localhost:9090", "__type__": "foo"},
					},
				},
			},
			expected: []*Target{
				NewTarget(
					labels.FromStrings("instance", "localhost:9090", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					labels.FromStrings("__type__", "foo", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofMutex, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/mutex"),
					url.Values{},
				),
				NewTarget(
					labels.FromStrings("instance", "localhost:9090", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					labels.FromStrings("__type__", "foo", model.AddressLabel, "localhost:9090", model.MetricNameLabel, pprofProcessCPU, model.SchemeLabel, "http", ProfilePath, "/debug/pprof/profile"),
					url.Values{"seconds": []string{"14"}},
				),
			},
		},
	} {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			p.sync(tt.groups)
			actual := p.ActiveTargets()
			sort.Sort(Targets(actual))
			sort.Sort(Targets(tt.expected))
			require.Equal(t, tt.expected, actual)
			require.Empty(t, p.DroppedTargets())
		})
	}

	// reload the cfg
	args.ScrapeTimeout = 1 * time.Second
	args.ScrapeInterval = 2 * time.Second
	p.reload(args)
	for _, ta := range p.activeTargets {
		require.Equal(t, 1*time.Second, ta.timeout)
		require.Equal(t, 2*time.Second, ta.interval)
	}
}

func TestScrapeLoop(t *testing.T) {
	defer goleak.VerifyNone(t, goleak.IgnoreTopFunction("go.opencensus.io/stats/view.(*worker).start"))

	down := atomic.NewBool(false)
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// The test was failing on Windows, as the scrape loop was too fast for
		// the Windows timer resolution.
		// This used to lead the `t.lastScrapeDuration = time.Since(start)` to
		// be recorded as zero. The small delay here allows the timer to record
		// the time since the last scrape properly.
		time.Sleep(2 * time.Millisecond)
		if down.Load() {
			w.WriteHeader(http.StatusInternalServerError)
		}
		w.Write([]byte("ok"))
	}))
	defer server.Close()
	appendTotal := atomic.NewInt64(0)

	loop := newScrapeLoop(
		NewTarget(
			labels.FromStrings(
				model.SchemeLabel, "http",
				model.AddressLabel, strings.TrimPrefix(server.URL, "http://"),
				ProfilePath, "/debug/pprof/profile",
			), labels.FromStrings(), url.Values{
				"seconds": []string{"1"},
			}),
		server.Client(),
		phlare.AppendableFunc(func(_ context.Context, labels labels.Labels, samples []*phlare.RawSample) error {
			appendTotal.Inc()
			require.Equal(t, []byte("ok"), samples[0].RawProfile)
			return nil
		}),
		200*time.Millisecond, 30*time.Second, util.TestLogger(t))
	defer loop.stop(true)

	require.Equal(t, HealthUnknown, loop.Health())
	loop.start()
	require.Eventually(t, func() bool { return appendTotal.Load() > 3 }, 5000*time.Millisecond, 100*time.Millisecond)
	require.Equal(t, HealthGood, loop.Health())

	down.Store(true)
	require.Eventually(t, func() bool {
		return HealthBad == loop.Health()
	}, time.Second, 100*time.Millisecond)

	require.Error(t, loop.LastError())
	require.WithinDuration(t, time.Now(), loop.LastScrape(), 1*time.Second)
	require.NotEmpty(t, loop.LastScrapeDuration())
}