Path: blob/main/component/phlare/scrape/scrape_test.go
package scrape

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
	"time"

	"github.com/grafana/agent/component"
	"github.com/grafana/agent/component/discovery"
	"github.com/grafana/agent/component/phlare"
	"github.com/grafana/agent/component/prometheus/scrape"
	"github.com/grafana/agent/pkg/cluster"
	"github.com/grafana/agent/pkg/river"
	"github.com/grafana/agent/pkg/util"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/require"
	"go.uber.org/atomic"
	"go.uber.org/goleak"
)

// TestComponent verifies that the component starts, picks up target updates,
// and reports per-target status through DebugInfo.
func TestComponent(t *testing.T) {
	defer goleak.VerifyNone(t, goleak.IgnoreTopFunction("go.opencensus.io/stats/view.(*worker).start"))
	reloadInterval = 100 * time.Millisecond
	arg := NewDefaultArguments()
	arg.JobName = "test"
	c, err := New(component.Options{
		Logger:        util.TestFlowLogger(t),
		Registerer:    prometheus.NewRegistry(),
		OnStateChange: func(e component.Exports) {},
		Clusterer:     &cluster.Clusterer{Node: cluster.NewLocalNode("")},
	}, arg)
	require.NoError(t, err)
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	go func() {
		err := c.Run(ctx)
		require.NoError(t, err)
	}()

	// trigger an update
	require.Empty(t, c.appendable.Children())
	require.Empty(t, c.DebugInfo().(scrape.ScraperStatus).TargetStatus)

	arg.ForwardTo = []phlare.Appendable{phlare.NoopAppendable}
	arg.Targets = []discovery.Target{
		{
			model.AddressLabel: "foo",
		},
		{
			model.AddressLabel: "bar",
		},
	}
	c.Update(arg)

	// Expect one appendable child and one status entry per target and enabled
	// profile type.
	require.Eventually(t, func() bool {
		fmt.Println(c.DebugInfo().(scrape.ScraperStatus).TargetStatus)
		return len(c.appendable.Children()) == 1 && len(c.DebugInfo().(scrape.ScraperStatus).TargetStatus) == 10
	}, 5*time.Second, 100*time.Millisecond)
}

// TestUnmarshalConfig is a table-driven test covering River config
// unmarshaling and the validation errors it should produce.
func TestUnmarshalConfig(t *testing.T) {
	for name, tt := range map[string]struct {
		in          string
		expected    func() Arguments
		expectedErr string
	}{
		"default": {
			in: `
			targets = [
				{"__address__" = "localhost:9090", "foo" = "bar"},
			]
			forward_to = null
			`,
			expected: func() Arguments {
				r := NewDefaultArguments()
				r.Targets = []discovery.Target{
					{
						"__address__": "localhost:9090",
						"foo":         "bar",
					},
				}
				return r
			},
		},
		"custom": {
			in: `
			targets = [
				{"__address__" = "localhost:9090", "foo" = "bar"},
				{"__address__" = "localhost:8080", "foo" = "buzz"},
			]
			forward_to = null
			profiling_config {
				path_prefix = "v1/"

				profile.block {
					enabled = false
				}

				profile.custom "something" {
					enabled = true
					path = "/debug/fgprof"
					delta = true
				}
			}
			`,
			expected: func() Arguments {
				r := NewDefaultArguments()
				r.Targets = []discovery.Target{
					{
						"__address__": "localhost:9090",
						"foo":         "bar",
					},
					{
						"__address__": "localhost:8080",
						"foo":         "buzz",
					},
				}
				r.ProfilingConfig.Block.Enabled = false
				r.ProfilingConfig.Custom = append(r.ProfilingConfig.Custom, CustomProfilingTarget{
					Enabled: true,
					Path:    "/debug/fgprof",
					Delta:   true,
					Name:    "something",
				})
				r.ProfilingConfig.PprofPrefix = "v1/"
				return r
			},
		},
		"invalid cpu timeout": {
			in: `
			targets = []
			forward_to = null
			scrape_timeout = "1s"
			scrape_interval = "0.5s"
			`,
			expectedErr: "process_cpu scrape_timeout must be at least 2 seconds",
		},
		"invalid timeout/interval": {
			in: `
			targets = []
			forward_to = null
			scrape_timeout = "4s"
			scrape_interval = "5s"
			`,
			expectedErr: "scrape_timeout must be greater than scrape_interval",
		},
		"invalid HTTPClientConfig": {
			in: `
			targets = []
			forward_to = null
			scrape_timeout = "5s"
			scrape_interval = "1s"
			bearer_token = "token"
			bearer_token_file = "/path/to/file.token"
			`,
			expectedErr: "at most one of bearer_token & bearer_token_file must be configured",
		},
	} {
		tt := tt
		name := name
		t.Run(name, func(t *testing.T) {
			arg := Arguments{}
			if tt.expectedErr != "" {
				err := river.Unmarshal([]byte(tt.in), &arg)
				require.Error(t, err)
				require.Equal(t, tt.expectedErr, err.Error())
				return
			}
			require.NoError(t, river.Unmarshal([]byte(tt.in), &arg))
			require.Equal(t, tt.expected(), arg)
		})
	}
}

// TestUpdateWhileScraping ensures that updating targets does not block while
// a slow scrape is still in flight.
func TestUpdateWhileScraping(t *testing.T) {
	args := NewDefaultArguments()
	// speed up reload interval for this test
	old := reloadInterval
	reloadInterval = 1 * time.Microsecond
	defer func() {
		reloadInterval = old
	}()
	args.ScrapeInterval = 1 * time.Second

	c, err := New(component.Options{
		Logger:        util.TestFlowLogger(t),
		Registerer:    prometheus.NewRegistry(),
		OnStateChange: func(e component.Exports) {},
		Clusterer:     &cluster.Clusterer{Node: cluster.NewLocalNode("")},
	}, args)
	require.NoError(t, err)
	scraping := atomic.NewBool(false)
	ctx, cancel := context.WithCancel(context.Background())

	// Test server that hangs for 15 seconds to simulate a slow scrape.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		scraping.Store(true)
		select {
		case <-ctx.Done():
			return
		case <-time.After(15 * time.Second):
		}
		w.WriteHeader(http.StatusOK)
	}))
	defer server.Close()

	address := strings.TrimPrefix(server.URL, "http://")

	defer cancel()

	go c.Run(ctx)

	args.Targets = []discovery.Target{
		{
			model.AddressLabel: address,
			"foo":              "bar",
		},
		{
			model.AddressLabel: address,
			"foo":              "buz",
		},
	}

	c.Update(args)
	c.scraper.reload()
	// Wait for the targets to be scraping.
	require.Eventually(t, func() bool {
		return scraping.Load()
	}, 10*time.Second, 1*time.Second)

	// Send updates to the targets.
	done := make(chan struct{})
	go func() {
		for i := 0; i < 100; i++ {
			args.Targets = []discovery.Target{
				{
					model.AddressLabel: address,
					"foo":              fmt.Sprintf("%d", i),
				},
			}
			require.NoError(t, c.Update(args))
			c.scraper.reload()
		}
		close(done)
	}()
	select {
	case <-done:
	case <-time.After(10 * time.Second):
		t.Fatal("timed out waiting for updates to finish")
	}
}