diff --git a/scrape/scrape.go b/scrape/scrape.go
index 6be2525fe0..33683b4caf 100644
--- a/scrape/scrape.go
+++ b/scrape/scrape.go
@@ -1182,7 +1182,7 @@ func newScrapeLoop(opts scrapeLoopOptions) *scrapeLoop {
 		interval:            opts.interval,
 		timeout:             opts.timeout,
 		sampleMutator: func(l labels.Labels) labels.Labels {
-			return mutateSampleLabels(l, opts.target, opts.sp.config.HonorTimestamps, opts.sp.config.MetricRelabelConfigs)
+			return mutateSampleLabels(l, opts.target, opts.sp.config.HonorLabels, opts.sp.config.MetricRelabelConfigs)
 		},
 		reportSampleMutator: func(l labels.Labels) labels.Labels { return mutateReportSampleLabels(l, opts.target) },
 		scraper:             opts.scraper,
diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go
index ae004bbd56..7aa633d387 100644
--- a/scrape/scrape_test.go
+++ b/scrape/scrape_test.go
@@ -5853,3 +5853,82 @@ func BenchmarkScrapePoolRestartLoops(b *testing.B) {
 		sp.restartLoops(true)
 	}
 }
+
+// TestNewScrapeLoopHonorLabelsWiring verifies that newScrapeLoop correctly wires
+// HonorLabels (not HonorTimestamps) to the sampleMutator.
+func TestNewScrapeLoopHonorLabelsWiring(t *testing.T) {
+	// Scraped metric has label "lbl" with value "scraped".
+	// Discovery target has label "lbl" with value "discovery".
+	// With honor_labels=true, the scraped value should win.
+	// With honor_labels=false, the discovery value should win and scraped moves to exported_lbl.
+	testCases := []struct {
+		name           string
+		honorLabels    bool
+		expectedLbl    string
+		expectedExpLbl string // exported_lbl value, empty if not expected
+	}{
+		{
+			name:        "honor_labels=true",
+			honorLabels: true,
+			expectedLbl: "scraped",
+		},
+		{
+			name:           "honor_labels=false",
+			honorLabels:    false,
+			expectedLbl:    "discovery",
+			expectedExpLbl: "scraped",
+		},
+	}
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			ts, scrapedTwice := newScrapableServer(`metric{lbl="scraped"} 1`)
+			defer ts.Close()
+
+			testURL, err := url.Parse(ts.URL)
+			require.NoError(t, err)
+
+			s := teststorage.New(t)
+			defer s.Close()
+
+			cfg := &config.ScrapeConfig{
+				JobName:                    "test",
+				Scheme:                     "http",
+				HonorLabels:                tc.honorLabels,
+				HonorTimestamps:            !tc.honorLabels, // Opposite of HonorLabels to catch wiring bugs
+				ScrapeInterval:             model.Duration(1 * time.Second),
+				ScrapeTimeout:              model.Duration(100 * time.Millisecond),
+				MetricNameValidationScheme: model.UTF8Validation,
+			}
+
+			sp, err := newScrapePool(cfg, s, 0, nil, nil, &Options{skipOffsetting: true}, newTestScrapeMetrics(t))
+			require.NoError(t, err)
+			defer sp.stop()
+
+			// Sync with a target that has a conflicting label.
+			sp.Sync([]*targetgroup.Group{{
+				Targets: []model.LabelSet{{
+					model.AddressLabel: model.LabelValue(testURL.Host),
+					"lbl":              "discovery",
+				}},
+			}})
+			require.Len(t, sp.ActiveTargets(), 1)
+
+			// Wait for scrape to complete.
+			select {
+			case <-time.After(5 * time.Second):
+				t.Fatal("scrape did not complete in time")
+			case <-scrapedTwice:
+			}
+
+			// Query the storage to verify label values.
+			q, err := s.Querier(0, time.Now().UnixMilli())
+			require.NoError(t, err)
+			defer q.Close()
+
+			series := q.Select(t.Context(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "__name__", "metric"))
+			require.True(t, series.Next(), "metric series not found")
+			require.Equal(t, tc.expectedLbl, series.At().Labels().Get("lbl"))
+			require.Equal(t, tc.expectedExpLbl, series.At().Labels().Get("exported_lbl"))
+		})
+	}
+}