Mirror of https://github.com/prometheus/prometheus.git (synced 2026-02-19 02:29:16 -05:00)
Merge pull request #17977 from tinitiuset/tinitiuset/parser-configuration
PromQL: Refactor parser to use instance configuration instead of global flags
Commit 7d40cd47e2
39 changed files with 539 additions and 425 deletions
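
Before the hunks below, here is a minimal sketch (not part of the commit) of how calling code adopts the new instance-based API, assuming only the signatures visible in this diff: parser.Options, parser.NewParser, the Parser interface's ParseExpr, and the new EngineOpts.Parser field. The query string and option values are illustrative.

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	// Build a parser instance from explicit options instead of mutating
	// package-level flags such as parser.EnableExperimentalFunctions.
	p := parser.NewParser(parser.Options{
		EnableExperimentalFunctions: true,
		ExperimentalDurationExpr:    true,
	})

	// The Parser interface exposes the former package-level helpers as methods.
	expr, err := p.ParseExpr(`rate(http_requests_total[5m])`)
	if err != nil {
		panic(err)
	}
	fmt.Println(expr.String())

	// The engine now carries its own parser; per this diff, a nil Parser
	// falls back to parser.NewParser(parser.Options{}).
	engine := promql.NewEngine(promql.EngineOpts{
		MaxSamples: 50000000,
		Timeout:    2 * time.Minute,
		Parser:     p,
	})
	_ = engine
}
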
@@ -218,6 +218,8 @@ type flagConfig struct {
promqlEnableDelayedNameRemoval bool
+ parserOpts parser.Options
promslogConfig promslog.Config
}

@@ -255,10 +257,10 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
c.enableConcurrentRuleEval = true
logger.Info("Experimental concurrent rule evaluation enabled.")
case "promql-experimental-functions":
- parser.EnableExperimentalFunctions = true
+ c.parserOpts.EnableExperimentalFunctions = true
logger.Info("Experimental PromQL functions enabled.")
case "promql-duration-expr":
- parser.ExperimentalDurationExpr = true
+ c.parserOpts.ExperimentalDurationExpr = true
logger.Info("Experimental duration expression parsing enabled.")
case "native-histograms":
logger.Warn("This option for --enable-feature is a no-op. To scrape native histograms, set the scrape_native_histograms scrape config setting to true.", "option", o)

@@ -292,10 +294,10 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
c.promqlEnableDelayedNameRemoval = true
logger.Info("Experimental PromQL delayed name removal enabled.")
case "promql-extended-range-selectors":
- parser.EnableExtendedRangeSelectors = true
+ c.parserOpts.EnableExtendedRangeSelectors = true
logger.Info("Experimental PromQL extended range selectors enabled.")
case "promql-binop-fill-modifiers":
- parser.EnableBinopFillModifiers = true
+ c.parserOpts.EnableBinopFillModifiers = true
logger.Info("Experimental PromQL binary operator fill modifiers enabled.")
case "":
continue

@@ -630,6 +632,8 @@ func main() {
os.Exit(1)
}
+ promqlParser := parser.NewParser(cfg.parserOpts)
if agentMode && len(serverOnlyFlags) > 0 {
fmt.Fprintf(os.Stderr, "The following flag(s) can not be used in agent mode: %q", serverOnlyFlags)
os.Exit(3)

@@ -684,7 +688,7 @@ func main() {
}
// Parse rule files to verify they exist and contain valid rules.
- if err := rules.ParseFiles(cfgFile.RuleFiles, cfgFile.GlobalConfig.MetricNameValidationScheme); err != nil {
+ if err := rules.ParseFiles(cfgFile.RuleFiles, cfgFile.GlobalConfig.MetricNameValidationScheme, promqlParser); err != nil {
absPath, pathErr := filepath.Abs(cfg.configFile)
if pathErr != nil {
absPath = cfg.configFile

@@ -921,6 +925,7 @@ func main() {
EnableDelayedNameRemoval: cfg.promqlEnableDelayedNameRemoval,
EnableTypeAndUnitLabels: cfg.scrape.EnableTypeAndUnitLabels,
FeatureRegistry: features.DefaultRegistry,
+ Parser: promqlParser,
}
queryEngine = promql.NewEngine(opts)

@@ -944,6 +949,7 @@ func main() {
return time.Duration(cfgFile.GlobalConfig.RuleQueryOffset)
},
FeatureRegistry: features.DefaultRegistry,
+ Parser: promqlParser,
})
}

@@ -963,6 +969,7 @@ func main() {
cfg.web.LookbackDelta = time.Duration(cfg.lookbackDelta)
cfg.web.IsAgent = agentMode
cfg.web.AppName = modeAppName
+ cfg.web.Parser = promqlParser
cfg.web.Version = &web.PrometheusVersion{
Version: version.Version,

@@ -61,7 +61,10 @@ import (
"github.com/prometheus/prometheus/util/documentcli"
)
- var promqlEnableDelayedNameRemoval = false
+ var (
+ promqlEnableDelayedNameRemoval = false
+ promtoolParserOpts parser.Options
+ )
func init() {
// This can be removed when the legacy global mode is fully deprecated.

@@ -348,13 +351,13 @@ func main() {
for o := range strings.SplitSeq(f, ",") {
switch o {
case "promql-experimental-functions":
- parser.EnableExperimentalFunctions = true
+ promtoolParserOpts.EnableExperimentalFunctions = true
case "promql-delayed-name-removal":
promqlEnableDelayedNameRemoval = true
case "promql-duration-expr":
- parser.ExperimentalDurationExpr = true
+ promtoolParserOpts.ExperimentalDurationExpr = true
case "promql-extended-range-selectors":
- parser.EnableExtendedRangeSelectors = true
+ promtoolParserOpts.EnableExtendedRangeSelectors = true
case "":
continue
default:

@@ -362,6 +365,7 @@ func main() {
}
}
}
+ promtoolParser := parser.NewParser(promtoolParserOpts)
switch parsedCmd {
case sdCheckCmd.FullCommand():

@@ -380,7 +384,7 @@ func main() {
os.Exit(CheckWebConfig(*webConfigFiles...))
case checkRulesCmd.FullCommand():
- os.Exit(CheckRules(newRulesLintConfig(*checkRulesLint, *checkRulesLintFatal, *checkRulesIgnoreUnknownFields, model.UTF8Validation), *ruleFiles...))
+ os.Exit(CheckRules(newRulesLintConfig(*checkRulesLint, *checkRulesLintFatal, *checkRulesIgnoreUnknownFields, model.UTF8Validation), promtoolParser, *ruleFiles...))
case checkMetricsCmd.FullCommand():
os.Exit(CheckMetrics(*checkMetricsExtended, *checkMetricsLint))

@@ -420,6 +424,7 @@ func main() {
EnableNegativeOffset: true,
EnableDelayedNameRemoval: promqlEnableDelayedNameRemoval,
},
+ promtoolParser,
*testRulesRun,
*testRulesDiff,
*testRulesDebug,

@@ -431,7 +436,7 @@ func main() {
os.Exit(checkErr(benchmarkWrite(*benchWriteOutPath, *benchSamplesFile, *benchWriteNumMetrics, *benchWriteNumScrapes)))
case tsdbAnalyzeCmd.FullCommand():
- os.Exit(checkErr(analyzeBlock(ctx, *analyzePath, *analyzeBlockID, *analyzeLimit, *analyzeRunExtended, *analyzeMatchers)))
+ os.Exit(checkErr(analyzeBlock(ctx, *analyzePath, *analyzeBlockID, *analyzeLimit, *analyzeRunExtended, *analyzeMatchers, promtoolParser)))
case tsdbListCmd.FullCommand():
os.Exit(checkErr(listBlocks(*listPath, *listHumanReadable)))

@@ -441,10 +446,10 @@ func main() {
if *dumpFormat == "seriesjson" {
format = formatSeriesSetLabelsToJSON
}
- os.Exit(checkErr(dumpTSDBData(ctx, *dumpPath, *dumpSandboxDirRoot, *dumpMinTime, *dumpMaxTime, *dumpMatch, format)))
+ os.Exit(checkErr(dumpTSDBData(ctx, *dumpPath, *dumpSandboxDirRoot, *dumpMinTime, *dumpMaxTime, *dumpMatch, format, promtoolParser)))
case tsdbDumpOpenMetricsCmd.FullCommand():
- os.Exit(checkErr(dumpTSDBData(ctx, *dumpOpenMetricsPath, *dumpOpenMetricsSandboxDirRoot, *dumpOpenMetricsMinTime, *dumpOpenMetricsMaxTime, *dumpOpenMetricsMatch, formatSeriesSetOpenMetrics)))
+ os.Exit(checkErr(dumpTSDBData(ctx, *dumpOpenMetricsPath, *dumpOpenMetricsSandboxDirRoot, *dumpOpenMetricsMinTime, *dumpOpenMetricsMaxTime, *dumpOpenMetricsMatch, formatSeriesSetOpenMetrics, promtoolParser)))
// TODO(aSquare14): Work on adding support for custom block size.
case openMetricsImportCmd.FullCommand():
os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable, *importQuiet, *maxBlockDuration, *openMetricsLabels))

@@ -460,15 +465,15 @@ func main() {
case promQLFormatCmd.FullCommand():
checkExperimental(*experimental)
- os.Exit(checkErr(formatPromQL(*promQLFormatQuery)))
+ os.Exit(checkErr(formatPromQL(*promQLFormatQuery, promtoolParser)))
case promQLLabelsSetCmd.FullCommand():
checkExperimental(*experimental)
- os.Exit(checkErr(labelsSetPromQL(*promQLLabelsSetQuery, *promQLLabelsSetType, *promQLLabelsSetName, *promQLLabelsSetValue)))
+ os.Exit(checkErr(labelsSetPromQL(*promQLLabelsSetQuery, *promQLLabelsSetType, *promQLLabelsSetName, *promQLLabelsSetValue, promtoolParser)))
case promQLLabelsDeleteCmd.FullCommand():
checkExperimental(*experimental)
- os.Exit(checkErr(labelsDeletePromQL(*promQLLabelsDeleteQuery, *promQLLabelsDeleteName)))
+ os.Exit(checkErr(labelsDeletePromQL(*promQLLabelsDeleteQuery, *promQLLabelsDeleteName, promtoolParser)))
}
}

@@ -614,7 +619,7 @@ func CheckConfig(agentMode, checkSyntaxOnly bool, lintSettings configLintConfig,
if !checkSyntaxOnly {
scrapeConfigsFailed := lintScrapeConfigs(scrapeConfigs, lintSettings)
failed = failed || scrapeConfigsFailed
- rulesFailed, rulesHaveErrors := checkRules(ruleFiles, lintSettings.rulesLintConfig)
+ rulesFailed, rulesHaveErrors := checkRules(ruleFiles, lintSettings.rulesLintConfig, parser.NewParser(parser.Options{}))
failed = failed || rulesFailed
hasErrors = hasErrors || rulesHaveErrors
}

@@ -841,13 +846,13 @@ func checkSDFile(filename string) ([]*targetgroup.Group, error) {
}
// CheckRules validates rule files.
- func CheckRules(ls rulesLintConfig, files ...string) int {
+ func CheckRules(ls rulesLintConfig, p parser.Parser, files ...string) int {
failed := false
hasErrors := false
if len(files) == 0 {
- failed, hasErrors = checkRulesFromStdin(ls)
+ failed, hasErrors = checkRulesFromStdin(ls, p)
} else {
- failed, hasErrors = checkRules(files, ls)
+ failed, hasErrors = checkRules(files, ls, p)
}
if failed && hasErrors {

@@ -861,7 +866,7 @@ func CheckRules(ls rulesLintConfig, files ...string) int {
}
// checkRulesFromStdin validates rule from stdin.
- func checkRulesFromStdin(ls rulesLintConfig) (bool, bool) {
+ func checkRulesFromStdin(ls rulesLintConfig, p parser.Parser) (bool, bool) {
failed := false
hasErrors := false
fmt.Println("Checking standard input")

@@ -870,7 +875,7 @@ func checkRulesFromStdin(ls rulesLintConfig) (bool, bool) {
fmt.Fprintln(os.Stderr, " FAILED:", err)
return true, true
}
- rgs, errs := rulefmt.Parse(data, ls.ignoreUnknownFields, ls.nameValidationScheme)
+ rgs, errs := rulefmt.Parse(data, ls.ignoreUnknownFields, ls.nameValidationScheme, p)
if errs != nil {
failed = true
fmt.Fprintln(os.Stderr, " FAILED:")

@@ -899,12 +904,12 @@ func checkRulesFromStdin(ls rulesLintConfig) (bool, bool) {
}
// checkRules validates rule files.
- func checkRules(files []string, ls rulesLintConfig) (bool, bool) {
+ func checkRules(files []string, ls rulesLintConfig, p parser.Parser) (bool, bool) {
failed := false
hasErrors := false
for _, f := range files {
fmt.Println("Checking", f)
- rgs, errs := rulefmt.ParseFile(f, ls.ignoreUnknownFields, ls.nameValidationScheme)
+ rgs, errs := rulefmt.ParseFile(f, ls.ignoreUnknownFields, ls.nameValidationScheme, p)
if errs != nil {
failed = true
fmt.Fprintln(os.Stderr, " FAILED:")

@@ -1345,8 +1350,8 @@ func checkTargetGroupsForScrapeConfig(targetGroups []*targetgroup.Group, scfg *c
return nil
}
- func formatPromQL(query string) error {
- expr, err := parser.ParseExpr(query)
+ func formatPromQL(query string, p parser.Parser) error {
+ expr, err := p.ParseExpr(query)
if err != nil {
return err
}

@@ -1355,8 +1360,8 @@ func formatPromQL(query string) error {
return nil
}
- func labelsSetPromQL(query, labelMatchType, name, value string) error {
- expr, err := parser.ParseExpr(query)
+ func labelsSetPromQL(query, labelMatchType, name, value string, p parser.Parser) error {
+ expr, err := p.ParseExpr(query)
if err != nil {
return err
}

@@ -1400,8 +1405,8 @@ func labelsSetPromQL(query, labelMatchType, name, value string) error {
return nil
}
- func labelsDeletePromQL(query, name string) error {
- expr, err := parser.ParseExpr(query)
+ func labelsDeletePromQL(query, name string, p parser.Parser) error {
+ expr, err := p.ParseExpr(query)
if err != nil {
return err
}

@@ -37,6 +37,7 @@ import (
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/model/rulefmt"
+ "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/promql/promqltest"
)

@@ -187,7 +188,7 @@ func TestCheckDuplicates(t *testing.T) {
c := test
t.Run(c.name, func(t *testing.T) {
t.Parallel()
- rgs, err := rulefmt.ParseFile(c.ruleFile, false, model.UTF8Validation)
+ rgs, err := rulefmt.ParseFile(c.ruleFile, false, model.UTF8Validation, parser.NewParser(parser.Options{}))
require.Empty(t, err)
dups := checkDuplicates(rgs.Groups)
require.Equal(t, c.expectedDups, dups)

@@ -196,7 +197,7 @@ func TestCheckDuplicates(t *testing.T) {
}
func BenchmarkCheckDuplicates(b *testing.B) {
- rgs, err := rulefmt.ParseFile("./testdata/rules_large.yml", false, model.UTF8Validation)
+ rgs, err := rulefmt.ParseFile("./testdata/rules_large.yml", false, model.UTF8Validation, parser.NewParser(parser.Options{}))
require.Empty(b, err)
for b.Loop() {

@@ -602,7 +603,7 @@ func TestCheckRules(t *testing.T) {
defer func(v *os.File) { os.Stdin = v }(os.Stdin)
os.Stdin = r
- exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation))
+ exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation), parser.NewParser(parser.Options{}))
require.Equal(t, successExitCode, exitCode)
})

@@ -624,7 +625,7 @@ func TestCheckRules(t *testing.T) {
defer func(v *os.File) { os.Stdin = v }(os.Stdin)
os.Stdin = r
- exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation))
+ exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation), parser.NewParser(parser.Options{}))
require.Equal(t, failureExitCode, exitCode)
})

@@ -646,7 +647,7 @@ func TestCheckRules(t *testing.T) {
defer func(v *os.File) { os.Stdin = v }(os.Stdin)
os.Stdin = r
- exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true, false, model.UTF8Validation))
+ exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true, false, model.UTF8Validation), parser.NewParser(parser.Options{}))
require.Equal(t, lintErrExitCode, exitCode)
})
}

@@ -664,19 +665,19 @@ func TestCheckRulesWithFeatureFlag(t *testing.T) {
func TestCheckRulesWithRuleFiles(t *testing.T) {
t.Run("rules-good", func(t *testing.T) {
t.Parallel()
- exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation), "./testdata/rules.yml")
+ exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation), parser.NewParser(parser.Options{}), "./testdata/rules.yml")
require.Equal(t, successExitCode, exitCode)
})
t.Run("rules-bad", func(t *testing.T) {
t.Parallel()
- exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation), "./testdata/rules-bad.yml")
+ exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false, model.UTF8Validation), parser.NewParser(parser.Options{}), "./testdata/rules-bad.yml")
require.Equal(t, failureExitCode, exitCode)
})
t.Run("rules-lint-fatal", func(t *testing.T) {
t.Parallel()
- exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true, false, model.UTF8Validation), "./testdata/prometheus-rules.lint.yml")
+ exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true, false, model.UTF8Validation), parser.NewParser(parser.Options{}), "./testdata/prometheus-rules.lint.yml")
require.Equal(t, lintErrExitCode, exitCode)
})
}

@@ -408,13 +408,13 @@ func openBlock(path, blockID string) (*tsdb.DBReadOnly, tsdb.BlockReader, error)
return db, b, nil
}
- func analyzeBlock(ctx context.Context, path, blockID string, limit int, runExtended bool, matchers string) error {
+ func analyzeBlock(ctx context.Context, path, blockID string, limit int, runExtended bool, matchers string, p parser.Parser) error {
var (
selectors []*labels.Matcher
err error
)
if len(matchers) > 0 {
- selectors, err = parser.ParseMetricSelector(matchers)
+ selectors, err = p.ParseMetricSelector(matchers)
if err != nil {
return err
}

@@ -478,24 +478,24 @@ func analyzeBlock(ctx context.Context, path, blockID string, limit int, runExten
labelpairsCount := map[string]uint64{}
entries := 0
var (
- p index.Postings
- refs []storage.SeriesRef
+ postings index.Postings
+ refs []storage.SeriesRef
)
if len(matchers) > 0 {
- p, err = tsdb.PostingsForMatchers(ctx, ir, selectors...)
+ postings, err = tsdb.PostingsForMatchers(ctx, ir, selectors...)
if err != nil {
return err
}
// Expand refs first and cache in memory.
// So later we don't have to expand again.
- refs, err = index.ExpandPostings(p)
+ refs, err = index.ExpandPostings(postings)
if err != nil {
return err
}
fmt.Printf("Matched series: %d\n", len(refs))
- p = index.NewListPostings(refs)
+ postings = index.NewListPostings(refs)
} else {
- p, err = ir.Postings(ctx, "", "") // The special all key.
+ postings, err = ir.Postings(ctx, "", "") // The special all key.
if err != nil {
return err
}

@@ -503,8 +503,8 @@ func analyzeBlock(ctx context.Context, path, blockID string, limit int, runExten
chks := []chunks.Meta{}
builder := labels.ScratchBuilder{}
- for p.Next() {
- if err = ir.Series(p.At(), &builder, &chks); err != nil {
+ for postings.Next() {
+ if err = ir.Series(postings.At(), &builder, &chks); err != nil {
return err
}
// Amount of the block time range not covered by this series.

@@ -517,8 +517,8 @@ func analyzeBlock(ctx context.Context, path, blockID string, limit int, runExten
entries++
})
}
- if p.Err() != nil {
- return p.Err()
+ if postings.Err() != nil {
+ return postings.Err()
}
fmt.Printf("Postings (unique label pairs): %d\n", len(labelpairsUncovered))
fmt.Printf("Postings entries (total label pairs): %d\n", entries)

@@ -706,7 +706,7 @@ func analyzeCompaction(ctx context.Context, block tsdb.BlockReader, indexr tsdb.
type SeriesSetFormatter func(series storage.SeriesSet) error
- func dumpTSDBData(ctx context.Context, dbDir, sandboxDirRoot string, mint, maxt int64, match []string, formatter SeriesSetFormatter) (err error) {
+ func dumpTSDBData(ctx context.Context, dbDir, sandboxDirRoot string, mint, maxt int64, match []string, formatter SeriesSetFormatter, p parser.Parser) (err error) {
db, err := tsdb.OpenDBReadOnly(dbDir, sandboxDirRoot, nil)
if err != nil {
return err

@@ -720,7 +720,7 @@ func dumpTSDBData(ctx context.Context, dbDir, sandboxDirRoot string, mint, maxt
}
defer q.Close()
- matcherSets, err := parser.ParseMetricSelectors(match)
+ matcherSets, err := p.ParseMetricSelectors(match)
if err != nil {
return err
}

@@ -27,6 +27,7 @@ import (
"github.com/stretchr/testify/require"
+ "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/promql/promqltest"
"github.com/prometheus/prometheus/tsdb"
)

@@ -71,6 +72,7 @@ func getDumpedSamples(t *testing.T, databasePath, sandboxDirRoot string, mint, m
maxt,
match,
formatter,
+ parser.NewParser(parser.Options{}),
)
require.NoError(t, err)

@@ -47,11 +47,11 @@ import (
// RulesUnitTest does unit testing of rules based on the unit testing files provided.
// More info about the file format can be found in the docs.
- func RulesUnitTest(queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug, ignoreUnknownFields bool, files ...string) int {
- return RulesUnitTestResult(io.Discard, queryOpts, runStrings, diffFlag, debug, ignoreUnknownFields, files...)
+ func RulesUnitTest(queryOpts promqltest.LazyLoaderOpts, p parser.Parser, runStrings []string, diffFlag, debug, ignoreUnknownFields bool, files ...string) int {
+ return RulesUnitTestResult(io.Discard, queryOpts, p, runStrings, diffFlag, debug, ignoreUnknownFields, files...)
}
- func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug, ignoreUnknownFields bool, files ...string) int {
+ func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts, p parser.Parser, runStrings []string, diffFlag, debug, ignoreUnknownFields bool, files ...string) int {
failed := false
junit := &junitxml.JUnitXML{}

@@ -61,7 +61,7 @@ func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts,
}
for _, f := range files {
- if errs := ruleUnitTest(f, queryOpts, run, diffFlag, debug, ignoreUnknownFields, junit.Suite(f)); errs != nil {
+ if errs := ruleUnitTest(f, queryOpts, p, run, diffFlag, debug, ignoreUnknownFields, junit.Suite(f)); errs != nil {
fmt.Fprintln(os.Stderr, " FAILED:")
for _, e := range errs {
fmt.Fprintln(os.Stderr, e.Error())

@@ -83,7 +83,7 @@ func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts,
return successExitCode
}
- func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *regexp.Regexp, diffFlag, debug, ignoreUnknownFields bool, ts *junitxml.TestSuite) []error {
+ func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, p parser.Parser, run *regexp.Regexp, diffFlag, debug, ignoreUnknownFields bool, ts *junitxml.TestSuite) []error {
b, err := os.ReadFile(filename)
if err != nil {
ts.Abort(err)

@@ -132,6 +132,7 @@ func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *reg
if t.Interval == 0 {
t.Interval = unitTestInp.EvaluationInterval
}
+ t.parser = p
ers := t.test(testname, evalInterval, groupOrderMap, queryOpts, diffFlag, debug, ignoreUnknownFields, unitTestInp.FuzzyCompare, unitTestInp.RuleFiles...)
if ers != nil {
for _, e := range ers {

@@ -219,6 +220,8 @@ type testGroup struct {
ExternalURL string `yaml:"external_url,omitempty"`
TestGroupName string `yaml:"name,omitempty"`
StartTimestamp testStartTimestamp `yaml:"start_timestamp,omitempty"`
+ parser parser.Parser `yaml:"-"`
}
// test performs the unit tests.

@@ -482,10 +485,10 @@ Outer:
var expSamples []parsedSample
for _, s := range testCase.ExpSamples {
- lb, err := parser.ParseMetric(s.Labels)
+ lb, err := tg.parser.ParseMetric(s.Labels)
var hist *histogram.FloatHistogram
if err == nil && s.Histogram != "" {
- _, values, parseErr := parser.ParseSeriesDesc("{} " + s.Histogram)
+ _, values, parseErr := tg.parser.ParseSeriesDesc("{} " + s.Histogram)
switch {
case parseErr != nil:
err = parseErr

@@ -21,6 +21,7 @@ import (
"github.com/stretchr/testify/require"
+ "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/promql/promqltest"
"github.com/prometheus/prometheus/util/junitxml"
)

@@ -153,7 +154,7 @@ func TestRulesUnitTest(t *testing.T) {
}
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
- if got := RulesUnitTest(tt.queryOpts, nil, false, false, false, tt.args.files...); got != tt.want {
+ if got := RulesUnitTest(tt.queryOpts, parser.NewParser(parser.Options{}), nil, false, false, false, tt.args.files...); got != tt.want {
t.Errorf("RulesUnitTest() = %v, want %v", got, tt.want)
}
})

@@ -161,7 +162,7 @@ func TestRulesUnitTest(t *testing.T) {
t.Run("Junit xml output ", func(t *testing.T) {
t.Parallel()
var buf bytes.Buffer
- if got := RulesUnitTestResult(&buf, promqltest.LazyLoaderOpts{}, nil, false, false, false, reuseFiles...); got != 1 {
+ if got := RulesUnitTestResult(&buf, promqltest.LazyLoaderOpts{}, parser.NewParser(parser.Options{}), nil, false, false, false, reuseFiles...); got != 1 {
t.Errorf("RulesUnitTestResults() = %v, want 1", got)
}
var test junitxml.JUnitXML

@@ -277,7 +278,7 @@ func TestRulesUnitTestRun(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
- got := RulesUnitTest(tt.queryOpts, tt.args.run, false, false, tt.ignoreUnknownFields, tt.args.files...)
+ got := RulesUnitTest(tt.queryOpts, parser.NewParser(parser.Options{}), tt.args.run, false, false, tt.ignoreUnknownFields, tt.args.files...)
require.Equal(t, tt.want, got)
})
}

@@ -97,7 +97,7 @@ type ruleGroups struct {
}
// Validate validates all rules in the rule groups.
- func (g *RuleGroups) Validate(node ruleGroups, nameValidationScheme model.ValidationScheme) (errs []error) {
+ func (g *RuleGroups) Validate(node ruleGroups, nameValidationScheme model.ValidationScheme, p parser.Parser) (errs []error) {
if err := namevalidationutil.CheckNameValidationScheme(nameValidationScheme); err != nil {
errs = append(errs, err)
return errs

@@ -134,7 +134,7 @@ func (g *RuleGroups) Validate(node ruleGroups, nameValidationScheme model.Valida
set[g.Name] = struct{}{}
for i, r := range g.Rules {
- for _, node := range r.Validate(node.Groups[j].Rules[i], nameValidationScheme) {
+ for _, node := range r.Validate(node.Groups[j].Rules[i], nameValidationScheme, p) {
var ruleName string
if r.Alert != "" {
ruleName = r.Alert

@@ -198,7 +198,7 @@ type RuleNode struct {
}
// Validate the rule and return a list of encountered errors.
- func (r *Rule) Validate(node RuleNode, nameValidationScheme model.ValidationScheme) (nodes []WrappedError) {
+ func (r *Rule) Validate(node RuleNode, nameValidationScheme model.ValidationScheme, p parser.Parser) (nodes []WrappedError) {
if r.Record != "" && r.Alert != "" {
nodes = append(nodes, WrappedError{
err: errors.New("only one of 'record' and 'alert' must be set"),

@@ -219,7 +219,7 @@ func (r *Rule) Validate(node RuleNode, nameValidationScheme model.ValidationSche
err: errors.New("field 'expr' must be set in rule"),
node: &node.Expr,
})
- } else if _, err := parser.ParseExpr(r.Expr); err != nil {
+ } else if _, err := p.ParseExpr(r.Expr); err != nil {
nodes = append(nodes, WrappedError{
err: fmt.Errorf("could not parse expression: %w", err),
node: &node.Expr,

@@ -339,7 +339,7 @@ func testTemplateParsing(rl *Rule) (errs []error) {
}
// Parse parses and validates a set of rules.
- func Parse(content []byte, ignoreUnknownFields bool, nameValidationScheme model.ValidationScheme) (*RuleGroups, []error) {
+ func Parse(content []byte, ignoreUnknownFields bool, nameValidationScheme model.ValidationScheme, p parser.Parser) (*RuleGroups, []error) {
var (
groups RuleGroups
node ruleGroups

@@ -364,16 +364,16 @@ func Parse(content []byte, ignoreUnknownFields bool, nameValidationScheme model.
return nil, errs
}
- return &groups, groups.Validate(node, nameValidationScheme)
+ return &groups, groups.Validate(node, nameValidationScheme, p)
}
// ParseFile reads and parses rules from a file.
- func ParseFile(file string, ignoreUnknownFields bool, nameValidationScheme model.ValidationScheme) (*RuleGroups, []error) {
+ func ParseFile(file string, ignoreUnknownFields bool, nameValidationScheme model.ValidationScheme, p parser.Parser) (*RuleGroups, []error) {
b, err := os.ReadFile(file)
if err != nil {
return nil, []error{fmt.Errorf("%s: %w", file, err)}
}
- rgs, errs := Parse(b, ignoreUnknownFields, nameValidationScheme)
+ rgs, errs := Parse(b, ignoreUnknownFields, nameValidationScheme, p)
for i := range errs {
errs[i] = fmt.Errorf("%s: %w", file, errs[i])
}

@@ -22,17 +22,21 @@ import (
"github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"go.yaml.in/yaml/v3"
+ "github.com/prometheus/prometheus/promql/parser"
)
+ var testParser = parser.NewParser(parser.Options{})
func TestParseFileSuccess(t *testing.T) {
- _, errs := ParseFile("testdata/test.yaml", false, model.UTF8Validation)
+ _, errs := ParseFile("testdata/test.yaml", false, model.UTF8Validation, testParser)
require.Empty(t, errs, "unexpected errors parsing file")
- _, errs = ParseFile("testdata/utf-8_lname.good.yaml", false, model.UTF8Validation)
+ _, errs = ParseFile("testdata/utf-8_lname.good.yaml", false, model.UTF8Validation, testParser)
require.Empty(t, errs, "unexpected errors parsing file")
- _, errs = ParseFile("testdata/utf-8_annotation.good.yaml", false, model.UTF8Validation)
+ _, errs = ParseFile("testdata/utf-8_annotation.good.yaml", false, model.UTF8Validation, testParser)
require.Empty(t, errs, "unexpected errors parsing file")
- _, errs = ParseFile("testdata/legacy_validation_annotation.good.yaml", false, model.LegacyValidation)
+ _, errs = ParseFile("testdata/legacy_validation_annotation.good.yaml", false, model.LegacyValidation, testParser)
require.Empty(t, errs, "unexpected errors parsing file")
}

@@ -41,7 +45,7 @@ func TestParseFileSuccessWithAliases(t *testing.T) {
/
sum without(instance) (rate(requests_total[5m]))
`
- rgs, errs := ParseFile("testdata/test_aliases.yaml", false, model.UTF8Validation)
+ rgs, errs := ParseFile("testdata/test_aliases.yaml", false, model.UTF8Validation, testParser)
require.Empty(t, errs, "unexpected errors parsing file")
for _, rg := range rgs.Groups {
require.Equal(t, "HighAlert", rg.Rules[0].Alert)

@@ -119,7 +123,7 @@ func TestParseFileFailure(t *testing.T) {
if c.nameValidationScheme == model.UnsetValidation {
c.nameValidationScheme = model.UTF8Validation
}
- _, errs := ParseFile(filepath.Join("testdata", c.filename), false, c.nameValidationScheme)
+ _, errs := ParseFile(filepath.Join("testdata", c.filename), false, c.nameValidationScheme, testParser)
require.NotEmpty(t, errs, "Expected error parsing %s but got none", c.filename)
require.ErrorContainsf(t, errs[0], c.errMsg, "Expected error for %s.", c.filename)
})

@@ -215,7 +219,7 @@ groups:
}
for _, tst := range tests {
- rgs, errs := Parse([]byte(tst.ruleString), false, model.UTF8Validation)
+ rgs, errs := Parse([]byte(tst.ruleString), false, model.UTF8Validation, testParser)
require.NotNil(t, rgs, "Rule parsing, rule=\n"+tst.ruleString)
passed := (tst.shouldPass && len(errs) == 0) || (!tst.shouldPass && len(errs) > 0)
require.True(t, passed, "Rule validation failed, rule=\n"+tst.ruleString)

@@ -242,7 +246,7 @@ groups:
annotations:
summary: "Instance {{ $labels.instance }} up"
`
- _, errs := Parse([]byte(group), false, model.UTF8Validation)
+ _, errs := Parse([]byte(group), false, model.UTF8Validation, testParser)
require.Len(t, errs, 2, "Expected two errors")
var err00 *Error
require.ErrorAs(t, errs[0], &err00)

@@ -36,6 +36,8 @@ import (
"github.com/prometheus/prometheus/util/teststorage"
)
+ var testParser = parser.NewParser(parser.Options{})
func setupRangeQueryTestData(stor *teststorage.TestStorage, _ *promql.Engine, interval, numIntervals int) error {
ctx := context.Background()

@@ -332,10 +334,6 @@ func rangeQueryCases() []benchCase {
}
func BenchmarkRangeQuery(b *testing.B) {
- parser.EnableExtendedRangeSelectors = true
- b.Cleanup(func() {
- parser.EnableExtendedRangeSelectors = false
- })
stor := teststorage.New(b)
stor.DisableCompactions() // Don't want auto-compaction disrupting timings.

@@ -344,6 +342,7 @@ func BenchmarkRangeQuery(b *testing.B) {
Reg: nil,
MaxSamples: 50000000,
Timeout: 100 * time.Second,
+ Parser: parser.NewParser(parser.Options{EnableExtendedRangeSelectors: true}),
}
engine := promqltest.NewTestEngineWithOpts(b, opts)

@@ -804,13 +803,13 @@ func BenchmarkParser(b *testing.B) {
b.Run(c, func(b *testing.B) {
b.ReportAllocs()
for b.Loop() {
- parser.ParseExpr(c)
+ testParser.ParseExpr(c)
}
})
}
for _, c := range cases {
b.Run("preprocess "+c, func(b *testing.B) {
- expr, _ := parser.ParseExpr(c)
+ expr, _ := testParser.ParseExpr(c)
start, end := time.Now().Add(-time.Hour), time.Now()
for b.Loop() {
promql.PreprocessExpr(expr, start, end, 0)

@@ -822,7 +821,7 @@ func BenchmarkParser(b *testing.B) {
b.Run(name, func(b *testing.B) {
b.ReportAllocs()
for b.Loop() {
- parser.ParseExpr(c)
+ testParser.ParseExpr(c)
}
})
}

@@ -23,11 +23,7 @@ import (
)
func TestDurationVisitor(t *testing.T) {
- // Enable experimental duration expression parsing.
- parser.ExperimentalDurationExpr = true
- t.Cleanup(func() {
- parser.ExperimentalDurationExpr = false
- })
+ p := parser.NewParser(parser.Options{ExperimentalDurationExpr: true})
complexExpr := `sum_over_time(
rate(metric[5m] offset 1h)[10m:30s] offset 2h
) +

@@ -38,7 +34,7 @@ func TestDurationVisitor(t *testing.T) {
metric[2h * 0.5]
)`
- expr, err := parser.ParseExpr(complexExpr)
+ expr, err := p.ParseExpr(complexExpr)
require.NoError(t, err)
err = parser.Walk(&durationVisitor{}, expr, nil)

@@ -335,6 +335,9 @@ type EngineOpts struct {
// FeatureRegistry is the registry for tracking enabled/disabled features.
FeatureRegistry features.Collector
+ // Parser is the PromQL parser instance used for parsing expressions.
+ Parser parser.Parser
}
// Engine handles the lifetime of queries from beginning to end.

@@ -354,6 +357,7 @@ type Engine struct {
enablePerStepStats bool
enableDelayedNameRemoval bool
enableTypeAndUnitLabels bool
+ parser parser.Parser
}
// NewEngine returns a new engine.

@@ -432,6 +436,10 @@ func NewEngine(opts EngineOpts) *Engine {
metrics.maxConcurrentQueries.Set(-1)
}
+ if opts.Parser == nil {
+ opts.Parser = parser.NewParser(parser.Options{})
+ }
if opts.LookbackDelta == 0 {
opts.LookbackDelta = defaultLookbackDelta
if l := opts.Logger; l != nil {

@@ -460,7 +468,9 @@ func NewEngine(opts EngineOpts) *Engine {
r.Enable(features.PromQL, "per_query_lookback_delta")
r.Enable(features.PromQL, "subqueries")
- parser.RegisterFeatures(r)
+ if opts.Parser != nil {
+ opts.Parser.RegisterFeatures(r)
+ }
}
return &Engine{

@@ -476,6 +486,7 @@ func NewEngine(opts EngineOpts) *Engine {
enablePerStepStats: opts.EnablePerStepStats,
enableDelayedNameRemoval: opts.EnableDelayedNameRemoval,
enableTypeAndUnitLabels: opts.EnableTypeAndUnitLabels,
+ parser: opts.Parser,
}
}

@@ -524,7 +535,7 @@ func (ng *Engine) NewInstantQuery(ctx context.Context, q storage.Queryable, opts
return nil, err
}
defer finishQueue()
- expr, err := parser.ParseExpr(qs)
+ expr, err := ng.parser.ParseExpr(qs)
if err != nil {
return nil, err
}

@@ -545,7 +556,7 @@ func (ng *Engine) NewRangeQuery(ctx context.Context, q storage.Queryable, opts Q
return nil, err
}
defer finishQueue()
- expr, err := parser.ParseExpr(qs)
+ expr, err := ng.parser.ParseExpr(qs)
if err != nil {
return nil, err
}

@@ -27,12 +27,14 @@ import (
"github.com/prometheus/prometheus/util/annotations"
)
+ var testParser = parser.NewParser(parser.Options{})
func TestRecoverEvaluatorRuntime(t *testing.T) {
var output bytes.Buffer
logger := promslog.New(&promslog.Config{Writer: &output})
ev := &evaluator{logger: logger}
- expr, _ := parser.ParseExpr("sum(up)")
+ expr, _ := testParser.ParseExpr("sum(up)")
var err error

@@ -52,8 +52,6 @@ const (
)
func TestMain(m *testing.M) {
- // Enable experimental functions testing
- parser.EnableExperimentalFunctions = true
testutil.TolerantVerifyLeak(m)
}

@@ -1508,11 +1506,6 @@ load 10s
}
func TestExtendedRangeSelectors(t *testing.T) {
- parser.EnableExtendedRangeSelectors = true
- t.Cleanup(func() {
- parser.EnableExtendedRangeSelectors = false
- })
engine := newTestEngine(t)
storage := promqltest.LoadedStorage(t, `
load 10s

@@ -1660,6 +1653,40 @@ func TestExtendedRangeSelectors(t *testing.T) {
}
}
+ // TestParserConfigIsolation ensures the engine's parser configuration is respected.
+ func TestParserConfigIsolation(t *testing.T) {
+ ctx := context.Background()
+ storage := promqltest.LoadedStorage(t, `
+ load 10s
+ metric 1+1x10
+ `)
+ t.Cleanup(func() { storage.Close() })
+ query := "metric[10s] smoothed"
+ t.Run("engine_with_feature_disabled_rejects", func(t *testing.T) {
+ engine := promql.NewEngine(promql.EngineOpts{
+ MaxSamples: 1000, Timeout: 10 * time.Second,
+ Parser: parser.NewParser(parser.Options{EnableExtendedRangeSelectors: false}),
+ })
+ t.Cleanup(func() { _ = engine.Close() })
+ _, err := engine.NewInstantQuery(ctx, storage, nil, query, time.Unix(10, 0))
+ require.Error(t, err)
+ require.Contains(t, err.Error(), "parse")
+ })
+ t.Run("engine_with_feature_enabled_accepts", func(t *testing.T) {
+ engine := promql.NewEngine(promql.EngineOpts{
+ MaxSamples: 1000, Timeout: 10 * time.Second,
+ Parser: parser.NewParser(parser.Options{EnableExtendedRangeSelectors: true}),
+ })
+ t.Cleanup(func() { _ = engine.Close() })
+ q, err := engine.NewInstantQuery(ctx, storage, nil, query, time.Unix(10, 0))
+ require.NoError(t, err)
+ defer q.Close()
+ res := q.Exec(ctx)
+ require.NoError(t, res.Err)
+ })
+ }
func TestAtModifier(t *testing.T) {
engine := newTestEngine(t)
storage := promqltest.LoadedStorage(t, `

@@ -3233,7 +3260,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) {
for _, test := range testCases {
t.Run(test.input, func(t *testing.T) {
- expr, err := parser.ParseExpr(test.input)
+ expr, err := testParser.ParseExpr(test.input)
require.NoError(t, err)
expr, err = promql.PreprocessExpr(expr, startTime, endTime, 0)
require.NoError(t, err)

@@ -3842,6 +3869,7 @@ func TestEvaluationWithDelayedNameRemovalDisabled(t *testing.T) {
MaxSamples: 10000,
Timeout: 10 * time.Second,
EnableDelayedNameRemoval: false,
+ Parser: parser.NewParser(promqltest.TestParserOpts),
}
engine := promqltest.NewTestEngineWithOpts(t, opts)

@@ -60,6 +60,8 @@ const (
// Use package-scope symbol table to avoid memory allocation on every fuzzing operation.
var symbolTable = labels.NewSymbolTable()
+ var fuzzParser = parser.NewParser(parser.Options{})
func fuzzParseMetricWithContentType(in []byte, contentType string) int {
p, warning := textparse.New(in, contentType, symbolTable, textparse.ParserOptions{})
if p == nil || warning != nil {

@@ -103,7 +105,7 @@ func FuzzParseMetricSelector(in []byte) int {
if len(in) > maxInputSize {
return fuzzMeh
}
- _, err := parser.ParseMetricSelector(string(in))
+ _, err := fuzzParser.ParseMetricSelector(string(in))
if err == nil {
return fuzzInteresting
}

@@ -116,7 +118,7 @@ func FuzzParseExpr(in []byte) int {
if len(in) > maxInputSize {
return fuzzMeh
}
- _, err := parser.ParseExpr(string(in))
+ _, err := fuzzParser.ParseExpr(string(in))
if err == nil {
return fuzzInteresting
}

@@ -18,16 +18,15 @@ import "github.com/prometheus/prometheus/util/features"
// RegisterFeatures registers all PromQL features with the feature registry.
// This includes operators (arithmetic and comparison/set), aggregators (standard
// and experimental), and functions.
- func RegisterFeatures(r features.Collector) {
+ func (pql *promQLParser) RegisterFeatures(r features.Collector) {
// Register core PromQL language keywords.
for keyword, itemType := range key {
if itemType.IsKeyword() {
// Handle experimental keywords separately.
switch keyword {
case "anchored", "smoothed":
- r.Set(features.PromQL, keyword, EnableExtendedRangeSelectors)
+ r.Set(features.PromQL, keyword, pql.options.EnableExtendedRangeSelectors)
case "fill", "fill_left", "fill_right":
- r.Set(features.PromQL, keyword, EnableBinopFillModifiers)
+ r.Set(features.PromQL, keyword, pql.options.EnableBinopFillModifiers)
default:
r.Enable(features.PromQL, keyword)
}

@@ -44,16 +43,16 @@ func RegisterFeatures(r features.Collector) {
// Register aggregators.
for a := ItemType(aggregatorsStart + 1); a < aggregatorsEnd; a++ {
if a.IsAggregator() {
- experimental := a.IsExperimentalAggregator() && !EnableExperimentalFunctions
+ experimental := a.IsExperimentalAggregator() && !pql.options.EnableExperimentalFunctions
r.Set(features.PromQLOperators, a.String(), !experimental)
}
}
// Register functions.
for f, fc := range Functions {
- r.Set(features.PromQLFunctions, f, !fc.Experimental || EnableExperimentalFunctions)
+ r.Set(features.PromQLFunctions, f, !fc.Experimental || pql.options.EnableExperimentalFunctions)
}
// Register experimental parser features.
- r.Set(features.PromQL, "duration_expr", ExperimentalDurationExpr)
+ r.Set(features.PromQL, "duration_expr", pql.options.ExperimentalDurationExpr)
}

@@ -23,9 +23,6 @@ type Function struct {
Experimental bool
}
- // EnableExperimentalFunctions controls whether experimentalFunctions are enabled.
- var EnableExperimentalFunctions bool
// Functions is a list of all functions supported by PromQL, including their types.
var Functions = map[string]*Function{
"abs": {

@@ -456,7 +456,7 @@ function_call : IDENTIFIER function_call_body
if !exist{
yylex.(*parser).addParseErrf($1.PositionRange(),"unknown function with name %q", $1.Val)
}
- if fn != nil && fn.Experimental && !EnableExperimentalFunctions {
+ if fn != nil && fn.Experimental && !yylex.(*parser).options.EnableExperimentalFunctions {
yylex.(*parser).addParseErrf($1.PositionRange(),"function %q is not enabled", $1.Val)
}
$$ = &Call{

@@ -1459,7 +1459,7 @@ yydefault:
if !exist {
yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "unknown function with name %q", yyDollar[1].item.Val)
}
- if fn != nil && fn.Experimental && !EnableExperimentalFunctions {
+ if fn != nil && fn.Experimental && !yylex.(*parser).options.EnableExperimentalFunctions {
yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "function %q is not enabled", yyDollar[1].item.Val)
}
yyVAL.node = &Call{

@ -30,6 +30,7 @@ import (
|
|||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/model/timestamp"
|
||||
"github.com/prometheus/prometheus/promql/parser/posrange"
|
||||
"github.com/prometheus/prometheus/util/features"
|
||||
"github.com/prometheus/prometheus/util/strutil"
|
||||
)
|
||||
|
||||
|
|
@ -39,18 +40,104 @@ var parserPool = sync.Pool{
|
|||
},
|
||||
}
|
||||
|
||||
// ExperimentalDurationExpr is a flag to enable experimental duration expression parsing.
|
||||
var ExperimentalDurationExpr bool
|
||||
|
||||
// EnableExtendedRangeSelectors is a flag to enable experimental extended range selectors.
|
||||
var EnableExtendedRangeSelectors bool
|
||||
|
||||
// EnableBinopFillModifiers is a flag to enable experimental fill modifiers for binary operators.
|
||||
var EnableBinopFillModifiers bool
|
||||
// Options holds the configuration for the PromQL parser.
|
||||
type Options struct {
|
||||
EnableExperimentalFunctions bool
|
||||
ExperimentalDurationExpr bool
|
||||
EnableExtendedRangeSelectors bool
|
||||
EnableBinopFillModifiers bool
|
||||
}
|
||||
|
||||
// Parser provides PromQL parsing methods. Create one with NewParser.
|
||||
type Parser interface {
|
||||
ParseExpr() (Expr, error)
|
||||
Close()
|
||||
ParseExpr(input string) (Expr, error)
|
||||
ParseMetric(input string) (labels.Labels, error)
|
||||
ParseMetricSelector(input string) ([]*labels.Matcher, error)
|
||||
ParseMetricSelectors(matchers []string) ([][]*labels.Matcher, error)
|
||||
ParseSeriesDesc(input string) (labels.Labels, []SequenceValue, error)
|
||||
RegisterFeatures(r features.Collector)
|
||||
}
|
||||
|
||||
type promQLParser struct {
|
||||
options Options
|
||||
}
|
||||
|
||||
// NewParser returns a new PromQL Parser configured with the given options.
|
||||
func NewParser(opts Options) Parser {
|
||||
return &promQLParser{options: opts}
|
||||
}
|
||||
|
||||
func (pql *promQLParser) ParseExpr(input string) (Expr, error) {
|
||||
p := newParser(input, pql.options)
|
||||
defer p.Close()
|
||||
return p.parseExpr()
|
||||
}
|
||||
|
||||
func (pql *promQLParser) ParseMetric(input string) (m labels.Labels, err error) {
|
||||
p := newParser(input, pql.options)
|
||||
defer p.Close()
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_METRIC)
|
||||
if parseResult != nil {
|
||||
m = parseResult.(labels.Labels)
|
||||
}
|
||||
|
||||
if len(p.parseErrors) != 0 {
|
||||
err = p.parseErrors
|
||||
}
|
||||
|
||||
return m, err
|
||||
}
|
||||
|
||||
func (pql *promQLParser) ParseMetricSelector(input string) (m []*labels.Matcher, err error) {
|
||||
p := newParser(input, pql.options)
|
||||
defer p.Close()
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_METRIC_SELECTOR)
|
||||
if parseResult != nil {
|
||||
m = parseResult.(*VectorSelector).LabelMatchers
|
||||
}
|
||||
|
||||
if len(p.parseErrors) != 0 {
|
||||
err = p.parseErrors
|
||||
}
|
||||
|
||||
return m, err
|
||||
}
|
||||
|
||||
func (pql *promQLParser) ParseMetricSelectors(matchers []string) ([][]*labels.Matcher, error) {
|
||||
var matcherSets [][]*labels.Matcher
|
||||
for _, s := range matchers {
|
||||
ms, err := pql.ParseMetricSelector(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matcherSets = append(matcherSets, ms)
|
||||
}
|
||||
return matcherSets, nil
|
||||
}
|
||||
|
||||
func (pql *promQLParser) ParseSeriesDesc(input string) (lbls labels.Labels, values []SequenceValue, err error) {
|
||||
p := newParser(input, pql.options)
|
||||
p.lex.seriesDesc = true
|
||||
|
||||
defer p.Close()
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_SERIES_DESCRIPTION)
|
||||
if parseResult != nil {
|
||||
result := parseResult.(*seriesDescription)
|
||||
lbls = result.labels
|
||||
values = result.values
|
||||
}
|
||||
|
||||
if len(p.parseErrors) != 0 {
|
||||
err = p.parseErrors
|
||||
}
|
||||
|
||||
return lbls, values, err
|
||||
}
|
||||
|
||||
type parser struct {
|
||||
|
|
@ -75,18 +162,12 @@ type parser struct {
|
|||
// built histogram had a counter_reset_hint explicitly specified.
|
||||
// This is used to populate CounterResetHintSet in SequenceValue.
|
||||
lastHistogramCounterResetHintSet bool
|
||||
|
||||
options Options
|
||||
}
|
||||
|
||||
type Opt func(p *parser)
|
||||
|
||||
func WithFunctions(functions map[string]*Function) Opt {
|
||||
return func(p *parser) {
|
||||
p.functions = functions
|
||||
}
|
||||
}
|
||||
|
||||
// NewParser returns a new parser.
|
||||
func NewParser(input string, opts ...Opt) *parser { //nolint:revive // unexported-return
|
||||
// newParser returns a new low-level parser instance from the pool.
|
||||
func newParser(input string, opts Options) *parser {
|
||||
p := parserPool.Get().(*parser)
|
||||
|
||||
p.functions = Functions
|
||||
|
|
@ -94,6 +175,7 @@ func NewParser(input string, opts ...Opt) *parser { //nolint:revive // unexporte
|
|||
p.parseErrors = nil
|
||||
p.generatedParserResult = nil
|
||||
p.lastClosing = posrange.Pos(0)
|
||||
p.options = opts
|
||||
|
||||
// Clear lexer struct before reusing.
|
||||
p.lex = Lexer{
|
||||
|
|
@ -101,15 +183,17 @@ func NewParser(input string, opts ...Opt) *parser { //nolint:revive // unexporte
|
|||
state: lexStatements,
|
||||
}
|
||||
|
||||
// Apply user define options.
|
||||
for _, opt := range opts {
|
||||
opt(p)
|
||||
}
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
func (p *parser) ParseExpr() (expr Expr, err error) {
|
||||
// newParserWithFunctions returns a new low-level parser instance with custom functions.
|
||||
func newParserWithFunctions(input string, opts Options, functions map[string]*Function) *parser {
|
||||
p := newParser(input, opts)
|
||||
p.functions = functions
|
||||
return p
|
||||
}
|
||||
|
||||
func (p *parser) parseExpr() (expr Expr, err error) {
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_EXPRESSION)
|
||||
|
|
@ -179,64 +263,6 @@ func EnrichParseError(err error, enrich func(parseErr *ParseErr)) {
|
|||
}
|
||||
}
|
||||
|
||||
// ParseExpr returns the expression parsed from the input.
|
||||
func ParseExpr(input string) (expr Expr, err error) {
|
||||
p := NewParser(input)
|
||||
defer p.Close()
|
||||
return p.ParseExpr()
|
||||
}
|
||||
|
||||
// ParseMetric parses the input into a metric.
|
||||
func ParseMetric(input string) (m labels.Labels, err error) {
|
||||
p := NewParser(input)
|
||||
defer p.Close()
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_METRIC)
|
||||
if parseResult != nil {
|
||||
m = parseResult.(labels.Labels)
|
||||
}
|
||||
|
||||
if len(p.parseErrors) != 0 {
|
||||
err = p.parseErrors
|
||||
}
|
||||
|
||||
return m, err
|
||||
}
|
||||
|
||||
// ParseMetricSelector parses the provided textual metric selector into a list of
|
||||
// label matchers.
|
||||
func ParseMetricSelector(input string) (m []*labels.Matcher, err error) {
|
||||
p := NewParser(input)
|
||||
defer p.Close()
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_METRIC_SELECTOR)
|
||||
if parseResult != nil {
|
||||
m = parseResult.(*VectorSelector).LabelMatchers
|
||||
}
|
||||
|
||||
if len(p.parseErrors) != 0 {
|
||||
err = p.parseErrors
|
||||
}
|
||||
|
||||
return m, err
|
||||
}
|
||||
|
||||
// ParseMetricSelectors parses a list of provided textual metric selectors into lists of
|
||||
// label matchers.
|
||||
func ParseMetricSelectors(matchers []string) (m [][]*labels.Matcher, err error) {
|
||||
var matcherSets [][]*labels.Matcher
|
||||
for _, s := range matchers {
|
||||
matchers, err := ParseMetricSelector(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matcherSets = append(matcherSets, matchers)
|
||||
}
|
||||
return matcherSets, nil
|
||||
}
|
||||
|
||||
// SequenceValue is an omittable value in a sequence of time series values.
|
||||
type SequenceValue struct {
|
||||
Value float64
|
||||
|
|
@ -264,30 +290,6 @@ type seriesDescription struct {
|
|||
values []SequenceValue
|
||||
}
|
||||
|
||||
// ParseSeriesDesc parses the description of a time series. It is only used in
|
||||
// the PromQL testing framework code.
|
||||
func ParseSeriesDesc(input string) (labels labels.Labels, values []SequenceValue, err error) {
|
||||
p := NewParser(input)
|
||||
p.lex.seriesDesc = true
|
||||
|
||||
defer p.Close()
|
||||
defer p.recover(&err)
|
||||
|
||||
parseResult := p.parseGenerated(START_SERIES_DESCRIPTION)
|
||||
if parseResult != nil {
|
||||
result := parseResult.(*seriesDescription)
|
||||
|
||||
labels = result.labels
|
||||
values = result.values
|
||||
}
|
||||
|
||||
if len(p.parseErrors) != 0 {
|
||||
err = p.parseErrors
|
||||
}
|
||||
|
||||
return labels, values, err
|
||||
}
|
||||
|
||||
// addParseErrf formats the error and appends it to the list of parsing errors.
|
||||
func (p *parser) addParseErrf(positionRange posrange.PositionRange, format string, args ...any) {
|
||||
p.addParseErr(positionRange, fmt.Errorf(format, args...))
|
||||
|
|
@ -433,7 +435,7 @@ func (p *parser) newBinaryExpression(lhs Node, op Item, modifiers, rhs Node) *Bi
|
|||
ret.RHS = rhs.(Expr)
|
||||
ret.Op = op.Typ
|
||||
|
||||
if !EnableBinopFillModifiers && (ret.VectorMatching.FillValues.LHS != nil || ret.VectorMatching.FillValues.RHS != nil) {
|
||||
if !p.options.EnableBinopFillModifiers && (ret.VectorMatching.FillValues.LHS != nil || ret.VectorMatching.FillValues.RHS != nil) {
|
||||
p.addParseErrf(ret.PositionRange(), "binop fill modifiers are experimental and not enabled")
|
||||
return ret
|
||||
}
|
||||
|
|
@ -476,7 +478,7 @@ func (p *parser) newAggregateExpr(op Item, modifier, args Node, overread bool) (
|
|||
|
||||
desiredArgs := 1
|
||||
if ret.Op.IsAggregatorWithParam() {
|
||||
if !EnableExperimentalFunctions && ret.Op.IsExperimentalAggregator() {
|
||||
if !p.options.EnableExperimentalFunctions && ret.Op.IsExperimentalAggregator() {
|
||||
p.addParseErrf(ret.PositionRange(), "%s() is experimental and must be enabled with --enable-feature=promql-experimental-functions", ret.Op)
|
||||
return ret
|
||||
}
|
||||
|
|
@ -1073,7 +1075,7 @@ func (p *parser) addOffsetExpr(e Node, expr *DurationExpr) {
|
|||
}
|
||||
|
||||
func (p *parser) setAnchored(e Node) {
|
||||
if !EnableExtendedRangeSelectors {
|
||||
if !p.options.EnableExtendedRangeSelectors {
|
||||
p.addParseErrf(e.PositionRange(), "anchored modifier is experimental and not enabled")
|
||||
return
|
||||
}
|
||||
|
|
@ -1096,7 +1098,7 @@ func (p *parser) setAnchored(e Node) {
|
|||
}
|
||||
|
||||
func (p *parser) setSmoothed(e Node) {
|
||||
if !EnableExtendedRangeSelectors {
|
||||
if !p.options.EnableExtendedRangeSelectors {
|
||||
p.addParseErrf(e.PositionRange(), "smoothed modifier is experimental and not enabled")
|
||||
return
|
||||
}
|
||||
|
|
@ -1192,7 +1194,7 @@ func (p *parser) getAtModifierVars(e Node) (**int64, *ItemType, *posrange.Pos, b
|
|||
}
|
||||
|
||||
func (p *parser) experimentalDurationExpr(e Expr) {
|
||||
if !ExperimentalDurationExpr {
|
||||
if !p.options.ExperimentalDurationExpr {
|
||||
p.addParseErrf(e.PositionRange(), "experimental duration expression is not enabled")
|
||||
}
|
||||
}
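The net effect of these checks is that feature gating now follows the parser instance rather than process-global flags. A hedged sketch of the difference in behavior; the duration-arithmetic syntax inside the range selector is an assumption about the experimental feature, not something this hunk defines.

strict := parser.NewParser(parser.Options{})
relaxed := parser.NewParser(parser.Options{ExperimentalDurationExpr: true})

const q = `rate(http_requests_total[30s + 30s])` // duration arithmetic inside the range selector

if _, err := strict.ParseExpr(q); err != nil {
	fmt.Println("rejected without the option:", err)
}
if _, err := relaxed.ParseExpr(q); err == nil {
	fmt.Println("accepted once ExperimentalDurationExpr is set")
}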
|
||||
|
|
|
|||
|
|
@ -31,6 +31,8 @@ import (
|
|||
"github.com/prometheus/prometheus/util/testutil"
|
||||
)
|
||||
|
||||
var testParser = NewParser(Options{})
|
||||
|
||||
func repeatError(query string, err error, start, startStep, end, endStep, count int) (errs ParseErrors) {
|
||||
for i := range count {
|
||||
errs = append(errs, ParseErr{
|
||||
|
|
@ -5297,18 +5299,14 @@ func readable(s string) string {
|
|||
}
|
||||
|
||||
func TestParseExpressions(t *testing.T) {
|
||||
// Enable experimental functions testing.
|
||||
EnableExperimentalFunctions = true
|
||||
// Enable experimental duration expression parsing.
|
||||
ExperimentalDurationExpr = true
|
||||
t.Cleanup(func() {
|
||||
EnableExperimentalFunctions = false
|
||||
ExperimentalDurationExpr = false
|
||||
optsParser := NewParser(Options{
|
||||
EnableExperimentalFunctions: true,
|
||||
ExperimentalDurationExpr: true,
|
||||
})
|
||||
|
||||
for _, test := range testExpr {
|
||||
t.Run(readable(test.input), func(t *testing.T) {
|
||||
expr, err := ParseExpr(test.input)
|
||||
expr, err := optsParser.ParseExpr(test.input)
|
||||
|
||||
// Unexpected errors are always caused by a bug.
|
||||
require.NotEqual(t, err, errUnexpected, "unexpected error occurred")
|
||||
|
|
@ -5436,7 +5434,7 @@ func TestParseSeriesDesc(t *testing.T) {
|
|||
|
||||
for _, tc := range tests {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
l, v, err := ParseSeriesDesc(tc.input)
|
||||
l, v, err := testParser.ParseSeriesDesc(tc.input)
|
||||
if tc.expectError != "" {
|
||||
require.Contains(t, err.Error(), tc.expectError)
|
||||
} else {
|
||||
|
|
@ -5450,7 +5448,7 @@ func TestParseSeriesDesc(t *testing.T) {
|
|||
|
||||
// NaN has no equality. Thus, we need a separate test for it.
|
||||
func TestNaNExpression(t *testing.T) {
|
||||
expr, err := ParseExpr("NaN")
|
||||
expr, err := testParser.ParseExpr("NaN")
|
||||
require.NoError(t, err)
|
||||
|
||||
nl, ok := expr.(*NumberLiteral)
|
||||
|
|
@ -5878,7 +5876,7 @@ func TestParseHistogramSeries(t *testing.T) {
|
|||
},
|
||||
} {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
_, vals, err := ParseSeriesDesc(test.input)
|
||||
_, vals, err := testParser.ParseSeriesDesc(test.input)
|
||||
if test.expectedError != "" {
|
||||
require.EqualError(t, err, test.expectedError)
|
||||
return
|
||||
|
|
@ -5950,7 +5948,7 @@ func TestHistogramTestExpression(t *testing.T) {
|
|||
t.Run(test.name, func(t *testing.T) {
|
||||
expression := test.input.TestExpression()
|
||||
require.Equal(t, test.expected, expression)
|
||||
_, vals, err := ParseSeriesDesc("{} " + expression)
|
||||
_, vals, err := testParser.ParseSeriesDesc("{} " + expression)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, vals, 1)
|
||||
canonical := vals[0].Histogram
|
||||
|
|
@ -5962,7 +5960,7 @@ func TestHistogramTestExpression(t *testing.T) {
|
|||
|
||||
func TestParseSeries(t *testing.T) {
|
||||
for _, test := range testSeries {
|
||||
metric, vals, err := ParseSeriesDesc(test.input)
|
||||
metric, vals, err := testParser.ParseSeriesDesc(test.input)
|
||||
|
||||
// Unexpected errors are always caused by a bug.
|
||||
require.NotEqual(t, err, errUnexpected, "unexpected error occurred")
|
||||
|
|
@ -5978,7 +5976,7 @@ func TestParseSeries(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestRecoverParserRuntime(t *testing.T) {
|
||||
p := NewParser("foo bar")
|
||||
p := newParser("foo bar", Options{})
|
||||
var err error
|
||||
|
||||
defer func() {
|
||||
|
|
@ -5991,7 +5989,7 @@ func TestRecoverParserRuntime(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestRecoverParserError(t *testing.T) {
|
||||
p := NewParser("foo bar")
|
||||
p := newParser("foo bar", Options{})
|
||||
var err error
|
||||
|
||||
e := errors.New("custom error")
|
||||
|
|
@ -6026,12 +6024,12 @@ func TestExtractSelectors(t *testing.T) {
|
|||
[]string{},
|
||||
},
|
||||
} {
|
||||
expr, err := ParseExpr(tc.input)
|
||||
expr, err := testParser.ParseExpr(tc.input)
|
||||
require.NoError(t, err)
|
||||
|
||||
var expected [][]*labels.Matcher
|
||||
for _, s := range tc.expected {
|
||||
selector, err := ParseMetricSelector(s)
|
||||
selector, err := testParser.ParseMetricSelector(s)
|
||||
require.NoError(t, err)
|
||||
expected = append(expected, selector)
|
||||
}
|
||||
|
|
@ -6048,11 +6046,37 @@ func TestParseCustomFunctions(t *testing.T) {
|
|||
ReturnType: ValueTypeVector,
|
||||
}
|
||||
input := "custom_func(metric[1m])"
|
||||
p := NewParser(input, WithFunctions(funcs))
|
||||
expr, err := p.ParseExpr()
|
||||
p := newParserWithFunctions(input, Options{}, funcs)
|
||||
expr, err := p.parseExpr()
|
||||
require.NoError(t, err)
|
||||
|
||||
call, ok := expr.(*Call)
|
||||
require.True(t, ok)
|
||||
require.Equal(t, "custom_func", call.Func.Name)
|
||||
}
|
||||
|
||||
func TestNewParser(t *testing.T) {
|
||||
p := NewParser(Options{
|
||||
EnableExperimentalFunctions: true,
|
||||
ExperimentalDurationExpr: true,
|
||||
})
|
||||
|
||||
// ParseExpr should work.
|
||||
expr, err := p.ParseExpr("up")
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, expr)
|
||||
|
||||
// ParseMetricSelector should work.
|
||||
matchers, err := p.ParseMetricSelector(`{job="prometheus"}`)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, matchers, 1)
|
||||
|
||||
// ParseMetricSelectors should work.
|
||||
matcherSets, err := p.ParseMetricSelectors([]string{`{job="prometheus"}`, `{job="grafana"}`})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, matcherSets, 2)
|
||||
|
||||
// Invalid input should return errors.
|
||||
_, err = p.ParseExpr("===")
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -114,7 +114,7 @@ task:errors:rate10s{job="s"}))`,
|
|||
},
|
||||
}
|
||||
for _, test := range inputs {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
|
|
@ -185,7 +185,7 @@ func TestBinaryExprPretty(t *testing.T) {
|
|||
}
|
||||
for _, test := range inputs {
|
||||
t.Run(test.in, func(t *testing.T) {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
|
|
@ -261,7 +261,7 @@ func TestCallExprPretty(t *testing.T) {
|
|||
},
|
||||
}
|
||||
for _, test := range inputs {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
|
||||
fmt.Println("=>", expr.String())
|
||||
|
|
@ -308,7 +308,7 @@ func TestParenExprPretty(t *testing.T) {
|
|||
},
|
||||
}
|
||||
for _, test := range inputs {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
|
|
@ -334,7 +334,7 @@ func TestStepInvariantExpr(t *testing.T) {
|
|||
},
|
||||
}
|
||||
for _, test := range inputs {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
|
|
@ -594,7 +594,7 @@ or
|
|||
},
|
||||
}
|
||||
for _, test := range inputs {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
}
|
||||
|
|
@ -662,7 +662,7 @@ func TestUnaryPretty(t *testing.T) {
|
|||
}
|
||||
for _, test := range inputs {
|
||||
t.Run(test.in, func(t *testing.T) {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := testParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
})
|
||||
|
|
@ -670,11 +670,7 @@ func TestUnaryPretty(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestDurationExprPretty(t *testing.T) {
|
||||
// Enable experimental duration expression parsing.
|
||||
ExperimentalDurationExpr = true
|
||||
t.Cleanup(func() {
|
||||
ExperimentalDurationExpr = false
|
||||
})
|
||||
optsParser := NewParser(Options{ExperimentalDurationExpr: true})
|
||||
maxCharactersPerLine = 10
|
||||
inputs := []struct {
|
||||
in, out string
|
||||
|
|
@ -700,7 +696,7 @@ func TestDurationExprPretty(t *testing.T) {
|
|||
}
|
||||
for _, test := range inputs {
|
||||
t.Run(test.in, func(t *testing.T) {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := optsParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, test.out, Prettify(expr))
|
||||
})
|
||||
|
|
|
|||
|
|
@ -22,11 +22,10 @@ import (
|
|||
)
|
||||
|
||||
func TestExprString(t *testing.T) {
|
||||
ExperimentalDurationExpr = true
|
||||
EnableBinopFillModifiers = true
|
||||
t.Cleanup(func() {
|
||||
ExperimentalDurationExpr = false
|
||||
EnableBinopFillModifiers = false
|
||||
optsParser := NewParser(Options{
|
||||
ExperimentalDurationExpr: true,
|
||||
EnableExtendedRangeSelectors: true,
|
||||
EnableBinopFillModifiers: true,
|
||||
})
|
||||
// A list of valid expressions that are expected to be
|
||||
// returned as out when calling String(). If out is empty the output
|
||||
|
|
@ -320,14 +319,9 @@ func TestExprString(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
EnableExtendedRangeSelectors = true
|
||||
t.Cleanup(func() {
|
||||
EnableExtendedRangeSelectors = false
|
||||
})
|
||||
|
||||
for _, test := range inputs {
|
||||
t.Run(test.in, func(t *testing.T) {
|
||||
expr, err := ParseExpr(test.in)
|
||||
expr, err := optsParser.ParseExpr(test.in)
|
||||
require.NoError(t, err)
|
||||
|
||||
exp := test.in
|
||||
|
|
@ -352,7 +346,7 @@ func BenchmarkExprString(b *testing.B) {
|
|||
|
||||
for _, test := range inputs {
|
||||
b.Run(readable(test), func(b *testing.B) {
|
||||
expr, err := ParseExpr(test)
|
||||
expr, err := testParser.ParseExpr(test)
|
||||
require.NoError(b, err)
|
||||
for b.Loop() {
|
||||
_ = expr.String()
|
||||
|
|
@ -484,7 +478,7 @@ func TestBinaryExprUTF8Labels(t *testing.T) {
|
|||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
expr, err := ParseExpr(tc.input)
|
||||
expr, err := testParser.ParseExpr(tc.input)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -45,14 +45,11 @@ func TestConcurrentRangeQueries(t *testing.T) {
|
|||
Reg: nil,
|
||||
MaxSamples: 50000000,
|
||||
Timeout: 100 * time.Second,
|
||||
Parser: parser.NewParser(parser.Options{
|
||||
EnableExperimentalFunctions: true,
|
||||
EnableExtendedRangeSelectors: true,
|
||||
}),
|
||||
}
|
||||
// Enable experimental functions testing
|
||||
parser.EnableExperimentalFunctions = true
|
||||
parser.EnableExtendedRangeSelectors = true
|
||||
t.Cleanup(func() {
|
||||
parser.EnableExperimentalFunctions = false
|
||||
parser.EnableExtendedRangeSelectors = false
|
||||
})
|
||||
engine := promqltest.NewTestEngineWithOpts(t, opts)
|
||||
|
||||
const interval = 10000 // 10s interval.
|
||||
|
|
|
|||
|
|
@ -86,6 +86,14 @@ func LoadedStorage(t testing.TB, input string) *teststorage.TestStorage {
|
|||
return test.storage.(*teststorage.TestStorage)
|
||||
}
|
||||
|
||||
// TestParserOpts are the parser options used for all built-in test engines.
|
||||
var TestParserOpts = parser.Options{
|
||||
EnableExperimentalFunctions: true,
|
||||
ExperimentalDurationExpr: true,
|
||||
EnableExtendedRangeSelectors: true,
|
||||
EnableBinopFillModifiers: true,
|
||||
}
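Downstream test code can reuse these options when it builds its own engine, so the engine and any direct parsing share one feature set. A minimal sketch; MaxSamples and Timeout values are arbitrary placeholders.

engine := promqltest.NewTestEngineWithOpts(t, promql.EngineOpts{
	MaxSamples: 10000,
	Timeout:    10 * time.Second,
	Parser:     parser.NewParser(promqltest.TestParserOpts),
})
promqltest.RunBuiltinTests(t, engine)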
|
||||
|
||||
// NewTestEngine creates a promql.Engine with enablePerStepStats, lookbackDelta and maxSamples, and returns it.
|
||||
func NewTestEngine(tb testing.TB, enablePerStepStats bool, lookbackDelta time.Duration, maxSamples int) *promql.Engine {
|
||||
return NewTestEngineWithOpts(tb, promql.EngineOpts{
|
||||
|
|
@ -99,6 +107,7 @@ func NewTestEngine(tb testing.TB, enablePerStepStats bool, lookbackDelta time.Du
|
|||
EnablePerStepStats: enablePerStepStats,
|
||||
LookbackDelta: lookbackDelta,
|
||||
EnableDelayedNameRemoval: true,
|
||||
Parser: parser.NewParser(TestParserOpts),
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -151,18 +160,8 @@ func RunBuiltinTests(t TBRun, engine promql.QueryEngine) {
|
|||
}
|
||||
|
||||
// RunBuiltinTestsWithStorage runs an acceptance test suite against the provided engine and storage.
|
||||
// The engine must be created with a Parser whose parser.Options enable all experimental features used in the test files (see TestParserOpts).
|
||||
func RunBuiltinTestsWithStorage(t TBRun, engine promql.QueryEngine, newStorage func(testing.TB) storage.Storage) {
|
||||
t.Cleanup(func() {
|
||||
parser.EnableExperimentalFunctions = false
|
||||
parser.ExperimentalDurationExpr = false
|
||||
parser.EnableExtendedRangeSelectors = false
|
||||
parser.EnableBinopFillModifiers = false
|
||||
})
|
||||
parser.EnableExperimentalFunctions = true
|
||||
parser.ExperimentalDurationExpr = true
|
||||
parser.EnableExtendedRangeSelectors = true
|
||||
parser.EnableBinopFillModifiers = true
|
||||
|
||||
files, err := fs.Glob(testsFs, "*/*.test")
|
||||
require.NoError(t, err)
|
||||
|
||||
|
|
@ -298,7 +297,8 @@ func parseLoad(lines []string, i int, startTime time.Time) (int, *loadCmd, error
|
|||
}
|
||||
|
||||
func parseSeries(defLine string, line int) (labels.Labels, []parser.SequenceValue, error) {
|
||||
metric, vals, err := parser.ParseSeriesDesc(defLine)
|
||||
testParser := parser.NewParser(TestParserOpts)
|
||||
metric, vals, err := testParser.ParseSeriesDesc(defLine)
|
||||
if err != nil {
|
||||
parser.EnrichParseError(err, func(parseErr *parser.ParseErr) {
|
||||
parseErr.LineOffset = line
|
||||
|
|
@ -427,7 +427,7 @@ func (t *test) parseEval(lines []string, i int) (int, *evalCmd, error) {
|
|||
expr = rangeParts[5]
|
||||
}
|
||||
|
||||
_, err := parser.ParseExpr(expr)
|
||||
_, err := parserForBuiltinTests.ParseExpr(expr)
|
||||
if err != nil {
|
||||
parser.EnrichParseError(err, func(parseErr *parser.ParseErr) {
|
||||
parseErr.LineOffset = i
|
||||
|
|
@ -1363,8 +1363,13 @@ type atModifierTestCase struct {
|
|||
evalTime time.Time
|
||||
}
|
||||
|
||||
// parserForBuiltinTests is the parser used when parsing expressions in the
|
||||
// built-in test framework (e.g. atModifierTestCases). It must match the Parser
|
||||
// used by NewTestEngine so that expressions parse consistently.
|
||||
var parserForBuiltinTests = parser.NewParser(TestParserOpts)
|
||||
|
||||
func atModifierTestCases(exprStr string, evalTime time.Time) ([]atModifierTestCase, error) {
|
||||
expr, err := parser.ParseExpr(exprStr)
|
||||
expr, err := parserForBuiltinTests.ParseExpr(exprStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ func TestAlertingRuleTemplateWithHistogram(t *testing.T) {
|
|||
return []promql.Sample{{H: &h}}, nil
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("foo")
|
||||
expr, err := testParser.ParseExpr("foo")
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -159,7 +159,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 75 85 70 70 stale
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -264,7 +264,7 @@ func TestAlertingRuleExternalLabelsInTemplate(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 75 85 70 70
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
ruleWithoutExternalLabels := NewAlertingRule(
|
||||
|
|
@ -358,7 +358,7 @@ func TestAlertingRuleExternalURLInTemplate(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 75 85 70 70
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
ruleWithoutExternalURL := NewAlertingRule(
|
||||
|
|
@ -452,7 +452,7 @@ func TestAlertingRuleEmptyLabelFromTemplate(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 75 85 70 70
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -507,7 +507,7 @@ func TestAlertingRuleQueryInTemplate(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 70 85 70 70
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`sum(http_requests) < 100`)
|
||||
expr, err := testParser.ParseExpr(`sum(http_requests) < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
ruleWithQueryInTemplate := NewAlertingRule(
|
||||
|
|
@ -592,7 +592,7 @@ func TestAlertingRuleDuplicate(t *testing.T) {
|
|||
|
||||
now := time.Now()
|
||||
|
||||
expr, _ := parser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
|
||||
expr, _ := testParser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
|
||||
rule := NewAlertingRule(
|
||||
"foo",
|
||||
expr,
|
||||
|
|
@ -635,7 +635,7 @@ func TestAlertingRuleLimit(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
expr, _ := parser.ParseExpr(`metric > 0`)
|
||||
expr, _ := testParser.ParseExpr(`metric > 0`)
|
||||
rule := NewAlertingRule(
|
||||
"foo",
|
||||
expr,
|
||||
|
|
@ -758,7 +758,7 @@ func TestSendAlertsDontAffectActiveAlerts(t *testing.T) {
|
|||
al := &Alert{State: StateFiring, Labels: lbls, ActiveAt: time.Now()}
|
||||
rule.active[h] = al
|
||||
|
||||
expr, err := parser.ParseExpr("foo")
|
||||
expr, err := testParser.ParseExpr("foo")
|
||||
require.NoError(t, err)
|
||||
rule.vector = expr
|
||||
|
||||
|
|
@ -799,7 +799,7 @@ func TestKeepFiringFor(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 75 85 70 70 10x5
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests > 50`)
|
||||
expr, err := testParser.ParseExpr(`http_requests > 50`)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -909,7 +909,7 @@ func TestPendingAndKeepFiringFor(t *testing.T) {
|
|||
http_requests{job="app-server", instance="0"} 75 10x10
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests > 50`)
|
||||
expr, err := testParser.ParseExpr(`http_requests > 50`)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -969,7 +969,7 @@ func TestAlertingEvalWithOrigin(t *testing.T) {
|
|||
lbs = labels.FromStrings("test", "test")
|
||||
)
|
||||
|
||||
expr, err := parser.ParseExpr(query)
|
||||
expr, err := testParser.ParseExpr(query)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
|
|||
|
|
@ -138,6 +138,9 @@ type ManagerOptions struct {
|
|||
|
||||
// FeatureRegistry is used to register rule manager features.
|
||||
FeatureRegistry features.Collector
|
||||
|
||||
// Parser is the PromQL parser used for parsing rule expressions.
|
||||
Parser parser.Parser
|
||||
}
|
||||
|
||||
// NewManager returns an implementation of Manager, ready to be started
|
||||
|
|
@ -158,8 +161,12 @@ func NewManager(o *ManagerOptions) *Manager {
|
|||
o.Metrics = NewGroupMetrics(o.Registerer)
|
||||
}
|
||||
|
||||
if o.Parser == nil {
|
||||
o.Parser = parser.NewParser(parser.Options{})
|
||||
}
|
||||
|
||||
if o.GroupLoader == nil {
|
||||
o.GroupLoader = FileLoader{}
|
||||
o.GroupLoader = FileLoader{parser: o.Parser}
|
||||
}
|
||||
|
||||
if o.RuleConcurrencyController == nil {
|
||||
|
|
@ -320,14 +327,18 @@ type GroupLoader interface {
|
|||
}
|
||||
|
||||
// FileLoader is the default GroupLoader implementation. It defers to rulefmt.ParseFile
|
||||
// and parser.ParseExpr.
|
||||
type FileLoader struct{}
|
||||
|
||||
func (FileLoader) Load(identifier string, ignoreUnknownFields bool, nameValidationScheme model.ValidationScheme) (*rulefmt.RuleGroups, []error) {
|
||||
return rulefmt.ParseFile(identifier, ignoreUnknownFields, nameValidationScheme)
|
||||
// for loading and uses the configured Parser for expression parsing.
|
||||
type FileLoader struct {
|
||||
parser parser.Parser
|
||||
}
|
||||
|
||||
func (FileLoader) Parse(query string) (parser.Expr, error) { return parser.ParseExpr(query) }
|
||||
func (fl FileLoader) Load(identifier string, ignoreUnknownFields bool, nameValidationScheme model.ValidationScheme) (*rulefmt.RuleGroups, []error) {
|
||||
return rulefmt.ParseFile(identifier, ignoreUnknownFields, nameValidationScheme, fl.parser)
|
||||
}
|
||||
|
||||
func (fl FileLoader) Parse(query string) (parser.Expr, error) {
|
||||
return fl.parser.ParseExpr(query)
|
||||
}
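Callers that previously relied on the zero-value FileLoader now get one wired to the manager's parser. A hedged sketch of constructing a manager with an explicit parser: only Parser is introduced by this change, the other fields shown are pre-existing ManagerOptions fields, and st, engine and logger are placeholders.

p := parser.NewParser(parser.Options{EnableExperimentalFunctions: true})
mgr := NewManager(&ManagerOptions{
	Appendable: st,
	Queryable:  st,
	QueryFunc:  EngineQueryFunc(engine, st),
	Context:    context.Background(),
	Logger:     logger,
	Parser:     p, // also picked up by the default FileLoader{parser: p}
})
_ = mgr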
|
||||
|
||||
// LoadGroups reads groups from a list of files.
|
||||
func (m *Manager) LoadGroups(
|
||||
|
|
@ -606,7 +617,7 @@ func FromMaps(maps ...map[string]string) labels.Labels {
|
|||
}
|
||||
|
||||
// ParseFiles parses the rule files corresponding to glob patterns.
|
||||
func ParseFiles(patterns []string, nameValidationScheme model.ValidationScheme) error {
|
||||
func ParseFiles(patterns []string, nameValidationScheme model.ValidationScheme, p parser.Parser) error {
|
||||
files := map[string]string{}
|
||||
for _, pat := range patterns {
|
||||
fns, err := filepath.Glob(pat)
|
||||
|
|
@ -626,7 +637,7 @@ func ParseFiles(patterns []string, nameValidationScheme model.ValidationScheme)
|
|||
}
|
||||
}
|
||||
for fn, pat := range files {
|
||||
_, errs := rulefmt.ParseFile(fn, false, nameValidationScheme)
|
||||
_, errs := rulefmt.ParseFile(fn, false, nameValidationScheme, p)
|
||||
if len(errs) > 0 {
|
||||
return fmt.Errorf("parse rules from file %q (pattern: %q): %w", fn, pat, errors.Join(errs...))
|
||||
}
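With the extra argument, callers outside this package must now supply the parser explicitly. A short hedged sketch; the glob pattern and validation scheme are chosen for illustration.

p := parser.NewParser(parser.Options{})
if err := ParseFiles([]string{"rules/*.yml"}, model.UTF8Validation, p); err != nil {
	log.Fatalf("rule files are invalid: %v", err)
}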
|
||||
|
|
|
|||
|
|
@ -42,7 +42,6 @@ import (
|
|||
"github.com/prometheus/prometheus/model/timestamp"
|
||||
"github.com/prometheus/prometheus/model/value"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"github.com/prometheus/prometheus/promql/promqltest"
|
||||
"github.com/prometheus/prometheus/storage"
|
||||
"github.com/prometheus/prometheus/tsdb"
|
||||
|
|
@ -63,7 +62,7 @@ func TestAlertingRule(t *testing.T) {
|
|||
http_requests{job="app-server", instance="1", group="canary", severity="overwrite-me"} 80 90 100 110 120 130 140
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -205,7 +204,7 @@ func TestForStateAddSamples(t *testing.T) {
|
|||
http_requests{job="app-server", instance="1", group="canary", severity="overwrite-me"} 80 90 100 110 120 130 140
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
|
|
@ -366,7 +365,7 @@ func TestForStateRestore(t *testing.T) {
|
|||
http_requests{job="app-server", instance="1", group="canary", severity="overwrite-me"} 125 90 60 0 0 25 0 0 40 0 130
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
ng := testEngine(t)
|
||||
|
|
@ -551,7 +550,7 @@ func TestStaleness(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("a + 1")
|
||||
expr, err := testParser.ParseExpr("a + 1")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("a_plus_one", expr, labels.Labels{})
|
||||
group := NewGroup(GroupOptions{
|
||||
|
|
@ -809,7 +808,7 @@ func TestUpdate(t *testing.T) {
|
|||
}
|
||||
|
||||
// Groups will be recreated if updated.
|
||||
rgs, errs := rulefmt.ParseFile("fixtures/rules.yaml", false, model.UTF8Validation)
|
||||
rgs, errs := rulefmt.ParseFile("fixtures/rules.yaml", false, model.UTF8Validation, testParser)
|
||||
require.Empty(t, errs, "file parsing failures")
|
||||
|
||||
tmpFile, err := os.CreateTemp("", "rules.test.*.yaml")
|
||||
|
|
@ -929,7 +928,7 @@ func TestNotify(t *testing.T) {
|
|||
ResendDelay: 2 * time.Second,
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("a > 1")
|
||||
expr, err := testParser.ParseExpr("a > 1")
|
||||
require.NoError(t, err)
|
||||
rule := NewAlertingRule("aTooHigh", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
group := NewGroup(GroupOptions{
|
||||
|
|
@ -1300,7 +1299,7 @@ func TestRuleHealthUpdates(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("a + 1")
|
||||
expr, err := testParser.ParseExpr("a + 1")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("a_plus_one", expr, labels.Labels{})
|
||||
group := NewGroup(GroupOptions{
|
||||
|
|
@ -1346,7 +1345,7 @@ func TestRuleGroupEvalIterationFunc(t *testing.T) {
|
|||
http_requests{instance="0"} 75 85 50 0 0 25 0 0 40 0 120
|
||||
`)
|
||||
|
||||
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
expr, err := testParser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
|
||||
require.NoError(t, err)
|
||||
|
||||
testValue := 1
|
||||
|
|
@ -1481,7 +1480,7 @@ func TestNativeHistogramsInRecordingRules(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("sum(histogram_metric)")
|
||||
expr, err := testParser.ParseExpr("sum(histogram_metric)")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("sum:histogram_metric", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1582,23 +1581,23 @@ func TestDependencyMap(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
expr, err := testParser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("user:requests:rate1m", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr("user:requests:rate1m <= 0")
|
||||
expr, err = testParser.ParseExpr("user:requests:rate1m <= 0")
|
||||
require.NoError(t, err)
|
||||
rule2 := NewAlertingRule("ZeroRequests", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr("sum by (user) (rate(requests[5m]))")
|
||||
expr, err = testParser.ParseExpr("sum by (user) (rate(requests[5m]))")
|
||||
require.NoError(t, err)
|
||||
rule3 := NewRecordingRule("user:requests:rate5m", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr("increase(user:requests:rate1m[1h])")
|
||||
expr, err = testParser.ParseExpr("increase(user:requests:rate1m[1h])")
|
||||
require.NoError(t, err)
|
||||
rule4 := NewRecordingRule("user:requests:increase1h", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(`sum by (user) ({__name__=~"user:requests.+5m"})`)
|
||||
expr, err = testParser.ParseExpr(`sum by (user) ({__name__=~"user:requests.+5m"})`)
|
||||
require.NoError(t, err)
|
||||
rule5 := NewRecordingRule("user:requests:sum5m", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1640,7 +1639,7 @@ func TestNoDependency(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
expr, err := testParser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("user:requests:rate1m", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1671,7 +1670,7 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
Opts: opts,
|
||||
})
|
||||
|
||||
expr, err := parser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
expr, err := testParser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("user:requests:rate1m", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1682,11 +1681,11 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("rules which reference no series", func(t *testing.T) {
|
||||
expr, err := parser.ParseExpr("one")
|
||||
expr, err := testParser.ParseExpr("one")
|
||||
require.NoError(t, err)
|
||||
rule1 := NewRecordingRule("1", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr("two")
|
||||
expr, err = testParser.ParseExpr("two")
|
||||
require.NoError(t, err)
|
||||
rule2 := NewRecordingRule("2", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1704,11 +1703,11 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("rule with regexp matcher on metric name", func(t *testing.T) {
|
||||
expr, err := parser.ParseExpr("sum(requests)")
|
||||
expr, err := testParser.ParseExpr("sum(requests)")
|
||||
require.NoError(t, err)
|
||||
rule1 := NewRecordingRule("first", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(`sum({__name__=~".+"})`)
|
||||
expr, err = testParser.ParseExpr(`sum({__name__=~".+"})`)
|
||||
require.NoError(t, err)
|
||||
rule2 := NewRecordingRule("second", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1726,11 +1725,11 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("rule with not equal matcher on metric name", func(t *testing.T) {
|
||||
expr, err := parser.ParseExpr("sum(requests)")
|
||||
expr, err := testParser.ParseExpr("sum(requests)")
|
||||
require.NoError(t, err)
|
||||
rule1 := NewRecordingRule("first", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(`sum({__name__!="requests", service="app"})`)
|
||||
expr, err = testParser.ParseExpr(`sum({__name__!="requests", service="app"})`)
|
||||
require.NoError(t, err)
|
||||
rule2 := NewRecordingRule("second", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1748,11 +1747,11 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("rule with not regexp matcher on metric name", func(t *testing.T) {
|
||||
expr, err := parser.ParseExpr("sum(requests)")
|
||||
expr, err := testParser.ParseExpr("sum(requests)")
|
||||
require.NoError(t, err)
|
||||
rule1 := NewRecordingRule("first", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(`sum({__name__!~"requests.+", service="app"})`)
|
||||
expr, err = testParser.ParseExpr(`sum({__name__!~"requests.+", service="app"})`)
|
||||
require.NoError(t, err)
|
||||
rule2 := NewRecordingRule("second", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1772,27 +1771,27 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
for _, metaMetric := range []string{alertMetricName, alertForStateMetricName} {
|
||||
t.Run(metaMetric, func(t *testing.T) {
|
||||
t.Run("rule querying alerts meta-metric with alertname", func(t *testing.T) {
|
||||
expr, err := parser.ParseExpr("sum(requests) > 0")
|
||||
expr, err := testParser.ParseExpr("sum(requests) > 0")
|
||||
require.NoError(t, err)
|
||||
rule1 := NewAlertingRule("first", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr(fmt.Sprintf(`sum(%s{alertname="test"}) > 0`, metaMetric))
|
||||
expr, err = testParser.ParseExpr(fmt.Sprintf(`sum(%s{alertname="test"}) > 0`, metaMetric))
|
||||
require.NoError(t, err)
|
||||
rule2 := NewAlertingRule("second", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr(fmt.Sprintf(`sum(%s{alertname=~"first.*"}) > 0`, metaMetric))
|
||||
expr, err = testParser.ParseExpr(fmt.Sprintf(`sum(%s{alertname=~"first.*"}) > 0`, metaMetric))
|
||||
require.NoError(t, err)
|
||||
rule3 := NewAlertingRule("third", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr(fmt.Sprintf(`sum(%s{alertname!="first"}) > 0`, metaMetric))
|
||||
expr, err = testParser.ParseExpr(fmt.Sprintf(`sum(%s{alertname!="first"}) > 0`, metaMetric))
|
||||
require.NoError(t, err)
|
||||
rule4 := NewAlertingRule("fourth", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr("sum(failures)")
|
||||
expr, err = testParser.ParseExpr("sum(failures)")
|
||||
require.NoError(t, err)
|
||||
rule5 := NewRecordingRule("fifth", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(fmt.Sprintf(`fifth > 0 and sum(%s{alertname="fourth"}) > 0`, metaMetric))
|
||||
expr, err = testParser.ParseExpr(fmt.Sprintf(`fifth > 0 and sum(%s{alertname="fourth"}) > 0`, metaMetric))
|
||||
require.NoError(t, err)
|
||||
rule6 := NewAlertingRule("sixth", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
|
|
@ -1831,23 +1830,23 @@ func TestDependenciesEdgeCases(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("rule querying alerts meta-metric without alertname", func(t *testing.T) {
|
||||
expr, err := parser.ParseExpr("sum(requests)")
|
||||
expr, err := testParser.ParseExpr("sum(requests)")
|
||||
require.NoError(t, err)
|
||||
rule1 := NewRecordingRule("first", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(`sum(requests) > 0`)
|
||||
expr, err = testParser.ParseExpr(`sum(requests) > 0`)
|
||||
require.NoError(t, err)
|
||||
rule2 := NewAlertingRule("second", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr(fmt.Sprintf(`sum(%s) > 0`, metaMetric))
|
||||
expr, err = testParser.ParseExpr(fmt.Sprintf(`sum(%s) > 0`, metaMetric))
|
||||
require.NoError(t, err)
|
||||
rule3 := NewAlertingRule("third", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr("sum(failures)")
|
||||
expr, err = testParser.ParseExpr("sum(failures)")
|
||||
require.NoError(t, err)
|
||||
rule4 := NewRecordingRule("fourth", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(fmt.Sprintf(`fourth > 0 and sum(%s) > 0`, metaMetric))
|
||||
expr, err = testParser.ParseExpr(fmt.Sprintf(`fourth > 0 and sum(%s) > 0`, metaMetric))
|
||||
require.NoError(t, err)
|
||||
rule5 := NewAlertingRule("fifth", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
|
|
@ -1891,11 +1890,11 @@ func TestNoMetricSelector(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
expr, err := testParser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("user:requests:rate1m", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr(`count({user="bob"})`)
|
||||
expr, err = testParser.ParseExpr(`count({user="bob"})`)
|
||||
require.NoError(t, err)
|
||||
rule2 := NewRecordingRule("user:requests:rate1m", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -1920,15 +1919,15 @@ func TestDependentRulesWithNonMetricExpression(t *testing.T) {
|
|||
Logger: promslog.NewNopLogger(),
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
expr, err := testParser.ParseExpr("sum by (user) (rate(requests[1m]))")
|
||||
require.NoError(t, err)
|
||||
rule := NewRecordingRule("user:requests:rate1m", expr, labels.Labels{})
|
||||
|
||||
expr, err = parser.ParseExpr("user:requests:rate1m <= 0")
|
||||
expr, err = testParser.ParseExpr("user:requests:rate1m <= 0")
|
||||
require.NoError(t, err)
|
||||
rule2 := NewAlertingRule("ZeroRequests", expr, 0, 0, labels.Labels{}, labels.Labels{}, labels.EmptyLabels(), "", true, promslog.NewNopLogger())
|
||||
|
||||
expr, err = parser.ParseExpr("3")
|
||||
expr, err = testParser.ParseExpr("3")
|
||||
require.NoError(t, err)
|
||||
rule3 := NewRecordingRule("three", expr, labels.Labels{})
|
||||
|
||||
|
|
@ -2596,11 +2595,11 @@ func TestLabels_FromMaps(t *testing.T) {
|
|||
|
||||
func TestParseFiles(t *testing.T) {
|
||||
t.Run("good files", func(t *testing.T) {
|
||||
err := ParseFiles([]string{filepath.Join("fixtures", "rules.y*ml")}, model.UTF8Validation)
|
||||
err := ParseFiles([]string{filepath.Join("fixtures", "rules.y*ml")}, model.UTF8Validation, testParser)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
t.Run("bad files", func(t *testing.T) {
|
||||
err := ParseFiles([]string{filepath.Join("fixtures", "invalid_rules.y*ml")}, model.UTF8Validation)
|
||||
err := ParseFiles([]string{filepath.Join("fixtures", "invalid_rules.y*ml")}, model.UTF8Validation, testParser)
|
||||
require.ErrorContains(t, err, "field unexpected_field not found in type rulefmt.Rule")
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,10 +29,12 @@ import (
|
|||
"github.com/prometheus/prometheus/util/testutil"
|
||||
)
|
||||
|
||||
var testParser = parser.NewParser(parser.Options{})
|
||||
|
||||
var (
|
||||
ruleEvaluationTime = time.Unix(0, 0).UTC()
|
||||
exprWithMetricName, _ = parser.ParseExpr(`sort(metric)`)
|
||||
exprWithoutMetricName, _ = parser.ParseExpr(`sort(metric + metric)`)
|
||||
exprWithMetricName, _ = testParser.ParseExpr(`sort(metric)`)
|
||||
exprWithoutMetricName, _ = testParser.ParseExpr(`sort(metric + metric)`)
|
||||
)
|
||||
|
||||
var ruleEvalTestScenarios = []struct {
|
||||
|
|
@ -170,7 +172,7 @@ func TestRuleEvalDuplicate(t *testing.T) {
|
|||
|
||||
now := time.Now()
|
||||
|
||||
expr, _ := parser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
|
||||
expr, _ := testParser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
|
||||
rule := NewRecordingRule("foo", expr, labels.FromStrings("test", "test"))
|
||||
_, err := rule.Eval(ctx, 0, now, EngineQueryFunc(engine, storage), nil, 0)
|
||||
require.Error(t, err)
|
||||
|
|
@ -203,7 +205,7 @@ func TestRecordingRuleLimit(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
expr, _ := parser.ParseExpr(`metric > 0`)
|
||||
expr, _ := testParser.ParseExpr(`metric > 0`)
|
||||
rule := NewRecordingRule(
|
||||
"foo",
|
||||
expr,
|
||||
|
|
@ -238,7 +240,7 @@ func TestRecordingEvalWithOrigin(t *testing.T) {
|
|||
lbs = labels.FromStrings("foo", "bar")
|
||||
)
|
||||
|
||||
expr, err := parser.ParseExpr(query)
|
||||
expr, err := testParser.ParseExpr(query)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewRecordingRule(name, expr, lbs)
|
||||
|
|
|
|||
|
|
@ -14,7 +14,6 @@
|
|||
package fuzzing
|
||||
|
||||
import (
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"github.com/prometheus/prometheus/promql/promqltest"
|
||||
)
|
||||
|
||||
|
|
@ -58,18 +57,6 @@ func GetCorpusForFuzzParseMetricSelector() []string {
|
|||
|
||||
// GetCorpusForFuzzParseExpr returns the seed corpus for FuzzParseExpr.
|
||||
func GetCorpusForFuzzParseExpr() ([]string, error) {
|
||||
// Save original values and restore them after parsing test expressions.
|
||||
defer func(funcs, durationExpr, rangeSelectors bool) {
|
||||
parser.EnableExperimentalFunctions = funcs
|
||||
parser.ExperimentalDurationExpr = durationExpr
|
||||
parser.EnableExtendedRangeSelectors = rangeSelectors
|
||||
}(parser.EnableExperimentalFunctions, parser.ExperimentalDurationExpr, parser.EnableExtendedRangeSelectors)
|
||||
|
||||
// Enable experimental features to parse all test expressions.
|
||||
parser.EnableExperimentalFunctions = true
|
||||
parser.ExperimentalDurationExpr = true
|
||||
parser.EnableExtendedRangeSelectors = true
|
||||
|
||||
// Get built-in test expressions.
|
||||
builtInExprs, err := promqltest.GetBuiltInExprs()
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -33,6 +33,8 @@ const (
|
|||
// Use package-scope symbol table to avoid memory allocation on every fuzzing operation.
|
||||
var symbolTable = labels.NewSymbolTable()
|
||||
|
||||
var fuzzParser = parser.NewParser(parser.Options{})
|
||||
|
||||
// FuzzParseMetricText fuzzes the metric parser with "text/plain" content type.
|
||||
//
|
||||
// Note that this is not the parser for the text-based exposition-format; that
|
||||
|
|
@ -109,7 +111,7 @@ func FuzzParseMetricSelector(f *testing.F) {
|
|||
if len(in) > maxInputSize {
|
||||
t.Skip()
|
||||
}
|
||||
_, err := parser.ParseMetricSelector(in)
|
||||
_, err := fuzzParser.ParseMetricSelector(in)
|
||||
// We don't care about errors, just that we don't panic.
|
||||
_ = err
|
||||
})
|
||||
|
|
@ -117,17 +119,6 @@ func FuzzParseMetricSelector(f *testing.F) {
|
|||
|
||||
// FuzzParseExpr fuzzes the expression parser.
|
||||
func FuzzParseExpr(f *testing.F) {
|
||||
parser.EnableExperimentalFunctions = true
|
||||
parser.ExperimentalDurationExpr = true
|
||||
parser.EnableExtendedRangeSelectors = true
|
||||
parser.EnableBinopFillModifiers = true
|
||||
f.Cleanup(func() {
|
||||
parser.EnableExperimentalFunctions = false
|
||||
parser.ExperimentalDurationExpr = false
|
||||
parser.EnableExtendedRangeSelectors = false
|
||||
parser.EnableBinopFillModifiers = false
|
||||
})
|
||||
|
||||
// Add seed corpus from built-in test expressions
|
||||
corpus, err := GetCorpusForFuzzParseExpr()
|
||||
if err != nil {
|
||||
|
|
@ -141,11 +132,17 @@ func FuzzParseExpr(f *testing.F) {
|
|||
f.Add(expr)
|
||||
}
|
||||
|
||||
p := parser.NewParser(parser.Options{
|
||||
EnableExperimentalFunctions: true,
|
||||
ExperimentalDurationExpr: true,
|
||||
EnableExtendedRangeSelectors: true,
|
||||
EnableBinopFillModifiers: true,
|
||||
})
|
||||
f.Fuzz(func(t *testing.T, in string) {
|
||||
if len(in) > maxInputSize {
|
||||
t.Skip()
|
||||
}
|
||||
_, err := parser.ParseExpr(in)
|
||||
_, err := p.ParseExpr(in)
|
||||
// We don't care about errors, just that we don't panic.
|
||||
_ = err
|
||||
})
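Because options now travel with the parser instance, fuzz targets with different feature sets no longer race on package-level flags. A hedged sketch; mad_over_time is used on the assumption that it is one of the functions gated by EnableExperimentalFunctions.

strict := parser.NewParser(parser.Options{})
loose := parser.NewParser(parser.Options{EnableExperimentalFunctions: true})

const q = `mad_over_time(http_requests_total[5m])`
if _, err := strict.ParseExpr(q); err != nil {
	fmt.Println("rejected:", err) // experimental functions stay gated per instance
}
if _, err := loose.ParseExpr(q); err == nil {
	fmt.Println("accepted with EnableExperimentalFunctions")
}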
|
||||
|
|
|
|||
|
|
@ -25,6 +25,8 @@ import (
|
|||
"github.com/prometheus/prometheus/storage"
|
||||
)
|
||||
|
||||
var testParser = parser.NewParser(parser.Options{})
|
||||
|
||||
// FixtureSeries creates a simple series with the "up" metric.
|
||||
func FixtureSeries() []storage.Series {
|
||||
// Use timestamps relative to "now" so queries work.
|
||||
|
|
@ -73,7 +75,7 @@ func FixtureMultipleSeries() []storage.Series {
|
|||
// FixtureRuleGroups creates a simple set of rule groups for testing.
|
||||
func FixtureRuleGroups() []*rules.Group {
|
||||
// Create a simple recording rule.
|
||||
expr, _ := parser.ParseExpr("up == 1")
|
||||
expr, _ := testParser.ParseExpr("up == 1")
|
||||
recordingRule := rules.NewRecordingRule(
|
||||
"job:up:sum",
|
||||
expr,
|
||||
|
|
@ -81,7 +83,7 @@ func FixtureRuleGroups() []*rules.Group {
|
|||
)
|
||||
|
||||
// Create a simple alerting rule.
|
||||
alertExpr, _ := parser.ParseExpr("up == 0")
|
||||
alertExpr, _ := testParser.ParseExpr("up == 0")
|
||||
alertingRule := rules.NewAlertingRule(
|
||||
"InstanceDown",
|
||||
alertExpr,
|
||||
|
|
|
|||
|
|
@ -259,6 +259,8 @@ type API struct {
|
|||
|
||||
featureRegistry features.Collector
|
||||
openAPIBuilder *OpenAPIBuilder
|
||||
|
||||
parser parser.Parser
|
||||
}
|
||||
|
||||
// NewAPI returns an initialized API type.
|
||||
|
|
@ -301,6 +303,7 @@ func NewAPI(
|
|||
overrideErrorCode OverrideErrorCode,
|
||||
featureRegistry features.Collector,
|
||||
openAPIOptions OpenAPIOptions,
|
||||
promqlParser parser.Parser,
|
||||
) *API {
|
||||
a := &API{
|
||||
QueryEngine: qe,
|
||||
|
|
@ -332,10 +335,15 @@ func NewAPI(
|
|||
overrideErrorCode: overrideErrorCode,
|
||||
featureRegistry: featureRegistry,
|
||||
openAPIBuilder: NewOpenAPIBuilder(openAPIOptions, logger),
|
||||
parser: promqlParser,
|
||||
|
||||
remoteReadHandler: remote.NewReadHandler(logger, registerer, q, configFunc, remoteReadSampleLimit, remoteReadConcurrencyLimit, remoteReadMaxBytesInFrame),
|
||||
}
|
||||
|
||||
if a.parser == nil {
|
||||
a.parser = parser.NewParser(parser.Options{})
|
||||
}
|
||||
|
||||
a.InstallCodec(JSONCodec{})
|
||||
|
||||
if statsRenderer != nil {
|
||||
|
|
@ -560,8 +568,8 @@ func (api *API) query(r *http.Request) (result apiFuncResult) {
|
|||
}, nil, warnings, qry.Close}
|
||||
}
|
||||
|
||||
func (*API) formatQuery(r *http.Request) (result apiFuncResult) {
|
||||
expr, err := parser.ParseExpr(r.FormValue("query"))
|
||||
func (api *API) formatQuery(r *http.Request) (result apiFuncResult) {
|
||||
expr, err := api.parser.ParseExpr(r.FormValue("query"))
|
||||
if err != nil {
|
||||
return invalidParamError(err, "query")
|
||||
}
|
||||
|
|
@ -569,8 +577,8 @@ func (*API) formatQuery(r *http.Request) (result apiFuncResult) {
|
|||
return apiFuncResult{expr.Pretty(0), nil, nil, nil}
|
||||
}
|
||||
|
||||
func (*API) parseQuery(r *http.Request) apiFuncResult {
|
||||
expr, err := parser.ParseExpr(r.FormValue("query"))
|
||||
func (api *API) parseQuery(r *http.Request) apiFuncResult {
|
||||
expr, err := api.parser.ParseExpr(r.FormValue("query"))
|
||||
if err != nil {
|
||||
return invalidParamError(err, "query")
|
||||
}
|
||||
|
|
@ -699,7 +707,7 @@ func (api *API) queryExemplars(r *http.Request) apiFuncResult {
|
|||
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
|
||||
}
|
||||
|
||||
expr, err := parser.ParseExpr(r.FormValue("query"))
|
||||
expr, err := api.parser.ParseExpr(r.FormValue("query"))
|
||||
if err != nil {
|
||||
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
|
||||
}
|
||||
|
|
@ -762,7 +770,7 @@ func (api *API) labelNames(r *http.Request) apiFuncResult {
|
|||
return invalidParamError(err, "end")
|
||||
}
|
||||
|
||||
matcherSets, err := parseMatchersParam(r.Form["match[]"])
|
||||
matcherSets, err := api.parseMatchersParam(r.Form["match[]"])
|
||||
if err != nil {
|
||||
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
|
||||
}
|
||||
|
|
@ -850,7 +858,7 @@ func (api *API) labelValues(r *http.Request) (result apiFuncResult) {
|
|||
return invalidParamError(err, "end")
|
||||
}
|
||||
|
||||
matcherSets, err := parseMatchersParam(r.Form["match[]"])
|
||||
matcherSets, err := api.parseMatchersParam(r.Form["match[]"])
|
||||
if err != nil {
|
||||
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
|
||||
}
|
||||
|
|
@ -969,7 +977,7 @@ func (api *API) series(r *http.Request) (result apiFuncResult) {
|
|||
return invalidParamError(err, "end")
|
||||
}
|
||||
|
||||
matcherSets, err := parseMatchersParam(r.Form["match[]"])
|
||||
matcherSets, err := api.parseMatchersParam(r.Form["match[]"])
|
||||
if err != nil {
|
||||
return invalidParamError(err, "match[]")
|
||||
}
|
||||
|
|
@ -1264,7 +1272,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
|
|||
var matchers []*labels.Matcher
|
||||
var err error
|
||||
if matchTarget != "" {
|
||||
matchers, err = parser.ParseMetricSelector(matchTarget)
|
||||
matchers, err = api.parser.ParseMetricSelector(matchTarget)
|
||||
if err != nil {
|
||||
return invalidParamError(err, "match_target")
|
||||
}
|
||||
|
|
@ -1583,7 +1591,7 @@ func (api *API) rules(r *http.Request) apiFuncResult {
|
|||
rgSet := queryFormToSet(r.Form["rule_group[]"])
|
||||
fSet := queryFormToSet(r.Form["file[]"])
|
||||
|
||||
matcherSets, err := parseMatchersParam(r.Form["match[]"])
|
||||
matcherSets, err := api.parseMatchersParam(r.Form["match[]"])
|
||||
if err != nil {
|
||||
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
|
||||
}
|
||||
|
|
@ -2036,7 +2044,7 @@ func (api *API) deleteSeries(r *http.Request) apiFuncResult {
|
|||
}
|
||||
|
||||
for _, s := range r.Form["match[]"] {
|
||||
matchers, err := parser.ParseMetricSelector(s)
|
||||
matchers, err := api.parser.ParseMetricSelector(s)
|
||||
if err != nil {
|
||||
return invalidParamError(err, "match[]")
|
||||
}
|
||||
|
|
@ -2245,8 +2253,8 @@ func parseDuration(s string) (time.Duration, error) {
|
|||
return 0, fmt.Errorf("cannot parse %q to a valid duration", s)
|
||||
}
|
||||
|
||||
func parseMatchersParam(matchers []string) ([][]*labels.Matcher, error) {
|
||||
matcherSets, err := parser.ParseMetricSelectors(matchers)
|
||||
func (api *API) parseMatchersParam(matchers []string) ([][]*labels.Matcher, error) {
|
||||
matcherSets, err := api.parser.ParseMetricSelectors(matchers)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
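Endpoints that consumed the old package-level helpers all follow the same pattern after this change: go through api.parser (directly or via parseMatchersParam). A hypothetical handler sketch; exampleHandler is made up, while apiFuncResult and invalidParamError are the existing helpers used throughout this file.

func (api *API) exampleHandler(r *http.Request) apiFuncResult {
	matcherSets, err := api.parseMatchersParam(r.Form["match[]"])
	if err != nil {
		return invalidParamError(err, "match[]")
	}
	return apiFuncResult{len(matcherSets), nil, nil, nil}
}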
|
||||
|
|
|
|||
|
|
@ -63,6 +63,8 @@ import (
|
|||
"github.com/prometheus/prometheus/util/testutil"
|
||||
)
|
||||
|
||||
var testParser = parser.NewParser(parser.Options{})
|
||||
|
||||
func testEngine(t *testing.T) *promql.Engine {
|
||||
t.Helper()
|
||||
return promqltest.NewTestEngineWithOpts(t, promql.EngineOpts{
|
||||
|
|
@ -250,11 +252,11 @@ type rulesRetrieverMock struct {
|
|||
}
|
||||
|
||||
func (m *rulesRetrieverMock) CreateAlertingRules() {
|
||||
expr1, err := parser.ParseExpr(`absent(test_metric3) != 1`)
|
||||
expr1, err := testParser.ParseExpr(`absent(test_metric3) != 1`)
|
||||
require.NoError(m.testing, err)
|
||||
expr2, err := parser.ParseExpr(`up == 1`)
|
||||
expr2, err := testParser.ParseExpr(`up == 1`)
|
||||
require.NoError(m.testing, err)
|
||||
expr3, err := parser.ParseExpr(`vector(1)`)
|
||||
expr3, err := testParser.ParseExpr(`vector(1)`)
|
||||
require.NoError(m.testing, err)
|
||||
|
||||
rule1 := rules.NewAlertingRule(
|
||||
|
|
@ -353,7 +355,7 @@ func (m *rulesRetrieverMock) CreateRuleGroups() {
|
|||
r = append(r, alertrule)
|
||||
}
|
||||
|
||||
recordingExpr, err := parser.ParseExpr(`vector(1)`)
|
||||
recordingExpr, err := testParser.ParseExpr(`vector(1)`)
|
||||
require.NoError(m.testing, err, "unable to parse alert expression")
|
||||
recordingRule := rules.NewRecordingRule("recording-rule-1", recordingExpr, labels.Labels{})
|
||||
recordingRule2 := rules.NewRecordingRule("recording-rule-2", recordingExpr, labels.FromStrings("testlabel", "rule"))
|
||||
|
|
@ -506,6 +508,7 @@ func TestEndpoints(t *testing.T) {
|
|||
config: func() config.Config { return samplePrometheusCfg },
|
||||
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
||||
rulesRetriever: algr.toFactory(),
|
||||
parser: testParser,
|
||||
}
|
||||
testEndpoints(t, api, testTargetRetriever, true)
|
||||
})
|
||||
|
|
@ -570,6 +573,7 @@ func TestEndpoints(t *testing.T) {
|
|||
config: func() config.Config { return samplePrometheusCfg },
|
||||
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
||||
rulesRetriever: algr.toFactory(),
|
||||
parser: testParser,
|
||||
}
|
||||
testEndpoints(t, api, testTargetRetriever, false)
|
||||
})
|
||||
|
|
@ -595,6 +599,7 @@ func TestGetSeries(t *testing.T) {
|
|||
|
||||
api := &API{
|
||||
Queryable: s,
|
||||
parser: testParser,
|
||||
}
|
||||
request := func(method string, matchers ...string) (*http.Request, error) {
|
||||
u, err := url.Parse("http://example.com")
|
||||
|
|
@ -659,6 +664,7 @@ func TestGetSeries(t *testing.T) {
|
|||
expectedErrorType: errorExec,
|
||||
api: &API{
|
||||
Queryable: errorTestQueryable{err: errors.New("generic")},
|
||||
parser: testParser,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -667,6 +673,7 @@ func TestGetSeries(t *testing.T) {
|
|||
expectedErrorType: errorInternal,
|
||||
api: &API{
|
||||
Queryable: errorTestQueryable{err: promql.ErrStorage{Err: errors.New("generic")}},
|
||||
parser: testParser,
|
||||
},
|
||||
},
|
||||
} {
|
||||
|
|
@ -704,6 +711,7 @@ func TestQueryExemplars(t *testing.T) {
|
|||
Queryable: s,
|
||||
QueryEngine: testEngine(t),
|
||||
ExemplarQueryable: s,
|
||||
parser: testParser,
|
||||
}
|
||||
|
||||
request := func(method string, qs url.Values) (*http.Request, error) {
|
||||
|
|
@ -760,6 +768,7 @@ func TestQueryExemplars(t *testing.T) {
|
|||
expectedErrorType: errorExec,
|
||||
api: &API{
|
||||
ExemplarQueryable: errorTestQueryable{err: errors.New("generic")},
|
||||
parser: testParser,
|
||||
},
|
||||
query: url.Values{
|
||||
"query": []string{`test_metric3{foo="boo"} - test_metric4{foo="bar"}`},
|
||||
|
|
@ -772,6 +781,7 @@ func TestQueryExemplars(t *testing.T) {
|
|||
expectedErrorType: errorInternal,
|
||||
api: &API{
|
||||
ExemplarQueryable: errorTestQueryable{err: promql.ErrStorage{Err: errors.New("generic")}},
|
||||
parser: testParser,
|
||||
},
|
||||
query: url.Values{
|
||||
"query": []string{`test_metric3{foo="boo"} - test_metric4{foo="bar"}`},
|
||||
|
|
@ -812,6 +822,7 @@ func TestLabelNames(t *testing.T) {
|
|||
|
||||
api := &API{
|
||||
Queryable: s,
|
||||
parser: testParser,
|
||||
}
|
||||
request := func(method, limit string, matchers ...string) (*http.Request, error) {
|
||||
u, err := url.Parse("http://example.com")
|
||||
|
|
@ -876,6 +887,7 @@ func TestLabelNames(t *testing.T) {
|
|||
expectedErrorType: errorExec,
|
||||
api: &API{
|
||||
Queryable: errorTestQueryable{err: errors.New("generic")},
|
||||
parser: testParser,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -884,6 +896,7 @@ func TestLabelNames(t *testing.T) {
|
|||
expectedErrorType: errorInternal,
|
||||
api: &API{
|
||||
Queryable: errorTestQueryable{err: promql.ErrStorage{Err: errors.New("generic")}},
|
||||
parser: testParser,
|
||||
},
|
||||
},
|
||||
} {
|
||||
|
|
@ -916,6 +929,7 @@ func TestStats(t *testing.T) {
|
|||
api := &API{
|
||||
Queryable: s,
|
||||
QueryEngine: testEngine(t),
|
||||
parser: testParser,
|
||||
now: func() time.Time {
|
||||
return time.Unix(123, 0)
|
||||
},
|
||||
|
|
@ -4101,6 +4115,7 @@ func TestAdminEndpoints(t *testing.T) {
|
|||
dbDir: dir,
|
||||
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
||||
enableAdmin: tc.enableAdmin,
|
||||
parser: testParser,
|
||||
}
|
||||
|
||||
endpoint := tc.endpoint(api)
|
||||
|
|
@ -4850,6 +4865,7 @@ func TestQueryTimeout(t *testing.T) {
|
|||
now: func() time.Time { return now },
|
||||
config: func() config.Config { return samplePrometheusCfg },
|
||||
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
||||
parser: testParser,
|
||||
}
|
||||
|
||||
query := url.Values{
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ import (
|
|||
"github.com/prometheus/prometheus/config"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"github.com/prometheus/prometheus/promql/promqltest"
|
||||
"github.com/prometheus/prometheus/rules"
|
||||
"github.com/prometheus/prometheus/scrape"
|
||||
|
|
@ -170,6 +171,7 @@ func createPrometheusAPI(t *testing.T, q storage.SampleAndChunkQueryable, overri
|
|||
overrideErrorCode,
|
||||
nil,
|
||||
OpenAPIOptions{},
|
||||
parser.NewParser(parser.Options{}),
|
||||
)
|
||||
|
||||
promRouter := route.New().WithPrefix("/api/v1")
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ import (
|
|||
|
||||
"github.com/prometheus/common/route"
|
||||
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"github.com/prometheus/prometheus/web/api/testhelpers"
|
||||
)
|
||||
|
||||
|
|
@@ -90,19 +91,20 @@ func newTestAPI(t *testing.T, cfg testhelpers.APIConfig) *testhelpers.APIWrapper
params.NotificationsSub,
params.Gatherer,
params.Registerer,
-	nil, // statsRenderer
-	false, // rwEnabled
-	nil, // acceptRemoteWriteProtoMsgs
-	false, // otlpEnabled
-	false, // otlpDeltaToCumulative
-	false, // otlpNativeDeltaIngestion
-	false, // stZeroIngestionEnabled
-	5*time.Minute, // lookbackDelta
-	false, // enableTypeAndUnitLabels
-	false, // appendMetadata
-	nil, // overrideErrorCode
-	nil, // featureRegistry
-	OpenAPIOptions{}, // openAPIOptions
+	nil, // statsRenderer
+	false, // rwEnabled
+	nil, // acceptRemoteWriteProtoMsgs
+	false, // otlpEnabled
+	false, // otlpDeltaToCumulative
+	false, // otlpNativeDeltaIngestion
+	false, // stZeroIngestionEnabled
+	5*time.Minute, // lookbackDelta
+	false, // enableTypeAndUnitLabels
+	false, // appendMetadata
+	nil, // overrideErrorCode
+	nil, // featureRegistry
+	OpenAPIOptions{}, // openAPIOptions
+	parser.NewParser(parser.Options{}), // promqlParser
)

// Register routes.
@@ -32,7 +32,6 @@ import (
"github.com/prometheus/prometheus/model/timestamp"
"github.com/prometheus/prometheus/model/value"
"github.com/prometheus/prometheus/promql"
-	"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb"
"github.com/prometheus/prometheus/tsdb/chunkenc"
@@ -64,7 +63,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
return
}

-	matcherSets, err := parser.ParseMetricSelectors(req.Form["match[]"])
+	matcherSets, err := h.options.Parser.ParseMetricSelectors(req.Form["match[]"])
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
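The federation handler now calls ParseMetricSelectors on the injected parser instance rather than the package-level helper. A small self-contained sketch of the instance-based call, assuming the method keeps the same shape as the old package function; the selectors below are illustrative:

	package main

	import (
		"fmt"
		"log"

		"github.com/prometheus/prometheus/promql/parser"
	)

	func main() {
		// Parser configured per instance instead of via package-level globals.
		p := parser.NewParser(parser.Options{})

		// Mirrors the call in the federation handler above.
		matcherSets, err := p.ParseMetricSelectors([]string{
			`up{job="prometheus"}`,
			`{__name__=~"job:.*"}`,
		})
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("parsed %d matcher sets\n", len(matcherSets))
	}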
@@ -35,6 +35,7 @@ import (
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/model/textparse"
"github.com/prometheus/prometheus/promql"
+	"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/promql/promqltest"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb"
@@ -42,6 +43,8 @@ import (
"github.com/prometheus/prometheus/util/testutil"
)

+var testParser = parser.NewParser(parser.Options{})
+
var scenarios = map[string]struct {
params string
externalLabels labels.Labels
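The shared testParser uses default options, so the existing federation tests keep their behaviour. A hypothetical variant for tests that need experimental syntax, using option field names introduced elsewhere in this change set; it is not part of the diff:

	// Illustrative only.
	var experimentalTestParser = parser.NewParser(parser.Options{
		EnableExperimentalFunctions: true,
		ExperimentalDurationExpr:    true,
	})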
@@ -220,6 +223,7 @@ func TestFederation(t *testing.T) {
config: &config.Config{
GlobalConfig: config.GlobalConfig{},
},
+	options: &Options{Parser: testParser},
}

for name, scenario := range scenarios {
@@ -264,6 +268,7 @@ func TestFederation_NotReady(t *testing.T) {
ExternalLabels: scenario.externalLabels,
},
},
+	options: &Options{Parser: testParser},
}

req := httptest.NewRequest(http.MethodGet, "http://example.org/federate?"+scenario.params, nil)
@@ -440,6 +445,7 @@ func TestFederationWithNativeHistograms(t *testing.T) {
config: &config.Config{
GlobalConfig: config.GlobalConfig{},
},
+	options: &Options{Parser: testParser},
}

req := httptest.NewRequest(http.MethodGet, "http://example.org/federate?match[]=test_metric", nil)
@@ -54,6 +54,7 @@ import (
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/notifier"
"github.com/prometheus/prometheus/promql"
+	"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
@@ -307,6 +308,9 @@ type Options struct {
Gatherer prometheus.Gatherer
Registerer prometheus.Registerer
FeatureRegistry features.Collector
+
+	// Parser is the PromQL parser used for parsing query expressions.
+	Parser parser.Parser
}

// New initializes a new web Handler.
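With the new field, a caller that builds web.Options can hand the handler a pre-configured parser; leaving the field nil remains valid because New substitutes a default instance, as the next hunk shows. A fragment showing only the new field, every other Options field elided and the option value chosen purely for illustration:

	// Sketch, not a complete Handler construction.
	o := &web.Options{
		Parser: parser.NewParser(parser.Options{
			EnableExperimentalFunctions: true, // assumed option field, for illustration
		}),
	}
	handler := web.New(logger, o)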
@@ -314,6 +318,9 @@ func New(logger *slog.Logger, o *Options) *Handler {
if logger == nil {
logger = promslog.NewNopLogger()
}
+	if o.Parser == nil {
+		o.Parser = parser.NewParser(parser.Options{})
+	}

m := newMetrics(o.Registerer)
router := route.New().
@@ -417,6 +424,7 @@ func New(logger *slog.Logger, o *Options) *Handler {
ExternalURL: o.ExternalURL.String(),
Version: version,
},
+	o.Parser,
)

if r := o.FeatureRegistry; r != nil {