Add limit to JSON nesting depth (#31069)
* Add limit to JSON nesting depth
* Add JSON limit check to http handler
* Add changelog
parent e2273dbd77 · commit eedc2b7426
11 changed files with 587 additions and 53 deletions
changelog/31069.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
```release-note:change
http: Add configurable JSON limits to HTTP handling for JSON payloads: `max_json_depth`, `max_json_string_value_length`, `max_json_object_entry_count`, `max_json_array_element_count`.
```
@@ -899,6 +899,26 @@ func (c *ServerCommand) InitListeners(config *server.Config, disableClustering b
	}
	props["max_request_size"] = fmt.Sprintf("%d", lnConfig.MaxRequestSize)

	if lnConfig.CustomMaxJSONDepth == 0 {
		lnConfig.CustomMaxJSONDepth = vaulthttp.CustomMaxJSONDepth
	}
	props["max_json_depth"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONDepth)

	if lnConfig.CustomMaxJSONStringValueLength == 0 {
		lnConfig.CustomMaxJSONStringValueLength = vaulthttp.CustomMaxJSONStringValueLength
	}
	props["max_json_string_value_length"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONStringValueLength)

	if lnConfig.CustomMaxJSONObjectEntryCount == 0 {
		lnConfig.CustomMaxJSONObjectEntryCount = vaulthttp.CustomMaxJSONObjectEntryCount
	}
	props["max_json_object_entry_count"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONObjectEntryCount)

	if lnConfig.CustomMaxJSONArrayElementCount == 0 {
		lnConfig.CustomMaxJSONArrayElementCount = vaulthttp.CustomMaxJSONArrayElementCount
	}
	props["max_json_array_element_count"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONArrayElementCount)

	if lnConfig.MaxRequestDuration == 0 {
		lnConfig.MaxRequestDuration = vault.DefaultMaxRequestDuration
	}
@@ -85,6 +85,43 @@ const (
	// VaultSnapshotRecoverParam is the query parameter sent when Vault should
	// recover the data from a loaded snapshot
	VaultSnapshotRecoverParam = "recover_snapshot_id"

	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
	// This limit is designed to prevent stack exhaustion attacks from deeply
	// nested JSON payloads, which could otherwise lead to a denial-of-service
	// (DoS) vulnerability. The default value of 300 is intentionally generous
	// to support complex but legitimate configurations, while still providing
	// a safeguard against malicious or malformed input. This value is
	// configurable to accommodate unique environmental requirements.
	CustomMaxJSONDepth = 300

	// CustomMaxJSONStringValueLength defines the maximum allowed length for a single
	// string value within a JSON payload, in bytes. This is a critical defense
	// against excessive memory allocation attacks where a client might send a
	// very large string value to exhaust server memory. The default of 1MB
	// (1024 * 1024 bytes) is chosen to comfortably accommodate large secrets
	// such as private keys, certificate chains, or detailed configuration data,
	// without permitting unbounded allocation. This value is configurable.
	CustomMaxJSONStringValueLength = 1024 * 1024 // 1MB

	// CustomMaxJSONObjectEntryCount sets the maximum number of key-value pairs
	// allowed in a single JSON object. This limit helps mitigate the risk of
	// hash-collision denial-of-service (HashDoS) attacks and prevents general
	// resource exhaustion from parsing objects with an excessive number of
	// entries. A default of 10,000 entries is well beyond the scope of typical
	// Vault secrets or configurations, providing a high ceiling for normal
	// operations while ensuring stability. This value is configurable.
	CustomMaxJSONObjectEntryCount = 10000

	// CustomMaxJSONArrayElementCount determines the maximum number of elements
	// permitted in a single JSON array. This is particularly relevant for API
	// endpoints that can return large lists, such as the result of a `LIST`
	// operation on a secrets engine path. The default limit of 10,000 elements
	// prevents a single request from causing excessive memory consumption. While
	// most environments will fall well below this limit, it is configurable for
	// systems that require handling larger datasets, though pagination is the
	// recommended practice for such cases.
	CustomMaxJSONArrayElementCount = 10000
)

var (
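To make the effect of these defaults concrete, here is a minimal, hypothetical sketch (not part of the change itself) that builds a payload nested one level deeper than `CustomMaxJSONDepth` and checks it with the `jsonutil.JSONLimits`/`VerifyMaxDepthStreaming` helpers added later in this commit, using limits that mirror the constants above:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/hashicorp/vault/sdk/helper/jsonutil"
)

func main() {
	// Mirror the defaults defined above in the http package.
	limits := jsonutil.JSONLimits{
		MaxDepth:             300,
		MaxStringValueLength: 1024 * 1024,
		MaxObjectEntryCount:  10000,
		MaxArrayElementCount: 10000,
	}

	// Build a payload nested one level deeper than the default depth limit:
	// {"a":{"a":{... {"a":1} ...}}} with 301 nested objects.
	payload := "1"
	for i := 0; i < 301; i++ {
		payload = `{"a":` + payload + `}`
	}

	_, err := jsonutil.VerifyMaxDepthStreaming(strings.NewReader(payload), limits)
	fmt.Println(err) // JSON input exceeds allowed nesting depth
}
```

With the default of 300 levels, the 301-deep payload is rejected before any request handling takes place.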
@@ -938,7 +938,7 @@ func TestHandler_MaxRequestSize(t *testing.T) {
		"bar": strings.Repeat("a", 1025),
	})

-	require.ErrorContains(t, err, "error parsing JSON")
+	require.ErrorContains(t, err, "http: request body too large")
}

// TestHandler_MaxRequestSize_Memory sets the max request size to 1024 bytes,
@@ -147,7 +147,7 @@ func buildLogicalRequestNoAuth(perfStandby bool, ra *vault.RouterAccess, w http.
		if err != nil {
			status := http.StatusBadRequest
			logical.AdjustErrorStatusCode(&status, err)
-			return nil, nil, status, fmt.Errorf("error parsing JSON")
+			return nil, nil, status, fmt.Errorf("error parsing JSON: %w", err)
		}
	}
}
@@ -310,8 +310,15 @@ func TestLogical_RequestSizeDisableLimit(t *testing.T) {

	// Write a very large object, should pass as MaxRequestSize set to -1/Negative value

+	// Test change: Previously used DefaultMaxRequestSize to create a large payload.
+	// However, after introducing JSON limits, the test successfully disables the first layer (MaxRequestSize),
+	// but its large 32MB payload is then correctly caught by the second layer, specifically
+	// the CustomMaxJSONStringValueLength limit, which defaults to 1MB.
+	// Create a payload that is larger than a typical small limit (e.g., > 1KB),
+	// but is well within the default JSON string length limit (1MB).
+	// This isolates the test to *only* the MaxRequestSize behavior.
	resp := testHttpPut(t, token, addr+"/v1/secret/foo", map[string]interface{}{
-		"data": make([]byte, DefaultMaxRequestSize),
+		"data": make([]byte, 2048),
	})
	testResponseStatus(t, resp, http.StatusNoContent)
}
http/util.go (64 changed lines)

@@ -15,6 +15,7 @@ import (
	"github.com/hashicorp/go-multierror"
	"github.com/hashicorp/vault/helper/namespace"
	"github.com/hashicorp/vault/limits"
+	"github.com/hashicorp/vault/sdk/helper/jsonutil"
	"github.com/hashicorp/vault/sdk/logical"
	"github.com/hashicorp/vault/vault"
	"github.com/hashicorp/vault/vault/quotas"
@@ -24,25 +25,80 @@ var nonVotersAllowed = false

func wrapMaxRequestSizeHandler(handler http.Handler, props *vault.HandlerProperties) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var maxRequestSize int64
		var maxRequestSize, maxJSONDepth, maxStringValueLength, maxObjectEntryCount, maxArrayElementCount int64

		if props.ListenerConfig != nil {
			maxRequestSize = props.ListenerConfig.MaxRequestSize
			maxJSONDepth = props.ListenerConfig.CustomMaxJSONDepth
			maxStringValueLength = props.ListenerConfig.CustomMaxJSONStringValueLength
			maxObjectEntryCount = props.ListenerConfig.CustomMaxJSONObjectEntryCount
			maxArrayElementCount = props.ListenerConfig.CustomMaxJSONArrayElementCount
		}

		if maxRequestSize == 0 {
			maxRequestSize = DefaultMaxRequestSize
		}
		ctx := r.Context()
		originalBody := r.Body
		if maxJSONDepth == 0 {
			maxJSONDepth = CustomMaxJSONDepth
		}
		if maxStringValueLength == 0 {
			maxStringValueLength = CustomMaxJSONStringValueLength
		}
		if maxObjectEntryCount == 0 {
			maxObjectEntryCount = CustomMaxJSONObjectEntryCount
		}
		if maxArrayElementCount == 0 {
			maxArrayElementCount = CustomMaxJSONArrayElementCount
		}

		jsonLimits := jsonutil.JSONLimits{
			MaxDepth:             int(maxJSONDepth),
			MaxStringValueLength: int(maxStringValueLength),
			MaxObjectEntryCount:  int(maxObjectEntryCount),
			MaxArrayElementCount: int(maxArrayElementCount),
		}

		// If the payload is JSON, the VerifyMaxDepthStreaming function will perform validations.
		buf, err := jsonLimitsValidation(w, r, maxRequestSize, jsonLimits)
		if err != nil {
			respondError(w, http.StatusInternalServerError, err)
			return
		}

		// Replace the body and update the context.
		// This ensures the request object is in a consistent state for all downstream handlers.
		// Because the original request body stream has been consumed by the validation read,
		// we must replace it so that subsequent handlers can read the content.
		r.Body = newMultiReaderCloser(buf, r.Body)
		contextBody := r.Body
		ctx := logical.CreateContextOriginalBody(r.Context(), contextBody)

		if maxRequestSize > 0 {
			r.Body = http.MaxBytesReader(w, r.Body, maxRequestSize)
		}
		ctx = logical.CreateContextOriginalBody(ctx, originalBody)
		r = r.WithContext(ctx)

		handler.ServeHTTP(w, r)
	})
}

func jsonLimitsValidation(w http.ResponseWriter, r *http.Request, maxRequestSize int64, jsonLimits jsonutil.JSONLimits) (*bytes.Buffer, error) {
	// The TeeReader reads from the original body and writes a copy to our buffer.
	// We wrap the original body with a MaxBytesReader first to enforce the hard size limit.
	var limitedTeeReader io.Reader
	buf := &bytes.Buffer{}
	bodyReader := r.Body
	if maxRequestSize > 0 {
		bodyReader = http.MaxBytesReader(w, r.Body, maxRequestSize)
	}
	limitedTeeReader = io.TeeReader(bodyReader, buf)
	_, err := jsonutil.VerifyMaxDepthStreaming(limitedTeeReader, jsonLimits)
	if err != nil {
		return nil, err
	}
	return buf, nil
}

func wrapRequestLimiterHandler(handler http.Handler, props *vault.HandlerProperties) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		request := r.WithContext(
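The validate-then-replay step above relies on the TeeReader capturing everything the validator reads so that downstream handlers still see the full payload. Below is a standalone sketch of that pattern using only the standard library; `validateAndReplay` and the trivial JSON probe are illustrative stand-ins, not Vault's `jsonLimitsValidation` or `newMultiReaderCloser`:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

// validateAndReplay drains r through a TeeReader so a streaming check can run,
// then returns a reader that replays the captured bytes followed by any
// remainder of the original stream.
func validateAndReplay(r io.Reader) (io.Reader, error) {
	buf := &bytes.Buffer{}
	tee := io.TeeReader(r, buf)

	// Stand-in for the JSON limit validation: just confirm the stream decodes.
	var probe interface{}
	if err := json.NewDecoder(tee).Decode(&probe); err != nil {
		return nil, err
	}

	// Replay the consumed bytes, then continue with anything not yet read.
	return io.MultiReader(buf, r), nil
}

func main() {
	body := strings.NewReader(`{"secret":"value"}`)

	replayed, err := validateAndReplay(body)
	if err != nil {
		panic(err)
	}

	// A downstream consumer still sees the full original payload.
	rest, _ := io.ReadAll(replayed)
	fmt.Println(string(rest)) // {"secret":"value"}
}
```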
@@ -149,6 +149,24 @@ type Listener struct {
	// DisableRequestLimiter allows per-listener disabling of the Request Limiter.
	DisableRequestLimiterRaw any `hcl:"disable_request_limiter"`
	DisableRequestLimiter bool `hcl:"-"`

	// JSON-specific limits

	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
	CustomMaxJSONDepthRaw interface{} `hcl:"max_json_depth"`
	CustomMaxJSONDepth int64 `hcl:"-"`

	// CustomMaxJSONStringValueLength defines the maximum allowed length for a string in a JSON payload.
	CustomMaxJSONStringValueLengthRaw interface{} `hcl:"max_json_string_value_length"`
	CustomMaxJSONStringValueLength int64 `hcl:"-"`

	// CustomMaxJSONObjectEntryCount sets the maximum number of key-value pairs in a JSON object.
	CustomMaxJSONObjectEntryCountRaw interface{} `hcl:"max_json_object_entry_count"`
	CustomMaxJSONObjectEntryCount int64 `hcl:"-"`

	// CustomMaxJSONArrayElementCount determines the maximum number of elements in a JSON array.
	CustomMaxJSONArrayElementCountRaw interface{} `hcl:"max_json_array_element_count"`
	CustomMaxJSONArrayElementCount int64 `hcl:"-"`
}

// AgentAPI allows users to select which parts of the Agent API they want enabled.
@@ -468,6 +486,10 @@ func (l *Listener) parseRequestSettings() error {
		return fmt.Errorf("invalid value for disable_request_limiter: %w", err)
	}

	if err := l.parseJSONLimitsSettings(); err != nil {
		return err
	}

	return nil
}
@@ -710,3 +732,35 @@ func (l *Listener) parseRedactionSettings() error {

	return nil
}

func (l *Listener) parseJSONLimitsSettings() error {
	if err := parseAndClearInt(&l.CustomMaxJSONDepthRaw, &l.CustomMaxJSONDepth); err != nil {
		return fmt.Errorf("error parsing max_json_depth: %w", err)
	}
	if l.CustomMaxJSONDepth < 0 {
		return fmt.Errorf("max_json_depth cannot be negative")
	}

	if err := parseAndClearInt(&l.CustomMaxJSONStringValueLengthRaw, &l.CustomMaxJSONStringValueLength); err != nil {
		return fmt.Errorf("error parsing max_json_string_value_length: %w", err)
	}
	if l.CustomMaxJSONStringValueLength < 0 {
		return fmt.Errorf("max_json_string_value_length cannot be negative")
	}

	if err := parseAndClearInt(&l.CustomMaxJSONObjectEntryCountRaw, &l.CustomMaxJSONObjectEntryCount); err != nil {
		return fmt.Errorf("error parsing max_json_object_entry_count: %w", err)
	}
	if l.CustomMaxJSONObjectEntryCount < 0 {
		return fmt.Errorf("max_json_object_entry_count cannot be negative")
	}

	if err := parseAndClearInt(&l.CustomMaxJSONArrayElementCountRaw, &l.CustomMaxJSONArrayElementCount); err != nil {
		return fmt.Errorf("error parsing max_json_array_element_count: %w", err)
	}
	if l.CustomMaxJSONArrayElementCount < 0 {
		return fmt.Errorf("max_json_array_element_count cannot be negative")
	}

	return nil
}
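As a usage sketch of the parsing above (a hypothetical test, assuming these settings live in the same package as the Listener type shown earlier), raw HCL values are normalized into the typed fields and negative values are rejected:

```go
package configutil // assumed package name for the file that defines Listener

import (
	"fmt"
	"testing"

	"github.com/stretchr/testify/require"
)

// TestListener_JSONLimits_Sketch is a hypothetical illustration of how raw
// values for the new options flow through parseRequestSettings.
func TestListener_JSONLimits_Sketch(t *testing.T) {
	l := &Listener{
		CustomMaxJSONDepthRaw:             "150",
		CustomMaxJSONStringValueLengthRaw: "2048",
	}
	require.NoError(t, l.parseRequestSettings())
	require.Equal(t, int64(150), l.CustomMaxJSONDepth)
	require.Equal(t, int64(2048), l.CustomMaxJSONStringValueLength)
	// Options left unset stay at zero here; the HTTP layer falls back to the
	// package defaults (for example, 10000 object entries) at request time.
	require.Zero(t, l.CustomMaxJSONObjectEntryCount)

	// Negative values are rejected during parsing.
	bad := &Listener{CustomMaxJSONDepthRaw: "-5"}
	err := bad.parseRequestSettings()
	require.Error(t, err)
	fmt.Println(err) // max_json_depth cannot be negative
}
```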
@@ -214,16 +214,24 @@ func TestListener_parseRequestSettings(t *testing.T) {
	t.Parallel()

	tests := map[string]struct {
-		rawMaxRequestSize any
-		expectedMaxRequestSize int64
-		rawMaxRequestDuration any
-		expectedDuration time.Duration
-		rawRequireRequestHeader any
-		expectedRequireRequestHeader bool
-		rawDisableRequestLimiter any
-		expectedDisableRequestLimiter bool
-		isErrorExpected bool
-		errorMessage string
+		rawMaxRequestSize any
+		expectedMaxRequestSize int64
+		rawMaxRequestDuration any
+		expectedDuration time.Duration
+		rawRequireRequestHeader any
+		expectedRequireRequestHeader bool
+		rawDisableRequestLimiter any
+		expectedDisableRequestLimiter bool
+		rawCustomMaxJSONDepth any
+		expectedCustomMaxJSONDepth int64
+		rawCustomMaxJSONStringValueLength any
+		expectedCustomMaxJSONStringValueLength int64
+		rawCustomMaxJSONObjectEntryCount any
+		expectedCustomMaxJSONObjectEntryCount int64
+		rawCustomMaxJSONArrayElementCount any
+		expectedCustomMaxJSONArrayElementCount int64
+		isErrorExpected bool
+		errorMessage string
	}{
		"nil": {
			isErrorExpected: false,
@@ -238,37 +246,65 @@ func TestListener_parseRequestSettings(t *testing.T) {
			expectedMaxRequestSize: 5,
			isErrorExpected: false,
		},
		"max-request-duration-bad": {
			rawMaxRequestDuration: "juan",
		"max-json-depth-bad": {
			rawCustomMaxJSONDepth: "badvalue",
			isErrorExpected: true,
			errorMessage: "error parsing max_request_duration",
			errorMessage: "error parsing max_json_depth",
		},
		"max-request-duration-good": {
			rawMaxRequestDuration: "30s",
			expectedDuration: 30 * time.Second,
			isErrorExpected: false,
		"max-json-depth-negative": {
			rawCustomMaxJSONDepth: "-1",
			isErrorExpected: true,
			errorMessage: "max_json_depth cannot be negative",
		},
		"require-request-header-bad": {
			rawRequireRequestHeader: "juan",
			expectedRequireRequestHeader: false,
			isErrorExpected: true,
			errorMessage: "invalid value for require_request_header",
		"max-json-depth-good": {
			rawCustomMaxJSONDepth: "100",
			expectedCustomMaxJSONDepth: 100,
			isErrorExpected: false,
		},
		"require-request-header-good": {
			rawRequireRequestHeader: "true",
			expectedRequireRequestHeader: true,
			isErrorExpected: false,
		"max-json-string-value-length-bad": {
			rawCustomMaxJSONStringValueLength: "badvalue",
			isErrorExpected: true,
			errorMessage: "error parsing max_json_string_value_length",
		},
		"disable-request-limiter-bad": {
			rawDisableRequestLimiter: "badvalue",
			expectedDisableRequestLimiter: false,
			isErrorExpected: true,
			errorMessage: "invalid value for disable_request_limiter",
		"max-json-string-value-length-negative": {
			rawCustomMaxJSONStringValueLength: "-1",
			isErrorExpected: true,
			errorMessage: "max_json_string_value_length cannot be negative",
		},
		"disable-request-limiter-good": {
			rawDisableRequestLimiter: "true",
			expectedDisableRequestLimiter: true,
			isErrorExpected: false,
		"max-json-string-value-length-good": {
			rawCustomMaxJSONStringValueLength: "2048",
			expectedCustomMaxJSONStringValueLength: 2048,
			isErrorExpected: false,
		},
		"custom-max-json-object-entry-count-bad": {
			rawCustomMaxJSONObjectEntryCount: "badvalue",
			isErrorExpected: true,
			errorMessage: "error parsing max_json_object_entry_count",
		},
		"max-json-object-entry-count-negative": {
			rawCustomMaxJSONObjectEntryCount: "-1",
			isErrorExpected: true,
			errorMessage: "max_json_object_entry_count cannot be negative",
		},
		"max-json-object-entry-count-good": {
			rawCustomMaxJSONObjectEntryCount: "500",
			expectedCustomMaxJSONObjectEntryCount: 500,
			isErrorExpected: false,
		},
		"max-json-array-element-count-bad": {
			rawCustomMaxJSONArrayElementCount: "badvalue",
			isErrorExpected: true,
			errorMessage: "error parsing max_json_array_element_count",
		},
		"max-json-array-element-count-negative": {
			rawCustomMaxJSONArrayElementCount: "-1",
			isErrorExpected: true,
			errorMessage: "max_json_array_element_count cannot be negative",
		},
		"max-json-array-element-count-good": {
			rawCustomMaxJSONArrayElementCount: "500",
			expectedCustomMaxJSONArrayElementCount: 500,
			isErrorExpected: false,
		},
	}
@@ -278,12 +314,15 @@ func TestListener_parseRequestSettings(t *testing.T) {
		t.Run(name, func(t *testing.T) {
			t.Parallel()

			// Configure listener with raw values
			l := &Listener{
				MaxRequestSizeRaw: tc.rawMaxRequestSize,
				MaxRequestDurationRaw: tc.rawMaxRequestDuration,
				RequireRequestHeaderRaw: tc.rawRequireRequestHeader,
				DisableRequestLimiterRaw: tc.rawDisableRequestLimiter,
				MaxRequestSizeRaw: tc.rawMaxRequestSize,
				MaxRequestDurationRaw: tc.rawMaxRequestDuration,
				RequireRequestHeaderRaw: tc.rawRequireRequestHeader,
				DisableRequestLimiterRaw: tc.rawDisableRequestLimiter,
				CustomMaxJSONDepthRaw: tc.rawCustomMaxJSONDepth,
				CustomMaxJSONStringValueLengthRaw: tc.rawCustomMaxJSONStringValueLength,
				CustomMaxJSONObjectEntryCountRaw: tc.rawCustomMaxJSONObjectEntryCount,
				CustomMaxJSONArrayElementCountRaw: tc.rawCustomMaxJSONArrayElementCount,
			}

			err := l.parseRequestSettings()
@@ -293,15 +332,21 @@ func TestListener_parseRequestSettings(t *testing.T) {
			require.Error(t, err)
			require.ErrorContains(t, err, tc.errorMessage)
		default:
			// Assert we got the relevant values.
			require.NoError(t, err)
			require.Equal(t, tc.expectedMaxRequestSize, l.MaxRequestSize)
			require.Equal(t, tc.expectedDuration, l.MaxRequestDuration)
			require.Equal(t, tc.expectedCustomMaxJSONDepth, l.CustomMaxJSONDepth)
			require.Equal(t, tc.expectedCustomMaxJSONStringValueLength, l.CustomMaxJSONStringValueLength)
			require.Equal(t, tc.expectedCustomMaxJSONObjectEntryCount, l.CustomMaxJSONObjectEntryCount)
			require.Equal(t, tc.expectedCustomMaxJSONArrayElementCount, l.CustomMaxJSONArrayElementCount)
			require.Equal(t, tc.expectedRequireRequestHeader, l.RequireRequestHeader)
			require.Equal(t, tc.expectedDisableRequestLimiter, l.DisableRequestLimiter)
			require.Equal(t, tc.expectedDuration, l.MaxRequestDuration)

			// Ensure the state was modified for the raw values.
			require.Nil(t, l.MaxRequestSizeRaw)
			require.Nil(t, l.CustomMaxJSONDepthRaw)
			require.Nil(t, l.CustomMaxJSONStringValueLengthRaw)
			require.Nil(t, l.CustomMaxJSONObjectEntryCountRaw)
			require.Nil(t, l.CustomMaxJSONArrayElementCountRaw)
			require.Nil(t, l.MaxRequestDurationRaw)
			require.Nil(t, l.RequireRequestHeaderRaw)
			require.Nil(t, l.DisableRequestLimiterRaw)
@@ -4,6 +4,7 @@
package jsonutil

import (
+	"bufio"
	"bytes"
	"compress/gzip"
	"encoding/json"
@@ -14,7 +15,7 @@ import (
	"github.com/hashicorp/vault/sdk/helper/compressutil"
)

-// Encodes/Marshals the given object into JSON
+// EncodeJSON encodes/marshals the given object into JSON
func EncodeJSON(in interface{}) ([]byte, error) {
	if in == nil {
		return nil, fmt.Errorf("input for encoding is nil")
@@ -84,7 +85,7 @@ func DecodeJSON(data []byte, out interface{}) error {
	return DecodeJSONFromReader(bytes.NewReader(data), out)
}

-// Decodes/Unmarshals the given io.Reader pointing to a JSON, into a desired object
+// DecodeJSONFromReader decodes/unmarshals the given io.Reader pointing to a JSON, into a desired object
func DecodeJSONFromReader(r io.Reader, out interface{}) error {
	if r == nil {
		return fmt.Errorf("'io.Reader' being decoded is nil")
@@ -101,3 +102,146 @@ func DecodeJSONFromReader(r io.Reader, out interface{}) error {
	// Since 'out' is an interface representing a pointer, pass it to the decoder without an '&'
	return dec.Decode(out)
}

// containerState holds information about an open JSON container (object or array).
type containerState struct {
	Type json.Delim // '{' or '['
	Count int // Number of entries (for objects) or elements (for arrays)
}

// JSONLimits defines the configurable limits for JSON validation.
type JSONLimits struct {
	MaxDepth int
	MaxStringValueLength int
	MaxObjectEntryCount int
	MaxArrayElementCount int
}

// isWhitespace checks if a byte is a JSON whitespace character.
func isWhitespace(b byte) bool {
	return b == ' ' || b == '\t' || b == '\n' || b == '\r'
}

// VerifyMaxDepthStreaming scans the JSON stream to determine its maximum nesting depth
// and enforce various limits. It first checks if the stream is likely JSON before proceeding.
func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits) (int, error) {
	// Use a buffered reader to peek at the stream without consuming it from the original reader.
	bufReader := bufio.NewReader(jsonReader)

	// Find the first non-whitespace character.
	var firstByte byte
	var err error
	for {
		firstByte, err = bufReader.ReadByte()
		if err != nil {
			// If we hit EOF before finding a real character, it's an empty or whitespace-only payload.
			if err == io.EOF {
				return 0, nil
			}
			return 0, err // A different I/O error occurred.
		}
		if !isWhitespace(firstByte) {
			break // Found the first significant character.
		}
	}

	// If the payload doesn't start with '{' or '[', assume it's not a JSON object or array
	// and that our limits do not apply.
	if firstByte != '{' && firstByte != '[' {
		return 0, nil
	}

	fullStreamReader := io.MultiReader(bytes.NewReader([]byte{firstByte}), bufReader)
	decoder := json.NewDecoder(fullStreamReader)
	decoder.UseNumber()

	var (
		maxDepth      = 0
		currentDepth  = 0
		isKeyExpected bool
	)
	containerInfoStack := make([]containerState, 0, limits.MaxDepth)

	for {
		t, err := decoder.Token()
		if err == io.EOF {
			break
		}
		if err != nil {
			// Any error from the decoder is now considered a real error.
			return 0, fmt.Errorf("error reading JSON token: %w", err)
		}

		switch v := t.(type) {
		case json.Delim:
			switch v {
			case '{', '[':
				currentDepth++
				// Check against the limit directly.
				if currentDepth > limits.MaxDepth {
					return 0, fmt.Errorf("JSON input exceeds allowed nesting depth")
				}
				if currentDepth > maxDepth {
					maxDepth = currentDepth
				}

				containerInfoStack = append(containerInfoStack, containerState{Type: v, Count: 0})
				if v == '{' {
					isKeyExpected = true
				}
			case '}', ']':
				if len(containerInfoStack) == 0 {
					return 0, fmt.Errorf("malformed JSON: unmatched closing delimiter '%c'", v)
				}
				top := containerInfoStack[len(containerInfoStack)-1]
				containerInfoStack = containerInfoStack[:len(containerInfoStack)-1]
				currentDepth--
				if (v == '}' && top.Type != '{') || (v == ']' && top.Type != '[') {
					return 0, fmt.Errorf("malformed JSON: mismatched closing delimiter '%c' for opening '%c'", v, top.Type)
				}
				if len(containerInfoStack) > 0 && containerInfoStack[len(containerInfoStack)-1].Type == '{' {
					isKeyExpected = false
				}
			}
		case string:
			if len(v) > limits.MaxStringValueLength {
				return 0, fmt.Errorf("JSON string value exceeds allowed length")
			}
			if len(containerInfoStack) > 0 {
				top := &containerInfoStack[len(containerInfoStack)-1]
				if top.Type == '{' {
					if isKeyExpected {
						top.Count++
						if top.Count > limits.MaxObjectEntryCount {
							return 0, fmt.Errorf("JSON object exceeds allowed entry count")
						}
						isKeyExpected = false
					}
				} else if top.Type == '[' {
					top.Count++
					if top.Count > limits.MaxArrayElementCount {
						return 0, fmt.Errorf("JSON array exceeds allowed element count")
					}
				}
			}
		default: // Handles numbers, booleans, and nulls
			if len(containerInfoStack) > 0 {
				top := &containerInfoStack[len(containerInfoStack)-1]
				if top.Type == '[' {
					top.Count++
					if top.Count > limits.MaxArrayElementCount {
						return 0, fmt.Errorf("JSON array exceeds allowed element count")
					}
				} else if top.Type == '{' {
					isKeyExpected = true
				}
			}
		}
	}

	if len(containerInfoStack) != 0 {
		return 0, fmt.Errorf("malformed JSON, unclosed containers")
	}

	return maxDepth, nil
}
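A short usage sketch of the new helper with deliberately small, hypothetical limits, showing the returned depth for a compliant document and the error for one that breaks a limit:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/hashicorp/vault/sdk/helper/jsonutil"
)

func main() {
	// Tiny limits chosen purely for illustration.
	limits := jsonutil.JSONLimits{
		MaxDepth:             3,
		MaxStringValueLength: 16,
		MaxObjectEntryCount:  4,
		MaxArrayElementCount: 4,
	}

	// Within limits: three levels of nesting, short strings, small containers.
	depth, err := jsonutil.VerifyMaxDepthStreaming(
		strings.NewReader(`{"outer": {"inner": [1, 2, 3]}}`), limits)
	fmt.Println(depth, err) // 3 <nil>

	// Exceeds MaxArrayElementCount (more than 4 elements in one array).
	_, err = jsonutil.VerifyMaxDepthStreaming(
		strings.NewReader(`{"list": [1, 2, 3, 4, 5]}`), limits)
	fmt.Println(err) // JSON array exceeds allowed element count
}
```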
@@ -12,6 +12,46 @@ import (
	"testing"

	"github.com/hashicorp/vault/sdk/helper/compressutil"
	"github.com/stretchr/testify/require"
)

const (
	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
	// This limit is designed to prevent stack exhaustion attacks from deeply
	// nested JSON payloads, which could otherwise lead to a denial-of-service
	// (DoS) vulnerability. The default value of 500 is intentionally generous
	// to support complex but legitimate configurations, while still providing
	// a safeguard against malicious or malformed input. This value is
	// configurable to accommodate unique environmental requirements.
	CustomMaxJSONDepth = 500

	// CustomMaxJSONStringValueLength defines the maximum allowed length for a single
	// string value within a JSON payload, in bytes. This is a critical defense
	// against excessive memory allocation attacks where a client might send a
	// very large string value to exhaust server memory. The default of 1MB
	// (1024 * 1024 bytes) is chosen to comfortably accommodate large secrets
	// such as private keys, certificate chains, or detailed configuration data,
	// without permitting unbounded allocation. This value is configurable.
	CustomMaxJSONStringValueLength = 1024 * 1024 // 1MB

	// CustomMaxJSONObjectEntryCount sets the maximum number of key-value pairs
	// allowed in a single JSON object. This limit helps mitigate the risk of
	// hash-collision denial-of-service (HashDoS) attacks and prevents general
	// resource exhaustion from parsing objects with an excessive number of
	// entries. A default of 10,000 entries is well beyond the scope of typical
	// Vault secrets or configurations, providing a high ceiling for normal
	// operations while ensuring stability. This value is configurable.
	CustomMaxJSONObjectEntryCount = 10000

	// CustomMaxJSONArrayElementCount determines the maximum number of elements
	// permitted in a single JSON array. This is particularly relevant for API
	// endpoints that can return large lists, such as the result of a `LIST`
	// operation on a secrets engine path. The default limit of 10,000 elements
	// prevents a single request from causing excessive memory consumption. While
	// most environments will fall well below this limit, it is configurable for
	// systems that require handling larger datasets, though pagination is the
	// recommended practice for such cases.
	CustomMaxJSONArrayElementCount = 10000
)

func TestJSONUtil_CompressDecompressJSON(t *testing.T) {
@@ -59,7 +99,7 @@ func TestJSONUtil_CompressDecompressJSON(t *testing.T) {
		t.Fatalf("expected a failure")
	}

-	// Compress an object
+	// Compress an object with BestSpeed
	compressedBytes, err = EncodeJSONAndCompress(expected, &compressutil.CompressionConfig{
		Type: compressutil.CompressionTypeGzip,
		GzipCompressionLevel: gzip.BestSpeed,
@@ -142,3 +182,131 @@ func TestJSONUtil_DecodeJSONFromReader(t *testing.T) {
		t.Fatalf("bad: expected:%#v\nactual:%#v", expected, actual)
	}
}

func TestJSONUtil_Limits(t *testing.T) {
	tests := []struct {
		name string
		jsonInput string
		expectError bool
		errorMsg string
	}{
		// Depth Limits
		{
			name: "JSON exceeding max depth",
			jsonInput: generateComplexJSON(CustomMaxJSONDepth + 1),
			expectError: true,
			errorMsg: "JSON input exceeds allowed nesting depth",
		},
		{
			name: "JSON at max allowed depth",
			jsonInput: generateComplexJSON(CustomMaxJSONDepth),
			expectError: false,
		},
		// Malformed JSON
		{
			name: "Malformed - Unmatched opening brace",
			jsonInput: `{"a": {`,
			expectError: true,
			errorMsg: "malformed JSON, unclosed containers",
		},
		{
			name: "Malformed - Unmatched closing brace",
			jsonInput: `{}}`,
			expectError: true,
			errorMsg: "error reading JSON token: invalid character '}' looking for beginning of value",
		},
		// String Length Limits
		{
			name: "String value exceeding max length",
			jsonInput: fmt.Sprintf(`{"key": "%s"}`, strings.Repeat("a", CustomMaxJSONStringValueLength+1)),
			expectError: true,
			errorMsg: "JSON string value exceeds allowed length",
		},
		{
			name: "String at max length",
			jsonInput: fmt.Sprintf(`{"key": "%s"}`, strings.Repeat("a", CustomMaxJSONStringValueLength)),
			expectError: false,
		},
		// Object Entry Count Limits
		{
			name: "Object exceeding max entry count",
			jsonInput: fmt.Sprintf(`{%s}`, generateObjectEntries(CustomMaxJSONObjectEntryCount+1)),
			expectError: true,
			errorMsg: "JSON object exceeds allowed entry count",
		},
		{
			name: "Object at max entry count",
			jsonInput: fmt.Sprintf(`{%s}`, generateObjectEntries(CustomMaxJSONObjectEntryCount)),
			expectError: false,
		},
		// Array Element Count Limits
		{
			name: "Array exceeding max element count",
			jsonInput: fmt.Sprintf(`[%s]`, generateArrayElements(CustomMaxJSONArrayElementCount+1)),
			expectError: true,
			errorMsg: "JSON array exceeds allowed element count",
		},
		{
			name: "Array at max element count",
			jsonInput: fmt.Sprintf(`[%s]`, generateArrayElements(CustomMaxJSONArrayElementCount)),
			expectError: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			limits := JSONLimits{
				MaxDepth: CustomMaxJSONDepth,
				MaxStringValueLength: CustomMaxJSONStringValueLength,
				MaxObjectEntryCount: CustomMaxJSONObjectEntryCount,
				MaxArrayElementCount: CustomMaxJSONArrayElementCount,
			}

			_, err := VerifyMaxDepthStreaming(bytes.NewReader([]byte(tt.jsonInput)), limits)

			if tt.expectError {
				require.Error(t, err, "expected an error but got nil")
				require.Contains(t, err.Error(), tt.errorMsg, "error message mismatch")
			} else {
				require.NoError(t, err, "did not expect an error but got one")
			}
		})
	}
}

// generateComplexJSON generates a valid JSON string with a specified nesting depth.
func generateComplexJSON(depth int) string {
	if depth <= 0 {
		return "{}"
	}
	// Build the nested structure from the inside out.
	json := "1"
	for i := 0; i < depth; i++ {
		json = fmt.Sprintf(`{"a":%s}`, json)
	}
	return json
}

// generateObjectEntries creates a string of object entries for testing.
func generateObjectEntries(count int) string {
	var sb strings.Builder
	for i := 0; i < count; i++ {
		sb.WriteString(fmt.Sprintf(`"key%d":%d`, i, i))
		if i < count-1 {
			sb.WriteString(",")
		}
	}
	return sb.String()
}

// generateArrayElements creates a string of array elements for testing.
func generateArrayElements(count int) string {
	var sb strings.Builder
	for i := 0; i < count; i++ {
		sb.WriteString(fmt.Sprintf("%d", i))
		if i < count-1 {
			sb.WriteString(",")
		}
	}
	return sb.String()
}