Agent Auto Auth Self Healing for Templates (#26172)

* Agent Auto Auth Self Healing for Templates

* Added changelog

* Edited go.sum

* Edit changelog wording
This commit is contained in:
divyaac 2024-03-27 10:06:45 -07:00 committed by GitHub
parent c9dafc1971
commit 6cac3b3bf5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
21 changed files with 606 additions and 41 deletions

3
changelog/26172.txt Normal file
View file

@ -0,0 +1,3 @@
```release-note:improvement
agent: Agent will re-trigger auto auth if the token used for rendering templates has been revoked, has exceeded its number of uses, or is a bogus value.
```

View file

@ -83,8 +83,7 @@ type AgentCommand struct {
// Telemetry object
metricsHelper *metricsutil.MetricsHelper
cleanupGuard sync.Once
cleanupGuard sync.Once
startedCh chan struct{} // for tests
reloadedCh chan struct{} // for tests
@ -787,7 +786,7 @@ func (c *AgentCommand) Run(args []string) int {
})
g.Add(func() error {
err := ss.Run(ctx, ah.OutputCh, sinks)
err := ss.Run(ctx, ah.OutputCh, sinks, ah.AuthInProgress)
c.logger.Info("sinks finished, exiting")
// Start goroutine to drain from ah.OutputCh from this point onward
@ -818,7 +817,7 @@ func (c *AgentCommand) Run(args []string) int {
})
g.Add(func() error {
return ts.Run(ctx, ah.TemplateTokenCh, config.Templates)
return ts.Run(ctx, ah.TemplateTokenCh, config.Templates, ah.AuthInProgress, ah.InvalidToken)
}, func(error) {
// Let the lease cache know this is a shutdown; no need to evict
// everything

View file

@ -0,0 +1,480 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package agent
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"testing"
"time"
ctconfig "github.com/hashicorp/consul-template/config"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
agentConfig "github.com/hashicorp/vault/command/agent/config"
"github.com/hashicorp/vault/command/agent/template"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
token_file "github.com/hashicorp/vault/command/agentproxyshared/auth/token-file"
"github.com/hashicorp/vault/command/agentproxyshared/sink"
"github.com/hashicorp/vault/command/agentproxyshared/sink/file"
vaulthttp "github.com/hashicorp/vault/http"
"github.com/hashicorp/vault/sdk/helper/logging"
"github.com/hashicorp/vault/sdk/helper/pointerutil"
"github.com/hashicorp/vault/vault"
"github.com/stretchr/testify/require"
)
const (
	// lookupSelfTemplateContents is a consul-template snippet that renders the
	// ID of the token used to fetch secrets, via the token lookup-self endpoint.
	// Rendering it therefore proves which token the template server used.
	lookupSelfTemplateContents = `{{ with secret "auth/token/lookup-self" }}{{ .Data.id }}{{ end }}`

	// kvDataTemplateContents reads a username field out of a KV v2 secret;
	// rendering it requires read access to the secret/ mount.
	kvDataTemplateContents = `"{{ with secret "secret/data/otherapp" }}{{ .Data.data.username }}{{ end }}"`

	// kvAccessPolicy grants full CRUD+list access to the kv/ and secret/
	// mounts; tokens created without this policy get permission-denied when
	// rendering kvDataTemplateContents.
	kvAccessPolicy = `
path "/kv/*" {
capabilities = ["create", "read", "update", "delete", "list"]
}
path "/secret/*" {
capabilities = ["create", "read", "update", "delete", "list"]
}`
)
// TestAutoAuthSelfHealing_TokenFileAuth_SinkOutput tests that
// if the token is revoked, Auto Auth is re-triggered and a valid new token
// is written to a sink, and the template is correctly rendered with the new
// token. It wires together a real auth handler, sink server, and template
// server against a single-node test cluster, the same way the agent does.
func TestAutoAuthSelfHealing_TokenFileAuth_SinkOutput(t *testing.T) {
	logger := logging.NewVaultLogger(hclog.Trace)

	// Single-core in-process Vault cluster to authenticate against.
	cluster := vault.NewTestCluster(t,
		&vault.CoreConfig{},
		&vault.TestClusterOptions{
			NumCores:    1,
			HandlerFunc: vaulthttp.Handler,
		})
	cluster.Start()
	defer cluster.Cleanup()
	vault.TestWaitActive(t, cluster.Cores[0].Core)
	serverClient := cluster.Cores[0].Client

	// Unset the environment variable so that agent picks up the right test
	// cluster address (restore the caller's value when the test ends).
	defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress))
	os.Unsetenv(api.EnvVaultAddress)

	// create temp dir for this test run
	tmpDir, err := os.MkdirTemp("", "TestAutoAuth_SelfHealing")
	require.NoError(t, err)
	defer os.RemoveAll(tmpDir)

	// Create the initial token that auto auth will pick up.
	secret, err := serverClient.Auth().Token().Create(&api.TokenCreateRequest{
		Policies: []string{"test-autoauth"},
	})
	require.NoError(t, err)
	token := secret.Auth.ClientToken

	// Write token to vault-token file so the token_file auth method can read it.
	tokenFilePath := filepath.Join(tmpDir, "vault-token")
	tokenFile, err := os.Create(tokenFilePath)
	require.NoError(t, err)
	_, err = tokenFile.WriteString(token)
	require.NoError(t, err)
	err = tokenFile.Close()
	require.NoError(t, err)
	defer os.Remove(tokenFilePath)
	// NOTE(review): redundant — err was already checked above.
	require.NoError(t, err)

	ctx, cancel := context.WithCancel(context.Background())

	// Create auth handler backed by the token file written above.
	am, err := token_file.NewTokenFileAuthMethod(&auth.AuthConfig{
		Logger: logger.Named("auth.method"),
		Config: map[string]interface{}{
			"token_file_path": filepath.Join(tmpDir, "vault-token"),
		},
	})
	require.NoError(t, err)
	ahConfig := &auth.AuthHandlerConfig{
		Logger:                logger.Named("auth.handler"),
		Client:                serverClient,
		EnableExecTokenCh:     true,
		EnableTemplateTokenCh: true,
		// Re-auth when the token file contents change; used later in the test.
		EnableReauthOnNewCredentials: true,
		ExitOnError:                  false,
	}
	ah := auth.NewAuthHandler(ahConfig)
	errCh := make(chan error)
	go func() {
		errCh <- ah.Run(ctx, am)
	}()
	// Surface any premature auth-handler failure; a cancelled context is the
	// normal shutdown path.
	defer func() {
		select {
		case <-ctx.Done():
		case err := <-errCh:
			if err != nil {
				t.Fatal(err)
			}
		}
	}()

	// Create sink file server that writes the auto-auth token to token-file.
	sinkFilePath := filepath.Join(tmpDir, "token-file")
	_, err = os.Create(sinkFilePath)
	defer os.Remove(sinkFilePath)
	require.NoError(t, err)
	config := &sink.SinkConfig{
		Logger: logger.Named("sink.file"),
		Config: map[string]interface{}{
			"path": sinkFilePath,
		},
	}
	fs, err := file.NewFileSink(config)
	if err != nil {
		t.Fatal(err)
	}
	config.Sink = fs
	ss := sink.NewSinkServer(&sink.SinkServerConfig{
		Logger: logger.Named("sink.server"),
		Client: serverClient,
	})
	go func() {
		// AuthInProgress lets the sink server clear the "renewal in progress"
		// flag once the token has been written out.
		errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
	}()
	defer func() {
		select {
		case <-ctx.Done():
		case err := <-errCh:
			if err != nil {
				t.Fatal(err)
			}
		}
	}()

	// Create template server rendering the lookup-self template, polling
	// static secrets every 2s so revocation is noticed quickly.
	sc := template.ServerConfig{
		Logger: logging.NewVaultLogger(hclog.Trace),
		AgentConfig: &agentConfig.Config{
			Vault: &agentConfig.Vault{
				Address:       serverClient.Address(),
				TLSSkipVerify: true,
			},
			TemplateConfig: &agentConfig.TemplateConfig{
				StaticSecretRenderInt: time.Second * 2,
			},
			AutoAuth: &agentConfig.AutoAuth{
				Sinks: []*agentConfig.Sink{{Type: "file", Config: map[string]interface{}{
					"path": filepath.Join(filepath.Join(tmpDir, "lookup-self")),
				}}},
			},
			ExitAfterAuth: false,
		},
		LogLevel:      hclog.Trace,
		LogWriter:     hclog.DefaultOutput,
		ExitAfterAuth: false,
	}
	templateTest := &ctconfig.TemplateConfig{
		Contents: pointerutil.StringPtr(lookupSelfTemplateContents),
	}
	dstFile := fmt.Sprintf("%s/%s", tmpDir, "lookup-self")
	templateTest.Destination = pointerutil.StringPtr(dstFile)
	templatesToRender := []*ctconfig.TemplateConfig{templateTest}
	var server *template.Server
	server = template.NewServer(&sc)
	go func() {
		// InvalidToken is the channel the template server uses to ask the auth
		// handler to re-authenticate when it sees an invalid-token error.
		errCh <- server.Run(ctx, ah.TemplateTokenCh, templatesToRender, ah.AuthInProgress, ah.InvalidToken)
	}()
	defer func() {
		select {
		case <-ctx.Done():
		case err := <-errCh:
			if err != nil {
				t.Fatal(err)
			}
		}
	}()

	// Must be done at the very end so that nothing is blocking
	defer cancel()

	// Trigger template render with the initial token, then confirm the sink
	// received that same token.
	ah.TemplateTokenCh <- token
	fileInfo, err := waitForFiles(t, filepath.Join(tmpDir, "token-file"), time.Time{})
	require.NoError(t, err)
	tokenInSink, err := os.ReadFile(filepath.Join(tmpDir, "token-file"))
	require.NoError(t, err)
	require.Equal(t, string(tokenInSink), token)

	// Revoke Token so the next template render fails with an invalid-token
	// error and triggers self-healing.
	t.Logf("revoking token")
	// NOTE(review): error return intentionally(?) ignored here — consider
	// wrapping in require.NoError.
	serverClient.Auth().Token().RevokeOrphan(token)

	// Create new token
	tokenSecret, err := serverClient.Auth().Token().Create(&api.TokenCreateRequest{
		Policies: []string{"test-autoauth"},
	})
	require.NoError(t, err)
	newToken := tokenSecret.Auth.ClientToken

	// Write token to file; re-triggered auto auth will pick it up from there.
	err = os.WriteFile(filepath.Join(tmpDir, "vault-token"), []byte(newToken), 0o600)
	require.NoError(t, err)

	// Wait for auto-auth to complete (token-file modified after the first write)
	_, err = waitForFiles(t, filepath.Join(tmpDir, "token-file"), fileInfo.ModTime())
	require.NoError(t, err)

	// Verify the new token has been written to a file sink after re-authenticating using lookup-self
	tokenInSink, err = os.ReadFile(filepath.Join(tmpDir, "token-file"))
	require.NoError(t, err)
	require.Equal(t, string(tokenInSink), newToken)

	// Verify the template has now been correctly rendered with the new token
	templateContents, err := os.ReadFile(filepath.Join(tmpDir, "lookup-self"))
	require.NoError(t, err)
	require.Equal(t, string(templateContents), newToken)
}
// Test_NoAutoAuthSelfHealing_BadPolicy tests that auto auth
// is not re-triggered if a token with incorrect policy access
// is used to render a template: a 403 permission-denied is not the same as
// an invalid token, and must not cause re-authentication.
func Test_NoAutoAuthSelfHealing_BadPolicy(t *testing.T) {
	logger := logging.NewVaultLogger(hclog.Trace)

	// Single-core in-process Vault cluster to authenticate against.
	cluster := vault.NewTestCluster(t,
		&vault.CoreConfig{},
		&vault.TestClusterOptions{
			NumCores:    1,
			HandlerFunc: vaulthttp.Handler,
		})
	cluster.Start()
	defer cluster.Cleanup()
	vault.TestWaitActive(t, cluster.Cores[0].Core)
	serverClient := cluster.Cores[0].Client

	// Unset the environment variable so that agent picks up the right test
	// cluster address (restore the caller's value when the test ends).
	defer os.Setenv(api.EnvVaultAddress, os.Getenv(api.EnvVaultAddress))
	os.Unsetenv(api.EnvVaultAddress)

	// Create temp dir for this test run
	tmpDir, err := os.MkdirTemp("", "TestAutoAuth_SelfHealing")
	require.NoError(t, err)
	defer os.RemoveAll(tmpDir)

	// Write a policy with correct access to the secrets.
	// NOTE(review): error return of PutPolicy is ignored — consider
	// wrapping in require.NoError.
	serverClient.Sys().PutPolicy("kv-access", kvAccessPolicy)

	// Create a token without enough policy access to the kv secrets
	secret, err := serverClient.Auth().Token().Create(&api.TokenCreateRequest{})
	require.NoError(t, err)
	token := secret.Auth.ClientToken

	// Write token to vault-token file so the token_file auth method can read it
	tokenFilePath := filepath.Join(tmpDir, "vault-token")
	tokenFile, err := os.Create(tokenFilePath)
	require.NoError(t, err)
	_, err = tokenFile.WriteString(token)
	require.NoError(t, err)
	err = tokenFile.Close()
	require.NoError(t, err)
	defer os.Remove(tokenFilePath)
	// NOTE(review): redundant — err was already checked above.
	require.NoError(t, err)

	ctx, cancel := context.WithCancel(context.Background())

	// Create auth handler backed by the token file written above.
	am, err := token_file.NewTokenFileAuthMethod(&auth.AuthConfig{
		Logger: logger.Named("auth.method"),
		Config: map[string]interface{}{
			"token_file_path": filepath.Join(filepath.Join(tmpDir, "vault-token")),
		},
	})
	require.NoError(t, err)
	ahConfig := &auth.AuthHandlerConfig{
		Logger:                       logger.Named("auth.handler"),
		Client:                       serverClient,
		EnableExecTokenCh:            true,
		EnableReauthOnNewCredentials: true,
		ExitOnError:                  false,
	}
	ah := auth.NewAuthHandler(ahConfig)
	errCh := make(chan error)
	go func() {
		errCh <- ah.Run(ctx, am)
	}()
	// Surface any premature auth-handler failure; a cancelled context is the
	// normal shutdown path.
	defer func() {
		select {
		case <-ctx.Done():
		case err := <-errCh:
			if err != nil {
				t.Fatal(err)
			}
		}
	}()

	// Create sink file server that writes the auto-auth token to token-file.
	sinkFilePath := filepath.Join(tmpDir, "token-file")
	_, err = os.Create(sinkFilePath)
	defer os.Remove(sinkFilePath)
	require.NoError(t, err)
	config := &sink.SinkConfig{
		Logger: logger.Named("sink.file"),
		Config: map[string]interface{}{
			"path": sinkFilePath,
		},
	}
	fs, err := file.NewFileSink(config)
	if err != nil {
		t.Fatal(err)
	}
	config.Sink = fs
	ss := sink.NewSinkServer(&sink.SinkServerConfig{
		Logger: logger.Named("sink.server"),
		Client: serverClient,
	})
	go func() {
		errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
	}()
	defer func() {
		select {
		case <-ctx.Done():
		case err := <-errCh:
			if err != nil {
				t.Fatal(err)
			}
		}
	}()

	// Create template server rendering the KV template the token cannot read.
	sc := template.ServerConfig{
		Logger: logging.NewVaultLogger(hclog.Trace),
		AgentConfig: &agentConfig.Config{
			Vault: &agentConfig.Vault{
				Address:       serverClient.Address(),
				TLSSkipVerify: true,
			},
			TemplateConfig: &agentConfig.TemplateConfig{
				StaticSecretRenderInt: time.Second * 5,
			},
			// Need to create at least one sink output so that it does not exit after rendering
			AutoAuth: &agentConfig.AutoAuth{
				Sinks: []*agentConfig.Sink{
					{
						Type: "file",
						Config: map[string]interface{}{
							"path": filepath.Join(filepath.Join(tmpDir, "kvData")),
						},
					},
				},
			},
			ExitAfterAuth: false,
		},
		LogLevel:      hclog.Trace,
		LogWriter:     hclog.DefaultOutput,
		ExitAfterAuth: false,
	}
	templateTest := &ctconfig.TemplateConfig{
		Contents: pointerutil.StringPtr(kvDataTemplateContents),
	}
	dstFile := fmt.Sprintf("%s/%s", tmpDir, "kvData")
	templateTest.Destination = pointerutil.StringPtr(dstFile)
	templatesToRender := []*ctconfig.TemplateConfig{templateTest}
	var server *template.Server
	server = template.NewServer(&sc)
	go func() {
		errCh <- server.Run(ctx, ah.TemplateTokenCh, templatesToRender, ah.AuthInProgress, ah.InvalidToken)
	}()
	defer func() {
		select {
		case <-ctx.Done():
		case err := <-errCh:
			if err != nil {
				t.Fatal(err)
			}
		}
	}()

	// Must be done at the very end so that nothing is blocking
	defer cancel()

	// Trigger template render, then confirm the sink received the original
	// (under-privileged) token.
	ah.TemplateTokenCh <- token
	_, err = waitForFiles(t, filepath.Join(tmpDir, "token-file"), time.Time{})
	require.NoError(t, err)
	tokenInSink, err := os.ReadFile(filepath.Join(tmpDir, "token-file"))
	require.NoError(t, err)
	require.Equal(t, string(tokenInSink), token)

	// Create new token with the correct policy access
	tokenSecret, err := serverClient.Auth().Token().Create(&api.TokenCreateRequest{
		Policies: []string{"kv-access"},
	})
	require.NoError(t, err)
	newToken := tokenSecret.Auth.ClientToken

	// Write token to file.
	// NOTE(review): this deliberately(?) re-writes the ORIGINAL token, not
	// newToken, so the token-file watcher sees no credential change — the only
	// possible re-auth trigger left is the permission-denied template error,
	// which must not fire. Confirm this is intentional.
	err = os.WriteFile(filepath.Join(tmpDir, "vault-token"), []byte(token), 0o600)
	require.NoError(t, err)

	// Wait for any potential *incorrect* re-triggers of auto auth
	time.Sleep(time.Second * 5)

	// Auto auth should not have been re-triggered because of just a permission denied error
	// Verify that the new token has NOT been written to the token sink
	tokenInSink, err = os.ReadFile(filepath.Join(tmpDir, "token-file"))
	require.NoError(t, err)
	require.NotEqual(t, string(tokenInSink), newToken)
	require.Equal(t, string(tokenInSink), token)
}
func waitForFiles(t *testing.T, filePath string, prevModTime time.Time) (os.FileInfo, error) {
var err error
var fileInfo os.FileInfo
tick := time.Tick(100 * time.Millisecond)
timeout := time.After(5 * time.Second)
// We need to wait for the templates to render...
for {
select {
case <-timeout:
return nil, fmt.Errorf("timed out waiting for templates to render, last error: %v", err)
case <-tick:
}
fileInfo, err = os.Stat(filePath)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
continue
}
return nil, err
}
// Keep waiting until the file has been updated since the previous mod time
if !fileInfo.ModTime().After(prevModTime) {
continue
}
return fileInfo, nil
}
}

View file

@ -15,7 +15,7 @@ import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/auth/credentials/providers"
"github.com/aliyun/alibaba-cloud-sdk-go/services/sts"
uuid "github.com/hashicorp/go-uuid"
"github.com/hashicorp/go-uuid"
vaultalicloud "github.com/hashicorp/vault-plugin-auth-alicloud"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
@ -147,7 +147,7 @@ func TestAliCloudEndToEnd(t *testing.T) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -256,7 +256,7 @@ func testAppRoleEndToEnd(t *testing.T, removeSecretIDFile bool, bindSecretID boo
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {
@ -639,7 +639,7 @@ func testAppRoleWithWrapping(t *testing.T, bindSecretID bool, secretIDLess bool,
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {

View file

@ -10,7 +10,7 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
credAppRole "github.com/hashicorp/vault/builtin/credential/approle"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
@ -184,7 +184,7 @@ func TestTokenPreload_UsingAutoAuth(t *testing.T) {
}()
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -14,8 +14,8 @@ import (
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/sts"
hclog "github.com/hashicorp/go-hclog"
uuid "github.com/hashicorp/go-uuid"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-uuid"
"github.com/hashicorp/vault/api"
vaultaws "github.com/hashicorp/vault/builtin/credential/aws"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
@ -163,7 +163,7 @@ func TestAWSEndToEnd(t *testing.T) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -12,13 +12,13 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
log "github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
credAppRole "github.com/hashicorp/vault/builtin/credential/approle"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
agentapprole "github.com/hashicorp/vault/command/agentproxyshared/auth/approle"
cache "github.com/hashicorp/vault/command/agentproxyshared/cache"
"github.com/hashicorp/vault/command/agentproxyshared/cache"
"github.com/hashicorp/vault/command/agentproxyshared/sink"
"github.com/hashicorp/vault/command/agentproxyshared/sink/file"
"github.com/hashicorp/vault/command/agentproxyshared/sink/inmem"
@ -241,7 +241,7 @@ func TestCache_UsingAutoAuthToken(t *testing.T) {
inmemSinkConfig.Sink = inmemSink
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config, inmemSinkConfig})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config, inmemSinkConfig}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -12,7 +12,7 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
vaultcert "github.com/hashicorp/vault/builtin/credential/cert"
"github.com/hashicorp/vault/builtin/logical/pki"
@ -198,7 +198,7 @@ func testCertEndToEnd(t *testing.T, withCertRoleName, ahWrapping bool) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {
@ -536,7 +536,7 @@ func TestCertEndToEnd_CertsInConfig(t *testing.T) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -10,7 +10,7 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
credCF "github.com/hashicorp/vault-plugin-auth-cf"
"github.com/hashicorp/vault-plugin-auth-cf/testing/certificates"
cfAPI "github.com/hashicorp/vault-plugin-auth-cf/testing/cf"
@ -150,7 +150,7 @@ func TestCFEndToEnd(t *testing.T) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -11,7 +11,7 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
vaultjwt "github.com/hashicorp/vault-plugin-auth-jwt"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
@ -223,7 +223,7 @@ func testJWTEndToEnd(t *testing.T, ahWrapping, useSymlink, removeJWTAfterReading
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -10,7 +10,7 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
vaultoci "github.com/hashicorp/vault-plugin-auth-oci"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
@ -165,7 +165,7 @@ func TestOCIEndToEnd(t *testing.T) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -14,17 +14,21 @@ import (
"fmt"
"io"
"math"
"strings"
sync "sync/atomic"
"time"
ctconfig "github.com/hashicorp/consul-template/config"
"github.com/hashicorp/consul-template/manager"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/command/agent/config"
"github.com/hashicorp/vault/command/agent/internal/ctmanager"
"github.com/hashicorp/vault/helper/useragent"
"github.com/hashicorp/vault/sdk/helper/backoff"
"github.com/hashicorp/vault/sdk/helper/consts"
"github.com/hashicorp/vault/sdk/helper/pointerutil"
"github.com/hashicorp/vault/sdk/logical"
"go.uber.org/atomic"
)
@ -90,7 +94,7 @@ func NewServer(conf *ServerConfig) *Server {
// Run kicks off the internal Consul Template runner, and listens for changes to
// the token from the AuthHandler. If Done() is called on the context, shut down
// the Runner and return
func (ts *Server) Run(ctx context.Context, incoming chan string, templates []*ctconfig.TemplateConfig) error {
func (ts *Server) Run(ctx context.Context, incoming chan string, templates []*ctconfig.TemplateConfig, tokenRenewalInProgress *sync.Bool, invalidTokenCh chan error) error {
if incoming == nil {
return errors.New("template server: incoming channel is nil")
}
@ -156,7 +160,6 @@ func (ts *Server) Run(ctx context.Context, incoming chan string, templates []*ct
case <-ctx.Done():
ts.runner.Stop()
return nil
case token := <-incoming:
if token != *latestToken {
ts.logger.Info("template server received new token")
@ -243,6 +246,31 @@ func (ts *Server) Run(ctx context.Context, incoming chan string, templates []*ct
ts.runner.Stop()
return nil
}
default:
// We are using default instead of a new case block to prioritize the case where <-incoming has a new value over
// receiving an error message from the consul-template server
select {
case err := <-ts.runner.ServerErrCh:
var responseError *api.ResponseError
ok := errors.As(err, &responseError)
if !ok {
ts.logger.Error("template server: could not extract error response")
continue
}
if responseError.StatusCode == 403 && strings.Contains(responseError.Error(), logical.ErrInvalidToken.Error()) && !tokenRenewalInProgress.Load() {
ts.logger.Info("template server: received invalid token error")
// Drain the error channel before sending a new error
select {
case <-invalidTokenCh:
default:
}
invalidTokenCh <- err
}
default:
continue
}
}
}
}

View file

@ -11,6 +11,7 @@ import (
"net/http/httptest"
"os"
"strings"
sync "sync/atomic"
"testing"
"time"
@ -387,8 +388,9 @@ func TestServerRun(t *testing.T) {
}
errCh := make(chan error)
serverErrCh := make(chan error, 1)
go func() {
errCh <- server.Run(ctx, templateTokenCh, templatesToRender)
errCh <- server.Run(ctx, templateTokenCh, templatesToRender, &sync.Bool{}, serverErrCh)
}()
// send a dummy value to trigger the internal Runner to query for secret
@ -492,8 +494,9 @@ func TestNewServerLogLevels(t *testing.T) {
defer cancel()
errCh := make(chan error)
serverErrCh := make(chan error, 1)
go func() {
errCh <- server.Run(ctx, templateTokenCh, templatesToRender)
errCh <- server.Run(ctx, templateTokenCh, templatesToRender, &sync.Bool{}, serverErrCh)
}()
// send a dummy value to trigger auth so the server will exit

View file

@ -111,7 +111,7 @@ func TestTokenFileEndToEnd(t *testing.T) {
Client: client,
})
go func() {
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config})
errCh <- ss.Run(ctx, ah.OutputCh, []*sink.SinkConfig{config}, ah.AuthInProgress)
}()
defer func() {
select {

View file

@ -10,6 +10,7 @@ import (
"math"
"math/rand"
"net/http"
"sync/atomic"
"time"
"github.com/armon/go-metrics"
@ -51,6 +52,8 @@ type AuthHandler struct {
OutputCh chan string
TemplateTokenCh chan string
ExecTokenCh chan string
AuthInProgress *atomic.Bool
InvalidToken chan error
token string
userAgent string
metricsSignifier string
@ -92,6 +95,8 @@ func NewAuthHandler(conf *AuthHandlerConfig) *AuthHandler {
OutputCh: make(chan string, 1),
TemplateTokenCh: make(chan string, 1),
ExecTokenCh: make(chan string, 1),
InvalidToken: make(chan error, 1),
AuthInProgress: &atomic.Bool{},
token: conf.Token,
logger: conf.Logger,
client: conf.Client,
@ -180,6 +185,17 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error {
first := true
for {
// We will unset this bool in sink.go once the token has been written to
// any sinks, or the sink server stops
ah.AuthInProgress.Store(true)
// Drain any Invalid Token errors from the channel that could have been sent before AuthInProgress
// was set to true
select {
case <-ah.InvalidToken:
ah.logger.Info("renewal already in progress, draining extra auth renewal triggers")
default:
// Do nothing, keep going
}
select {
case <-ctx.Done():
return nil
@ -494,6 +510,14 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error {
case <-credCh:
ah.logger.Info("auth method found new credentials, re-authenticating")
break LifetimeWatcherLoop
default:
select {
case <-ah.InvalidToken:
ah.logger.Info("invalid token found, re-authenticating")
break LifetimeWatcherLoop
default:
continue
}
}
}
}

View file

@ -13,8 +13,8 @@ import (
"testing"
"time"
hclog "github.com/hashicorp/go-hclog"
uuid "github.com/hashicorp/go-uuid"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-uuid"
"github.com/hashicorp/vault/command/agentproxyshared/sink"
"github.com/hashicorp/vault/sdk/helper/logging"
)
@ -37,8 +37,10 @@ func TestSinkServer(t *testing.T) {
in := make(chan string)
sinks := []*sink.SinkConfig{fs1, fs2}
errCh := make(chan error)
tokenRenewalInProgress := &atomic.Bool{}
tokenRenewalInProgress.Store(true)
go func() {
errCh <- ss.Run(ctx, in, sinks)
errCh <- ss.Run(ctx, in, sinks, tokenRenewalInProgress)
}()
// Seed a token
@ -67,6 +69,10 @@ func TestSinkServer(t *testing.T) {
t.Fatalf("expected %s, got %s", uuidStr, string(fileBytes))
}
}
if tokenRenewalInProgress.Load() {
t.Fatal("should have reset tokenRenewalInProgress to false")
}
}
type badSink struct {
@ -104,8 +110,11 @@ func TestSinkServerRetry(t *testing.T) {
in := make(chan string)
sinks := []*sink.SinkConfig{{Sink: b1}, {Sink: b2}}
errCh := make(chan error)
tokenRenewalInProgress := &atomic.Bool{}
tokenRenewalInProgress.Store(true)
go func() {
errCh <- ss.Run(ctx, in, sinks)
errCh <- ss.Run(ctx, in, sinks, tokenRenewalInProgress)
}()
// Seed a token
@ -120,6 +129,10 @@ func TestSinkServerRetry(t *testing.T) {
t.Fatal("bad try count")
}
if !tokenRenewalInProgress.Load() {
t.Fatal("token renewal should still be in progress, sink server has not exited")
}
in <- "good"
time.Sleep(2 * time.Second)
@ -138,4 +151,8 @@ func TestSinkServerRetry(t *testing.T) {
t.Fatal(err)
}
}
if tokenRenewalInProgress.Load() {
t.Fatal("should have reset tokenRenewalInProgress to false")
}
}

View file

@ -13,7 +13,7 @@ import (
"sync/atomic"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/helper/dhutil"
"github.com/hashicorp/vault/sdk/helper/jsonutil"
@ -72,7 +72,7 @@ func NewSinkServer(conf *SinkServerConfig) *SinkServer {
// Run executes the server's run loop, which is responsible for reading
// in new tokens and pushing them out to the various sinks.
func (ss *SinkServer) Run(ctx context.Context, incoming chan string, sinks []*SinkConfig) error {
func (ss *SinkServer) Run(ctx context.Context, incoming chan string, sinks []*SinkConfig, tokenWriteInProgress *atomic.Bool) error {
latestToken := new(string)
writeSink := func(currSink *SinkConfig, currToken string) error {
if currToken != *latestToken {
@ -101,6 +101,7 @@ func (ss *SinkServer) Run(ctx context.Context, incoming chan string, sinks []*Si
ss.logger.Info("starting sink server")
defer func() {
tokenWriteInProgress.Store(false)
ss.logger.Info("sink server stopped")
}()
@ -138,6 +139,7 @@ func (ss *SinkServer) Run(ctx context.Context, incoming chan string, sinks []*Si
}
} else {
ss.logger.Trace("no sinks, ignoring new token")
tokenWriteInProgress.Store(false)
if ss.exitAfterAuth {
ss.logger.Trace("no sinks, exitAfterAuth, bye")
return nil
@ -164,8 +166,11 @@ func (ss *SinkServer) Run(ctx context.Context, incoming chan string, sinks []*Si
sinkCh <- st
}
} else {
if atomic.LoadInt32(ss.remaining) == 0 && ss.exitAfterAuth {
return nil
if atomic.LoadInt32(ss.remaining) == 0 {
tokenWriteInProgress.Store(false)
if ss.exitAfterAuth {
return nil
}
}
}
}

View file

@ -744,7 +744,7 @@ func (c *ProxyCommand) Run(args []string) int {
})
g.Add(func() error {
err := ss.Run(ctx, ah.OutputCh, sinks)
err := ss.Run(ctx, ah.OutputCh, sinks, ah.AuthInProgress)
c.logger.Info("sinks finished, exiting")
// Start goroutine to drain from ah.OutputCh from this point onward

4
go.mod
View file

@ -83,7 +83,7 @@ require (
github.com/hashicorp/cap v0.5.0
github.com/hashicorp/cap/ldap v0.0.0-20230914221201-c4eecc7e31f7
github.com/hashicorp/cli v1.1.6
github.com/hashicorp/consul-template v0.36.1-0.20240213145952-6c83e89b48af
github.com/hashicorp/consul-template v0.37.3
github.com/hashicorp/consul/api v1.27.0
github.com/hashicorp/errwrap v1.1.0
github.com/hashicorp/eventlogger v0.2.8
@ -448,7 +448,7 @@ require (
github.com/microsoft/kiota-serialization-text-go v1.0.0 // indirect
github.com/microsoftgraph/msgraph-sdk-go v1.32.0 // indirect
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1 // indirect
github.com/miekg/dns v1.1.43 // indirect
github.com/miekg/dns v1.1.50 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/mitchellh/hashstructure v1.1.0 // indirect
github.com/mitchellh/pointerstructure v1.2.1 // indirect

6
go.sum
View file

@ -2328,6 +2328,8 @@ github.com/hashicorp/cli v1.1.6 h1:CMOV+/LJfL1tXCOKrgAX0uRKnzjj/mpmqNXloRSy2K8=
github.com/hashicorp/cli v1.1.6/go.mod h1:MPon5QYlgjjo0BSoAiN0ESeT5fRzDjVRp+uioJ0piz4=
github.com/hashicorp/consul-template v0.36.1-0.20240213145952-6c83e89b48af h1:DrkJy2yiqrHIVEqgtn4X0A7j5wjy5MxrJXvGNVwtSsY=
github.com/hashicorp/consul-template v0.36.1-0.20240213145952-6c83e89b48af/go.mod h1:bvidXKwpfXzJ1X4wDw68OXnVxy5k7HLOHhOf5gnQr3M=
github.com/hashicorp/consul-template v0.37.3 h1:zN03UckwrrRB0EwH3+Io8S5h0Zl+Aa9RpYkma207t2g=
github.com/hashicorp/consul-template v0.37.3/go.mod h1:ckdzFLHdF/1A4L11ifxkzy3gXHeF1YbKSbXkN6W33+s=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/api v1.27.0 h1:gmJ6DPKQog1426xsdmgk5iqDyoRiNc+ipBdJOqKQFjc=
github.com/hashicorp/consul/api v1.27.0/go.mod h1:JkekNRSou9lANFdt+4IKx3Za7XY0JzzpQjEb4Ivo1c8=
@ -2883,6 +2885,8 @@ github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKju
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg=
github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4=
github.com/miekg/dns v1.1.50 h1:DQUfb9uc6smULcREF09Uc+/Gd46YWqJd5DbpPE9xkcA=
github.com/miekg/dns v1.1.50/go.mod h1:e3IlAVfNqAllflbibAZEWOXOQ+Ynzk/dDozDxY7XnME=
github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a h1:eU8j/ClY2Ty3qdHnn0TyW3ivFoPC/0F1gQZz8yTxbbE=
@ -3802,6 +3806,7 @@ golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
@ -4206,6 +4211,7 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=