packer/command/hcl2_upgrade.go
Lucas Bajolet b6141fd532 command: copy AWS plugin structures for upgrade
The hcl2_upgrade command transforms a JSON template into an HCL2
template for use with Packer.

The command is quite fragile already, but given that this is the last
remaining fragment that causes Packer to depend on the AWS SDK directly,
we can do away with it.

This commit therefore imports the definitions for AWS access config, so
we can extract this information from the JSON template, and include it
in the definition of the output source for AWS, since we manage this one
differently from other sources.

This allows us to not depend on the AWS plugin directly, which in turn
makes Packer not need to link with the AWS plugin when compiling the
executable.
We are still depending on the AWS SDK for now since the SDK exposes an
aws_secretsmanager function that can be used for interpolation (legacy
JSON interpolation to be clear), so this cannot be removed for now, but
we should consider some form of remediation in the future.
2024-12-17 17:42:58 -05:00

1487 lines
44 KiB
Go

// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package command
import (
"bytes"
"context"
"fmt"
"io"
"os"
"path/filepath"
"reflect"
"regexp"
"sort"
"strconv"
"strings"
texttemplate "text/template"
"text/template/parse"
"github.com/hashicorp/go-multierror"
"github.com/hashicorp/hcl/v2/hclwrite"
hcl2shim "github.com/hashicorp/packer-plugin-sdk/hcl2helper"
"github.com/hashicorp/packer-plugin-sdk/template"
"github.com/hashicorp/packer/packer"
"github.com/mitchellh/mapstructure"
"github.com/posener/complete"
"github.com/zclconf/go-cty/cty"
)
// Annotation headers written into the generated HCL2 file when the
// -with-annotations flag is set; each one is emitted immediately before the
// corresponding generated section.
const (
	// Top-of-file banner describing the generated file and its caveats.
	hcl2UpgradeFileHeader = `# This file was autogenerated by the 'packer hcl2_upgrade' command. We
# recommend double checking that everything is correct before going forward. We
# also recommend treating this file as disposable. The HCL2 blocks in this
# file can be moved to other files. For example, the variable blocks could be
# moved to their own 'variables.pkr.hcl' file, etc. Those files need to be
# suffixed with '.pkr.hcl' to be visible to Packer. To use multiple files at
# once they also need to be in the same folder. 'packer inspect folder/'
# will describe to you what is in that folder.
# Avoid mixing go templating calls ( for example ` + "```{{ upper(`string`) }}```" + ` )
# and HCL2 calls (for example '${ var.string_value_example }' ). They won't be
# executed together and the outcome will be unknown.
`
	// Emitted before the generated input variable blocks.
	inputVarHeader = `
# All generated input variables will be of 'string' type as this is how Packer JSON
# views them; you can change their type later on. Read the variables type
# constraints documentation
# https://www.packer.io/docs/templates/hcl_templates/variables#type-constraints for more info.`
	// Emitted before the generated locals blocks.
	localsVarHeader = `
# All locals variables are generated from variables that uses expressions
# that are not allowed in HCL2 variables.
# Read the documentation for locals blocks here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/locals`
	// Emitted before the packer {} block.
	packerBlockHeader = `
# See https://www.packer.io/docs/templates/hcl_templates/blocks/packer for more info
`
	// Emitted before the generated source blocks.
	sourcesHeader = `
# source blocks are generated from your builders; a source can be referenced in
# build blocks. A build block runs provisioner and post-processors on a
# source. Read the documentation for source blocks here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/source`
	// Emitted before the build block.
	buildHeader = `
# a build block invokes sources and runs provisioning steps on them. The
# documentation for build blocks can be found here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/build
`
	// Emitted before generated amazon-ami data source blocks.
	amazonAmiDataHeader = `
# The amazon-ami data block is generated from your amazon builder source_ami_filter; a data
# from this block can be referenced in source and locals blocks.
# Read the documentation for data blocks here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/data
# Read the documentation for the Amazon AMI Data Source here:
# https://www.packer.io/plugins/datasources/amazon/ami`
	// Emitted before generated amazon-secretsmanager data source blocks.
	amazonSecretsManagerDataHeader = `
# The amazon-secretsmanager data block is generated from your aws_secretsmanager template function; a data
# from this block can be referenced in source and locals blocks.
# Read the documentation for data blocks here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/data
# Read the documentation for the Amazon Secrets Manager Data Source here:
# https://www.packer.io/plugins/datasources/amazon/secretsmanager`
)
// Package-level state shared between the parsing passes: the template
// function replacements in transposeTemplatingCalls record here what the
// writer phases later need to emit.
// NOTE(review): this mutable package state makes the command non-reentrant;
// running two upgrades in one process would share these maps.
var (
	amazonSecretsManagerMap = map[string]map[string]interface{}{} // generated data source name -> {name, key} config
	localsVariableMap       = map[string]string{}                 // variable key, recorded when the variable becomes a local
	timestamp               = false                               // set when a {{timestamp}} call was seen
	isotime                 = false                               // set when an {{isotime}} call with arguments was seen
	strftime                = false                               // set when a {{strftime}} call with arguments was seen
)
// knownPlugins represent the HashiCorp maintained plugins that we can confidently
// construct a required_plugins block for. Keys are plugin names as they
// appear in component types; values are the plugin source addresses.
var knownPlugins = map[string]string{
	"amazon":        "github.com/hashicorp/amazon",
	"ansible":       "github.com/hashicorp/ansible",
	"azure":         "github.com/hashicorp/azure",
	"docker":        "github.com/hashicorp/docker",
	"googlecompute": "github.com/hashicorp/googlecompute",
	"qemu":          "github.com/hashicorp/qemu",
	"vagrant":       "github.com/hashicorp/vagrant",
	"virtualbox":    "github.com/hashicorp/virtualbox",
	"vmware":        "github.com/hashicorp/vmware",
	"vsphere":       "github.com/hashicorp/vsphere",
}
// unknownPluginName represents any plugin not in knownPlugins or bundled into Packer
const unknownPluginName string = "unknown"

// HCL2UpgradeCommand implements the `packer hcl2_upgrade` CLI command, which
// converts a legacy JSON template into an HCL2 template.
type HCL2UpgradeCommand struct {
	Meta
}
// Run parses the command-line arguments and, when they are valid, performs
// the JSON-to-HCL2 upgrade. It returns the process exit code.
func (c *HCL2UpgradeCommand) Run(args []string) int {
	ctx, cleanup := handleTermInterrupt(c.Ui)
	defer cleanup()

	cla, code := c.ParseArgs(args)
	if code == 0 {
		return c.RunContext(ctx, cla)
	}
	return code
}
// ParseArgs interprets the raw CLI arguments into an HCL2UpgradeArgs value.
// The second return value is the exit code: non-zero when the flags cannot
// be parsed or when exactly one TEMPLATE argument was not provided.
func (c *HCL2UpgradeCommand) ParseArgs(args []string) (*HCL2UpgradeArgs, int) {
	var cla HCL2UpgradeArgs

	flags := c.Meta.FlagSet("hcl2_upgrade")
	flags.Usage = func() { c.Ui.Say(c.Help()) }
	cla.AddFlagSets(flags)
	if err := flags.Parse(args); err != nil {
		return &cla, 1
	}

	positional := flags.Args()
	if len(positional) != 1 {
		flags.Usage()
		return &cla, 1
	}
	cla.Path = positional[0]

	// Default the output file to "<template>.pkr.hcl" next to the input.
	if cla.OutputFile == "" {
		cla.OutputFile = cla.Path + ".pkr.hcl"
	}
	return &cla, 0
}
// BlockParser is implemented by each section generator: Parse extracts the
// relevant data from the JSON template, and Write appends the generated HCL2
// text to the output buffer.
type BlockParser interface {
	Parse(*template.Template) error
	Write(*bytes.Buffer)
}
// RunContext performs the actual upgrade: it loads the JSON template, runs
// each section parser over it (packer, variables, locals, amazon data
// sources, sources, build), and writes the concatenated, hclwrite-formatted
// result to cla.OutputFile. Individual parse errors are reported but do not
// abort the run; the returned exit code is non-zero when any occurred.
func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs) int {
	var output io.Writer
	if err := os.MkdirAll(filepath.Dir(cla.OutputFile), 0755); err != nil {
		c.Ui.Error(fmt.Sprintf("Failed to create output directory: %v", err))
		return 1
	}
	if f, err := os.Create(cla.OutputFile); err == nil {
		output = f
		defer f.Close()
	} else {
		c.Ui.Error(fmt.Sprintf("Failed to create output file: %v", err))
		return 1
	}
	// The banner is written directly (unformatted); everything else is
	// buffered and formatted below.
	if cla.WithAnnotations {
		if _, err := output.Write([]byte(hcl2UpgradeFileHeader)); err != nil {
			c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
			return 1
		}
	}
	hdl, ret := c.GetConfigFromJSON(&cla.MetaArgs)
	if ret != 0 {
		c.Ui.Error("Failed to get config from JSON")
		return 1
	}
	core := hdl.(*packer.Core)
	// Initialization errors are ignored on purpose: a best-effort upgrade of
	// a partially-valid template is still useful.
	if err := core.Initialize(packer.InitializeOptions{
		// Note: this is always true here as the DAG is only usable for
		// HCL2 configs, so since the command only works on JSON templates,
		// we can safely use the phased approach, which changes nothing.
		UseSequential: true,
	}); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following initialization error: %v", err))
	}
	tpl := core.Template
	// Parse blocks. Order matters: variables must run before locals (locals
	// are collected while parsing variables), and the amazon-ami pass
	// rewrites builder configs before sources are generated.
	packerBlock := &PackerParser{
		WithAnnotations: cla.WithAnnotations,
	}
	if err := packerBlock.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following Parse error: %v", err))
		ret = 1
	}
	variables := &VariableParser{
		WithAnnotations: cla.WithAnnotations,
	}
	if err := variables.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following variables.Parse error: %v", err))
		ret = 1
	}
	locals := &LocalsParser{
		LocalsOut:       variables.localsOut,
		WithAnnotations: cla.WithAnnotations,
	}
	if err := locals.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following locals.Parse error: %v", err))
		ret = 1
	}
	builders := []*template.Builder{}
	{
		// sort builders to avoid map's randomness
		for _, builder := range tpl.Builders {
			builders = append(builders, builder)
		}
	}
	sort.Slice(builders, func(i, j int) bool {
		return builders[i].Type+builders[i].Name < builders[j].Type+builders[j].Name
	})
	amazonAmiDatasource := &AmazonAmiDatasourceParser{
		Builders:        builders,
		WithAnnotations: cla.WithAnnotations,
	}
	if err := amazonAmiDatasource.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following amazonAmiDatasource.Parse error: %v", err))
		ret = 1
	}
	sources := &SourceParser{
		Builders:        builders,
		BuilderPlugins:  c.Meta.CoreConfig.Components.PluginConfig.Builders,
		WithAnnotations: cla.WithAnnotations,
	}
	if err := sources.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following sources.Parse error: %v", err))
		ret = 1
	}
	build := &BuildParser{
		Builders:        builders,
		WithAnnotations: cla.WithAnnotations,
	}
	if err := build.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following build.Parse error: %v", err))
		ret = 1
	}
	// This parser runs last so it can pick up every data source recorded in
	// amazonSecretsManagerMap while the other sections were transposed.
	amazonSecretsDatasource := &AmazonSecretsDatasourceParser{
		WithAnnotations: cla.WithAnnotations,
	}
	if err := amazonSecretsDatasource.Parse(tpl); err != nil {
		c.Ui.Error(fmt.Sprintf("Ignoring following amazonSecretsDatasource.Parse error: %v", err))
		ret = 1
	}
	// Write file
	out := &bytes.Buffer{}
	for _, block := range []BlockParser{
		packerBlock,
		variables,
		amazonSecretsDatasource,
		amazonAmiDatasource,
		locals,
		sources,
		build,
	} {
		block.Write(out)
	}
	if _, err := output.Write(hclwrite.Format(out.Bytes())); err != nil {
		c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
		return 1
	}
	c.Ui.Say(fmt.Sprintf("Successfully created %s. Exit %d", cla.OutputFile, ret))
	return ret
}
// UnhandleableArgumentError describes a legacy template-engine call that has
// no automatic HCL2 equivalent; it renders as a comment telling the user how
// to upgrade the call by hand.
type UnhandleableArgumentError struct {
	Call           string // name of the legacy function, e.g. "lower"
	Correspondance string // suggested HCL2 replacement expression(s)
	Docs           string // documentation URL(s) for the replacement
}

// Error renders the upgrade advice; all but the first line are HCL comments.
func (uc UnhandleableArgumentError) Error() string {
	return fmt.Sprintf(`unhandled %q call:
# there is no way to automatically upgrade the %[1]q call.
# Please manually upgrade to %s
# Visit %s for more infos.`, uc.Call, uc.Correspondance, uc.Docs)
}
func fallbackReturn(err error, s []byte) []byte {
if strings.Contains(err.Error(), "unhandled") {
return append([]byte(fmt.Sprintf("\n# %s\n", err)), s...)
}
return append([]byte(fmt.Sprintf("\n# could not parse template for following block: %q\n", err)), s...)
}
// reTemplate writes a new template to `out` and escapes all unknown
// variables so that we don't interpret them later on when interpreting the
// template. Only action, list and text nodes are handled; any other node
// type yields an error.
func reTemplate(nd parse.Node, out io.Writer, funcs texttemplate.FuncMap) error {
	switch node := nd.(type) {
	case *parse.ActionNode:
		commands := node.Pipe.Cmds
		// Pipelines ("{{ a | b }}") are written back verbatim.
		if len(commands) > 1 {
			fmt.Fprintf(out, "%s", node.String())
			return nil
		}
		command := commands[0]
		// Function calls with parameters are also left untouched.
		if len(command.Args) > 1 {
			fmt.Fprintf(out, "%s", node.String())
			return nil
		}
		// Known function names are preserved; anything not in the func map
		// gets escaped so a later execution will not try to interpret it.
		if _, known := funcs[command.Args[0].String()]; known {
			fmt.Fprintf(out, "%s", node.String())
			return nil
		}
		fmt.Fprintf(out, "{{ \"{{\" }} %s {{ \"}}\" }}", command.String())
		// TODO maybe node.Pipe.Decls? Though in Packer templates they're not
		// supported officially so they can be left aside for now
		return nil
	case *parse.ListNode:
		for _, child := range node.Nodes {
			if err := reTemplate(child, out, funcs); err != nil {
				return err
			}
		}
		return nil
	case *parse.TextNode:
		_, err := fmt.Fprintf(out, "%s", node.Text)
		return err
	default:
		return fmt.Errorf("unhandled node type %s", reflect.TypeOf(nd))
	}
}
// transposeTemplatingCalls executes parts of blocks as go template files and
// replaces their result with their hcl2 variant. If something goes wrong at
// any step, the original text — prefixed with an explanatory comment — is
// returned instead so the user can upgrade it manually.
//
// Side effects: records aws_secretsmanager usages in amazonSecretsManagerMap
// and flips the package-level timestamp/isotime/strftime flags.
func transposeTemplatingCalls(s []byte) []byte {
	// Collects UnhandleableArgumentError notices produced while executing
	// the template; they are prepended to the output as comments.
	funcErrors := &multierror.Error{
		ErrorFormat: func(es []error) string {
			if len(es) == 1 {
				return fmt.Sprintf("# 1 error occurred upgrading the following block:\n\t# %s\n", es[0])
			}
			points := make([]string, len(es))
			for i, err := range es {
				if i == len(es)-1 {
					points[i] = fmt.Sprintf("# %s", err)
					continue
				}
				points[i] = fmt.Sprintf("# %s\n", err)
			}
			return fmt.Sprintf(
				"# %d errors occurred upgrading the following block:\n\t%s",
				len(es), strings.Join(points, "\n\t"))
		},
	}
	// Each legacy JSON engine function is mapped to its HCL2 counterpart;
	// functions with no direct equivalent record an error explaining the
	// manual upgrade path and echo the original call back.
	funcMap := texttemplate.FuncMap{
		"aws_secretsmanager": func(a ...string) string {
			// Two-argument form: secret name + key. Reuse an existing data
			// source with the same name/key, otherwise register a new one.
			if len(a) == 2 {
				for key, config := range amazonSecretsManagerMap {
					nameOk := config["name"] == a[0]
					keyOk := config["key"] == a[1]
					if nameOk && keyOk {
						return fmt.Sprintf("${data.amazon-secretsmanager.%s.value}", key)
					}
				}
				id := fmt.Sprintf("autogenerated_%d", len(amazonSecretsManagerMap)+1)
				amazonSecretsManagerMap[id] = map[string]interface{}{
					"name": a[0],
					"key":  a[1],
				}
				return fmt.Sprintf("${data.amazon-secretsmanager.%s.value}", id)
			}
			// One-argument form: secret name only.
			for key, config := range amazonSecretsManagerMap {
				nameOk := config["name"] == a[0]
				if nameOk {
					return fmt.Sprintf("${data.amazon-secretsmanager.%s.value}", key)
				}
			}
			id := fmt.Sprintf("autogenerated_%d", len(amazonSecretsManagerMap)+1)
			amazonSecretsManagerMap[id] = map[string]interface{}{
				"name": a[0],
			}
			return fmt.Sprintf("${data.amazon-secretsmanager.%s.value}", id)
		},
		"timestamp": func() string {
			timestamp = true
			return "${local.timestamp}"
		},
		"isotime": func(a ...string) string {
			if len(a) == 0 {
				// returns rfc3339 formatted string.
				return "${timestamp()}"
			}
			// otherwise a valid isotime func has one input.
			isotime = true
			return fmt.Sprintf("${legacy_isotime(\"%s\")}", a[0])
		},
		"strftime": func(a ...string) string {
			if len(a) == 0 {
				// returns rfc3339 formatted string.
				return "${timestamp()}"
			}
			strftime = true
			return fmt.Sprintf("${legacy_strftime(\"%s\")}", a[0])
		},
		"user": func(in string) string {
			if _, ok := localsVariableMap[in]; ok {
				// variable is now a local
				return fmt.Sprintf("${local.%s}", in)
			}
			return fmt.Sprintf("${var.%s}", in)
		},
		"env": func(in string) string {
			return fmt.Sprintf("${env(%q)}", in)
		},
		"build": func(a string) string {
			return fmt.Sprintf("${build.%s}", a)
		},
		"data": func(a string) string {
			return fmt.Sprintf("${data.%s}", a)
		},
		"template_dir": func() string {
			return "${path.root}"
		},
		"pwd": func() string {
			return "${path.cwd}"
		},
		"packer_version": func() string {
			return "${packer.version}"
		},
		"uuid": func() string {
			return "${uuidv4()}"
		},
		"lower": func(a string) (string, error) {
			funcErrors = multierror.Append(funcErrors, UnhandleableArgumentError{
				"lower",
				"`lower(var.example)`",
				"https://www.packer.io/docs/templates/hcl_templates/functions/string/lower",
			})
			return fmt.Sprintf("{{ lower `%s` }}", a), nil
		},
		"upper": func(a string) (string, error) {
			funcErrors = multierror.Append(funcErrors, UnhandleableArgumentError{
				"upper",
				"`upper(var.example)`",
				"https://www.packer.io/docs/templates/hcl_templates/functions/string/upper",
			})
			return fmt.Sprintf("{{ upper `%s` }}", a), nil
		},
		"split": func(a, b string, n int) (string, error) {
			funcErrors = multierror.Append(funcErrors, UnhandleableArgumentError{
				"split",
				"`split(separator, string)`",
				"https://www.packer.io/docs/templates/hcl_templates/functions/string/split",
			})
			return fmt.Sprintf("{{ split `%s` `%s` %d }}", a, b, n), nil
		},
		"replace": func(a, b string, n int, c string) (string, error) {
			funcErrors = multierror.Append(funcErrors, UnhandleableArgumentError{
				"replace",
				"`replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`",
				"https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace",
			})
			return fmt.Sprintf("{{ replace `%s` `%s` `%s` %d }}", a, b, c, n), nil
		},
		"replace_all": func(a, b, c string) (string, error) {
			funcErrors = multierror.Append(funcErrors, UnhandleableArgumentError{
				"replace_all",
				"`replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`",
				"https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace",
			})
			return fmt.Sprintf("{{ replace_all `%s` `%s` `%s` }}", a, b, c), nil
		},
		"clean_resource_name": func(a string) (string, error) {
			funcErrors = multierror.Append(funcErrors, UnhandleableArgumentError{
				"clean_resource_name",
				"use custom validation rules, `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`",
				"https://packer.io/docs/templates/hcl_templates/variables#custom-validation-rules" +
					" , https://www.packer.io/docs/templates/hcl_templates/functions/string/replace" +
					" or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace",
			})
			return fmt.Sprintf("{{ clean_resource_name `%s` }}", a), nil
		},
		"build_name": func() string {
			return "${build.name}"
		},
		"build_type": func() string {
			return "${build.type}"
		},
	}
	tpl, err := texttemplate.New("hcl2_upgrade").
		Funcs(funcMap).
		Parse(string(s))
	if err != nil {
		if strings.Contains(err.Error(), "unexpected \"\\\\\" in operand") {
			// This error occurs if the operand in the text template used
			// escaped quoting \" instead of backtick quoting `
			// Create a regex to do a string replace on this block, to fix
			// quoting.
			q := fixQuoting(string(s))
			unquoted := []byte(q)
			tpl, err = texttemplate.New("hcl2_upgrade").
				Funcs(funcMap).
				Parse(string(unquoted))
			if err != nil {
				return fallbackReturn(err, unquoted)
			}
		} else {
			return fallbackReturn(err, s)
		}
	}
	// Escape calls to unknown functions so the execution below does not
	// choke on them, then re-parse the escaped text.
	retempl := &bytes.Buffer{}
	if err := reTemplate(tpl.Root, retempl, funcMap); err != nil {
		return fallbackReturn(err, s)
	}
	tpl, err = texttemplate.New("hcl2_upgrade").
		Funcs(funcMap).
		Parse(retempl.String())
	if err != nil {
		// Guard: Parse returns a nil template on failure, so without this
		// check the Execute call below would panic instead of falling back.
		return fallbackReturn(err, s)
	}
	str := &bytes.Buffer{}
	if err := tpl.Execute(str, nil); err != nil {
		return fallbackReturn(err, s)
	}
	out := str.Bytes()
	if funcErrors.Len() > 0 {
		return append([]byte(fmt.Sprintf("\n%s", funcErrors)), out...)
	}
	return out
}
// variableTransposeTemplatingCalls executes parts of blocks as go template files and replaces
// their result with their hcl2 variant for variables block only. If something goes wrong the template
// containing the go template string is returned.
// In variableTransposeTemplatingCalls the definition of aws_secretsmanager function will create a data source
// with the same name as the variable.
// The returned isLocal flag is true when the value references any template
// function that forces the variable to become an HCL2 local.
func variableTransposeTemplatingCalls(s []byte) (isLocal bool, body []byte) {
	setIsLocal := func(a ...string) string {
		isLocal = true
		return ""
	}
	// Make locals from variables using valid template engine,
	// expect the ones using only 'env'
	// ref: https://www.packer.io/docs/templates/legacy_json_templates/engine#template-engine
	funcMap := texttemplate.FuncMap{
		"aws_secretsmanager": setIsLocal,
		"timestamp":          setIsLocal,
		"isotime":            setIsLocal,
		"strftime":           setIsLocal,
		"user":               setIsLocal,
		"env": func(in string) string {
			return fmt.Sprintf("${env(%q)}", in)
		},
		"template_dir":   setIsLocal,
		"pwd":            setIsLocal,
		"packer_version": setIsLocal,
		"uuid":           setIsLocal,
		"lower":          setIsLocal,
		"upper":          setIsLocal,
		// The three below need distinct signatures, so they cannot reuse
		// setIsLocal directly.
		"split": func(_, _ string, _ int) (string, error) {
			isLocal = true
			return "", nil
		},
		"replace": func(_, _ string, _ int, _ string) (string, error) {
			isLocal = true
			return "", nil
		},
		"replace_all": func(_, _, _ string) (string, error) {
			isLocal = true
			return "", nil
		},
	}
	tpl, err := texttemplate.New("hcl2_upgrade").
		Funcs(funcMap).
		Parse(string(s))
	if err != nil {
		if strings.Contains(err.Error(), "unexpected \"\\\\\" in operand") {
			// This error occurs if the operand in the text template used
			// escaped quoting \" instead of backtick quoting `
			// Create a regex to do a string replace on this block, to fix
			// quoting.
			q := fixQuoting(string(s))
			unquoted := []byte(q)
			tpl, err = texttemplate.New("hcl2_upgrade").
				Funcs(funcMap).
				Parse(string(unquoted))
			if err != nil {
				return isLocal, fallbackReturn(err, unquoted)
			}
		} else {
			return isLocal, fallbackReturn(err, s)
		}
	}
	// Escape unknown function calls, then re-parse the escaped text.
	retempl := &bytes.Buffer{}
	if err := reTemplate(tpl.Root, retempl, funcMap); err != nil {
		return isLocal, fallbackReturn(err, s)
	}
	tpl, err = texttemplate.New("hcl2_upgrade").
		Funcs(funcMap).
		Parse(retempl.String())
	if err != nil {
		// Guard: Parse returns a nil template on failure, so without this
		// check the Execute call below would panic instead of falling back.
		return isLocal, fallbackReturn(err, s)
	}
	str := &bytes.Buffer{}
	if err := tpl.Execute(str, nil); err != nil {
		return isLocal, fallbackReturn(err, s)
	}
	return isLocal, str.Bytes()
}
// referencedUserVariables executes parts of blocks as go template files finding user variables referenced
// within the template. This function should be called once to extract those variables referenced via the {{user `...`}}
// template function. The resulting map will contain variables defined in the JSON variables property, and some that
// are declared via var-files; to avoid duplicates the results of this function should be reconciled against tpl.Variables.
func referencedUserVariables(s []byte) map[string]*template.Variable {
userVars := make([]string, 0)
funcMap := texttemplate.FuncMap{
"user": func(in string) string {
userVars = append(userVars, in)
return ""
},
}
tpl, err := texttemplate.New("hcl2_upgrade").
Funcs(funcMap).
Parse(string(s))
if err != nil {
return nil
}
if err := tpl.Execute(&bytes.Buffer{}, nil); err != nil {
return nil
}
vars := make(map[string]*template.Variable)
for _, v := range userVars {
vars[v] = &template.Variable{
Key: v,
Required: true,
}
}
return vars
}
// jsonBodyToHCL2Body converts a decoded JSON object into HCL2, appending the
// result to out. Keys are processed in sorted order so the output is
// deterministic. Because JSON cannot distinguish an HCL2 attribute
// (tags = { ... }) from a nested block (source_ami_filter { ... }),
// heuristics based on the value types decide which form to generate.
func jsonBodyToHCL2Body(out *hclwrite.Body, kvs map[string]interface{}) {
	ks := []string{}
	for k := range kvs {
		ks = append(ks, k)
	}
	sort.Strings(ks)
	for _, k := range ks {
		value := kvs[k]
		switch value := value.(type) {
		case map[string]interface{}:
			var mostComplexElem interface{}
			for _, randomElem := range value {
				// These keys must always become nested blocks: breaking out
				// leaves mostComplexElem nil, so the default branch below
				// emits a block.
				if k == "linux_options" || k == "network_interface" || k == "shared_image_gallery" {
					break
				}
				// HACK: we take the most complex element of that map because
				// in HCL2, map of objects can be bodies, for example:
				// map containing object: source_ami_filter {} ( body )
				// simple string/string map: tags = {} ) ( attribute )
				//
				// if we could not find an object in this map then it's most
				// likely a plain map and so we guess it should be and
				// attribute. Though now if value refers to something that is
				// an object but only contains a string or a bool; we could
				// generate a faulty object. For example a (somewhat invalid)
				// source_ami_filter where only `most_recent` is set.
				switch randomElem.(type) {
				case string, int, float64, bool:
					if mostComplexElem != nil {
						continue
					}
					mostComplexElem = randomElem
				default:
					mostComplexElem = randomElem
				}
			}
			switch mostComplexElem.(type) {
			case string, int, float64, bool:
				// Only scalar values found: emit as a plain map attribute.
				out.SetAttributeValue(k, hcl2shim.HCL2ValueFromConfigValue(value))
			default:
				// Complex (or nil) element: emit as a nested block, recursing.
				nestedBlockBody := out.AppendNewBlock(k, nil).Body()
				jsonBodyToHCL2Body(nestedBlockBody, value)
			}
		case map[string]string, map[string]int, map[string]float64:
			out.SetAttributeValue(k, hcl2shim.HCL2ValueFromConfigValue(value))
		case []interface{}:
			if len(value) == 0 {
				continue
			}
			var mostComplexElem interface{}
			for _, randomElem := range value {
				// HACK: we take the most complex element of that slice because
				// in hcl2 slices of plain types can be arrays, for example:
				// simple string type: owners = ["0000000000"]
				// object: launch_block_device_mappings {}
				switch randomElem.(type) {
				case string, int, float64, bool:
					if mostComplexElem != nil {
						continue
					}
					mostComplexElem = randomElem
				default:
					mostComplexElem = randomElem
				}
			}
			switch mostComplexElem.(type) {
			case map[string]interface{}:
				// this is an object in a slice; so we unwrap it. We
				// could try to remove any 's' suffix in the key, but
				// this might not work everywhere.
				for i := range value {
					value := value[i].(map[string]interface{})
					nestedBlockBody := out.AppendNewBlock(k, nil).Body()
					jsonBodyToHCL2Body(nestedBlockBody, value)
				}
				continue
			default:
				out.SetAttributeValue(k, hcl2shim.HCL2ValueFromConfigValue(value))
			}
		default:
			// Scalars (string/number/bool/nil) map directly to attributes.
			out.SetAttributeValue(k, hcl2shim.HCL2ValueFromConfigValue(value))
		}
	}
}
// isSensitiveVariable reports whether key appears among the template's
// sensitive variables.
func isSensitiveVariable(key string, vars []*template.Variable) bool {
	for i := range vars {
		if vars[i].Key == key {
			return true
		}
	}
	return false
}
// Help returns the long-form usage text for the hcl2_upgrade command.
func (*HCL2UpgradeCommand) Help() string {
	return strings.TrimSpace(`
Usage: packer hcl2_upgrade [options] TEMPLATE
Will transform your JSON template into an HCL2 configuration.
Options:
-output-file=path Set output file name. By default this will be the
TEMPLATE name with ".pkr.hcl" appended to it. To be a
valid Packer HCL template, it must have the suffix
".pkr.hcl"
-with-annotations Add helper annotation comments to the file to help new
HCL2 users understand the template format.
`)
}
// Synopsis returns the one-line description shown in the command listing.
func (*HCL2UpgradeCommand) Synopsis() string {
	return "transform a JSON template into an HCL2 configuration"
}

// AutocompleteArgs returns the predictor for positional arguments; the
// template path cannot be predicted, so nothing is suggested.
func (*HCL2UpgradeCommand) AutocompleteArgs() complete.Predictor {
	return complete.PredictNothing
}

// AutocompleteFlags returns the shell-completion predictions for flags
// (none are provided).
func (*HCL2UpgradeCommand) AutocompleteFlags() complete.Flags {
	return complete.Flags{}
}
// Specific blocks parser responsible to parse and write the block

// PackerParser generates the top-level `packer {}` block (required_version
// and required_plugins) from the JSON template.
type PackerParser struct {
	WithAnnotations bool
	out             []byte // rendered HCL; empty when no packer block is needed
}
// Parse renders the packer {} block when the template either pins a minimum
// Packer version or uses known plugins; otherwise no output is produced.
func (p *PackerParser) Parse(tpl *template.Template) error {
	reqPlugins, err := p.generateRequiredPluginsBlock(tpl)
	if err != nil {
		return err
	}
	// Nothing to emit when there is neither a version constraint nor plugins.
	if reqPlugins == nil && tpl.MinVersion == "" {
		return nil
	}
	file := hclwrite.NewEmptyFile()
	packerBody := file.Body().AppendNewBlock("packer", nil).Body()
	if v := tpl.MinVersion; v != "" {
		packerBody.SetAttributeValue("required_version", cty.StringVal(fmt.Sprintf(">= %s", v)))
	}
	if reqPlugins != nil {
		packerBody.AppendBlock(reqPlugins)
	}
	p.out = file.Bytes()
	return nil
}
// gatherPluginsFromTemplate returns the sorted source addresses of every
// known (HashiCorp-maintained) plugin used by the template's builders,
// provisioners and post-processors. Components that do not map to a known
// plugin are skipped. Returns nil when no known plugin is used.
func gatherPluginsFromTemplate(tpl *template.Template) []string {
	plugins := map[string]struct{}{}
	// record resolves a component type to its plugin and remembers its
	// source address, ignoring components of unknown plugins. Extracted to
	// avoid repeating the same lookup for each component category.
	record := func(componentType string) {
		name := knownPluginComponent(componentType)
		if name == unknownPluginName {
			return
		}
		plugins[knownPlugins[name]] = struct{}{}
	}
	for _, b := range tpl.Builders {
		record(b.Type)
	}
	for _, p := range tpl.Provisioners {
		record(p.Type)
	}
	for _, pps := range tpl.PostProcessors {
		for _, pp := range pps {
			record(pp.Type)
		}
	}
	if len(plugins) == 0 {
		return nil
	}
	retPlugins := make([]string, 0, len(plugins))
	for plugin := range plugins {
		retPlugins = append(retPlugins, plugin)
	}
	// Sort for deterministic output (map iteration order is random).
	sort.Strings(retPlugins)
	return retPlugins
}
// generateRequiredPluginsBlock builds the required_plugins {} block listing
// every known plugin the template uses, or returns (nil, nil) when none.
func (p *PackerParser) generateRequiredPluginsBlock(tpl *template.Template) (*hclwrite.Block, error) {
	plugins := gatherPluginsFromTemplate(tpl)
	if len(plugins) == 0 {
		return nil, nil
	}
	block := hclwrite.NewBlock("required_plugins", nil)
	body := block.Body()
	for _, source := range plugins {
		// The attribute name is the bare plugin name, e.g.
		// "github.com/hashicorp/amazon" -> "amazon".
		shortName := strings.Replace(source, "github.com/hashicorp/", "", 1)
		body.SetAttributeValue(shortName, cty.ObjectVal(map[string]cty.Value{
			"source":  cty.StringVal(source),
			"version": cty.StringVal("~> 1"),
		}))
	}
	return block, nil
}
// Write appends the rendered packer block (if any) to out, preceded by the
// explanatory header when annotations were requested.
func (p *PackerParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(packerBlockHeader)
	}
	out.Write(p.out)
}
// VariableParser converts the template's JSON variables into HCL2 input
// variable blocks, diverting expression-bearing variables into locals.
type VariableParser struct {
	WithAnnotations bool
	variablesOut    []byte // rendered variable blocks
	localsOut       []byte // rendered locals, handed to LocalsParser for writing
}
// makeLocal registers variable as an HCL2 local instead of an input variable
// (used when its default contains expressions HCL2 variables cannot hold).
// Non-sensitive variables are appended as attributes to the shared locals {}
// body (localBody) and *hasLocals is set so the caller flushes that block;
// sensitive variables are rendered as a standalone `local "<key>" {}` block
// and returned as bytes, since that form carries the sensitive attribute.
// Either way the key is recorded in localsVariableMap so later {{user}}
// calls resolve to ${local.<key>}.
// NOTE(review): the localsContent parameter is unused here — confirm whether
// it can be dropped. The map values "local" vs "locals" are only ever
// presence-checked elsewhere, so the distinction appears informational only.
func makeLocal(variable *template.Variable, sensitive bool, localBody *hclwrite.Body, localsContent *hclwrite.File, hasLocals *bool) []byte {
	if sensitive {
		// Create Local block because this is sensitive
		sensitiveLocalContent := hclwrite.NewEmptyFile()
		body := sensitiveLocalContent.Body()
		body.AppendNewline()
		sensitiveLocalBody := body.AppendNewBlock("local", []string{variable.Key}).Body()
		sensitiveLocalBody.SetAttributeValue("sensitive", cty.BoolVal(true))
		sensitiveLocalBody.SetAttributeValue("expression", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
		localsVariableMap[variable.Key] = "local"
		return sensitiveLocalContent.Bytes()
	}
	localBody.SetAttributeValue(variable.Key, hcl2shim.HCL2ValueFromConfigValue(variable.Default))
	localsVariableMap[variable.Key] = "locals"
	*hasLocals = true
	return []byte{}
}
// makeVariable renders a single `variable "<key>" {}` block for the given
// JSON variable: typed as string, carrying its default when the variable is
// optional or has a non-empty default, and flagged sensitive when requested.
func makeVariable(variable *template.Variable, sensitive bool) []byte {
	file := hclwrite.NewEmptyFile()
	root := file.Body()
	root.AppendNewline()
	body := root.AppendNewBlock("variable", []string{variable.Key}).Body()
	// All generated variables are strings; JSON offers no richer typing.
	body.SetAttributeRaw("type", hclwrite.Tokens{&hclwrite.Token{Bytes: []byte("string")}})
	if !variable.Required || variable.Default != "" {
		body.SetAttributeValue("default", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
	}
	if sensitive {
		body.SetAttributeValue("sensitive", cty.BoolVal(true))
	}
	return file.Bytes()
}
// Parse converts the template's variables into HCL2. Variables whose default
// references template expressions must become locals (HCL2 input variables
// need values known at parse time); the rest become string-typed input
// variable blocks. Results accumulate in p.variablesOut and p.localsOut.
func (p *VariableParser) Parse(tpl *template.Template) error {
	// Shared locals {} block that plain (non-sensitive) locals are appended to.
	localsContent := hclwrite.NewEmptyFile()
	localsBody := localsContent.Body()
	localsBody.AppendNewline()
	localBody := localsBody.AppendNewBlock("locals", nil).Body()
	hasLocals := false
	if len(p.variablesOut) == 0 {
		p.variablesOut = []byte{}
	}
	if len(p.localsOut) == 0 {
		p.localsOut = []byte{}
	}
	if len(tpl.Variables) == 0 {
		tpl.Variables = make(map[string]*template.Variable)
	}
	// JSON supports variable declaration via var-files.
	// User variables that might be defined in a var-file
	// but not in the actual JSON template should be accounted for.
	userVars := referencedUserVariables(tpl.RawContents)
	for name, variable := range userVars {
		if _, ok := tpl.Variables[name]; ok {
			continue
		}
		tpl.Variables[name] = variable
	}
	variables := []*template.Variable{}
	{
		// sort variables to avoid map's randomness
		for _, variable := range tpl.Variables {
			variables = append(variables, variable)
		}
		sort.Slice(variables, func(i, j int) bool {
			return variables[i].Key < variables[j].Key
		})
	}
	for _, variable := range variables {
		// Interpolate the JSON value first to determine whether it must be
		// a local: anything referencing variable expressions cannot be a
		// plain HCL2 variable, as those need values known at parsing time.
		isLocal, _ := variableTransposeTemplatingCalls([]byte(variable.Default))
		sensitive := isSensitiveVariable(variable.Key, tpl.SensitiveVariables)
		// Create final HCL block and append.
		if isLocal {
			sensitiveBlocks := makeLocal(variable, sensitive, localBody, localsContent, &hasLocals)
			if len(sensitiveBlocks) > 0 {
				p.localsOut = append(p.localsOut, transposeTemplatingCalls(sensitiveBlocks)...)
			}
			continue
		}
		varbytes := makeVariable(variable, sensitive)
		_, out := variableTransposeTemplatingCalls(varbytes)
		p.variablesOut = append(p.variablesOut, out...)
	}
	if hasLocals {
		p.localsOut = append(p.localsOut, transposeTemplatingCalls(localsContent.Bytes())...)
	}
	return nil
}
// Write appends the generated variable blocks to out, preceded by the
// explanatory header when annotations were requested.
func (p *VariableParser) Write(out *bytes.Buffer) {
	if len(p.variablesOut) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(inputVarHeader)
	}
	out.Write(p.variablesOut)
}
// LocalsParser emits the locals/local blocks collected while the template's
// variables were parsed, plus helper locals for legacy time functions.
type LocalsParser struct {
	WithAnnotations bool
	LocalsOut       []byte // rendered locals, produced by VariableParser
}

// Parse is a no-op for this parser.
func (p *LocalsParser) Parse(tpl *template.Template) error {
	// Locals were parsed together with Variables (see VariableParser.Parse)
	return nil
}
// Write emits the timestamp-replacement local (when a {{timestamp}} call was
// seen), advisory notes for the legacy time functions, and then the locals
// collected during variable parsing.
func (p *LocalsParser) Write(out *bytes.Buffer) {
	if timestamp {
		_, _ = out.Write([]byte("\n"))
		if p.WithAnnotations {
			fmt.Fprintln(out, `# "timestamp" template function replacement`)
		}
		fmt.Fprintln(out, `locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }`)
	}
	if isotime {
		fmt.Fprintln(out, `# The "legacy_isotime" function has been provided for backwards compatability, but we recommend switching to the timestamp and formatdate functions.`)
	}
	if strftime {
		fmt.Fprintln(out, `# The "legacy_strftime" function has been provided for backwards compatability, but we recommend switching to the timestamp and formatdate functions.`)
	}
	if len(p.LocalsOut) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(localsVarHeader)
	}
	out.Write(p.LocalsOut)
}
// AmazonSecretsDatasourceParser renders the amazon-secretsmanager data
// sources recorded in amazonSecretsManagerMap while template functions were
// being transposed.
type AmazonSecretsDatasourceParser struct {
	WithAnnotations bool
	out             []byte // rendered data blocks
}
// Parse renders one `data "amazon-secretsmanager" "<name>" {}` block per
// entry recorded in amazonSecretsManagerMap by the aws_secretsmanager
// template-function replacement. Names are sorted for deterministic output.
func (p *AmazonSecretsDatasourceParser) Parse(_ *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	names := make([]string, 0, len(amazonSecretsManagerMap))
	for name := range amazonSecretsManagerMap {
		names = append(names, name)
	}
	sort.Strings(names)
	for _, name := range names {
		file := hclwrite.NewEmptyFile()
		root := file.Body()
		root.AppendNewline()
		body := root.AppendNewBlock("data", []string{"amazon-secretsmanager", name}).Body()
		jsonBodyToHCL2Body(body, amazonSecretsManagerMap[name])
		p.out = append(p.out, file.Bytes()...)
	}
	return nil
}
// Write copies the generated secretsmanager data sources into out, preceded
// by the data-source header comment when annotations are enabled.
func (p *AmazonSecretsDatasourceParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(amazonSecretsManagerDataHeader)
	}
	out.Write(p.out)
}
// AmazonAmiDatasourceParser converts `source_ami_filter` configuration found
// in amazon-* builders into `data "amazon-ami"` blocks, rewriting each
// builder to reference the data source instead.
type AmazonAmiDatasourceParser struct {
	// Builders are the JSON template's builders; amazon-* entries may be
	// mutated by Parse (source_ami_filter removed, source_ami added).
	Builders []*template.Builder
	// WithAnnotations controls whether an explanatory header comment is
	// written before the generated data sources.
	WithAnnotations bool
	// out accumulates the rendered HCL2 bytes produced by Parse.
	out []byte
}
// Parse scans every amazon-* builder for a `source_ami_filter`, turns each
// distinct filter into an amazon-ami data source, and rewrites the builder
// to reference that data source via `source_ami`. Builders sharing an
// identical filter (after the AWS access config is merged in) reuse the same
// autogenerated data source instead of generating a duplicate.
// The template argument is unused; the parser works off p.Builders.
func (p *AmazonAmiDatasourceParser) Parse(_ *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	// Filters already turned into data sources; index j corresponds to the
	// data source named "autogenerated_<j+1>".
	amazonAmiFilters := []map[string]interface{}{}
	// i is the 1-based counter used to name the next new data source.
	i := 1
	for _, builder := range p.Builders {
		if strings.HasPrefix(builder.Type, "amazon-") {
			if sourceAmiFilter, ok := builder.Config["source_ami_filter"]; ok {
				sourceAmiFilterCfg := map[string]interface{}{}
				if err := mapstructure.Decode(sourceAmiFilter, &sourceAmiFilterCfg); err != nil {
					return fmt.Errorf("Failed to write amazon-ami data source: %v", err)
				}
				// Merge the builder's AWS access configuration into the
				// filter so the data source authenticates the same way the
				// builder does.
				sourceAmiFilterCfg, err := copyAWSAccessConfig(sourceAmiFilterCfg, builder.Config)
				if err != nil {
					return err
				}
				// Check whether an identical filter was already emitted; if
				// so, reuse its data source name instead of creating a new
				// one.
				duplicate := false
				dataSourceName := fmt.Sprintf("autogenerated_%d", i)
				for j, filter := range amazonAmiFilters {
					if reflect.DeepEqual(filter, sourceAmiFilterCfg) {
						duplicate = true
						dataSourceName = fmt.Sprintf("autogenerated_%d", j+1)
						continue
					}
				}
				// This is a hack...
				// Use templating so that it could be correctly transformed later into a data resource
				sourceAmiDataRef := fmt.Sprintf("{{ data `amazon-ami.%s.id` }}", dataSourceName)
				if duplicate {
					// Point the builder at the existing data source; no new
					// block is generated and the counter is not advanced.
					delete(builder.Config, "source_ami_filter")
					builder.Config["source_ami"] = sourceAmiDataRef
					continue
				}
				amazonAmiFilters = append(amazonAmiFilters, sourceAmiFilterCfg)
				delete(builder.Config, "source_ami_filter")
				builder.Config["source_ami"] = sourceAmiDataRef
				i++
				// Render the new data source block and append it to the
				// parser's output, transposing any templating calls into
				// HCL2 expressions.
				datasourceContent := hclwrite.NewEmptyFile()
				body := datasourceContent.Body()
				body.AppendNewline()
				sourceBody := body.AppendNewBlock("data", []string{"amazon-ami", dataSourceName}).Body()
				jsonBodyToHCL2Body(sourceBody, sourceAmiFilterCfg)
				p.out = append(p.out, transposeTemplatingCalls(datasourceContent.Bytes())...)
			}
		}
	}
	return nil
}
// AssumeRoleConfig is a local copy of the AWS plugin's assume-role settings,
// kept here so hcl2_upgrade can decode them from JSON templates without
// depending on the AWS plugin directly.
type AssumeRoleConfig struct {
	AssumeRoleARN               string            `mapstructure:"role_arn" required:"false"`
	AssumeRoleDurationSeconds   int               `mapstructure:"duration_seconds" required:"false"`
	AssumeRoleExternalID        string            `mapstructure:"external_id" required:"false"`
	AssumeRolePolicy            string            `mapstructure:"policy" required:"false"`
	AssumeRolePolicyARNs        []string          `mapstructure:"policy_arns" required:"false"`
	AssumeRoleSessionName       string            `mapstructure:"session_name" required:"false"`
	AssumeRoleTags              map[string]string `mapstructure:"tags" required:"false"`
	AssumeRoleTransitiveTagKeys []string          `mapstructure:"transitive_tag_keys" required:"false"`
}
// VaultAWSEngineOptions is a local copy of the AWS plugin's Vault AWS secrets
// engine options, used to decode the `vault_aws_engine` builder setting.
type VaultAWSEngineOptions struct {
	Name       string `mapstructure:"name"`
	RoleARN    string `mapstructure:"role_arn"`
	TTL        string `mapstructure:"ttl" required:"false"`
	EngineName string `mapstructure:"engine_name"`
}
// AWSPollingConfig is a local copy of the AWS plugin's resource-polling
// options, used to decode the `aws_polling` builder setting.
type AWSPollingConfig struct {
	MaxAttempts  int `mapstructure:"max_attempts" required:"false"`
	DelaySeconds int `mapstructure:"delay_seconds" required:"false"`
}
// AwsAccessConfig is a local copy of the AWS plugin's access configuration.
// Its mapstructure tags define the set of builder keys that are copied onto
// generated amazon-ami data sources (see copyAWSAccessConfig), letting
// hcl2_upgrade avoid a direct dependency on the AWS plugin.
type AwsAccessConfig struct {
	AccessKey             string                `mapstructure:"access_key" required:"true"`
	AssumeRole            AssumeRoleConfig      `mapstructure:"assume_role" required:"false"`
	CustomEndpointEc2     string                `mapstructure:"custom_endpoint_ec2" required:"false"`
	CredsFilename         string                `mapstructure:"shared_credentials_file" required:"false"`
	DecodeAuthZMessages   bool                  `mapstructure:"decode_authorization_messages" required:"false"`
	InsecureSkipTLSVerify bool                  `mapstructure:"insecure_skip_tls_verify" required:"false"`
	MaxRetries            int                   `mapstructure:"max_retries" required:"false"`
	MFACode               string                `mapstructure:"mfa_code" required:"false"`
	ProfileName           string                `mapstructure:"profile" required:"false"`
	RawRegion             string                `mapstructure:"region" required:"true"`
	SecretKey             string                `mapstructure:"secret_key" required:"true"`
	SkipMetadataApiCheck  bool                  `mapstructure:"skip_metadata_api_check"`
	SkipCredsValidation   bool                  `mapstructure:"skip_credential_validation"`
	Token                 string                `mapstructure:"token" required:"false"`
	VaultAWSEngine        VaultAWSEngineOptions `mapstructure:"vault_aws_engine" required:"false"`
	PollingConfig         *AWSPollingConfig     `mapstructure:"aws_polling" required:"false"`
}
// copyAWSAccessConfig copies the AWS access-config keys (as declared by the
// mapstructure tags on AwsAccessConfig) from the builder's configuration into
// the source-AMI filter map, so the generated amazon-ami data source
// authenticates the same way the builder does. Keys absent from the builder
// are left untouched. The (possibly mutated) sourceAmi map is returned.
func copyAWSAccessConfig(sourceAmi map[string]interface{}, builder map[string]interface{}) (map[string]interface{}, error) {
	// Decode an empty access config to enumerate its mapstructure keys.
	accessKeys := map[string]interface{}{}
	if err := mapstructure.Decode(AwsAccessConfig{}, &accessKeys); err != nil {
		return sourceAmi, err
	}
	for key := range accessKeys {
		// Only copy settings the builder actually defines.
		value, present := builder[key]
		if !present {
			continue
		}
		sourceAmi[key] = value
	}
	return sourceAmi, nil
}
// Write copies the generated amazon-ami data sources into out, preceded by
// the data-source header comment when annotations are enabled.
func (p *AmazonAmiDatasourceParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(amazonAmiDataHeader)
	}
	out.Write(p.out)
}
// SourceParser converts the JSON template's builders into HCL2 `source`
// blocks.
type SourceParser struct {
	// Builders are the JSON template's builders; Parse may rewrite their
	// Name field to an autogenerated, whitespace-free identifier.
	Builders []*template.Builder
	// BuilderPlugins is the set of installed builder plugins, used to
	// detect unknown builder types.
	BuilderPlugins packer.BuilderSet
	// WithAnnotations controls whether an explanatory header comment is
	// written before the generated sources.
	WithAnnotations bool
	// out accumulates the rendered HCL2 bytes produced by Parse.
	out []byte
}
// Parse converts every builder in p.Builders into an HCL2 `source` block and
// appends the result to the parser's output. Builders without a usable name
// (empty, or identical to their type) are renamed "autogenerated_<n>", and
// spaces in names are replaced with underscores so the name is a valid HCL2
// label. Returns an error listing every builder whose type matches neither an
// installed plugin nor a known plugin prefix.
func (p *SourceParser) Parse(tpl *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	var unknownBuilders []string
	for i, builderCfg := range p.Builders {
		sourcesContent := hclwrite.NewEmptyFile()
		body := sourcesContent.Body()
		body.AppendNewline()
		// Collect unknown builder types so they can all be reported in a
		// single error below.
		if !p.BuilderPlugins.Has(builderCfg.Type) && knownPluginComponent(builderCfg.Type) == unknownPluginName {
			unknownBuilders = append(unknownBuilders, builderCfg.Type)
		}
		if builderCfg.Name == "" || builderCfg.Name == builderCfg.Type {
			builderCfg.Name = fmt.Sprintf("autogenerated_%d", i+1)
		}
		builderCfg.Name = strings.ReplaceAll(strings.TrimSpace(builderCfg.Name), " ", "_")
		sourceBody := body.AppendNewBlock("source", []string{builderCfg.Type, builderCfg.Name}).Body()
		jsonBodyToHCL2Body(sourceBody, builderCfg.Config)
		p.out = append(p.out, transposeTemplatingCalls(sourcesContent.Bytes())...)
	}
	// TODO update to output to stderr as opposed to having the command exit 1
	if len(unknownBuilders) > 0 {
		// Error strings should not end with a newline (staticcheck ST1005);
		// the caller is responsible for formatting.
		return fmt.Errorf("unknown builder type(s): %v", unknownBuilders)
	}
	return nil
}
// Write copies the generated source blocks into out, preceded by the sources
// header comment when annotations are enabled.
func (p *SourceParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(sourcesHeader)
	}
	out.Write(p.out)
}
// BuildParser emits the top-level `build` block, wiring together the sources
// list with nested provisioner and post-processor blocks.
type BuildParser struct {
	// Builders supply the source references listed in the build block.
	Builders []*template.Builder
	// WithAnnotations controls whether an explanatory header comment is
	// written before the build block.
	WithAnnotations bool
	// provisioners and postProcessors render the nested blocks; they are
	// instantiated during Parse and written inside the build block by Write.
	provisioners   BlockParser
	postProcessors BlockParser
	// out holds the build block's attributes (description, sources).
	out []byte
}
// Parse assembles the build block's attributes — the optional description and
// the list of source references — and prepares the nested provisioner and
// post-processor parsers. It is a no-op when the template has no builders.
func (p *BuildParser) Parse(tpl *template.Template) error {
	if len(p.Builders) == 0 {
		return nil
	}

	content := hclwrite.NewEmptyFile()
	body := content.Body()
	if tpl.Description != "" {
		body.SetAttributeValue("description", cty.StringVal(tpl.Description))
		body.AppendNewline()
	}

	// Reference each builder as "source.<type>.<name>".
	sourceNames := make([]string, 0, len(p.Builders))
	for _, builder := range p.Builders {
		sourceNames = append(sourceNames, fmt.Sprintf("source.%s.%s", builder.Type, builder.Name))
	}
	body.SetAttributeValue("sources", hcl2shim.HCL2ValueFromConfigValue(sourceNames))
	body.AppendNewline()
	p.out = content.Bytes()

	p.provisioners = &ProvisionerParser{WithAnnotations: p.WithAnnotations}
	if err := p.provisioners.Parse(tpl); err != nil {
		return err
	}
	p.postProcessors = &PostProcessorParser{WithAnnotations: p.WithAnnotations}
	return p.postProcessors.Parse(tpl)
}
// Write emits the complete `build { ... }` block: its attributes, then the
// nested provisioner and post-processor blocks. When annotations are enabled
// the build header comment is written first; otherwise a blank line separates
// the block from the preceding output. Nothing is written if Parse produced
// no content.
func (p *BuildParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.WithAnnotations {
		out.WriteString(buildHeader)
	} else {
		out.WriteByte('\n')
	}
	out.WriteString("build {\n")
	out.Write(p.out)
	p.provisioners.Write(out)
	p.postProcessors.Write(out)
	out.WriteString("}\n")
}
// ProvisionerParser converts the template's provisioners (including the
// error-cleanup provisioner) into HCL2 `provisioner` blocks.
type ProvisionerParser struct {
	// WithAnnotations is accepted for interface symmetry with the other
	// parsers; provisioner output currently carries no header comment.
	WithAnnotations bool
	// out accumulates the rendered HCL2 bytes produced by Parse.
	out []byte
}
// Parse renders each provisioner in the template as a `provisioner` block,
// followed by an `error-cleanup-provisioner` block when one is configured,
// transposing legacy templating calls into HCL2 expressions along the way.
func (p *ProvisionerParser) Parse(tpl *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	for _, prov := range tpl.Provisioners {
		rendered := writeProvisioner("provisioner", prov)
		p.out = append(p.out, transposeTemplatingCalls(rendered)...)
	}
	if cleanup := tpl.CleanupProvisioner; cleanup != nil {
		rendered := writeProvisioner("error-cleanup-provisioner", cleanup)
		p.out = append(p.out, transposeTemplatingCalls(rendered)...)
	}
	return nil
}
// writeProvisioner renders a single provisioner as an HCL2 block of the given
// type name ("provisioner" or "error-cleanup-provisioner"). The provisioner's
// top-level JSON settings (except/only, max_retries, timeout, pause_before)
// are folded into its configuration map before it is converted to HCL2.
func writeProvisioner(typeName string, provisioner *template.Provisioner) []byte {
	file := hclwrite.NewEmptyFile()
	root := file.Body()
	block := root.AppendNewBlock(typeName, []string{provisioner.Type})

	cfg := provisioner.Config
	if cfg == nil {
		cfg = map[string]interface{}{}
	}
	if len(provisioner.Except) > 0 {
		cfg["except"] = provisioner.Except
	}
	if len(provisioner.Only) > 0 {
		cfg["only"] = provisioner.Only
	}
	if provisioner.MaxRetries != "" {
		cfg["max_retries"] = provisioner.MaxRetries
	}
	// Durations are emitted in their string form (e.g. "5m0s").
	if provisioner.Timeout > 0 {
		cfg["timeout"] = provisioner.Timeout.String()
	}
	if provisioner.PauseBefore > 0 {
		cfg["pause_before"] = provisioner.PauseBefore.String()
	}

	root.AppendNewline()
	jsonBodyToHCL2Body(block.Body(), cfg)
	return file.Bytes()
}
// Write copies the rendered provisioner blocks into out, if any.
func (p *ProvisionerParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	out.Write(p.out)
}
// PostProcessorParser converts the template's post-processor sequences into
// HCL2 `post-processor` blocks, grouping multi-step sequences inside a
// `post-processors` block.
type PostProcessorParser struct {
	// WithAnnotations is accepted for interface symmetry with the other
	// parsers; post-processor output currently carries no header comment.
	WithAnnotations bool
	// out accumulates the rendered HCL2 bytes produced by Parse.
	out []byte
}
// Parse renders each post-processor sequence from the template. A sequence
// with a single step becomes a bare `post-processor` block; sequences with
// multiple steps are wrapped in a `post-processors` block so they keep their
// chained semantics. Empty sequences are skipped. Top-level JSON settings
// (keep_input_artifact, except/only, name) are folded into each block.
func (p *PostProcessorParser) Parse(tpl *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	for _, sequence := range tpl.PostProcessors {
		if len(sequence) == 0 {
			continue
		}
		content := hclwrite.NewEmptyFile()
		body := content.Body()
		// Multi-step sequences nest inside a post-processors wrapper block.
		if len(sequence) > 1 {
			body = body.AppendNewBlock("post-processors", nil).Body()
		}
		for _, pp := range sequence {
			ppBody := body.AppendNewBlock("post-processor", []string{pp.Type}).Body()
			if pp.KeepInputArtifact != nil {
				ppBody.SetAttributeValue("keep_input_artifact", cty.BoolVal(*pp.KeepInputArtifact))
			}
			cfg := pp.Config
			if cfg == nil {
				cfg = map[string]interface{}{}
			}
			if len(pp.Except) > 0 {
				cfg["except"] = pp.Except
			}
			if len(pp.Only) > 0 {
				cfg["only"] = pp.Only
			}
			// Only emit a name when it differs from the type, to avoid noise.
			if pp.Name != "" && pp.Name != pp.Type {
				cfg["name"] = pp.Name
			}
			jsonBodyToHCL2Body(ppBody, cfg)
		}
		p.out = append(p.out, transposeTemplatingCalls(content.Bytes())...)
	}
	return nil
}
// Write copies the rendered post-processor blocks into out, if any.
func (p *PostProcessorParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	out.Write(p.out)
}
// escapedQuotesRe captures golang template functions that use escaped quotes:
//
//	{{ env \"myvar\" }}
//	{{ split `some-string` \"-\" 0 }}
//
// The repeated group captures the escaped-quoted argument portion. Compiled
// once at package scope so repeated fixQuoting calls don't pay the regex
// compilation cost (fixQuoting runs once per templated string).
var escapedQuotesRe = regexp.MustCompile(`{{\s*\w*(\s*(\\".*\\")\s*)+\w*\s*}}`)

// fixQuoting rewrites escaped quotes (\") inside golang templating calls into
// plain quotes ("), leaving the rest of the string untouched. Matches whose
// captured portion fails to unquote are returned unchanged rather than
// corrupted.
func fixQuoting(old string) string {
	body := escapedQuotesRe.ReplaceAllFunc([]byte(old), func(s []byte) []byte {
		// Get the capture group (the escaped-quoted argument portion).
		group := escapedQuotesRe.ReplaceAllString(string(s), `$1`)
		// Unquoting turns each \" into a literal ".
		unquoted, err := strconv.Unquote(fmt.Sprintf("\"%s\"", group))
		if err != nil {
			// Best effort: keep the original match if it can't be unquoted.
			return s
		}
		return []byte(strings.Replace(string(s), group, unquoted, 1))
	})
	return string(body)
}
func knownPluginComponent(component string) string {
for prefix := range knownPlugins {
if strings.HasPrefix(component, prefix) {
return prefix
}
}
return unknownPluginName
}