Merge branch 'main' into refact/ACC_tests
commit ef492ae0c0

@@ -79,10 +79,17 @@ jobs:
- checkout
- run: make install-tools
- run: make test
- run:
name: Check Codecov
command: |
curl -s -o codecov https://codecov.io/bash \
&& VERSION=$(grep 'VERSION=\".*\"' codecov | cut -d'"' -f2) \
&& shasum -a 512 -c --ignore-missing <(curl -s https://raw.githubusercontent.com/codecov/codecov-bash/${VERSION}/SHA512SUM)
- run:
name: Codecov upload
command: |
bash <(curl -s https://codecov.io/bash)
chmod +x codecov
./codecov
- store_test_results:
path: ./
sign_release:
@@ -125,6 +132,40 @@ jobs:
command: |
VERSION=$(./bin/driftctl_linux_amd64 version)
ghr -t ${GITHUB_TOKEN} -u ${CIRCLE_PROJECT_USERNAME} -r ${CIRCLE_PROJECT_REPONAME} -c ${CIRCLE_SHA1} -n ${VERSION} ${VERSION} ./bin/
publish-aur:
environment:
AUR_GIT: ssh://aur@aur.archlinux.org/driftctl-bin.git
docker:
- image: cimg/base:2020.01
steps:
- checkout
- attach_workspace:
at: ~/project
# Add ssh private key to allow access to AUR repository
# This key is bound to user cloudskiff on AUR
- add_ssh_keys:
fingerprints:
- "ba:05:09:d6:a6:2a:45:34:89:c4:5e:22:23:22:e8:9f"
- run:
name: Bump package version
command: |
mkdir -p ~/.ssh
echo 'aur.archlinux.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEuBKrPzbawxA/k2g6NcyV5jmqwJ2s+zpgZGZ7tpLIcN' >> ~/.ssh/known_hosts
# Ensure ssh is properly configured
ssh aur@aur.archlinux.org list-repos
git clone "${AUR_GIT}" driftctl-bin
cd driftctl-bin
git config user.name "cloudskiff"
git config user.email tech@cloudskiff.com
cp ~/project/bin/driftctl_SHA256SUMS .
./bump.sh "${CIRCLE_TAG}"
echo "--- PKGBUILD ---"
cat PKGBUILD
echo "--- .SRCINFO ---"
cat .SRCINFO
git add PKGBUILD .SRCINFO
git commit -m "Updated to version ${CIRCLE_TAG}"
git push
update-lambda:
executor: aws-cli/default
environment:
@@ -212,6 +253,14 @@ workflows:
only: /^v.*/
branches:
ignore: /.*/
- publish-aur:
requires:
- release
filters:
tags:
only: /^v.*/
branches:
ignore: /.*/
- update-lambda:
context: driftctl-version-lambda
requires:

@@ -45,6 +45,7 @@ ij_go_wrap_func_result_newline_before_rparen = true

[*.json]
insert_final_newline = false
indent_size = 2

[Makefile]
indent_style = tab

@@ -26,6 +26,13 @@
:warning: <strong>This tool is still in beta state and will evolve in the future with potential breaking changes</strong> :warning:
</p>

<details>
<summary>Packaging status</summary>
<a href="https://repology.org/project/driftctl/versions">
<img src="https://repology.org/badge/vertical-allrepos/driftctl.svg" alt="Packaging status">
</a>
</details>

## Why driftctl ?

Infrastructure drift is a blind spot and a source of potential security issues.
@@ -61,7 +68,7 @@ To learn more about compiling driftctl and contributing, please refer to the [co

This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification and is brought to you by these [awesome contributors](CONTRIBUTORS.md).

Build with ❤️️ from 🇫🇷 🇯🇵 🇬🇷 🇸🇪 🇺🇸
Build with ❤️️ from 🇫🇷 🇯🇵 🇬🇷 🇸🇪 🇺🇸 🇷🇪

---

6 main.go

@@ -80,7 +80,7 @@ func run() int {
}

func flushSentry() {
fmt.Print("Sending error report ...")
gosentry.Flush(60 * time.Second)
fmt.Printf(" done, thank you %s\n", color.RedString("❤️"))
ttl := 60 * time.Second
ok := gosentry.Flush(ttl)
logrus.WithField("timeout", ttl).WithField("success", ok).Debug("Flushed Sentry events")
}

@@ -13,7 +13,8 @@ import (

type Change struct {
diff.Change
Computed bool `json:"computed"`
Computed bool `json:"computed"`
JsonString bool `json:"-"`
}

type Changelog []Change

@@ -4,7 +4,6 @@ import (
"reflect"
|
||||
|
||||
resourceaws "github.com/cloudskiff/driftctl/pkg/resource/aws"
|
||||
|
||||
"github.com/r3labs/diff/v2"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/alerter"
|
||||
|
@@ -40,7 +39,8 @@ func (c *ComputedDiffAlert) ShouldIgnoreResource() bool {
}

type Analyzer struct {
alerter *alerter.Alerter
alerter *alerter.Alerter
resourceSchemaRepository resource.SchemaRepositoryInterface
}

type Filter interface {
@@ -48,8 +48,8 @@ type Filter interface {
IsFieldIgnored(res resource.Resource, path []string) bool
}

func NewAnalyzer(alerter *alerter.Alerter) Analyzer {
return Analyzer{alerter}
func NewAnalyzer(alerter *alerter.Alerter, resourceSchemaRepository resource.SchemaRepositoryInterface) Analyzer {
return Analyzer{alerter, resourceSchemaRepository}
}

func (a Analyzer) Analyze(remoteResources, resourcesFromState []resource.Resource, filter Filter) (Analysis, error) {
@@ -81,7 +81,14 @@ func (a Analyzer) Analyze(remoteResources, resourcesFromState []resource.Resourc
filteredRemoteResource = removeResourceByIndex(i, filteredRemoteResource)
analysis.AddManaged(stateRes)

delta, _ := diff.Diff(stateRes, remoteRes)
var delta diff.Changelog
if resource.IsRefactoredResource(stateRes.TerraformType()) {
stateRes, _ := stateRes.(*resource.AbstractResource)
remoteRes, _ := remoteRes.(*resource.AbstractResource)
delta, _ = diff.Diff(stateRes.Attrs, remoteRes.Attrs)
} else {
delta, _ = diff.Diff(stateRes, remoteRes)
}

if len(delta) == 0 {
continue
@@ -93,7 +100,16 @@ func (a Analyzer) Analyze(remoteResources, resourcesFromState []resource.Resourc
continue
}
c := Change{Change: change}
c.Computed = a.isComputedField(stateRes, c)
if resource.IsRefactoredResource(stateRes.TerraformType()) {
resSchema, exist := a.resourceSchemaRepository.GetSchema(stateRes.TerraformType())
if exist {
c.Computed = resSchema.IsComputedField(c.Path)
c.JsonString = resSchema.IsJsonStringField(c.Path)
}
} else {
c.Computed = a.isComputedField(stateRes, c)
c.JsonString = a.isJsonStringField(stateRes, c)
}
if c.Computed {
haveComputedDiff = true
}
@@ -152,6 +168,15 @@ func (a Analyzer) isComputedField(stateRes resource.Resource, change Change) boo
return false
}

// isJsonStringField returns true if the field that generated the diff of a resource
// has a jsonfield tag
func (a Analyzer) isJsonStringField(stateRes resource.Resource, change Change) bool {
if field, ok := a.getField(reflect.TypeOf(stateRes), change.Path); ok {
return field.Tag.Get("jsonfield") == "true"
}
return false
}

// getField recursively finds the deepest field inside a resource depending on
// its path and its type
func (a Analyzer) getField(t reflect.Type, path []string) (reflect.StructField, bool) {

@@ -993,7 +993,10 @@ func TestAnalyze(t *testing.T) {
al.SetAlerts(c.alerts)
}

analyzer := NewAnalyzer(al)
repo := testresource.InitFakeSchemaRepository("aws", "3.19.0")
aws.InitResourcesMetadata(repo)

analyzer := NewAnalyzer(al, repo)
result, err := analyzer.Analyze(c.cloud, c.iac, filter)

if err != nil {

@ -60,9 +60,14 @@ func NewScanCmd() *cobra.Command {
|
|||
}
|
||||
opts.Output = *out
|
||||
|
||||
filterFlag, _ := cmd.Flags().GetString("filter")
|
||||
if filterFlag != "" {
|
||||
expr, err := filter.BuildExpression(filterFlag)
|
||||
filterFlag, _ := cmd.Flags().GetStringArray("filter")
|
||||
|
||||
if len(filterFlag) > 1 {
|
||||
return errors.New("Filter flag should be specified only once")
|
||||
}
|
||||
|
||||
if len(filterFlag) == 1 && filterFlag[0] != "" {
|
||||
expr, err := filter.BuildExpression(filterFlag[0])
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "unable to parse filter expression")
|
||||
}
|
||||
|
@ -79,16 +84,14 @@ func NewScanCmd() *cobra.Command {
|
|||
}
|
||||
|
||||
fl := cmd.Flags()
|
||||
fl.BoolP(
|
||||
fl.Bool(
|
||||
"quiet",
|
||||
"",
|
||||
false,
|
||||
"Do not display anything but scan results",
|
||||
)
|
||||
fl.StringP(
|
||||
fl.StringArray(
|
||||
"filter",
|
||||
"",
|
||||
"",
|
||||
[]string{},
|
||||
"JMESPath expression to filter on\n"+
|
||||
"Examples : \n"+
|
||||
" - Type == 'aws_s3_bucket' (will filter only s3 buckets)\n"+
|
||||
|
@ -146,7 +149,9 @@ func scanRun(opts *pkg.ScanOptions) error {
|
|||
|
||||
progress := globaloutput.NewProgress()
|
||||
|
||||
err := remote.Activate(opts.To, alerter, providerLibrary, supplierLibrary, progress)
|
||||
resourceSchemaRepository := resource.NewSchemaRepository()
|
||||
|
||||
err := remote.Activate(opts.To, alerter, providerLibrary, supplierLibrary, progress, resourceSchemaRepository)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -158,16 +163,16 @@ func scanRun(opts *pkg.ScanOptions) error {
|
|||
logrus.Trace("Exited")
|
||||
}()
|
||||
|
||||
scanner := pkg.NewScanner(supplierLibrary.Suppliers(), alerter)
|
||||
scanner := pkg.NewScanner(supplierLibrary.Suppliers(), alerter, resourceSchemaRepository)
|
||||
|
||||
iacSupplier, err := supplier.GetIACSupplier(opts.From, providerLibrary, opts.BackendOptions)
|
||||
iacSupplier, err := supplier.GetIACSupplier(opts.From, providerLibrary, opts.BackendOptions, resourceSchemaRepository)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resFactory := terraform.NewTerraformResourceFactory(providerLibrary)
|
||||
|
||||
ctl := pkg.NewDriftCTL(scanner, iacSupplier, alerter, resFactory, opts)
|
||||
ctl := pkg.NewDriftCTL(scanner, iacSupplier, alerter, resFactory, opts, resourceSchemaRepository)
|
||||
|
||||
go func() {
|
||||
<-c
|
||||
|
|
|
@ -77,8 +77,7 @@ func (c *Console) Write(analysis *analyser.Analysis) error {
|
|||
pref = fmt.Sprintf("%s %s:", color.RedString("-"), path)
|
||||
}
|
||||
if change.Type == diff.UPDATE {
|
||||
isJsonString := isFieldJsonString(difference.Res, path)
|
||||
if isJsonString {
|
||||
if change.JsonString {
|
||||
prefix := " "
|
||||
fmt.Printf(" %s\n%s%s\n", pref, prefix, jsonDiff(change.From, change.To, prefix))
|
||||
continue
|
||||
|
@ -181,23 +180,6 @@ func groupByType(resources []resource.Resource) map[string][]resource.Resource {
|
|||
return result
|
||||
}
|
||||
|
||||
func isFieldJsonString(res resource.Resource, fieldName string) bool {
|
||||
t := reflect.TypeOf(res)
|
||||
var field reflect.StructField
|
||||
var ok bool
|
||||
if t.Kind() == reflect.Ptr {
|
||||
field, ok = t.Elem().FieldByName(fieldName)
|
||||
}
|
||||
if t.Kind() != reflect.Ptr {
|
||||
field, ok = t.FieldByName(fieldName)
|
||||
}
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return field.Tag.Get("jsonstring") == "true"
|
||||
}
|
||||
|
||||
func jsonDiff(a, b interface{}, prefix string) string {
|
||||
aStr := fmt.Sprintf("%s", a)
|
||||
bStr := fmt.Sprintf("%s", b)
|
||||
|
|
|
@ -9,8 +9,11 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/zclconf/go-cty/cty/gocty"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/resource/aws"
|
||||
"github.com/cloudskiff/driftctl/test/goldenfile"
|
||||
testresource "github.com/cloudskiff/driftctl/test/resource"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/analyser"
|
||||
)
|
||||
|
@ -70,6 +73,76 @@ func TestConsole_Write(t *testing.T) {
|
|||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
repo := testresource.InitFakeSchemaRepository("aws", "3.19.0")
|
||||
aws.InitResourcesMetadata(repo)
|
||||
for _, res := range tt.args.analysis.Managed() {
|
||||
fakeRes, ok := res.(*testresource.FakeResource)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeRes)
|
||||
value, _ := gocty.ToCtyValue(fakeRes, impliedType)
|
||||
fakeRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
fakeStringerRes, ok := res.(*testresource.FakeResourceStringer)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeStringerRes)
|
||||
value, _ := gocty.ToCtyValue(fakeStringerRes, impliedType)
|
||||
fakeStringerRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
for _, res := range tt.args.analysis.Unmanaged() {
|
||||
fakeRes, ok := res.(*testresource.FakeResource)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeRes)
|
||||
value, _ := gocty.ToCtyValue(fakeRes, impliedType)
|
||||
fakeRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
fakeStringerRes, ok := res.(*testresource.FakeResourceStringer)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeStringerRes)
|
||||
value, _ := gocty.ToCtyValue(fakeStringerRes, impliedType)
|
||||
fakeStringerRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
for _, res := range tt.args.analysis.Deleted() {
|
||||
fakeRes, ok := res.(*testresource.FakeResource)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeRes)
|
||||
value, _ := gocty.ToCtyValue(fakeRes, impliedType)
|
||||
fakeRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
fakeStringerRes, ok := res.(*testresource.FakeResourceStringer)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeStringerRes)
|
||||
value, _ := gocty.ToCtyValue(fakeStringerRes, impliedType)
|
||||
fakeStringerRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
for _, d := range tt.args.analysis.Differences() {
|
||||
fakeRes, ok := d.Res.(*testresource.FakeResource)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeRes)
|
||||
value, _ := gocty.ToCtyValue(fakeRes, impliedType)
|
||||
fakeRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
fakeStringerRes, ok := d.Res.(*testresource.FakeResourceStringer)
|
||||
if ok {
|
||||
impliedType, _ := gocty.ImpliedType(fakeStringerRes)
|
||||
value, _ := gocty.ToCtyValue(fakeStringerRes, impliedType)
|
||||
fakeStringerRes.CtyVal = &value
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
c := NewConsole()
|
||||
|
||||
stdout := os.Stdout // keep backup of the real stdout
|
||||
|
|
|
@ -108,6 +108,7 @@ func fakeAnalysisWithJsonFields() *analyser.Analysis {
|
|||
Type: "aws_diff_resource",
|
||||
}, Changelog: []analyser.Change{
|
||||
{
|
||||
JsonString: true,
|
||||
Change: diff.Change{
|
||||
Type: diff.UPDATE,
|
||||
Path: []string{"Json"},
|
||||
|
@ -121,6 +122,7 @@ func fakeAnalysisWithJsonFields() *analyser.Analysis {
|
|||
Type: "aws_diff_resource",
|
||||
}, Changelog: []analyser.Change{
|
||||
{
|
||||
JsonString: true,
|
||||
Change: diff.Change{
|
||||
Type: diff.UPDATE,
|
||||
Path: []string{"Json"},
|
||||
|
@ -176,7 +178,7 @@ func fakeAnalysisWithComputedFields() *analyser.Analysis {
|
|||
Type: "aws_diff_resource",
|
||||
},
|
||||
)
|
||||
a.AddDifference(analyser.Difference{Res: testresource.FakeResource{
|
||||
a.AddDifference(analyser.Difference{Res: &testresource.FakeResource{
|
||||
Id: "diff-id-1",
|
||||
Type: "aws_diff_resource",
|
||||
}, Changelog: []analyser.Change{
|
||||
|
|
|
@ -78,6 +78,7 @@ func TestScanCmd_Invalid(t *testing.T) {
|
|||
{args: []string{"scan", "--from", "tfstate+foobar://test"}, expected: "Unsupported IaC backend 'foobar': \nAccepted values are: s3,http,https"},
|
||||
{args: []string{"scan", "--from", "tfstate:///tmp/test", "--from", "tfstate+toto://test"}, expected: "Unsupported IaC backend 'toto': \nAccepted values are: s3,http,https"},
|
||||
{args: []string{"scan", "--filter", "Type='test'"}, expected: "unable to parse filter expression: SyntaxError: Expected tRbracket, received: tUnknown"},
|
||||
{args: []string{"scan", "--filter", "Type='test'", "--filter", "Type='test2'"}, expected: "Filter flag should be specified only once"},
|
||||
}
|
||||
|
||||
for _, tt := range cases {
|
||||
|
|
|
@ -4,7 +4,6 @@ import (
|
|||
"fmt"
|
||||
|
||||
"github.com/jmespath/go-jmespath"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/alerter"
|
||||
|
@ -39,12 +38,12 @@ type DriftCTL struct {
|
|||
strictMode bool
|
||||
}
|
||||
|
||||
func NewDriftCTL(remoteSupplier resource.Supplier, iacSupplier resource.Supplier, alerter *alerter.Alerter, resFactory resource.ResourceFactory, opts *ScanOptions) *DriftCTL {
|
||||
func NewDriftCTL(remoteSupplier resource.Supplier, iacSupplier resource.Supplier, alerter *alerter.Alerter, resFactory resource.ResourceFactory, opts *ScanOptions, resourceSchemaRepository resource.SchemaRepositoryInterface) *DriftCTL {
|
||||
return &DriftCTL{
|
||||
remoteSupplier,
|
||||
iacSupplier,
|
||||
alerter,
|
||||
analyser.NewAnalyzer(alerter),
|
||||
analyser.NewAnalyzer(alerter, resourceSchemaRepository),
|
||||
opts.Filter,
|
||||
resFactory,
|
||||
opts.StrictMode,
|
||||
|
@ -54,7 +53,7 @@ func NewDriftCTL(remoteSupplier resource.Supplier, iacSupplier resource.Supplier
|
|||
func (d DriftCTL) Run() (*analyser.Analysis, error) {
|
||||
remoteResources, resourcesFromState, err := d.scan()
|
||||
if err != nil {
|
||||
return nil, errors.WithStack(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
middleware := middlewares.NewChain(
|
||||
|
@ -88,18 +87,18 @@ func (d DriftCTL) Run() (*analyser.Analysis, error) {
|
|||
logrus.Debug("Ready to run middlewares")
|
||||
err = middleware.Execute(&remoteResources, &resourcesFromState)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "Unable to run middlewares")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.filter != nil {
|
||||
engine := filter.NewFilterEngine(d.filter)
|
||||
remoteResources, err = engine.Run(remoteResources)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "Unable to filter remote resources")
|
||||
return nil, err
|
||||
}
|
||||
resourcesFromState, err = engine.Run(resourcesFromState)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "Unable to filter state resources")
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -109,7 +108,7 @@ func (d DriftCTL) Run() (*analyser.Analysis, error) {
|
|||
analysis, err := d.analyzer.Analyze(remoteResources, resourcesFromState, driftIgnore)
|
||||
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "Unable to perform resources analysis")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &analysis, nil
|
||||
|
|
|
@ -5,7 +5,6 @@ import (
|
|||
"testing"
|
||||
|
||||
awssdk "github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/jmespath/go-jmespath"
|
||||
"github.com/r3labs/diff/v2"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
|
@ -13,60 +12,67 @@ import (
|
|||
"github.com/cloudskiff/driftctl/pkg"
|
||||
"github.com/cloudskiff/driftctl/pkg/alerter"
|
||||
"github.com/cloudskiff/driftctl/pkg/analyser"
|
||||
filter2 "github.com/cloudskiff/driftctl/pkg/filter"
|
||||
"github.com/cloudskiff/driftctl/pkg/filter"
|
||||
"github.com/cloudskiff/driftctl/pkg/resource"
|
||||
"github.com/cloudskiff/driftctl/pkg/resource/aws"
|
||||
"github.com/cloudskiff/driftctl/pkg/resource/github"
|
||||
"github.com/cloudskiff/driftctl/pkg/terraform"
|
||||
"github.com/cloudskiff/driftctl/test"
|
||||
testresource "github.com/cloudskiff/driftctl/test/resource"
|
||||
)
|
||||
|
||||
type TestProvider struct {
|
||||
Name string
|
||||
Version string
|
||||
}
|
||||
|
||||
type TestCase struct {
|
||||
name string
|
||||
provider *TestProvider
|
||||
stateResources []resource.Resource
|
||||
remoteResources []resource.Resource
|
||||
filter string
|
||||
mocks func(factory resource.ResourceFactory)
|
||||
assert func(result *test.ScanResult, err error)
|
||||
options *pkg.ScanOptions
|
||||
}
|
||||
|
||||
type TestCases []TestCase
|
||||
|
||||
func runTest(t *testing.T, cases TestCases) {
|
||||
for _, c := range cases {
|
||||
if c.provider == nil {
|
||||
c.provider = &TestProvider{
|
||||
Name: "aws",
|
||||
Version: "3.19.0",
|
||||
}
|
||||
}
|
||||
repo := testresource.InitFakeSchemaRepository(c.provider.Name, c.provider.Version)
|
||||
aws.InitResourcesMetadata(repo)
|
||||
github.InitMetadatas(repo)
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
testAlerter := alerter.NewAlerter()
|
||||
|
||||
if c.stateResources == nil {
|
||||
c.stateResources = []resource.Resource{}
|
||||
}
|
||||
|
||||
stateSupplier := &resource.MockSupplier{}
|
||||
stateSupplier.On("Resources").Return(c.stateResources, nil)
|
||||
|
||||
if c.remoteResources == nil {
|
||||
c.remoteResources = []resource.Resource{}
|
||||
}
|
||||
|
||||
remoteSupplier := &resource.MockSupplier{}
|
||||
remoteSupplier.On("Resources").Return(c.remoteResources, nil)
|
||||
|
||||
var filter *jmespath.JMESPath
|
||||
if c.filter != "" {
|
||||
f, err := filter2.BuildExpression(c.filter)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", c.filter, err)
|
||||
}
|
||||
filter = f
|
||||
}
|
||||
|
||||
resourceFactory := &terraform.MockResourceFactory{}
|
||||
|
||||
if c.mocks != nil {
|
||||
c.mocks(resourceFactory)
|
||||
}
|
||||
|
||||
driftctl := pkg.NewDriftCTL(remoteSupplier, stateSupplier, testAlerter, resourceFactory, &pkg.ScanOptions{
|
||||
Filter: filter,
|
||||
})
|
||||
driftctl := pkg.NewDriftCTL(remoteSupplier, stateSupplier, testAlerter, resourceFactory, c.options, repo)
|
||||
|
||||
analysis, err := driftctl.Run()
|
||||
|
||||
|
@ -90,45 +96,54 @@ func TestDriftctlRun_BasicBehavior(t *testing.T) {
|
|||
{
|
||||
name: "infrastructure should be in sync",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{},
|
||||
&testresource.FakeResource{},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{},
|
||||
&testresource.FakeResource{},
|
||||
},
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertInfrastructureIsInSync()
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should have deleted resource",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{},
|
||||
&testresource.FakeResource{},
|
||||
},
|
||||
remoteResources: []resource.Resource{},
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertDeletedCount(1)
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should have unmanaged resource",
|
||||
stateResources: []resource.Resource{},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{},
|
||||
&testresource.FakeResource{},
|
||||
},
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertUnmanagedCount(1)
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should have changes of field update",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
FooBar: "barfoo",
|
||||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
FooBar: "foobar",
|
||||
},
|
||||
|
@ -145,17 +160,20 @@ func TestDriftctlRun_BasicBehavior(t *testing.T) {
|
|||
Computed: false,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should have changes on computed field",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
BarFoo: "barfoo",
|
||||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
BarFoo: "foobar",
|
||||
},
|
||||
|
@ -172,11 +190,14 @@ func TestDriftctlRun_BasicBehavior(t *testing.T) {
|
|||
Computed: true,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should have changes of deleted field",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
Tags: map[string]string{
|
||||
"tag1": "deleted",
|
||||
|
@ -184,7 +205,7 @@ func TestDriftctlRun_BasicBehavior(t *testing.T) {
|
|||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
},
|
||||
|
@ -200,16 +221,19 @@ func TestDriftctlRun_BasicBehavior(t *testing.T) {
|
|||
Computed: false,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should have changes of added field",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
&testresource.FakeResource{
|
||||
Id: "fake",
|
||||
Tags: map[string]string{
|
||||
"tag1": "added",
|
||||
|
@ -228,6 +252,168 @@ func TestDriftctlRun_BasicBehavior(t *testing.T) {
|
|||
Computed: false,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should ignore default AWS IAM role when strict mode is disabled",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
&aws.AwsIamPolicy{
|
||||
Id: "role-policy-test-1",
|
||||
Arn: func(p string) *string { return &p }("policy-test-1"),
|
||||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
&aws.AwsIamRole{
|
||||
Id: "role-test-1",
|
||||
Path: func(p string) *string { return &p }("/aws-service-role/test"),
|
||||
},
|
||||
&aws.AwsIamRolePolicy{
|
||||
Id: "role-policy-test-1",
|
||||
Role: func(p string) *string { return &p }("role-test-1"),
|
||||
},
|
||||
&aws.AwsIamPolicy{
|
||||
Id: "role-policy-test-1",
|
||||
Arn: func(p string) *string { return &p }("policy-test-1"),
|
||||
},
|
||||
&aws.AwsIamPolicyAttachment{
|
||||
Id: "policy-attachment-test-1",
|
||||
PolicyArn: func(p string) *string { return &p }("policy-test-1"),
|
||||
Users: func(p []string) *[]string { return &p }([]string{}),
|
||||
Roles: func(p []string) *[]string { return &p }([]string{"role-test-1"}),
|
||||
},
|
||||
&aws.AwsIamRole{
|
||||
Id: "role-test-2",
|
||||
Path: func(p string) *string { return &p }("/not-aws-service-role/test"),
|
||||
},
|
||||
},
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(2)
|
||||
result.AssertUnmanagedCount(1)
|
||||
result.AssertDeletedCount(0)
|
||||
result.AssertDriftCountTotal(0)
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{
|
||||
StrictMode: false,
|
||||
}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should not ignore default AWS IAM role when strict mode is enabled",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
&aws.AwsIamPolicy{
|
||||
Id: "policy-test-1",
|
||||
Arn: func(p string) *string { return &p }("policy-test-1"),
|
||||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
&aws.AwsIamRole{
|
||||
Id: "role-test-1",
|
||||
Path: func(p string) *string { return &p }("/aws-service-role/test"),
|
||||
},
|
||||
&aws.AwsIamRolePolicy{
|
||||
Id: "role-policy-test-1",
|
||||
Role: func(p string) *string { return &p }("role-test-1"),
|
||||
},
|
||||
&aws.AwsIamPolicy{
|
||||
Id: "policy-test-1",
|
||||
Arn: func(p string) *string { return &p }("policy-test-1"),
|
||||
},
|
||||
&aws.AwsIamPolicyAttachment{
|
||||
Id: "policy-attachment-test-1",
|
||||
PolicyArn: func(p string) *string { return &p }("policy-test-1"),
|
||||
Users: func(p []string) *[]string { return &p }([]string{}),
|
||||
Roles: func(p []string) *[]string { return &p }([]string{"role-test-1"}),
|
||||
},
|
||||
&aws.AwsIamRole{
|
||||
Id: "role-test-2",
|
||||
Path: func(p string) *string { return &p }("/not-aws-service-role/test"),
|
||||
},
|
||||
},
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(2)
|
||||
result.AssertUnmanagedCount(4)
|
||||
result.AssertDeletedCount(0)
|
||||
result.AssertDriftCountTotal(0)
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
return &pkg.ScanOptions{
|
||||
StrictMode: true,
|
||||
}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "we should not ignore default AWS IAM role when strict mode is enabled and a filter is specified",
|
||||
stateResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
&aws.AwsIamPolicy{
|
||||
Id: "policy-test-1",
|
||||
Arn: func(p string) *string { return &p }("policy-test-1"),
|
||||
},
|
||||
},
|
||||
remoteResources: []resource.Resource{
|
||||
testresource.FakeResource{
|
||||
Id: "fake",
|
||||
},
|
||||
&aws.AwsIamRole{
|
||||
Id: "role-test-1",
|
||||
Path: func(p string) *string { return &p }("/aws-service-role/test"),
|
||||
},
|
||||
&aws.AwsIamRolePolicy{
|
||||
Id: "role-policy-test-1",
|
||||
Role: func(p string) *string { return &p }("role-test-1"),
|
||||
},
|
||||
&aws.AwsIamPolicy{
|
||||
Id: "policy-test-1",
|
||||
Arn: func(p string) *string { return &p }("policy-test-1"),
|
||||
},
|
||||
&aws.AwsIamPolicyAttachment{
|
||||
Id: "policy-attachment-test-1",
|
||||
PolicyArn: func(p string) *string { return &p }("policy-test-1"),
|
||||
Users: func(p []string) *[]string { return &p }([]string{}),
|
||||
Roles: func(p []string) *[]string { return &p }([]string{"role-test-1"}),
|
||||
},
|
||||
&aws.AwsIamRole{
|
||||
Id: "role-test-2",
|
||||
Path: func(p string) *string { return &p }("/not-aws-service-role/test"),
|
||||
},
|
||||
},
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertCoverage(0)
|
||||
result.AssertInfrastructureIsNotSync()
|
||||
result.AssertManagedCount(0)
|
||||
result.AssertUnmanagedCount(1)
|
||||
result.AssertDeletedCount(0)
|
||||
result.AssertDriftCountTotal(0)
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Id=='role-test-1'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{
|
||||
Filter: f,
|
||||
StrictMode: true,
|
||||
}
|
||||
}(t),
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -249,11 +435,19 @@ func TestDriftctlRun_BasicFilter(t *testing.T) {
|
|||
Type: "filtered",
|
||||
},
|
||||
},
|
||||
filter: "Type=='filtered'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertUnmanagedCount(1)
|
||||
result.AssertResourceUnmanaged("res2", "filtered")
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='filtered'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test filtering on Id",
|
||||
|
@ -268,11 +462,19 @@ func TestDriftctlRun_BasicFilter(t *testing.T) {
|
|||
Type: "filtered",
|
||||
},
|
||||
},
|
||||
filter: "Id=='res2'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertUnmanagedCount(1)
|
||||
result.AssertResourceUnmanaged("res2", "filtered")
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Id=='res2'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test filtering on attribute",
|
||||
|
@ -293,11 +495,19 @@ func TestDriftctlRun_BasicFilter(t *testing.T) {
|
|||
Type: "not-filtered",
|
||||
},
|
||||
},
|
||||
filter: "Attr.test_field=='value to filter on'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertUnmanagedCount(1)
|
||||
result.AssertResourceUnmanaged("res1", "filtered")
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Attr.test_field=='value to filter on'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -344,7 +554,6 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
})
|
||||
}), "aws_s3_bucket_policy").Times(1).Return(&foo, nil)
|
||||
},
|
||||
filter: "Type=='aws_s3_bucket_policy' && Attr.bucket=='foo'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(1)
|
||||
result.AssertResourceHasDrift("foo", "aws_s3_bucket_policy", analyser.Change{
|
||||
|
@ -357,6 +566,15 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
Computed: false,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='aws_s3_bucket_policy' && Attr.bucket=='foo'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test instance block device middleware",
|
||||
|
@ -460,7 +678,6 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
})
|
||||
}), "aws_ebs_volume").Times(1).Return(&bar, nil)
|
||||
},
|
||||
filter: "Type=='aws_ebs_volume' && Attr.availability_zone=='us-east-1'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(2)
|
||||
result.AssertResourceHasDrift("vol-02862d9b39045a3a4", "aws_ebs_volume", analyser.Change{
|
||||
|
@ -482,6 +699,15 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
Computed: true,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='aws_ebs_volume' && Attr.availability_zone=='us-east-1'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test route table expander middleware",
|
||||
|
@ -595,11 +821,19 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
})
|
||||
}), "aws_route").Times(1).Return(&bar, nil)
|
||||
},
|
||||
filter: "Type=='aws_route' && Attr.gateway_id=='igw-07b7844a8fd17a638'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(2)
|
||||
result.AssertInfrastructureIsInSync()
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='aws_route' && Attr.gateway_id=='igw-07b7844a8fd17a638'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test sns topic policy expander middleware",
|
||||
|
@ -639,7 +873,6 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
})
|
||||
}), "aws_sns_topic_policy").Times(1).Return(&foo, nil)
|
||||
},
|
||||
filter: "Type=='aws_sns_topic_policy' && Attr.arn=='arn'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(1)
|
||||
result.AssertResourceHasDrift("foo", "aws_sns_topic_policy", analyser.Change{
|
||||
|
@ -652,6 +885,15 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
Computed: false,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='aws_sns_topic_policy' && Attr.arn=='arn'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test sqs queue policy expander middleware",
|
||||
|
@ -690,7 +932,6 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
})
|
||||
}), "aws_sqs_queue_policy").Times(1).Return(&foo, nil)
|
||||
},
|
||||
filter: "Type=='aws_sqs_queue_policy' && Attr.queue_url=='foo'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(1)
|
||||
result.AssertResourceHasDrift("foo", "aws_sqs_queue_policy", analyser.Change{
|
||||
|
@ -703,6 +944,15 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
Computed: false,
|
||||
})
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='aws_sqs_queue_policy' && Attr.queue_url=='foo'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
{
|
||||
name: "test security group rule sanitizer middleware",
|
||||
|
@ -1080,11 +1330,19 @@ func TestDriftctlRun_Middlewares(t *testing.T) {
|
|||
})
|
||||
}), "aws_security_group_rule").Times(1).Return(&rule4, nil)
|
||||
},
|
||||
filter: "Type=='aws_security_group_rule' && Attr.security_group_id=='sg-0254c038e32f25530'",
|
||||
assert: func(result *test.ScanResult, err error) {
|
||||
result.AssertManagedCount(7)
|
||||
result.AssertInfrastructureIsInSync()
|
||||
},
|
||||
options: func(t *testing.T) *pkg.ScanOptions {
|
||||
filterStr := "Type=='aws_security_group_rule' && Attr.security_group_id=='sg-0254c038e32f25530'"
|
||||
f, err := filter.BuildExpression(filterStr)
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to build filter expression: %s\n%s", filterStr, err)
|
||||
}
|
||||
|
||||
return &pkg.ScanOptions{Filter: f}
|
||||
}(t),
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
@ -4,11 +4,10 @@ import (
|
|||
"encoding/json"
|
||||
"errors"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/resource"
|
||||
"github.com/jmespath/go-jmespath"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
ctyjson "github.com/zclconf/go-cty/cty/json"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/resource"
|
||||
)
|
||||
|
||||
type FilterEngine struct {
|
||||
|
@ -37,13 +36,18 @@ func (e *FilterEngine) Run(resources []resource.Resource) ([]resource.Resource,
|
|||
// We need to serialize all attributes to untyped interface from JMESPath to work
|
||||
// map[string]string and map[string]SomeThing will not work without it
|
||||
// https://github.com/jmespath/go-jmespath/issues/22
|
||||
ctyVal := res.CtyValue()
|
||||
if ctyVal == nil {
|
||||
ctyVal = &cty.EmptyObjectVal
|
||||
}
|
||||
bytes, _ := ctyjson.Marshal(*ctyVal, ctyVal.Type())
|
||||
var attrs interface{}
|
||||
_ = json.Unmarshal(bytes, &attrs)
|
||||
if abstractRes, ok := res.(*resource.AbstractResource); ok {
|
||||
attrs = abstractRes.Attrs
|
||||
} else {
|
||||
ctyVal := res.CtyValue()
|
||||
if ctyVal == nil {
|
||||
ctyVal = &cty.EmptyObjectVal
|
||||
}
|
||||
bytes, _ := ctyjson.Marshal(*ctyVal, ctyVal.Type())
|
||||
_ = json.Unmarshal(bytes, &attrs)
|
||||
}
|
||||
|
||||
f := filtrableResource{
|
||||
Attr: attrs,
|
||||
Res: res,
|
||||
|
|
|
@ -28,7 +28,7 @@ func IsSupplierSupported(supplierKey string) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
func GetIACSupplier(configs []config.SupplierConfig, library *terraform.ProviderLibrary, backendOpts *backend.Options) (resource.Supplier, error) {
|
||||
func GetIACSupplier(configs []config.SupplierConfig, library *terraform.ProviderLibrary, backendOpts *backend.Options, resourceSchemaRepository resource.SchemaRepositoryInterface) (resource.Supplier, error) {
|
||||
chainSupplier := resource.NewChainSupplier()
|
||||
for _, config := range configs {
|
||||
if !IsSupplierSupported(config.Key) {
|
||||
|
@ -39,7 +39,7 @@ func GetIACSupplier(configs []config.SupplierConfig, library *terraform.Provider
|
|||
var err error
|
||||
switch config.Key {
|
||||
case state.TerraformStateReaderSupplier:
|
||||
supplier, err = state.NewReader(config, library, backendOpts)
|
||||
supplier, err = state.NewReader(config, library, backendOpts, resourceSchemaRepository)
|
||||
default:
|
||||
return nil, errors.Errorf("Unsupported supplier '%s'", config.Key)
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"github.com/cloudskiff/driftctl/pkg/iac/config"
|
||||
"github.com/cloudskiff/driftctl/pkg/iac/terraform/state/backend"
|
||||
"github.com/cloudskiff/driftctl/pkg/terraform"
|
||||
"github.com/cloudskiff/driftctl/test/resource"
|
||||
)
|
||||
|
||||
func TestGetIACSupplier(t *testing.T) {
|
||||
|
@ -82,7 +83,8 @@ func TestGetIACSupplier(t *testing.T) {
|
|||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
_, err := GetIACSupplier(tt.args.config, terraform.NewProviderLibrary(), tt.args.options)
|
||||
repo := resource.InitFakeSchemaRepository("aws", "3.19.0")
|
||||
_, err := GetIACSupplier(tt.args.config, terraform.NewProviderLibrary(), tt.args.options, repo)
|
||||
if tt.wantErr != nil && err.Error() != tt.wantErr.Error() {
|
||||
t.Errorf("GetIACSupplier() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
"github.com/cloudskiff/driftctl/pkg/iac/terraform/state/enumerator"
|
||||
"github.com/cloudskiff/driftctl/pkg/remote/deserializer"
|
||||
"github.com/cloudskiff/driftctl/pkg/resource"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/terraform"
|
||||
"github.com/hashicorp/terraform/addrs"
|
||||
"github.com/hashicorp/terraform/states"
|
||||
|
@ -22,12 +23,13 @@ import (
|
|||
const TerraformStateReaderSupplier = "tfstate"
|
||||
|
||||
type TerraformStateReader struct {
|
||||
library *terraform.ProviderLibrary
|
||||
config config.SupplierConfig
|
||||
backend backend.Backend
|
||||
enumerator enumerator.StateEnumerator
|
||||
deserializers []deserializer.CTYDeserializer
|
||||
backendOptions *backend.Options
|
||||
library *terraform.ProviderLibrary
|
||||
config config.SupplierConfig
|
||||
backend backend.Backend
|
||||
enumerator enumerator.StateEnumerator
|
||||
deserializers []deserializer.CTYDeserializer
|
||||
backendOptions *backend.Options
|
||||
resourceSchemaRepository resource.SchemaRepositoryInterface
|
||||
}
|
||||
|
||||
func (r *TerraformStateReader) initReader() error {
|
||||
|
@ -35,8 +37,8 @@ func (r *TerraformStateReader) initReader() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func NewReader(config config.SupplierConfig, library *terraform.ProviderLibrary, backendOpts *backend.Options) (*TerraformStateReader, error) {
|
||||
reader := TerraformStateReader{library: library, config: config, deserializers: iac.Deserializers(), backendOptions: backendOpts}
|
||||
func NewReader(config config.SupplierConfig, library *terraform.ProviderLibrary, backendOpts *backend.Options, resourceSchemaRepository resource.SchemaRepositoryInterface) (*TerraformStateReader, error) {
|
||||
reader := TerraformStateReader{library: library, config: config, deserializers: iac.Deserializers(), backendOptions: backendOpts, resourceSchemaRepository: resourceSchemaRepository}
|
||||
err := reader.initReader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -173,6 +175,22 @@ func (r *TerraformStateReader) decode(values map[string][]cty.Value) ([]resource
|
|||
"id": res.TerraformId(),
|
||||
"type": res.TerraformType(),
|
||||
}).Debug("Found IAC resource")
|
||||
if resource.IsRefactoredResource(res.TerraformType()) {
|
||||
schema, exist := r.resourceSchemaRepository.GetSchema(res.TerraformType())
|
||||
ctyAttr := resource.ToResourceAttributes(res.CtyValue())
|
||||
ctyAttr.SanitizeDefaultsV3()
|
||||
if exist && schema.NormalizeFunc != nil {
|
||||
schema.NormalizeFunc(ctyAttr)
|
||||
}
|
||||
|
||||
newRes := &resource.AbstractResource{
|
||||
Id: res.TerraformId(),
|
||||
Type: res.TerraformType(),
|
||||
Attrs: ctyAttr,
|
||||
}
|
||||
results = append(results, newRes)
|
||||
continue
|
||||
}
|
||||
normalisable, ok := res.(resource.NormalizedResource)
|
||||
if ok {
|
||||
normalizedRes, err := normalisable.NormalizeForState()
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/output"
|
||||
testresource "github.com/cloudskiff/driftctl/test/resource"
|
||||
|
||||
"github.com/cloudskiff/driftctl/pkg/iac"
|
||||
"github.com/cloudskiff/driftctl/pkg/iac/config"
|
||||
|
@ -115,12 +116,15 @@ func TestTerraformStateReader_AWS_Resources(t *testing.T) {
|
|||
library := terraform.NewProviderLibrary()
|
||||
library.AddProvider(terraform.AWS, provider)
|
||||
|
||||
repo := testresource.InitFakeSchemaRepository(terraform.AWS, "3.19.0")
|
||||
|
||||
r := &TerraformStateReader{
|
||||
config: config.SupplierConfig{
|
||||
Path: path.Join(goldenfile.GoldenFilePath, tt.dirName, "terraform.tfstate"),
|
||||
},
|
||||
library: library,
|
||||
deserializers: iac.Deserializers(),
|
||||
library: library,
|
||||
deserializers: iac.Deserializers(),
|
||||
resourceSchemaRepository: repo,
|
||||
}
|
||||
|
||||
got, err := r.Resources()
|
||||
|
|
|
@ -1,173 +1,151 @@
|
|||
[
|
||||
{
|
||||
"Aliases": null,
|
||||
"Arn": "arn:aws:cloudfront::047081014315:distribution/E1M9CNS0XSHI19",
|
||||
"CallerReference": "terraform-20210216101734792900000001",
|
||||
"Comment": null,
|
||||
"DefaultRootObject": "",
|
||||
"DomainName": "d1g0dw0i1wvlgd.cloudfront.net",
|
||||
"Enabled": false,
|
||||
"Etag": "E2CKBANLXUPWGQ",
|
||||
"HostedZoneId": "Z2FDTNDATAQYW2",
|
||||
"HttpVersion": "http2",
|
||||
"Id": "E1M9CNS0XSHI19",
|
||||
"InProgressValidationBatches": 0,
|
||||
"IsIpv6Enabled": false,
|
||||
"LastModifiedTime": "2021-02-16 10:17:35.404 +0000 UTC",
|
||||
"PriceClass": "PriceClass_All",
|
||||
"RetainOnDelete": false,
|
||||
"Status": "Deployed",
|
||||
"Tags": {},
|
||||
"TrustedSigners": [
|
||||
{
|
||||
"Enabled": false,
|
||||
"Items": []
|
||||
}
|
||||
],
|
||||
"WaitForDeployment": true,
|
||||
"WebAclId": "",
|
||||
"CustomErrorResponse": [],
|
||||
"DefaultCacheBehavior": [
|
||||
{
|
||||
"AllowedMethods": [
|
||||
"GET",
|
||||
"HEAD"
|
||||
],
|
||||
"CachedMethods": [
|
||||
"GET",
|
||||
"HEAD"
|
||||
],
|
||||
"Compress": false,
|
||||
"DefaultTtl": 86400,
|
||||
"FieldLevelEncryptionId": "",
|
||||
"MaxTtl": 31536000,
|
||||
"MinTtl": 0,
|
||||
"SmoothStreaming": false,
|
||||
"TargetOriginId": "S3-foo-cloudfront",
|
||||
"TrustedSigners": [],
|
||||
"ViewerProtocolPolicy": "allow-all",
|
||||
"ForwardedValues": [
|
||||
{
|
||||
"Headers": null,
|
||||
"QueryString": false,
|
||||
"QueryStringCacheKeys": [],
|
||||
"Cookies": [
|
||||
{
|
||||
"Forward": "none",
|
||||
"WhitelistedNames": null
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"LambdaFunctionAssociation": []
|
||||
}
|
||||
],
|
||||
"LoggingConfig": [],
|
||||
"OrderedCacheBehavior": [
|
||||
{
|
||||
"AllowedMethods": [
|
||||
"GET",
|
||||
"HEAD",
|
||||
"OPTIONS"
|
||||
],
|
||||
"CachedMethods": [
|
||||
"GET",
|
||||
"HEAD",
|
||||
"OPTIONS"
|
||||
],
|
||||
"Compress": true,
|
||||
"DefaultTtl": 86400,
|
||||
"FieldLevelEncryptionId": null,
|
||||
"MaxTtl": 31536000,
|
||||
"MinTtl": 0,
|
||||
"PathPattern": "/content/immutable/*",
|
||||
"SmoothStreaming": false,
|
||||
"TargetOriginId": "S3-foo-cloudfront",
|
||||
"TrustedSigners": null,
|
||||
"ViewerProtocolPolicy": "redirect-to-https",
|
||||
"ForwardedValues": [
|
||||
{
|
||||
"Headers": [
|
||||
"Origin"
|
||||
],
|
||||
"QueryString": false,
|
||||
"QueryStringCacheKeys": null,
|
||||
"Cookies": [
|
||||
{
|
||||
"Forward": "none",
|
||||
"WhitelistedNames": null
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"LambdaFunctionAssociation": null
|
||||
},
|
||||
{
|
||||
"AllowedMethods": [
|
||||
"GET",
|
||||
"HEAD",
|
||||
"OPTIONS"
|
||||
],
|
||||
"CachedMethods": [
|
||||
"GET",
|
||||
"HEAD"
|
||||
],
|
||||
"Compress": true,
|
||||
"DefaultTtl": 3600,
|
||||
"FieldLevelEncryptionId": null,
|
||||
"MaxTtl": 86400,
|
||||
"MinTtl": 0,
|
||||
"PathPattern": "/content/*",
|
||||
"SmoothStreaming": false,
|
||||
"TargetOriginId": "S3-foo-cloudfront",
|
||||
"TrustedSigners": null,
|
||||
"ViewerProtocolPolicy": "redirect-to-https",
|
||||
"ForwardedValues": [
|
||||
{
|
||||
"Headers": null,
|
||||
"QueryString": false,
|
||||
"QueryStringCacheKeys": null,
|
||||
"Cookies": [
|
||||
{
|
||||
"Forward": "none",
|
||||
"WhitelistedNames": null
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"LambdaFunctionAssociation": null
|
||||
}
|
||||
],
|
||||
"Origin": [
|
||||
{
|
||||
"DomainName": "foo-cloudfront.s3.eu-west-3.amazonaws.com",
|
||||
"OriginId": "S3-foo-cloudfront",
|
||||
"OriginPath": "",
|
||||
"CustomHeader": [],
|
||||
"CustomOriginConfig": [],
|
||||
"S3OriginConfig": []
|
||||
}
|
||||
],
|
||||
"OriginGroup": null,
|
||||
"Restrictions": [
|
||||
{
|
||||
"GeoRestriction": [
|
||||
{
|
||||
"Locations": null,
|
||||
"RestrictionType": "none"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"ViewerCertificate": [
|
||||
{
|
||||
"AcmCertificateArn": "",
|
||||
"CloudfrontDefaultCertificate": true,
|
||||
"IamCertificateId": "",
|
||||
"MinimumProtocolVersion": "TLSv1",
|
||||
"SslSupportMethod": ""
|
||||
}
|
||||
],
|
||||
"CtyVal": {}
|
||||
"Type": "aws_cloudfront_distribution",
|
||||
"Attrs": {
|
||||
"arn": "arn:aws:cloudfront::047081014315:distribution/E1M9CNS0XSHI19",
|
||||
"caller_reference": "terraform-20210216101734792900000001",
|
||||
"default_cache_behavior": [
|
||||
{
|
||||
"allowed_methods": [
|
||||
"GET",
|
||||
"HEAD"
|
||||
],
|
||||
"cached_methods": [
|
||||
"GET",
|
||||
"HEAD"
|
||||
],
|
||||
"compress": false,
|
||||
"default_ttl": 86400,
|
||||
"field_level_encryption_id": "",
|
||||
"forwarded_values": [
|
||||
{
|
||||
"cookies": [
|
||||
{
|
||||
"forward": "none"
|
||||
}
|
||||
],
|
||||
"query_string": false
|
||||
}
|
||||
],
|
||||
"max_ttl": 31536000,
|
||||
"min_ttl": 0,
|
||||
"smooth_streaming": false,
|
||||
"target_origin_id": "S3-foo-cloudfront",
|
||||
"viewer_protocol_policy": "allow-all"
|
||||
}
|
||||
],
|
||||
"default_root_object": "",
|
||||
"domain_name": "d1g0dw0i1wvlgd.cloudfront.net",
|
||||
"enabled": false,
|
||||
"etag": "E2CKBANLXUPWGQ",
|
||||
"hosted_zone_id": "Z2FDTNDATAQYW2",
|
||||
"http_version": "http2",
|
||||
"id": "E1M9CNS0XSHI19",
|
||||
"in_progress_validation_batches": 0,
|
||||
"is_ipv6_enabled": false,
|
||||
"last_modified_time": "2021-02-16 10:17:35.404 +0000 UTC",
|
||||
"ordered_cache_behavior": [
|
||||
{
|
||||
"allowed_methods": [
|
||||
"GET",
|
||||
"HEAD",
|
||||
"OPTIONS"
|
||||
],
|
||||
"cached_methods": [
|
||||
"GET",
|
||||
"HEAD",
|
||||
"OPTIONS"
|
||||
],
|
||||
"compress": true,
|
||||
"default_ttl": 86400,
|
||||
"field_level_encryption_id": "",
|
||||
"forwarded_values": [
|
||||
{
|
||||
"cookies": [
|
||||
{
|
||||
"forward": "none"
|
||||
}
|
||||
],
|
||||
"headers": [
|
||||
"Origin"
|
||||
],
|
||||
"query_string": false
|
||||
}
|
||||
],
|
||||
"max_ttl": 31536000,
|
||||
"min_ttl": 0,
|
||||
"path_pattern": "/content/immutable/*",
|
||||
"smooth_streaming": false,
|
||||
"target_origin_id": "S3-foo-cloudfront",
|
||||
"viewer_protocol_policy": "redirect-to-https"
|
||||
},
|
||||
{
|
||||
"allowed_methods": [
|
||||
"GET",
|
||||
"HEAD",
|
||||
"OPTIONS"
|
||||
],
|
||||
"cached_methods": [
|
||||
"GET",
|
||||
"HEAD"
|
||||
],
|
||||
"compress": true,
|
||||
"default_ttl": 3600,
|
||||
"field_level_encryption_id": "",
|
||||
"forwarded_values": [
|
||||
{
|
||||
"cookies": [
|
||||
{
|
||||
"forward": "none"
|
||||
}
|
||||
],
|
||||
"query_string": false
|
||||
}
|
||||
],
|
||||
"max_ttl": 86400,
|
||||
"min_ttl": 0,
|
||||
"path_pattern": "/content/*",
|
||||
"smooth_streaming": false,
|
||||
"target_origin_id": "S3-foo-cloudfront",
|
||||
"viewer_protocol_policy": "redirect-to-https"
|
||||
}
|
||||
],
|
||||
"origin": [
|
||||
{
|
||||
"domain_name": "foo-cloudfront.s3.eu-west-3.amazonaws.com",
|
||||
"origin_id": "S3-foo-cloudfront",
|
||||
"origin_path": ""
|
||||
}
|
||||
],
|
||||
"price_class": "PriceClass_All",
|
||||
"restrictions": [
|
||||
{
|
||||
"geo_restriction": [
|
||||
{
|
||||
"restriction_type": "none"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"retain_on_delete": false,
|
||||
"status": "Deployed",
|
||||
"trusted_signers": [
|
||||
{
|
||||
"enabled": false
|
||||
}
|
||||
],
|
||||
"viewer_certificate": [
|
||||
{
|
||||
"acm_certificate_arn": "",
|
||||
"cloudfront_default_certificate": true,
|
||||
"iam_certificate_id": "",
|
||||
"minimum_protocol_version": "TLSv1",
|
||||
"ssl_support_method": ""
|
||||
}
|
||||
],
|
||||
"wait_for_deployment": true,
|
||||
"web_acl_id": ""
|
||||
}
|
||||
}
|
||||
]
|
|
@ -53,9 +53,12 @@ func (p *progress) Stop() {
|
|||
}
|
||||
|
||||
func (p *progress) Inc() {
|
||||
if p.started.Load() {
|
||||
p.count.Inc()
|
||||
if lastVal := p.count.Load(); !p.started.Load() {
|
||||
logrus.Debug("Progress received a tic after stopping. Restarting...")
|
||||
p.Start()
|
||||
p.count.Store(lastVal)
|
||||
}
|
||||
p.count.Inc()
|
||||
}
|
||||
|
||||
func (p *progress) Val() uint64 {
|
||||
|
|
|
@ -10,9 +10,14 @@ import (
|
|||
func TestProgressTimeoutDoesNotInc(t *testing.T) {
|
||||
progress := NewProgress()
|
||||
progress.Start()
|
||||
progress.Inc()
|
||||
progress.Stop() // should not hang
|
||||
progress.Inc() // should not hang or inc
|
||||
assert.Equal(t, uint64(0), progress.Val())
|
||||
progress.Inc() // should restart progress and inc
|
||||
assert.Equal(t, uint64(2), progress.Val())
|
||||
assert.Equal(t, true, progress.started.Load())
|
||||
|
||||
progress.Stop()
|
||||
assert.Equal(t, false, progress.started.Load())
|
||||
}
|
||||
|
||||
func TestProgressTimeoutDoesNotHang(t *testing.T) {
|
||||
|
@ -21,10 +26,9 @@ func TestProgressTimeoutDoesNotHang(t *testing.T) {
|
|||
time.Sleep(progressTimeout)
|
||||
for progress.started.Load() == true {
|
||||
}
|
||||
progress.Inc() // should not hang or inc
|
||||
progress.Inc() // should not hang but inc
|
||||
progress.Stop() // should not hang
|
||||
assert.Equal(t, uint64(0), progress.Val())
|
||||
|
||||
assert.Equal(t, uint64(1), progress.Val())
|
||||
}
|
||||
|
||||
func TestProgress(t *testing.T) {
|
||||
|
|
|
@@ -6,6 +6,7 @@ import (
    "github.com/cloudskiff/driftctl/pkg/remote/aws/client"
    "github.com/cloudskiff/driftctl/pkg/remote/aws/repository"
    "github.com/cloudskiff/driftctl/pkg/resource"
    "github.com/cloudskiff/driftctl/pkg/resource/aws"
    "github.com/cloudskiff/driftctl/pkg/terraform"
)

@@ -15,7 +16,7 @@ const RemoteAWSTerraform = "aws+tf"
 * Initialize remote (configure credentials, launch tf providers and start gRPC clients)
 * Required to use Scanner
 */
func Init(alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress) error {
func Init(alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress, resourceSchemaRepository *resource.SchemaRepository) error {
    provider, err := NewAWSTerraformProvider(progress)
    if err != nil {
        return err

@@ -77,5 +78,8 @@ func Init(alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary,
    supplierLibrary.AddSupplier(NewKMSAliasSupplier(provider))
    supplierLibrary.AddSupplier(NewLambdaEventSourceMappingSupplier(provider))

    resourceSchemaRepository.Init(provider.Schema())
    aws.InitResourcesMetadata(resourceSchemaRepository)

    return nil
}
@@ -4,6 +4,7 @@ import (
    "github.com/cloudskiff/driftctl/pkg/alerter"
    "github.com/cloudskiff/driftctl/pkg/output"
    "github.com/cloudskiff/driftctl/pkg/resource"
    "github.com/cloudskiff/driftctl/pkg/resource/github"
    "github.com/cloudskiff/driftctl/pkg/terraform"
)

@@ -13,7 +14,7 @@ const RemoteGithubTerraform = "github+tf"
 * Initialize remote (configure credentials, launch tf providers and start gRPC clients)
 * Required to use Scanner
 */
func Init(alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress) error {
func Init(alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress, resourceSchemaRepository *resource.SchemaRepository) error {
    provider, err := NewGithubTerraformProvider(progress)
    if err != nil {
        return err

@@ -33,5 +34,8 @@ func Init(alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary,
    supplierLibrary.AddSupplier(NewGithubTeamMembershipSupplier(provider, repository))
    supplierLibrary.AddSupplier(NewGithubBranchProtectionSupplier(provider, repository))

    resourceSchemaRepository.Init(provider.Schema())
    github.InitMetadatas(resourceSchemaRepository)

    return nil
}
@@ -24,12 +24,12 @@ func IsSupported(remote string) bool {
    return false
}

func Activate(remote string, alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress) error {
func Activate(remote string, alerter *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress, resourceSchemaRepository *resource.SchemaRepository) error {
    switch remote {
    case aws.RemoteAWSTerraform:
        return aws.Init(alerter, providerLibrary, supplierLibrary, progress)
        return aws.Init(alerter, providerLibrary, supplierLibrary, progress, resourceSchemaRepository)
    case github.RemoteGithubTerraform:
        return github.Init(alerter, providerLibrary, supplierLibrary, progress)
        return github.Init(alerter, providerLibrary, supplierLibrary, progress, resourceSchemaRepository)
    default:
        return errors.Errorf("unsupported remote '%s'", remote)
    }
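Not part of the diff: a hedged sketch of how a caller can wire the new resourceSchemaRepository argument through Activate and into the Scanner. Only the schema repository is new; the alerter, libraries, progress and supplier list are taken as parameters because their construction is unchanged by this change and not shown here. The "aws+tf" literal matches the RemoteAWSTerraform constant above.

package example

import (
    "github.com/cloudskiff/driftctl/pkg"
    "github.com/cloudskiff/driftctl/pkg/alerter"
    "github.com/cloudskiff/driftctl/pkg/output"
    "github.com/cloudskiff/driftctl/pkg/remote"
    "github.com/cloudskiff/driftctl/pkg/resource"
    "github.com/cloudskiff/driftctl/pkg/terraform"
)

// wireScan shows the new plumbing: the same SchemaRepository instance is
// passed both to remote.Activate (which fills it from the provider schema)
// and to the Scanner (which uses it to normalize refactored resources).
func wireScan(al *alerter.Alerter, providerLibrary *terraform.ProviderLibrary, supplierLibrary *resource.SupplierLibrary, progress output.Progress, suppliers []resource.Supplier) (*pkg.Scanner, error) {
    repo := resource.NewSchemaRepository()
    if err := remote.Activate("aws+tf", al, providerLibrary, supplierLibrary, progress, repo); err != nil {
        return nil, err
    }
    return pkg.NewScanner(suppliers, al, repo), nil
}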
@@ -1,7 +1,11 @@
// GENERATED, DO NOT EDIT THIS FILE
package aws

import "github.com/zclconf/go-cty/cty"
import (
    "github.com/cloudskiff/driftctl/pkg/resource"

    "github.com/zclconf/go-cty/cty"
)

const AwsCloudfrontDistributionResourceType = "aws_cloudfront_distribution"

@@ -155,3 +159,13 @@ func (r *AwsCloudfrontDistribution) TerraformType() string {
func (r *AwsCloudfrontDistribution) CtyValue() *cty.Value {
    return r.CtyVal
}

func initAwsCloudfrontDistributionMetaData(resourceSchemaRepository resource.SchemaRepositoryInterface) {
    resourceSchemaRepository.SetNormalizeFunc(AwsCloudfrontDistributionResourceType, func(val *resource.Attributes) {
        val.SafeDelete([]string{"etag"})
        val.SafeDelete([]string{"last_modified_time"})
        val.SafeDelete([]string{"retain_on_delete"})
        val.SafeDelete([]string{"status"})
        val.SafeDelete([]string{"wait_for_deployment"})
    })
}
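Not part of the diff: a sketch of the registration pattern this introduces, using a hypothetical resource type and attribute name. It relies only on SetNormalizeFunc and SafeDelete, both defined in this change.

package aws

import "github.com/cloudskiff/driftctl/pkg/resource"

// Hypothetical example of the same pattern for another resource type:
// computed or noisy attributes are dropped before comparison so they do
// not show up as drift.
func initExampleResourceMetaData(repo resource.SchemaRepositoryInterface) {
    repo.SetNormalizeFunc("aws_example_resource", func(val *resource.Attributes) {
        // "arn" is purely illustrative; pick whatever attributes the
        // provider computes server-side for the real resource.
        val.SafeDelete([]string{"arn"})
    })
}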
@@ -71,7 +71,7 @@ func TestAcc_Aws_CloudfrontDistribution(t *testing.T) {
    analyser.Change{
        Change: diff.Change{
            Type: diff.UPDATE,
            Path: []string{"IsIpv6Enabled"},
            Path: []string{"is_ipv6_enabled"},
            From: false,
            To:   true,
        },
@@ -0,0 +1,7 @@
package aws

import "github.com/cloudskiff/driftctl/pkg/resource"

func InitResourcesMetadata(resourceSchemaRepository resource.SchemaRepositoryInterface) {
    initAwsCloudfrontDistributionMetaData(resourceSchemaRepository)
}
@@ -0,0 +1,7 @@
package github

import "github.com/cloudskiff/driftctl/pkg/resource"

func InitMetadatas(resourceSchemaRepository resource.SchemaRepositoryInterface) {

}
@@ -2,9 +2,14 @@ package resource

import (
    "encoding/json"
    "reflect"
    "sort"
    "strconv"
    "strings"

    "github.com/pkg/errors"
    "github.com/zclconf/go-cty/cty"
    ctyjson "github.com/zclconf/go-cty/cty/json"
)

type Resource interface {

@@ -13,6 +18,37 @@ type Resource interface {
    CtyValue() *cty.Value
}

var refactoredResources = []string{
    "aws_cloudfront_distribution",
}

func IsRefactoredResource(typ string) bool {
    for _, refactoredResource := range refactoredResources {
        if typ == refactoredResource {
            return true
        }
    }
    return false
}

type AbstractResource struct {
    Id    string
    Type  string
    Attrs *Attributes
}

func (a *AbstractResource) TerraformId() string {
    return a.Id
}

func (a *AbstractResource) TerraformType() string {
    return a.Type
}

func (a *AbstractResource) CtyValue() *cty.Value {
    return nil
}

type ResourceFactory interface {
    CreateResource(data interface{}, ty string) (*cty.Value, error)
}
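Not part of the diff: a minimal sketch of how the new attribute-map based AbstractResource is built by hand (for instance in a test). It uses only the types declared just above; the id value is hypothetical and the sample attribute is borrowed from the golden file earlier in this diff.

package resource

// buildExample shows that an AbstractResource is just an id, a type and a
// free-form attribute map, in contrast with the generated typed structs.
func buildExample() *AbstractResource {
    attrs := Attributes{
        "price_class": "PriceClass_All",
    }
    return &AbstractResource{
        Id:    "EXAMPLEID", // hypothetical id
        Type:  "aws_cloudfront_distribution",
        Attrs: &attrs,
    }
}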
@@ -70,3 +106,188 @@ func Sort(res []Resource) []Resource {
    })
    return res
}

func ToResourceAttributes(val *cty.Value) *Attributes {
    if val == nil {
        return nil
    }

    bytes, _ := ctyjson.Marshal(*val, val.Type())
    var attrs Attributes
    err := json.Unmarshal(bytes, &attrs)
    if err != nil {
        panic(err)
    }

    return &attrs
}

type Attributes map[string]interface{}

func (a *Attributes) Get(path string) (interface{}, bool) {
    val, exist := (*a)[path]
    return val, exist
}

func (a *Attributes) SafeDelete(path []string) {
    for i, key := range path {
        if i == len(path)-1 {
            delete(*a, key)
            return
        }

        v, exists := (*a)[key]
        if !exists {
            return
        }
        m, ok := v.(Attributes)
        if !ok {
            return
        }
        *a = m
    }
}

func (a *Attributes) SafeSet(path []string, value interface{}) error {
    for i, key := range path {
        if i == len(path)-1 {
            (*a)[key] = value
            return nil
        }

        v, exists := (*a)[key]
        if !exists {
            (*a)[key] = map[string]interface{}{}
            v = (*a)[key]
        }

        m, ok := v.(Attributes)
        if !ok {
            return errors.Errorf("Path %s cannot be set: %s is not a nested struct", strings.Join(path, "."), key)
        }
        *a = m
    }
    return errors.New("Error setting value") // should not happen ?
}

func (a *Attributes) SanitizeDefaults() {
    original := reflect.ValueOf(*a)
    copy := reflect.New(original.Type()).Elem()
    a.run("", original, copy)
    *a = copy.Interface().(Attributes)
}

func (a *Attributes) run(path string, original, copy reflect.Value) {
    switch original.Kind() {
    case reflect.Ptr:
        originalValue := original.Elem()
        if !originalValue.IsValid() {
            return
        }
        copy.Set(reflect.New(originalValue.Type()))
        a.run(path, originalValue, copy.Elem())
    case reflect.Interface:
        // Get rid of the wrapping interface
        originalValue := original.Elem()
        if !originalValue.IsValid() {
            return
        }
        if originalValue.Kind() == reflect.Slice || originalValue.Kind() == reflect.Map {
            if originalValue.Len() == 0 {
                return
            }
        }
        // Create a new object. Now new gives us a pointer, but we want the value it
        // points to, so we have to call Elem() to unwrap it
        copyValue := reflect.New(originalValue.Type()).Elem()
        a.run(path, originalValue, copyValue)
        copy.Set(copyValue)

    case reflect.Struct:
        for i := 0; i < original.NumField(); i += 1 {
            field := original.Field(i)
            a.run(concatenatePath(path, field.String()), field, copy.Field(i))
        }
    case reflect.Slice:
        copy.Set(reflect.MakeSlice(original.Type(), original.Len(), original.Cap()))
        for i := 0; i < original.Len(); i += 1 {
            a.run(concatenatePath(path, strconv.Itoa(i)), original.Index(i), copy.Index(i))
        }
    case reflect.Map:
        copy.Set(reflect.MakeMap(original.Type()))
        for _, key := range original.MapKeys() {
            originalValue := original.MapIndex(key)
            copyValue := reflect.New(originalValue.Type()).Elem()
            a.run(concatenatePath(path, key.String()), originalValue, copyValue)
            copy.SetMapIndex(key, copyValue)
        }
    default:
        copy.Set(original)
    }
}

func concatenatePath(path, next string) string {
    if path == "" {
        return next
    }
    return strings.Join([]string{path, next}, ".")
}

func (a *Attributes) SanitizeDefaultsV3() {
    original := reflect.ValueOf(*a)
    copy := reflect.New(original.Type()).Elem()
    a.runV3("", original, copy)
    *a = copy.Interface().(Attributes)
}

func (a *Attributes) runV3(path string, original, copy reflect.Value) bool {
    switch original.Kind() {
    case reflect.Ptr:
        originalValue := original.Elem()
        if !originalValue.IsValid() {
            return false
        }
        copy.Set(reflect.New(originalValue.Type()))
        a.runV3(path, originalValue, copy.Elem())
    case reflect.Interface:
        // Get rid of the wrapping interface
        originalValue := original.Elem()
        if !originalValue.IsValid() {
            return false
        }
        if originalValue.Kind() == reflect.Slice || originalValue.Kind() == reflect.Map {
            if originalValue.Len() == 0 {
                return false
            }
        }
        // Create a new object. Now new gives us a pointer, but we want the value it
        // points to, so we have to call Elem() to unwrap it
        copyValue := reflect.New(originalValue.Type()).Elem()
        a.runV3(path, originalValue, copyValue)
        copy.Set(copyValue)

    case reflect.Struct:
        for i := 0; i < original.NumField(); i += 1 {
            field := original.Field(i)
            a.runV3(concatenatePath(path, field.String()), field, copy.Field(i))
        }
    case reflect.Slice:
        copy.Set(reflect.MakeSlice(original.Type(), original.Len(), original.Cap()))
        for i := 0; i < original.Len(); i += 1 {
            a.runV3(concatenatePath(path, strconv.Itoa(i)), original.Index(i), copy.Index(i))
        }
    case reflect.Map:
        copy.Set(reflect.MakeMap(original.Type()))
        for _, key := range original.MapKeys() {
            originalValue := original.MapIndex(key)
            copyValue := reflect.New(originalValue.Type()).Elem()
            created := a.runV3(concatenatePath(path, key.String()), originalValue, copyValue)
            if created {
                copy.SetMapIndex(key, copyValue)
            }
        }
    default:
        copy.Set(original)
    }
    return true
}
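Not part of the diff: a small usage sketch of the Attributes helpers defined above, complementing the tests that follow (which only cover the sanitize functions). The keys used are illustrative.

package resource

import "fmt"

// attributesExample exercises Get, SafeSet, SafeDelete and SanitizeDefaultsV3
// on a small map. Only top-level paths are used here; nested paths require
// the nested values to be of type Attributes because of the type assertion
// in SafeDelete/SafeSet.
func attributesExample() {
    attrs := Attributes{
        "status": "Deployed",
        "tags":   map[string]string{}, // empty map: removed by SanitizeDefaultsV3
    }

    if v, exist := attrs.Get("status"); exist {
        fmt.Println(v) // Deployed
    }

    _ = attrs.SafeSet([]string{"web_acl_id"}, "") // set a top-level key
    attrs.SafeDelete([]string{"status"})          // drop a computed field

    attrs.SanitizeDefaultsV3() // the empty "tags" map is deleted, "web_acl_id" stays
}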
@@ -0,0 +1,152 @@
package resource

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

// Normalize empty slices and map to nil
func TestSanitizeDefaults(t *testing.T) {
    cases := map[string]struct {
        input    Attributes
        expected interface{}
    }{
        "simple": {
            input: Attributes{
                "emptyStringSlice": []string{},
                "emptyIntSlice":    []int{},
                "emptyBoolSlice":   []bool{},
                "emptyMap":         map[string]string{},
                "nilInterface":     interface{}(nil),
                "not_deleted":      "value",
            },
            expected: Attributes{
                "emptyStringSlice": nil,
                "emptyIntSlice":    nil,
                "emptyBoolSlice":   nil,
                "emptyMap":         nil,
                "nilInterface":     nil,
                "not_deleted":      "value",
            },
        },
        "nested": {
            input: Attributes{
                "should": map[string]interface{}{
                    "be_deleted":     map[string]interface{}{},
                    "be_deleted_too": []string{},
                    "not_be_deleted": "no",
                },
                "not_deleted": "value",
            },
            expected: Attributes{
                "should": map[string]interface{}{
                    "be_deleted":     nil,
                    "be_deleted_too": nil,
                    "not_be_deleted": "no",
                },
                "not_deleted": "value",
            },
        },
        "nested_slice": {
            input: Attributes{
                "should": []map[string][]interface{}{
                    {
                        "be": []interface{}{
                            map[string]interface{}{
                                "removed":     []string{},
                                "removed_too": map[string]string{},
                            },
                        },
                    },
                },
            },
            expected: Attributes{
                "should": []map[string][]interface{}{
                    {
                        "be": []interface{}{
                            map[string]interface{}{
                                "removed":     nil,
                                "removed_too": nil,
                            },
                        },
                    },
                },
            },
        },
    }
    for k, c := range cases {
        t.Run(k, func(t *testing.T) {
            c.input.SanitizeDefaults()
            assert.Equal(t, c.expected, c.input)
        })
    }
}

// Delete empty or nil slices and maps
func TestSanitizeDefaultsV3(t *testing.T) {
    cases := map[string]struct {
        input    Attributes
        expected interface{}
    }{
        "simple": {
            input: Attributes{
                "emptyStringSlice": []string{},
                "emptyIntSlice":    []int{},
                "emptyBoolSlice":   []bool{},
                "emptyMap":         map[string]string{},
                "nilInterface":     interface{}(nil),
                "not_deleted":      "value",
            },
            expected: Attributes{
                "not_deleted": "value",
            },
        },
        "nested": {
            input: Attributes{
                "should": map[string]interface{}{
                    "be_deleted":         map[string]interface{}{},
                    "be_deleted_too":     []string{},
                    "not_be_deleted":     "no",
                    "not_be_deleted_too": []string(nil),
                },
                "not_deleted": "value",
            },
            expected: Attributes{
                "should": map[string]interface{}{
                    "not_be_deleted": "no",
                },
                "not_deleted": "value",
            },
        },
        "nested_slice": {
            input: Attributes{
                "should": []map[string][]interface{}{
                    {
                        "be": []interface{}{
                            map[string]interface{}{
                                "removed":     []string{},
                                "removed_too": map[string]string{},
                            },
                        },
                    },
                },
            },
            expected: Attributes{
                "should": []map[string][]interface{}{
                    {
                        "be": []interface{}{
                            map[string]interface{}{},
                        },
                    },
                },
            },
        },
    }
    for k, c := range cases {
        t.Run(k, func(t *testing.T) {
            c.input.SanitizeDefaultsV3()
            assert.Equal(t, c.expected, c.input)
        })
    }
}
@@ -0,0 +1,111 @@
package resource

import (
    "strings"

    "github.com/hashicorp/terraform/configs/configschema"
    "github.com/hashicorp/terraform/providers"
    "github.com/sirupsen/logrus"
)

type AttributeSchema struct {
    ConfigSchema configschema.Attribute
    JsonString   bool
}

type Schema struct {
    Attributes    map[string]AttributeSchema
    NormalizeFunc func(val *Attributes)
}

func (s *Schema) IsComputedField(path []string) bool {
    metadata, exist := s.Attributes[strings.Join(path, ".")]
    if !exist {
        return false
    }
    return metadata.ConfigSchema.Computed
}

func (s *Schema) IsJsonStringField(path []string) bool {
    metadata, exist := s.Attributes[strings.Join(path, ".")]
    if !exist {
        return false
    }
    return metadata.JsonString
}

type SchemaRepositoryInterface interface {
    GetSchema(resourceType string) (*Schema, bool)
    UpdateSchema(typ string, schemasMutators map[string]func(attributeSchema *AttributeSchema))
    SetNormalizeFunc(typ string, normalizeFunc func(val *Attributes))
}

type SchemaRepository struct {
    schemas map[string]*Schema
}

func NewSchemaRepository() *SchemaRepository {
    return &SchemaRepository{
        schemas: make(map[string]*Schema),
    }
}

func (r *SchemaRepository) GetSchema(resourceType string) (*Schema, bool) {
    schema, exist := r.schemas[resourceType]
    return schema, exist
}

func (r *SchemaRepository) fetchNestedBlocks(root string, metadata map[string]AttributeSchema, block map[string]*configschema.NestedBlock) {
    for s, nestedBlock := range block {
        path := s
        if root != "" {
            path = strings.Join([]string{root, s}, ".")
        }
        for s2, attr := range nestedBlock.Attributes {
            nestedPath := strings.Join([]string{path, s2}, ".")
            metadata[nestedPath] = AttributeSchema{
                ConfigSchema: *attr,
            }
        }
        r.fetchNestedBlocks(path, metadata, nestedBlock.BlockTypes)
    }
}

func (r *SchemaRepository) Init(schema map[string]providers.Schema) {
    for typ, sch := range schema {
        attributeMetas := map[string]AttributeSchema{}
        for s, attribute := range sch.Block.Attributes {
            attributeMetas[s] = AttributeSchema{
                ConfigSchema: *attribute,
            }
        }

        r.fetchNestedBlocks("", attributeMetas, sch.Block.BlockTypes)

        r.schemas[typ] = &Schema{
            Attributes: attributeMetas,
        }
    }
}

func (r *SchemaRepository) UpdateSchema(typ string, schemasMutators map[string]func(attributeSchema *AttributeSchema)) {
    for s, f := range schemasMutators {
        metadata, exist := r.GetSchema(typ)
        if !exist {
            logrus.WithFields(logrus.Fields{"type": typ}).Warning("Unable to set metadata, no schema found")
            return
        }
        m := (*metadata).Attributes[s]
        f(&m)
        (*metadata).Attributes[s] = m
    }
}

func (r *SchemaRepository) SetNormalizeFunc(typ string, normalizeFunc func(val *Attributes)) {
    metadata, exist := r.GetSchema(typ)
    if !exist {
        logrus.WithFields(logrus.Fields{"type": typ}).Warning("Unable to set normalize func, no schema found")
        return
    }
    (*metadata).NormalizeFunc = normalizeFunc
}
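Not part of the diff: a usage sketch of the repository above, fed with a hand-written provider schema instead of a real provider.Schema() call. The resource type and attribute name are hypothetical; the configschema/providers field names are assumed from the imported hashicorp/terraform packages.

package resource

import (
    "fmt"

    "github.com/hashicorp/terraform/configs/configschema"
    "github.com/hashicorp/terraform/providers"
    "github.com/zclconf/go-cty/cty"
)

// schemaRepositoryExample initializes a repository with one resource type,
// then queries and decorates it the same way the metadata files do.
func schemaRepositoryExample() {
    repo := NewSchemaRepository()
    repo.Init(map[string]providers.Schema{
        "aws_example_resource": { // hypothetical resource type
            Block: &configschema.Block{
                Attributes: map[string]*configschema.Attribute{
                    "arn": {Type: cty.String, Computed: true},
                },
            },
        },
    })

    schema, exist := repo.GetSchema("aws_example_resource")
    fmt.Println(exist, schema.IsComputedField([]string{"arn"})) // true true

    // Attach a normalize func, as initAwsCloudfrontDistributionMetaData does.
    repo.SetNormalizeFunc("aws_example_resource", func(val *Attributes) {
        val.SafeDelete([]string{"arn"})
    })
}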
@@ -3,27 +3,27 @@ package pkg
import (
    "context"

    "github.com/cloudskiff/driftctl/pkg/remote"
    "github.com/pkg/errors"

    "github.com/cloudskiff/driftctl/pkg/parallel"
    "github.com/sirupsen/logrus"

    "github.com/cloudskiff/driftctl/pkg/alerter"
    "github.com/cloudskiff/driftctl/pkg/parallel"
    "github.com/cloudskiff/driftctl/pkg/remote"
    "github.com/cloudskiff/driftctl/pkg/resource"
    "github.com/pkg/errors"
    "github.com/sirupsen/logrus"
)

type Scanner struct {
    resourceSuppliers []resource.Supplier
    runner            *parallel.ParallelRunner
    alerter           *alerter.Alerter
    resourceSuppliers        []resource.Supplier
    runner                   *parallel.ParallelRunner
    alerter                  *alerter.Alerter
    resourceSchemaRepository *resource.SchemaRepository
}

func NewScanner(resourceSuppliers []resource.Supplier, alerter *alerter.Alerter) *Scanner {
func NewScanner(resourceSuppliers []resource.Supplier, alerter *alerter.Alerter, resourceSchemaRepository *resource.SchemaRepository) *Scanner {
    return &Scanner{
        resourceSuppliers: resourceSuppliers,
        runner:            parallel.NewParallelRunner(context.TODO(), 10),
        alerter:           alerter,
        resourceSuppliers:        resourceSuppliers,
        runner:                   parallel.NewParallelRunner(context.TODO(), 10),
        alerter:                  alerter,
        resourceSchemaRepository: resourceSchemaRepository,
    }
}

@@ -58,6 +58,24 @@ loop:
        break loop
    }
    for _, res := range resources.([]resource.Resource) {

        if resource.IsRefactoredResource(res.TerraformType()) {
            schema, exist := s.resourceSchemaRepository.GetSchema(res.TerraformType())
            ctyAttr := resource.ToResourceAttributes(res.CtyValue())
            ctyAttr.SanitizeDefaultsV3()
            if exist && schema.NormalizeFunc != nil {
                schema.NormalizeFunc(ctyAttr)
            }

            newRes := &resource.AbstractResource{
                Id:    res.TerraformId(),
                Type:  res.TerraformType(),
                Attrs: ctyAttr,
            }
            results = append(results, newRes)
            continue
        }

        normalisable, ok := res.(resource.NormalizedResource)
        if ok {
            normalizedRes, err := normalisable.NormalizeForProvider()
@@ -3,6 +3,10 @@ package resource
import (
    "fmt"

    "github.com/cloudskiff/driftctl/pkg/resource"
    "github.com/cloudskiff/driftctl/test/schemas"

    "github.com/hashicorp/terraform/providers"
    "github.com/zclconf/go-cty/cty"
)

@@ -64,3 +68,18 @@ func (r *FakeResourceStringer) CtyValue() *cty.Value {
func (d *FakeResourceStringer) String() string {
    return fmt.Sprintf("Name: '%s'", d.Name)
}

func InitFakeSchemaRepository(provider, version string) resource.SchemaRepositoryInterface {
    repo := resource.NewSchemaRepository()
    schema := make(map[string]providers.Schema)
    if provider != "" {
        s, err := schemas.ReadTestSchema(provider, version)
        if err != nil {
            // TODO: handle error properly
            panic(err)
        }
        schema = s
    }
    repo.Init(schema)
    return repo
}
@@ -74,7 +74,7 @@ func (r *ScanResult) AssertResourceHasNoDrift(id, ty string) {
}

func (r *ScanResult) AssertCoverage(expected int) {
    r.Equal(expected, r.Coverage)
    r.Equal(expected, r.Coverage())
}

func (r *ScanResult) AssertDriftCountTotal(count int) {

@@ -110,3 +110,17 @@ func (r ScanResult) AssertInfrastructureIsInSync() {
        ),
    )
}

func (r ScanResult) AssertInfrastructureIsNotSync() {
    r.Equal(
        false,
        r.Analysis.IsSync(),
        fmt.Sprintf(
            "Infrastructure is in sync: %+v\nUnmanaged:\n%+v\nDeleted:\n%+v\nDifferences:\n%+v\n",
            r.Analysis.Summary(),
            r.Analysis.Unmanaged(),
            r.Analysis.Deleted(),
            r.Analysis.Differences(),
        ),
    )
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,23 @@
package schemas

import (
    gojson "encoding/json"
    "io/ioutil"
    "path"
    "runtime"

    "github.com/hashicorp/terraform/providers"
)

func ReadTestSchema(provider, version string) (map[string]providers.Schema, error) {
    _, filename, _, _ := runtime.Caller(0)
    content, err := ioutil.ReadFile(path.Join(path.Dir(filename), provider, version, "schema.json"))
    if err != nil {
        return nil, err
    }
    var schema map[string]providers.Schema
    if err := gojson.Unmarshal(content, &schema); err != nil {
        return nil, err
    }
    return schema, nil
}
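Not part of the diff: ReadTestSchema resolves the JSON fixture relative to its own source file, i.e. test/schemas/<provider>/<version>/schema.json. The provider/version values below are placeholders, not a claim about which fixtures the repository actually ships (their diffs are suppressed above).

package schemas

import "fmt"

// readTestSchemaExample loads a cached provider schema fixture. The
// "aws"/"x.y.z" arguments are illustrative; they must match an existing
// test/schemas/aws/x.y.z/schema.json file.
func readTestSchemaExample() {
    schema, err := ReadTestSchema("aws", "x.y.z")
    if err != nil {
        panic(err) // fixture missing or unreadable
    }
    fmt.Printf("loaded schemas for %d resource types\n", len(schema))
}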