Split aws_s3_bucket_analytic

main
Elie 2021-07-06 15:00:41 +02:00 committed by sundowndev
parent 92dbd59471
commit 97f65abdbd
14 changed files with 268 additions and 466294 deletions
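This commit replaces the old S3BucketAnalyticSupplier with an enumerator and a details fetcher registered on the remote library in Init below. A minimal sketch of the two driftctl interfaces the new types satisfy, inferred from their method sets in this diff; the interface names and package shown here are assumptions, not part of the change:

package common

import "github.com/cloudskiff/driftctl/pkg/resource"

// Enumerator lists remote resources of a single supported type.
type Enumerator interface {
	SupportedType() resource.ResourceType
	Enumerate() ([]resource.Resource, error)
}

// DetailsFetcher completes an enumerated resource with its full attribute set.
type DetailsFetcher interface {
	ReadDetails(res resource.Resource) (resource.Resource, error)
}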

View File

@@ -68,6 +68,8 @@ func Init(version string, alerter *alerter.Alerter,
remoteLibrary.AddDetailsFetcher(aws.AwsS3BucketMetricResourceType, NewS3BucketMetricsDetailsFetcher(provider, deserializer))
remoteLibrary.AddEnumerator(NewS3BucketPolicyEnumerator(s3Repository, factory, provider.Config))
remoteLibrary.AddDetailsFetcher(aws.AwsS3BucketPolicyResourceType, NewS3BucketPolicyDetailsFetcher(provider, deserializer))
remoteLibrary.AddEnumerator(NewS3BucketAnalyticEnumerator(s3Repository, factory, provider.Config))
remoteLibrary.AddDetailsFetcher(aws.AwsS3BucketAnalyticsConfigurationResourceType, NewS3BucketAnalyticDetailsFetcher(provider, deserializer))
remoteLibrary.AddEnumerator(NewEC2EbsVolumeEnumerator(ec2repository, factory))
remoteLibrary.AddDetailsFetcher(aws.AwsEbsVolumeResourceType, common.NewGenericDetailsFetcher(aws.AwsEbsVolumeResourceType, provider, deserializer))
@@ -147,7 +149,6 @@ func Init(version string, alerter *alerter.Alerter,
remoteLibrary.AddEnumerator(NewLambdaFunctionEnumerator(lambdaRepository, factory))
remoteLibrary.AddDetailsFetcher(aws.AwsLambdaFunctionResourceType, NewLambdaFunctionDetailsFetcher(provider, deserializer))
supplierLibrary.AddSupplier(NewS3BucketAnalyticSupplier(provider, s3Repository, deserializer))
supplierLibrary.AddSupplier(NewIamUserSupplier(provider, deserializer, iamRepository))
supplierLibrary.AddSupplier(NewIamUserPolicySupplier(provider, deserializer, iamRepository))
supplierLibrary.AddSupplier(NewIamUserPolicyAttachmentSupplier(provider, deserializer, iamRepository))

View File

@@ -0,0 +1,38 @@
package aws
import (
"github.com/cloudskiff/driftctl/pkg/resource"
"github.com/cloudskiff/driftctl/pkg/resource/aws"
"github.com/cloudskiff/driftctl/pkg/terraform"
)
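// S3BucketAnalyticDetailsFetcher reads the full state of a single
// aws_s3_bucket_analytics_configuration through the Terraform provider
// and deserializes it into a driftctl resource.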
type S3BucketAnalyticDetailsFetcher struct {
reader terraform.ResourceReader
deserializer *resource.Deserializer
}
func NewS3BucketAnalyticDetailsFetcher(provider terraform.ResourceReader, deserializer *resource.Deserializer) *S3BucketAnalyticDetailsFetcher {
return &S3BucketAnalyticDetailsFetcher{
reader: provider,
deserializer: deserializer,
}
}
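// ReadDetails fetches the remote attributes of the enumerated analytics
// configuration, using the bucket's region as the provider alias.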
func (r *S3BucketAnalyticDetailsFetcher) ReadDetails(res resource.Resource) (resource.Resource, error) {
ctyVal, err := r.reader.ReadResource(terraform.ReadResourceArgs{
Ty: aws.AwsS3BucketAnalyticsConfigurationResourceType,
ID: res.TerraformId(),
Attributes: map[string]string{
"alias": *res.Attributes().GetString("region"),
},
})
if err != nil {
return nil, err
}
deserializedRes, err := r.deserializer.DeserializeOne(aws.AwsS3BucketAnalyticsConfigurationResourceType, *ctyVal)
if err != nil {
return nil, err
}
return deserializedRes, nil
}

View File

@@ -0,0 +1,74 @@
package aws
import (
"fmt"
"github.com/cloudskiff/driftctl/pkg/remote/aws/repository"
remoteerror "github.com/cloudskiff/driftctl/pkg/remote/error"
tf "github.com/cloudskiff/driftctl/pkg/remote/terraform"
"github.com/cloudskiff/driftctl/pkg/resource"
"github.com/cloudskiff/driftctl/pkg/resource/aws"
"github.com/sirupsen/logrus"
)
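// S3BucketAnalyticEnumerator lists aws_s3_bucket_analytics_configuration
// resources for buckets located in the provider's default region.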
type S3BucketAnalyticEnumerator struct {
repository repository.S3Repository
factory resource.ResourceFactory
providerConfig tf.TerraformProviderConfig
}
func NewS3BucketAnalyticEnumerator(repo repository.S3Repository, factory resource.ResourceFactory, providerConfig tf.TerraformProviderConfig) *S3BucketAnalyticEnumerator {
return &S3BucketAnalyticEnumerator{
repository: repo,
factory: factory,
providerConfig: providerConfig,
}
}
func (e *S3BucketAnalyticEnumerator) SupportedType() resource.ResourceType {
return aws.AwsS3BucketAnalyticsConfigurationResourceType
}
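// Enumerate lists every bucket, skips the ones outside the provider's
// default region, and returns one abstract resource per analytics
// configuration, identified as "<bucket name>:<configuration id>".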
func (e *S3BucketAnalyticEnumerator) Enumerate() ([]resource.Resource, error) {
buckets, err := e.repository.ListAllBuckets()
if err != nil {
return nil, remoteerror.NewResourceEnumerationErrorWithType(err, string(e.SupportedType()), aws.AwsS3BucketResourceType)
}
results := make([]resource.Resource, 0, len(buckets))
for _, bucket := range buckets {
region, err := e.repository.GetBucketLocation(*bucket.Name)
if err != nil {
return nil, err
}
if region == "" || region != e.providerConfig.DefaultAlias {
logrus.WithFields(logrus.Fields{
"region": region,
"bucket": *bucket.Name,
}).Debug("Skipped bucket analytic")
continue
}
analyticsConfigurationList, err := e.repository.ListBucketAnalyticsConfigurations(bucket, region)
if err != nil {
return nil, remoteerror.NewResourceEnumerationError(err, string(e.SupportedType()))
}
for _, analytics := range analyticsConfigurationList {
id := fmt.Sprintf("%s:%s", *bucket.Name, *analytics.Id)
results = append(
results,
e.factory.CreateAbstractResource(
string(e.SupportedType()),
id,
map[string]interface{}{
"region": region,
},
),
)
}
}
return results, err
}

View File

@@ -1,90 +0,0 @@
package aws
import (
"fmt"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/cloudskiff/driftctl/pkg/remote/aws/repository"
remoteerror "github.com/cloudskiff/driftctl/pkg/remote/error"
tf "github.com/cloudskiff/driftctl/pkg/remote/terraform"
"github.com/cloudskiff/driftctl/pkg/resource"
"github.com/cloudskiff/driftctl/pkg/resource/aws"
"github.com/cloudskiff/driftctl/pkg/terraform"
"github.com/zclconf/go-cty/cty"
)
type S3BucketAnalyticSupplier struct {
reader terraform.ResourceReader
deserializer *resource.Deserializer
repository repository.S3Repository
runner *terraform.ParallelResourceReader
providerConfig tf.TerraformProviderConfig
}
func NewS3BucketAnalyticSupplier(provider *AWSTerraformProvider, repository repository.S3Repository, deserializer *resource.Deserializer) *S3BucketAnalyticSupplier {
return &S3BucketAnalyticSupplier{
provider,
deserializer,
repository,
terraform.NewParallelResourceReader(provider.Runner().SubRunner()),
provider.Config,
}
}
func (s *S3BucketAnalyticSupplier) Resources() ([]resource.Resource, error) {
buckets, err := s.repository.ListAllBuckets()
if err != nil {
return nil, remoteerror.NewResourceEnumerationErrorWithType(err, aws.AwsS3BucketAnalyticsConfigurationResourceType, aws.AwsS3BucketResourceType)
}
for _, bucket := range buckets {
bucket := *bucket
region, err := s.repository.GetBucketLocation(*bucket.Name)
if err != nil {
return nil, err
}
if region == "" || region != s.providerConfig.DefaultAlias {
continue
}
if err := s.listBucketAnalyticConfiguration(&bucket, region); err != nil {
return nil, remoteerror.NewResourceEnumerationError(err, aws.AwsS3BucketAnalyticsConfigurationResourceType)
}
}
ctyVals, err := s.runner.Wait()
if err != nil {
return nil, err
}
return s.deserializer.Deserialize(aws.AwsS3BucketAnalyticsConfigurationResourceType, ctyVals)
}
func (s *S3BucketAnalyticSupplier) listBucketAnalyticConfiguration(bucket *s3.Bucket, region string) error {
analyticsConfigurationList, err := s.repository.ListBucketAnalyticsConfigurations(bucket, region)
if err != nil {
return err
}
for _, analytics := range analyticsConfigurationList {
id := fmt.Sprintf("%s:%s", *bucket.Name, *analytics.Id)
s.runner.Run(func() (cty.Value, error) {
s3BucketAnalytic, err := s.reader.ReadResource(
terraform.ReadResourceArgs{
Ty: aws.AwsS3BucketAnalyticsConfigurationResourceType,
ID: id,
Attributes: map[string]string{
"alias": region,
},
},
)
if err != nil {
return cty.NilVal, err
}
return *s3BucketAnalytic, err
})
}
return nil
}

View File

@@ -1,166 +0,0 @@
package aws
import (
"context"
"testing"
awssdk "github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/cloudskiff/driftctl/pkg/parallel"
"github.com/cloudskiff/driftctl/pkg/remote/aws/client"
"github.com/cloudskiff/driftctl/pkg/remote/aws/repository"
"github.com/cloudskiff/driftctl/pkg/remote/cache"
remoteerror "github.com/cloudskiff/driftctl/pkg/remote/error"
tf "github.com/cloudskiff/driftctl/pkg/remote/terraform"
"github.com/cloudskiff/driftctl/pkg/resource"
resourceaws "github.com/cloudskiff/driftctl/pkg/resource/aws"
testresource "github.com/cloudskiff/driftctl/test/resource"
"github.com/cloudskiff/driftctl/pkg/terraform"
"github.com/cloudskiff/driftctl/test"
"github.com/cloudskiff/driftctl/test/goldenfile"
"github.com/cloudskiff/driftctl/test/mocks"
"github.com/stretchr/testify/assert"
)
func TestS3BucketAnalyticSupplier_Resources(t *testing.T) {
tests := []struct {
test string
dirName string
mocks func(repository *repository.MockS3Repository)
listError error
wantErr error
}{
{
test: "multiple bucket with multiple analytics",
dirName: "s3_bucket_analytics_multiple",
mocks: func(repository *repository.MockS3Repository) {
repository.On(
"ListAllBuckets",
).Return([]*s3.Bucket{
{Name: awssdk.String("bucket-martin-test-drift")},
{Name: awssdk.String("bucket-martin-test-drift2")},
{Name: awssdk.String("bucket-martin-test-drift3")},
}, nil)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift",
).Return(
"eu-west-1",
nil,
)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift2",
).Return(
"eu-west-3",
nil,
)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift3",
).Return(
"ap-northeast-1",
nil,
)
repository.On(
"ListBucketAnalyticsConfigurations",
&s3.Bucket{Name: awssdk.String("bucket-martin-test-drift2")},
"eu-west-3",
).Return(
[]*s3.AnalyticsConfiguration{
{Id: awssdk.String("Analytics_Bucket2")},
{Id: awssdk.String("Analytics2_Bucket2")},
},
nil,
)
},
},
{
test: "cannot list bucket", dirName: "s3_bucket_analytics_list_bucket",
mocks: func(repository *repository.MockS3Repository) {
repository.On("ListAllBuckets").Return(nil, awserr.NewRequestFailure(nil, 403, ""))
},
wantErr: remoteerror.NewResourceEnumerationErrorWithType(awserr.NewRequestFailure(nil, 403, ""), resourceaws.AwsS3BucketAnalyticsConfigurationResourceType, resourceaws.AwsS3BucketResourceType),
},
{
test: "cannot list Analytics", dirName: "s3_bucket_analytics_list_analytics",
mocks: func(repository *repository.MockS3Repository) {
repository.On("ListAllBuckets").Return(
[]*s3.Bucket{
{Name: awssdk.String("bucket-martin-test-drift")},
},
nil,
)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift",
).Return(
"eu-west-3",
nil,
)
repository.On(
"ListBucketAnalyticsConfigurations",
&s3.Bucket{Name: awssdk.String("bucket-martin-test-drift")},
"eu-west-3",
).Return(
nil,
awserr.NewRequestFailure(nil, 403, ""),
)
},
wantErr: remoteerror.NewResourceEnumerationError(awserr.NewRequestFailure(nil, 403, ""), resourceaws.AwsS3BucketAnalyticsConfigurationResourceType),
},
}
for _, tt := range tests {
shouldUpdate := tt.dirName == *goldenfile.Update
providerLibrary := terraform.NewProviderLibrary()
supplierLibrary := resource.NewSupplierLibrary()
repo := testresource.InitFakeSchemaRepository("aws", "3.19.0")
resourceaws.InitResourcesMetadata(repo)
factory := terraform.NewTerraformResourceFactory(repo)
deserializer := resource.NewDeserializer(factory)
if shouldUpdate {
provider, err := InitTestAwsProvider(providerLibrary)
if err != nil {
t.Fatal(err)
}
repository := repository.NewS3Repository(client.NewAWSClientFactory(provider.session), cache.New(0))
supplierLibrary.AddSupplier(NewS3BucketAnalyticSupplier(provider, repository, deserializer))
}
t.Run(tt.test, func(t *testing.T) {
mock := repository.MockS3Repository{}
tt.mocks(&mock)
provider := mocks.NewMockedGoldenTFProvider(tt.dirName, providerLibrary.Provider(terraform.AWS), shouldUpdate)
s := &S3BucketAnalyticSupplier{
provider,
deserializer,
&mock,
terraform.NewParallelResourceReader(parallel.NewParallelRunner(context.TODO(), 10)),
tf.TerraformProviderConfig{
Name: "test",
DefaultAlias: "eu-west-3",
},
}
got, err := s.Resources()
assert.Equal(t, err, tt.wantErr)
test.CtyTestDiff(got, tt.dirName, provider, deserializer, shouldUpdate, t)
})
}
}

View File

@@ -766,3 +766,157 @@ func TestS3BucketPolicy(t *testing.T) {
})
}
}
func TestS3BucketAnalytic(t *testing.T) {
tests := []struct {
test string
dirName string
mocks func(repository *repository.MockS3Repository)
wantErr error
}{
{
test: "multiple bucket with multiple analytics",
dirName: "aws_s3_bucket_analytics_multiple",
mocks: func(repository *repository.MockS3Repository) {
repository.On(
"ListAllBuckets",
).Return([]*s3.Bucket{
{Name: awssdk.String("bucket-martin-test-drift")},
{Name: awssdk.String("bucket-martin-test-drift2")},
{Name: awssdk.String("bucket-martin-test-drift3")},
}, nil)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift",
).Return(
"eu-west-1",
nil,
)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift2",
).Return(
"eu-west-3",
nil,
)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift3",
).Return(
"ap-northeast-1",
nil,
)
repository.On(
"ListBucketAnalyticsConfigurations",
&s3.Bucket{Name: awssdk.String("bucket-martin-test-drift2")},
"eu-west-3",
).Return(
[]*s3.AnalyticsConfiguration{
{Id: awssdk.String("Analytics_Bucket2")},
{Id: awssdk.String("Analytics2_Bucket2")},
},
nil,
)
},
},
{
test: "cannot list bucket", dirName: "aws_s3_bucket_analytics_list_bucket",
mocks: func(repository *repository.MockS3Repository) {
repository.On("ListAllBuckets").Return(nil, awserr.NewRequestFailure(nil, 403, ""))
},
wantErr: nil,
},
{
test: "cannot list Analytics", dirName: "aws_s3_bucket_analytics_list_analytics",
mocks: func(repository *repository.MockS3Repository) {
repository.On("ListAllBuckets").Return(
[]*s3.Bucket{
{Name: awssdk.String("bucket-martin-test-drift")},
},
nil,
)
repository.On(
"GetBucketLocation",
"bucket-martin-test-drift",
).Return(
"eu-west-3",
nil,
)
repository.On(
"ListBucketAnalyticsConfigurations",
&s3.Bucket{Name: awssdk.String("bucket-martin-test-drift")},
"eu-west-3",
).Return(
nil,
awserr.NewRequestFailure(nil, 403, ""),
)
},
wantErr: nil,
},
}
schemaRepository := testresource.InitFakeSchemaRepository("aws", "3.19.0")
resourceaws.InitResourcesMetadata(schemaRepository)
factory := terraform.NewTerraformResourceFactory(schemaRepository)
deserializer := resource.NewDeserializer(factory)
for _, c := range tests {
t.Run(c.test, func(tt *testing.T) {
shouldUpdate := c.dirName == *goldenfile.Update
session := session.Must(session.NewSessionWithOptions(session.Options{
SharedConfigState: session.SharedConfigEnable,
}))
scanOptions := ScannerOptions{Deep: true}
providerLibrary := terraform.NewProviderLibrary()
remoteLibrary := common.NewRemoteLibrary()
// Initialize mocks
alerter := &mocks.AlerterInterface{}
alerter.On("SendAlert", mock.Anything, mock.Anything).Maybe().Return()
fakeRepo := &repository.MockS3Repository{}
c.mocks(fakeRepo)
var repo repository.S3Repository = fakeRepo
providerVersion := "3.19.0"
realProvider, err := terraform2.InitTestAwsProvider(providerLibrary, providerVersion)
if err != nil {
t.Fatal(err)
}
provider := terraform2.NewFakeTerraformProvider(realProvider)
provider.WithResponse(c.dirName)
// Replace mock by real resources if we are in update mode
if shouldUpdate {
err := realProvider.Init()
if err != nil {
t.Fatal(err)
}
provider.ShouldUpdate()
repo = repository.NewS3Repository(client.NewAWSClientFactory(session), cache.New(0))
}
remoteLibrary.AddEnumerator(aws.NewS3BucketAnalyticEnumerator(repo, factory, tf.TerraformProviderConfig{
Name: "test",
DefaultAlias: "eu-west-3",
}))
remoteLibrary.AddDetailsFetcher(resourceaws.AwsS3BucketAnalyticsConfigurationResourceType, aws.NewS3BucketAnalyticDetailsFetcher(provider, deserializer))
s := NewScanner(nil, remoteLibrary, alerter, scanOptions)
got, err := s.Resources()
assert.Equal(tt, c.wantErr, err)
if err != nil {
return
}
test.TestAgainstGoldenFile(got, resourceaws.AwsS3BucketAnalyticsConfigurationResourceType, c.dirName, provider, deserializer, shouldUpdate, tt)
})
}
}