In a separate browser tab, log in to your AWS Console.
Navigate to IAM > Policies to create a new policy that your Hava Cross-Account role can use.
Copy the following policy document into the JSON policy editor:
{
"Version" : "2012-10-17" ,
"Statement" : [
{
"Action" : [
"acm:DescribeCertificate" ,
"acm:GetCertificate" ,
"acm:ListCertificates" ,
"apigateway:GET" ,
"appstream:Get*" ,
"appsync:GetApiCache" ,
"appsync:ListApiKeys" ,
"appsync:ListDataSources" ,
"appsync:ListDomainNames" ,
"appsync:ListFunctions" ,
"appsync:ListGraphqlApis" ,
"appsync:ListResolvers" ,
"appsync:ListSourceApiAssociations" ,
"appsync:ListTagsForResource" ,
"appsync:ListTypes" ,
"autoscaling:Describe*" ,
"cloudformation:List*" ,
"cloudfront:Get*" ,
"cloudfront:List*" ,
"cloudsearch:Describe*" ,
"cloudsearch:List*" ,
"cloudtrail:DescribeTrails" ,
"cloudtrail:GetTrailStatus" ,
"cloudwatch:Describe*" ,
"cloudwatch:Get*" ,
"cloudwatch:List*" ,
"codecommit:BatchGetRepositories" ,
"codecommit:Get*" ,
"codecommit:GitPull" ,
"codecommit:List*" ,
"codedeploy:Batch*" ,
"codedeploy:Get*" ,
"codedeploy:List*" ,
"config:Deliver*" ,
"config:Describe*" ,
"config:Get*" ,
"datapipeline:DescribeObjects" ,
"datapipeline:DescribePipelines" ,
"datapipeline:EvaluateExpression" ,
"datapipeline:GetPipelineDefinition" ,
"datapipeline:ListPipelines" ,
"datapipeline:QueryObjects" ,
"datapipeline:ValidatePipelineDefinition" ,
"directconnect:Describe*" ,
"ds:Check*" ,
"ds:Describe*" ,
"ds:Get*" ,
"ds:List*" ,
"ds:Verify*" ,
"dynamodb:DescribeGlobalTable" ,
"dynamodb:DescribeTable" ,
"dynamodb:ListGlobalTables" ,
"dynamodb:ListTables" ,
"dynamodb:ListTagsOfResource" ,
"ec2:Describe*" ,
"ec2:GetConsoleOutput" ,
"ecr:BatchCheckLayerAvailability" ,
"ecr:BatchGetImage" ,
"ecr:DescribeRepositories" ,
"ecr:GetDownloadUrlForLayer" ,
"ecr:ListImages" ,
"ecs:Describe*" ,
"ecs:List*" ,
"eks:Describe*" ,
"eks:List*" ,
"elasticache:Describe*" ,
"elasticache:List*" ,
"elasticbeanstalk:Check*" ,
"elasticbeanstalk:Describe*" ,
"elasticbeanstalk:List*" ,
"elasticbeanstalk:RequestEnvironmentInfo" ,
"elasticbeanstalk:RetrieveEnvironmentInfo" ,
"elasticfilesystem:DescribeFileSystems" ,
"elasticfilesystem:DescribeMountTargetSecurityGroups" ,
"elasticfilesystem:DescribeMountTargets" ,
"elasticfilesystem:DescribeTags" ,
"elasticloadbalancing:Describe*" ,
"elasticmapreduce:Describe*" ,
"elasticmapreduce:List*" ,
"elastictranscoder:List*" ,
"elastictranscoder:Read*" ,
"es:DescribeDomain" ,
"es:DescribeDomainNodes" ,
"es:DescribeDomains" ,
"es:DescribeElasticsearchDomain" ,
"es:DescribeElasticsearchDomainConfig" ,
"es:DescribeElasticsearchDomains" ,
"es:DescribeReservedElasticsearchInstances" ,
"es:DescribeVpcEndpoints" ,
"es:ESHttpGet" ,
"es:ESHttpHead" ,
"es:ListDomainNames" ,
"es:ListTags" ,
"es:ListVpcEndpointAccess" ,
"es:ListVpcEndpoints" ,
"es:ListVpcEndpointsForDomain" ,
"events:DescribeApiDestination" ,
"events:DescribeArchive" ,
"events:DescribeConnection" ,
"events:DescribeEndpoint" ,
"events:DescribeEventBus" ,
"events:DescribeEventSource" ,
"events:DescribePartnerEventSource" ,
"events:DescribeReplay" ,
"events:DescribeRule" ,
"events:ListApiDestinations" ,
"events:ListArchives" ,
"events:ListConnections" ,
"events:ListEndpoints" ,
"events:ListEventBuses" ,
"events:ListEventSources" ,
"events:ListPartnerEventSourceAccounts" ,
"events:ListPartnerEventSources" ,
"events:ListReplays" ,
"events:ListRuleNamesByTarget" ,
"events:ListRules" ,
"events:ListTagsForResource" ,
"events:ListTargetsByRule" ,
"events:TestEventPattern" ,
"firehose:DescribeDeliveryStream" ,
"firehose:ListDeliveryStreams" ,
"firehose:ListTagsForDeliveryStream" ,
"glacier:DescribeJob" ,
"glacier:DescribeVault" ,
"glacier:GetDataRetrievalPolicy" ,
"glacier:GetJobOutput" ,
"glacier:GetVaultAccessPolicy" ,
"glacier:GetVaultLock" ,
"glacier:GetVaultNotifications" ,
"glacier:ListJobs" ,
"glacier:ListMultipartUploads" ,
"glacier:ListParts" ,
"glacier:ListTagsForVault" ,
"glacier:ListVaults" ,
"iam:GenerateCredentialReport" ,
"iam:Get*" ,
"iam:List*" ,
"inspector:Describe*" ,
"inspector:Get*" ,
"inspector:List*" ,
"iot:Describe*" ,
"iot:Get*" ,
"iot:List*" ,
"kafka:DescribeCluster" ,
"kafka:DescribeClusterV2" ,
"kafka:DescribeVpcConnection" ,
"kafka:ListClientVpcConnections" ,
"kafka:ListClusters" ,
"kafka:ListClustersV2" ,
"kafka:ListNodes" ,
"kafka:ListTagsForResource" ,
"kafka:ListVpcConnections" ,
"kinesis:Describe*" ,
"kinesis:DescribeStream" ,
"kinesis:DescribeStreamConsumer" ,
"kinesis:DescribeStreamSummary" ,
"kinesis:ListShards" ,
"kinesis:ListStreamConsumers" ,
"kinesis:ListStreams" ,
"kinesis:ListTagsForStream" ,
"kms:Describe*" ,
"kms:Get*" ,
"kms:List*" ,
"lambda:Get*" ,
"lambda:List*" ,
"logs:Describe*" ,
"logs:Get*" ,
"logs:TestMetricFilter" ,
"machinelearning:Describe*" ,
"machinelearning:Get*" ,
"opsworks:Describe*" ,
"opsworks:Get*" ,
"organizations:ListAccounts" ,
"rds:Describe*" ,
"rds:ListTagsForResource" ,
"redshift:Describe*" ,
"redshift:ViewQueriesInConsole" ,
"route53:Get*" ,
"route53:List*" ,
"route53domains:CheckDomainAvailability" ,
"route53domains:GetDomainDetail" ,
"route53domains:GetOperationDetail" ,
"route53domains:ListDomains" ,
"route53domains:ListOperations" ,
"route53domains:ListTagsForDomain" ,
"s3:GetAccelerateConfiguration" ,
"s3:GetAnalyticsConfiguration" ,
"s3:GetBucket*" ,
"s3:GetInventoryConfiguration" ,
"s3:GetLifecycleConfiguration" ,
"s3:GetMetricsConfiguration" ,
"s3:GetReplicationConfiguration" ,
"s3:List*" ,
"sdb:GetAttributes" ,
"sdb:List*" ,
"sdb:Select*" ,
"ses:Get*" ,
"ses:List*" ,
"sns:Get*" ,
"sns:List*" ,
"sqs:GetQueueAttributes" ,
"sqs:ListQueues" ,
"sqs:ReceiveMessage" ,
"storagegateway:Describe*" ,
"storagegateway:List*" ,
"swf:Count*" ,
"swf:Describe*" ,
"swf:Get*" ,
"swf:List*" ,
"tag:Get*" ,
"trustedadvisor:Describe*" ,
"waf-regional:Get*" ,
"waf-regional:List*" ,
"waf:Get*" ,
"waf:List*" ,
"wafv2:GetWebACL" ,
"wafv2:ListResourcesForWebACL" ,
"wafv2:ListTagsForResource" ,
"wafv2:ListWebACLs" ,
"workspaces:Describe*"
] ,
"Effect" : "Allow" ,
"Resource" : "*"
}
]
}
Click "Create Policy" and the new policy will be created.
Select the Amazon Data Source.
Ensure the "Cross Account Role" tab is selected.
Click on the "Auto Config" button. This will open up your AWS console in the Create Role dialogue with the fields pre-filled:
Ensure the Account ID and External ID match the dialogue window in Hava.
Click "Create Role" then select the new role from the list displayed.
Paste the Role ARN into the Hava dialogue box, add an optional name and click "Import"
Hava will connect to your environment and pull back the resources and relationships between them and build a complete visualisation of your environment.
From this point on, Hava will sync with your AWS environment periodically and keep track of any structural changes from the VPC level down.