Scanner for resource storage is ready, with some skeletal work for RDS backups in place.
parent e9ccae6a
import click
from akinaka.client.aws_client import AWS_Client
from akinaka.libs import helpers, scan_resources_storage
from time import gmtime, strftime
import logging
import pprint
@click.group()
@click.option("--region", required=True, help="Region your resources are located in")
@click.option("--role-arn", required=True, help="Role ARN which contains necessary assume permissions")
@click.option("--dry-run", is_flag=True, help="Don't back anything up, just list what would be backed up")
@click.pass_context
def backup(ctx, region, role_arn, dry_run):
    """
    Backup subcommand. Does nothing by itself except pass the global options through to its
    subcommands via ctx
    """
    # Stash the shared options on the click context so every subcommand can read them.
    ctx.obj = {
        'region': region,
        'role_arn': role_arn,
        'dry_run': dry_run,
        # Propagated from the parent group's context (set by the top-level CLI).
        'log_level': ctx.obj.get('log_level')
    }
@backup.command()
@click.pass_context
def backup_all(ctx):
    """ Backup all data in any found instances of RDS, Aurora, and S3 """
    region = ctx.obj.get('region')
    role_arn = ctx.obj.get('role_arn')
    dry_run = ctx.obj.get('dry_run')

    scanner = scan_resources_storage.ScanResources(region, role_arn)
    # Keep and display the scan result; previously it was discarded, so
    # --dry-run printed nothing despite promising to list what would be backed up.
    found_arns = scanner.scan_all()

    print("Will attempt to backup the following resources, unless this is a dry run:")
    pprint.pprint(found_arns)

    if dry_run:
        exit(0)

    # TODO: implement the actual backup — see the per-service subcommands (e.g. `rds`).
@backup.command()
@click.pass_context
def aurora(ctx):
    """ Backup all aurora clusters found """
    region = ctx.obj.get('region')
    role_arn = ctx.obj.get('role_arn')
    dry_run = ctx.obj.get('dry_run')

    scanner = scan_resources_storage.ScanResources(region, role_arn)
    # Keep and display the scan result; previously it was discarded, so
    # --dry-run printed nothing despite promising to list what would be backed up.
    aurora_arns = scanner.scan_aurora()

    print("Will attempt to backup the following Aurora clusters, unless this is a dry run:")
    pprint.pprint(aurora_arns)

    if dry_run:
        exit(0)

    # TODO: implement Aurora backups, mirroring the RDS flow once it is complete.
@backup.command()
@click.pass_context
def rds(ctx):
    """ Backup all RDS instances found """
    region = ctx.obj.get('region')
    role_arn = ctx.obj.get('role_arn')
    dry_run = ctx.obj.get('dry_run')

    scanner = scan_resources_storage.ScanResources(region, role_arn)
    rds_arns = scanner.scan_rds()

    print("Will attempt to backup the following RDS instances, unless this is a dry run:")
    pprint.pprint(rds_arns)

    if dry_run:
        exit(0)

    # Imported lazily so the scan/dry-run path doesn't pay for the backup machinery.
    from .rds import backup_rds
    # Named `backup_client` rather than `rds` to avoid shadowing this command function.
    backup_client = backup_rds.BackupRDS(region=region, role_arn=role_arn)
    backup_client.backup(rds_arns=rds_arns)
@backup.command()
@click.pass_context
def s3(ctx):
    """ Backup all s3 buckets found """
    region = ctx.obj.get('region')
    role_arn = ctx.obj.get('role_arn')
    dry_run = ctx.obj.get('dry_run')

    scanner = scan_resources_storage.ScanResources(region, role_arn)
    # Keep and display the scan result; previously it was discarded, so
    # --dry-run printed nothing despite promising to list what would be backed up.
    s3_arns = scanner.scan_s3()

    print("Will attempt to backup the following S3 buckets, unless this is a dry run:")
    pprint.pprint(s3_arns)

    if dry_run:
        exit(0)

    # TODO: implement S3 backups, mirroring the RDS flow once it is complete.
"""
TODO:
Encrypted manual snapshots that don't use the default RDS encryption key can be shared, but you must
first share the KMS key with the account that you want to share the snapshot with. To share the key
with another account, share the IAM policy with the primary and secondary accounts. Shared encrypted
snapshots can't be restored directly from the destination account. First, copy the snapshot to the
destination account by using a KMS key in the destination account.
"""
#!/usr/bin/env python3
import boto3
from akinaka.client.aws_client import AWS_Client
from akinaka.libs import helpers
import logging
# Module-level setup: configure logging and create the shared AWS client
# used by this module's helpers.
helpers.set_logger()
aws_client = AWS_Client()
class BackupRDS():
    """
    Skeletal backup driver for RDS instances in a single region.

    NOTE(review): `backup` is currently a stub — it only echoes the ARNs it was
    given; the snapshot/KMS workflow sketched below is not implemented yet.
    """

    def __init__(self, region, role_arn):
        # Region the instances live in, and the role assumed to reach them.
        self.region = region
        self.role_arn = role_arn

    def backup(self, rds_arns):
        """
        Planned workflow (not yet implemented):

        1. Create a KMS key in the source account
        1. Share that KMS key with the destination account (caller — this account)
        1. Create a snapshot encrypted with this KMS key
        1. Share the KMS key with the destination account
        """
        print(rds_arns)
"""
Scans services that may contain data, and returns a list with information on any storage found:
FIXME: EFS is currently out of scope, due to it not being treated as a native AWS service.
i.e. There is no way to talk to objects stored in EFS outside of a VPC without peering.
[
{
"rds_arns": [
"rds_arn_a",
"rds_arn_b"
],
"aurora_arns": [
"rds_arn_a",
"rds_arn_b"
],
"s3_arns": [
"s3_arn_a",
"s3_arn_b"
]
}
]
"""
import boto3
from akinaka.client.aws_client import AWS_Client
# Shared AWS client factory used by ScanResources to build per-service clients.
aws_client = AWS_Client()
class ScanResources():
    """
    Discover ARNs of storage-bearing resources (RDS, Aurora, S3) in one region,
    using `aws_client` to assume `role_arn` for each service call.
    """

    def __init__(self, region, role_arn):
        self.region = region
        self.role_arn = role_arn

    def scan_all(self):
        """ Scan all resource types in scope, and return separate lists for each """
        merged = {}
        # Each scanner returns a single-key dict, so updating in order is
        # equivalent to merging them; keys never collide.
        for scanner in (self.scan_rds, self.scan_aurora, self.scan_s3):
            merged.update(scanner())
        return merged

    def scan_rds(self):
        """ Return list of ARNs for all RDS objects """
        client = aws_client.create_client('rds', self.region, self.role_arn)
        instances = client.describe_db_instances()['DBInstances']
        return { 'rds_arns': [instance['DBInstanceArn'] for instance in instances] }

    def scan_aurora(self):
        """ Return list of ARNs for all RDS Aurora objects """
        client = aws_client.create_client('rds', self.region, self.role_arn)
        clusters = client.describe_db_clusters()['DBClusters']
        return { 'aurora_arns': [cluster['DBClusterArn'] for cluster in clusters] }

    def scan_s3(self):
        """ Return list of ARNs for all S3 buckets """
        client = aws_client.create_client('s3', self.region, self.role_arn)
        buckets = client.list_buckets()['Buckets']
        # S3 bucket ARNs are global — no region or account in the ARN.
        return { 's3_arns': ["arn:aws:s3:::" + bucket['Name'] for bucket in buckets] }
......@@ -10,6 +10,7 @@ def main():
from akinaka.reporting.commands import reporting as reporting_commands
from akinaka.container.commands import container as container_commands
from akinaka.k8s.commands import k8s as k8s_commands
from akinaka.backup.commands import backup as backup_commands
@click.group()
@click.option("--log-level", '-l', default="INFO", type=click.Choice(["INFO", "ERROR", "DEBUG"]), help="How much information to show in logging. Default is INFO")
......@@ -23,6 +24,7 @@ def main():
cli.add_command(reporting_commands)
cli.add_command(container_commands)
cli.add_command(k8s_commands)
cli.add_command(backup_commands)
cli()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment