Copy RDS snapshot to another account

parent 5bc27f64
......@@ -6,28 +6,24 @@ import logging
import pprint
import boto3
helpers.set_logger()
@click.group()
@click.option("--region", required=True, help="Region your resources are located in")
@click.option("--source-role-arn", required=True, help="ARN of a role the account to back up _from_")
@click.option("--destination-role-arn", required=True, help="ARN of an assumable role in the account to back up _to_")
@click.option("--dry-run", is_flag=True, help="Don't back anything up, just list would be backed up")
@click.pass_context
def backup(ctx, region, source_role_arn, destination_role_arn, dry_run):
    """
    Backup subcommand. Does nothing by itself except pass the global options through to its
    subcommands via ctx.
    """
    # Everything the subcommands need is stashed on the click context object;
    # log_level was set by the parent group, so carry it through unchanged.
    ctx.obj = {
        'region': region,
        'source_role_arn': source_role_arn,
        'destination_role_arn': destination_role_arn,
        'dry_run': dry_run,
        'log_level': ctx.obj.get('log_level')
    }
......@@ -36,86 +32,37 @@ def backup(ctx, region, role_arn, live_account, backup_account, dry_run):
@backup.command()
@click.pass_context
def backup_all(ctx):
    """ Backup all data in any found instances of RDS, Aurora, and S3 """
    region = ctx.obj.get('region')
    role_arn = ctx.obj.get('role_arn')
    dry_run = ctx.obj.get('dry_run')

    # Scan the account for every resource type we know how to back up, so the
    # operator can see exactly what a real run would touch.
    scanner = scan_resources_storage.ScanResources(region, role_arn)
    scanner.scan_all()

    # A dry run stops after listing; nothing is backed up.
    if dry_run:
        exit(0)

    # TODO: actual backup implementation, e.g.:
    # from .backup_all import backup_backup_all
    # backup_all = backup_backup_all.backup_all(region=region, role_arn=role_arn)
@backup.command()
@click.pass_context
def aurora(ctx):
    """ Backup all aurora clusters found """
    region = ctx.obj.get('region')
    role_arn = ctx.obj.get('role_arn')
    dry_run = ctx.obj.get('dry_run')

    # List the Aurora clusters that a real run would back up.
    scanner = scan_resources_storage.ScanResources(region, role_arn)
    scanner.scan_aurora()

    # A dry run stops after listing; nothing is backed up.
    if dry_run:
        exit(0)

    # TODO: actual backup implementation, e.g.:
    # from .aurora import backup_aurora
    # aurora = backup_aurora.aurora(region=region, role_arn=role_arn)
@backup.command()
@click.pass_context
@click.option("--take-snapshot", is_flag=True, help="TODO: Boolean, default false. Take a live snapshot now, or take the existing latest snapshot")
@click.option("--db-arns", required=False, help="Comma separated list of either DB names or ARNs to transfer")
def rds(ctx, take_snapshot, db_arns):
    """
    Backup all RDS instances found if --db-arns is omitted, else look for the latest
    snapshots for those DB names given and transfer them to the destination account
    """
    region = ctx.obj.get('region')
    source_role_arn = ctx.obj.get('source_role_arn')
    destination_role_arn = ctx.obj.get('destination_role_arn')
    dry_run = ctx.obj.get('dry_run')

    if db_arns:
        # --db-arns is a comma separated string (see the option help text):
        # strip spaces and split into a real list. The previous code wrapped
        # the whole string in a single-element list, which broke the per-DB
        # loop in TransferSnapshot.transfer_snapshot().
        db_arns = db_arns.replace(' ', '').split(',')
    else:
        # No explicit DBs given: scan the source account for all RDS instances.
        scanner = scan_resources_storage.ScanResources(region, source_role_arn)
        db_arns = scanner.scan_rds()['rds_arns']

    logging.info("Will attempt to backup the following RDS instances, unless this is a dry run:")
    logging.info(db_arns)

    # A dry run stops after listing; nothing is transferred.
    if dry_run:
        exit(0)

    from .rds import transfer_snapshot
    rds = transfer_snapshot.TransferSnapshot(
        region=region,
        source_role_arn=source_role_arn,
        destination_role_arn=destination_role_arn
    )

    # Snapshots must be (re)encrypted with a KMS key shared between the source
    # and destination accounts before they can be copied across accounts.
    shared_kms_key = rds.get_shared_kms_key()
    rds.transfer_snapshot(take_snapshot=take_snapshot, db_arns=db_arns, source_kms_key=shared_kms_key)
"""
TODO:
Encrypted manual snapshots that don't use the default RDS encryption key can be shared, but you must
first share the KMS key with the account that you want to share the snapshot with. To share the key
with another account, share the IAM policy with the primary and secondary accounts. Shared encrypted
snapshots can't be restored directly from the destination account. First, copy the snapshot to the
destination account by using a KMS key in the destination account.
"""
#!/usr/bin/env python3
import boto3
from akinaka.client.aws_client import AWS_Client
from akinaka.libs import helpers, kms_share
import logging
helpers.set_logger()
aws_client = AWS_Client()
class BackupRDS():
    """
    Copy RDS data from a live account to a backup account using a KMS key
    shared between the two.

    NOTE(review): this class appears to be superseded by TransferSnapshot,
    and backup() looks truncated in this view (shared_key is computed but
    never used) — confirm against the full file before relying on it.
    """

    def __init__(self, region, assumable_role_arn, live_account, backup_account):
        """
        :param region: AWS region the resources live in
        :param assumable_role_arn: ARN of a role that can be assumed via STS
        :param live_account: account ID of the account holding the data
        :param backup_account: account ID the backups are copied to
        """
        self.region = region
        self.assumable_role_arn = assumable_role_arn
        self.live_account = live_account
        self.backup_account = backup_account

    def backup(self, rds_arns):
        """
        Obtain a KMS key shared between the live and backup accounts.

        :param rds_arns: ARNs of the RDS instances to back up — unused in the
            visible portion of this method; presumably consumed by code not
            shown here.
        """
        kms_sharer = kms_share.KMSShare(
            region = self.region,
            assumable_role_arn = self.assumable_role_arn,
            share_from_account = self.live_account,
            share_to_account = self.backup_account
        )
        shared_key = kms_sharer.get_kms_key(self.live_account)
"""
Sharing snapshots between AWS accounts involves:
1. Creating a key to share between those two accounts, and sharing it
2. (Re)encrypting a snapshot from the live account with the shared key
3. Creating a key on the destination account
4. Copying and re-encrypting the copy to the destination account with that key
This module has all the methods needed to do that, and uses them in the entrypoint
method: transfer_snapshot().
"""
#!/usr/bin/env python3
from datetime import datetime
from operator import itemgetter
from akinaka.client.aws_client import AWS_Client
from akinaka.libs import helpers, kms_share
import logging
import time
helpers.set_logger()
aws_client = AWS_Client()
class TransferSnapshot():
    """
    Transfer RDS snapshots from a source AWS account to a destination account.

    Sharing snapshots between accounts involves:
      1. Creating a KMS key shared between the two accounts
      2. (Re)encrypting a snapshot from the source account with that key
      3. Sharing the recrypted snapshot with the destination account
      4. Copying it into the destination account, re-encrypted with a key
         local to that account

    The entrypoint method is transfer_snapshot().
    """

    def __init__(
            self,
            region,
            source_role_arn,
            destination_role_arn
        ):
        """
        :param region: AWS region the snapshots live in
        :param source_role_arn: assumable role ARN in the account to copy from
        :param destination_role_arn: assumable role ARN in the account to copy to
        """
        self.region = region
        self.source_role_arn = source_role_arn
        self.destination_role_arn = destination_role_arn

        # Resolve both account IDs up front; they are needed for key aliases,
        # snapshot names, and snapshot sharing.
        source_sts_client = aws_client.create_client('sts', self.region, self.source_role_arn)
        self.source_account = source_sts_client.get_caller_identity()['Account']

        destination_sts_client = aws_client.create_client('sts', self.region, self.destination_role_arn)
        self.destination_account = destination_sts_client.get_caller_identity()['Account']

    def get_shared_kms_key(self):
        """
        Create and return shared KMS account between [self.source_account] and [self.destination_account]
        """
        kms_sharer = kms_share.KMSShare(
            region = self.region,
            assumable_role_arn = self.source_role_arn,
            share_from_account = self.source_account,
            share_to_account = self.destination_account
        )

        return kms_sharer.get_kms_key(self.source_account)

    def create_local_kms_key(self):
        """
        Search for a key name that should exists if this has been run before. If not found,
        create it. In both cases, return the key.
        """
        destination_kms_client = aws_client.create_client('kms', self.region, self.destination_role_arn)
        # The alias encodes the source account so repeat runs find the same key.
        key_alias = "alias/{}".format(self.source_account)

        try:
            kms_key = destination_kms_client.describe_key(KeyId=key_alias)
            logging.info("Found key: {}".format(kms_key['KeyMetadata']['Arn']))
        except destination_kms_client.exceptions.NotFoundException:
            kms_key = destination_kms_client.create_key()
            logging.info("No existing key found, so we created one: {}".format(kms_key['KeyMetadata']['Arn']))
            destination_kms_client.create_alias(
                AliasName=key_alias,
                TargetKeyId=kms_key['KeyMetadata']['Arn']
            )

        return kms_key

    def transfer_snapshot(self, take_snapshot, db_arns, source_kms_key):
        """
        For every DB in [db_arns], call methods to:

        1. Either take a new snapshot (TODO), or use the latest automatically created one
        2. Recrypt the snapshot with [source_kms_key]. This key must be shared between accounts
        3. Share it with self.destination_account
        4. Copy it to self.destination_account, re-encrypted with a key local
           to that account (see create_local_kms_key())
        """
        for arn in db_arns:
            if take_snapshot:
                source_snapshot = self.take_snapshot(arn, source_kms_key)
            else:
                source_snapshot = self.get_latest_snapshot(arn)

            # Recrypt in the source account with the shared key, then open the
            # snapshot up to the destination account.
            source_rds_client = aws_client.create_client('rds', self.region, self.source_role_arn)
            recrypted_snapshot = self.recrypt_snapshot(source_rds_client, source_snapshot, source_kms_key)
            self.share_snapshot(recrypted_snapshot, self.destination_account)

            # Copy into the destination account, re-encrypting with a key that
            # lives there (shared encrypted snapshots can't be restored directly).
            destination_rds_client = aws_client.create_client('rds', self.region, self.destination_role_arn)
            self.recrypt_snapshot(destination_rds_client, recrypted_snapshot, self.create_local_kms_key())

    def get_latest_snapshot(self, db_arn):
        """
        Return the latest snapshot for [db_arn], where the ARN can also be the name of the DB

        Note: You can only use the db_arn if you are in the account with the DB in it, else you
        must use the DB name

        Raises KeyError if a snapshot is still being created (no
        SnapshotCreateTime yet) and IndexError if no snapshots exist.
        """
        source_rds_client = aws_client.create_client('rds', self.region, self.source_role_arn)
        snapshots = source_rds_client.describe_db_snapshots(DBInstanceIdentifier=db_arn)['DBSnapshots']

        try:
            # Sort ascending by creation time and take the LAST entry.
            # (The previous code took [0], i.e. the oldest snapshot.)
            latest = sorted(snapshots, key=itemgetter('SnapshotCreateTime'))[-1]
        except (KeyError, IndexError):
            # Previously this logged and fell through, crashing below with an
            # unbound `latest` (NameError); re-raise explicitly instead.
            logging.error("Couldn't get the latest snapshot, probably because it's still being made")
            raise

        logging.info("Found automatic snapshot {}".format(latest['DBSnapshotIdentifier']))
        return latest

    def make_snapshot_name(self, db_name):
        """
        Return a snapshot identifier of the form
        <db_name>-<UTC yyyymmdd-hhmm>-<destination account ID>.
        """
        date = datetime.utcnow().strftime('%Y%m%d-%H%M')
        return "{}-{}-{}".format(db_name, date, self.destination_account)

    def recrypt_snapshot(self, rds_client, snapshot, kms_key, tags=None):
        """
        Recrypt a snapshot [snapshot] with the KMS key [kms_key]. Return the recrypted snapshot.

        If a snapshot with the generated name already exists (e.g. from a
        previous run), return that existing snapshot instead of copying again.
        """
        new_snapshot_id = self.make_snapshot_name(snapshot['DBInstanceIdentifier'])

        try:
            recrypted_snapshot = rds_client.copy_db_snapshot(
                SourceDBSnapshotIdentifier=snapshot['DBSnapshotArn'],
                TargetDBSnapshotIdentifier=new_snapshot_id,
                KmsKeyId=kms_key['KeyMetadata']['Arn'],
                Tags=[ { 'Key': 'akinaka-made', 'Value': 'true' }, ] # FIXME: Add custom tags
            )
            # copy_db_snapshot is asynchronous; block until the copy is usable.
            self.wait_for_snapshot(recrypted_snapshot['DBSnapshot'], rds_client)

            logging.info("Recrypted snapshot {} with key {}".format(
                recrypted_snapshot['DBSnapshot']['DBSnapshotIdentifier'],
                kms_key['KeyMetadata']['Arn']
            ))
            return recrypted_snapshot['DBSnapshot']
        except rds_client.exceptions.DBSnapshotAlreadyExistsFault:
            snapshots = rds_client.describe_db_snapshots(DBSnapshotIdentifier=new_snapshot_id)
            logging.info("Found existing snapshot {}".format(snapshots['DBSnapshots'][0]['DBSnapshotIdentifier']))
            return snapshots['DBSnapshots'][0]

    def share_snapshot(self, snapshot, destination_account):
        """
        Share [snapshot] with [destination_account]
        """
        source_rds_client = aws_client.create_client('rds', self.region, self.source_role_arn)

        # 'restore' is the snapshot attribute that grants another account
        # permission to copy/restore the snapshot.
        source_rds_client.modify_db_snapshot_attribute(
            DBSnapshotIdentifier=snapshot['DBSnapshotIdentifier'],
            AttributeName='restore',
            ValuesToAdd=[destination_account]
        )

        logging.info("Recrypted snapshot {} has been shared with account {}".format(snapshot['DBSnapshotIdentifier'], destination_account))

    def wait_for_snapshot(self, snapshot, rds_client):
        """
        Check if [snapshot] is ready by querying it every 10 seconds
        """
        while True:
            snapshotcheck = rds_client.describe_db_snapshots(
                DBSnapshotIdentifier=snapshot['DBSnapshotIdentifier']
            )['DBSnapshots'][0]

            if snapshotcheck['Status'] == 'available':
                logging.info("Snapshot {} complete and available!".format(snapshot['DBSnapshotIdentifier']))
                break
            else:
                logging.info("Snapshot {} in progress, {}% complete".format(snapshot['DBSnapshotIdentifier'], snapshotcheck['PercentProgress']))
                time.sleep(10)

    def take_snapshot(self, db_name, source_kms_key):
        """
        TODO: Take a new snapshot of [db_name] using [source_kms_key]. If we're here, we don't need to
        recrypt, since we already have a shared key to begin with. Some of the logic in
        transfer_snapshot() will need to be changed to accommodate this once ready
        """
        # Fail fast instead of returning the string "TODO", which previously
        # flowed into recrypt_snapshot() and failed with an obscure TypeError.
        raise NotImplementedError("Taking a live snapshot is not implemented yet; omit --take-snapshot")
......@@ -19,7 +19,7 @@ class AWS_Client():
credentials = sts_client.assume_role(
RoleArn=role_arn,
RoleSessionName="gitlab-{}".format(strftime("%Y%m%d%H%M%S", gmtime())),
RoleSessionName="akinaka-{}".format(strftime("%Y%m%d%H%M%S", gmtime())),
DurationSeconds=valid_for or 900
)
......
......@@ -39,7 +39,7 @@ class KMSShare():
live_kms_client = aws_client.create_client('kms', self.region, self.assumable_role_arn)
key_alias = 'alias/RDSBackupRestoreSharedKeyWith{}'.format(self.share_to_account)
logging.info("Searching for Customer Managed KMS Key with alias {} that is already shared with account {}...".format(key_alias, self.share_to_account))
logging.info("Searching for Customer Managed KMS Key with alias {} that is already shared with account {}".format(key_alias, self.share_to_account))
try:
key = live_kms_client.describe_key(KeyId=key_alias)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment