Created February 6, 2018 22:13
Prepare a Terraform directory for remote state properly, in a way that is re-usable and shareable between developers, can be used multiple times in the same region (via a different env name), and can be used across different accounts and regions (because of the bucket/key naming scheme).
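For example (hypothetical values): running the script with -e dev from a folder named gitlab, against AWS account 123456789012 in eu-west-1, would create/reuse a bucket named terraform-deploy-fragments-123456789012-eu-west-1 and write a remote_state.tf roughly like this:

terraform {
  backend "s3" {
    bucket = "terraform-deploy-fragments-123456789012-eu-west-1"
    key    = "gitlab-dev"
    region = "eu-west-1"
  }
}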
#!/usr/bin/env python3
'''
This simple helper creates an S3 bucket for remote state usage, and
then creates a .tf file with the remote state information.  This is
great when a team (or pair) is developing on the same environment,
and helps allow a stack to be used on multiple accounts, or many
times on the same account (depending on the uniqueness of the
region, stack name, or env name).

Written by Farley <[email protected]> <[email protected]>

NOTE: The remote_state.tf.template file must be in the current
folder when you execute this script.  That file should have
exactly the following contents...

terraform {
  backend "s3" {
    bucket = "REPLACE_BUCKET_NAME_HERE
    key    = "REPLACE_KEY_HERE
    region = "REPLACE_BUCKET_REGION_HERE
  }
}

(The missing closing quotes above are intentional; this script appends
them when it fills in the values.)
'''
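# Example usage (the filename below is hypothetical; adjust to whatever you saved this script as):
#   python3 prepare_remote_state.py -e dev
#   python3 prepare_remote_state.py -e prod -s gitlab -r us-east-1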
##############
# Libraries
##############
import boto3
import sys
import os
import time
from optparse import OptionParser
from colorama import init
from colorama import Fore, Back, Style
init()
##############
# Cli args
##############
usage = "usage: %prog -e env-name"
parser = OptionParser(usage=usage)
parser.add_option("-r", "--region",
                  dest="region",
                  default="eu-west-1",
                  help="What AWS region we're deploying this stack to. Default: eu-west-1",
                  metavar="region-name")
parser.add_option("-e", "--env-type",
                  dest="env_type",
                  default="",
                  help="What is the env name of this stack, eg: dev/prod/stage. Default: -none-",
                  metavar="environment-name")
parser.add_option("-s", "--stack-name",
                  dest="stack_name",
                  default="",
                  help="What is the stack name of this stack. Eg: gitlab, nilobot, production, farleytest. Default: -CurrentFolderName-",
                  metavar="stack-name")
parser.add_option("-b", "--bucket-prefix",
                  dest="bucket_prefix",
                  default="terraform-deploy-fragments",
                  help="The prefix of the bucket name that we will use/create. Default: terraform-deploy-fragments. Do NOT specify this unless you know what you're doing",
                  metavar="bucket-prefix")
parser.add_option("-a", "--account-id",
                  dest="account_id",
                  default="",
                  help="The AWS Account ID we will be working with. WARNING: Do NOT specify this unless you know what you're doing, this script will auto-detect your account id.",
                  metavar="aws-account-id")
parser.add_option("-f", "--force",
                  dest="force",
                  action="store_true",
                  default=False,
                  help="If we want to force this script to continue past CLI prompts automatically. This is intended for use in CI/CD-type automation.")
(options, args) = parser.parse_args()
##############
# Input validation
##############
if not os.path.isfile('remote_state.tf.template'):
    print(Fore.RED + Style.BRIGHT + "ERROR: This script must be run from a folder containing a remote_state.tf.template file" + Style.RESET_ALL)
    exit(1)

# Simple input validation
if options.region == "":
    print(Fore.RED + Style.BRIGHT + "ERROR: You MUST specify a valid region with -r" + Style.RESET_ALL)
    parser.print_usage()
    exit(1)
elif options.bucket_prefix == "":
    print(Fore.RED + Style.BRIGHT + "ERROR: You MUST specify a valid bucket prefix with -b" + Style.RESET_ALL)
    parser.print_usage()
    exit(1)
################
# Helpers
################
# Simple yes/no ask-er
def query_yes_no(question, default="yes"):
    valid = {"yes": True, "y": True, "ye": True,
             "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == '':
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' "
                             "(or 'y' or 'n').\n")
# Get this AWS Account ID from the API
def get_account_id(region):
    # TODO: Try a few other common API calls (describe instances? or S3 calls) that can give us the account ID as a fallback.
    #       However, use security groups first, as every region automatically has a "default" security group which we can use on a fresh account
    ec2 = boto3.client('ec2', region_name=region)
    try:
        result = ec2.describe_security_groups(
            GroupNames=[
                'default',
            ]
        )
        output = result['SecurityGroups'][0]['OwnerId']
        aws_account_id_cache = output
        return output
    except Exception:
        e = sys.exc_info()[0]
        print("%r" % e)
        raise Exception(Fore.RED + Style.BRIGHT + "Unable to get AWS Account ID automatically.\n Are you sure you have an AWS CLI Profile chosen and valid credentials setup? (hint: aws configure)" + Style.RESET_ALL)
# Create (or confirm we own) the S3 bucket, and ensure versioning is enabled on it
def create_or_confirm_owned_s3_bucket(bucket_name, region, force=False):
    s3 = boto3.client("s3", region_name=region)
    try:
        s3.head_bucket(
            Bucket=bucket_name,
        )
    except Exception:
        print("Bucket is not created, creating...")
        if not force:
            print(Fore.RED + Style.BRIGHT + 'Warning: The deployment fragments bucket does not exist.')
            print('This region/account must not have any terraform deployments on it.')
            print('If this is not true, cancel and try another combination of region/account_id' + Style.RESET_ALL)
            if not query_yes_no("Create deployment fragments bucket...?"):
                exit(1)
        try:
            # TODO: Validate regex for bucket name [a-z0-9][a-z0-9-.]*
            result = s3.create_bucket(Bucket=bucket_name,
                                      CreateBucketConfiguration={
                                          'LocationConstraint': region
                                      })
            if result['ResponseMetadata']['HTTPStatusCode'] != 200:
                raise Exception(result)
            # Waiting to ensure bucket's full creation before continuing...
            time.sleep(2)
        except Exception as e:
            print(Fore.RED + Style.BRIGHT + "Unable to create bucket because...")
            raise e
    print("Enabling bucket versioning (which should ALWAYS be on for a terraform state bucket)...")
    response = s3.put_bucket_versioning(Bucket=bucket_name,
                                        VersioningConfiguration={'Status': 'Enabled'})
################
# Main Logic
################
# Check if remote_state.tf already exists (aka this is already setup...)
if os.path.isfile('remote_state.tf'):
    print(Fore.RED + Style.BRIGHT + "Warning: remote_state.tf already exists, this script was probably already run." + Style.RESET_ALL)
    if not options.force:
        if not query_yes_no("Would you like to override/replace this file?"):
            exit(1)
    print("ALERT: Removing remote_state.tf")
    os.remove('remote_state.tf')

# If no stack name specified, assume it is the name of the current folder
print('==================================================================')
if options.stack_name == "":
    ignoreme, options.stack_name = os.path.split(os.getcwd())  # Note: intentionally not using realpath, so we can symlink this prepare script
    print(Style.BRIGHT + "Automatic Stack Name:" + Style.RESET_ALL + " {}".format(options.stack_name))
else:
    print(Style.BRIGHT + "Manual Stack Name:" + Style.RESET_ALL + " {}".format(options.stack_name))

# Get/use our account id
if options.account_id == "":
    options.account_id = get_account_id(options.region)
    print(Style.BRIGHT + "Automatic AWS Account ID:" + Style.RESET_ALL + " {}".format(options.account_id))
else:
    print(Style.BRIGHT + "Manual AWS Account ID:" + Style.RESET_ALL + " {}".format(options.account_id))

# Figure out our bucket name based on our account id, region, and bucket prefix
bucket_name = "{}-{}-{}".format(options.bucket_prefix, options.account_id, options.region)
print(Style.BRIGHT + "Using Bucket:" + Style.RESET_ALL + " {}".format(bucket_name))
print(Style.BRIGHT + "Using Region:" + Style.RESET_ALL + " {}".format(options.region))
print('==================================================================')

print("Creating bucket if necessary...")
create_or_confirm_owned_s3_bucket(bucket_name, options.region, options.force)
# This is the S3 key (object name) the remote state will be stored under.
# If an env name was given, it is appended to the stack name.
keyname = "{}{}".format(options.stack_name, "-{}".format(options.env_type) if options.env_type != "" else "")

print("Creating remote_state.tf file...")
with open("remote_state.tf.template", "rt") as fin:
    with open("remote_state.tf", "wt") as fout:
        for line in fin:
            if 'REPLACE_BUCKET_NAME_HERE' in line:
                fout.write(line.replace('REPLACE_BUCKET_NAME_HERE', bucket_name + '"'))
            elif 'REPLACE_KEY_HERE' in line:
                fout.write(line.replace('REPLACE_KEY_HERE', keyname + '"'))
            else:
                fout.write(line.replace('REPLACE_BUCKET_REGION_HERE', options.region + '"'))

print("Created remote_state.tf: ")
print('')
with open('remote_state.tf', 'r') as f:
    print(f.read())