{
"metadata": {
"name": "build_ec2_instances_for_django"
},
"nbformat": 3,
"nbformat_minor": 0,
"worksheets": [
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Goals of this notebook\n",
"\n",
"* instantiate a new EC2 instance with which to build a new image\n",
"* configure the instance -- what's involved? Essentially turn https://github.com/Gluejar/regluit/blob/master/README.md into a fabric script\n",
"* security group\n",
"* database\n",
"* database security group\n",
"* IAM\n",
"* elastic IP\n",
"\n",
"\n",
"I'm starting to figure out the pieces using this IPython notebook, but ultimately what am I producing? Something that Eric and Andromeda can run:\n",
"\n",
"* a set of fabric commands (https://github.com/Gluejar/regluit/blob/master/fabfile.py)?\n",
"* some other form?\n"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"from regluit.sysadmin import aws\n",
"reload(aws)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# look up Ubuntu EC2 image ids from alestic.com\n",
"# us-east-1 Ubuntu 12.04 LTS Precise\n",
"# EBS boot\tami-e7582d8e\n",
"\n",
"#AMI_UBUNTU_12_04_ID = 'ami-79c0ae10' # older one\n",
"AMI_UBUNTU_12_04_ID = 'ami-e7582d8e'\n",
"image = aws.ec2.get_all_images(image_ids=[AMI_UBUNTU_12_04_ID])[0]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# name of image follows Eric Hammond's convention of dating the images\n",
"\n",
"image.id, image.name"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sometimes we have an instance running or created already\n",
"# so we just need to get a reference to it (instead of creating a new one)\n",
"\n",
"instance = aws.instance('new_test')\n",
"instance, instance.state"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"if instance.state == 'stopped':\n",
"    instance.start()"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# launch a new instance\n",
"# use default security group for now -- probably want to make a new one\n",
"\n",
"INSTANCE_NAME = 'new_test'\n",
"SECURITY_GROUP_NAME = 'testsg1'\n",
"\n",
"(instance, cmd) = aws.launch_instance(ami=AMI_UBUNTU_12_04_ID,\n",
"        instance_type='t1.micro',\n",
"        key_name='rdhyee_public_key',\n",
"        group_name=SECURITY_GROUP_NAME,\n",
"        tag='new_instance',\n",
"        cmd_shell=False)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"instance.update()"
],
"language": "python",
"metadata": {},
"outputs": []
},
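{
"cell_type": "markdown",
"metadata": {},
"source": [
"Rather than calling `instance.update()` by hand, a small polling loop can wait until the instance reaches the `running` state before we tag it or try to ssh in. This is only a minimal sketch -- the timeout and interval values are arbitrary choices, not anything required by boto."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# minimal sketch: poll instance.update() until the instance is 'running' (timeout/interval are arbitrary)\n",
"import time\n",
"\n",
"def wait_for_state(instance, state='running', timeout=300, interval=5):\n",
"    waited = 0\n",
"    while instance.update() != state and waited < timeout:\n",
"        time.sleep(interval)\n",
"        waited += interval\n",
"    return instance.state\n",
"\n",
"wait_for_state(instance)"
],
"language": "python",
"metadata": {},
"outputs": []
},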
{
"cell_type": "code",
"collapsed": false,
"input": [
"# add name\n",
"INSTANCE_NAME = 'new_test'\n",
"instance.add_tag('Name', INSTANCE_NAME)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# configure security group testsg1\n",
"\n",
"PORTS_TO_OPEN = [80, 443]\n",
"\n",
"for port in PORTS_TO_OPEN:\n",
"    aws.ec2.authorize_security_group(group_name=SECURITY_GROUP_NAME, ip_protocol='tcp', from_port=port, to_port=port,\n",
"        cidr_ip='0.0.0.0/0')\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"console_output = instance.get_console_output()"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# it takes time for the console output to become non-None -- I don't know exactly how long\n",
"\n",
"print console_output.output"
],
"language": "python",
"metadata": {},
"outputs": []
},
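{
"cell_type": "markdown",
"metadata": {},
"source": [
"A small polling loop saves guessing how long the console output takes to appear. This is a sketch only; the timeout and interval are arbitrary choices."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sketch: poll get_console_output() until it is non-empty or we give up (timeout/interval are arbitrary)\n",
"import time\n",
"\n",
"def wait_for_console_output(instance, timeout=600, interval=30):\n",
"    waited = 0\n",
"    out = instance.get_console_output()\n",
"    while not out.output and waited < timeout:\n",
"        time.sleep(interval)\n",
"        waited += interval\n",
"        out = instance.get_console_output()\n",
"    return out\n",
"\n",
"console_output = wait_for_console_output(instance)\n",
"print console_output.output"
],
"language": "python",
"metadata": {},
"outputs": []
},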
{
"cell_type": "code",
"collapsed": false,
"input": [
"# http://ubuntu-smoser.blogspot.com/2010/07/verify-ssh-keys-on-ec2-instances.html\n",
"\n",
"[line for line in console_output.output.split(\"\\n\") if line.startswith(\"ec2\")]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"instance_id = instance.id\n",
"instance_id"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"output = !source ~/gj_aws.sh; ec2-get-console-output $instance_id | grep -i ec2"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"output"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# copy a command to ssh into the instance\n",
"\n",
"cmdstring = \"ssh -oStrictHostKeyChecking=no ubuntu@{0}\".format(instance.dns_name)\n",
"# works on a mac\n",
"! echo \"$cmdstring\" | pbcopy\n",
"cmdstring"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"dynamic execution of fabric tasks to set up the instance"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# http://docs.fabfile.org/en/1.6/usage/execution.html#using-execute-with-dynamically-set-host-lists\n",
"\n",
"import fabric\n",
"from fabric.api import run, local, env, cd, sudo\n",
"from fabric.operations import get\n",
"\n",
"from regluit.sysadmin import aws\n",
"from StringIO import StringIO\n",
"\n",
"import github\n",
"\n",
"# uncomment for debugging\n",
"# github.enable_console_debug_logging()\n",
"\n",
"from github import Github\n",
"\n",
"from django.conf import settings\n",
"\n",
"# allow us to use our ssh config files (e.g., ~/.ssh/config)\n",
"env.use_ssh_config = True\n",
"\n",
"GITHUB_REPO_NAME = \"Gluejar/regluit\"\n",
"#GITHUB_REPO_NAME = \"rdhyee/working-open-data\"\n",
"\n",
"# maybe generate some random pw -- not sure how important it is to generate some complicated PW if we configure \n",
"# security groups properly\n",
"MYSQL_ROOT_PW = \"unglueit_pw_123\"\n",
"\n",
"\n",
"# can use 3 different types of authn: https://github.com/jacquev6/PyGithub/issues/15\n",
"# can be empty, username/pw, or personal API token (https://github.com/blog/1509-personal-api-tokens)\n",
"g = Github(settings.GITHUB_AUTH_TOKEN)\n",
"\n",
"def host_type():\n",
"    run('uname -s')\n",
"\n",
"def deploy():\n",
"    sudo(\"aptitude update\")\n",
"    sudo(\"yes | aptitude upgrade\")\n",
"    sudo(\"yes | aptitude install git-core apache2 libapache2-mod-wsgi mysql-client python-virtualenv python-mysqldb redis-server python-lxml\")\n",
"    sudo(\"yes | aptitude install python-dev\")\n",
"    sudo(\"yes | aptitude install libmysqlclient-dev\")\n",
"    # http://www.whatastruggle.com/postfix-non-interactive-install\n",
"    sudo(\"DEBIAN_FRONTEND='noninteractive' apt-get install -y -q --force-yes postfix\")\n",
"\n",
"    sudo(\"mkdir /opt/regluit\")\n",
"    sudo(\"chown ubuntu:ubuntu /opt/regluit\")\n",
"\n",
"    run('git config --global user.name \"Raymond Yee\"')\n",
"    run('git config --global user.email \"rdhyee@gluejar.com\"')\n",
"\n",
"    run('ssh-keygen -b 2048 -t rsa -f /home/ubuntu/.ssh/id_rsa -P \"\"')\n",
"\n",
"    # how to get the key and push it to github\n",
"    s = StringIO()\n",
"    get('/home/ubuntu/.ssh/id_rsa.pub', s)\n",
"    repo = g.get_repo(GITHUB_REPO_NAME)\n",
"    key = repo.create_key('test deploy key', s.getvalue())\n",
"\n",
"    # http://debuggable.com/posts/disable-strict-host-checking-for-git-clone:49896ff3-0ac0-4263-9703-1eae4834cda3\n",
"    run('echo -e \"Host github.com\\n\\tStrictHostKeyChecking no\\n\" >> ~/.ssh/config')\n",
"\n",
"    # clone the regluit git repo into /opt/regluit\n",
"    with cd(\"/opt\"):\n",
"        run(\"yes | git clone git@github.com:Gluejar/regluit.git\")\n",
"\n",
"    # for configuring local mysql server (5.5)\n",
"    # http://stackoverflow.com/a/7740571/7782\n",
"    sudo(\"debconf-set-selections <<< 'mysql-server-5.5 mysql-server/root_password password {0}'\".format(MYSQL_ROOT_PW))\n",
"    sudo(\"debconf-set-selections <<< 'mysql-server-5.5 mysql-server/root_password_again password {0}'\".format(MYSQL_ROOT_PW))\n",
"    sudo(\"apt-get -y install mysql-server\")\n",
"\n",
"\n",
"def test_mysql_connection():\n",
"    # test connectivity to mysql-server\n",
"    command = \"\"\"mysql -h 127.0.0.1 --user=root --password=unglueit_pw_123 <<'EOF'\n",
"\n",
"SHOW VARIABLES;\n",
"EOF\n",
"\"\"\"\n",
"    run(command)\n",
"\n",
"def override_for_gluejar_repo():\n",
"    # https://github.com/Gluejar/gluejar_dot_com/settings/keys\n",
"    from StringIO import StringIO\n",
"    from django.conf import settings\n",
"\n",
"\n",
"    GITHUB_REPO_NAME_2 = \"Gluejar/gluejar_dot_com\"\n",
"\n",
"    from github import Github\n",
"    # can use 3 different types of authn: https://github.com/jacquev6/PyGithub/issues/15\n",
"    # can be empty, username/pw, or personal API token (https://github.com/blog/1509-personal-api-tokens)\n",
"    g = Github(settings.GITHUB_AUTH_TOKEN)\n",
"\n",
"    s = StringIO()\n",
"    get('/home/ubuntu/.ssh/id_rsa.pub', s)\n",
"    repo = g.get_repo(GITHUB_REPO_NAME_2)\n",
"    key = repo.create_key('test deploy key', s.getvalue())\n",
"\n",
"    # clone repo\n",
"\n",
"    sudo(\"mkdir /opt/gluejar_dot_com\")\n",
"    sudo(\"chown ubuntu:ubuntu /opt/gluejar_dot_com\")\n",
"    # clone the gluejar_dot_com git repo into /opt/gluejar_dot_com\n",
"    with cd(\"/opt\"):\n",
"        run(\"yes | git clone git@github.com:Gluejar/gluejar_dot_com.git\")\n",
"\n",
"\n",
"    # create gdc db and user\n",
"\n",
"    command = \"\"\"mysql -h 127.0.0.1 --user=root --password=unglueit_pw_123 <<'EOF'\n",
"\n",
"CREATE DATABASE gdc CHARACTER SET utf8 COLLATE utf8_bin;\n",
"CREATE USER 'gdc'@'localhost' IDENTIFIED BY 'gdc';\n",
"\n",
"FLUSH PRIVILEGES;\n",
"\n",
"GRANT ALL PRIVILEGES ON gdc.* TO 'gdc'@'localhost' WITH GRANT OPTION; \n",
"EOF\n",
"\"\"\"\n",
"    run(command)\n",
"\n",
"\n",
"def deploy_next():\n",
"    pass\n",
"\n",
"\n",
"#hosts = ['ubuntu@ec2-75-101-232-46.compute-1.amazonaws.com']\n",
"hosts = [\"ubuntu@{0}\".format(instance.dns_name)]\n",
"\n",
"fabric.tasks.execute(deploy_next, hosts=hosts)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Commands to add?\n",
"\n",
"By the time we run through much of the fabric script, a reboot of the system is required: after installing MySQL locally, it seems that the instance needs to be rebooted. Here's some code to do so. The remaining problem is how to reboot, wait for the reboot to complete, and then pick up the next steps.\n",
"\n",
"I could issue a fabric command to apply security upgrades (`sudo unattended-upgrade`),\n",
"\n",
"or\n",
"\n",
"I think there is a boto command to restart the instance.\n"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"rebooted_instance = instance.reboot()\n",
"rebooted_instance"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# looks like reboot works, but the instance status stays 'running' for the whole time the reboot is happening...\n",
"# maybe we wait a specific amount of time and then try to connect"
],
"language": "python",
"metadata": {},
"outputs": []
},
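{
"cell_type": "markdown",
"metadata": {},
"source": [
"One hedged sketch of the reboot-and-wait problem described above: reboot, give the machine a head start, then poll port 22 until ssh answers. The 30-second head start, the timeout, and the polling interval are all arbitrary choices."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sketch: reboot, then poll port 22 until the instance accepts connections again\n",
"import socket\n",
"import time\n",
"\n",
"def wait_for_ssh(host, port=22, timeout=300, interval=10):\n",
"    waited = 0\n",
"    while waited < timeout:\n",
"        try:\n",
"            socket.create_connection((host, port), 5).close()\n",
"            return True\n",
"        except (socket.error, socket.timeout):\n",
"            time.sleep(interval)\n",
"            waited += interval\n",
"    return False\n",
"\n",
"instance.reboot()\n",
"time.sleep(30)   # give the reboot a head start before polling\n",
"wait_for_ssh(instance.dns_name)"
],
"language": "python",
"metadata": {},
"outputs": []
},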
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"hand-installing things for an expedient job on gluejar.com"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* git repo"
]
},
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"EC2 security groups"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* listing key existing security groups\n",
"* how to copy parameters\n",
"\n"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# security groups\n",
"\n",
"\n",
"security_groups = aws.ec2.get_all_security_groups()\n",
"security_groups"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# pull out the security group used for unglue.it\n",
"\n",
"web_prod_sgroup = [(group.id, group.name, group.description, group.rules) for group in security_groups if group.name=='web-production'][0]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"web_prod_sgroup"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# http://boto.readthedocs.org/en/latest/security_groups.html\n",
"rules = web_prod_sgroup[3]\n",
"[(rule.ip_protocol, rule.from_port, rule.to_port, rule.grants, rule.groups) for rule in rules]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"[(grant.cidr_ip) for grant in rule.grants]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"[(grant.owner_id, grant.group_id, grant.name, grant.cidr_ip) for grant in rule.grants]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# let's make a new security group to replicate the web-production sg\n",
"\n",
"test8_sg = aws.ec2.create_security_group('test8', 'test8 sg')"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# You need to pass in either src_group_name OR ip_protocol, from_port, to_port, and cidr_ip.\n",
"\n",
"test8_sg.authorize('tcp', 80, 80, '0.0.0.0/0')\n",
"test8_sg.authorize('tcp', 22, 22, '0.0.0.0/0')\n",
"test8_sg.authorize('tcp', 443, 443, '0.0.0.0/0')"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"test9_sg = aws.ec2.create_security_group('test9', 'test9 sg')"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"test9_sg.authorize(src_group=test8_sg)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"test8_sg.rules"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"rules = test9_sg.rules\n",
"rule = rules[0]\n",
"grant = rule.grants[0]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"(rule.ip_protocol, rule.from_port, rule.to_port, rule.grants)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"grant.owner_id, grant.group_id, grant.name, grant.cidr_ip"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"test9_sg = [(group.id, group.name, group.description, group.rules) for group in security_groups if group.name=='test9'][0]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"rules = test9_sg[3]\n",
"[(rule.ip_protocol, rule.from_port, rule.to_port, [(grant.owner_id, grant.group_id, grant.name, grant.cidr_ip) for grant in rule.grants], rule.groups) for rule in rules]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"aws.ec2.authorize_security_group(group_name='test8', ip_protocol='tcp', from_port=80, to_port=80, cidr_ip='0.0.0.0/0')"
],
"language": "python",
"metadata": {},
"outputs": []
},
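{
"cell_type": "markdown",
"metadata": {},
"source": [
"Putting the pieces above together, here is a hedged sketch of copying the CIDR-based rules of an existing group (say `web-production`) into a brand-new group. The helper name and the destination group name are made up for illustration, and group-to-group grants would still have to be handled separately with `src_group`."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sketch: clone the CIDR-based rules of one security group into a new one\n",
"def clone_cidr_rules(src_name, dest_name, dest_description):\n",
"    src = aws.ec2.get_all_security_groups(groupnames=[src_name])[0]\n",
"    dest = aws.ec2.create_security_group(dest_name, dest_description)\n",
"    for rule in src.rules:\n",
"        for grant in rule.grants:\n",
"            if grant.cidr_ip:\n",
"                dest.authorize(rule.ip_protocol, rule.from_port, rule.to_port, grant.cidr_ip)\n",
"    return dest\n",
"\n",
"# clone_cidr_rules('web-production', 'web-production-copy', 'copy of web-production rules')"
],
"language": "python",
"metadata": {},
"outputs": []
},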
{
"cell_type": "code",
"collapsed": false,
"input": [
"# let's compute the instances that are tied to the various security groups\n",
"# http://boto.readthedocs.org/en/latest/ref/ec2.html#module-boto.ec2.securitygroup\n",
"# This calculation is useful for reconstructing the relationships among instances and security groups\n",
"\n",
"\n",
"from boto.ec2 import securitygroup\n",
"\n",
"for security_group in aws.ec2.get_all_security_groups():\n",
"    sg = securitygroup.SecurityGroup(name=security_group.name, connection=aws.ec2)\n",
"    print security_group, [inst.id for inst in sg.instances()]\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# with the exception of frontend-lb, let's delete the security groups that have no attached instances \n",
"\n",
"for sg in [sg for sg in aws.ec2.get_all_security_groups() if len(sg.instances()) == 0 and sg.name != 'frontend-lb']:\n",
"    print sg.name, sg.id, aws.ec2.delete_security_group(group_id=sg.id)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"Setting up MySQL"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* plain old mysql on the server ( https://help.ubuntu.com/12.04/serverguide/mysql.html )\n",
"* RDS parameters to figure out\n",
"\n",
"to run mysql on the server -- if you didn't have to worry about interactivity:\n",
"\n",
"> `sudo apt-get install mysql-server`"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"\"ubuntu@{0}\".format(inst.dns_name)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# once mysql is installed, how do we test basic connectivity?\n",
"\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
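{
"cell_type": "markdown",
"metadata": {},
"source": [
"One answer, as a sketch: reuse the `test_mysql_connection` fabric task defined in the big fabric cell above and run it against the same `hosts` list. This assumes that cell has already been executed in this session."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sketch: assumes test_mysql_connection() and hosts are already defined by the fabric cell above\n",
"fabric.tasks.execute(test_mysql_connection, hosts=hosts)"
],
"language": "python",
"metadata": {},
"outputs": []
},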
{
"cell_type": "markdown",
"metadata": {},
"source": [
"<pre>\n",
"sudo debconf-set-selections <<< 'mysql-server-5.5 mysql-server/root_password password unglueit_pw_123'\n",
"sudo debconf-set-selections <<< 'mysql-server-5.5 mysql-server/root_password_again password unglueit_pw_123'\n",
"sudo apt-get -y install mysql-server\n",
"</pre>\n",
"\n",
"<pre>\n",
"mysql -h 127.0.0.1 --user=root --password=unglueit_pw_123 \n",
"</pre>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"    mysql -h 127.0.0.1 --user=root --password=unglueit_pw_123 <<'EOF'\n",
"\n",
"    SHOW DATABASES;\n",
"    EOF\n",
"\n"
]
},
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"Creating an Image out of an Instance"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"instance.id"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"new_image = aws.ec2.create_image(instance.id, \"script_built_after_local_mysql_2013-05-24\",\n",
"        description=\"next step figure out RDS\")"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sometimes it really does take a surprisingly long time to make an image out of an instance\n",
"\n",
"# new_image = aws.ec2.get_image(image_id=u'ami-853a51ec')\n",
"new_image"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"new_image = aws.ec2.get_image(new_image)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"new_image.state"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Fire up an instance"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"(instance, cmd) = aws.launch_instance(ami=u'ami-853a51ec',\n",
"        instance_type='t1.micro',\n",
"        key_name='rdhyee_public_key',\n",
"        group_name=SECURITY_GROUP_NAME,\n",
"        tag='new_instance',\n",
"        cmd_shell=False)\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"RDS"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"http://calculator.s3.amazonaws.com/calc5.html can be used to estimate costs\n",
"\n",
"A barebones micro rds costs about $20/month\n",
"\n",
"References:\n",
"\n",
"* [boto rds intro](http://boto.readthedocs.org/en/latest/rds_tut.html)\n",
"* [boto rds api ref](http://boto.readthedocs.org/en/latest/ref/rds.html)"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"dbs = aws.all_rds()\n",
"dbs"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"\n",
"db = dbs[1]\n",
"(db.id, db.allocated_storage, db.instance_class, db.engine, db.master_username, \n",
" db.parameter_group, db.security_group, db.availability_zone, db.multi_az)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# I forgot I already have a working rds db info displayer\n",
"aws.db_info(db)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# parameter group\n",
"# http://boto.readthedocs.org/en/latest/ref/rds.html#module-boto.rds.parametergroup\n",
"\n",
"# I think functionality is more primitive"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"pg = aws.rds.get_all_dbparameters('production1')"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"rds = aws.rds\n",
"\n",
"def parameter_group_iteritems(group_name):\n",
"\n",
"    first_page = True\n",
"    get_next_page = True\n",
"\n",
"    while get_next_page:\n",
"        if first_page:\n",
"            pg = rds.get_all_dbparameters(group_name)\n",
"            first_page = False\n",
"        else:\n",
"            pg = rds.get_all_dbparameters(group_name, marker = pg.Marker)\n",
"\n",
"        for key in pg.keys():\n",
"            yield (key, pg[key])\n",
"\n",
"        get_next_page = hasattr(pg, 'Marker')\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# try to turn parameter group into a dict to enable reproducibility of the group\n",
"\n",
"pg_dict = {}\n",
"for (key, param) in parameter_group_iteritems('production1'):\n",
"    try:\n",
"        key, {'name':param.name, 'type':param.type, 'description':param.description, 'value':param.value}\n",
"        pg_dict[key] = {'name':param.name, 'type':param.type, 'description':param.description, 'value':param.value}\n",
"    except Exception as e:\n",
"        print key, e\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"sorted(pg_dict.keys())"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# https://github.com/boto/boto/blob/2.8.0/boto/rds/parametergroup.py#L71\n",
"\n",
"param = pg_dict.get('character_set_database')\n",
"{'name':param[\"name\"], 'type':param[\"type\"], 'description':param[\"description\"], 'value':param[\"value\"]}"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# security group"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# how to create RDS\n",
"# db = conn.create_dbinstance(\"db-master-1\", 10, 'db.m1.small', 'root', 'hunter2')"
],
"language": "python",
"metadata": {},
"outputs": []
},
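{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a hedged sketch of what that would look like with our connection object (the identifier, storage size, instance class, engine string, and password below are placeholders for illustration, not our real settings):"
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# sketch only: every identifier and the password here are placeholders\n",
"new_db = aws.rds.create_dbinstance(id='test-rds-1',\n",
"        allocated_storage=5,\n",
"        instance_class='db.t1.micro',\n",
"        master_username='root',\n",
"        master_password='CHANGE_ME',\n",
"        engine='MySQL5.5')\n",
"aws.db_info(new_db)"
],
"language": "python",
"metadata": {},
"outputs": []
},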
{
"cell_type": "heading",
"level": 1,
"metadata": {},
"source": [
"IAM"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"It would be good to get an automated handle on the IAM groups and users. To use boto to manage IAM, you will need AWS keys with sufficient permissions."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"import boto\n",
"import urlparse\n",
"\n",
"from regluit.sysadmin import aws\n",
"iam = aws.boto.connect_iam()\n",
"\n",
"\n",
"IAM_POWER_USER_PERMISSION = \"\"\"{\n",
"  \"Version\": \"2012-10-17\",\n",
"  \"Statement\": [\n",
"    {\n",
"      \"Effect\": \"Allow\",\n",
"      \"NotAction\": \"iam:*\",\n",
"      \"Resource\": \"*\"\n",
"    }\n",
"  ]\n",
"}\"\"\"\n",
"\n",
"\n",
"# get group names\n",
"\n",
"def all_iam_group_names():\n",
"    return [g.group_name for g in iam.get_all_groups()['list_groups_response']['list_groups_result']['groups']]\n",
"\n",
"# get user names\n",
"\n",
"def all_iam_user_names():\n",
"    return [u.user_name for u in iam.get_all_users()[u'list_users_response'][u'list_users_result']['users']]\n",
"\n",
"# mapping between groups and users\n",
"# list users and their corresponding groups.\n",
"\n",
"def iam_group_names_for_user(user_name):\n",
"    return [g.group_name for g in iam.get_groups_for_user(user_name)['list_groups_for_user_response'][u'list_groups_for_user_result']['groups']]\n",
"\n",
"# for given groups, list corresponding users\n",
"\n",
"def iam_user_names_for_group(group_name):\n",
"    return [u.user_name for u in iam.get_group(group_name=group_name)[u'get_group_response'][u'get_group_result']['users']]\n",
"\n",
"# find keys associated with user\n",
"\n",
"def access_keys_for_user_name(user_name):\n",
"    keys = iam.get_all_access_keys(user_name=user_name)['list_access_keys_response'][u'list_access_keys_result']['access_key_metadata']\n",
"    return keys\n",
"\n",
"# can we use IAM to create new IAM user and get the key / secret?\n",
"\n",
"def create_iam_user(user_name, generate_key=True):\n",
"    iam_user = iam.create_user(user_name=user_name)\n",
"    if generate_key:\n",
"        key_output = iam.create_access_key(user_name=user_name)\n",
"        access_key = key_output['create_access_key_response']['create_access_key_result']['access_key']\n",
"        (key, secret) = (access_key['access_key_id'], access_key['secret_access_key'])\n",
"        return (iam_user, key, secret)\n",
"    else:\n",
"        return (iam_user, None, None)\n",
"\n",
"def delete_iam_user(user_name):\n",
"\n",
"    # check to see whether there is such a user_name.\n",
"    try:\n",
"        iam.get_user(user_name)\n",
"    except boto.exception.BotoServerError as e:\n",
"        return None\n",
"\n",
"    # delete associated keys\n",
"\n",
"    keys = access_keys_for_user_name(user_name)\n",
"\n",
"    for key in keys:\n",
"        # print key.access_key_id, key.status\n",
"        iam.delete_access_key(access_key_id=key.access_key_id, user_name=user_name)\n",
"        #result = iam.update_access_key(access_key_id=key.access_key_id, user_name=user_name, status='Inactive')\n",
"\n",
"    # also need to delete associated policies\n",
"\n",
"    policy_names = iam_policy_names_for_user(user_name)\n",
"\n",
"    for policy_name in policy_names:\n",
"        iam.delete_user_policy(user_name=user_name, policy_name=policy_name)\n",
"\n",
"    # once the keys associated with the user are deleted, then proceed to delete the user\n",
"\n",
"    result = iam.delete_user(user_name)\n",
"    return result\n",
"\n",
"# policies\n",
"\n",
"def iam_policy_names_for_group(group_name):\n",
"    return iam.get_all_group_policies(group_name=group_name)['list_group_policies_response'][u'list_group_policies_result']['policy_names']\n",
"\n",
"def iam_policy_names_for_user(user_name):\n",
"    return iam.get_all_user_policies(user_name=user_name)['list_user_policies_response'][u'list_user_policies_result']['policy_names']\n",
"\n",
"def policy_document(policy_name, user_name=None, group_name=None):\n",
"    if group_name is not None:\n",
"        document = iam.get_group_policy(group_name=group_name, policy_name=policy_name)[u'get_group_policy_response'][u'get_group_policy_result'][u'policy_document']\n",
"        return urlparse.parse_qs(\"policy={0}\".format(document))['policy'][0]\n",
"    if user_name is not None:\n",
"        document = iam.get_user_policy(user_name=user_name, policy_name=policy_name)[u'get_user_policy_response'][u'get_user_policy_result'][u'policy_document']\n",
"        return urlparse.parse_qs(\"policy={0}\".format(document))['policy'][0]\n",
"\n",
"# get general IAM stats\n",
"\n",
"(iam.get_account_summary(), all_iam_group_names(), all_iam_user_names(),\n",
" iam_group_names_for_user('eric'), iam_user_names_for_group('gluejar'),\n",
" access_keys_for_user_name('ry-dev')\n",
" )\n",
"\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# test -> grab all groups and list of corresponding users\n",
"\n",
"for g in all_iam_group_names():\n",
"    print g, iam_user_names_for_group(g)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# list all keys by looping through users\n",
"\n",
"for u in all_iam_user_names():\n",
"    print u, [(k.access_key_id, k.status) for k in access_keys_for_user_name(u)]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# look at permission structures of groups and users\n",
"\n",
"from urllib import urlencode\n",
"import urlparse\n",
"\n",
"policy_names = iam_policy_names_for_group('gluejar')\n",
"\n",
"for p in policy_names:\n",
"    print policy_document(group_name='gluejar', policy_name=p)\n",
"\n"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"iam_user, key, secret = create_iam_user('ry-dev-3', True)\n",
"iam.put_user_policy(user_name='ry-dev-3', policy_name='power_user_2013-06-12', policy_json=IAM_POWER_USER_PERMISSION)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"# write out a shell script for configuring the environment with the keys for AWS\n",
"\n",
"print \"\"\"#!/bin/bash\n",
"\n",
"\n",
"export AWS_ACCESS_KEY_ID={AWS_ACCESS_KEY_ID}\n",
"export AWS_SECRET_ACCESS_KEY={AWS_SECRET_ACCESS_KEY}\n",
"\n",
"# EC2 API tools\n",
"export EC2_ACCESS_KEY=$AWS_ACCESS_KEY_ID\n",
"export EC2_SECRET_KEY=$AWS_SECRET_ACCESS_KEY\"\"\".format(**{'AWS_SECRET_ACCESS_KEY':secret, 'AWS_ACCESS_KEY_ID':key})"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"[policy_document(p, user_name='ry-dev-3') for p in iam_policy_names_for_user('ry-dev-3')]"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"delete_iam_user(user_name='ry-dev-3')"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Different ways to pass in AWS keys to boto\n",
"\n",
"http://boto.readthedocs.org/en/latest/boto_config_tut.html\n",
"\n",
"* Credentials passed into Connection class constructor.\n",
"* Credentials specified by environment variables\n",
"* Credentials specified as options in the config file.\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"<pre>\n",
"\n",
"#!/bin/bash\n",
"\n",
"export AWS_ACCESS_KEY={AWS_ACCESS_KEY}\n",
"export AWS_SECRET_KEY={AWS_SECRET_KEY}\n",
"\n",
"# EC2 API tools\n",
"export EC2_ACCESS_KEY=$AWS_ACCESS_KEY\n",
"export EC2_SECRET_KEY=$AWS_SECRET_KEY\n",
"\n",
"</pre>\n"
]
},
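{
"cell_type": "markdown",
"metadata": {},
"source": [
"The config-file option listed above would be a `~/.boto` file along these lines (a sketch; the key values are placeholders):\n",
"\n",
"<pre>\n",
"[Credentials]\n",
"aws_access_key_id = YOUR_ACCESS_KEY_ID\n",
"aws_secret_access_key = YOUR_SECRET_ACCESS_KEY\n",
"</pre>"
]
},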
{
"cell_type": "code",
"collapsed": false,
"input": [
"%%bash\n",
"# something to convert this notebook to Python source\n",
"cd /Users/raymondyee/D/Document/Gluejar/Gluejar.github/regluit; python ~/C/src/nbconvert/nbconvert.py python build_ec2_instances_for_django.ipynb"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"import fabric\n",
"from fabric.api import run, local, env, cd, sudo\n",
"from fabric.operations import get\n",
"\n",
"\n",
"def run_on_ry_dev():\n",
"\n",
"    run(\"ls -lt\")\n",
"\n",
"hosts = [\"ubuntu@ry-dev.unglue.it\"]\n",
"\n",
"fabric.tasks.execute(run_on_ry_dev, hosts=hosts)"
],
"language": "python",
"metadata": {},
"outputs": []
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"instance.state"
],
"language": "python",
"metadata": {},
"outputs": []
}
],
"metadata": {}
}
]
}