def clone_workers_repo(logfile=None):
    """ Clone the osm-stats-workers repository (gh-pages branch).

    Removes any existing local checkout first, then clones into
    ./osm-stats-workers (git names the directory after the repo).

    logfile -- open file object for git output; defaults to /dev/null
               (output discarded).
    Returns the local repository directory name ('osm-stats-workers').
    """
    # Track whether we opened the log handle ourselves so we can close it:
    # the original leaked a /dev/null file object on every no-logfile call.
    close_log = False
    if logfile is None:
        logfile = open(os.devnull, 'w')
        close_log = True
    repo = 'osm-stats-workers'
    try:
        print('%s: Fetching latest %s repository' % (timestamp(), repo))
        repo_url = 'https://github.com/AmericanRedCross/%s.git' % repo
        # start from a clean checkout
        if os.path.exists(repo):
            shutil.rmtree(repo)
        # clone only the gh-pages branch
        subprocess.call(['git', 'clone', '-bgh-pages', repo_url],
                        stdout=logfile, stderr=logfile)
    finally:
        if close_log:
            logfile.close()
    return repo
def redeploy_to_ec2(name, host_string, logfile=None):
    """ Redeploy the latest docker setup to an existing EC2 host.

    Temporarily redirects sys.stdout to logfile so fabric output is
    captured, then copies files and runs the deploy fabfile tasks.

    name        -- deployment base name; '<name>.pem' is used as the SSH key
    host_string -- fabric host string (user@host) to connect to
    logfile     -- open file object for output; defaults to /dev/null
    """
    close_log = False
    if logfile is None:
        logfile = open(os.devnull, 'w')
        close_log = True
    try:
        print('%s: Redeploying to EC2' % timestamp())
        sys.stdout = logfile
        with settings(host_string=host_string, key_filename=name + '.pem',
                      connection_attempts=3):
            fabfile.copy_files()
            fabfile.deploy()
    finally:
        # Restore stdout BEFORE any close: the original did
        # sys.stdout.close(), which closed the caller-supplied logfile
        # (breaking later writes to it) and, if the print above raised
        # before the redirect, would have closed the real stdout.
        sys.stdout = sys.__stdout__
        if close_log:
            logfile.close()
def migrate_database(repo, logfile=None, seed=True):
    """ Run knex database migrations (and optional seeds).

    Unzips the migration bundle shipped inside the workers repo, then runs
    'knex migrate:latest' (and 'knex seed:run' when seed is True) from the
    migrations directory. Expects DATABASE_URL to be set in the environment
    (knex reads it) -- see the deploy driver, which sets it.

    repo    -- local directory of the cloned workers repository
    logfile -- open file object for subprocess output; defaults to /dev/null
    seed    -- also run the seed step when True (default)
    """
    close_log = False
    if logfile is None:
        logfile = open(os.devnull, 'w')
        close_log = True
    try:
        print('%s: Migrating database' % timestamp())
        # unzip migration files bundled as <repo>/<repo>.zip
        zfile = '%s/%s.zip' % (repo, repo)
        subprocess.call(['unzip', '-o', zfile, '-d', repo], stdout=logfile)
        # knex must run from the migrations directory; remember where we
        # were so the cwd is restored even if a subprocess call raises
        # (the original chdir'd back with a relative '../../../../' that
        # was skipped on error, leaving the process in the wrong cwd).
        cwd = os.getcwd()
        os.chdir('osm-stats-workers/src/db/migrations')
        try:
            subprocess.call(['knex', 'migrate:latest'], stdout=logfile)
            if seed:
                subprocess.call(['knex', 'seed:run'], stdout=logfile)
        finally:
            os.chdir(cwd)
    finally:
        if close_log:
            logfile.close()
# NOTE(review): the original file defined deploy_to_ec2 TWICE with
# byte-identical bodies (the second silently shadowed the first); a single
# definition is kept here.
def deploy_to_ec2(name, host_string, logfile=None):
    """ Provision an EC2 host and deploy the latest docker setup to it.

    Redirects sys.stdout to logfile so fabric output is captured, sets up
    the host, copies files, then runs the deploy task in a fresh session.

    name        -- deployment base name; '<name>.pem' is used as the SSH key
    host_string -- fabric host string (user@host) to connect to
    logfile     -- open file object for output; defaults to /dev/null
    """
    close_log = False
    if logfile is None:
        logfile = open(os.devnull, 'w')
        close_log = True
    try:
        print('%s: Deploying to EC2' % timestamp())
        sys.stdout = logfile
        with settings(host_string=host_string, key_filename=name + '.pem',
                      connection_attempts=3):
            fabfile.setup_host(name)
            fabfile.copy_files()
            # hack: run 'fab deploy' via subprocess so a NEW ssh session is
            # used and the user is guaranteed to be part of the docker group
            subprocess.call(
                ['fab', 'deploy', '-i%s.pem' % name, '-H %s' % host_string],
                stdout=logfile)
    finally:
        # Restore stdout BEFORE any close: the original did
        # sys.stdout.close(), which closed the caller-supplied logfile
        # (breaking later writes to it) and, if the print above raised
        # before the redirect, would have closed the real stdout.
        sys.stdout = sys.__stdout__
        if close_log:
            logfile.close()
parser.add_argument('--name', help='Base name for all AWS resources', default='osmstats') parser.add_argument('--lsize', help='Size (MB) of Lambda function', default=512) parser.add_argument('--ltimeout', help='Timeout (seconds) of Lambda function', default=300) parser.add_argument('--dbclass', help='The Amazon instance class for the RDS database', default='db.t2.medium') parser.add_argument('--ec2class', help='The Amazon instance class for the EC2', default='t2.medium') parser.add_argument('--password', help='The password to use for database', required=True) # default='t3sting9huy') parser = subparser.add_parser('update', help='Update OSM Stats with latest code') parser.add_argument('--name', help='Base name of deployment', default='osmstats') args = parser0.parse_args() logfile = open('%s.log' % args.name, 'w') if args.command == 'deploy': print '%s: Starting deployment of %s' % (timestamp(), args.name) repo = clone_workers_repo(logfile) # create stream and RDS database stream = create_stream(args.name) db = create_database(args.name, args.password, dbclass=args.dbclass) os.environ['DATABASE_URL'] = db['URL'] migrate_database(repo, logfile) # set up environment variables session = boto3._get_default_session()._session env = [ 'DEPLOY_NAME=%s' % args.name, 'KINESIS_STREAM=%s' % args.name, 'DATABASE_URL=%s' % db['URL'], 'AWS_REGION=%s' % session.get_config_variable('region'), 'AWS_ACCESS_KEY_ID=%s' % session.get_credentials().access_key,
parser.add_argument('--password', help='The password to use for database', required=True) # default='t3sting9huy') parser = subparser.add_parser('update', help='Update OSM Stats with latest code') parser.add_argument('--name', help='Base name of deployment', default='osmstats') args = parser0.parse_args() logfile = open('%s.log' % args.name, 'w') if args.command == 'deploy': print '%s: Starting deployment of %s' % (timestamp(), args.name) repo = clone_workers_repo(logfile) # create stream and RDS database stream = create_stream(args.name) db = create_database(args.name, args.password, dbclass=args.dbclass) os.environ['DATABASE_URL'] = db['URL'] migrate_database(repo, logfile) # set up environment variables session = boto3._get_default_session()._session env = [ 'DEPLOY_NAME=%s' % args.name, 'KINESIS_STREAM=%s' % args.name, 'DATABASE_URL=%s' % db['URL'], 'AWS_REGION=%s' % session.get_config_variable('region'), 'AWS_ACCESS_KEY_ID=%s' % session.get_credentials().access_key,