def on_insert_code(items):
    """
    Deploy code onto servers as the items are created.

    If a new code item 'is_current', PATCH 'is_current' code with the same
    name and type to no longer be current.

    :param items: List of dicts for items to be created.
    """
    log.debug('code | Insert | items - %s', items)
    for item in items:
        log.debug('code | POST | On Insert callback | %s', item)
        # Check to see if we have a current profile and core.
        # Reject duplicates: an item with the same name, version, and
        # code_type must not already exist.
        code_query = 'where={{"meta.name":"{0}","meta.version":"{1}","meta.code_type":"{2}"}}'.format(
            item['meta']['name'], item['meta']['version'], item['meta']['code_type'])
        existing = utilities.get_eve('code', code_query)
        log.debug('code | POST | On Insert callback | Code query result | %s', existing)
        if existing['_meta']['total'] != 0:
            log.error('code | POST | On Insert callback | %s named %s-%s already exists',
                      item['meta']['code_type'], item['meta']['name'], item['meta']['version'])
            abort(409, 'Error: A {0} named {1}-{2} already exists.'.format(
                item['meta']['code_type'], item['meta']['name'], item['meta']['version']))
        # If the new item is current, demote any other 'is_current' item with
        # the same name and code_type.
        if item.get('meta') and item['meta'].get('is_current') is True:
            query = 'where={{"meta.name":"{0}","meta.code_type":"{1}","meta.is_current": true}}'.format(
                item['meta']['name'], item['meta']['code_type'])
            code_get = utilities.get_eve('code', query)
            log.debug('code | Insert | current code - %s', code_get)
            if code_get['_meta']['total'] != 0:
                # Renamed loop variable: the original reused 'code', shadowing
                # the earlier query result.
                for current_code in code_get['_items']:
                    request_payload = {'meta.is_current': False}
                    utilities.patch_eve('code', current_code['_id'], request_payload)
        log.debug('code | Insert | Ready to deploy item - %s', item)
        tasks.code_deploy.delay(item)
def pre_delete_code_callback(request, lookup):
    """
    Make sure no sites are using the code.

    :param request: flask.request object
    :param lookup:
    """
    code = utilities.get_single_eve('code', lookup['_id'])
    app.logger.debug(code)
    # Modules, themes, and libraries are all referenced as 'package' on sites.
    if code['meta']['code_type'] in ['module', 'theme', 'library']:
        code_type = 'package'
    else:
        code_type = code['meta']['code_type']
    app.logger.debug(code_type)
    site_query = 'where={{"code.{0}":"{1}"}}'.format(code_type, code['_id'])
    sites = utilities.get_eve('sites', site_query)
    app.logger.debug(sites)
    if not sites['_meta']['total'] == 0:
        # Create a list of sites that use this code item.
        # If 'sid' is a key in the site dict use it, otherwise use '_id'.
        # Bug fix: accumulate across ALL matching sites — the list was being
        # reset on every iteration and the abort fired on the first site, so
        # the logged list only ever contained one entry.
        site_list = []
        for site in sites['_items']:
            if site.get('sid'):
                site_list.append(site['sid'])
            else:
                site_list.append(site['_id'])
        site_list_full = ', '.join(site_list)
        app.logger.error('Code item is in use by one or more sites:\n{0}'.format(site_list_full))
        abort(409, 'A conflict happened while processing the request. Code item is in use by one or more sites.')
def command_prepare(item):
    """
    Prepare sites to run the appropriate command.

    :param item: A complete command item, including new values.
    :return:
    """
    logger.debug("Prepare Command\n{0}".format(item))
    # 'clear_apc' is a one-shot command; it does not target individual sites.
    if item["command"] == ["clear_apc"]:
        execute(fabfile.clear_apc())
        return
    # Guard clause: without a site query there is nothing to run against.
    if not item["query"]:
        return
    site_query = "where={0}".format(item["query"])
    matched = utilities.get_eve("sites", site_query)
    logger.debug("Ran query\n{0}".format(matched))
    if matched["_meta"]["total"] == 0:
        return
    for target_site in matched["_items"]:
        # A few commands map directly to fabric tasks; anything else is
        # delegated to command_run.
        if item["command"] == ["correct_file_permissions"]:
            execute(fabfile.correct_file_directory_permissions(target_site))
        elif item["command"] == ["update_settings_file"]:
            execute(fabfile.update_settings_file(target_site))
        elif item["command"] == ["update_homepage_extra_files"]:
            execute(fabfile.update_homepage_extra_files())
        else:
            command_run(target_site, item["command"], item["single_server"])
def pre_delete_code(request, lookup):
    """
    Make sure no sites are using the code.

    :param request: flask.request object
    :param lookup:
    """
    code = utilities.get_single_eve('code', lookup['_id'])
    log.debug('code | Delete | code - %s', code)
    # Check for sites using this piece of code. Modules, themes, and
    # libraries are all referenced as 'package' on site records.
    if code['meta']['code_type'] in ['module', 'theme', 'library']:
        code_type = 'package'
    else:
        code_type = code['meta']['code_type']
    log.debug('code | Delete | code - %s | code_type - %s', code['_id'], code_type)
    site_query = 'where={{"code.{0}":"{1}"}}'.format(code_type, code['_id'])
    sites = utilities.get_eve('sites', site_query)
    log.debug('code | Delete | code - %s | sites result - %s', code['_id'], sites)
    if sites['_meta']['total'] != 0:
        # Collect an identifier for each site that still uses this code item.
        # Prefer the site's 'sid' when present, otherwise fall back to '_id'.
        site_list = [site['sid'] if site.get('sid') else site['_id']
                     for site in sites['_items']]
        site_list_full = ', '.join(site_list)
        log.error('code | Delete | code - %s | Code item is in use by one or more sites - %s',
                  code['_id'], site_list_full)
        abort(409, 'A conflict happened while processing the request. Code item is in use by one or more sites.')
def _diff_f5():
    """
    Copy f5 configuration file to local server, parse txt and create or
    update site items.
    """
    f5_config_dir = "{0}/atlas/fabfile".format(path)
    f5_config_file = "{0}/{1}".format(f5_config_dir, f5_config_files[environment])
    # If an older config file exists, copy it to a backup folder.
    if os.path.isfile(f5_config_file):
        local(
            "mv {0} /data/code/inventory/fabfile/backup/{1}.{2}".format(
                f5_config_file, f5_config_files[environment], str(time()).split(".")[0]
            )
        )
    # Copy config file from the f5 server to the Atlas server.
    local(
        "scp {0}:/config/{1} {2}/".format(
            serverdefs[environment]["f5_servers"][0], f5_config_files[environment], f5_config_dir
        )
    )
    # Open file from f5
    with open(f5_config_file, "r") as ifile:
        data = ifile.read()
    # Use regex to parse out path values
    p = re.compile(r'"(.+/?)" := "(\w+(-\w+)?)",')
    sites = p.findall(data)
    # Iterate through sites found in f5 data
    for site in sites:
        f5only = site[0] in f5exceptions
        # Get path without leading slash.
        # Bug fix: do NOT name this 'path' — assigning to 'path' would make it
        # function-local throughout and the use of the outer 'path' above
        # would raise UnboundLocalError.
        site_path = site[0][1:]
        pool = site[1]
        # Set a type value based on pool. Renamed from 'type' to avoid
        # shadowing the builtin.
        if pool == "WWWLegacy":
            site_type = "legacy"
        elif pool in ("poola-homepage", "poolb-homepage"):
            site_type = "homepage"
        elif pool == "poolb-express":
            site_type = "express"
        else:
            site_type = "custom"
        site_query = 'where={{"path":"{0}"}}'.format(site_path)
        # Bug fix: use a distinct name for the query result; the original
        # reassigned 'sites' (the list being iterated) and then read the pool
        # from 'data' (the raw config string), which raised TypeError.
        site_records = utilities.get_eve("sites", site_query)
        if not site_records or len(site_records["_items"]) == 0:
            payload = {"name": site_path, "path": site_path, "pool": pool,
                       "status": "launched", "type": site_type, "f5only": f5only}
            utilities.post_eve("sites", payload)
            print("Created site record based on f5.\n{0}".format(payload))
        elif pool != site_records["_items"][0]["pool"]:
            existing = site_records["_items"][0]
            payload = {"pool": pool, "status": "launched", "type": site_type}
            utilities.patch_eve("sites", existing["_id"], payload)
            print("Updated site based on f5.\n{0}".format(payload))
def delete_all_available_sites():
    """
    Get a list of available sites and mark them for deletion.
    """
    site_query = 'where={"status":"available"}'
    sites = utilities.get_eve("sites", site_query)
    payload = {"status": "delete"}
    # Bug fix: iterate the '_items' list of the Eve response. Iterating the
    # response dict itself yields its keys (strings), so site["_id"] failed.
    for site in sites["_items"]:
        utilities.patch_eve("sites", site["_id"], payload)
def available_sites_check():
    """Top up the pool of pending/available sites to the desired size."""
    site_query = 'where={"status":{"$in":["pending","available"]}}'
    sites = utilities.get_eve("sites", site_query)
    actual_site_count = sites["_meta"]["total"]
    # Local environments keep a smaller standby pool than deployed ones.
    desired_site_count = 2 if environment == "local" else 5
    shortfall = desired_site_count - actual_site_count
    # Create one pending site per missing slot (no-op when shortfall <= 0).
    for _ in range(shortfall):
        utilities.post_eve("sites", {"status": "pending"})
def delete_stale_pending_sites():
    """Mark pending sites older than 30 minutes for deletion."""
    site_query = 'where={"status":"pending"}'
    sites = utilities.get_eve("sites", site_query)
    # Loop through and remove sites that are more than 30 minutes old.
    for site in sites["_items"]:
        # Parse date string into structured time.
        # See https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior for mask format.
        created = time.strptime(site["_created"], "%Y-%m-%d %H:%M:%S %Z")
        # Age of the site: now (epoch seconds) minus creation (epoch seconds).
        age_in_seconds = time.time() - time.mktime(created)
        # 30 min * 60 sec = 1800 seconds
        if age_in_seconds > 1800:
            utilities.patch_eve("sites", site["_id"], {"status": "delete"})
def on_delete_item_code(item):
    """
    Remove code from servers right before the item is removed.

    :param item:
    """
    log.debug('code | on delete | item - %s', item)
    other_static_assets = False
    # For static items, check whether other static items with the same name
    # still exist (excluding the item being deleted).
    if item['meta']['code_type'] == 'static':
        query = 'where={{"meta.name":"{0}","meta.code_type":"static","_id":{{"$ne":"{1}"}}}}'.format(
            item['meta']['name'], item['_id'])
        remaining = utilities.get_eve('code', query)
        other_static_assets = remaining['_meta']['total'] != 0
    log.info('code | on delete | other static assets - %s', other_static_assets)
    tasks.code_remove.delay(item, other_static_assets)
def on_update_code(updates, original):
    """
    Update code on the servers as the item is updated.

    :param updates:
    :param original:
    """
    log.debug('code | on update | updates - %s | original - %s', updates, original)
    # If this 'is_current' PATCH code with the same name and code_type.
    if updates.get('meta') and updates['meta'].get('is_current') is True:
        # If the name and code_type are not changing, we need to load them from the original.
        name = updates['meta'].get('name') or original['meta']['name']
        code_type = updates['meta'].get('code_type') or original['meta']['code_type']
        query = 'where={{"meta.name":"{0}","meta.code_type":"{1}","meta.is_current": true,"_id":{{"$ne":"{2}"}}}}'.format(
            name, code_type, original['_id'])
        code_get = utilities.get_eve('code', query)
        log.debug('code | on update | Current code - %s', code_get)
        for code in code_get['_items']:
            request_payload = {'meta.is_current': False}
            utilities.patch_eve('code', code['_id'], request_payload)
    # We need the whole record so that we can manipulate code in the right place.
    # Copy 'original' to a new dict, then update it with values from 'updates' to create an item to
    # deploy. Need to do the same process for meta first, otherwise the update will fully overwrite.
    updated_item = original.copy()
    updated_item.update(updates)
    if updates.get('meta'):
        meta = original['meta'].copy()
        meta.update(updates['meta'])
        updated_item['meta'] = meta
    # Only redeploy when a field that affects the deployed code changed.
    # dict.has_key() is deprecated (removed in Python 3); use 'in' instead.
    meta_updates = updates.get('meta') or {}
    update_code = (
        any(key in meta_updates for key in ('name', 'version', 'code_type'))
        or 'commit_hash' in updates
        or 'git_url' in updates
    )
    if update_code:
        log.debug('code | on update | Ready to hand to Celery')
        # chord two tasks
        chord(tasks.code_update.s(updated_item, original), tasks.clear_php_cache.si())()
def backup_restore(backup_record, original_instance, package_list):
    """
    Restore database and files to a new instance.

    :param backup_record: backup item to restore from.
    :param original_instance: instance record the backup was taken from.
    :param package_list: list of code package IDs to add to the new instance.
    """
    log.info('Instance | Restore Backup | %s | %s', backup_record, original_instance)
    start_time = time()
    # Build human readable backup file names from the sid and backup date.
    file_date = datetime.strptime(backup_record['backup_date'], "%Y-%m-%d %H:%M:%S %Z")
    pretty_filename = '{0}_{1}'.format(
        original_instance['sid'], file_date.strftime("%Y-%m-%d-%H-%M-%S"))
    pretty_database_filename = '{0}.sql'.format(pretty_filename)
    database_path = '{0}/backups/{1}'.format(BACKUP_PATH, pretty_database_filename)
    pretty_files_filename = '{0}.tar.gz'.format(pretty_filename)
    files_path = '{0}/backups/{1}'.format(BACKUP_PATH, pretty_files_filename)
    # Grab available instance and add packages if needed
    available_instances = utilities.get_eve('sites', 'where={"status":"available"}')
    log.debug('Instance | Restore Backup | Avaiable Instances - %s', available_instances)
    new_instance = next(iter(available_instances['_items']), None)
    # TODO: Don't switch if the code is the same
    if new_instance is not None:
        payload = {'status': 'installing'}
        if package_list:
            packages = {'code': {'package': package_list}}
            payload.update(packages)
        utilities.patch_eve('sites', new_instance['_id'], payload)
    else:
        exit('No available instances.')
    # Wait for code and status to update.
    attempts = 18  # Tries every 10 seconds to a max of 18 (or 3 minutes).
    while attempts:
        try:
            new_instance_refresh = utilities.get_single_eve('sites', new_instance['_id'])
            if new_instance_refresh['status'] != 'installed':
                log.info('Instance | Restore Backup | New instance is not ready | %s', new_instance['_id'])
                raise ValueError('Status has not yet updated.')
            break
        # Bug fix: 'except ValueError, e' is Python-2-only syntax; the
        # 'as' form works on Python 2.6+ and Python 3.
        except ValueError as e:
            # If the status is not updated and we have attempts left,
            # remove an attempt and wait 10 seconds.
            attempts -= 1
            # Bug fix: 'attempts is not 0' compared identity, not value;
            # use '!=' for a numeric comparison.
            if attempts != 0:
                sleep(10)
            else:
                exit(str(e))
def take_down_installed_35_day_old_sites():
    """Take down installed sites older than 35 days (never runs in prod)."""
    if environment != "prod":
        site_query = 'where={"status":"installed"}'
        sites = utilities.get_eve("sites", site_query)
        # Loop through and remove sites that are more than 35 days old.
        for site in sites["_items"]:
            # Parse date string into structured time.
            # See https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
            # for mask format.
            date_created = time.strptime(site["_created"], "%Y-%m-%d %H:%M:%S %Z")
            # Get time now, Convert date_created to seconds from epoch and
            # calculate the age of the site.
            seconds_since_creation = time.time() - time.mktime(date_created)
            print("{0} is {1} seconds old".format(site["sid"], seconds_since_creation))
            # 35 days * 24 hrs * 60 min * 60 sec = 3,024,000 seconds
            # (the previous comment said 302400, which dropped a zero).
            if seconds_since_creation > 3024000:
                # Patch the status to 'take_down'.
                payload = {"status": "take_down"}
                utilities.patch_eve("sites", site["_id"], payload)
def on_updated_code(updates, original):
    """
    Find instances that use this code asset and re-add them.

    :param updates:
    :param original:
    """
    log.debug('code | on updated | updates - %s | original - %s', updates, original)
    # First get the code_type from either the update or original, then convert package types for
    # querying instance objects.
    if updates.get('meta') and updates['meta'].get('code_type'):
        code_type = updates['meta']['code_type']
    else:
        code_type = original['meta']['code_type']
    if code_type in ['module', 'theme', 'library']:
        code_type = 'package'
    # Decide whether the change requires updating instances.
    # dict.has_key() is deprecated (removed in Python 3); use 'in' instead.
    # Bug fix: use .get() with the original value as fallback so a partial
    # meta PATCH (e.g. only 'version') cannot raise KeyError.
    if 'meta' in updates:
        meta = updates['meta']
        if (meta.get('name', original['meta']['name']) != original['meta']['name'] or
                meta.get('version', original['meta']['version']) != original['meta']['version'] or
                meta.get('code_type', original['meta']['code_type']) != original['meta']['code_type']):
            update_sites = True
            log.debug('code | on updated | Found meta data changes | %s', updates['meta'])
        else:
            log.debug('code | on updated | Found no meta changes that require an update')
            update_sites = False
    elif 'commit_hash' in updates or 'git_url' in updates:
        update_sites = True
        log.debug('code | on updated | Found git data changes')
    else:
        log.debug('code | on updated | Found no changes')
        update_sites = False
    if update_sites:
        log.info('Code | on updated | Preparing to update instances')
        query = 'where={{"code.{0}":"{1}"}}'.format(code_type, original['_id'])
        sites_get = utilities.get_eve('sites', query)
        # Bug fix: '!=' instead of 'is not' for a numeric comparison.
        if sites_get['_meta']['total'] != 0:
            for site in sites_get['_items']:
                log.debug('code | on updated | site - %s', site)
                # Re-send the instance's existing code reference unchanged.
                code_id_string = site['code'][code_type]
                payload = {'code': {code_type: code_id_string}}
                log.debug('code | on updated | payload - %s', payload)
                utilities.patch_eve('sites', site['_id'], payload)
def cron(status=None, include_packages=None, exclude_packages=None): logger.debug( "Cron | Status - {0} | Include - {1} | Exclude - {2}".format(status, include_packages, exclude_packages) ) # Build query. site_query_string = ["max_results=2000"] logger.debug("Cron - found argument") site_query_string.append("&where={") if status: logger.debug("Cron - found status") site_query_string.append('"status":"{0}",'.format(status)) else: logger.debug("Cron - No status found") site_query_string.append('"status":{"$in":["installed","launched"],') if include_packages: logger.debug("Cron - found include_packages") for package_name in include_packages: packages = utilities.get_code(name=package_name) include_packages_ids = [] if not packages["_meta"]["total"] == 0: for item in packages: include_packages_ids.append(item["_id"]) site_query_string.append('"code.package": {{"$in": {0}}},'.format(include_packages_ids)) if exclude_packages: logger.debug("Cron - found exclude_packages") for package_name in exclude_packages: packages = utilities.get_code(name=package_name) exclude_packages_ids = [] if not packages["_meta"]["total"] == 0: for item in packages: exclude_packages_ids.append(item["_id"]) site_query_string.append('"code.package": {{"$in": {0}}},'.format(exclude_packages_ids)) site_query = "".join(site_query_string) logger.debug("Query after join - {0}".format(site_query)) site_query = site_query.rstrip("\,") logger.debug("Query after rstrip - {0}".format(site_query)) site_query += "}" sites = utilities.get_eve("sites", site_query) if not sites["_meta"]["total"] == 0: for site in sites["_items"]: command_run(site, "drush cron", True)
def on_insert_code_callback(items):
    """
    Deploy code onto servers as the items are created.

    If a new code item 'is_current', PATCH 'is_current' code with the same
    name and type to no longer be current.

    :param items: List of dicts for items to be created.
    """
    app.logger.debug(items)
    for item in items:
        meta = item.get('meta')
        if meta and meta.get('is_current') and meta['is_current'] == True:
            # Need a lowercase string when querying boolean values. Python
            # stores it as 'True'.
            query = 'where={{"meta.name":"{0}","meta.code_type":"{1}","meta.is_current": {2}}}'.format(
                meta['name'], meta['code_type'], str(meta['is_current']).lower())
            current_records = utilities.get_eve('code', query)
            app.logger.debug(current_records)
            # Demote every other current item of the same name/type.
            for record in current_records['_items']:
                utilities.patch_eve('code', record['_id'], {'meta.is_current': False})
        app.logger.debug('Ready to send to Celery\n{0}'.format(item))
        tasks.code_deploy.delay(item)
def on_update_code_callback(updates, original):
    """
    Update code on the servers as the item is updated.

    :param updates:
    :param original:
    """
    app.logger.debug(updates)
    app.logger.debug(original)
    # If this 'is_current' PATCH code with the same name and code_type.
    if updates.get('meta') and updates['meta'].get('is_current') and updates['meta']['is_current'] == True:
        # If the name and code_type are not changing, we need to load them from
        # the original.
        name = updates['meta']['name'] if updates['meta'].get('name') else original['meta']['name']
        code_type = updates['meta']['code_type'] if updates['meta'].get('code_type') else original['meta']['code_type']
        query = 'where={{"meta.name":"{0}","meta.code_type":"{1}","meta.is_current": {2}}}'.format(name, code_type, str(updates['meta']['is_current']).lower())
        code_get = utilities.get_eve('code', query)
        # TODO: Filter out the site we are updating.
        app.logger.debug(code_get)
        for code in code_get['_items']:
            request_payload = {'meta.is_current': False}
            utilities.patch_eve('code', code['_id'], request_payload)
    # We need the whole record so that we can manipulate code in the right
    # place.
    # Copy 'original' to a new dict, then update it with values from 'updates'
    # to create an item to deploy. Need to do the same process for meta first,
    # otherwise the update will fully overwrite.
    updated_item = original.copy()
    updated_item.update(updates)
    # Bug fix: only merge 'meta' when the PATCH actually includes it; the
    # unconditional access raised KeyError for updates without a 'meta' key.
    if updates.get('meta'):
        meta = original['meta'].copy()
        meta.update(updates['meta'])
        updated_item['meta'] = meta
    app.logger.debug('Ready to hand to Celery\n{0}\n{1}'.format(updated_item, original))
    tasks.code_update.delay(updated_item, original)
def sites_statistics():
    """
    Give some basic aggregations about site objects
    """
    app.logger.debug('Sites | Aggregations')
    express_result = utilities.get_eve('sites')
    app.logger.debug('Sites | Aggregations | Express Result - %s', express_result)
    # Tally sites by status and by update_group.
    status_counts = Counter()
    group_counts = Counter()
    for site in express_result['_items']:
        status_counts[site['status']] += 1
        group_counts[site['update_group']] += 1
    agg = {
        'express': {
            'status': dict(status_counts),
            'update_group': dict(group_counts),
        }
    }
    # Overall total comes from the Eve meta, not the per-status tallies.
    agg['express']['status']['total'] = express_result['_meta']['total']
    return make_response(jsonify(agg))
def get_command(machine_name):
    """
    Get a single command.

    :param machine_name: command to return a definition for.
    """
    # Find the matching command definition; unknown names get a 404.
    matches = [c for c in commands.COMMANDS if c['machine_name'] == machine_name]
    if not matches:
        abort(404)
    command = matches[0]['machine_name']
    if request.method == 'GET':
        return jsonify({'command': command})
    elif request.method == 'POST':
        # Loop through the commands list and grab the one we want
        app.logger.debug('Command | Execute | %s', command)
        if command == 'clear_php_cache':
            tasks.clear_php_cache.delay()
        elif command == 'import_code':
            # Grab payload, it is a JSON string from the request
            payload = json.loads(request.data)
            if not payload.get('env'):
                abort(
                    409,
                    'This command requires a payload containing a target `env`.'
                )
            tasks.import_code.delay(payload['env'])
        elif command == 'rebalance_update_groups':
            tasks.rebalance_update_groups.delay()
        elif command == 'update_homepage_files':
            tasks.update_homepage_files.delay()
        elif command == 'update_settings_files':
            sites = utilities.get_eve('sites')
            timestamp = datetime.now()
            total = sites['_meta']['total']
            # Dispatch one task per instance, numbered 1..total.
            for position, instance in enumerate(sites['_items'], start=1):
                tasks.update_settings_file.delay(instance, timestamp, position, total)
            tasks.clear_php_cache.delay()
        elif command == 'heal_code':
            tasks.code_heal.delay(utilities.get_eve('code'))
        elif command == 'heal_instances':
            tasks.instance_heal.delay(utilities.get_eve('sites'))
        elif command == 'sync_instances':
            tasks.instance_sync.delay()
        elif command == 'correct_file_permissions':
            for instance in utilities.get_eve('sites')['_items']:
                tasks.correct_file_permissions.delay(instance)
        elif command == 'backup_all_instances':
            tasks.backup_instances_all.delay(backup_type='on_demand')
        elif command == 'remove_extra_backups':
            tasks.remove_extra_backups.delay()
        return make_response(
            'Command "{0}" has been initiated.'.format(command))
def import_backup():
    """
    Import a backup to a new instance on the current version of core, profile,
    and any packages that are present. If a current version of a package is
    not available, the import will abort.
    """
    backup_request = request.get_json()
    app.logger.debug('Backup | Import | %s', backup_request)
    # Get the backup and then the site records.
    # TODO Get the list of env from the config files.
    # TODO Verify import is from different env, recommend restore if it is the same env.
    if not (backup_request.get('env') and backup_request.get('id')):
        abort(409, 'Error: Missing env (local, dev, test, prod) and id.')
    elif not backup_request.get('env'):
        abort(409, 'Error: Missing env (local, dev, test, prod).')
    elif not backup_request.get('id'):
        abort(409, 'Error: Missing id.')
    elif backup_request['env'] not in ['local', 'dev', 'test', 'prod']:
        abort(409, 'Error: Invalid env choose from [local, dev, test, prod]')
    backup_record = utilities.get_single_eve('backup', backup_request['id'], env=backup_request['env'])
    app.logger.debug('Backup | Import | Backup record - %s', backup_record)
    remote_site_record = utilities.get_single_eve(
        'sites', backup_record['site'], backup_record['site_version'], env=backup_request['env'])
    app.logger.debug('Backup | Import | Site record - %s', remote_site_record)
    # Get a list of packages to include
    try:
        package_list = utilities.package_import_cross_env(
            remote_site_record, env=backup_request['env'])
    except Exception as error:
        abort(500, error)
    app.logger.info('Backup | Import | Package list - %s', package_list)
    # Try to get the p1 record.
    local_p1_instance_record = utilities.get_single_eve(
        'sites', remote_site_record['sid'])
    app.logger.debug('Backup | Import | Local instance record - %s', local_p1_instance_record)
    # Try to get the path record if the site is launched.
    local_path_instance_record = False
    if remote_site_record['path'] != remote_site_record['sid']:
        query_string = 'where={{"path":"{0}"}}'.format(
            remote_site_record['path'])
        local_path_instance_records = utilities.get_eve('sites', query_string)
        app.logger.info('Backup | Import | Local path instance record - %s',
                        local_path_instance_records)
        if local_path_instance_records['_meta']['total'] == 1:
            local_path_instance_record = True
    # Bug fix: use .get('_error') — when the p1 record exists it has no
    # '_error' key, so direct indexing raised KeyError.
    if local_p1_instance_record.get('_error') and local_p1_instance_record[
            '_error']['code'] == 404 and not local_path_instance_record:
        # Create an instance with the same sid
        payload = {
            "status": remote_site_record['status'],
            "sid": remote_site_record['sid'],
            "path": remote_site_record['path']
        }
        response_string = 'the same'
    else:
        app.logger.info('Backup | Import | Instance sid or path exists')
        payload = {"status": "installed"}
        response_string = 'a new'
    # Add package list to payload if it exists
    if package_list:
        payload['code'] = {"package": package_list}
    # Set install
    payload['install'] = False
    new_instance = utilities.post_eve('sites', payload)
    app.logger.debug('Backup | Import | New instance record - %s', new_instance)
    env = backup_request['env']
    backup_id = backup_request['id']
    target_instance = new_instance['_id']
    tasks.import_backup.apply_async([env, backup_id, target_instance], countdown=30)
    return make_response(
        'Attempting to import backup to {0} sid'.format(response_string))