def links_maintenance(dry_run: bool):
    """Find and (optionally) delete orphaned short links.

    Cross-references the S3 objects against the DynamoDB link table:
    DB rows whose full hash has no S3 object are "db dirty"; S3 keys with
    no DB row are "s3 dirty". Prompts before each deletion pass; with
    dry_run the prompts are shown but nothing is deleted.
    """
    s3links, dblinks = list_short_links()

    # S3 keys with their 6-character prefix stripped; single-char leftovers
    # are ignored.
    s3keys_set = {
        obj['Key'][6:]
        for page in s3links
        for obj in page['Contents']
        if len(obj['Key'][6:]) > 1
    }

    dbkeys_set = set()
    dbhashes_set = set()
    for page in dblinks:
        for item in page['Items']:
            full_hash = item['full_hash']['S']
            dbkeys_set.add((item['unique_subhash']['S'], full_hash))
            dbhashes_set.add(full_hash)

    dbdirty_set = {key for key in dbkeys_set if key[1] not in s3keys_set}
    s3dirty_set = {key for key in s3keys_set if key not in dbhashes_set}

    if are_you_sure('delete {} db elements:\n{}\n'.format(
            len(dbdirty_set), dbdirty_set)) and not dry_run:
        for item in dbdirty_set:
            print('Deleting {}'.format(item))
            delete_short_link(item)
    if are_you_sure('delete {} s3 elements:\n{}\n'.format(
            len(s3dirty_set), s3dirty_set)) and not dry_run:
        delete_s3_links(s3dirty_set)
def environment_stop(cfg: Config):
    """Stops an environment.

    Sets the desired capacity of each of the environment's auto-scaling
    groups to zero. Refuses to touch PROD outright, and skips any ASG whose
    minimum size is non-zero (capacity 0 would violate it) or that is
    already at zero desired capacity.
    """
    if cfg.env == Environment.PROD:
        print('Operation aborted. This would bring down the site')
        print(
            'If you know what you are doing, edit the code in bin/lib/ce.py, function environment_stop_cmd'
        )
    elif are_you_sure('stop environment', cfg):
        for asg in get_autoscaling_groups_for(cfg):
            group_name = asg['AutoScalingGroupName']
            if asg['MinSize'] > 0:
                print(
                    f"Skipping ASG {group_name} as it has a non-zero min size")
                continue
            prev = asg['DesiredCapacity']
            if not prev:
                # Fixed message grammar: was "as it already zero desired capacity".
                print(
                    f"Skipping ASG {group_name} as it already has zero desired capacity"
                )
                continue
            print(
                f"Updating {group_name} to have desired capacity 0 (from {prev})"
            )
            as_client.update_auto_scaling_group(
                AutoScalingGroupName=group_name, DesiredCapacity=0)
def tools_install(version: str, destination: str):
    """ Install demangling tools version VERSION. """
    releases = get_tools_releases()
    version = Version.from_string(version, assumed_source=VersionSource.GITHUB)
    # Pick the first release matching the requested version, if any.
    release = next((rel for rel in releases if rel.version == version), None)
    if release is None:
        click.echo(f'Unable to find version {version}')
        return
    if not are_you_sure("deploy tools"):
        return
    with TemporaryDirectory(prefix='ce-tools-') as td_str:
        workdir = Path(td_str)
        tarball = workdir / 'tarball.tar.xz'
        unpack_dir = workdir / 'tools'
        unpack_dir.mkdir()
        # Fetch the release tarball, unpack it (stripping the top-level
        # directory), then mirror it onto the destination.
        download_release_file(release.key, str(tarball))
        subprocess.check_call([
            'tar', '--strip-components', '1', '-C', str(unpack_dir),
            '-Jxf', str(tarball)
        ])
        subprocess.check_call([
            'rsync', '-a', '--delete-after', f'{unpack_dir}/',
            f'{destination}/'
        ])
    click.echo(f'Tools updated to {version}')
def decorations_edit(cfg: Config, lang_filter: Sequence[str], name: str,
                     regex: str, decoration: str):
    """Edit existing decoration NAME.

    Empty regex/decoration/filter arguments keep the existing values.
    Shows a before/after diff and prompts before saving.
    """
    events = get_events(cfg)
    for i, dec in enumerate(events['decorations']):
        if dec['name'] == name:
            # Fall back to the stored values for any argument left empty.
            regex, decoration = check_dec_args(
                regex or dec['regex'],
                decoration or json.dumps(dec['decoration']))
            new_dec = {
                'name': dec['name'],
                'filter': lang_filter or dec['filter'],
                'regex': regex,
                'decoration': decoration
            }
            print('{}\n{}\n{}'.format(
                DECORATION_FORMAT.format('Name', 'Filters', 'Regex',
                                         'Decoration'),
                DECORATION_FORMAT.format('<FROM', str(dec['filter']),
                                         dec['regex'],
                                         json.dumps(dec['decoration'])),
                DECORATION_FORMAT.format('>TO', str(new_dec['filter']),
                                         new_dec['regex'],
                                         json.dumps(new_dec['decoration']))))
            if are_you_sure('edit decoration: {}'.format(dec['name']), cfg):
                # BUG FIX: was events['decoration'][i] (missing 's'), which
                # would KeyError instead of replacing the decoration in place.
                events['decorations'][i] = new_dec
                save_event_file(cfg, json.dumps(events))
            break
def motd_update(cfg: Config, message: str):
    """Updates the message of the day to MESSAGE."""
    events = get_events(cfg)
    prompt = 'update motd from: {} to: {}'.format(events['motd'], message)
    if are_you_sure(prompt, cfg):
        events['motd'] = message
        save_event_file(cfg, json.dumps(events))
def decorations_clear(cfg: Config):
    """Clear all decorations."""
    events = get_events(cfg)
    prompt = 'clear all decorations (count: {})'.format(
        len(events['decorations']))
    if are_you_sure(prompt, cfg):
        events['decorations'] = []
        save_event_file(cfg, json.dumps(events))
def instances_exec_all(cfg: Config, remote_cmd: Sequence[str]):
    """Execute REMOTE_CMD on all the instances."""
    escaped = shlex.join(remote_cmd)
    # Prompt first; only run once confirmed.
    if are_you_sure(f'exec command {escaped} in all instances', cfg):
        print("Running '{}' on all instances".format(escaped))
        exec_remote_all(pick_instances(cfg), remote_cmd)
def instances_stop(cfg: Config):
    """Stop the instances."""
    if cfg.env != Environment.PROD:
        # Non-prod: confirm, then stop the service on every instance.
        if are_you_sure('stop all instances', cfg):
            exec_remote_all(pick_instances(cfg),
                            ['sudo', 'systemctl', 'stop', 'compiler-explorer'])
        return
    # Prod is deliberately protected from this command.
    print('Operation aborted. This would bring down the site')
    print(
        'If you know what you are doing, edit the code in bin/lib/ce.py, function instances_stop_cmd'
    )
def ads_add(cfg: Config, lang_filter: Sequence[str], html: str):
    """Add a community advert with HTML."""
    events = get_events(cfg)
    existing_ids = [ad['id'] for ad in events['ads']]
    new_ad = {
        'html': html,
        'filter': lang_filter,
        # First ad gets id 0; otherwise one past the current maximum.
        'id': max(existing_ids) + 1 if existing_ids else 0
    }
    prompt = 'add ad: {}'.format(
        ADS_FORMAT.format(new_ad['id'], str(new_ad['filter']), new_ad['html']))
    if are_you_sure(prompt, cfg):
        events['ads'].append(new_ad)
        save_event_file(cfg, json.dumps(events))
def ads_remove(cfg: Config, ad_id: int, force: bool):
    """Remove community ad number AD_ID."""
    events = get_events(cfg)
    for index, ad in enumerate(events['ads']):
        if ad['id'] != ad_id:
            continue
        prompt = 'remove ad: {}'.format(
            ADS_FORMAT.format(ad['id'], str(ad['filter']), ad['html']))
        # --force skips the confirmation prompt.
        if force or are_you_sure(prompt, cfg):
            del events['ads'][index]
            save_event_file(cfg, json.dumps(events))
        break
def decorations_remove(cfg: Config, name: str, force: bool):
    """Remove a decoration."""
    events = get_events(cfg)
    for index, dec in enumerate(events['decorations']):
        if dec['name'] != name:
            continue
        prompt = 'remove decoration: {}'.format(
            DECORATION_FORMAT.format(dec['name'], str(dec['filter']),
                                     dec['regex'],
                                     json.dumps(dec['decoration'])))
        # --force skips the confirmation prompt.
        if force or are_you_sure(prompt, cfg):
            del events['decorations'][index]
            save_event_file(cfg, json.dumps(events))
        break
def environment_refresh(cfg: Config, min_healthy_percent: int, motd: str):
    """Refreshes an environment. This replaces all the instances in the ASGs associated with an environment with new instances (with the latest code), while ensuring there are some left to handle the traffic while we update."""
    # Swap in the temporary motd for the duration; restored at the end.
    old_motd = update_motd(cfg, motd)
    for asg in get_autoscaling_groups_for(cfg):
        group_name = asg['AutoScalingGroupName']
        if asg['DesiredCapacity'] == 0:
            # Nothing to refresh in an empty ASG.
            print(f"Skipping ASG {group_name} as it has a zero size")
            continue
        # Re-attach to an in-flight refresh rather than starting a second one.
        describe_state = as_client.describe_instance_refreshes(
            AutoScalingGroupName=group_name)
        existing_refreshes = [
            x for x in describe_state['InstanceRefreshes']
            if x['Status'] in ('Pending', 'InProgress')
        ]
        if existing_refreshes:
            refresh_id = existing_refreshes[0]['InstanceRefreshId']
            print(f" Found existing refresh {refresh_id} for {group_name}")
        else:
            # Declining the prompt skips this ASG only, not the whole command.
            if not are_you_sure(
                    f'Refresh instances in {group_name} with version {describe_current_release(cfg)}',
                    cfg):
                continue
            print(" Starting new refresh...")
            refresh_result = as_client.start_instance_refresh(
                AutoScalingGroupName=group_name,
                Preferences=dict(MinHealthyPercentage=min_healthy_percent))
            refresh_id = refresh_result['InstanceRefreshId']
            print(f" id {refresh_id}")
        # Poll the refresh every 5s until it reaches a terminal state,
        # printing status only when it changes.
        last_log = ""
        while True:
            time.sleep(5)
            describe_state = as_client.describe_instance_refreshes(
                AutoScalingGroupName=group_name,
                InstanceRefreshIds=[refresh_id])
            refresh = describe_state['InstanceRefreshes'][0]
            status = refresh['Status']
            if status == 'InProgress':
                log = f" {status}, {refresh['PercentageComplete']}%, " \
                      f"{refresh['InstancesToUpdate']} to update. " \
                      f"{refresh.get('StatusReason', '')}"
            else:
                log = f" Status: {status}"
            if log != last_log:
                print(log)
                last_log = log
            if status in ('Successful', 'Failed', 'Cancelled'):
                break
    # Restore the previous message of the day.
    update_motd(cfg, old_motd)
def ads_edit(cfg: Config, ad_id: int, html: str, lang_filter: Sequence[str]):
    """Edit community ad AD_ID."""
    events = get_events(cfg)
    for index, ad in enumerate(events['ads']):
        if ad['id'] != ad_id:
            continue
        # Empty html/filter arguments keep the stored values.
        new_ad = {
            'id': ad['id'],
            'filter': lang_filter or ad['filter'],
            'html': html or ad['html']
        }
        print('{}\n{}\n{}'.format(
            ADS_FORMAT.format('Event', 'Filter(s)', 'HTML'),
            ADS_FORMAT.format('<FROM', str(ad['filter']), ad['html']),
            ADS_FORMAT.format('>TO', str(new_ad['filter']), new_ad['html'])))
        if are_you_sure('edit ad id: {}'.format(ad['id']), cfg):
            events['ads'][index] = new_ad
            save_event_file(cfg, json.dumps(events))
        break
def links_update(link_from: str, link_to: str):
    """Update a link; point LINK_FROM to existing LINK_TO."""
    # Short-link names are at least 6 characters long.
    if len(link_from) < 6:
        raise RuntimeError('from length must be at least 6')
    if len(link_to) < 6:
        raise RuntimeError('to length must be at least 6')
    source_link = get_short_link(link_from)
    if not source_link:
        raise RuntimeError(f"Couldn't find base link {link_from}")
    target_link = get_short_link(link_to)
    if not target_link:
        raise RuntimeError(f"Couldn't find existing short link {link_to}")
    # Repoint the target at the source's content.
    target_link['full_hash'] = source_link['full_hash']
    print(f'New link: {pformat(target_link)}')
    if are_you_sure(f'update link named {link_to}'):
        put_short_link(target_link)
def instances_restart(cfg: Config, motd: str):
    """Restart the instances, picking up new code."""
    if not are_you_sure(
            'restart all instances with version {}'.format(
                describe_current_release(cfg)), cfg):
        return
    begin_time = datetime.datetime.now()
    # Store old motd
    old_motd = update_motd(cfg, motd)
    # ASG name -> desired capacity to restore after the rolling restart.
    modified_groups: Dict[str, int] = {}
    failed = False
    to_restart = pick_instances(cfg)
    for index, instance in enumerate(to_restart):
        logger.info("Restarting %s (%d of %d)...", instance, index + 1,
                    len(to_restart))
        as_instance_status = instance.describe_autoscale()
        # Only restart instances that are still in their ASG and InService;
        # anything else is skipped rather than treated as a failure.
        if not as_instance_status:
            logger.warning("Skipping %s as it is no longer in the ASG",
                           instance)
            continue
        as_group_name = as_instance_status['AutoScalingGroupName']
        if as_instance_status['LifecycleState'] != 'InService':
            logger.warning("Skipping %s as it is not InService (%s)", instance,
                           as_instance_status)
            continue
        try:
            # May grow the ASG's desired capacity; records the original value
            # in modified_groups so it can be restored below.
            restart_one_instance(as_group_name, instance, modified_groups)
        except RuntimeError as e:
            # A single failed restart does not abort the loop; it is recorded
            # and reflected in the exit code.
            logger.error("Failed restarting %s - skipping: %s", instance, e)
            failed = True
            # TODO, what here?
    # Restore each modified ASG's desired capacity to its original value.
    for group, desired in iter(modified_groups.items()):
        logger.info("Putting desired instances for %s back to %d", group,
                    desired)
        as_client.update_auto_scaling_group(AutoScalingGroupName=group,
                                            DesiredCapacity=desired)
    update_motd(cfg, old_motd)
    end_time = datetime.datetime.now()
    delta_time = end_time - begin_time
    print(f'Instances restarted in {delta_time.total_seconds()} seconds')
    # Non-zero exit if any instance failed to restart.
    sys.exit(1 if failed else 0)
def links_name(link_from: str, link_to: str):
    """Give link LINK_FROM a new name LINK_TO."""
    # Short-link names are at least 6 characters long.
    if len(link_from) < 6:
        raise RuntimeError('from length must be at least 6')
    if len(link_to) < 6:
        raise RuntimeError('to length must be at least 6')
    base_link = get_short_link(link_from)
    if not base_link:
        raise RuntimeError(f"Couldn't find base link {link_from}")
    # Rename the link and reset its bookkeeping fields.
    base_link['prefix']['S'] = link_to[:6]
    base_link['unique_subhash']['S'] = link_to
    base_link['stats']['M']['clicks']['N'] = '0'
    base_link['creation_ip']['S'] = '0.0.0.0'
    # It's us, so we don't care about "anonymizing" the time
    base_link['creation_date']['S'] = datetime.datetime.utcnow().isoformat()
    # Collect the named-link metadata interactively.
    title = input('Link title: ')
    author = input('Author(s): ')
    if not author:
        # We explicitly ignore author = . in the site code
        author = '.'
    project = input('Project: ')
    description = input('Description: ')
    base_link['named_metadata'] = {
        'M': {
            'title': {'S': title},
            'author': {'S': author},
            'project': {'S': project},
            'description': {'S': description},
        }
    }
    print(f'New link: {pformat(base_link)}')
    if are_you_sure(f'create new link named {link_to}'):
        put_short_link(base_link)
def decorations_add(cfg: Config, lang_filter: Sequence[str], name: str,
                    regex: str, decoration: str):
    """ Add a decoration called NAME matching REGEX resulting in json DECORATION. """
    events = get_events(cfg)
    existing_names = [d['name'] for d in events['decorations']]
    if name in existing_names:
        raise RuntimeError(f'Duplicate decoration name {name}')
    regex, decoration = check_dec_args(regex, decoration)
    new_decoration = {
        'name': name,
        'filter': lang_filter,
        'regex': regex,
        'decoration': decoration
    }
    prompt = 'add decoration: {}'.format(
        DECORATION_FORMAT.format(new_decoration['name'],
                                 str(new_decoration['filter']),
                                 new_decoration['regex'],
                                 json.dumps(new_decoration['decoration'])))
    if are_you_sure(prompt, cfg):
        events['decorations'].append(new_decoration)
        save_event_file(cfg, json.dumps(events))
def events_from_file(cfg: Config, file: TextIO):
    """Reads FILE and replaces the events file with its contents."""
    # Parse first so malformed JSON fails loudly before we touch anything.
    new_contents = json.loads(file.read())
    if are_you_sure(f'load events from file {file.name}', cfg):
        # Consistency fix: every other caller in this file passes a JSON
        # string to save_event_file (json.dumps(...)); previously the parsed
        # dict was passed through directly.
        save_event_file(cfg, json.dumps(new_contents))
def ads_clear(cfg: Config):
    """Clear all community ads."""
    events = get_events(cfg)
    prompt = 'clear all ads (count: {})'.format(len(events['ads']))
    if are_you_sure(prompt, cfg):
        events['ads'] = []
        save_event_file(cfg, json.dumps(events))
def motd_clear(cfg: Config):
    """Clears the message of the day."""
    events = get_events(cfg)
    if are_you_sure('clear current motd: {}'.format(events['motd']), cfg):
        events['motd'] = ''
        # Consistency fix: was save_events(cfg, events); every sibling event
        # mutator (motd_update, ads_add, decorations_clear, ...) persists via
        # save_event_file(cfg, json.dumps(events)).
        save_event_file(cfg, json.dumps(events))
def builds_set_current(cfg: Config, branch: Optional[str], version: str,
                       raw: bool, confirm: bool):
    """Set the current version to VERSION for this environment.

    If VERSION is "latest" then the latest version (optionally filtered by --branch), is set.
    """
    # A bounce lock blocks any version change for this environment.
    if has_bouncelock_file(cfg):
        print(
            f"{cfg.env.value} is currently bounce locked. New versions can't be set until the lock is lifted"
        )
        sys.exit(1)
    to_set = None
    release = None
    if raw:
        # --raw: take VERSION as the literal key, no release lookup.
        to_set = version
    else:
        setting_latest = version == 'latest'
        release = find_latest_release(
            branch) if setting_latest else find_release(
                Version.from_string(version))
        if not release:
            print("Unable to find version " + version)
            if setting_latest and branch != '':
                print(
                    'Branch {} has no available versions (Bad branch/No image yet built)'
                    .format(branch))
        elif confirm:
            # --confirm: skip the interactive prompts.
            print(f'Found release {release}')
            to_set = release.key
        elif are_you_sure('change current version to {}'.format(release.key),
                          cfg) and confirm_branch(release.branch):
            print(f'Found release {release}')
            to_set = release.key
    # NOTE(review): with --raw, release stays None, so this whole deploy
    # section is skipped — confirm whether that is intended.
    if to_set is not None and release is not None:
        # Warn (and allow abort) if compiler discovery hasn't run for this
        # version; the runner environment itself is exempt.
        if (cfg.env.value != 'runner') and not runner_discoveryexists(
                cfg.env.value, release.version):
            if not confirm_action(
                    f'Compiler discovery has not run for {cfg.env.value}/{release.version}, are you sure you want to continue?'
            ):
                sys.exit(1)
        log_new_build(cfg, to_set)
        # Deploy static files: new-style releases carry a static_key; older
        # ones go through the legacy branch-based path.
        if release and release.static_key:
            if not deploy_staticfiles(release):
                print("...aborted due to deployment failure!")
                sys.exit(1)
        else:
            old_deploy_staticfiles(branch, to_set)
        set_current_key(cfg, to_set)
        if release:
            # Record the deploy against the release in Sentry.
            print("Marking as a release in sentry...")
            token = get_ssm_param("/compiler-explorer/sentryAuthToken")
            result = requests.post(
                f"https://sentry.io/api/0/organizations/compiler-explorer/releases/{release.version}/deploys/",
                data=dict(environment=cfg.env.value),
                headers=dict(Authorization=f'Bearer {token}'))
            if not result.ok:
                raise RuntimeError(
                    f"Failed to send to sentry: {result} {result.content.decode('utf-8')}"
                )
            print("...done", json.loads(result.content.decode()))