def adjust_cron_expression_for_schedule_tasks(check=None):
    """Adjust the cron expression of every scheduled scaling task of the
    Spotinst Elastigroups and push the result via configure_scheduled_tasks.

    Parameters
    ----------
    check : str, optional
        When given, only groups whose name contains this substring are
        processed.
    """
    operation = os.environ["OPERATION"]
    if validate(operation):
        return
    # NOTE(review): `groups` is not defined in this function — it is assumed
    # to be a module-level list of Elastigroup dicts; confirm at call site.
    for group in groups:
        group_name = group.get("name")
        # Skip groups that do not match the optional name filter.
        if check is not None and check not in str(group_name):
            continue
        scheduling = group.get("scheduling")
        if scheduling is None:
            continue
        scaling_tasks = scheduling.get("tasks")
        if scaling_tasks is None:
            continue
        for task in scaling_tasks:
            adjusted_cron_expression = adjust_schedule(
                task.get("cron_expression"), operation=operation)
            configure_scheduled_tasks(
                group_name,
                adjusted_cron_expression,
                task.get("scale_min_capacity"),
                task.get("scale_max_capacity"),
                task.get("scale_target_capacity"))
def configure_scheduled_tasks(group, cron_expression, scale_min_capacity,
                              scale_max_capacity, scale_target_capacity=None):
    """Replace the scheduled scaling task of an Elastigroup that carries the
    pre-adjustment cron expression with a fresh task using *cron_expression*,
    then update the group via the Spotinst client.

    Parameters
    ----------
    group : str
        Elastigroup name.
    cron_expression : str
        The already-adjusted cron expression for the new task.
    scale_min_capacity, scale_max_capacity
        Capacities for the new task.
    scale_target_capacity : optional
        Added to the task only when not None.

    Raises
    ------
    Exception
        Propagated unchanged from the Spotinst client on a failed update.
    """
    operation = os.environ["OPERATION"]
    eg = get_group_metadata(group)
    group_id = get_group_id(group)  # renamed: `id` shadowed the builtin

    # Collect the group's existing scale tasks.
    tasks = []
    scheduling_meta = eg.get("scheduling")
    if scheduling_meta:
        for task in scheduling_meta.get("tasks"):
            task["task_type"] = "scale"
            tasks.append(task)

    # Drop any task carrying the old (pre-adjustment) cron expression.
    # The expression is computed once (it was loop-invariant), and filtering
    # into a new list replaces the original delete-while-iterating pattern,
    # which skipped elements and relied on a swallowed IndexError.
    old_expression = adjust_schedule(
        cron_expression, operation=reverse_operation(operation))
    tasks = [task for task in tasks if old_expression not in task.values()]

    # Append the replacement task.
    updated_task = {"task_type": "scale",
                    "is_enabled": True,
                    "cron_expression": cron_expression,
                    "scale_min_capacity": scale_min_capacity,
                    "scale_max_capacity": scale_max_capacity
                    }
    if scale_target_capacity is not None:
        updated_task["scale_target_capacity"] = scale_target_capacity
    tasks.append(updated_task)

    # Build exactly one SDK ScheduledTask per task dict. The original looped
    # over task.items() and appended a ScheduledTask per key, duplicating
    # every task in the update payload.
    all_tasks = []
    for task in tasks:
        kwargs = dict(task_type='scale',
                      cron_expression=task["cron_expression"],
                      scale_min_capacity=task["scale_min_capacity"],
                      scale_max_capacity=task["scale_max_capacity"],
                      is_enabled=True)
        if task.get("scale_target_capacity") is not None:
            kwargs["scale_target_capacity"] = task["scale_target_capacity"]
        all_tasks.append(ScheduledTask(**kwargs))

    scheduling = Scheduling(tasks=all_tasks)
    group_update = Elastigroup(scheduling=scheduling)
    client.update_elastigroup(group_update=group_update, group_id=group_id)
    logger.info("Scheduled Tasks of Elasticgroup %s have been configured successfully." % group)
def adjust_aws_schedule_tasks(check=None):
    """Adjust the recurrence (cron) of every scheduled action of the AWS
    Auto Scaling groups, optionally filtered by group-name substring.

    Parameters
    ----------
    check : str, optional
        When given, only groups whose name contains this substring are
        processed.
    """
    operation = os.environ["OPERATION"]
    if validate(operation):
        return

    groups = client.describe_auto_scaling_groups().get("AutoScalingGroups")
    autoscaling_groups = []
    for group in groups:
        asg = group.get("AutoScalingGroupName")
        if check is None or check in str(asg):
            autoscaling_groups.append(asg)

    # Map group name -> its scheduled actions; groups without actions are
    # skipped entirely.
    schedules = {}
    for group in autoscaling_groups:
        scheduled_actions = client.describe_scheduled_actions(
            AutoScalingGroupName=group).get("ScheduledUpdateGroupActions")
        if scheduled_actions:
            schedules[group] = scheduled_actions

    for actions in schedules.values():
        for action in actions:
            asg_name = action.get("AutoScalingGroupName")
            action_name = action.get("ScheduledActionName")
            recurrence = action.get("Recurrence")
            update = adjust_schedule(recurrence, operation)
            try:
                client.put_scheduled_update_group_action(
                    AutoScalingGroupName=asg_name,
                    ScheduledActionName=action_name,
                    Recurrence=update,
                    MinSize=action.get("MinSize"),
                    MaxSize=action.get("MaxSize"))
                logger.info(
                    "Adjusted schedule of AutoscalingGroup %s from %s to %s successfully."
                    % (asg_name, recurrence, update))
            # Fixed: original wrote `except exception`, which is a NameError
            # and would have crashed instead of logging the AWS error.
            except Exception as e:
                logger.error(e)
def adjust_kube_schedule_json_files(json_files):
    """Recursively walk *json_files* and rewrite the "schedule" cron
    expression of every .json file, adjusted for the current OPERATION.

    Parameters
    ----------
    json_files : str
        Path of the directory to scan recursively.
    """
    logger = applogger("JSON_FILES")
    operation = os.environ["OPERATION"]
    if validate(operation):
        return
    if not os.path.exists(json_files):
        logger.error("The provided directory does not exist.")
        return

    msgs = set()
    for root, _dirs, files in os.walk(json_files):
        for name in files:
            if not name.endswith(".json"):
                continue
            fname = os.path.join(root, name)
            with open(fname) as f:
                lines = f.readlines()
            for line in lines:
                if "schedule" not in line:
                    continue
                # Raw value as written in the file, e.g. '"0 6 * * 1-5",\n';
                # kept verbatim so it can be text-replaced below.
                schedule = line.split(":")[1].lstrip()
                # Strip quotes and a trailing comma to get the bare cron
                # expression for adjust_schedule.
                exp = schedule.replace('"', '')
                exp = re.sub(',$', '', exp)
                updated_schedule = adjust_schedule(exp, operation).rstrip('\n')
                updated_schedule = '"%s",' % updated_schedule
                updated_schedule = updated_schedule + '\n'
                # Rewrite the file in place, replacing the old raw value.
                # (Loop variables renamed: the original shadowed both `file`
                # and `line` here.)
                with fileinput.FileInput(fname, inplace=True) as fobj:
                    for file_line in fobj:
                        print(file_line.replace(schedule, updated_schedule), end='')
                msgs.add(
                    "Updated Cron Expressions of %s successfully" % fname)

    if not msgs:
        # logger.warn is deprecated in favour of logger.warning
        logger.warning(
            "There is no JSON File matching the provided criteria or may be directory is empty"
        )
    else:
        for msg in msgs:
            logger.info(msg)
def update_kube_annotations(namespace=pykube.all):
    """Adjust the cron schedules stored in the
    'zalando.org/schedule-actions' annotation of every Deployment and
    StackSet in *namespace*, then push each updated resource back to the
    cluster.
    """
    api = get_kube_api()
    operation = os.environ["OPERATION"]
    for resource in ("Deployment", "StackSet"):
        if resource == "Deployment":
            pykube_en = pykube.Deployment.objects(api, namespace=namespace)
        else:  # "StackSet"
            pykube_en = StackSet.objects(api, namespace=namespace)
        for resource_name in pykube_en:
            if 'zalando.org/schedule-actions' not in resource_name.annotations:
                continue
            try:
                schedules = resource_name.annotations.get("zalando.org/schedule-actions")
                # Normalise single quotes so the annotation parses as JSON.
                if "\'" in schedules:
                    schedules = schedules.replace("\'", "\"")
                # Schedules referencing s3 are left untouched.
                if "s3" in schedules:
                    continue
                updated_schedules = []
                for sch in json.loads(schedules):
                    old_schedule = sch.get("schedule")
                    new_schedule = adjust_schedule(old_schedule, operation)
                    sch["schedule"] = new_schedule
                    logger.debug("Updating Schedule of %s %s from %s to %s" % (resource, resource_name.name, old_schedule, new_schedule))
                    updated_schedules.append(sch)
                try:
                    resource_name.annotations["zalando.org/schedule-actions"] = '%s' % json.dumps(updated_schedules)
                    resource_name.update()
                    logger.info("Schedule of %s %s has been updated successfully. " % (resource, resource_name.name))
                except Exception as e:
                    logger.error(e)
            except Exception as e:
                # logger.warn is deprecated in favour of logger.warning
                logger.warning(e)
def adjust_senza_yaml_files_definitions(senza_files):
    """Recursively walk *senza_files* and adjust the cronExpression of every
    .yaml Senza definition, skipping files already marked with the
    TimeSaving protection flag and tagging processed files with it.

    Parameters
    ----------
    senza_files : str
        Path of the directory to scan recursively.
    """
    operation = os.environ["OPERATION"]
    if validate(operation):
        return
    if not os.path.exists(senza_files):
        logger.error("The provided directory does not exist.")
        return

    msgs = set()
    for root, _dirs, files in os.walk(senza_files):
        for file_name in files:
            if not file_name.endswith(".yaml"):
                continue
            fname = os.path.join(root, file_name)
            # Files already carrying the TimeSaving flag are protected and
            # must not be modified again.
            with open(fname) as f:
                protected = "TimeSaving" in f.read()
            if protected:
                msgs.add(
                    "File %s will not be modified as protection flag TimeSaving exist"
                    % file_name)
                continue
            with open(fname) as f:
                datafile = f.readlines()
            for line in datafile:
                if "cronExpression" not in line:
                    continue
                # Raw value as written in the file (quotes + newline kept so
                # it can be text-replaced below).
                expression = line.split(":")[1].lstrip()
                _expression = expression.strip("\n")
                exp = expression.replace('"', '')
                updated_schedule = adjust_schedule(exp, operation)
                # Rewrite the cron expression in place.
                with fileinput.FileInput(fname, inplace=True) as fobj:
                    for file_line in fobj:
                        print(file_line.replace(expression, updated_schedule),
                              end='')
                msgs.add("Updated Cron Expression of %s from %s to %s"
                         % (file_name, _expression,
                            updated_schedule.strip("\n")))
                # Tag the scaling task with the TimeSaving flag so this file
                # is skipped on the next run.  (The original printed
                # line.replace(line, line) — a no-op replace.)
                with fileinput.FileInput(fname, inplace=True) as fobj:
                    for file_line in fobj:
                        if "TimeSaving" in file_line:
                            print(file_line, end='')
                        else:
                            print(file_line.replace(
                                "taskType: scale",
                                "taskType: scale # TimeSaving"
                            ), end='')

    if not msgs:
        # logger.warn is deprecated in favour of logger.warning
        logger.warning(
            "There is no YAML File matching the provided criteria or may be directory is empty"
        )
    else:
        for msg in msgs:
            if "protection flag TimeSaving exist" in msg:
                logger.warning(msg)
            elif "Updated Cron Expression" in msg:
                logger.info(msg)