def _clean_cluster_containers(cluster_name: str, parallels: int):
    """Remove containers on every node of the cluster, in parallel.

    Args:
        cluster_name: Name of the target cluster.
        parallels: Number of worker threads used to clean nodes concurrently.
    """
    # Load details
    cluster_details = load_cluster_details(cluster_name=cluster_name)
    admin_username = cluster_details['user']['admin_username']
    master_hostname = cluster_details['master']['hostname']
    redis_port = cluster_details['master']['redis']['port']
    redis = Redis(host=master_hostname, port=redis_port, charset="utf-8", decode_responses=True)
    nodes_details = get_nodes_details(
        redis,
        cluster_name=cluster_name
    )

    # Parallel clean: one (admin_username, hostname) task per node.
    # Iterate .values() directly -- the node-name keys were unused.
    with ThreadPool(parallels) as pool:
        params = [
            (admin_username, node_details['hostname'])
            for node_details in nodes_details.values()
        ]
        pool.starmap(
            _clean_node_containers,
            params
        )
def delete_pending_job_ticket(cluster_name: str, job_name: str):
    """Drop every occurrence of a job from the cluster's pending-jobs queue."""
    # Resolve the master Redis endpoint from the cluster config.
    cluster_details = load_cluster_details(cluster_name=cluster_name)
    master = cluster_details['master']
    redis = Redis(
        host=master['hostname'],
        port=master['redis']['port'],
        charset="utf-8",
        decode_responses=True
    )

    # count=0 removes all entries equal to job_name from the list.
    redis.lrem(f"{cluster_name}:pending_jobs", 0, job_name)
def create_killed_job_ticket(cluster_name: str, job_name: str):
    """Push a job onto the cluster's killed-jobs queue in the master Redis."""
    # Resolve the master Redis endpoint from the cluster config.
    cluster_details = load_cluster_details(cluster_name=cluster_name)
    master = cluster_details['master']
    redis = Redis(
        host=master['hostname'],
        port=master['redis']['port'],
        charset="utf-8",
        decode_responses=True
    )

    redis.lpush(f"{cluster_name}:killed_jobs", job_name)
def __init__(self, cluster_name: str, redis_port: int, check_interval: int = 10):
    """Set up the agent's cluster identity, Redis connection, and polling state.

    Args:
        cluster_name: Name of the cluster this agent serves.
        redis_port: Port of the Redis instance on localhost.
        check_interval: Seconds between checks (default 10).
    """
    super().__init__()

    cluster_details = load_cluster_details(cluster_name=cluster_name)

    self._cluster_name = cluster_name
    self._cluster_id = cluster_details['id']
    self._admin_username = cluster_details['user']['admin_username']

    # Connect to the local Redis instance.
    self._redis = redis.Redis(host='localhost', port=redis_port, charset="utf-8", decode_responses=True)

    self._check_interval = check_interval
    # Jobs already observed as killed; starts empty.
    self._killed_jobs = []
def create_job_details(cluster_name: str, job_name: str):
    """Store a job's details (with an empty container map) in the master Redis hash."""
    # Load cluster and job configuration.
    cluster_details = load_cluster_details(cluster_name=cluster_name)
    job_details = load_job_details(cluster_name=cluster_name, job_name=job_name)

    # A freshly created job has no containers attached yet.
    job_details['containers'] = {}

    master = cluster_details['master']
    redis = Redis(
        host=master['hostname'],
        port=master['redis']['port'],
        charset="utf-8",
        decode_responses=True
    )
    redis.hset(f"{cluster_name}:job_details", job_name, json.dumps(job_details))
encoding='utf8') if completed_process.returncode != 0: raise Exception(completed_process.stderr) sys.stdout.write(completed_process.stdout) if __name__ == "__main__": # Load args parser = argparse.ArgumentParser() parser.add_argument('cluster_name') parser.add_argument('node_name') parser.add_argument('parallels', type=int) args = parser.parse_args() # Load details cluster_details = load_cluster_details(cluster_name=args.cluster_name) master_hostname = cluster_details['master']['hostname'] redis_port = cluster_details['master']['redis']['port'] # Load node details redis = Redis(host=master_hostname, port=redis_port, charset="utf-8", decode_responses=True) node_details = get_node_details(redis=redis, cluster_name=args.cluster_name, node_name=args.node_name) master_details = get_master_details(redis=redis, cluster_name=args.cluster_name) master_image_files_details = master_details['image_files'] node_image_files_details = node_details['image_files']