def terminate_worker(worker_id):
    """Terminate the cloud instance behind one worker and mark the record.

    :param worker_id: primary key of the Worker row to terminate.
    """
    record = Worker.objects.get(id=worker_id)
    print('Terminate Worker', record.id)
    # worker_id on the model is the provider-side instance id.
    AWS().terminate(record.worker_id)
    record.status = 'terminated'
    record.save()
def terminate_workers():
    """Terminate every worker currently marked idle.

    Mirrors `terminate_worker` for each idle row: shut down the provider
    instance, then flag the record as terminated.
    """
    idle_workers = Worker.objects.filter(status='idle')
    for worker in idle_workers:
        # FIX: the original printed twice per worker (once without the id,
        # once with it); print a single message, consistent with
        # terminate_worker.
        print('Terminate Worker', worker.id)
        AWS().terminate(worker.worker_id)
        worker.status = 'terminated'
        worker.save()
def launch_worker():
    """Create a Worker row, launch a cloud instance for it, and queue install.

    The provider is chosen by ``settings.DEFAULT_PROVIDER``; the matching
    Provider row supplies the launch configuration.
    """
    # Create the worker record first so it has a primary key to report on.
    worker = Worker()
    worker.name = 'New Worker'
    worker.status = 'new'
    worker.save()
    if settings.DEFAULT_PROVIDER == 'AWS':
        provider = Provider.objects.filter(name='AWS')[0]
        print(provider, provider.config)
        worker_result = AWS().launch(provider.config)
        worker.provider = 'AWS'
        worker.type = provider.config['instance_type']
    else:
        # BUG FIX: the original referenced `provider` here without ever
        # assigning it (it was only set in the AWS branch), raising
        # NameError for any non-AWS provider. Fetch the configured
        # provider's row before launching.
        provider = Provider.objects.filter(name=settings.DEFAULT_PROVIDER)[0]
        worker_result = SCW().launch(provider.config)
        worker.provider = 'SCW'
        worker.type = ''
    # Record the provider-side identity of the new instance.
    worker.ip = worker_result['ip']
    worker.worker_id = worker_result['id']
    worker.save()
    # Kick off installation asynchronously (Celery-style .delay).
    install_worker.delay(worker.id)
def task_run_task(task_id):
    """Run a Task end-to-end on this host: stage a workspace, clone and
    install each requested analysis app, execute it, and record results.

    NOTE(review): this module contains a second, near-identical
    `task_run_task` later in the file which shadows this definition at
    import time — confirm which one is intended and remove the other.

    :param task_id: primary key of the Task to execute.
    """
    print('RUN TASK: ', task_id)
    log_output = ''
    task = Task.objects.get(id=task_id)
    task.output = ''
    start = datetime.datetime.now()
    manifest = task.manifest
    # Record which machine picked the task up.
    task.machine = socket.gethostbyname(socket.gethostname())
    task.status = 'running'
    task.started = start
    task.save()
    # reverse() with no explicit order_by relies on the model's default
    # ordering — presumably "latest worker with this IP"; TODO confirm.
    worker = Worker.objects.filter(
        ip=socket.gethostbyname(socket.gethostname())).reverse()[0]
    worker.n_tasks += 1
    worker.status = 'running task %s' % (task.id)
    worker.started = start
    worker.save()
    # Stage the per-task workspace via `mkdir -p` shell-outs.
    task_location = '/projects/tasks/%s/' % (task.id)
    command = 'mkdir -p %s' % (task_location)
    run(command, shell=True)
    command = 'mkdir -p %s/input' % (task_location)
    run(command, shell=True)
    command = 'mkdir -p %s/output' % (task_location)
    run(command, shell=True)
    command = 'mkdir -p %s/scripts' % (task_location)
    run(command, shell=True)
    os.chdir(task_location)
    # Persist the manifest alongside the task's inputs/outputs.
    with open('manifest.json', 'w') as fp:
        json.dump(manifest, fp, sort_keys=True, indent=4)
    # file_list = []
    # for file_id in manifest['files']:
    #     print(file_id)
    #     file = File.objects.get(pk=file_id)
    #     file = get_file(file)
    #     file_list.append(file.name)
    # start analysis
    for analysis_name in manifest['analysis_types']:
        print('analysis_name', analysis_name)
        analysis = App.objects.filter(name=analysis_name)[0]
        print(analysis)
        command = 'mkdir -p /projects/programs/'
        run(command, shell=True)
        os.chdir('/projects/programs/')
        # NOTE(review): the directory name is taken from analysis.repository
        # but the clone URL is analysis.source — verify both agree, or the
        # following os.chdir(basename) will fail.
        basename = os.path.basename(analysis.repository)
        print('basename', basename)
        command = 'git clone {}'.format(analysis.source)
        run(command, shell=True)
        os.chdir(basename)
        # install
        command = 'bash scripts/install.sh'
        output = check_output(command, shell=True)
        log_output += output.decode('utf-8')
        # run
        os.chdir(task_location)
        command = 'python /projects/programs/{}/main.py -i {}'.format(
            basename, ' '.join(manifest['files']))
        print(command)
        output = run(command, shell=True, stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT)
        log_output += output.stdout.decode('utf-8')
        # NOTE(review): AWS is instantiated elsewhere (AWS().method) but
        # upload is called on the class — confirm it is a class/static method.
        AWS.upload(task_location + '/output', task.id)
    # upload results to b2/s3
    # md5_dict = calculate_md5('output/')
    # for hash in md5_dict:
    #     print(hash)
    #     try:
    #         file = File.objects.get(md5=hash)
    #     except:
    #         pass
    #     file = File(user=task.user)
    #     file.md5 = hash
    #     file.name = md5_dict[hash]
    #     file.save()
    #     source = 'output/{}'.format(file.name)
    #     dest = 'files/{}/{}'.format(file.id, file.name)
    #     output = b2.upload(source, dest)
    #     file.params = output
    #     file.location = 'b2://mendelmd/files/{}/{}'.format(file.id, file.name)
    #     file.save()
    #     # if task.analysis:
    #     #     task.analysis_set.all()[0].files.add(file)
    #     task.files.add(file)
    # add files if needed :)
    task.status = 'done'
    stop = datetime.datetime.now()
    task.execution_time = str(stop - start)
    task.finished = stop
    task.output = log_output
    task.save()
    worker = Worker.objects.filter(
        ip=socket.gethostbyname(socket.gethostname())).reverse()[0]
    worker.n_tasks -= 1
    # Mark the worker idle only when no other tasks remain on it.
    if worker.n_tasks == 0:
        worker.status = 'idle'
    worker.finished = stop
    worker.execution_time = str(stop - start)
    worker.save()
    print('Finished Task %s' % (task.name))
def update_worker(worker_id):
    """Run the provider's update routine on a worker's host.

    :param worker_id: primary key of the Worker row to update.
    """
    target = Worker.objects.get(id=worker_id)
    print('Update Worker', target.id)
    # Only AWS is handled; any other configured provider is a no-op here.
    if settings.DEFAULT_PROVIDER != 'AWS':
        return
    AWS().update(target.ip)
def install_worker(worker_id):
    """Run the provider's install routine on a worker's host.

    :param worker_id: primary key of the Worker row to provision.
    """
    target = Worker.objects.get(id=worker_id)
    print('Install Worker', target.id)
    # Only AWS installs are implemented; other providers are a no-op here.
    if settings.DEFAULT_PROVIDER != 'AWS':
        return
    AWS().install(target.ip)
def _stage_task_workspace(task_location):
    """Create the task's working directories (root, input, output, scripts).

    os.makedirs(..., exist_ok=True) is the in-process equivalent of the
    original `mkdir -p` shell-outs — same effect, no shell involved.
    """
    for sub in ('', 'input', 'output', 'scripts'):
        os.makedirs(os.path.join(task_location, sub), exist_ok=True)


def _run_analysis(analysis_name, manifest, task, task_location):
    """Clone, install and execute one analysis app; return its log text."""
    print('analysis_name', analysis_name)
    analysis = App.objects.filter(name=analysis_name)[0]
    print(analysis)
    os.makedirs('/projects/programs/', exist_ok=True)
    os.chdir('/projects/programs/')
    # NOTE(review): the directory name comes from analysis.repository while
    # the clone URL is analysis.source — confirm both refer to the same
    # repo, or the os.chdir(basename) below will fail.
    basename = os.path.basename(analysis.repository)
    print('basename', basename)
    run('git clone {}'.format(analysis.source), shell=True)
    os.chdir(basename)
    # Install the app via its own install script; capture its output.
    log_text = check_output('bash scripts/install.sh',
                            shell=True).decode('utf-8')
    # Run the app against the task's input files from the task workspace.
    os.chdir(task_location)
    command = 'python /projects/programs/{}/main.py -i {}'.format(
        basename, ' '.join(manifest['files']))
    print(command)
    result = run(command, shell=True, stdout=subprocess.PIPE,
                 stderr=subprocess.STDOUT)
    log_text += result.stdout.decode('utf-8')
    # NOTE(review): AWS is instantiated elsewhere (AWS().method) but upload
    # is called on the class — confirm upload is a class/static method.
    AWS.upload(task_location + '/output', task.id)
    return log_text


def task_run_task(task_id):
    """Execute Task *task_id* on this host: stage a workspace, run each
    requested analysis app, then record results and update the Worker row.

    NOTE(review): this module defines `task_run_task` twice; this later
    definition is the one that takes effect at import time — confirm and
    remove the duplicate.

    :param task_id: primary key of the Task to run.
    """
    print('RUN TASK: ', task_id)
    log_output = ''
    task = Task.objects.get(id=task_id)
    task.output = ''
    start = datetime.datetime.now()
    manifest = task.manifest
    # Hoisted: resolve this host's IP once instead of three times.
    my_ip = socket.gethostbyname(socket.gethostname())
    task.machine = my_ip
    task.status = 'running'
    task.started = start
    task.save()
    # reverse() with no explicit order_by depends on the model's default
    # ordering — presumably "newest worker on this IP"; TODO confirm.
    worker = Worker.objects.filter(ip=my_ip).reverse()[0]
    worker.n_tasks += 1
    worker.status = 'running task %s' % (task.id)
    worker.started = start
    worker.save()

    task_location = '/projects/tasks/%s/' % (task.id)
    _stage_task_workspace(task_location)
    os.chdir(task_location)
    # Persist the manifest alongside the task's inputs/outputs.
    with open('manifest.json', 'w') as fp:
        json.dump(manifest, fp, sort_keys=True, indent=4)

    for analysis_name in manifest['analysis_types']:
        log_output += _run_analysis(analysis_name, manifest, task,
                                    task_location)

    # Record completion on the task.
    task.status = 'done'
    stop = datetime.datetime.now()
    task.execution_time = str(stop - start)
    task.finished = stop
    task.output = log_output
    task.save()
    # Release the worker slot; mark idle only when no tasks remain on it.
    worker = Worker.objects.filter(ip=my_ip).reverse()[0]
    worker.n_tasks -= 1
    if worker.n_tasks == 0:
        worker.status = 'idle'
    worker.finished = stop
    worker.execution_time = str(stop - start)
    worker.save()
    print('Finished Task %s' % (task.name))