def get(context, task_id):
    """Return the task identified by *task_id* with its parent job embedded.

    Raises an invalid-resource error when no such task exists.
    """
    database = context['database']
    task_document = database.find(task_id, 'tasks')
    if task_document is None:
        raise exceptions.invalid_resource_id
    # Every task references its owning job; embed that job in the response.
    parent_job = database.find(task_document.job_id, 'jobs')
    return TaskResponse(**task_document.dict(), job = JobResponse(**parent_job.dict()))
def update(context, job_id, job):
    """Apply *job.action* to an existing job and broadcast the state change.

    Only the ``start`` action is supported: the job must be in the ``ready``
    state and must already have a scene uploaded.  Starting a job provisions
    the cluster service, requests autoscaling capacity for every task,
    enqueues one message per task, and flips the job to ``running``.
    """
    cluster = context['cluster']
    database = context['database']
    container_queue = context['container_queue']
    task_queue = context['task_queue']
    state_queue = context['state_queue']
    job_document = database.find(job_id, 'jobs')
    if job_document is None:
        raise exceptions.invalid_resource_id
    task_documents = database.find_many({'_id': {'$in': job_document.task_ids}}, 'tasks')
    task_responses = [TaskResponse(**document.dict()) for document in task_documents]
    # Any action other than `start`, or starting from a non-ready state, is rejected.
    if job.action != Action.start or job_document.state != State.ready:
        raise exceptions.invalid_job_action(job.action, job_document.state)
    if job_document.scene_url is None:
        raise exceptions.invalid_scene_resource
    cluster.create_service(job_document.container_name)
    # Ask the autoscaler for enough capacity to run every task of this job.
    container_message = ContainerMessage(
        name = job_document.container_name,
        task_count = len(job_document.task_ids),
        upscaling = True)
    task_messages = [TaskMessage(**document.dict()) for document in task_documents]
    container_queue.publish([container_message], 'autoscaling')
    task_queue.publish(task_messages, job_document.container_name)
    job_document.state = State.running
    job_document.updated_at = utils.utc_now()
    database.update({ '_id': job_id }, job_document, 'jobs')
    state_queue.publish(JobMessage(**job_document.dict()), job_id)
    return JobResponse(**job_document.dict(), tasks = task_responses)
async def process_message(channel, method, job_message):
    """Consume a job state message and forward the job snapshot to the websocket.

    A message without an id acts as a heartbeat: it only verifies that the
    websocket is still active.  Any failure (closed websocket, unknown job,
    send timeout) stops the consumer and closes the channel connection.

    Note: `database`, `websocket`, `websocket_timeout` and
    `is_websocket_active` are presumably module/closure scope here — cannot
    be confirmed from this chunk.
    """
    try:
        if job_message.id is None:
            # Heartbeat message: just confirm the websocket is still open.
            assert await is_websocket_active()
        else:
            # Bug fix: the job id comes from the message itself; the previous
            # code referenced an undefined name `job_id`, so every non-heartbeat
            # message raised NameError and silently killed the consumer.
            job_document = database.find(job_message.id, 'jobs')
            assert job_document is not None
            task_documents = database.find_many({'_id': {'$in': job_document.task_ids}}, 'tasks')
            task_responses = [TaskResponse(**task_document.dict()) for task_document in task_documents]
            job_response = JobResponse(**job_document.dict(), tasks = task_responses)
            await asyncio.wait_for(
                websocket.send_text(job_response.json(exclude_unset = True)),
                websocket_timeout)
    except:  # noqa: E722 -- intentionally broad: any error terminates this consumer
        channel.stop_consuming()
        channel.connection.close()
def get(context, job_id, task_id = None):
    """Return the job identified by *job_id* together with its tasks.

    When *task_id* is given, only that task is embedded — and only if it
    actually belongs to the job; otherwise every task of the job is included.
    """
    database = context['database']
    job_document = database.find(job_id, 'jobs')
    if job_document is None:
        raise exceptions.invalid_resource_id
    if task_id is not None:
        # A task id outside this job yields an empty task list, not an error.
        task_documents = [database.find(task_id, 'tasks')] if task_id in job_document.task_ids else []
    else:
        task_documents = database.find_many({'_id': {'$in': job_document.task_ids}}, 'tasks')
    task_responses = [TaskResponse(**document.dict()) for document in task_documents]
    return JobResponse(**job_document.dict(), tasks = task_responses)
def upload_scene(context, job_id, scene):
    """Store the scene file for a ready job and publish the updated state.

    The scene's file extension must match one of the scene resource formats
    declared by the job's container; the upload is keyed under the job id.
    """
    storage = context['storage']
    database = context['database']
    state_queue = context['state_queue']
    job_document = database.find(job_id, 'jobs')
    if job_document is None:
        raise exceptions.invalid_resource_id
    if job_document.state != State.ready:
        raise exceptions.invalid_resource_state(job_document.state)
    task_documents = database.find_many({'_id': {'$in': job_document.task_ids}}, 'tasks')
    task_responses = [TaskResponse(**document.dict()) for document in task_documents]
    container_document = database.find({'name': job_document.container_name}, 'containers')
    file_extension = utils.get_file_extension(scene.filename)
    # Pick the first scene resource descriptor that supports this extension.
    resource_document = next(
        (resource for resource in container_document.scenes if file_extension in resource.extensions),
        None)
    if resource_document is None:
        raise exceptions.invalid_file_format
    content_type = resource_document.content_types[0]
    result = storage.upload(scene.file, content_type, 'scenes', f'{job_id}/scene{file_extension}')
    job_document.scene_url = result['resource_url']
    job_document.updated_at = utils.utc_now()
    database.update({ '_id': job_id }, job_document, 'jobs')
    state_queue.publish(JobMessage(**job_document.dict()), job_id)
    return JobResponse(**job_document.dict(), tasks = task_responses)
def submit(context, job):
    """Create a job plus one task per frame group and persist both.

    The requested frame range is partitioned across *job.parallelism* tasks;
    both the job and its tasks start in the ``ready`` state.
    """
    database = context['database']
    container_document = database.find({'name': job.container_name}, 'containers')
    if container_document is None:
        raise exceptions.invalid_container_name
    job_id = ObjectID()
    frame_groups = utils.group_frames(job.frame_range.start, job.frame_range.end, job.parallelism)
    # One ready task per contiguous group of frames.
    task_documents = [
        TaskDocument(
            job_id = job_id,
            frame_range = FrameRange(start = group[0], end = group[-1]),
            state = State.ready,
            retries = 0)
        for group in frame_groups]
    task_ids = [task.id for task in task_documents]
    job_document = JobDocument(
        **job.dict(),
        id = job_id,
        task_ids = task_ids,
        state = State.ready)
    database.save_many(task_documents, 'tasks')
    database.save(job_document, 'jobs')
    task_responses = [TaskResponse(**document.dict()) for document in task_documents]
    return JobResponse(**job_document.dict(), tasks = task_responses)
def upload_images(context, task_id, images):
    """Upload the rendered frames of a running task and record their urls.

    Exactly one image is expected per frame of the task's frame range.
    Images are sorted by filename and numbered from the task's first frame;
    every file extension must match an image resource format of the job's
    container.  All images are validated before any upload starts.
    """
    storage = context['storage']
    database = context['database']
    state_queue = context['state_queue']
    task_document = database.find(task_id, 'tasks')
    if task_document is None:
        raise exceptions.invalid_resource_id
    if task_document.state != State.running:
        raise exceptions.invalid_resource_state(task_document.state)
    frame_count = task_document.frame_range.end - task_document.frame_range.start + 1
    if frame_count != len(images):
        raise exceptions.image_resource_mismatch
    job_document = database.find(task_document.job_id, 'jobs')
    container_document = database.find({'name': job_document.container_name}, 'containers')
    # Pad frame numbers to a fixed width so the filenames sort lexicographically.
    pad_width = max(10, len(str(job_document.frame_range.end)))
    ordered_images = sorted(images, key = lambda image: image.filename)
    # First pass: validate every image and resolve its upload descriptor,
    # so a bad file format fails the whole call before anything is uploaded.
    resources = []
    for frame_number, image in enumerate(ordered_images, task_document.frame_range.start):
        file_extension = utils.get_file_extension(image.filename)
        resource_document = next(
            (resource for resource in container_document.images if file_extension in resource.extensions),
            None)
        if resource_document is None:
            raise exceptions.invalid_file_format
        padded_number = str(frame_number).rjust(pad_width, '0')
        resources.append({
            'file': image.file,
            'filename': f'image{padded_number}{file_extension}',
            'content_type': resource_document.content_types[0] })
    # Second pass: upload each resolved resource under the task's prefix.
    resource_urls = [
        storage.upload(
            resource['file'],
            resource['content_type'],
            'images',
            f'{task_id}/{resource["filename"]}')['resource_url']
        for resource in resources]
    task_document.image_urls = resource_urls
    task_document.updated_at = utils.utc_now()
    job_document.updated_at = utils.utc_now()
    database.update({ '_id': task_id }, task_document, 'tasks')
    database.update({ '_id': job_document.id }, job_document, 'jobs')
    state_queue.publish(JobMessage(**job_document.dict()), job_document.id)
    return TaskResponse(**task_document.dict(), job = JobResponse(**job_document.dict()))
def update(context, task_id, task):
    """Transition a task's state and propagate the consequences to its job.

    ``done``:    a running task with uploaded images is marked done.
    ``running``: a ready/running task is (re)started, up to the configured
                 retry limit; past that limit it is marked errored instead.
    Whenever a task reaches a terminal state, one container is released and,
    once no unresolved task remains, result packing is requested with the
    job's final state (``error`` if any task failed, ``done`` otherwise).
    """
    configuration = context['configuration']
    storage = context['storage']
    database = context['database']
    container_queue = context['container_queue']
    resource_queue = context['resource_queue']
    state_queue = context['state_queue']
    maximum_task_retries = int(configuration.get('API_MAXIMUM_TASK_RETRIES'))
    task_document = database.find(task_id, 'tasks')
    if task_document is None:
        raise exceptions.invalid_resource_id
    job_document = database.find(task_document.job_id, 'jobs')
    if job_document.state != State.running:
        raise exceptions.job_not_running
    def resolve_task():
        # This task no longer needs a container: ask the autoscaler to scale down.
        container_message = ContainerMessage(
            name = job_document.container_name,
            task_count = 1,
            upscaling = False)
        container_queue.publish([container_message], 'autoscaling')
        document_query = {
            '_id': {'$in': job_document.task_ids},
            'state': {'$in': [State.ready, State.running]} }
        unresolved_task_count = database.count(document_query, 'tasks')
        if unresolved_task_count == 0:
            # Every task is terminal: derive the job outcome and start packing.
            document_query['state'] = State.error
            error_task_count = database.count(document_query, 'tasks')
            job_state = State.error if error_task_count > 0 else State.done
            resource_message = ResourceMessage(job_id = job_document.id, job_state = job_state)
            resource_queue.publish([resource_message], 'packing')
    if task.state == State.done:
        if task_document.state != State.running:
            raise exceptions.invalid_task_state(task_document.state, task.state)
        if len(task_document.image_urls) == 0:
            raise exceptions.invalid_image_resources
        task_document.state = task.state
        task_document.updated_at = utils.utc_now()
        database.update({ '_id': task_id }, task_document, 'tasks')
        resolve_task()
    elif task.state == State.running:
        if task_document.state not in [State.ready, State.running]:
            raise exceptions.invalid_task_state(task_document.state, task.state)
        if task_document.retries < maximum_task_retries:
            task_document.state = task.state
            task_document.retries = task_document.retries + 1
        else:
            # Retry budget exhausted: fail the task instead of restarting it.
            task_document.state = State.error
        task_document.updated_at = utils.utc_now()
        database.update({ '_id': task_id }, task_document, 'tasks')
        if task_document.state == State.error:
            resolve_task()
    else:
        raise exceptions.invalid_task_state(task_document.state, task.state)
    job_document.updated_at = utils.utc_now()
    database.update({ '_id': job_document.id }, job_document, 'jobs')
    state_queue.publish(JobMessage(**job_document.dict()), job_document.id)
    return TaskResponse(**task_document.dict(), job = JobResponse(**job_document.dict()))