def job_complete_core(self, session):
    """
    Checks if the job is complete.

    TODO:
        Notifications upon completion
        Status changes of job
    """
    # For now we assume the stat counts here to be accurate.
    if self.stat_count_tasks - self.stat_count_complete != 0:
        return

    # WIP
    if self.type == "Exam":
        # Not implemented
        pass

    session.add(self)    # TODO don't add to session like this
    self.status = "complete"

    Event.new_deferred(
        session=session,
        kind='task_template_completed',
        project_id=self.project_id,
        member_id=get_member(session).id if get_member(session) else None,
        job_id=self.id,
        wait_for_commit=True)
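# Usage sketch (hedged): job_complete_core() returns early unless every task
# is complete, so it is cheap to call on each task update. Assuming a Job
# instance already attached to `session` (the names below are illustrative,
# not a confirmed API of this codebase):
#
#     job.stat_count_complete += 1      # e.g. after a task is marked complete
#     job.job_complete_core(session)    # no-op until all tasks are done
#     session.commit()                  # Event.new_deferred fires after commit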
def new(
        session,
        name: str,
        project_string_id: str,
        goal: str,
        user,
        member_created,
):
    # Avoid circular import
    # Maybe in future Project_permissions class could be part of project
    from shared.permissions.project_permissions import Project_permissions

    project = Project(    # Base object
        name=name,
        project_string_id=project_string_id,
        goal=goal,
        user_primary=user,    # Assumed to be primary
        member_created=member_created)

    # Permissions and user associations
    user.projects.append(project)
    user.current_project_string_id = project_string_id
    user.project_current = project

    permission_add_result, permission_add_error_message = Project_permissions.add(
        permission="admin",
        user=user,
        sub_type=project_string_id)

    # session.add() takes a single instance; adding both objects explicitly
    # (the previous session.add(user, project) silently dropped `project`).
    session.add(user)
    session.add(project)
    session.flush()

    member_id = user.member_id

    Event.new(
        session=session,
        kind="new_project",
        member_id=member_id,
        success=True,
        project_id=project.id,
        email=user.email    # Caution, assumes user object is available
    )

    project.directory_default = WorkingDir.new_user_working_dir(
        session,
        None,
        project,
        user,
        project_default_dir=True)

    report_dashboard = ReportDashboard.new(project_id=project.id)
    session.add(report_dashboard)

    session.flush()    # Needed to work with adding default directory

    # Careful, this expects a default dir already assigned
    Project_Directory_List.add_default(
        session=session,
        working_dir_id=project.directory_default.id,
        project=project)

    return project
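# Usage sketch (hedged): a minimal call to new() inside a session scope.
# Assumes the sessionMaker.session_scope() context manager used elsewhere in
# this codebase and an already-loaded `user`; argument values and the
# `user.member` attribute are illustrative only.
#
#     with sessionMaker.session_scope() as session:
#         project = Project.new(
#             session=session,
#             name="Demo project",
#             project_string_id="demo-project",
#             goal="Try out annotation",
#             user=user,
#             member_created=user.member)
#         # project.directory_default and its ReportDashboard now exist;
#         # the commit at the end of session_scope persists everything.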
def __fetch_object(self, opts):
    bucket = self.connection_client.get_bucket(opts['bucket_name'])
    blob = bucket.blob(opts['path'])
    blob_expiry = int(time.time() + (60 * 60 * 24 * 30))    # 30 days
    signed_url = blob.generate_signed_url(expiration=blob_expiry)

    # Deduce media type:
    # TODO share this with the existing process_media determine_media_type()
    extension = Path(opts['path']).suffix
    extension = extension.lower()
    media_type = None
    if extension in images_allowed_file_names:
        media_type = 'image'
    elif extension in videos_allowed_file_names:
        media_type = 'video'
    else:
        # TODO: Decide, do we want to raise an exception? Or just do nothing?
        log = regular_log.default()
        log['error']['invalid_type'] = 'File must be of type: {} {}'.format(
            str(images_allowed_file_names),
            str(videos_allowed_file_names))
        log['error']['file_name'] = opts['path']
        log['opts'] = opts
        with sessionMaker.session_scope() as session:
            Event.new(session=session,
                      member_id=opts['event_data']['request_user'],
                      kind='google_cloud_new_import_error',
                      description='New cloud import for {}'.format(opts['path']),
                      error_log=log)
        raise LookupError('File must be of type: {} {}'.format(
            str(images_allowed_file_names),
            str(videos_allowed_file_names)))

    # metadata = self.connection_client.head_object(Bucket=opts['bucket_name'], Key=path)

    with sessionMaker.session_scope() as session:
        created_input = packet.enqueue_packet(
            self.config_data['project_string_id'],
            session=session,
            media_url=signed_url,
            media_type=media_type,
            job_id=opts.get('job_id'),
            video_split_duration=opts.get('video_split_duration'),
            directory_id=opts.get('directory_id'))
        log = regular_log.default()
        log['opts'] = opts
        Event.new(session=session,
                  member_id=opts['event_data']['request_user'],
                  kind='google_cloud_new_import_success',
                  description='New cloud import for {}'.format(opts['path']),
                  error_log=log)    # was error_log=opts; pass the structured log

    return {'result': created_input}
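# Usage sketch (hedged): __fetch_object is private, so the shape of `opts` is
# the contract that matters. A minimal opts dict, inferred from the reads in
# the function above (keys come from this function; values are illustrative):
#
#     opts = {
#         'bucket_name': 'my-bucket',           # GCS bucket to read from
#         'path': 'videos/sample.mp4',          # blob path; suffix drives media_type
#         'job_id': None,                       # optional, forwarded to enqueue_packet
#         'video_split_duration': None,         # optional
#         'directory_id': None,                 # optional target directory
#         'event_data': {'request_user': 1},    # member_id used for Event.new
#     }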