def _after_login_hook(sender, user, **extra):
    """Signal handler fired after a successful login.

    Clears the failed-login counter stored in the Flask session (if any)
    and records a login activity entry for the user.
    """
    from flask import session

    failed_count = session.get('failed')
    if failed_count:
        # Reset the failed-attempt counter now that the login succeeded.
        del session['failed']
        print('login counter cleared')
    Activity.create(user, Activity.ACTION_LOGIN, user.to_mini(), 'user')
def bulk_update_bulletins(ids, bulk, cur_user_id):
    """Apply the same set of field updates to many bulletins at once.

    :param ids: iterable of bulletin ids to update
    :param bulk: dict of field updates to apply to each bulletin
    :param cur_user_id: id of the user performing the operation (used for
        the revision history and the activity record)
    :return: None
    """
    # Lightweight stand-in for the current user: only ``.id`` is read below.
    u = {'id': cur_user_id}
    cur_user = namedtuple('cur_user', u.keys())(*u.values())

    mappings = []
    for bid in ids:
        tmp = bulk.copy()
        tmp['id'] = bid
        # A trailing '*' marks this change as a bulk operation in the log.
        tmp['comments'] = tmp.get('comments', '') + '*'

        # ----- handle refs update without losing existing values -------
        # Grab the bulletin's existing refs (list); normalize None to [].
        refs = Bulletin.query.with_entities(
            Bulletin.ref).filter_by(id=bid).first().ref
        if not refs:
            refs = []
        if tmp.get('refReplace'):
            # Replace mode: keep only the refs supplied in the bulk payload.
            tmp['ref'] = tmp.get('ref', [])
        else:
            # Append mode: merge new refs after the existing ones.
            tmp['ref'] = refs + tmp.get('ref', [])

        # Handle automatic status assignment when no explicit status given.
        if 'status' not in tmp:
            # BUGFIX: previously `status` was only assigned inside the two
            # conditionals, so a bulletin with neither id raised NameError on
            # the first iteration and silently inherited the *previous*
            # bulletin's status on later iterations.
            status = None
            if tmp.get('assigned_to_id'):
                status = 'Assigned'
            if tmp.get('first_peer_reviewer_id'):
                # Peer review assignment takes precedence over plain assignment.
                status = 'Peer Review Assigned'
            if status is not None:
                tmp['status'] = status
        mappings.append(tmp)
    db.session.bulk_update_mappings(Bulletin, mappings)

    # Create one revision (history) row per updated bulletin.
    revmaps = []
    bulletins = Bulletin.query.filter(Bulletin.id.in_(ids)).all()
    for bulletin in bulletins:
        # this commits automatically
        revmaps.append({
            'bulletin_id': bulletin.id,
            'user_id': cur_user.id,
            'data': bulletin.to_dict(),
        })
    db.session.bulk_insert_mappings(BulletinHistory, revmaps)

    # Record Activity
    updated = [b.to_mini() for b in bulletins]
    Activity.create(cur_user, Activity.ACTION_BULK_UPDATE, updated, 'bulletin')
    print("Bulletins Bulk Update Successful")
def bulk_update_incidents(ids, bulk, cur_user_id):
    """Apply the same set of field updates to many incidents at once.

    :param ids: iterable of incident ids to update
    :param bulk: dict of field updates to apply to each incident
    :param cur_user_id: id of the user performing the operation (used for
        the revision history and the activity record)
    :return: None
    """
    # Lightweight stand-in for the current user: only ``.id`` is read below.
    u = {'id': cur_user_id}
    cur_user = namedtuple('cur_user', u.keys())(*u.values())

    mappings = []
    for bid in ids:
        tmp = bulk.copy()
        tmp['id'] = bid
        # A trailing '*' marks this change as a bulk operation in the log.
        tmp['comments'] = tmp.get('comments', '') + '*'

        # Handle automatic status assignment when no explicit status given.
        if 'status' not in tmp:
            # BUGFIX: previously `status` was only assigned inside the two
            # conditionals, so an incident with neither id raised NameError on
            # the first iteration and silently inherited the *previous*
            # incident's status on later iterations.
            status = None
            if tmp.get('assigned_to_id'):
                status = 'Assigned'
            if tmp.get('first_peer_reviewer_id'):
                # Peer review assignment takes precedence over plain assignment.
                status = 'Peer Review Assigned'
            if status is not None:
                tmp['status'] = status
        mappings.append(tmp)
    db.session.bulk_update_mappings(Incident, mappings)

    # Create one revision (history) row per updated incident.
    revmaps = []
    incidents = Incident.query.filter(Incident.id.in_(ids)).all()
    for incident in incidents:
        # this commits automatically
        revmaps.append({
            'incident_id': incident.id,
            'user_id': cur_user.id,
            'data': incident.to_dict(),
        })
    db.session.bulk_insert_mappings(IncidentHistory, revmaps)

    # Record Activity
    updated = [i.to_mini() for i in incidents]
    Activity.create(cur_user, Activity.ACTION_BULK_UPDATE, updated, 'incident')
    print("Incidents Bulk Update Successful")
def create_bulletin(self, info):
    """
    Create a bulletin (plus its attached Media) from a file's extracted
    metadata, save it, create its first revision, and record the creation
    activity. Progress is appended to ``self.summary`` and written to
    ``self.log``.

    :param info: dict of extracted file metadata (EXIF/MIME keys, filename,
        filepath, etag, old_path, optional ``vduration``)
    :return: None (despite earlier docs, the created bulletin is not returned)
    """
    bulletin = Bulletin()
    # mapping: copy file metadata onto the new bulletin
    bulletin.title = info.get('bulletinTitle')
    bulletin.status = 'Machine Created'
    bulletin.comments = 'Created by ETL - {} '.format(self.batch_id)
    # Use the EXIF creation date as the documentation date when parseable.
    create = info.get('EXIF:CreateDate')
    if create:
        create_date = DateHelper.file_date_parse(create)
        if create_date:
            bulletin.documentation_date = create_date
    # refs always include the batch id; add the camera serial when present.
    refs = [str(self.batch_id)]
    serial = info.get('EXIF:SerialNumber')
    if serial:
        refs.append(str(serial))
    media = Media()
    media.title = bulletin.title
    media.media_file = info.get('filename')
    # handle mime type failure
    mime_type = info.get('File:MIMEType')
    duration = info.get('vduration')
    if duration:
        media.duration = duration
        print('duration set')
    if not mime_type:
        # Unknown file type: log the problem, best-effort delete the file,
        # and abort without creating the bulletin.
        self.summary += now() + 'Problem retrieving file mime type !' + '\n'
        print('Problem retrieving file mime type ! \n')
        try:
            os.remove(info.get('filepath'))
            print('unknown file type cleaned ')
        except OSError:
            # File already gone or not removable; nothing more to do.
            pass
        self.summary += '------------------------------------------------------------------------\n\n'
        return
    media.media_file_type = mime_type
    media.etag = info.get('etag')
    bulletin.medias.append(media)
    # add additional meta data: link sources/labels/locations selected in
    # self.meta (lists of {'id': ...} dicts) by querying their ids.
    sources = self.meta.get('sources')
    if sources:
        ids = [s.get('id') for s in sources]
        bulletin.sources = Source.query.filter(Source.id.in_(ids)).all()
    labels = self.meta.get('labels')
    if labels:
        ids = [l.get('id') for l in labels]
        bulletin.labels = Label.query.filter(Label.id.in_(ids)).all()
    vlabels = self.meta.get('ver_labels')
    if vlabels:
        ids = [l.get('id') for l in vlabels]
        bulletin.ver_labels = Label.query.filter(Label.id.in_(ids)).all()
    locations = self.meta.get('locations')
    if locations:
        ids = [l.get('id') for l in locations]
        bulletin.locations = Location.query.filter(
            Location.id.in_(ids)).all()
    # Extra refs supplied with the batch are appended after the automatic ones.
    mrefs = self.meta.get('refs')
    if mrefs:
        refs = refs + mrefs
    bulletin.ref = refs
    user = User.query.get(self.user_id)
    bulletin.source_link = info.get('old_path')
    bulletin.save()
    bulletin.create_revision(user_id=user.id)
    self.summary += 'Bulletin ID: {} \n'.format(bulletin.id)
    print("bulletin ID : ", bulletin.id)
    # Record bulletin creation activity
    Activity.create(user, Activity.ACTION_CREATE, bulletin.to_mini(),
                    'bulletin')
    self.summary += '------------------------------------------------------------------------\n\n'
    self.log.write(self.summary)
def _after_logout_hook(sender, user, **extra):
    """Signal handler fired after logout: record a logout activity entry."""
    user_snapshot = user.to_mini()
    Activity.create(user, Activity.ACTION_LOGOUT, user_snapshot, 'user')
def process(self, user_id=1):
    """
    Compare this video-deduplication match against existing bulletins and
    establish a bulletin-to-bulletin relation if one does not already exist,
    based on the distance parameter provided by Benetech's video
    deduplication tool.

    :param user_id: id of the user the update activities are attributed to
        (defaults to 1 — presumably a system/admin user; TODO confirm)
    :return: None
    """
    print("Processing match {}: {},{}".format(self.id, self.query_video,
                                              self.match_video))
    # Status codes written by this method:
    #   1 = relation already exists, 2 = distance above max threshold,
    #   3 = relation created, 4 = one or both bulletins not found.
    if self.distance > CONFIG.DEDUP_MAX_DISTANCE:
        # Too far apart to be considered related — skip.
        self.status = 2
    elif self.query_video != self.match_video:
        b1 = Bulletin.query.filter_by(originid=self.query_video).first()
        b2 = Bulletin.query.filter_by(originid=self.match_video).first()
        if b1 and b2:
            # Relation key is the ordered (bulletin_id, related_bulletin_id)
            # pair, so each pair is stored only once regardless of direction.
            rel_ids = sorted((b1.id, b2.id))
            relation = Btob.query.get(rel_ids)
            if relation:
                self.status = 1
            else:
                b = Btob(bulletin_id=rel_ids[0],
                         related_bulletin_id=rel_ids[1])
                # related_as 5/6 appear to encode duplicate vs. related —
                # TODO confirm against the Btob relation-type enum.
                if self.distance < CONFIG.DEDUP_LOW_DISTANCE:
                    b.related_as = 5
                    self.notes = "Potentially Duplicate"
                if self.distance >= CONFIG.DEDUP_LOW_DISTANCE and self.distance <= CONFIG.DEDUP_MAX_DISTANCE:
                    b.related_as = 6
                    self.notes = "Potentially Related"
                # Store the raw distance on the relation for later review.
                b.comment = '{}'.format(self.distance)
                b.save()
                revision_comment = 'Btob (type {}) created from match {}-{} distance {}'.format(
                    b.related_as, rel_ids[0], rel_ids[1], self.distance)
                b1.comments = revision_comment
                b2.comments = revision_comment
                # Save Bulletins and register activities
                b1.create_revision()
                user = User.query.get(user_id)
                Activity.create(user, Activity.ACTION_UPDATE, b1.to_mini(),
                                'bulletin')
                b2.create_revision()
                Activity.create(user, Activity.ACTION_UPDATE, b2.to_mini(),
                                'bulletin')
                relation_dict = {
                    'class': 'btob',
                    'b1': '{}'.format(b.bulletin_id),
                    'b2': '{}'.format(b.related_bulletin_id),
                    'type': '{}'.format(b.related_as)
                }
                self.status = 3
                self.result = relation_dict
        else:
            self.status = 4
    # NOTE(review): a self-match (query_video == match_video) within the
    # distance range falls through with no status assigned — confirm intended.
    self.save()
    print("Completed match {}".format(self.id))