def notification(self, action=None, context='transition'):
    """Notify workflow recipients about a state transition.

    Resolves recipients from the document's acl and fires ``ors_workflow``
    with the transition metadata (source/destination state, comment).

    :param action: resource name for the transition; when None it is looked
        up from ``WF_MOTORFLY_TRANSITIONS_ATTR`` via ``self.action``.
    :param context: notification context, defaults to 'transition'.
    """
    if action is None:
        action = WF_MOTORFLY_TRANSITIONS_ATTR[self.action]['resource']

    acl_cfg = self.db_wf.get('acl', {})

    if self.state == 'closed':
        # If closed - notify ONLY pre-existing acl members: strip the
        # roles that everybody gets on close (ACL_CLOSED_ALL_LIST).
        # Filter on copies: the original code rebound
        # tmp['read']['roles'] directly on the dict returned by
        # self.db_wf.get('acl'), which silently mutated the underlying
        # workflow document as a side effect.
        tmp = dict(acl_cfg)
        tmp['read'] = dict(tmp['read'])
        tmp['read']['roles'] = [
            x for x in tmp['read']['roles'] if x not in ACL_CLOSED_ALL_LIST
        ]
        acl = parse_acl_flat(tmp)
    else:
        acl = parse_acl_flat(acl_cfg)

    ors_workflow(
        recepients=acl,  # notify self too!
        event_from=RESOURCE_COLLECTION,
        event_from_id=self.db_wf['_id'],
        ors_id=self.db_wf['id'],
        org_id=self.db_wf.get('discipline'),
        ors_tags=self.db_wf.get('tags', []),
        action=action,
        source=self.initial_state,  # self._state_attrs[self.initial_state]['description'],
        destination=self.state,  # self._state_attrs[self.state]['description'],
        comment=self.comment,
        context=context)
def ors_after_update(updates, original):
    """Fire a 'save' notification after a plain document update.

    ``updates`` contains only the changed fields. The notification is
    skipped when the update is a workflow transition (``workflow.state``
    present in ``updates``) or when the document is already in a terminal
    state ('closed' / 'withdrawn').
    """
    # Workflow transitions are notified elsewhere - bail out early.
    if updates.get('workflow', {}).get('state', None) is not None:
        return

    # Terminal documents do not generate save notifications.
    if original.get('workflow', {}).get('state', 'original') in ['closed', 'withdrawn']:
        return

    ors_save(
        recepients=parse_acl_flat(original.get('acl', {}),
                                  exclude_current_user=False),
        event_from='sportsfly_observations',
        event_from_id=original.get('_id', None),
        source=original.get('_version', 1),
        destination=original.get('_version', 2) + 1,
        context='save'
    )
def message():
    """HTTP endpoint: post a free-text message on an observation.

    Expects a JSON body with ``event_from``, ``event_from_id`` and
    ``message``. Refuses (403) when the observation is closed/withdrawn,
    otherwise notifies every acl member except the sender.

    Returns an eve response with the recipient list, 422 on missing
    parameters, 500 on unexpected errors.
    """
    try:
        # ARGS
        args = request.get_json(force=True)  # use force=True to do anyway!
        event_from = args.get('event_from', None)
        event_from_id = args.get('event_from_id', None)
        raw_message = args.get('message', None)

        # Validate BEFORE stripping tags. The original called
        # strip_tags(None) when 'message' was missing, which raised and
        # surfaced as a 500 instead of the intended 422.
        if event_from is None or event_from_id is None or raw_message is None:
            return eve_abort(422, 'Missing parameters')

        msg = strip_tags(raw_message)

        # Can't do if closed or withdrawn
        status, acl, rest = get_acl(event_from,
                                    event_from_id,
                                    projection={
                                        'acl': 1,
                                        'workflow.state': 1,
                                        'id': 1,
                                        'discipline': 1,
                                        'tags': 1
                                    })

        if rest.get('workflow', {}).get('state', 'closed') in ['closed', 'withdrawn']:
            return eve_response_pppd(
                {
                    'data': 'Observasjonen er {}'.format(
                        rest.get('workflow', {}).get('state', 'closed'))
                }, 403,
                'Observation is {}'.format(
                    rest.get('workflow', {}).get('state', 'closed')))

        k = parse_acl_flat(acl)
        # Never message the sender itself
        recepients = [x for x in k if x != app.globals.get('user_id', None)]

        ors_message(recepients=recepients,
                    event_from=event_from,
                    event_from_id=event_from_id,
                    message=msg,
                    ors_id=rest.get('id', None),
                    org_id=rest.get('discipline', None),
                    ors_tags=rest.get('tags', []))

        return eve_response(recepients, 200)
    except Exception as e:
        app.logger.exception('Error creating message for observation')
        return eve_response({}, 500)
def reminder():
    """HTTP endpoint: send a reminder notification for an observation.

    Flow:
      1) validate parameters
      2) refuse (403) when the observation is closed/withdrawn
      3) resolve recipients from the acl
      4) drop recipients still within the reminder grace period
         (``get_within_delay``) and never notify the sender
      5) fire ``ors_reminder`` for the remainder

    Returns the recipient list (200), 422 on missing parameters,
    429 when everybody is inside the grace period, 404 when nobody is
    left to notify, 500 on unexpected errors.
    """
    try:
        # Args
        args = request.get_json(force=True)  # use force=True to do anyway!
        event_from = args.get('event_from', None)
        event_from_id = args.get('event_from_id', None)

        # The original had two back-to-back checks here: the first
        # returned a misleading 403 'Observation is closed' and the
        # second compared the module-level `message` function to None
        # (always false, and unreachable anyway). A single 422 mirrors
        # the message() endpoint.
        if event_from is None or event_from_id is None:
            return eve_abort(422, 'Missing parameters')

        status, acl, rest = get_acl(event_from,
                                    event_from_id,
                                    projection={
                                        'acl': 1,
                                        'workflow.state': 1,
                                        'id': 1,
                                        'discipline': 1,
                                        'tags': 1
                                    })

        if rest.get('workflow', {}).get('state', 'closed') in ['closed', 'withdrawn']:
            return eve_response_pppd(
                {
                    'data': 'Observasjonen er {}'.format(
                        rest.get('workflow', {}).get('state', 'closed'))
                }, 403,
                'Observation is {}'.format(
                    rest.get('workflow', {}).get('state', 'closed')))

        recepients = parse_acl_flat(acl)
        disapproved_users = get_within_delay(
            event_from_id, 'ors_reminder',
            recepients) if len(recepients) > 0 else []

        # Check if same users
        # @TODO investigate if the grace period should apply per user
        recepients.sort()
        disapproved_users.sort()
        if disapproved_users == recepients:
            # Everybody is still inside the grace period.
            # NOTE(review): the original rendered utcnow() MINUS the
            # delta - a moment in the past - as the 'wait until' time.
            # utcnow() + delta at least points forward; ideally this
            # should be last-notification-time + REMINDER_DELTA.
            return eve_response_pppd(
                {
                    'data': 'Please wait for the remaining graceperiod until {}'.format(
                        datetime.datetime.utcnow() +
                        datetime.timedelta(seconds=REMINDER_DELTA))
                }, 429, 'Too soon to send notification')

        # Remove users inside the grace period and the sender itself
        recepients = [
            x for x in recepients
            if x not in disapproved_users and x != app.globals.get('user_id', None)
        ]
        if len(recepients) == 0:
            return eve_response_pppd({'data': 'Fant ingen å sende til'}, 404,
                                     'Found no recepients!')

        # Create notification
        ors_reminder(recepients,
                     event_from=event_from,
                     event_from_id=event_from_id,
                     ors_id=rest.get('id', None),
                     org_id=rest.get('discipline', None),
                     ors_tags=rest.get('tags', []))

        return eve_response(recepients, 200)
    except Exception as e:
        app.logger.exception('Error creating reminder for observation')
        return eve_response({}, 500)
def generate(activity, _id):
    """Generate and deliver an E5X report file for one observation.

    Looks up the observation by ``_id`` + ``_etag`` (only if the current
    user has execute access via acl users or roles), dumps the e5x payload
    to JSON, renders XML via the external ``e5x-generate.js`` node script,
    zips it (with any attached files fetched from GridFS) into a ``.e5x``
    archive, transports it over SFTP (instance-dependent config), records
    an audit entry back on the document and notifies acl members.

    NOTE(review): presumably E5X here is the ECCAIRS occurrence-reporting
    exchange format - confirm against the e5x-generate.js tooling.

    :param activity: activity name, used to pick '<activity>_observations'.
    :param _id: document ObjectId to generate for.
    :returns: eve_response - 200 with audit data on success, 422 otherwise.
    """
    data = request.get_json(force=True)
    resource_collection = '{}_observations'.format(activity)
    col = app.data.driver.db[resource_collection]

    # Match on _id AND client-supplied _etag (optimistic concurrency),
    # restricted to documents the current user may execute on.
    cursor = col.find({
        '$and': [{
            '_etag': data.get('_etag', None),
            '_id': _id
        }, {
            '$or': [{
                'acl.execute.users': {
                    '$in': [app.globals['user_id']]
                }
            }, {
                'acl.execute.roles': {
                    '$in': app.globals['acl']['roles']
                }
            }]
        }]
    })
    # NOTE(review): cursor.count() is deprecated in modern pymongo and
    # total_items is never used below - candidate for removal.
    total_items = cursor.count()
    _items = list(cursor)

    # Proceed only on an unambiguous single match.
    if (len(_items) == 1):
        ors = _items[0]
        # Per-document working dir: <E5X_WORKING_DIR>/<activity>/<id>/<version>
        FILE_WORKING_DIR = '{}/{}/{}/{}'.format(app.config['E5X_WORKING_DIR'],
                                                activity, ors.get('id'),
                                                ors.get('_version'))
        file_name = 'nlf_{}_{}_v{}'.format(
            ors.get('_model', {}).get('type', None), ors.get('id'),
            ors.get('_version'))

        if generate_structure(activity, ors.get('id'),
                              ors.get('_version')) is True:
            app.logger.debug('[E5X] Structure ok')

            # Process attached files: copy each GridFS blob into the
            # working dir and register it in the e5x reportingHistory.
            file_list = []
            if len(ors.get('files', [])) > 0:
                app.logger.debug('[E5X] Adding files')
                col_files = app.data.driver.db['files']

                # Reset the report attachment list before (re)adding.
                data['e5x']['entities']['reportingHistory'][0]['attributes'][
                    'report'] = []

                # Folder for files (named after the archive base name)
                files_working_path = '{}/{}'.format(FILE_WORKING_DIR,
                                                    file_name)
                if os.path.exists(files_working_path) is False:
                    _, stdout, stderr = execute(['mkdir', file_name],
                                                FILE_WORKING_DIR)
                    app.logger.debug('[E5X] Created folder for files')

                for key, _file in enumerate(ors.get('files', [])):
                    try:
                        # File metadata document; _file['f'] is the id.
                        file = col_files.find_one(
                            {'_id': ObjectId(_file['f'])})
                        """ @TODO need to verify size for LT """
                        try:
                            grid_fs = GridFS(app.data.driver.db)
                            # NOTE(review): missing blobs are silently
                            # skipped (pass) - confirm that is intended.
                            if not grid_fs.exists(_id=file['file']):
                                pass
                            else:
                                stream = grid_fs.get(
                                    file['file']
                                )  # get_last_version(_id=file['file'])
                                # Key prefix keeps duplicate names unique.
                                file_list.append('{}/{}-{}'.format(
                                    file_name, key, file['name']))
                                with open(
                                        '{}/{}-{}'.format(
                                            files_working_path, key,
                                            file['name']), 'wb') as f:
                                    f.write(stream.read())
                                try:
                                    # Register the attachment name in the
                                    # e5x report payload.
                                    data['e5x']['entities'][
                                        'reportingHistory'][0]['attributes'][
                                            'report'].append({
                                                'fileName':
                                                '{}-{}'.format(
                                                    key, file['name']),
                                                'description':
                                                ''
                                            })
                                except Exception as e:
                                    app.logger.exception(
                                        "[ERROR] Could not add file name to report"
                                    )
                                    app.logger.error(e)
                        except Exception as e:
                            app.logger.exception("[ERR] Getting files")
                    except KeyError as e:
                        app.logger.exception(
                            "[ERROR] Could not add file, KeyError: {}".format(
                                _file))
                        app.logger.error(e)
                    except Exception as e:
                        app.logger.exception(
                            "[ERROR] Could not add file, unknown error: {}".
                            format(_file))
                        app.logger.error(e)

            try:
                app.logger.debug('[III] In try')
                json_file_name = '{}.json'.format(file_name)

                # 1 Dump the (pruned) e5x payload to a JSON file
                with open('{}/{}'.format(FILE_WORKING_DIR, json_file_name),
                          'w') as f:
                    json.dump(remove_empty_nodes(data.get('e5x', {})), f)

                # 2 Generate xml file
                # e5x-generate.js will make folder relative to e5x-generate.js
                _, stdout, stderr = execute([
                    'node', 'e5x-generate.js',
                    str(ors.get('id')),
                    str(ors.get('_version')), activity,
                    str(data.get('rit_version', E5X_RIT_DEFAULT_VERSION))
                ], app.config['E5X_WORKING_DIR'])
                app.logger.debug('[STDOUT] {}'.format(stdout))
                app.logger.debug('[STDERR] {}'.format(stderr))

                # 3 Zip it! Add files to it!
                # Anything on stderr from the node step aborts delivery.
                if stderr.rstrip() == '':
                    # NOTE(review): sleep presumably waits for the xml to
                    # land on disk - confirm whether it is still needed.
                    time.sleep(0.5)
                    cmds = [
                        'zip', '{}.e5x'.format(file_name),
                        '{}.xml'.format(file_name)
                    ]
                    cmds += file_list
                    _, stdout, stderr = execute(cmds, FILE_WORKING_DIR)

                    try:
                        # reportStatus value, default 5 when absent.
                        status = data.get('e5x').get('entities', {}) \
                            .get('reportingHistory', [])[0] \
                            .get('attributes', {}) \
                            .get('reportStatus', {}).get('value', 5)
                    except Exception as e:
                        # NOTE(review): `status` may be unbound here if the
                        # lookup failed before assignment - the format()
                        # would then raise NameError inside the handler.
                        app.logger.exception(
                            'Error gettings status {}'.format(status))
                        status = 0

                    # SFTP DELIVERY!
                    # Only dev and prod should be able to deliver to LT
                    if app.config.get('APP_INSTANCE',
                                      '') == 'dev-removeme-test':
                        from ext.scf import LT_SFTP_TEST_CFG as SFTP
                    elif app.config.get('APP_INSTANCE', '') == 'prod':
                        from ext.scf import LT_SFTP_CFG as SFTP
                    else:
                        app.logger.warning(
                            'No SFTP settings for this instance')
                        SFTP = False

                    # Manual kill-switch set via local E5X_SEND_TO_LT
                    if E5X_SEND_TO_LT is False:
                        SFTP = False

                    transport_status, transport = transport_e5x(
                        FILE_WORKING_DIR, file_name, SFTP)

                    # Some audit and bookkeeping
                    audit = ors.get('e5x', {}).get('audit', [])
                    audit.append({
                        'date': datetime.datetime.now(),
                        'person_id': app.globals.get('user_id'),
                        'sent': transport_status,
                        'status': status,
                        'version': ors.get('_version'),
                        'file': '{}.e5x'.format(file_name),
                        'rit_version': data.get('rit_version',
                                                E5X_RIT_DEFAULT_VERSION),
                        'e5y': transport
                    })
                    """
                    'e5y': {
                        'key': 'abrakadabra',
                        'number': 'c5de0c62-fbc9-4202-bbe8-ff52c1e79ae0',
                        'path': '/OCCS/A24A5466CDD843FFAAAA2DA663762C5E.E4O',
                        'created': '2019-06-19T22:57:46.6719259+02:00',
                        'modified': '2019-06-19T22:57:46.6719259+02:00',
                        'taxonomy': '4.1.0.6'
                    }
                    """
                    e5x = {
                        'audit': audit,
                        'status': 'sent',
                        'latest_version': ors.get('_version')
                    }
                    # Write audit/status back, guarded by the stored _etag.
                    _update = col.update_one(
                        {
                            '_id': ors.get('_id'),
                            '_etag': ors.get('_etag')
                        }, {'$set': {
                            'e5x': e5x
                        }})
                    # NOTE(review): UpdateResult is always truthy, so this
                    # branch never fires - `_update.modified_count` is
                    # probably what was meant; verify before changing.
                    if not _update:
                        app.logger.error(
                            'Error storing e5x delivery message in database')

                    try:
                        # Notify all acl members about the delivery.
                        recepients = parse_acl_flat(ors.get('acl', {}))
                        ors_e5x(recepients=recepients,
                                event_from=resource_collection,
                                event_from_id=ors.get('_id', ''),
                                source=ors.get('_version', ''),
                                status=status,
                                ors_id=ors.get('id', None),
                                ors_tags=ors.get('tags', []),
                                file_name='{}.e5x'.format(file_name),
                                transport='sftp',
                                context='sent')
                        """
                        #### TEST EMAIL!
                        recepients = list(set([app.globals.get('user_id')] +
                                              ors.get('organization', {}).get('ors', []) +
                                              ors.get('organization', {}).get('dto', [])
                                              ))
                        # print('RECEPIENTS', recepients)
                        message = 'Hei\n\nDette er en leveringsbekreftelse for OBSREG #{0} versjon {1}\n\n \
                        Levert:\t{2}\n\
                        Status:\t{3}\n\
                        Fil:\t{4}\n\
                        Levert via:\t{5}\n\
                        Instans:\t{6}\n'.format(ors.get('id', ''),
                                                ors.get('_version', ''),
                                                datetime.datetime.now(),
                                                status,
                                                '{}.e5x'.format(file_name),
                                                'sftp',
                                                app.config.get('APP_INSTANCE', ''))
                        subject = 'E5X Leveringsbekreftelse OBSREG {0} v{1}'.format(ors.get('id', ''),
                                                                                    ors.get('_version', ''))
                        notify(recepients, subject, message)
                        """
                    except Exception as e:
                        app.logger.exception(
                            'Error delivering e5x delivery notification')

                    return eve_response(
                        {
                            'e5x': {
                                'audit': audit
                            },
                            'err': traceback.format_exc()
                        }, 200)
                else:
                    app.logger.error('STDERR: {}'.format(stderr))
            except Exception as e:
                app.logger.exception('Error processing e5x file')
                return eve_response(
                    {
                        'ERR': 'Could not process',
                        'err': traceback.format_exc()
                    }, 422)

    # Fallthrough: no (unique) match, structure failure or node error.
    return eve_response(
        {
            'ERR': 'Could not process e5x',
            'err': traceback.format_exc()
        }, 422)