def receiver_serialize_receiver(receiver, language=GLSetting.memory_copy.default_language):
    receiver_dict = {
        "receiver_gus": receiver.id,
        "name": receiver.name,
        "update_date": pretty_date_time(receiver.last_update),
        "creation_date": pretty_date_time(receiver.creation_date),
        "receiver_level": receiver.receiver_level,
        "can_delete_submission": receiver.can_delete_submission,
        "username": receiver.user.username,
        "gpg_key_info": receiver.gpg_key_info,
        "gpg_key_fingerprint": receiver.gpg_key_fingerprint,
        "gpg_key_remove": False,
        "gpg_key_armor": receiver.gpg_key_armor,
        "gpg_key_status": receiver.gpg_key_status,
        "gpg_enable_notification": receiver.gpg_enable_notification,
        "gpg_enable_files": receiver.gpg_enable_files,
        "tags": receiver.tags,
        "tip_notification": receiver.tip_notification,
        "file_notification": receiver.file_notification,
        "comment_notification": receiver.comment_notification,
        "notification_fields": dict(receiver.notification_fields),
        "failed_login": receiver.user.failed_login_count,
        "contexts": []
    }

    mo = Rosetta()
    mo.acquire_storm_object(receiver)
    receiver_dict["description"] = mo.dump_translated('description', language)

    for context in receiver.contexts:
        receiver_dict['contexts'].append(context.id)

    return receiver_dict
def anon_serialize_receiver(receiver, language=GLSetting.memory_copy.default_language):
    """
    @param receiver: a valid Storm object
    @return: a dict describing a receiver available in the node, or None
        (e.g. it checks that at least one context is associated and, in nodes
        where GPG encryption is enforced, that a valid key is registered)
    """
    receiver_dict = {
        "contexts": [],
    }

    for context in receiver.contexts:
        receiver_dict['contexts'].append(unicode(context.id))

    if not len(receiver_dict['contexts']):
        return None

    receiver_dict.update({
        "can_delete_submission": receiver.can_delete_submission,
        "creation_date": pretty_date_time(receiver.creation_date),
        "update_date": pretty_date_time(receiver.last_update),
        "name": unicode(receiver.name),
        "receiver_gus": unicode(receiver.id),
        "receiver_level": int(receiver.receiver_level),
        "tags": receiver.tags,
    })

    mo = Rosetta()
    mo.acquire_storm_object(receiver)
    receiver_dict['description'] = mo.dump_translated('description', language)

    return receiver_dict
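# Usage sketch (an assumption about the call sites, which are not shown here):
# since anon_serialize_receiver() returns None for receivers without any
# associated context, public handlers are expected to drop those entries, e.g.:
#
#   public_receivers = [rd for rd in (anon_serialize_receiver(r, language)
#                                     for r in store.find(Receiver))
#                       if rd is not None]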
def admin_serialize_context(context, receipt_output, language=GLSetting.memory_copy.default_language):
    context_dict = {
        "context_gus": context.id,
        "creation_date": utility.pretty_date_time(context.creation_date),
        "last_update": utility.pretty_date_time(context.last_update),
        "selectable_receiver": context.selectable_receiver,
        "tip_max_access": context.tip_max_access,
        "file_max_download": context.file_max_download,
        "escalation_threshold": context.escalation_threshold,
        "receivers": [],
        "receipt_regexp": context.receipt_regexp,
        "receipt_example": receipt_output,
        "tags": context.tags if context.tags else [],
        "file_required": context.file_required,
        # the tip TTL is exposed in days, the submission TTL in hours
        "tip_timetolive": context.tip_timetolive / (60 * 60 * 24),
        "submission_timetolive": context.submission_timetolive / (60 * 60),
        "select_all_receivers": context.select_all_receivers
    }

    mo = structures.Rosetta()
    mo.acquire_storm_object(context)
    for attr in mo.get_localized_attrs():
        context_dict[attr] = mo.dump_translated(attr, language)

    fo = structures.Fields(context.localized_fields, context.unique_fields)
    context_dict['fields'] = fo.dump_fields(language)

    for receiver in context.receivers:
        context_dict['receivers'].append(receiver.id)

    return context_dict
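# Conversion sketch (a hypothetical helper, mirroring the divisions above): the
# TTLs are stored in seconds and exposed to the admin UI in days (tip) and
# hours (submission), so the inverse conversion applies when a context update
# comes back from the UI.

def _ttl_to_seconds_sketch(tip_days, submission_hours):
    # inverse of the serialization above: days -> seconds, hours -> seconds
    return tip_days * 24 * 60 * 60, submission_hours * 60 * 60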
def admin_serialize_node(node, language=GLSetting.memory_copy.default_language):
    node_dict = {
        "name": node.name,
        "presentation": node.presentation,
        "creation_date": utility.pretty_date_time(node.creation_date),
        "last_update": utility.pretty_date_time(node.last_update),
        "hidden_service": node.hidden_service,
        "public_site": node.public_site,
        "stats_update_time": node.stats_update_time,
        "email": node.email,
        "version": GLSetting.version_string,
        "languages_supported": LANGUAGES_SUPPORTED,
        "languages_enabled": node.languages_enabled,
        "default_language": node.default_language,
        "maximum_filesize": node.maximum_filesize,
        "maximum_namesize": node.maximum_namesize,
        "maximum_textsize": node.maximum_textsize,
        "exception_email": node.exception_email,
        "tor2web_admin": GLSetting.memory_copy.tor2web_admin,
        "tor2web_submission": GLSetting.memory_copy.tor2web_submission,
        "tor2web_tip": GLSetting.memory_copy.tor2web_tip,
        "tor2web_receiver": GLSetting.memory_copy.tor2web_receiver,
        "tor2web_unauth": GLSetting.memory_copy.tor2web_unauth,
        "postpone_superpower": node.postpone_superpower,
    }

    mo = structures.Rosetta()
    mo.acquire_storm_object(node)
    for attr in mo.get_localized_attrs():
        node_dict[attr] = mo.dump_translated(attr, language)

    return node_dict
def serialize_receivertip(rtip):
    rtip_dict = {
        'id': unicode(rtip.id),
        'creation_date': unicode(pretty_date_time(rtip.creation_date)),
        'last_access': unicode(pretty_date_time(rtip.last_access)),
        'expressed_pertinence': unicode(rtip.expressed_pertinence),
        'access_counter': int(rtip.access_counter),
    }
    return rtip_dict
def get_tiptime_by_marker(store, marker):
    assert marker in InternalTip._marker

    itip_list = store.find(InternalTip, InternalTip.mark == marker)

    tipinfo_list = []
    for itip in itip_list:
        comment_cnt = store.find(Comment, Comment.internaltip_id == itip.id).count()
        files_cnt = store.find(InternalFile, InternalFile.internaltip_id == itip.id).count()

        if not itip.context:
            log.err("A Tip is related to a non-existent Context! This would not happen if delete-on-cascade were working")
            # force the removal by giving the Tip one second of life
            tip_timetolive = 1
            submission_timetolive = 1
        else:
            tip_timetolive = itip.context.tip_timetolive
            submission_timetolive = itip.context.submission_timetolive

        serialized_tipinfo = {
            "id": itip.id,
            "creation_date": pretty_date_time(itip.creation_date),
            "tip_life_seconds": tip_timetolive,
            "submission_life_seconds": submission_timetolive,
            "files": files_cnt,
            "comments": comment_cnt,
        }
        tipinfo_list.append(serialized_tipinfo)

    return tipinfo_list
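# Hypothetical consumer sketch (not part of this module): a cleaning routine can
# derive the absolute expiration instant from the serialized info, because
# "tip_life_seconds" carries the context TTL in seconds. Here the creation time
# is assumed to be available as a datetime object (as on the Storm model),
# rather than the pretty-printed string.

def _tip_expiration_sketch(creation_datetime, tip_life_seconds):
    from datetime import timedelta
    # the tip expires tip_life_seconds after its creation instant
    return creation_datetime + timedelta(seconds=tip_life_seconds)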
def _fill_event(self, type, trigger, trigger_id):
    if type == u'tip' and trigger == 'Tip':
        receiver_dict = yield admin.get_receiver(self.createdReceiver['receiver_gus'])
        context_dict = yield admin.get_context(self.createdContext['context_gus'])
        notif_dict = yield admin.get_notification()

        yield admin.import_memory_variables()
        node_dict = yield admin.get_node()

        self.event = Event(
            type=u'tip',
            trigger='Tip',
            notification_settings=notif_dict,
            node_info=node_dict,
            receiver_info=receiver_dict,
            context_info=context_dict,
            plugin=None,
            trigger_info={
                'id': trigger_id,
                'creation_date': pretty_date_time(datetime_now())
            }
        )
    elif type == u'comment' and trigger == 'Comment':
        raise AssertionError("Mock comments are not yet supported")
    elif type == u'file' and trigger == 'File':
        raise AssertionError("Mock files are not yet supported")
    else:
        raise AssertionError("type and trigger may have been refactored, but they are being used incorrectly here")
def collect_users_overview(store):
    users_description_list = []

    all_receivers = store.find(models.Receiver)

    for receiver in all_receivers:
        # most public and private details are stripped here, because they are
        # already available through the other Admin resources
        user_description = {
            'id': receiver.id,
            'name': receiver.name,
            'failed_login': receiver.user.failed_login_count,
            'receiverfiles': [],
            'receivertips': [],
            'gpg_key_status': receiver.gpg_key_status,
        }

        rcvr_files = store.find(models.ReceiverFile, models.ReceiverFile.receiver_id == receiver.id)
        for rfile in rcvr_files:
            if not rfile.internalfile:
                log.err("(user_overview) ReceiverFile without an InternalFile available: skipped")
                continue

            user_description['receiverfiles'].append({
                'id': rfile.id,
                'file_name': rfile.internalfile.name,
                'downloads': rfile.downloads,
                'last_access': pretty_date_time(rfile.last_access),
                'status': rfile.mark,
            })

        rcvr_tips = store.find(models.ReceiverTip, models.ReceiverTip.receiver_id == receiver.id)
        for rtip in rcvr_tips:
            user_description['receivertips'].append({
                'internaltip_id': rtip.id,
                'status': rtip.mark,
                'last_access': pretty_date_time(rtip.last_access),
                'notification_date': pretty_date_time(rtip.notification_date),
                'access_counter': rtip.access_counter,
            })

        users_description_list.append(user_description)

    return users_description_list
def serialize_internalfile(ifile):
    rfile_dict = {
        'name': unicode(ifile.name),
        'sha2sum': unicode(ifile.sha2sum),
        'content_type': unicode(ifile.content_type),
        'size': unicode(ifile.size),
        'creation_date': unicode(pretty_date_time(ifile.creation_date)),
    }
    return rfile_dict
def force_execution(self, aps=None, seconds=1):
    """
    @param aps: an Advanced Python Scheduler (APScheduler) object
    @param seconds: number of seconds to wait before the operation starts

    force_execution does not run self.operation() immediately, because we
    want to be sure that it runs in a thread started by APScheduler.
    """
    plan_exec = utc_future_date(hours=0, seconds=seconds)
    plan_exec += (datetime.now() - datetime.utcnow())

    try:
        aps.add_date_job(self.operation, plan_exec)
    except ValueError as exc:
        log.err("Failed to force-schedule execution of %s planned at %s: %s" %
                (self.__class__.__name__, pretty_date_time(plan_exec), exc))

    log.debug("Forced execution of %s at %s" %
              (self.__class__.__name__, pretty_date_time(plan_exec)))
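# Minimal sketch of the time conversion used above (an assumption: APScheduler's
# add_date_job() is treated as expecting a local wall-clock datetime, while
# utc_future_date() returns UTC, hence the now() - utcnow() offset is added back):
#
#   from datetime import datetime, timedelta
#
#   def _local_run_time_sketch(seconds=1):
#       planned_utc = datetime.utcnow() + timedelta(seconds=seconds)
#       # shift the UTC instant by the current UTC-to-local offset
#       return planned_utc + (datetime.now() - datetime.utcnow())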
def serialize_file(internalfile):
    file_desc = {
        "size": internalfile.size,
        "content_type": internalfile.content_type,
        "name": internalfile.name,
        "creation_date": pretty_date_time(internalfile.creation_date),
        "id": internalfile.id,
        "mark": internalfile.mark,
        "sha2sum": internalfile.sha2sum,
    }
    return file_desc
def get_receiver_tip_list(store, user_id):
    receiver = store.find(Receiver, Receiver.id == unicode(user_id)).one()
    rtiplist = store.find(ReceiverTip, ReceiverTip.receiver_id == receiver.id)
    rtiplist.order_by(Desc(ReceiverTip.creation_date))

    rtip_summary_list = []

    for rtip in rtiplist:
        rfiles_n = store.find(ReceiverFile,
                              (ReceiverFile.internaltip_id == rtip.internaltip.id,
                               ReceiverFile.receiver_id == user_id)).count()

        single_tip_sum = dict({
            # expiry time ?
            # context_id ?
            'access_counter': rtip.access_counter,
            'expressed_pertinence': rtip.expressed_pertinence,
            'creation_date': unicode(pretty_date_time(rtip.creation_date)),
            'last_access': unicode(pretty_date_time(rtip.last_access)),
            'id': rtip.id,
            'files_number': rfiles_n,
        })

        preview_data = []

        fo = Fields(rtip.internaltip.context.localized_fields, rtip.internaltip.context.unique_fields)
        for preview_key in fo.get_preview_keys():
            # preview in a format angular.js likes
            entry = dict({'key': preview_key,
                          'text': rtip.internaltip.wb_fields[preview_key]})
            preview_data.append(entry)

        single_tip_sum.update({'preview': preview_data})
        rtip_summary_list.append(single_tip_sum)

    return rtip_summary_list
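# Preview-format sketch (illustrative values only; the field names below are
# assumptions, not taken from a real context configuration): each preview entry
# is a {'key': ..., 'text': ...} pair, so the client can render a label/value
# list directly.

def _preview_entries_sketch(wb_fields, preview_keys):
    # mirrors the loop above: one entry per configured preview key
    return [{'key': k, 'text': wb_fields[k]} for k in preview_keys]

#   _preview_entries_sketch({'Subject': 'leak title'}, ['Subject'])
#   # => [{'key': 'Subject', 'text': 'leak title'}]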
def serialize_receiver_file(receiverfile, internalfile):
    file_desc = {
        "size": receiverfile.size,
        "content_type": internalfile.content_type,
        "name": ("%s.pgp" % internalfile.name)
                if receiverfile.status == ReceiverFile._status_list[2]
                else internalfile.name,
        "creation_date": pretty_date_time(internalfile.creation_date),
        "downloads": receiverfile.downloads,
        "path": receiverfile.file_path,
        "sha2sum": internalfile.sha2sum,
    }
    return file_desc
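# Naming-rule sketch (a hypothetical helper mirroring the conditional above;
# the 'encrypted' status string is an assumption, since the real value comes
# from ReceiverFile._status_list[2] defined in the models):

def _displayed_filename_sketch(original_name, status, encrypted_status='encrypted'):
    # encrypted copies are exposed with a .pgp suffix, plain references keep the name
    return "%s.pgp" % original_name if status == encrypted_status else original_name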
def admin_serialize_receiver(receiver, language=GLSetting.memory_copy.default_language):
    receiver_dict = {
        "receiver_gus": receiver.id,
        "name": receiver.name,
        "creation_date": utility.pretty_date_time(receiver.creation_date),
        "last_update": utility.pretty_date_time(receiver.last_update),
        "receiver_level": receiver.receiver_level,
        "can_delete_submission": receiver.can_delete_submission,
        "username": receiver.user.username,
        "notification_fields": dict(receiver.notification_fields or {'mail_address': ''}),
        "failed_login": receiver.user.failed_login_count,
        "password": u"",
        "contexts": [],
        "tags": receiver.tags,
        "gpg_key_info": receiver.gpg_key_info,
        "gpg_key_armor": receiver.gpg_key_armor,
        "gpg_key_remove": False,
        "gpg_key_fingerprint": receiver.gpg_key_fingerprint,
        "gpg_key_status": receiver.gpg_key_status,
        "gpg_enable_notification": bool(receiver.gpg_enable_notification),
        "gpg_enable_files": bool(receiver.gpg_enable_files),
        "comment_notification": bool(receiver.comment_notification),
        "tip_notification": bool(receiver.tip_notification),
        "file_notification": bool(receiver.file_notification),
    }

    # at the moment only 'description' is a localized attribute here
    mo = structures.Rosetta()
    mo.acquire_storm_object(receiver)
    for attr in mo.get_localized_attrs():
        receiver_dict[attr] = mo.dump_translated(attr, language)

    for context in receiver.contexts:
        receiver_dict['contexts'].append(context.id)

    return receiver_dict
def wb_serialize_internaltip(internaltip):
    response = {
        'id': unicode(internaltip.id),
        # kept for compatibility until the client is patched
        'submission_gus': unicode(internaltip.id),
        'context_gus': unicode(internaltip.context_id),
        'creation_date': unicode(pretty_date_time(internaltip.creation_date)),
        'expiration_date': unicode(pretty_date_time(internaltip.expiration_date)),
        'wb_fields': dict(internaltip.wb_fields or {}),
        'download_limit': int(internaltip.download_limit),
        'access_limit': int(internaltip.access_limit),
        'mark': unicode(internaltip.mark),
        'pertinence': unicode(internaltip.pertinence_counter),
        'escalation_threshold': unicode(internaltip.escalation_threshold),
        'files': [],
        'receivers': []
    }

    for receiver in internaltip.receivers:
        response['receivers'].append(receiver.id)

    for internalfile in internaltip.internalfiles:
        response['files'].append(internalfile.id)

    return response
def collect_files_overview(store):
    file_description_list = []

    submission_dir = os.path.join(GLSetting.working_path, GLSetting.submission_path)
    disk_files = os.listdir(submission_dir)

    stored_ifiles = store.find(models.InternalFile)
    stored_ifiles.order_by(Desc(models.InternalFile.creation_date))

    for ifile in stored_ifiles:
        file_desc = {
            'id': ifile.id,
            'name': ifile.name,
            'content_type': ifile.content_type,
            'size': ifile.size,
            'itip': ifile.internaltip_id,
            'creation_date': pretty_date_time(ifile.creation_date),
            'rfiles': 0,
            'stored': None,
            'path': '',
        }

        file_desc['rfiles'] = store.find(models.ReceiverFile,
                                         models.ReceiverFile.internalfile_id == ifile.id).count()

        absfilepath = os.path.join(submission_dir, ifile.file_path)

        if os.path.isfile(absfilepath):
            file_desc['stored'] = True
            file_desc['path'] = absfilepath

            # disk_files contains every file present on disk; paths referenced by
            # an InternalFile are removed one by one, so that at the end
            # disk_files holds only the unreferenced files.
            if ifile.file_path in disk_files:
                disk_files.remove(ifile.file_path)
            else:
                log.err("Weird failure: path %s is missing from the %s listing but the file is still on disk" %
                        (ifile.file_path, submission_dir))
        else:
            log.err("InternalFile %s has no file on disk at: %s" %
                    (file_desc['name'], absfilepath))
            file_desc['stored'] = False

        file_description_list.append(file_desc)

    # the files remaining in disk_files have no corresponding InternalFile
    for dfile in disk_files:
        absfilepath = os.path.join(submission_dir, dfile)

        file_desc = {
            'id': '',
            'name': '',
            'content_type': '',
            'size': os.stat(absfilepath).st_size,
            'itip': '',
            'rfiles_associated': 0,
            'stored': True,
            'path': absfilepath,
        }
        file_description_list.append(file_desc)

    return file_description_list
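# Sketch (a hypothetical helper, not part of the module): the same
# "remove every referenced path from the directory listing" technique used
# above can be expressed as a set difference when only the orphan files are
# of interest:

def _orphan_submission_files_sketch(submission_dir, referenced_paths):
    import os
    # everything on disk that no InternalFile points to
    return sorted(set(os.listdir(submission_dir)) - set(referenced_paths))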
def receiverfile_planning(store):
    """
    This function iterates over the uploaded InternalFiles and extracts, for
    every file/receiver combination, the path, id and receiver involved: one
    entry for each ReceiverFile that needs to be created.

    REMIND (keywords: escalation, pertinence, vote): the checks on status,
    marker and recipients performed here need to be updated whenever
    escalation is implemented.
    """
    try:
        files = store.find(InternalFile, InternalFile.mark == InternalFile._marker[0])
    except Exception as excep:
        log.err("Unable to find InternalFile in scheduler! %s" % str(excep))
        return []

    rfileslist = []

    for filex in files:

        if not filex.internaltip:
            log.err("Integrity failure: the file %s of %s has no InternalTip assigned (path: %s)" %
                    (filex.name, pretty_date_time(filex.creation_date), filex.file_path))

            try:
                store.remove(filex)
            except Exception as excep:
                log.err("Unable to remove InternalFile in scheduler! %s" % str(excep))
                continue

            try:
                os.unlink(os.path.join(GLSetting.submission_path, filex.file_path))
            except OSError as excep:
                log.err("Unable to remove %s in integrity fixing routine: %s" %
                        (filex.file_path, excep.strerror))

            continue

        # Here we select the files which deserve to be processed. They need to be:
        #   - from a Tip in status 'finalize' or 'first' (both Tip statuses are valid)
        #   - from an InternalFile in status 'ready'
        # If these conditions are met, the InternalFile(s) are marked as 'locked'.
        # A delivery scheduler run never touches 'locked' files; if a 'locked'
        # file appears in the Admin file overview, something is broken.
        if (filex.internaltip.mark == InternalTip._marker[1] or
                filex.internaltip.mark == InternalTip._marker[2]) and \
                filex.mark == InternalFile._marker[0]:
            filex.mark = InternalFile._marker[1]  # 'locked'
        else:
            continue

        try:
            for receiver in filex.internaltip.receivers:
                receiver_desc = admin_serialize_receiver(receiver, GLSetting.memory_copy.default_language)

                if receiver_desc['gpg_key_status'] == Receiver._gpg_types[1] and receiver_desc['gpg_enable_files']:
                    rfileslist.append([filex.id,
                                       ReceiverFile._status_list[2],  # encrypted
                                       filex.file_path, filex.size, receiver_desc])
                else:
                    rfileslist.append([filex.id,
                                       ReceiverFile._status_list[1],  # reference
                                       filex.file_path, filex.size, receiver_desc])
        except Exception as excep:
            log.debug("Invalid Storm operation in checking for GPG cap: %s" % excep)
            continue

    return rfileslist
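# Consumer sketch (an assumption about the delivery loop, which lives elsewhere
# in the scheduler; create_receiverfile() is a hypothetical helper, not a real
# function of this codebase): each entry of rfileslist is a 5-element list and
# can be unpacked like this when the ReceiverFile rows are actually created:
#
#   for ifile_id, rfile_status, ifile_path, ifile_size, receiver_desc in rfileslist:
#       create_receiverfile(ifile_id, rfile_status, ifile_path, ifile_size,
#                           receiver_desc['receiver_gus'])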
def collect_tip_overview(store, language=GLSetting.memory_copy.default_language):
    tip_description_list = []

    all_itips = store.find(models.InternalTip)
    all_itips.order_by(Desc(models.InternalTip.creation_date))

    for itip in all_itips:
        tip_description = {
            "id": itip.id,
            "creation_date": pretty_date_time(itip.creation_date),
            "creation_lifetime": pretty_date_time(itip.creation_date),
            "expiration_date": pretty_date_time(itip.expiration_date),
            "context_id": itip.context_id,
            "pertinence_counter": itip.pertinence_counter,
            "status": itip.mark,
            "receivertips": [],
            "internalfiles": [],
            "comments": [],
        }

        mo = Rosetta()
        mo.acquire_storm_object(itip.context)
        tip_description['context_name'] = mo.dump_translated('name', language)

        # skip uncompleted submissions, until GLClient stops opening a new
        # submission even when no data has been supplied yet
        if itip.mark == models.InternalTip._marker[0]:
            continue

        for rtip in itip.receivertips:
            tip_description['receivertips'].append({
                'access_counter': rtip.access_counter,
                'notification_date': pretty_date_time(rtip.notification_date),
                # 'creation_date': pretty_date_time(rtip.creation_date),
                'status': rtip.mark,
                'receiver_id': rtip.receiver.id,
                'receiver_username': rtip.receiver.user.username,
                'receiver_name': rtip.receiver.name,
                # last_access censored willingly
            })

        for ifile in itip.internalfiles:
            tip_description['internalfiles'].append({
                'name': ifile.name,
                'size': ifile.size,
                'status': ifile.mark,
                'content_type': ifile.content_type
            })

        for comment in itip.comments:
            tip_description['comments'].append({
                'type': comment.type,
                'lifetime': pretty_date_time(comment.creation_date),
            })

        # the whistleblower tip is not referenced from the InternalTip, so it
        # has to be looked up explicitly:
        wbtip = store.find(models.WhistleblowerTip,
                           models.WhistleblowerTip.internaltip_id == itip.id).one()
        if wbtip is not None:
            tip_description.update({
                'wb_access_counter': wbtip.access_counter,
                'wb_last_access': pretty_date_time(wbtip.last_access)
            })
        else:
            tip_description.update({
                'wb_access_counter': u'Deleted',
                'wb_last_access': u'Never'
            })

        tip_description_list.append(tip_description)

    return tip_description_list