def set_session_user(self, username, ipaddr, login_time):
    """Record the authenticated user's identity in the web session.

    :param username: account name of the logged-in user
    :param ipaddr: client IP address captured at login
    :param login_time: timestamp of the login event
    """
    record = ObjectDict(
        username=username,
        ipaddr=ipaddr,
        login_time=login_time,
    )
    self.session['session_user'] = record
    self.session.save()
def get_current_user(self):
    """Return an anonymous operator descriptor for this request.

    The username is always masked ('******'), the operator type is 0,
    and the login time is stamped with the current time.
    """
    opr = ObjectDict(
        username='******',
        ipaddr=self.request.remote_ip,
        opr_type=0,
        login_time=utils.get_currtime(),
    )
    return opr
def get_current_user(self):
    """Resolve the current user from the secure "mps_user" cookie.

    Returns None when the cookie is absent; otherwise an ObjectDict
    carrying only the username.
    """
    name = self.get_secure_cookie("mps_user")
    if name:
        user = ObjectDict()
        user.username = name
        return user
    return None
def get_current_user(self):
    """Resolve the current portal user from the secure cookie.

    Looks up the radius online-session row matching the cookie's
    username plus the request IP, then the account row, and builds an
    ObjectDict with username, ipaddr, billing times and balance.

    Returns None when the cookie is missing, no matching online
    session exists, or the account record cannot be found.
    """
    username = self.get_secure_cookie("portal_user")
    if not username:
        return None
    ipaddr = self.request.remote_ip
    # The online session must match both the account and the client IP.
    rad_session_user = self.db.query(models.SlcRadOnline).filter_by(
        account_number=username,
        framed_ipaddr=ipaddr
    ).first()
    if not rad_session_user:
        return None
    raduser = self.db.query(models.SlcRadAccount).filter_by(
        account_number=username
    ).first()
    # Guard a dangling online session without an account row; the
    # previous version crashed with AttributeError on raduser.balance.
    # (Leftover debug `print` statements were also removed here.)
    if not raduser:
        return None
    user = ObjectDict()
    user.username = username
    user.ipaddr = rad_session_user.framed_ipaddr
    user.times = rad_session_user.billing_times
    user.balance = raduser.balance
    return user
def db_refresh_memory_variables(store):
    """
    This routine loads in memory few variables of node and notification
    tables that are subject to high usage.

    Mutates GLSettings.memory_copy in place: node settings, the
    tor2web access map, enabled languages, notification settings
    (with developer/disable overrides) and the private settings.
    """
    # Node settings become the root of the in-memory copy.
    node_ro = ObjectDict(NodeFactory(store).admin_export())
    GLSettings.memory_copy = node_ro
    # Per-role tor2web access flags, keyed by role name.
    GLSettings.memory_copy.accept_tor2web_access = {
        'admin': node_ro.tor2web_admin,
        'custodian': node_ro.tor2web_custodian,
        'whistleblower': node_ro.tor2web_whistleblower,
        'receiver': node_ro.tor2web_receiver,
        'unauth': node_ro.tor2web_unauth
    }
    enabled_langs = models.l10n.EnabledLanguage.list(store)
    GLSettings.memory_copy.languages_enabled = enabled_langs
    # Notification settings, with optional development-time overrides.
    notif_ro = ObjectDict(NotificationFactory(store).admin_export())
    GLSettings.memory_copy.notif = notif_ro
    if GLSettings.developer_name:
        # Brand outgoing mail with the developer's name in dev setups.
        GLSettings.memory_copy.notif.source_name = GLSettings.developer_name
    if GLSettings.disable_mail_notification:
        # Global kill-switch: suppress every category of notification mail.
        GLSettings.memory_copy.notif.disable_admin_notification_emails = True
        GLSettings.memory_copy.notif.disable_custodian_notification_emails = True
        GLSettings.memory_copy.notif.disable_receiver_notification_emails = True
    GLSettings.memory_copy.private = ObjectDict(
        PrivateFactory(store).mem_copy_export())
def test_object_dict(self):
    """Verify ObjectDict's attribute/item access equivalence."""
    d = ObjectDict()
    # A missing key raises AttributeError (not KeyError) on attribute access.
    self.assertRaises(AttributeError, getattr, d, "something")
    # Item assignment is readable both as an item and as an attribute.
    d["foo"] = "bar"
    self.assertEqual(d["foo"], "bar")
    self.assertEqual(d.foo, "bar")
    # Attribute assignment is readable as an item.
    d.rah = "meow"
    self.assertEqual(d["rah"], "meow")
def set_session_user(self, username, ipaddr, opr_type, login_time):
    """Persist the operator's identity in the session store.

    :param username: operator account name
    :param ipaddr: client IP address at login
    :param opr_type: operator type code
    :param login_time: timestamp of the login event
    """
    opr = ObjectDict(
        username=username,
        ipaddr=ipaddr,
        opr_type=opr_type,
        login_time=login_time,
    )
    self.session['session_opr'] = opr
    self.session.save()
def set_session_user(self, customer, ipaddr, login_time):
    """Record the logged-in customer in the web session.

    :param customer: customer model object providing name and id
    :param ipaddr: client IP address at login
    :param login_time: timestamp of the login event
    """
    record = ObjectDict(
        username=customer.customer_name,
        cid=customer.customer_id,
        ipaddr=ipaddr,
        login_time=login_time,
    )
    self.session['session_user'] = record
    self.session.save()
def get_current_user(self):
    """Rebuild the portal user from the secure cookies, or None."""
    name = self.get_secure_cookie("portal_user")
    if not name:
        return None
    user = ObjectDict()
    user.username = name
    user.ipaddr = self.get_secure_cookie("portal_user_ip")
    return user
def my_parse_config(filename):
    """Read an ini-style config file into a plain dict of settings.

    The returned dict always contains the raw parser ("raw"), the file
    path ("config_file"), the server and path settings; the optional
    sqlite/redis/mysql/email sections are added only when their
    "enabled" flag is true (absent keys otherwise).
    """
    cp = ConfigParser.RawConfigParser()
    cp.read([filename])
    conf = dict(raw=cp, config_file=filename)
    # server settings; tryget supplies the default when the option is missing
    conf["debug"] = tryget(cp.getboolean, "server", "debug", False)
    conf["xheaders"] = tryget(cp.getboolean, "server", "xheaders", False)
    conf["cookie_secret"] = cp.get("server", "cookie_secret")
    conf["xsrf_cookies"] = tryget(cp.getboolean, "server", "xsrf_cookies", False)
    # make relative path absolute to this file's parent directory
    root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    getpath = lambda k, v: os.path.join(root, tryget(cp.get, k, v))
    # locale, template and static directories
    conf["locale_path"] = getpath("frontend", "locale_path")
    conf["static_path"] = getpath("frontend", "static_path")
    conf["template_path"] = getpath("frontend", "template_path")
    # sqlite support (key only present when enabled)
    if tryget(cp.getboolean, "sqlite", "enabled", False) is True:
        conf["sqlite_settings"] = \
            ObjectDict(database=cp.get("sqlite", "database"))
    # redis support (key only present when enabled)
    if tryget(cp.getboolean, "redis", "enabled", False) is True:
        conf["redis_settings"] = ObjectDict(
            unixsocket=tryget(cp.get, "redis", "unixsocket", None),
            host=tryget(cp.get, "redis", "host", "127.0.0.1"),
            port=tryget(cp.getint, "redis", "port", 6379),
            dbid=tryget(cp.getint, "redis", "dbid", 0),
            poolsize=tryget(cp.getint, "redis", "poolsize", 10))
    # mysql support (host/port are mandatory when enabled)
    if tryget(cp.getboolean, "mysql", "enabled", False) is True:
        conf["mysql_settings"] = ObjectDict(
            host=cp.get("mysql", "host"),
            port=cp.getint("mysql", "port"),
            username=tryget(cp.get, "mysql", "username"),
            password=tryget(cp.get, "mysql", "password"),
            database=tryget(cp.get, "mysql", "database"),
            poolsize=tryget(cp.getint, "mysql", "poolsize", 10),
            debug=tryget(cp.getboolean, "mysql", "debug", False),
            ping=tryget(cp.getint, "mysql", "ping_interval"))
    # email support (host is mandatory when enabled)
    if tryget(cp.getboolean, "email", "enabled", False) is True:
        conf["email_settings"] = ObjectDict(
            host=cp.get("email", "host"),
            port=tryget(cp.getint, "email", "port"),
            tls=tryget(cp.getboolean, "email", "tls"),
            username=tryget(cp.get, "email", "username"),
            password=tryget(cp.get, "email", "password"))
    return conf
def parse_config(filename):
    """Parse an ini-style config file into a settings dict.

    Unlike my_parse_config, the optional sqlite/redis/mysql keys are
    always present and set to None when their section is disabled.
    """
    cfg = ConfigParser.RawConfigParser()
    with open(filename) as fp:
        cfg.readfp(fp)
    settings = {}
    # web server settings; xget supplies the default on a missing option
    settings["debug"] = xget(cfg.getboolean, "server", "debug", False)
    settings["xheaders"] = xget(cfg.getboolean, "server", "xheaders", False)
    settings["cookie_secret"] = cfg.get("server", "cookie_secret")
    settings["xsrf_cookies"] = xget(cfg.getboolean, "server", "xsrf_cookies", False)
    # get project's absolute path
    root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    getpath = lambda k, v: os.path.join(root, xget(cfg.get, k, v))
    # locale, template and static directories' path
    settings["locale_path"] = getpath("frontend", "locale_path")
    settings["static_path"] = getpath("frontend", "static_path")
    settings["template_path"] = getpath("frontend", "template_path")
    # sqlite support
    if xget(cfg.getboolean, "sqlite", "enabled", False):
        settings["sqlite_settings"] = ObjectDict(
            database=cfg.get("sqlite", "database")
        )
    else:
        settings["sqlite_settings"] = None
    # redis support
    if xget(cfg.getboolean, "redis", "enabled", False):
        settings["redis_settings"] = ObjectDict(
            host=cfg.get("redis", "host"),
            port=cfg.getint("redis", "port"),
            dbid=cfg.getint("redis", "dbid"),
            poolsize=cfg.getint("redis", "poolsize"),
        )
    else:
        settings["redis_settings"] = None
    # mysql support
    if xget(cfg.getboolean, "mysql", "enabled", False):
        settings["mysql_settings"] = ObjectDict(
            host=cfg.get("mysql", "host"),
            port=cfg.getint("mysql", "port"),
            username=xget(cfg.get, "mysql", "username"),
            password=xget(cfg.get, "mysql", "password"),
            database=xget(cfg.get, "mysql", "database"),
            poolsize=xget(cfg.getint, "mysql", "poolsize", 10),
            debug=xget(cfg.getboolean, "mysql", "debug", False),
        )
    else:
        settings["mysql_settings"] = None
    # it must always return a dict
    return settings
def get_current_user(self):
    """Return the user object stored in the session, or None.

    The session value is the ObjectDict saved at login time by
    set_session_user; no reconstruction from cookies is needed here.
    """
    # Bug fix: the previous version had unreachable code after this
    # return (an `if not username:` block referencing an undefined
    # `username` variable); it has been removed.
    return self.session.get("session_user")
def load_complete_events(store, events_limit=GLSettings.notification_limit):
    """
    This function does not serialize, but makes an OD() of the description.

    events_limit is the maximum number of events this call may return;
    which events actually get notified is decided later. Up to
    events_limit * 3 unsent EventLogs rows are scanned per call.
    """
    node_desc = db_admin_serialize_node(store, GLSettings.memory_copy.default_language)
    event_list = []
    # Total backlog size, for the closing debug log only.
    totaleventinqueue = store.find(EventLogs, EventLogs.mail_sent == False).count()
    # Scan at most 3x the limit so skipped events don't starve the batch.
    storedevnts = store.find(EventLogs, EventLogs.mail_sent == False)[: events_limit * 3]
    debug_event_counter = {}
    for i, stev in enumerate(storedevnts):
        if len(event_list) == events_limit:
            log.debug("Reached maximum number of event notifications doable on a single loop %d" % events_limit)
            break
        # Count every scanned event by kind, even ones later skipped.
        debug_event_counter.setdefault(stev.event_reference["kind"], 0)
        debug_event_counter[stev.event_reference["kind"]] += 1
        # Receivers who opted out of tip notification are skipped.
        if not stev.description["receiver_info"]["tip_notification"]:
            continue
        eventcomplete = OD()
        # node level information is not stored in the event; fetch it now
        eventcomplete.notification_settings = admin_serialize_notification(
            store.find(Notification).one(), stev.description["receiver_info"]["language"]
        )
        eventcomplete.node_info = node_desc
        # event level information is decoded from DB in the old 'Event' namedtuple format:
        eventcomplete.receiver_info = stev.description["receiver_info"]
        eventcomplete.tip_info = stev.description["tip_info"]
        eventcomplete.subevent_info = stev.description["subevent_info"]
        eventcomplete.context_info = stev.description["context_info"]
        eventcomplete.type = stev.description["type"]  # 'Tip', 'Comment'
        eventcomplete.trigger = stev.event_reference["kind"]  # 'blah' ...
        eventcomplete.orm_id = stev.id
        event_list.append(eventcomplete)
    if debug_event_counter:
        if totaleventinqueue > (events_limit * 3):
            log.debug("load_complete_events: %s from %d Events" % (debug_event_counter, totaleventinqueue))
        else:
            log.debug(
                "load_complete_events: %s from %d Events, with a protection limit of %d" %
                (debug_event_counter, totaleventinqueue, events_limit * 3)
            )
    return event_list
def load_complete_events(store, events_limit=GLSettings.notification_limit):
    """
    This function does not serialize, but makes an OD() of the description.

    events_limit is the maximum number of events this call may return;
    which events actually get notified is decided later. Up to
    events_limit * 3 unsent EventLogs rows are scanned per call.
    """
    node_desc = db_admin_serialize_node(store, GLSettings.defaults.language)
    event_list = []
    # Total backlog size, for the closing debug log only.
    totaleventinqueue = store.find(EventLogs, EventLogs.mail_sent == False).count()
    # Scan at most 3x the limit so skipped events don't starve the batch.
    storedevnts = store.find(EventLogs, EventLogs.mail_sent == False)[:events_limit * 3]
    debug_event_counter = {}
    for i, stev in enumerate(storedevnts):
        if len(event_list) == events_limit:
            log.debug("Maximum number of notification event reach (Mailflush) %d, after %d" % (events_limit, i))
            break
        # Count every scanned event by kind, even ones later skipped.
        debug_event_counter.setdefault(stev.event_reference['kind'], 0)
        debug_event_counter[stev.event_reference['kind']] += 1
        # Receivers who opted out of tip notification are skipped.
        if not stev.description['receiver_info']['tip_notification']:
            continue
        eventcomplete = OD()
        # node level information is not stored in the event; fetch it now
        eventcomplete.notification_settings = admin_serialize_notification(
            store.find(Notification).one(), stev.description['receiver_info']['language']
        )
        eventcomplete.node_info = node_desc
        # event level information is decoded from DB in the old 'Event' namedtuple format:
        eventcomplete.receiver_info = stev.description['receiver_info']
        eventcomplete.tip_info = stev.description['tip_info']
        eventcomplete.subevent_info = stev.description['subevent_info']
        eventcomplete.context_info = stev.description['context_info']
        eventcomplete.type = stev.description['type']  # 'Tip', 'Comment'
        eventcomplete.trigger = stev.event_reference['kind']  # 'blah' ...
        eventcomplete.orm_id = stev.id
        event_list.append(eventcomplete)
    if debug_event_counter:
        if totaleventinqueue > (events_limit * 3):
            log.debug("load_complete_events: %s from %d Events" % (debug_event_counter, totaleventinqueue))
        else:
            log.debug("load_complete_events: %s from %d Events, with a protection limit of %d" %
                      (debug_event_counter, totaleventinqueue, events_limit * 3))
    return event_list
def get_current_user(self):
    """Build the current operator from the "tra_*" secure cookies."""
    name = self.get_secure_cookie("tra_user")
    if not name:
        return None
    user = ObjectDict()
    user.username = name
    user.ipaddr = self.get_secure_cookie("tra_login_ip")
    user.opr_type = self.get_secure_cookie("tra_opr_type")
    user.login_time = self.get_secure_cookie("tra_login_time")
    return user
def parse_config(filename=None):
    """Parse the application's ini-style config file into a settings dict.

    On the first successful call, the absolute path of the config file
    is cached in the module-level CONFIG_FILE_PATH. Optional sections
    (postgresql, search_engine, neo4j) map to None when disabled.
    """
    global CONFIG_FILE_PATH
    # Remember where the config was first found, as an absolute path.
    if CONFIG_FILE_PATH is None and os.path.isfile(filename):
        CONFIG_FILE_PATH = os.path.abspath(filename)
    cfg = ConfigParser.RawConfigParser()
    with open(filename) as fp:
        cfg.readfp(fp)
    settings = {}
    # web server settings
    settings["debug"] = xget(cfg.getboolean, "server", "debug", False)
    settings["base_url"] = cfg.get("server", "base_url")
    # get project's absolute path
    # (a leftover debug `print root` statement was removed here)
    root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    getpath = lambda k, v: os.path.join(root, xget(cfg.get, k, v))
    # template and static directories' path
    settings["static_path"] = getpath("frontend", "static_path")
    settings["template_path"] = getpath("frontend", "template_path")
    # postgresql support
    if xget(cfg.getboolean, "postgresql", "enabled", False):
        settings["postgresql_settings"] = ObjectDict(
            host=cfg.get("postgresql", "host"),
            port=cfg.getint("postgresql", "port"),
            database=cfg.get("postgresql", "database"),
            poolsize=cfg.getint("postgresql", "poolsize"),
            username=cfg.get("postgresql", "username"),
            password=cfg.get("postgresql", "password"))
    else:
        settings["postgresql_settings"] = None
    # search engine
    if xget(cfg.getboolean, "search_engine", "enabled", False):
        settings['search_engine'] = ObjectDict(
            host=cfg.get('search_engine', 'host'),
            port=cfg.get('search_engine', 'port'),
            index=cfg.get('search_engine', 'index'),
            document_type=cfg.get('search_engine', 'document_type'))
    else:
        settings["search_engine"] = None
    # Neo4j server
    if xget(cfg.getboolean, "neo4j", "enabled", False):
        settings["neo4j"] = ObjectDict(host=cfg.get("neo4j", "host"),
                                       port=cfg.getint("neo4j", "port"))
    else:
        settings["neo4j"] = None
    return settings
def load_complete_events(store, event_number=GLSettings.notification_limit):
    """
    _complete_ is explicit because this does not serialize, but makes an
    OD() of the description.

    event_number is the maximum number of events this call may return;
    which events actually get notified is decided later. Unsent events
    are processed oldest-first.
    """
    node_desc = db_admin_serialize_node(store, GLSettings.defaults.language)
    event_list = []
    # All still-unsent events, oldest first.
    storedevnts = store.find(EventLogs, EventLogs.mail_sent == False)
    storedevnts.order_by(Asc(EventLogs.creation_date))
    debug_event_counter = {}
    for i, stev in enumerate(storedevnts):
        if len(event_list) == event_number:
            log.debug("Maximum number of notification event reach (Mailflush) %d, after %d" % (event_number, i))
            break
        # Count every scanned event by kind, even ones later skipped.
        debug_event_counter.setdefault(stev.event_reference['kind'], 0)
        debug_event_counter[stev.event_reference['kind']] += 1
        # Receivers who opted out of tip notification are skipped.
        if not stev.description['receiver_info']['tip_notification']:
            continue
        eventcomplete = OD()
        # node level information is not stored in the event; fetch it now
        eventcomplete.notification_settings = admin_serialize_notification(
            store.find(Notification).one(), stev.description['receiver_info']['language']
        )
        eventcomplete.node_info = node_desc
        # event level information is decoded from DB in the old 'Event' namedtuple format:
        eventcomplete.receiver_info = stev.description['receiver_info']
        eventcomplete.tip_info = stev.description['tip_info']
        eventcomplete.subevent_info = stev.description['subevent_info']
        eventcomplete.context_info = stev.description['context_info']
        eventcomplete.type = stev.description['type']  # 'Tip', 'Comment'
        eventcomplete.trigger = stev.event_reference['kind']  # 'blah' ...
        eventcomplete.orm_id = stev.id
        event_list.append(eventcomplete)
    if debug_event_counter:
        log.debug("load_complete_events: %s" % debug_event_counter)
    return event_list
def get_current_user(self):
    """Reconstruct the operator session from the "opr_*" cookies.

    Returns None when the name cookie is missing. Note the operator
    type cookie is converted to int before the None check on the name,
    so all four cookies are expected to be set together.
    """
    name = self.get_secure_cookie("opr_name")
    if not name:
        return None
    user = ObjectDict()
    user.username = name
    user.ipaddr = self.get_secure_cookie("opr_login_ip")
    user.opr_type = int(self.get_secure_cookie("opr_type"))
    user.login_time = self.get_secure_cookie("opr_login_time")
    return user
def ping_mail_flush(self, notification_settings, receivers_syntesis):
    """
    Build and send a "ping" summary email via sendmail.

    TODO This function should be implemented as a clean and testable plugin in
    the way defined in plugin/base.py and plugin/notification.py, and/or is the
    opportunity to review these classes; at the moment it is a simplified
    version that just creates a ping email and sends it via sendmail.
    """
    for _, data in receivers_syntesis.iteritems():
        receiver_dict, winks = data
        receiver_name = receiver_dict['name']
        receiver_email = receiver_dict['ping_mail_address']
        # Minimal event structure the templating layer expects.
        fakeevent = OD()
        fakeevent.type = u'ping_mail'
        fakeevent.node_info = None
        fakeevent.context_info = None
        fakeevent.steps_info = None
        fakeevent.receiver_info = receiver_dict
        fakeevent.tip_info = None
        fakeevent.subevent_info = {'counter': winks}
        body = Templating().format_template(
            notification_settings['ping_mail_template'], fakeevent)
        title = Templating().format_template(
            notification_settings['ping_mail_title'], fakeevent)
        # so comfortable for a developer!! :)
        source_mail_name = GLSetting.developer_name if GLSetting.devel_mode \
            else GLSetting.memory_copy.notif_source_name
        message = MIME_mail_build(source_mail_name,
                                  GLSetting.memory_copy.notif_source_email,
                                  receiver_name,
                                  receiver_email,
                                  title,
                                  body)
        # Synthetic event used only for sendmail's logging/tracking.
        fakeevent2 = OD()
        fakeevent2.type = "Ping mail for %s (%d info)" % (receiver_email, winks)
        # NOTE(review): this return fires on the first iteration, so only
        # one receiver is mailed per call — confirm this is intended.
        return sendmail(authentication_username=GLSetting.memory_copy.notif_username,
                        authentication_password=GLSetting.memory_copy.notif_password,
                        from_address= GLSetting.memory_copy.notif_source_email,
                        to_address= [receiver_email],
                        message_file=message,
                        smtp_host=GLSetting.memory_copy.notif_server,
                        smtp_port=GLSetting.memory_copy.notif_port,
                        security=GLSetting.memory_copy.notif_security,
                        event=fakeevent2)
def send_pgp_alerts(self, node_desc, receiver_desc, notification_settings):
    """Email a PGP key expiration alert to a single receiver.

    Renders the alert body and subject from the configured templates
    and delivers them through the configured SMTP relay.
    """
    # Synthesize the minimal event structure the templating layer expects.
    event = OD()
    event.type = u'pgp_expiration_alert'
    event.node_info = node_desc
    event.context_info = None
    event.steps_info = None
    event.receiver_info = receiver_desc
    event.tip_info = None
    event.subevent_info = None

    body = Templating().format_template(
        notification_settings['pgp_alert_mail_template'], event)
    title = Templating().format_template(
        notification_settings['pgp_alert_mail_title'], event)

    to_address = receiver_desc['mail_address']
    message = MIME_mail_build(GLSetting.memory_copy.notif_source_name,
                              GLSetting.memory_copy.notif_source_email,
                              to_address,
                              to_address,
                              title,
                              body)

    yield sendmail(authentication_username=GLSetting.memory_copy.notif_username,
                   authentication_password=GLSetting.memory_copy.notif_password,
                   from_address=GLSetting.memory_copy.notif_source_email,
                   to_address=to_address,
                   message_file=message,
                   smtp_host=GLSetting.memory_copy.notif_server,
                   smtp_port=GLSetting.memory_copy.notif_port,
                   security=GLSetting.memory_copy.notif_security,
                   event=None)
def get_wlannotify():
    """Build the single-article WLAN notification list for the MP client."""
    base = config.get('mps', 'server_base')
    article = ObjectDict()
    article.title = u"无线上网"
    article.description = u"当您已经连接Wi-Fi信号时,点击即可免费联入网络。"
    article.url = "%s/mplogin?mp_openid=%s&product_id=%s&node_id=%s" % (
        base,
        msg.fromuser,
        config.get('mps', 'wlan_product_id'),
        config.get('mps', 'wlan_node_id')
    )
    article.picurl = '%s/static/img/wlan.jpg' % base
    return [article]
def ping_mail_flush(self, notification_settings, receivers_syntesis):
    """
    Build and send a "ping" summary email via sendmail.

    TODO This function should be implemented as a clean and testable plugin in
    the way defined in plugin/base.py and plugin/notification.py, and/or is the
    opportunity to review these classes; at the moment it is a simplified
    version that just creates a ping email and sends it via sendmail.
    """
    for receiver_id, data in receivers_syntesis.iteritems():
        receiver_dict, winks = data
        receiver_name = receiver_dict['name']
        receiver_email = receiver_dict['ping_mail_address']
        # Minimal event structure the templating layer expects.
        fakeevent = OD()
        fakeevent.type = u'ping_mail'
        fakeevent.node_info = None
        fakeevent.context_info = None
        fakeevent.steps_info = None
        fakeevent.receiver_info = receiver_dict
        fakeevent.tip_info = None
        fakeevent.subevent_info = {'counter': winks}
        body = Templating().format_template(
            notification_settings['ping_mail_template'], fakeevent)
        title = Templating().format_template(
            notification_settings['ping_mail_title'], fakeevent)
        message = MIME_mail_build(GLSetting.memory_copy.notif_source_name,
                                  GLSetting.memory_copy.notif_source_email,
                                  receiver_name,
                                  receiver_email,
                                  title,
                                  body)
        # Synthetic event used only for sendmail's logging/tracking.
        fakeevent2 = OD()
        fakeevent2.type = "Ping mail for %s (%d info)" % (receiver_email, winks)
        # NOTE(review): this return fires on the first iteration, so only
        # one receiver is mailed per call — confirm this is intended.
        return sendmail(
            authentication_username=GLSetting.memory_copy.notif_username,
            authentication_password=GLSetting.memory_copy.notif_password,
            from_address=GLSetting.memory_copy.notif_source_email,
            to_address=[receiver_email],
            message_file=message,
            smtp_host=GLSetting.memory_copy.notif_server,
            smtp_port=GLSetting.memory_copy.notif_port,
            security=GLSetting.memory_copy.notif_security,
            event=fakeevent2)
def generate(self, **kwargs):
    """Generate this template with the given arguments.

    Builds the execution namespace (escape helpers, the caller-provided
    self.namespace and kwargs), execs the compiled template into it,
    and runs the generated _execute() function. On failure, the
    generated code is logged line by line and the exception re-raised.
    """
    namespace = {
        "escape": escape.xhtml_escape,
        "xhtml_escape": escape.xhtml_escape,
        "url_escape": escape.url_escape,
        "json_encode": escape.json_encode,
        "squeeze": escape.squeeze,
        "linkify": escape.linkify,
        "datetime": datetime,
        "_utf8": escape.utf8,  # for internal use
        "_string_types": (unicode, bytes_type),
        # __name__ and __loader__ allow the traceback mechanism to find
        # the generated source code.
        "__name__": self.name.replace('.', '_'),
        "__loader__": ObjectDict(get_source=lambda name: self.code),
    }
    namespace.update(self.namespace)
    namespace.update(kwargs)
    # Python 2 exec-in-namespace: defines _execute() inside `namespace`.
    exec self.compiled in namespace
    execute = namespace["_execute"]
    # Clear the traceback module's cache of source data now that
    # we've generated a new template (mainly for this module's
    # unittests, where different tests reuse the same name).
    linecache.clearcache()
    try:
        return execute()
    except Exception:
        # Log the generated code to help diagnose template errors.
        formatted_code = _format_code(self.code).rstrip()
        log.msg("%s code:" % self.name)
        for line in formatted_code.split("\n"):
            log.msg(line)
        raise
def set_session_user(self, username, ipaddr, login_time, **kwargs):
    """Store the WLAN portal user in the session.

    Extra keyword arguments are merged into the session record;
    'macaddr' defaults to an empty string when not supplied.
    """
    extras = dict(kwargs)
    record = ObjectDict(
        username=username,
        ipaddr=ipaddr,
        macaddr=extras.pop('macaddr', ''),
        login_time=login_time,
    )
    record.update(**extras)
    self.session['wlan_session_user'] = record
    self.session.save()
def generate_anomaly_email(self, plausible_event):
    """Derive a notification-limit-reached event from a template event.

    Copies notification/node/receiver context from plausible_event and
    blanks everything else; orm_id 0 marks it as synthetic.
    """
    anomaly = OD()
    anomaly.type = u'receiver_notification_limit_reached'
    anomaly.orm_id = 0
    # Context carried over from the event that triggered the limit.
    anomaly.notification_settings = plausible_event.notification_settings
    anomaly.node_info = plausible_event.node_info
    anomaly.receiver_info = plausible_event.receiver_info
    # Fields that have no meaning for a synthetic limit event.
    anomaly.context_info = None
    anomaly.tip_info = None
    anomaly.subevent_info = None
    return anomaly
def respond(data, msg=None, db=None, config=None, mpsapi=None, **kwargs):
    """Return WeChat article entries for up to 7 orderable products."""
    base = config.get('mps', 'server_base')
    products = db.query(models.SlcRadProduct).filter(
        models.SlcRadProduct.mps_flag == 1,
        models.SlcRadProduct.product_status == 0
    ).limit(7)
    articles = []
    for product in products:
        entry = ObjectDict()
        entry.title = product.product_name
        entry.description = ''
        entry.url = "%s/order?openid=%s&product_id=%s" % (
            base, msg.fromuser, product.id
        )
        entry.picurl = '%s/static/img/mps/order_online.jpg' % base
        articles.append(entry)
    return articles
def respond(data, msg=None, db=None, config=None, mpsapi=None, **kwargs):
    """Return a single "my tickets" article for a bound WeChat member.

    Falls back to a plain-text prompt when the WeChat id has not been
    bound to a member account.
    """
    member = db.query(models.SlcMember).filter(
        models.SlcMember.weixin_id == msg.fromuser).first()
    if not member:
        return u"您当前还未绑定账号"
    base = config.get('mps', 'server_base')
    article = ObjectDict()
    article.title = u"我的工单"
    article.description = ''
    article.url = "%s/issues?openid=%s&member_id=%s" % (
        base, msg.fromuser, member.member_id
    )
    article.picurl = '%s/static/img/mps/issues_query.jpg' % base
    return [article]
def set_session_user(self, uid, username, ipaddr, login_time):
    """Record the authenticated user (with numeric id) in the session."""
    record = ObjectDict(
        uid=uid,
        username=username,
        ipaddr=ipaddr,
        login_time=login_time,
    )
    self.session['session_user'] = record
    self.session.save()
def set_session_user(self, username, ipaddr, opr_type, login_time):
    """Persist the operator's identity plus authorized resource paths."""
    rules = self.db.query(models.TrOperatorRule).filter_by(
        operator_name=username)
    opr = ObjectDict(
        username=username,
        ipaddr=ipaddr,
        opr_type=opr_type,
        login_time=login_time,
        # Flatten the operator's ACL into a plain list of rule paths.
        resources=[rule.rule_path for rule in rules],
    )
    self.session['session_opr'] = opr
    self.session.save()
def set_session_user(self, username, ipaddr, opr_type, login_time):
    """Persist the operator's identity plus authorized resource paths."""
    rules = self.db.query(models.SysOperatorRule).filter_by(
        operator_name=username)
    opr = ObjectDict(
        username=username,
        ipaddr=ipaddr,
        opr_type=opr_type,
        login_time=login_time,
        # Flatten the operator's ACL into a plain list of rule paths.
        resources=[rule.rule_path for rule in rules],
    )
    self.session['session_opr'] = opr
    self.session.save()
def fetch_docs(self, query, categories=()):
    """Search the backend and join hits with local corpus records.

    Twisted inlineCallbacks-style generator: results are produced via
    defer.returnValue as (total_hits, results) where each result merges
    the search hit (minus '_'-prefixed keys) with its corpus record.
    Renders an error page and returns None on search failure.
    """
    # Page number comes from the request, falling back to the current page.
    page = self.get_arg('page', self.page)
    try:
        self.page = int(page)
    except ValueError:
        # Non-numeric page argument: reset to the first page.
        self.page = 1
    offset = self.limit * (self.page - 1)
    try:
        data = yield self.client.search(query, categories, offset=offset, limit=self.limit)
    except Exception as e:
        self.logerr('error while retrieveing results for {!r}'.format(query))
        self.render_error()
        # returnValue raises, terminating the generator here.
        defer.returnValue(None)
    if 'error' in data:
        self.logerr('error from search search: {!r}'.format(data))
        self.render_error()
        defer.returnValue(None)
    results = []
    for ret in data['docs']:
        info = self.corpus_db.get(ret['id'])
        if not info:
            # Hit with no local record: skip it but keep going.
            self.log('document key without record {!r}'.format(ret))
            continue
        if any(f not in info for f in ('headline', 'body', 'url')):
            # Incomplete record: abort the merge loop entirely.
            self.log("Document with missing fields: {!r}".format(info))
            break
        # Drop internal '_'-prefixed keys, then overlay the corpus record.
        doc = ObjectDict((k, v) for (k, v) in ret.items() if not k.startswith('_'))
        doc.update(info)
        results.append(doc)
    defer.returnValue((data['total'], results))
def test_001_successful_session_update_on_unauth_request(self):
    """An unauthenticated GET must refresh the session's refreshdate."""
    date1 = utility.datetime_now()
    session = ObjectDict()
    session['user_id'] = u'admin'
    session['role'] = u'admin'
    session['id'] = u'antani'
    session['refreshdate'] = date1
    GLSetting.sessions = {u'antani': session}
    handler = self.request({}, headers={'X-Session': 'antani'})
    yield handler.get()
    # The request must have bumped the stored refreshdate.
    date2 = GLSetting.sessions.values()[0].refreshdate
    self.assertNotEqual(date1, date2)
def test_001_successful_session_expiry_on_admin_auth_request(self):
    """A stale admin session must be expired and removed on access."""
    date1 = utility.datetime_null()  # deliberately ancient timestamp
    session = ObjectDict()
    session['user_id'] = u'admin'
    session['role'] = u'admin'
    session['id'] = u'antani'
    session['refreshdate'] = date1
    GLSetting.sessions = {u'antani': session}
    handler = self.request({}, headers={'X-Session': 'antani'})
    # The stale session must trigger the expiry error...
    self.assertRaises(errors.AdminSessionExpired, handler.get)
    # ...leave no authenticated user, and be purged from the registry.
    self.assertTrue(handler.current_user is None)
    self.assertEqual(len(GLSetting.sessions.keys()), 0)
def send_pgp_alerts(self, receiver_desc):
    """Notify a receiver that their PGP key needs attention.

    Renders subject and body in the receiver's language and mails
    them to the receiver's address.
    """
    language = receiver_desc["language"]
    node_desc = yield admin_serialize_node(language)
    notification_settings = yield get_notification(language)

    # Minimal event structure the templating layer expects.
    event = OD()
    event.type = u"pgp_expiration_alert"
    event.node_info = node_desc
    event.context_info = None
    event.receiver_info = receiver_desc
    event.tip_info = None
    event.subevent_info = None

    subject = Templating().format_template(notification_settings["pgp_alert_mail_title"], event)
    body = Templating().format_template(notification_settings["pgp_alert_mail_template"], event)

    yield sendmail(receiver_desc["mail_address"], subject, body)
def ping_mail_flush(self, notification_settings, receivers_synthesis):
    """Send a "ping" digest mail to a receiver.

    Bug fix: the subject/body template keys were swapped — the subject
    was rendered from 'ping_mail_template' and the body from
    'ping_mail_title', inverted with respect to every sibling sender
    (title template -> subject, mail template -> body).
    """
    for _, data in receivers_synthesis.iteritems():
        receiver_dict, winks = data
        receiver_email = receiver_dict['ping_mail_address']
        # Minimal event structure the templating layer expects.
        fakeevent = OD()
        fakeevent.type = u'ping_mail'
        fakeevent.node_info = None
        fakeevent.context_info = None
        fakeevent.receiver_info = receiver_dict
        fakeevent.tip_info = None
        fakeevent.subevent_info = {'counter': winks}
        # Subject from the title template, body from the mail template.
        subject = Templating().format_template(notification_settings['ping_mail_title'], fakeevent)
        body = Templating().format_template(notification_settings['ping_mail_template'], fakeevent)
        # NOTE(review): this return fires on the first iteration, so only
        # one receiver is mailed per call — confirm this is intended.
        return sendmail(receiver_email, subject, body)
def send_admin_pgp_alerts(self, admin_desc, expired_or_expiring):
    """Warn every admin about expired or expiring PGP keys.

    Renders the alert once in the reference admin's language, then
    mails the same message to every admin user.
    """
    language = admin_desc["language"]
    node_desc = yield admin_serialize_node(language)
    notification_settings = yield get_notification(language)

    # Minimal event structure the templating layer expects.
    event = OD()
    event.type = u"admin_pgp_expiration_alert"
    event.node_info = node_desc
    event.context_info = None
    event.receiver_info = None
    event.tip_info = None
    event.subevent_info = {"expired_or_expiring": expired_or_expiring}

    subject = Templating().format_template(notification_settings["admin_pgp_alert_mail_title"], event)
    body = Templating().format_template(notification_settings["admin_pgp_alert_mail_template"], event)

    admin_users = yield get_admin_users()
    for admin in admin_users:
        yield sendmail(admin["mail_address"], subject, body)
def send_pgp_alerts(self, receiver_desc):
    """Notify a receiver that their PGP key needs attention.

    Renders subject and body in the receiver's language and mails
    them to the receiver's address.
    """
    language = receiver_desc['language']
    node_desc = yield admin_serialize_node(language)
    notification_settings = yield get_notification(language)

    # Minimal event structure the templating layer expects.
    event = OD()
    event.type = u'pgp_expiration_alert'
    event.node_info = node_desc
    event.context_info = None
    event.receiver_info = receiver_desc
    event.tip_info = None
    event.subevent_info = None

    subject = Templating().format_template(
        notification_settings['pgp_alert_mail_title'], event)
    body = Templating().format_template(
        notification_settings['pgp_alert_mail_template'], event)

    yield sendmail(receiver_desc['mail_address'], subject, body)
def generate(self, **kwargs):
    """Generate this template with the given arguments.

    Builds the execution namespace, execs the compiled template into
    it, runs the generated _execute() function and returns its result.
    The result is asserted to be a Deferred; if it has already fired,
    its result is unwrapped (re-raising a Failure). Any error is
    wrapped in a TemplateError carrying the formatted traceback.
    """
    namespace = {
        "escape": escape.xhtml_escape,
        "xhtml_escape": escape.xhtml_escape,
        "url_escape": escape.url_escape,
        "json_encode": escape.json_encode,
        "squeeze": escape.squeeze,
        "linkify": escape.linkify,
        "datetime": datetime,
        "_utf8": escape.utf8,  # for internal use
        "_string_types": (unicode_type, bytes_type),
        # __name__ and __loader__ allow the traceback mechanism to find
        # the generated source code.
        "__name__": self.name.replace('.', '_'),
        "__loader__": ObjectDict(get_source=lambda name: self.code),
    }
    namespace.update(self.namespace)
    namespace.update(kwargs)
    exec_in(self.compiled, namespace)
    execute = namespace["_execute"]
    # Clear the traceback module's cache of source data now that
    # we've generated a new template (mainly for this module's
    # unittests, where different tests reuse the same name).
    linecache.clearcache()
    try:
        rv = execute()
        assert isinstance(rv, Deferred), rv
        if hasattr(rv, "result"):
            # Deferred is already resolved.
            # Return the result immidiatly to avoid compatibility problems.
            rv = rv.result
            if isinstance(rv, Failure):
                rv.raiseException()
        return rv
    except:
        raise TemplateError(
            "Error executing template " + self.name + ":\n" +
            _format_code(traceback.format_exception(*sys.exc_info())))
def send_admin_pgp_alerts(self, admin_desc, expired_or_expiring):
    """Warn every admin about expired or expiring PGP keys.

    Renders the alert once in the reference admin's language, then
    mails the same message to every admin user.
    """
    language = admin_desc['language']
    node_desc = yield admin_serialize_node(language)
    notification_settings = yield get_notification(language)

    # Minimal event structure the templating layer expects.
    event = OD()
    event.type = u'admin_pgp_expiration_alert'
    event.node_info = node_desc
    event.context_info = None
    event.receiver_info = None
    event.tip_info = None
    event.subevent_info = {'expired_or_expiring': expired_or_expiring}

    subject = Templating().format_template(
        notification_settings['admin_pgp_alert_mail_title'], event)
    body = Templating().format_template(
        notification_settings['admin_pgp_alert_mail_template'], event)

    admin_users = yield get_admin_users()
    for admin in admin_users:
        yield sendmail(admin['mail_address'], subject, body)
def filter_notification_event(notifque):
    """
    :param notifque: the current notification event queue
    :return: a modified queue in the case some email has not to be sent

    Basically performs two filtering; they are defined in:
        1) issue #444
        2) issue #798
    """
    # Here we collect the Storm event of Files having as key the Tip
    files_event_by_tip = {}

    _tmp_list = []
    return_filtered_list = []
    # to be smoked Storm.id
    orm_id_to_be_skipped = []

    # First pass: record which Tips have a notification queued, so File
    # events belonging to those Tips can be suppressed below.
    for ne in notifque:
        if ne['trigger'] != u'Tip':
            continue
        files_event_by_tip.update({ne['tip_info']['id']: []})

    log.debug("Filtering function: iterating over %d Tip" %
              len(files_event_by_tip.keys()))
    # now files_event_by_tip contains N keys with an empty list,
    # I'm looping two times because dict has random ordering
    for ne in notifque:
        # Admin kill-switch: mark everything as skipped (treated as sent).
        if GLSettings.memory_copy.disable_receiver_notification_emails:
            orm_id_to_be_skipped.append(ne['orm_id'])
            continue

        if ne['trigger'] != u'File':
            _tmp_list.append(ne)
            continue

        # File event that is part of a submission whose Tip is already
        # being notified: suppress it (issue #444).
        if ne['tip_info']['id'] in files_event_by_tip:
            orm_id_to_be_skipped.append(ne['orm_id'])
        else:
            _tmp_list.append(ne)

    if len(orm_id_to_be_skipped):
        if GLSettings.memory_copy.disable_receiver_notification_emails:
            log.debug("All the %d mails will be marked as sent because the admin has disabled receivers notifications" %
                      len(orm_id_to_be_skipped))
        else:
            log.debug("Filtering function: Marked %d Files notification to be suppressed cause part of a submission" %
                      len(orm_id_to_be_skipped))

    # Second filter (issue #798): per-receiver hourly rate limiting.
    for ne in _tmp_list:
        receiver_id = ne['receiver_info']['id']
        sent_emails = GLSettings.get_mail_counter(receiver_id)

        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.debug("Discarding email for receiver %s due to threshold already exceeded for the current hour" %
                      receiver_id)
            orm_id_to_be_skipped.append(ne['orm_id'])
            continue

        GLSettings.increment_mail_counter(receiver_id)
        # This event is the one that reaches the limit: replace it with a
        # single "limit reached" anomaly notification for the receiver.
        if sent_emails + 1 >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.info("Reached threshold of %d emails with limit of %d for receiver %s" % (
                sent_emails,
                GLSettings.memory_copy.notification_threshold_per_hour,
                receiver_id)
            )

            # Append
            anomalyevent = OD()
            anomalyevent.type = u'receiver_notification_limit_reached'
            # NOTE(review): attribute access here assumes queue items are
            # ObjectDict instances, while the loops above use dict-style
            # indexing on the same items — confirm against the producer.
            anomalyevent.notification_settings = ne.notification_settings
            anomalyevent.node_info = ne.node_info
            anomalyevent.context_info = None
            anomalyevent.receiver_info = ne.receiver_info
            anomalyevent.tip_info = None
            anomalyevent.subevent_info = None
            anomalyevent.orm_id = '0'

            return_filtered_list.append(anomalyevent)
            orm_id_to_be_skipped.append(ne['orm_id'])
            continue

        return_filtered_list.append(ne)

    log.debug("Mails filtering completed passing from #%d to #%d events" %
              (len(notifque), len(return_filtered_list)))

    # return the new list of event and the list of Storm.id
    return return_filtered_list, orm_id_to_be_skipped
def parse_config(filename=None):
    """Parse the INI configuration file and return a settings dict.

    :param filename: path to the config file; when None, the default
                     location from config_file_path() is used.
    :return: dict with web-server, path, postgresql, redis and
             search-engine settings; optional backends are set to None
             when their section is disabled.

    Side effect: records the absolute path of the first config file
    found in the module-level CONFIG_FILE_PATH global.
    """
    if filename is None:
        filename = config_file_path()

    global CONFIG_FILE_PATH
    if CONFIG_FILE_PATH is None and os.path.isfile(filename):
        CONFIG_FILE_PATH = os.path.abspath(filename)

    cfg = ConfigParser.RawConfigParser()
    with open(filename) as fp:
        cfg.readfp(fp)

    settings = {}

    # web server settings; xget supplies the default when the option is absent
    settings["debug"] = xget(cfg.getboolean, "server", "debug", False)
    settings["xheaders"] = xget(cfg.getboolean, "server", "xheaders", False)
    settings["enable_logging"] = xget(cfg.getboolean, "server", "enable_logging", False)
    settings["enable_caching"] = xget(cfg.getboolean, "server", "enable_caching", False)
    settings["base_url"] = cfg.get("server", "base_url")

    # get project's absolute path
    # (fix: removed stray debug `print root` that polluted stdout on every call)
    root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    getpath = lambda k, v: os.path.join(root, xget(cfg.get, k, v))

    # locale, template and static directories' path
    settings["locale_path"] = getpath("frontend", "locale_path")
    settings["static_path"] = getpath("frontend", "static_path")
    settings["template_path"] = getpath("frontend", "template_path")
    settings["project_selection"] = cfg.get("frontend", "project_selection")

    # postgresql support
    if xget(cfg.getboolean, "postgresql", "enabled", False):
        settings["postgresql_settings"] = ObjectDict(
            host=cfg.get("postgresql", "host"),
            port=cfg.getint("postgresql", "port"),
            database=cfg.get("postgresql", "database"),
            poolsize=cfg.getint("postgresql", "poolsize"),
            username=cfg.get("postgresql", "username"),
            password=cfg.get("postgresql", "password"))
    else:
        settings["postgresql_settings"] = None

    # redis support
    if xget(cfg.getboolean, "redis", "enabled", False):
        settings["redis_settings"] = ObjectDict(
            host=cfg.get("redis", "host"),
            port=cfg.getint("redis", "port"),
            dbid=cfg.getint("redis", "dbid"),
            poolsize=cfg.getint("redis", "poolsize"),
        )
    else:
        settings["redis_settings"] = None

    # NOTE(review): port is read with cfg.get (a string) here while the
    # other sections use cfg.getint — confirm downstream consumers expect
    # a string before changing it.
    settings['search_engine'] = ObjectDict(
        host=cfg.get('search_engine', 'host'),
        port=cfg.get('search_engine', 'port'),
        index=cfg.get('search_engine', 'index'))

    return settings
def set_session_user(self, uid, username, ipaddr, login_time, status,
                     expire_date, create_time, product_policy, product_name):
    """Store the authenticated user's profile in the server-side session.

    Every argument is copied verbatim into an ObjectDict saved under the
    'session_user' key, then the session is persisted.
    """
    self.session['session_user'] = ObjectDict(
        uid=uid,
        username=username,
        ipaddr=ipaddr,
        login_time=login_time,
        status=status,
        expire_date=expire_date,
        create_time=create_time,
        product_policy=product_policy,
        product_name=product_name,
    )
    self.session.save()
# --- Tail of a function whose `def` line lies before this chunk
# --- (presumably get_glclient_path, given the call further below):
# --- resolves and validates the GLClient application directory.
    glclient_path = os.path.join(get_install_path(), 'GLClient', 'app')
    path = os.path.abspath(glclient_path)
    if not os.path.isdir(path):
        raise ConfigError("GLClient not found at the %s path" % glclient_path)
    return path


def get_db_file():
    """Return the path of the sqlite database file, creating the
    containing _gldata directory under the project root if missing."""
    root = get_root_path()
    db_dir = os.path.join(root, '_gldata')
    if not os.path.isdir(db_dir):
        os.mkdir(db_dir)
    db_file = os.path.join(db_dir, 'glbackend.db')
    return db_file


# Module-level configuration objects: `main` holds user-facing settings,
# `advanced` holds tuning/debug knobs.
main = OD()
advanced = OD()
advanced.debug = True

main.glclient_path = get_glclient_path()
if advanced.debug:
    print "Serving GLClient from %s" % main.glclient_path

main.database_uri = 'sqlite:'+get_db_file()

advanced.db_thread_pool_size = 10
advanced.scheduler_thread_pool_size = 10
advanced.data_dir = os.path.join(get_root_path(), '_gldata')
# -*- encoding: utf-8 -*- # # :authors: Arturo Filastò # :licence: see LICENSE import sys import os import logging from twisted.python import log as txlog from twisted.python.logfile import DailyLogFile from cyclone.util import ObjectDict as OD config = OD() config.debug = True # XXX make this a config option log_file = "/tmp/bridgeherder.log" log_folder = os.path.join('/', *log_file.split('/')[:-1]) log_filename = log_file.split('/')[-1] daily_logfile = DailyLogFile(log_filename, log_folder) class LoggerFactory(object): def __init__(self, options): #print options pass def start(self, application):
# -*- encoding: utf-8 -*- # # :authors: Arturo Filastò # :licence: see LICENSE import sys import os import logging from twisted.python import log as txlog from twisted.python.logfile import DailyLogFile from cyclone.util import ObjectDict as OD config = OD() config.debug = True # XXX make this a config option log_file = "/tmp/bridgeherder.log" log_folder = os.path.join('/', *log_file.split('/')[:-1]) log_filename = log_file.split('/')[-1] daily_logfile = DailyLogFile(log_filename, log_folder) class LoggerFactory(object): def __init__(self, options): #print options pass def start(self, application): logging.basicConfig()
def load_complete_events(store, event_number=GLSetting.notification_limit):
    """
    _complete_ is explicit because do not serialize, but make an OD() of the
    description.

    event_number represent the amount of event that can be returned by the
    function, event to be notified are taken in account later.
    """
    node_desc = db_admin_serialize_node(store, GLSetting.defaults.language)

    event_list = []
    # Only events whose mail has not been sent yet, newest first.
    storedevnts = store.find(EventLogs, EventLogs.mail_sent == False)
    storedevnts.order_by(Desc(EventLogs.creation_date))

    for i, stev in enumerate(storedevnts):
        if len(event_list) == event_number:
            log.debug(
                "Maximum number of notification event reach (Mailflush) %d, after %d"
                % (event_number, i))
            break

        # Honour the receiver's per-kind notification opt-outs: skip the
        # event when the receiver disabled that specific notification type.
        if not stev.description['receiver_info']['file_notification'] and \
                stev.event_reference['kind'] == 'File':
            continue
        if not stev.description['receiver_info']['message_notification'] and \
                stev.event_reference['kind'] == 'Message':
            continue
        if not stev.description['receiver_info']['comment_notification'] and \
                stev.event_reference['kind'] == 'Comment':
            continue
        if not stev.description['receiver_info']['tip_notification'] and \
                stev.event_reference['kind'] == 'Tip':
            continue

        eventcomplete = OD()

        # node level information are not stored in the node, but fetch now
        # NOTE(review): store.find(Notification).one() is re-queried for every
        # event in the loop — looks loop-invariant apart from the receiver
        # language; confirm before hoisting.
        eventcomplete.notification_settings = admin_serialize_notification(
            store.find(Notification).one(),
            stev.description['receiver_info']['language'])

        eventcomplete.node_info = node_desc

        # event level information are decoded form DB in the old
        # 'Event'|nametuple format:
        eventcomplete.receiver_info = stev.description['receiver_info']
        eventcomplete.tip_info = stev.description['tip_info']
        eventcomplete.subevent_info = stev.description['subevent_info']
        eventcomplete.context_info = stev.description['context_info']
        eventcomplete.steps_info = stev.description['steps_info']

        eventcomplete.type = stev.description['type']  # 'Tip', 'Comment'
        eventcomplete.trigger = stev.event_reference['kind']  # 'plaintext_blah' ...

        eventcomplete.storm_id = stev.id

        event_list.append(eventcomplete)

    return event_list
def set_session_user(self, username, ipaddr, opr_type, login_time):
    """Persist the operator's login context in the session.

    Copies identity fields (duplicated under both the legacy and the
    current key names), resolves the operator's permitted rule paths,
    and attaches the agency binding when one exists, then saves the
    session under the 'session_opr' key.
    """
    rule_rows = self.db.query(models.TrOperatorRule).filter_by(
        operator_name=username)
    agency = self.db.query(models.TrAgency).filter_by(
        operator_name=username).first()

    opr = ObjectDict()
    # identity (stored twice for backward-compatible key names)
    opr.operator_name = username
    opr.username = username
    opr.operate_ip = ipaddr
    opr.ipaddr = ipaddr
    opr.opr_type = opr_type
    opr.login_time = login_time
    # authorization: rule paths this operator may access
    opr.resources = [row.rule_path for row in rule_rows]
    # agency binding, when the operator belongs to one
    opr.agency_id = agency.id if agency else None
    opr.agency_name = agency.agency_name if agency else None

    self.session['session_opr'] = opr
    self.session.save()
'help': '', 'hint': 'check if this applies', 'required': False}, {'name': 'option', 'label': 'What option?', 'type': 'radio', 'help': 'Place here your help text', 'options': [{'label': 'Option 1', 'value': 'option1'}, {'label': 'Option 2', 'value': 'option2'}], 'hint': 'Pick one of these many options', 'required': False, } ] info = OD() info.name = "Node Name" info.description = lorem info.statistics = {'x': 20, 'y': 300, 'z': 123} info.properties = {'x': True, 'y': False} info.https_address = 'https://example.com/' info.httpo_address = 'httpo://foobar.onion/' info.contexts = [{'id': 0, 'name': 'context1', 'groups': groups, 'fields': fields, 'description': lorem, 'style': 'default', 'creation_date': footime, 'update_date': footime}, {'id': 1, 'name': 'context2',
def parse_msg(xml):
    """Parse a WeChat push-message XML payload into an ObjectDict.

    :param xml: raw XML string from the WeChat server; may be empty/None.
    :return: ObjectDict with the common fields (mid, type, touser,
             fromuser, time) plus the type-specific fields, or None when
             xml is empty.
    :raises xml.etree.ElementTree.ParseError: if xml is not well-formed.
    """
    if not xml:
        return None

    parser = ElementTree.fromstring(xml)
    # Common envelope fields; parse_node returns the default when the
    # node is missing.
    msg_id = parse_node(parser, 'MsgId', get_uuid())
    msg_type = parse_node(parser, 'MsgType')
    touser = parse_node(parser, 'ToUserName')
    fromuser = parse_node(parser, 'FromUserName')
    create_at = int(parse_node(parser, 'CreateTime', 0))

    msg = ObjectDict(
        mid=msg_id,
        type=msg_type,
        touser=touser,
        fromuser=fromuser,
        time=create_at
    )

    # Type-specific payload fields.
    if msg_type == MSG_TYPE_TEXT:
        msg.content = parse_node(parser, 'Content')
    elif msg_type == MSG_TYPE_LOCATION:
        msg.location_x = parse_node(parser, 'Location_X')
        msg.location_y = parse_node(parser, 'Location_Y')
        # fix: default 0 prevents int(None) TypeError when Scale is absent
        # (matches how CreateTime is handled above)
        msg.scale = int(parse_node(parser, 'Scale', 0))
        msg.label = parse_node(parser, 'Label')
    elif msg_type == MSG_TYPE_IMAGE:
        msg.picurl = parse_node(parser, 'PicUrl')
    elif msg_type == MSG_TYPE_VOICE:
        msg.media_id = parse_node(parser, 'MediaId')
        msg.format = parse_node(parser, 'Format')
    elif msg_type == MSG_TYPE_VIDEO:
        msg.media_id = parse_node(parser, 'MediaId')
        msg.thumb = parse_node(parser, 'ThumbMediaId')
    elif msg_type == MSG_TYPE_LINK:
        msg.title = parse_node(parser, 'Title')
        # fix: Element.find() returns None when the node is absent, so the
        # original unconditional `.text` raised AttributeError; guard it
        # like every other optional field.
        desc_node = parser.find('Description')
        msg.description = decode(desc_node.text) if desc_node is not None else None
        msg.url = parse_node(parser, 'Url')
    elif msg_type == MSG_TYPE_EVENT:
        msg.event = parse_node(parser, 'Event')
        msg.event_key = parse_node(parser, 'EventKey')
        msg.ticket = parse_node(parser, 'Ticket')
        if msg.event == u'LOCATION':
            msg.latitude = parse_node(parser, 'Latitude')
            msg.longitude = parse_node(parser, 'Longitude')
            msg.precision = parse_node(parser, 'Precision')

    return msg