def get_twitter_api(self):
    """
    Initialize the Twitter API.

    Loads tweepy, caches the module on the instance, reads the OAuth
    credentials from the msg_twitter_settings table plus the
    deployment settings, and authenticates.

    Fixes:
    - added the missing self parameter: the function is invoked as
      self.get_twitter_api() elsewhere in this file and assigns
      self.tweepy, so it must be an instance method
    - twitter_account was read from the undefined name
      tmp_twitter_settings (NameError); corrected to twitter_settings

    @return: dict with keys "twitter_api" (authenticated tweepy.API)
             and "twitter_account" (screen name), or None if tweepy
             is unavailable, no settings exist or auth fails
    """
    try:
        import tweepy
    except ImportError:
        s3_debug("s3msg", "Tweepy not available, so non-Tropo Twitter support disabled")
        return None
    else:
        # Cache the module for other methods (e.g. send_text_via_twitter)
        self.tweepy = tweepy

    db = current.db
    manager = current.manager
    settings = current.deployment_settings

    manager.load("msg_twitter_settings")
    query = (db.msg_twitter_settings.id > 0)
    twitter_settings = db(query).select(limitby=(0, 1)).first()
    if twitter_settings and twitter_settings.twitter_account:
        try:
            oauth = tweepy.OAuthHandler(settings.twitter.oauth_consumer_key,
                                        settings.twitter.oauth_consumer_secret)
            oauth.set_access_token(twitter_settings.oauth_key,
                                   twitter_settings.oauth_secret)
            twitter_api = tweepy.API(oauth)
            # was: tmp_twitter_settings.twitter_account (undefined name)
            twitter_account = twitter_settings.twitter_account
            return dict(twitter_api=twitter_api,
                        twitter_account=twitter_account)
        except Exception:
            # Best-effort: any auth failure falls through to return None
            pass

    return None
def execute_special_task(self, task):
    """
    Execute a type-2 ("special") import task: look up the named
    setup function in response.s3, call it with whichever of the
    optional csv / extraArgs values are present, record any error
    and log the elapsed time.

    @param task: tuple (2, function_name, csv, extraArgs)
    """
    start = datetime.datetime.now()
    if task[0] != 2:
        return
    fun, csv, extraArgs = task[1], task[2], task[3]
    handler = current.response.s3[fun]
    # csv (when given) is always the first positional argument,
    # extraArgs (when given) follows
    call_args = [arg for arg in (csv, extraArgs) if arg is not None]
    error = handler(*call_args)
    if error:
        self.errorList.append(error)
    elapsed = datetime.datetime.now() - start
    try:
        # Python-2.7: timedelta.total_seconds() available
        mins = '{:.2f}'.format(elapsed.total_seconds() / 60)
        msg = "%s import job completed in %s mins" % (fun, mins)
    except AttributeError:
        # older Python: report the raw timedelta
        msg = "%s import job completed in %s" % (fun, elapsed)
    self.resultList.append(msg)
    if current.session.s3.debug:
        s3_debug(msg)
def execute_special_task(self, task):
    """
    Execute a "special" import task (type 2): the function named in
    the task is fetched from response.s3 and called with the
    optional csv / extraArgs values; errors and timing are recorded.

    @param task: tuple (2, function_name, csv, extraArgs)
    """
    start = datetime.datetime.now()
    if task[0] == 2:
        fun = task[1]
        csv = task[2]
        extraArgs = task[3]
        # Dispatch with exactly the arguments that were provided;
        # csv (if any) always comes first
        if csv is None:
            if extraArgs is None:
                error = current.response.s3[fun]()
            else:
                error = current.response.s3[fun](extraArgs)
        elif extraArgs is None:
            error = current.response.s3[fun](csv)
        else:
            error = current.response.s3[fun](csv, extraArgs)
        if error:
            self.errorList.append(error)
        end = datetime.datetime.now()
        duration = end - start
        try:
            # Python-2.7: timedelta.total_seconds() available
            duration = '{:.2f}'.format(duration.total_seconds() / 60)
            msg = "%s import job completed in %s mins" % (fun, duration)
        except AttributeError:
            # older Python: fall back to the raw timedelta repr
            msg = "%s import job completed in %s" % (fun, duration)
        self.resultList.append(msg)
        if current.session.s3.debug:
            s3_debug(msg)
def get_twitter_api(self):
    """
    Initialize the Twitter API (duplicate of the variant above).

    Fixes applied:
    - missing self parameter added (the function assigns self.tweepy
      and is called as a method elsewhere in this file)
    - twitter_account was read from the undefined name
      tmp_twitter_settings (NameError); corrected

    @return: dict(twitter_api=..., twitter_account=...) on success,
             else None
    """
    try:
        import tweepy
    except ImportError:
        s3_debug("s3msg", "Tweepy not available, so non-Tropo Twitter support disabled")
        return None
    else:
        # Cache the module for other methods
        self.tweepy = tweepy

    db = current.db
    manager = current.manager
    settings = current.deployment_settings

    manager.load("msg_twitter_settings")
    query = db.msg_twitter_settings.id > 0
    twitter_settings = db(query).select(limitby=(0, 1)).first()
    if twitter_settings and twitter_settings.twitter_account:
        try:
            oauth = tweepy.OAuthHandler(settings.twitter.oauth_consumer_key,
                                        settings.twitter.oauth_consumer_secret)
            oauth.set_access_token(twitter_settings.oauth_key,
                                   twitter_settings.oauth_secret)
            twitter_api = tweepy.API(oauth)
            # was: tmp_twitter_settings.twitter_account (undefined name)
            twitter_account = twitter_settings.twitter_account
            return dict(twitter_api=twitter_api,
                        twitter_account=twitter_account)
        except Exception:
            # Best-effort: fall through to None on auth failure
            pass

    return None
def parser(function_name, message_id, **kwargs):
    """
    1st Stage Parser - called by msg.parse()

    Retrieves the message, establishes the appropriate authorisation
    (keep-alive session or credentials embedded in the message), then
    calls the named parser function from the deployment template and
    sends any reply back to the sender.

    Fixes: the bare except around getattr() has been narrowed to
    AttributeError (it previously masked unrelated errors raised
    while resolving the parser), and the user-facing reply typo
    "succesful" is corrected.

    @param function_name: name of the S3Parser method to invoke
    @param message_id: msg_message record identifier
    @return: None (the reply, if any, is dispatched via current.msg)
    """
    reply = None
    s3db = current.s3db

    # Retrieve the message
    table = s3db.msg_message
    message = current.db(table.message_id == message_id).select(limitby=(0, 1)
                                                                ).first()
    from_address = message.from_address
    if "<" in from_address:
        # Reduce "Display Name <address>" to the bare address
        from_address = from_address.split("<")[1].split(">")[0]

    email = S3Parsing.is_session_alive(from_address)
    if email:
        # Session still alive: act as that user
        current.auth.s3_impersonate(email)
    else:
        # Try credentials embedded in the message
        (email, password) = S3Parsing.parse_login(message)
        if email and password:
            current.auth.login_bare(email, password)
            expiration = current.session.auth["expiration"]
            table = s3db.msg_session
            table.insert(email=email,
                         expiration_time=expiration,
                         from_address=from_address)
            reply = "Login successful"
            # The message may have multiple purposes
            #return reply

    # Load the Parser template for this deployment
    template = current.deployment_settings.get_msg_parser()
    module_name = "applications.%s.private.templates.%s.parser" \
                  % (current.request.application, template)
    __import__(module_name)
    mymodule = sys.modules[module_name]
    S3Parser = mymodule.S3Parser()

    # Pass the message to the parser
    try:
        fn = getattr(S3Parser, function_name)
    except AttributeError:
        # Narrowed from a bare except: only a missing attribute means
        # "parser not found"
        from s3utils import s3_debug
        s3_debug("Parser not found: %s" % function_name)
        return None

    reply = fn(message, **kwargs) or reply
    if not reply:
        return None

    # Send Reply
    current.msg.send(from_address, reply)
def parser(function_name, message_id, **kwargs):
    """
    1st Stage Parser - called by msg.parse()
    Sets the appropriate Authorisation level and then calls the
    parser function from the template

    @param function_name: name of the S3Parser method to invoke
    @param message_id: msg_message record identifier
    """
    reply = None
    s3db = current.s3db

    # Retrieve Message
    table = s3db.msg_message
    message = current.db(table.message_id == message_id).select(limitby=(0, 1)
                                                                ).first()
    from_address = message.from_address
    if "<" in from_address:
        # Reduce "Display Name <address>" to the bare address
        from_address = from_address.split("<")[1].split(">")[0]
    email = S3Parsing.is_session_alive(from_address)
    if email:
        # Session still alive: act as that user
        current.auth.s3_impersonate(email)
    else:
        # Try credentials embedded in the message
        (email, password) = S3Parsing.parse_login(message)
        if email and password:
            current.auth.login_bare(email, password)
            expiration = current.session.auth["expiration"]
            table = s3db.msg_session
            table.insert(email = email,
                         expiration_time = expiration,
                         from_address = from_address)
            reply = "Login succesful"
            # The message may have multiple purposes
            #return reply

    # Load the Parser template for this deployment
    template = current.deployment_settings.get_msg_parser()
    module_name = "applications.%s.private.templates.%s.parser" \
                  % (current.request.application, template)
    __import__(module_name)
    mymodule = sys.modules[module_name]
    S3Parser = mymodule.S3Parser()

    # Pass the message to the parser
    try:
        fn = getattr(S3Parser, function_name)
    except:
        # NOTE(review): bare except - catching AttributeError only
        # would be safer, since this also hides unrelated errors
        from s3utils import s3_debug
        s3_debug("Parser not found: %s" % function_name)
        return None
    reply = fn(message, **kwargs) or reply
    if not reply:
        return None
    # Send Reply
    current.msg.send(from_address, reply)
def encode(self, resource, **attr):
    """
    Export data as a Scalable Vector Graphic

    @param resource: the source of the data that is to be encoded
                     as an SVG. This may be:
                     resource: the resource
                     item: a list of pre-fetched values
                           the headings are in the first row
                           the data types are in the second row
    @param attr: dictionary of parameters:
                 * title: The export filename
                 * list_fields: Fields to include in list views
    """
    # @ToDo: PostGIS can extract SVG from DB (like GeoJSON)
    # http://postgis.refractions.net/documentation/manual-1.4/ST_AsSVG.html
    if resource.prefix == "gis" and resource.name == "location":
        list_fields = ["wkt"]
    else:
        # Fix: list_fields was previously read here before ever being
        # assigned (its initialisation from attr was commented out),
        # raising NameError for any non-gis_location resource
        list_fields = attr.get("list_fields") or []
        if "location_id$wkt" not in list_fields:
            list_fields = ["location_id$wkt"]

    # Clear the WKT represent so we get the raw geometry string
    current.s3db.gis_location.wkt.represent = None

    # Extract the data from the resource
    (_title, types, lfields, headers, items) = self.extractResource(resource,
                                                                    list_fields)

    # @ToDo: Support multiple records
    wkt = items[0]["gis_location.wkt"]
    if not wkt:
        error = "No Geometry!"
        from s3utils import s3_debug
        s3_debug(error)

    # Convert to SVG
    title = attr.get("title", resource._ids[0])
    filename = "%s.svg" % title
    filepath = self.write_file(filename, wkt, **attr)

    # Response headers
    disposition = "attachment; filename=\"%s\"" % filename
    response = current.response
    response.headers["Content-Type"] = contenttype(".svg")
    response.headers["Content-disposition"] = disposition
    stream = open(filepath)
    return response.stream(stream,
                           chunk_size=DEFAULT_CHUNK_SIZE,
                           request=current.request)
def receive_subscribed_tweets(self): """ Function to call to drop the tweets into search_results table - called via cron """ # Initialize Twitter API twitter_settings = self.get_twitter_api() twitter_api = None if twitter_settings: twitter_api = twitter_settings["twitter_api"] if not twitter_api: # Abort return False db = self.db table = db.msg_twitter_search rows = db().select(table.ALL) results_table = db.msg_twitter_search_results # Get the latest updated post time to use it as since_id in twitter search recent_time = results_table.posted_by.max() for row in rows: query = row.search_query try: if recent_time: search_results = twitter_api.search(query, result_type="recent", show_user=True, since_id=recent_time) else: search_results = twitter_api.search(query, result_type="recent", show_user=True) search_results.reverse() for result in search_results: # Check if the tweet already exists in the table tweet_exists = db((results_table.posted_by == result.from_user) & (results_table.posted_at == result.created_at )).select().first() if tweet_exists: continue else: results_table.insert(tweet = result.text, posted_by = result.from_user, posted_at = result.created_at, twitter_search = row.id ) except tweepy.TweepError: s3_debug("Unable to get the Tweets for the user search query.") return False # Explicitly commit DB operations when running from Cron db.commit() return True
def send_text_via_twitter(self, recipient, text=""):
    """
    Send a text to a Twitter user: as a direct message when the
    recipient follows our account, otherwise as an @mention (which
    leaves fewer characters for the message itself). Long texts are
    split into Twitter-sized chunks.

    @ToDo: Option to Send via Tropo

    @param recipient: Twitter screen name of the recipient
    @param text: the message body
    @return: True once dispatch was attempted, False if the API is
             unavailable or the recipient could not be checked
    """
    # Initialize Twitter API
    twitter_settings = self.get_twitter_api()
    tweepy = self.tweepy

    twitter_api = None
    if twitter_settings:
        twitter_api = twitter_settings["twitter_api"]
        twitter_account = twitter_settings["twitter_account"]

    if not twitter_api and text:
        # Abort
        return False

    recipient = self.sanitise_twitter_account(recipient)
    try:
        can_dm = twitter_api.exists_friendship(recipient, twitter_account)
    except tweepy.TweepError:
        # recipient not found
        return False

    if can_dm:
        for chunk in self.break_to_chunks(text, TWITTER_MAX_CHARS):
            try:
                # Note: send_direct_message() requires explicit kwargs (at least in tweepy 1.5)
                # See http://groups.google.com/group/tweepy/msg/790fcab8bc6affb5
                twitter_api.send_direct_message(screen_name=recipient,
                                                text=chunk)
            except tweepy.TweepError:
                s3_debug("Unable to Tweet DM")
    else:
        prefix = "@%s " % recipient
        for chunk in self.break_to_chunks(text,
                                          TWITTER_MAX_CHARS - len(prefix)):
            try:
                twitter_api.update_status(prefix + chunk)
            except tweepy.TweepError:
                s3_debug("Unable to Tweet @mention")
    return True
def send_sms_via_modem(self, mobile, text=""):
    """
    Send an SMS through the locally-attached modem.

    Needs the cron/sms_handler_modem.py script to be running.

    @param mobile: destination number (sanitised, then prefixed with
                   "+" and the country code)
    @param text: the message body
    @return: True on success, False when no modem is available
    """
    # Normalise the number and prepend "+" before the country code
    number = "+%s" % self.sanitise_phone(mobile)
    try:
        self.modem.send_sms(number, text)
        return True
    except KeyError:
        # No modem configured/attached
        s3_debug("s3msg", "Modem not available: need to have the cron/sms_handler_modem.py script running")
        return False
def schedule_task(self,
                  task,
                  args=None, # args to pass to the task
                  vars=None, # vars to pass to the task
                  function_name=None,
                  start_time=None,
                  next_run_time=None,
                  stop_time=None,
                  repeats=None,
                  period=None,
                  timeout=None,
                  enabled=None, # None = Enabled
                  group_name=None,
                  ignore_duplicate=False):
    """
    Schedule a task in web2py Scheduler

    @param task: name of the function/task to be scheduled
    @param args: args to be passed to the scheduled task
    @param vars: vars to be passed to the scheduled task
    @param function_name: function name (if different from task name)
    @param start_time: start_time for the scheduled task
    @param next_run_time: next_run_time for the scheduled task
    @param stop_time: stop_time for the scheduled task
    @param repeats: number of times the task to be repeated
    @param period: time period between two consecutive runs
    @param timeout: set timeout for a running task
    @param enabled: enabled flag for the scheduled task
    @param group_name: group_name for the scheduled task
    @param ignore_duplicate: disable or enable duplicate checking

    @return: the new scheduler_task record id, or False if a
             duplicate task exists
    """
    # Fix: the previous [] / {} defaults were shared across calls -
    # the vars["user_id"] assignment below mutated the default dict
    if args is None:
        args = []
    if vars is None:
        vars = {}

    kwargs = {}

    if function_name is None:
        function_name = task

    # storing valid keyword arguments only if they are provided
    if start_time:
        kwargs["start_time"] = start_time

    if next_run_time:
        kwargs["next_run_time"] = next_run_time
    elif start_time:
        # default it to start_time
        kwargs["next_run_time"] = start_time

    if stop_time:
        kwargs["stop_time"] = stop_time
    elif start_time:
        # default it to one day ahead of given start_time
        if not isinstance(start_time, datetime.datetime):
            start_time = datetime.datetime.strptime(start_time,
                                                    "%Y-%m-%d %H:%M:%S")
        stop_time = start_time + datetime.timedelta(days=1)
        # Fix: the computed default was previously dropped - it was
        # never stored into kwargs, so the scheduler never saw it
        kwargs["stop_time"] = stop_time

    if repeats is not None:
        kwargs["repeats"] = repeats
    if period:
        kwargs["period"] = period
    if timeout:
        kwargs["timeout"] = timeout
    if enabled is not None:
        # NB None => enabled
        kwargs["enabled"] = enabled
    if group_name:
        kwargs["group_name"] = group_name

    if not ignore_duplicate and \
       self._duplicate_task_exists(task, args, vars):
        # if duplicate task exists, do not insert a new one
        s3_debug("Duplicate Task, Not Inserted", value=task)
        return False

    auth = current.auth
    if auth.is_logged_in():
        # Add the current user to the vars
        vars["user_id"] = auth.user.id

    # Add to DB for pickup by Scheduler task
    db = current.db
    record = db.scheduler_task.insert(task_name=task,
                                      function_name=function_name,
                                      args=json.dumps(args),
                                      vars=json.dumps(vars),
                                      **kwargs)
    return record
def execute_import_task(self, task):
    """
    Method that will execute each import job, in order

    @param task: tuple
                 (1, prefix, name, csv-filename, stylesheet, extra-data)
    """
    start = datetime.datetime.now()
    if task[0] == 1:
        manager = current.manager
        db = current.db
        request = current.request
        response = current.response
        errorString = "prepopulate error: file %s missing"
        # Store the view
        view = response.view
        _debug("Running job %s %s (filename=%s transform=%s)" % (task[1],
                                                                 task[2],
                                                                 task[3],
                                                                 task[4]))
        prefix = task[1]
        name = task[2]
        tablename = "%s_%s" % (prefix, name)
        if tablename in self.alternateTables:
            # Some jobs load into a different table than their name suggests
            details = self.alternateTables[tablename]
            if "tablename" in details:
                tablename = details["tablename"]
            manager.load(tablename)
            if "loader" in details:
                loader = details["loader"]
                if loader is not None:
                    loader()
            if "prefix" in details:
                prefix = details["prefix"]
            if "name" in details:
                name = details["name"]

        try:
            resource = manager.define_resource(prefix, name)
        except KeyError:
            # Table cannot be loaded
            self.errorList.append(
                "WARNING: Unable to find table %s import job skipped" % tablename)
            return

        # Check if the source file is accessible
        try:
            csv = open(task[3], "r")
        except IOError:
            self.errorList.append(errorString % task[3])
            return

        # Check if the stylesheet is accessible
        try:
            open(task[4], "r")
        except IOError:
            self.errorList.append(errorString % task[4])
            return

        extra_data = None
        if task[5]:
            try:
                # Extra data is stored XML-escaped with single quotes;
                # convert the quotes so it parses as JSON
                extradata = unescape(task[5], {"'": '"'})
                extradata = json.loads(extradata)
                extra_data = extradata
            except:
                # Best-effort: unparsable extra data is silently ignored
                pass
        try:
            # @todo: add extra_data and file attachments
            result = resource.import_xml(csv,
                                         format="csv",
                                         stylesheet=task[4],
                                         extra_data=extra_data)
        except SyntaxError, e:
            self.errorList.append("WARNING: import error - %s" % e)
            return

        if not resource.error:
            db.commit()
        else:
            # Must roll back if there was an error!
            error = resource.error
            self.errorList.append("%s - %s: %s" % (task[3],
                                                   resource.tablename,
                                                   error))
            errors = current.manager.xml.collect_errors(resource)
            if errors:
                self.errorList.extend(errors)
            db.rollback()

        # Restore the view
        response.view = view
        end = datetime.datetime.now()
        duration = end - start
        # Report just the file name, not the full path
        csvName = task[3][task[3].rfind("/") + 1:]
        try:
            # Python-2.7
            duration = '{:.2f}'.format(duration.total_seconds() / 60)
            msg = "%s import job completed in %s mins" % (csvName, duration)
        except AttributeError:
            # older Python
            msg = "%s import job completed in %s" % (csvName, duration)
        self.resultList.append(msg)
        if current.session.s3.debug:
            s3_debug(msg)
def write_file(filename, wkt, **attr):
    """
    Render a WKT geometry as a small SVG icon and write it to the
    static cache folder.

    NOTE(review): called as self.write_file() by encode(), so this is
    presumably declared as a @staticmethod in the enclosing class -
    confirm the decorator is present above this def.

    Fix: when the geometry type was unsupported, execution previously
    fell through to the rendering loop and raised NameError because
    "polygons" was never assigned; now returns None early.

    @param filename: name of the SVG file to create
    @param wkt: geometry as a WKT string (Polygon or MultiPolygon)
    @param attr: dictionary of parameters (unused here so far)
    @return: path of the written file, or None for unsupported geometry
    """
    from xml.etree import ElementTree as et

    # Create an SVG XML element
    # @ToDo: Allow customisation of height/width
    iheight = 74
    height = str(iheight)
    iwidth = 74
    width = str(iwidth)
    doc = et.Element("svg", width=width, height=height, version="1.1",
                     xmlns="http://www.w3.org/2000/svg")

    # Convert WKT
    from shapely.wkt import loads as wkt_loads
    try:
        # Enable C-based speedups available from 1.2.10+
        from shapely import speedups
        speedups.enable()
    except:
        from s3utils import s3_debug
        s3_debug("S3GIS", "Upgrade Shapely for Performance enhancements")

    shape = wkt_loads(wkt)

    # Scale Points & invert Y axis
    from shapely import affinity
    bounds = shape.bounds # (minx, miny, maxx, maxy)
    swidth = abs(bounds[2] - bounds[0])
    sheight = abs(bounds[3] - bounds[1])
    width_multiplier = iwidth / swidth
    height_multiplier = iheight / sheight
    # 0.9 leaves a small margin inside the icon
    multiplier = min(width_multiplier, height_multiplier) * 0.9
    shape = affinity.scale(shape, xfact=multiplier, yfact=-multiplier,
                           origin="centroid")

    # Center Shape
    #centroid = shape.centroid
    #xoff = (iwidth / 2) - centroid.x
    #yoff = (iheight / 2) - centroid.y
    #affinity.translate(shape, xoff=xoff, yoff=yoff)

    geom_type = shape.geom_type
    if geom_type == "MultiPolygon":
        polygons = shape.geoms
    elif geom_type == "Polygon":
        polygons = [shape]
    else:
        error = "Unsupported Geometry: %s" % geom_type
        from s3utils import s3_debug
        s3_debug(error)
        # Fix: previously fell through and raised NameError on
        # "polygons" below
        return None
    # @ToDo:
    #elif geom_type == "LineString":
    #    _points = shape
    #elif geom_type == "Point":
    #    _points = [shape]

    points = []
    pappend = points.append
    for polygon in polygons:
        _points = polygon.exterior.coords
        for point in _points:
            pappend("%s,%s" % (point[0], point[1]))

    points = " ".join(points)

    # Wrap in Square for Icon
    # @ToDo: Anti-Aliased Rounded Corners
    # @ToDo: Make optional
    fill = "rgb(167, 192, 210)"
    stroke = "rgb(114, 129, 145)"
    et.SubElement(doc, "rect", width=width, height=height, fill=fill,
                  stroke=stroke)

    # @ToDo: Allow customisation of options
    fill = "rgb(225, 225, 225)"
    stroke = "rgb(165, 165, 165)"
    et.SubElement(doc, "polygon", points=points, fill=fill, stroke=stroke)

    # @ToDo: Add Attributes from list_fields

    # Write out File
    path = os.path.join(current.request.folder, "static", "cache", "svg")
    if not os.path.exists(path):
        # Create the cache folder on first use
        os.makedirs(path)
    filepath = os.path.join(path, filename)
    with open(filepath, "w") as f:
        # ElementTree 1.2 doesn't write the SVG file header errata, so do that manually
        f.write("<?xml version=\"1.0\" standalone=\"no\"?>\n")
        f.write("<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n")
        f.write("\"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n")
        f.write(et.tostring(doc))
    return filepath
def execute_import_task(self, task):
    """
    Method that will execute each import job, in order

    @param task: tuple
                 (1, prefix, name, csv-filename, stylesheet, extra-data)
    """
    start = datetime.datetime.now()
    if task[0] == 1:
        manager = current.manager
        db = current.db
        request = current.request
        response = current.response
        errorString = "prepopulate error: file %s missing"
        # Store the view
        view = response.view
        _debug ("Running job %s %s (filename=%s transform=%s)" % (task[1],
                                                                  task[2],
                                                                  task[3],
                                                                  task[4]))
        prefix = task[1]
        name = task[2]
        tablename = "%s_%s" % (prefix, name)
        if tablename in self.alternateTables:
            # Some jobs load into a different table than their name suggests
            details = self.alternateTables[tablename]
            if "tablename" in details:
                tablename = details["tablename"]
            manager.load(tablename)
            if "loader" in details:
                loader = details["loader"]
                if loader is not None:
                    loader()
            if "prefix" in details:
                prefix = details["prefix"]
            if "name" in details:
                name = details["name"]

        try:
            resource = manager.define_resource(prefix, name)
        except KeyError:
            # Table cannot be loaded
            self.errorList.append(
                "WARNING: Unable to find table %s import job skipped" % tablename)
            return

        # Check if the source file is accessible
        try:
            csv = open(task[3], "r")
        except IOError:
            self.errorList.append(errorString % task[3])
            return

        # Check if the stylesheet is accessible
        try:
            open(task[4], "r")
        except IOError:
            self.errorList.append(errorString % task[4])
            return

        extra_data = None
        if task[5]:
            try:
                # Extra data is stored XML-escaped with single quotes;
                # convert the quotes so it parses as JSON
                extradata = unescape(task[5], {"'": '"'})
                extradata = json.loads(extradata)
                extra_data = extradata
            except:
                # Best-effort: unparsable extra data is silently ignored
                pass
        try:
            # @todo: add extra_data and file attachments
            result = resource.import_xml(csv,
                                         format="csv",
                                         stylesheet=task[4],
                                         extra_data=extra_data)
        except SyntaxError, e:
            self.errorList.append("WARNING: import error - %s" % e)
            return

        if not resource.error:
            db.commit()
        else:
            # Must roll back if there was an error!
            error = resource.error
            self.errorList.append("%s: %s" % (resource.tablename, error))
            db.rollback()

        # Restore the view
        response.view = view
        end = datetime.datetime.now()
        duration = end - start
        # Report just the file name, not the full path
        csvName = task[3][task[3].rfind("/")+1:]
        try:
            # Python-2.7
            duration = '{:.2f}'.format(duration.total_seconds()/60)
            msg = "%s import job completed in %s mins" % (csvName, duration)
        except AttributeError:
            # older Python
            msg = "%s import job completed in %s" % (csvName, duration)
        self.resultList.append(msg)
        if current.session.s3.debug:
            s3_debug(msg)
def process_outbox(self, contact_method="EMAIL"):
    """
    Send Pending Messages from Outbox.
    If successful then move from Outbox to Sent.
    Can be called from Cron

    Fix: in the org_organisation branch, the per-recipient lookup
    assigned to the misspelled name "uery", leaving the previous
    query (the HR membership query) in effect for the pe_id lookup.

    @param contact_method: "EMAIL", "SMS" or "TWITTER"

    @ToDo: contact_method = "ALL"
    """
    db = current.db
    current.manager.load("msg_outbox")

    if contact_method == "SMS":
        table = db.msg_setting
        settings = db(table.id > 0).select(table.outgoing_sms_handler,
                                           limitby=(0, 1)).first()
        if not settings:
            raise ValueError("No SMS handler defined!")
        outgoing_sms_handler = settings.outgoing_sms_handler

    def dispatch_to_pe_id(pe_id):
        # Send the current message to the entity's highest-priority
        # contact of the requested method
        table = db.pr_contact
        query = (table.pe_id == pe_id) & \
                (table.contact_method == contact_method) & \
                (table.deleted == False)
        recipient = db(query).select(table.value,
                                     orderby=table.priority,
                                     limitby=(0, 1)).first()
        if recipient:
            if contact_method == "EMAIL":
                return self.send_email(recipient.value, subject, message)
            elif contact_method == "SMS":
                if outgoing_sms_handler == "WEB_API":
                    return self.send_sms_via_api(recipient.value, message)
                elif outgoing_sms_handler == "SMTP":
                    return self.send_sms_via_smtp(recipient.value, message)
                elif outgoing_sms_handler == "MODEM":
                    return self.send_sms_via_modem(recipient.value, message)
                elif outgoing_sms_handler == "TROPO":
                    # NB This does not mean the message is sent
                    return self.send_text_via_tropo(row.id,
                                                    message_id,
                                                    recipient.value,
                                                    message)
                else:
                    return False
            elif contact_method == "TWITTER":
                return self.send_text_via_twitter(recipient.value, message)
        return False

    table = db.msg_outbox
    ltable = db.msg_log
    ptable = db.pr_person
    petable = db.pr_pentity

    query = (table.status == 1) & \
            (table.pr_message_method == contact_method)
    rows = db(query).select()
    chainrun = False # Used to fire process_outbox again - Used when messages are sent to groups
    for row in rows:
        status = True
        message_id = row.message_id
        query = (ltable.id == message_id)
        logrow = db(query).select(limitby=(0, 1)).first()
        if not logrow:
            s3_debug("s3msg", "logrow not found")
            continue
        # Get message from msg_log
        message = logrow.message
        subject = logrow.subject
        sender_pe_id = logrow.pe_id

        # Determine list of users
        entity = row.pe_id
        query = petable.id == entity
        entity_type = db(query).select(petable.instance_type,
                                       limitby=(0, 1)).first()
        if entity_type:
            entity_type = entity_type.instance_type
        else:
            s3_debug("s3msg", "Entity type unknown")

        if entity_type == "pr_group":
            # Take the entities of it and add in the messaging queue - with
            # sender as the original sender and marks group email processed
            # Set system generated = True
            table3 = db.pr_group
            query = table3.pe_id == entity
            group_id = db(query).select(table3.id,
                                        limitby=(0, 1)).first().id
            table4 = db.pr_group_membership
            query = table4.group_id == group_id
            recipients = db(query).select(table4.person_id)
            for recipient in recipients:
                person_id = recipient.person_id
                query = ptable.id == person_id
                pe_id = db(query).select(ptable.pe_id,
                                         limitby=(0, 1)).first().pe_id
                table.insert(message_id=message_id,
                             pe_id=pe_id,
                             pr_message_method=contact_method,
                             system_generated=True)
            status = True
            chainrun = True

        elif entity_type == "org_organisation":
            # Take the entities of it and add in the messaging queue - with
            # sender as the original sender and marks group email processed
            # Set system generated = True
            table3 = db.org_organisation
            query = table3.pe_id == entity
            org_id = db(query).select(table3.id,
                                      limitby=(0, 1)).first().id
            table4 = db.hrm_human_resource
            query = table4.organisation_id == org_id
            recipients = db(query).select(table4.person_id)
            for recipient in recipients:
                person_id = recipient.person_id
                # Fix: was "uery = ..." (typo), so the stale HR query
                # was reused for the pe_id lookup
                query = ptable.id == person_id
                pe_id = db(query).select(ptable.pe_id,
                                         limitby=(0, 1)).first().pe_id
                table.insert(message_id=message_id,
                             pe_id=pe_id,
                             pr_message_method=contact_method,
                             system_generated=True)
            status = True
            chainrun = True

        if entity_type == "pr_person":
            # Person
            status = dispatch_to_pe_id(entity)

        if status:
            # Update status to sent in Outbox
            db(table.id == row.id).update(status=2)
            # Set message log to actioned
            db(ltable.id == message_id).update(actioned=True)

    # Explicitly commit DB operations when running from Cron
    db.commit()

    if chainrun:
        self.process_outbox(contact_method)

    return
#1: Email #2: SMS #4: Twitter __all__ = ["S3Msg"] import sys import string import urllib from urllib2 import urlopen from s3utils import s3_debug try: import tweepy except ImportError: s3_debug("Tweepy not available, so non-Tropo Twitter support disabled") IDENTITYTRANS = ALLCHARS = string.maketrans("", "") NOTPHONECHARS = ALLCHARS.translate(IDENTITYTRANS, string.digits) NOTTWITTERCHARS = ALLCHARS.translate(IDENTITYTRANS, string.digits + string.letters + '_') TWITTER_MAX_CHARS = 140 TWITTER_HAS_NEXT_SUFFIX = u' \u2026' TWITTER_HAS_PREV_PREFIX = u'\u2026 ' class S3Msg(object): """ Toolkit for hooking into the Messaging framework """ def __init__(self, environment, deployment_settings, db=None, T=None, mail=None, modem=None): try: self.deployment_settings = deployment_settings
def _lookup(self, values, rows=None): """ Lazy lookup values. @param values: list of values to lookup @param rows: rows referenced by values (if values are foreign keys) optional """ theset = self.theset items = {} lookup = {} # Check whether values are already in theset for v in values: if v is None: items[v] = self.none elif v in theset: items[v] = theset[v] else: lookup[v] = True if self.table is None or not lookup: return items # Get the primary key pkey = self.key table = self.table ogetattr = object.__getattribute__ try: key = ogetattr(table, pkey) except AttributeError: return items # Use the given rows to lookup the values pop = lookup.pop represent_row = self.represent_row if rows and not self.custom_lookup: for row in rows: k = row[key] if k not in theset: theset[k] = represent_row(row) if pop(k, None): items[k] = theset[k] # Retrieve additional rows as needed if lookup: if not self.custom_lookup: try: # Need for speed: assume all fields are in table fields = [ogetattr(table, f) for f in self.fields] except AttributeError: # Ok - they are not: provide debug output and filter fields if current.response.s3.debug: from s3utils import s3_debug s3_debug(sys.exc_info()[1]) fields = [ogetattr(table, f) for f in self.fields if hasattr(table, f)] else: fields = [] rows = self.lookup_rows(key, lookup.keys(), fields=fields) for row in rows: k = row[key] lookup.pop(k, None) items[k] = theset[k] = represent_row(row) if lookup: for k in lookup: items[k] = self.default # Done return items
def process_outbox(self, contact_method="EMAIL"):
    """
    Send Pending Messages from Outbox.
    If succesful then move from Outbox to Sent.
    Can be called from Cron

    @param contact_method: "EMAIL", "SMS" or "TWITTER"

    @ToDo: contact_method = "ALL"
    """
    db = current.db
    current.manager.load("msg_outbox")

    if contact_method == "SMS":
        table = db.msg_setting
        settings = db(table.id > 0).select(table.outgoing_sms_handler,
                                           limitby=(0, 1)).first()
        if not settings:
            raise ValueError("No SMS handler defined!")
        outgoing_sms_handler = settings.outgoing_sms_handler

    def dispatch_to_pe_id(pe_id):
        # Send the current message to the entity's highest-priority
        # contact of the requested method
        table = db.pr_contact
        query = (table.pe_id == pe_id) & \
                (table.contact_method == contact_method) & \
                (table.deleted == False)
        recipient = db(query).select(table.value,
                                     orderby = table.priority,
                                     limitby=(0, 1)).first()
        if recipient:
            if contact_method == "EMAIL":
                return self.send_email(recipient.value, subject, message)
            elif contact_method == "SMS":
                if outgoing_sms_handler == "WEB_API":
                    return self.send_sms_via_api(recipient.value, message)
                elif outgoing_sms_handler == "SMTP":
                    return self.send_sms_via_smtp(recipient.value, message)
                elif outgoing_sms_handler == "MODEM":
                    return self.send_sms_via_modem(recipient.value, message)
                elif outgoing_sms_handler == "TROPO":
                    # NB This does not mean the message is sent
                    return self.send_text_via_tropo(row.id, message_id,
                                                    recipient.value, message)
                else:
                    return False
            elif contact_method == "TWITTER":
                return self.send_text_via_twitter(recipient.value, message)
        return False

    table = db.msg_outbox
    ltable = db.msg_log
    ptable = db.pr_person
    petable = db.pr_pentity

    query = (table.status == 1) & \
            (table.pr_message_method == contact_method)
    rows = db(query).select()
    chainrun = False # Used to fire process_outbox again - Used when messages are sent to groups
    for row in rows:
        status = True
        message_id = row.message_id
        query = (ltable.id == message_id)
        logrow = db(query).select(limitby=(0, 1)).first()
        if not logrow:
            s3_debug("s3msg", "logrow not found")
            continue
        # Get message from msg_log
        message = logrow.message
        subject = logrow.subject
        sender_pe_id = logrow.pe_id
        # Determine list of users
        entity = row.pe_id
        query = petable.id == entity
        entity_type = db(query).select(petable.instance_type,
                                       limitby=(0, 1)).first()
        if entity_type:
            entity_type = entity_type.instance_type
        else:
            s3_debug("s3msg", "Entity type unknown")

        if entity_type == "pr_group":
            # Take the entities of it and add in the messaging queue - with
            # sender as the original sender and marks group email processed
            # Set system generated = True
            table3 = db.pr_group
            query = (table3.pe_id == entity)
            group_id = db(query).select(table3.id,
                                        limitby=(0, 1)).first().id
            table4 = db.pr_group_membership
            query = (table4.group_id == group_id)
            recipients = db(query).select(table4.person_id)
            for recipient in recipients:
                person_id = recipient.person_id
                query = (ptable.id == person_id)
                pe_id = db(query).select(ptable.pe_id,
                                         limitby=(0, 1)).first().pe_id
                table.insert(message_id = message_id,
                             pe_id = pe_id,
                             pr_message_method = contact_method,
                             system_generated = True)
            status = True
            chainrun = True

        elif entity_type == "org_organisation":
            # Take the entities of it and add in the messaging queue - with
            # sender as the original sender and marks group email processed
            # Set system generated = True
            table3 = db.org_organisation
            query = (table3.pe_id == entity)
            org_id = db(query).select(table3.id,
                                      limitby=(0, 1)).first().id
            table4 = db.hrm_human_resource
            query = (table4.organisation_id == org_id)
            recipients = db(query).select(table4.person_id)
            for recipient in recipients:
                person_id = recipient.person_id
                # NOTE(review): "uery" looks like a typo for "query" -
                # the stale HR query above is what db(query) actually
                # uses for this pe_id lookup; confirm and fix
                uery = (ptable.id == person_id)
                pe_id = db(query).select(ptable.pe_id,
                                         limitby=(0, 1)).first().pe_id
                table.insert(message_id = message_id,
                             pe_id = pe_id,
                             pr_message_method = contact_method,
                             system_generated = True)
            status = True
            chainrun = True

        if entity_type == "pr_person":
            # Person
            status = dispatch_to_pe_id(entity)

        if status:
            # Update status to sent in Outbox
            db(table.id == row.id).update(status=2)
            # Set message log to actioned
            db(ltable.id == message_id).update(actioned=True)

    # Explicitly commit DB operations when running from Cron
    db.commit()

    if chainrun :
        self.process_outbox(contact_method)

    return