async def post_handle(self, req):
    code = 200
    text = ''
    info = req.match_info
    if ('entity' not in info) or ('action' not in info) or ('item' not in info):
        code = 400
        print('bad request')
    else:
        entity = sanitize(info['entity'])
        action = sanitize(info['action'])
        item = sanitize(info['item'])
        if entity in self.post_handlers:
            if action in self.post_handlers[entity]:
                handler = self.post_handlers[entity][action]
                code, text = await handler(entity=entity,
                                            action=action,
                                            item=item,
                                            data={'query': req.query,
                                                  'body': await req.text()})
    return web.Response(status=code, content_type='application/json', text=text)
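# A minimal wiring sketch for post_handle() above, assuming aiohttp.
# create_item, make_app and the route pattern are illustrative placeholders,
# not part of the original code.
from aiohttp import web

async def create_item(entity, action, item, data):
    # Hypothetical registered handler: returns (status code, JSON body).
    return 200, '{"created": "%s/%s"}' % (entity, item)

def make_app(api):
    # api is an instance of the class that defines post_handle/post_handlers.
    api.post_handlers = {'widget': {'create': create_item}}
    app = web.Application()
    app.router.add_post('/{entity}/{action}/{item}', api.post_handle)
    return app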
def create_group(title, message):
    """ Create a new group. """
    user = users.get_current_user()
    groups = memcache.get(user.user_id() + "__G")
    if groups:
        count = len(groups)
    else:
        count = models.Group.gql("WHERE owner = :1", user).count()
    if count >= app.config.NUM_GROUPS_MAX:
        raise errors.MaxValueError
    group = models.Group(
        owner=user,
        title=util.sanitize(title, trunc=app.config.TEXT_LINE_LENGTH),
        message=util.sanitize(message, trunc=app.config.TEXT_BLOCK_LENGTH),
    )
    group.put()
    __update_cached_group(group)
    try:
        member = models.Member(owner=user, group=group)
        member.put()
    except:
        raise errors.GroupJoinError
    __update_cached_member(group, member)
    return str(group.key())
def update(self, system_annotations={}):
    if system_annotations == self:
        return
    system_annotations = sanitize(system_annotations)
    temp = sanitize(self.source._sysan)
    temp.update(system_annotations)
    self.source._sysan = temp
    super(SystemAnnotationDict, self).update(self.source._sysan)
def update_group(group_key, title, message):
    """ Modify group information. """
    group = __get_group_by_key(group_key)
    group.title = util.sanitize(title, trunc=app.config.TEXT_LINE_LENGTH)
    group.message = util.sanitize(message, trunc=app.config.TEXT_BLOCK_LENGTH)
    group.put()
    __update_cached_group(group)
def extractTitle(html):
    splited1 = html.split("<span class='st-title'>")
    if len(splited1) == 1:
        return ""
    else:
        splited2 = splited1[1].split("</span>")
        if "<a " in splited2[0]:
            splited3 = splited2[0].split(">")
            splited4 = splited3[1].split("</a")
            return util.sanitize(splited4[0])
        else:
            return util.sanitize(splited2[0])
def parse(self, text):
    parsed = self.grammar.parseString(text)
    tossups = []
    bonuses = []
    for tossup in parsed.tossup:
        tossups.append(Tossup(util.sanitize(tossup.question),
                              util.sanitize(tossup.answer)))
    for bonus in parsed.bonus:
        parts = []
        for part in bonus.bonus_part:
            parts.append(BonusPart(util.sanitize(part.question),
                                   util.sanitize(part.answer),
                                   int(part.point)))
        bonuses.append(Bonus(util.sanitize(bonus.header[0]), parts))
    return {"tossups": tossups, "bonuses": bonuses}
def update_settings(firstname, lastname):
    """ Modify the current user's settings. """
    user = users.get_current_user()
    key = user.user_id() + '__SET'
    settings = __get_settings(user)
    if not settings:
        settings = models.Settings(key_name=key)
    settings.firstname = util.sanitize(firstname, strict=True,
                                       trunc=app.config.TEXT_LINE_LENGTH)
    settings.lastname = util.sanitize(lastname, strict=True,
                                      trunc=app.config.TEXT_LINE_LENGTH)
    settings.put()
    memcache.set(key, settings, time=app.config.TIME_CACHE_DATA)
def import_layer_component(self, component):
    for layer_regex in component['layers']:
        source_path = findFile(component['file'])
        if source_path is None:
            raise Exception("Couldn't find source file '%s'" % component['file'])
        source_layer = self.find_layer(layer_regex, source_path)
        if source_layer is None:
            logging.warn("Couldn't find configured layer %s in DXF files", layer_regex)
            continue
        filename = "%s-%s" % (sanitize(component['file']), sanitize(source_layer))
        self.generate_layer_shapefile(source_path, source_layer, filename)
        yield parse_layer_config(filename, source_layer, component)
async def get_handle(self, req):
    code = 200
    text = ''
    info = req.match_info
    if ('entity' not in info) or ('attr' not in info):
        code = 400
        print('bad request')
    else:
        entity = sanitize(info['entity'])
        attr = sanitize(info['attr'])
        if entity in self.get_handlers:
            handler = self.get_handlers[entity]
            code, text = await handler(entity=entity, attr=attr,
                                       data={'query': req.query})
    return web.Response(status=code, content_type='application/json', text=text)
async def message_handler(self, message, jail, bonkbot):
    author = message.author
    msg = util.sanitize(message.content)
    if msg == "reset":
        counts = self.cf.get("counts")
        counts[message.author.name] = 0
        self.cf.put("counts", counts)
        await util.send_message(message.channel,
                                message.author.name + "'s drink count is "
                                + str(counts[message.author.name]))
        await message.delete(delay=60)
    if msg in self.drink_map:
        counts = self.cf.get("counts")
        print(counts)
        if message.author.name in counts:
            print("in map")
            counts[message.author.name] += self.drink_map[msg]
        else:
            print("not in map")
            counts[message.author.name] = 1
        print(counts)
        self.cf.put("counts", counts)
        await util.send_message(message.channel,
                                message.author.name + "'s drink count is "
                                + str(counts[message.author.name]))
        await message.delete(delay=60)
        return
    if msg == "scoreboard":
        output = ""
        for person, count in sorted(self.cf.get("counts").items(),
                                    key=lambda kv: -kv[1]):
            output += str(count) + " | " + person + "\n"
        await message.channel.send(output)
        await message.delete(delay=60)
def findTerm(self, text, wildcards=False):
    text = util.sanitize(text, wildcards=wildcards)
    groups = dict()
    for i in xrange(len(text), 0, -1):
        term = text[:i]
        deinflections = self.deinflector.deinflect(term, self.validator)
        if deinflections is None:
            self.processTerm(groups, term, wildcards=wildcards)
        else:
            for deinflection in deinflections:
                self.processTerm(groups, **deinflection)
    results = map(self.formatResult, groups.items())
    results = filter(operator.truth, results)
    results = sorted(
        results,
        key=lambda d: (len(d['source']), 'P' in d['tags'], -len(d['rules'])),
        reverse=True)
    length = 0
    for result in results:
        length = max(length, len(result['source']))
    return results, length
def findTerm(self, text, wildcards=False):
    groups = dict()
    if wildcards and isinstance(text, list):
        self.processTerm(groups, u"".join(text), u"".join(text),
                         root=text, wildcards=True)
    else:
        text = text["contentSampleFlat"]
        text = util.sanitize(text, wildcards=wildcards)
        for i in xrange(len(text), 0, -1):
            term = text[:i]
            deinflections = self.deinflector.deinflect(term, self.validator)
            if deinflections is None:
                self.processTerm(groups, term, term, wildcards=wildcards)
            else:
                for deinflection in deinflections:
                    self.processTerm(groups, term, **deinflection)
    results = map(self.formatResult, groups.items())
    results = filter(operator.truth, results)
    results = sorted(
        results,
        key=lambda d: (len(d['source']), 'P' in d['tags'],
                       -len(d['expression']), -len(d['rules'])),
        reverse=True)
    length = 0
    for result in results:
        length = max(length, len(result['source']))
    return results, length
def extractTitleWithIcons(html): splited1 = html.split("<p class=\"title\">") if len(splited1) == 1: return "" else: splited2 = splited1[1].split("<") return util.sanitize(splited2[0])
def extractDescription(html):
    splited1 = html.split("class=\"arrow\">")
    if len(splited1) == 1:
        return ""
    else:
        splited2 = splited1[1].split("</div>")
        return util.sanitize(splited2[0])
def extractDescriptions(html): splited1 = html.split("</p><p>") if len(splited1) == 1: return "" else: splited2 = splited1[1].split("</p>") return util.sanitize(splited2[0])
def extractTitle(html): splited1 = html.split("class=\"to-dtl\">") if len(splited1) == 1: return "" else: splited2 = splited1[1].split("</a>") result = util.sanitize(splited2[0]) return result.replace(" [終]", "")
def _validate_user(self, user: str):
    length = len(user)
    if length > 30:
        raise AppException("Username cannot be longer than 30 characters")
    if length < 4:
        raise AppException("Username cannot be shorter than 4 characters")
    if sanitize(user) != user:
        raise AppException("Username cannot have special characters or whitespace")
def _validate_team_name(self, val):
    val_len = len(val)
    if val_len > 80:
        raise AppException("Team name must be 80 characters or fewer")
    if val_len < 4:
        raise AppException("Team name must be at least 4 characters")
    if sanitize(val) != val:
        raise AppException("Invalid characters in team name")
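# The sanitize() helper itself is not shown in these snippets. Below is a
# minimal sketch of what the two validators above appear to assume (strip
# anything that is not alphanumeric, "_" or "-"); the exact rules are an
# assumption, not the original implementation.
import re

def sanitize(value):
    # Keep only characters treated as "safe"; comparing the result against
    # the input then rejects whitespace and special characters.
    return re.sub(r"[^A-Za-z0-9_-]", "", value)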
def extractTitleWithIcons(html): splited1 = html.split("<span class=\"schedule-title\">") if len(splited1) == 1: splited1 = html.split("<span class=\"schedule-titleC\">") if len(splited1) == 1: return "" splited2 = splited1[1].split("</span>") return util.sanitize(splited2[0])
def extractDescriptions(html): splited1 = html.split("</td>") splited2 = splited1[0].split("<div class=\"about\">") if len(splited2) == 1: return "" else: splited3 = splited2[1].split("</div>") return util.sanitize(splited3[0])
def do_notice(client, source, target, arguments):
    if len(arguments) > 1:
        client.notice(target, 'Sending "%s" to "%s".'
                      % (' '.join(arguments[1:]), arguments[0]))
        client.notice(arguments[0], sanitize(' '.join(arguments[1:])))
    else:
        client.notice(target, 'Usage: notice [channel or nick] [message]')
def properties(self, properties):
    """To set each property, _set_property is called, which calls
    __setitem__, which calls setattr(). The final call to setattr() passes
    through any validation defined in a subclass property setter.
    """
    for key, val in sanitize(properties).items():
        setattr(self, key, val)
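# A hedged sketch of the validation hook the docstring above describes:
# because values are assigned with setattr(), a subclass property setter
# gets to validate each one. The Node class and its "name" property are
# hypothetical, not part of the original code.
class Node(object):
    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        # Validation runs for every value routed through setattr().
        if not isinstance(value, str):
            raise ValueError("name must be a string")
        self._name = value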
async def message_handler(self, message, jail, bonkbot):
    # await self.client.user.edit(username="******")
    if util.sanitize(message.content) == "based":
        if message.author.id in bad_author_ids:
            response = "not based"
        else:
            response = "based"
        await util.send_message(message.channel, response)
        return True
    return False
def update(self, properties={}):
    if properties == self:
        return
    properties = sanitize(properties)
    for key, val in properties.iteritems():
        if not self.source.has_property(key):
            raise AttributeError('{} has no property {}'.format(
                self.source, key))
        setattr(self.source, key, val)
    super(PropertiesDict, self).update(self.source._props)
def else_statement(line):
    global order, transpile, variables, tabnum
    line = util.sanitize(line)
    new_order = ["else"]
    order.append(new_order)
    if not transpile:
        transpiler.starter(variables)
        transpile = True
    transpiler.add_line(" " * tabnum + transpiler.fill_else())
    tabnum = len(order)
def validate(infile, outfile='validator_results.txt', is_archive=False):
    start = time.time()
    if is_archive:
        archive_filename = 'collections_only_' + os.path.basename(infile)
        sanitize(infile, archive_filename, True, True)
    cleaned_file = sanitize(infile, is_archive=is_archive)
    inf = open(cleaned_file)
    outf = open(outfile, 'w')
    timestamp = str(int(time.time()))
    r = requests.get('https://raw.githubusercontent.com/Livefyre/integration-tools/master/lfvalidator/jsonschema/conv_schema.json?%s' % timestamp)
    schema = json.loads(r.text)
    counter = defaultdict(int)
    count = 0
    conv_ids = []
    validator = Draft4Validator(schema)
    for i, l in enumerate(inf):
        try:
            j = json.loads(l)
            errors = sorted(validator.iter_errors(j), key=lambda e: e.path)
            # check for duplicate conv/comment id, and unreferenced parent ids
            has_bad_ids = check_ids(j, conv_ids, counter, outf)
            if not errors and not has_bad_ids:
                continue
            print '\nErrors on line %d:' % (i+1)
            outf.write('\nErrors on line %d:\n' % (i+1))
            if has_bad_ids:
                print ''.join(has_bad_ids)
                outf.write(''.join(has_bad_ids))
            for error in errors:
                print_error(error, j, outf, counter)
        except ValueError, e:
            print '\nError, bad JSON on line %d' % (i+1)
            outf.write('\nError, bad JSON on line %d\n' % (i+1))
            counter['bad json,'] += 1
            continue
        except:
def if_statement(line):
    global order, transpile, variables, tabnum
    line = util.sanitize(line)
    new_order = ["if"]
    order.append(new_order)
    extract = extractvar.ifl(line)
    if not transpile:
        transpiler.starter(variables)
        transpile = True
    transpiler.add_line(" " * tabnum + transpiler.fill_if(extract.get_condition()))
    tabnum = len(order)
async def message_handler(self, message, jail, bonkbot):
    command_words = util.sanitize(
        discord.utils.escape_mentions(message.content)).split()
    if self.matches(message, bonkbot, command_words):
        new_list = sorted(
            list(
                set(self.cf.get("trigger_words")).union(
                    set(command_words[2:]))))
        self.cf.put("trigger_words", new_list)
        await message.channel.send(util.list_trigger_words())
        return True
    return False
def elseif_statement(line):
    global order, transpile, variables, tabnum
    line = util.sanitize(line)
    new_order = ["elseif"]
    order.append(new_order)
    if not transpile:
        transpiler.starter(variables)
        transpile = True
    transpiler.add_line(" " * tabnum + transpiler.fill_elseif(
        re.findall(r"elif ?(.+ ?[=<andor>=%=]+ ?.+) ?[->:}]?", line)[0]
        .replace("->", "").replace("{", "")))
    tabnum = len(order)
def while_loop(line):
    global order, transpile, variables, tabnum
    line = util.sanitize(line)
    new_order = ["while"]
    order.append(new_order)
    if not transpile:
        transpiler.starter(variables)
        transpile = True
    extract = extractvar.Whilel(line)
    transpiler.add_line(" " * tabnum + transpiler.fill_while(extract.get_condition()))
    tabnum = len(order)
def extractDescriptions(html): desc = "" splited1 = html.split("<dd>") splited2 = splited1[1].split("</dd>") splited3 = splited2[0].split("\");\n document.write(\"") desc = util.sanitize(splited3[0].rstrip("<br>")) matchObj = re.search(r'第[0-9]+回 ', desc) if matchObj: desc = desc[matchObj.end():] return "「" + desc + "」"
async def message_handler(self, message, jail, bonkbot):
    author = message.author
    # Check if the user can be jailed.
    if not jail.can_jail(author):
        return False
    for word in self.cf.get("trigger_words"):
        if word in util.sanitize(message.content).split():
            await jail.add(author)
            await util.send_message(message.channel, self.cf.get("jail_message"))
            return True
    return False
def findCharacters(self, text):
    text = util.sanitize(text, kana=False)
    results = list()
    processed = dict()
    for c in text:
        if c not in processed:
            match = self.dictionary.findCharacter(c)
            if match is not None:
                results.append(match)
            processed[c] = match
    return results
async def message_handler(self, message, jail, bonkbot):
    for data in self.cf.get("people"):
        if "|" in data:
            parts = data.split("|")
            person = parts[0]
            alias = random.choice(parts[1:])
        else:
            person = data
            alias = person
        if person in util.sanitize(message.content):
            await util.send_message(
                message.channel,
                random.choice(self.cf.get("names")).replace("$", alias))
            return True
    return False
def iterate_fasta(filenames):
    for filename in filenames:
        f = open(filename, "rU")
        name = None
        seq = ""
        for line in f:
            if line[0] == '>':
                if len(seq) > 0:
                    yield name, seq
                name = sanitize(line[1:].rstrip())
                seq = ''
            else:
                seq += ''.join(line.strip().split())
        if len(seq) > 0:
            yield name, seq
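# A short usage sketch for iterate_fasta(); "example.fasta" is a hypothetical
# file name. Each yielded pair is the sanitized record name and its
# concatenated sequence.
for name, seq in iterate_fasta(["example.fasta"]):
    print("%s: %d residues" % (name, len(seq)))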
def extractTitle(html): title = "" splited1 = html.split("<dt>") splited2 = splited1[1].split("</dt>") if "<A " in splited2[0]: splited3 = splited2[0].split(" style='color:blue'>") splited4 = splited3[1].split("</A>") title = util.sanitize(splited4[0]) else: title = util.sanitize(splited2[0]) matchObj = re.search(r'\(’[0-9]{2}\)$', title) if matchObj: title = title[:matchObj.start()] splited1 = html.split("<dd>") splited2 = splited1[1].split("</dd>") ep = util.sanitize(splited2[0]) matchObj = re.search(r'^第[0-9]+回', ep) if matchObj: ep = ep[matchObj.start():matchObj.end()] title = title + " " + ep return title
def runApp():
    if ('appleID' not in session) or ('applePass' not in session):
        return "ERROR: Apple ID and password not found in session. Please rerun the application."
    try:
        appleID = session['appleID']
        applePass = session['applePass']
        device = solroute.iCloudLogin(appleID, applePass)
    except:
        return "ERROR: AppleID and ApplePass are incorrect. Please log in again."
    status = device.status()
    name = status.get('name')
    displayName = status.get('deviceDisplayName')
    battery = str(float(status.get('batteryLevel')) * 100)
    location = solroute.getLocation(device)
    # location = ("40.6698", "-73.9438")  # TEMP
    if location is None:
        return "Error: Could not find device location! Make sure the iDevice's location is turned on and reload the application."
    deviceInfo = "%s: %s, %s%% battery remaining" % (name, displayName, battery)
    origin = session.get('origin')
    originCoords = solroute.getCoords(origin)
    destin = session.get('destin')
    destinCoords = solroute.getCoords(destin)
    polyline = util.sanitize(session['polyline'])
    if request.args.get('name') == 'routeInfo':
        template = "routeInfo.html"
    elif request.args.get('name') == 'roadMap':
        template = "roadMap.html"
    else:
        template = "satMap.html"
    return render_template(
        template,
        lat=location['latitude'],
        lng=location['longitude'],
        deviceInfo=deviceInfo,
        origin=origin,
        originCoords=originCoords,
        destination=destin,
        destinationCoords=destinCoords,
        polyline=polyline)
def create_member(invitation_code):
    """ Create a new member associated with a group/invitation. """
    invitation_code = util.sanitize(invitation_code, strict=True, trunc=40)
    invite = models.Invitation.gql("WHERE code = :1", invitation_code).get()
    if not invite:
        raise errors.GroupJoinError
    group_key = str(models.Invitation.group.get_value_for_datastore(invite))
    user = users.get_current_user()
    # Can't use __get_group_by_key() because it verifies permission that the
    # current user doesn't have yet. As a bonus, this minimizes the number of
    # memcache reads.
    cache_keys = [group_key, user.user_id() + "__M"]
    cache_vals = memcache.get_multi(cache_keys)
    [group, memberships] = [cache_vals.get(x) for x in cache_keys]
    cache_vals.clear()
    if group is None:
        group = models.Group.get(group_key)
        if not group:
            raise errors.QueryError
        cache_vals[group_key] = group
    invite_key = str(invite.key())
    invite.delete()
    __del_cached_invite(group, invite_key)
    if memberships is None:
        memberships = models.Member.gql("WHERE owner = :1", user).fetch(None)
        cache_vals[cache_keys[1]] = memberships
    if cache_vals:
        memcache.set_multi(cache_vals, time=app.config.TIME_CACHE_DATA)
    for m in memberships:
        if str(models.Member.group.get_value_for_datastore(m)) == group_key:
            break
    else:
        member = models.Member(owner=user, group=group)
        member.put()
        __update_cached_member(group, member)
    return group_key
def findTerm(self, text, wildcards=False): text = text["contentSampleFlat"] text = util.sanitize(text, wildcards=wildcards) groups = dict() length = 0 for i in xrange(len(text), 0, -1): term = text[:i] deinflections = self.deinflector.deinflect(term, self.validator) groupsBefore = len(groups) if deinflections is None: self.processTerm(groups, term, wildcards=wildcards) else: for deinflection in deinflections: self.processTerm(groups, **deinflection) if len(groups) > groupsBefore and length == 0: length = i results = map(self.formatResult, groups.items()) results = filter(operator.truth, results) results = sorted(results, key=lambda d: (len(d['source'])), reverse=True) return results, length
def system_annotations(self, sysan):
    """Directly set the model's _sysan column with dict sysan. """
    self._sysan = sanitize(sysan)
def uploadDir(config, devicename, localroot, label):
    log = logging.getLogger(config['devicename'])
    log.info("scanningFiles")
    totalbytes, totalcount = util.folderInfo(localroot)
    log.info("uploadBegin|{localroot}|{label}|{filecount}|{bytecount}".format(
        localroot=localroot, label=label, filecount=totalcount,
        bytecount=totalbytes))
    util.mail(config,
              config['templates']['uploadBegin']['body'].format(
                  filecount=totalcount,
                  megabytes=round(totalbytes / 1024 / 1024, 1)),
              subject=config['templates']['uploadBegin']['subject'])
    ftpconfig = config['ftp']
    log.debug("Connecting to " + ftpconfig['server'])
    try:
        host = connectHost(ftpconfig)
    except ftputil.error.FTPError:
        log.exception("Could not connect to FTP Server|" + traceback.format_exc())
        return
    superrootpath = ftpconfig['rootpath']
    host.makedirs(superrootpath)
    host.chdir(superrootpath)
    superrootpath = host.getcwd()
    begintime = datetime.datetime.now()
    rootdirname = begintime.strftime("%Y-%m-%d")
    host.makedirs(rootdirname + "-incomplete")
    host.chdir(rootdirname + "-incomplete")
    remoteroot = host.getcwd()
    host.synchronize_times()
    uploadedfiles = 0
    uploadedbytes = 0
    skippedfiles = 0
    statuslogcount = config['uploadlogcount']
    statusloginterval = config['maxlogdelay']
    statuslogstatus = -1
    lastlogdate = begintime
    failed_files = []

    def logProgress():
        nonlocal uploadedfiles, totalcount, uploadedbytes, totalbytes
        log.info("uploadProgress|{uploadedfiles}/{totalcount}|{uploadedbytes}/{totalbytes}".format(**vars()))

    def chunkCallback(info):
        nonlocal uploadedbytes, statuslogcount, totalbytes, statuslogstatus, uploadedfiles, totalcount
        uploadedbytes += len(info)
        curstatus = uploadedbytes * statuslogcount // totalbytes
        if curstatus != statuslogstatus:
            statuslogstatus = curstatus
            logProgress()

    for root, dirs, files in os.walk(localroot):
        relroot = os.path.relpath(root, localroot)
        # log.debug("walking " + relroot)
        hostroot = os.path.normpath(os.path.join(remoteroot, util.sanitize(relroot)))
        try:
            host.chdir(hostroot)
        except (socket.error, ftputil.error.FTPError, OSError, IOError) as e:
            log.info("tmp|Connection died(a)|" + traceback.format_exc())
            time.sleep(CONNDIEWAIT)
            host.close()
            host = connectHost(ftpconfig)
            host.chdir(hostroot)
        if (datetime.datetime.now() - lastlogdate).total_seconds() > statusloginterval:
            lastlogdate = datetime.datetime.now()
            logProgress()
        for dirname in dirs:
            dirname = util.sanitize(dirname)
            try:
                host.makedirs(dirname)
            except ftputil.error.PermanentError:
                log.debug("Error(b)|" + traceback.format_exc())
                pass
            except (socket.error, ftputil.error.FTPError, OSError, IOError) as e:
                log.debug("Error(b)|" + traceback.format_exc())
                time.sleep(CONNDIEWAIT)
                host.close()
                host = connectHost(ftpconfig)
                host.chdir(hostroot)
                host.makedirs(dirname)
        for fname in files:
            if (datetime.datetime.now() - lastlogdate).total_seconds() > statusloginterval:
                lastlogdate = datetime.datetime.now()
                logProgress()
            localfname = os.path.join(root, fname)
            if not os.path.isfile(localfname):
                continue
            hostfname = os.path.join(hostroot, util.sanitize(fname))
            uploadedfiles += 1
            log.debug("uploading " + os.path.join(relroot, fname))
            try:
                uploaded = host.upload_if_newer(localfname, util.sanitize(fname),
                                                callback=chunkCallback)
                if not uploaded:
                    log.debug("tmp|skipped file " + localfname)
                    uploadedbytes += os.path.getsize(localfname)
                    skippedfiles += 1
            except (socket.error, ftputil.error.FTPError, OSError, IOError) as e:
                log.info("tmp|(1)Failed uploading " + localfname + "|" + str(e))
                try:
                    time.sleep(CONNDIEWAIT)
                    host.close()
                    host = connectHost(ftpconfig)
                    host.chdir(hostroot)
                    host.upload(localfname, util.sanitize(fname), callback=chunkCallback)
                except (socket.error, ftputil.error.FTPError, OSError, IOError) as e:
                    log.info("tmp|(2)Failed uploading " + localfname + "|" + str(e))
                    failed_files.append((localfname, hostfname))
    again_failed_files = []
    if len(failed_files) > 0:
        log.info("failedFiles|" + "\n".join([local + "->" + remote
                                             for local, remote in failed_files]))
        while True:
            # retry uploading until no more files can be uploaded
            time.sleep(CONNDIEWAIT)
            host.close()
            host = connectHost(ftpconfig)
            again_failed_files = []  # rebuild the failure list on every pass
            for local, remote in failed_files:
                try:
                    host.upload(local, remote, callback=chunkCallback)
                except (socket.error, ftputil.error.FTPError, OSError, IOError) as e:
                    log.info("tmp|Again failed uploading " + local + "|" + traceback.format_exc())
                    again_failed_files.append((local, remote))
            if len(again_failed_files) == len(failed_files):
                break
            else:
                failed_files = again_failed_files
        if len(again_failed_files) > 0:
            log.warn("failedFiles|" + "\n".join([local + "->" + remote
                                                 for local, remote in failed_files]))
    endtime = datetime.datetime.now()
    totaltime = str(datetime.timedelta(seconds=int((endtime - begintime).total_seconds())))
    host.chdir(superrootpath)
    host.rename(remoteroot, findDirname(host, rootdirname))
    host.close()
    if uploadedfiles < totalcount:
        log.warn(str(totalcount - uploadedfiles) + " files could not be uploaded|")
    if util.getMountPoint(devicename) is None:
        log.error("Device disappeared before upload completed")  # might happen because unplugged or not enough power
    log.info("uploadComplete|{uploadedfiles}|{uploadedbytes}|{totaltime}|{skippedfiles}".format(**vars()))
    util.mail(config,
              config['templates']['uploadComplete']['body'].format(
                  filecount=uploadedfiles,
                  megabytes=round(uploadedbytes / 1024 / 1024, 1),
                  duration=totaltime),
              subject=config['templates']['uploadComplete']['subject'])
def create_invitation(group_key, email, greeting, send, salt):
    """ Create a new invitation associated with a group. """
    group = __get_group_by_key(group_key)
    cache_keys = [group_key + x for x in ["__M", "__I"]]
    cache_vals = memcache.get_multi(cache_keys)
    [members, invites] = [cache_vals.get(x) for x in cache_keys]
    if members is not None:
        count = len(members)
    else:
        count = models.Member.gql("WHERE group = :1", group).count()
    if invites is not None:
        count += len(invites)
    else:
        count += models.Invitation.gql("WHERE group = :1", group).count()
    if count >= app.config.NUM_MEMBERS_MAX:
        raise errors.MaxValueError
    email = util.sanitize(email, trunc=80)
    # Reminder:
    # The request parameters are unicode. hashlib.sha1 is expecting a str.
    # Must encode properly for this to work.
    sha1 = hashlib.sha1()
    sha1.update(group_key.encode("ascii", "ignore") +
                email.encode("ascii", "ignore") +
                str(time.time()) + salt)
    code = sha1.hexdigest()
    invite = models.Invitation(group=group, code=code, email=email)
    invite.put()
    __update_cached_invite(invite)
    if send:
        from google.appengine.api import mail
        message = mail.EmailMessage()
        message.sender = users.get_current_user().email()
        message.to = email
        message.subject = "Share your gift wish list at Wiftgish!"
        greeting = util.sanitize(greeting, strict=True, trunc=1000)
        if not greeting:
            greeting = "\n"
        else:
            greeting = "\n" + greeting + "\n"
        domain = app.config.DOMAIN
        if os.environ.get("SERVER_SOFTWARE", "").startswith("Dev"):
            domain = "localhost:8080"
        message.body = """
Hi!

I'm inviting you to share your gift wish lists with me on Wiftgish.

To join my group: '%s', you can use the direct link:

    http://%s%s/groups/join/%s

Or, sign in at http://%s and enter the invitation code: %s
%s
See you there!
""" % (
            cgi.escape(group.title),
            domain,
            app.config.PATH_PREFIX,
            code,
            domain,
            code,
            cgi.escape(greeting),
        )
        message.send()