def default_grouping(instance, **kw):
    """Fingerprint an error and attach it to a matching Group, creating one if needed."""
    log("Firing signal: default_grouping")
    # Guard against re-entry: a grouped error must not be processed again
    # (saving below would re-fire this signal).
    if instance.group:
        return
    hsh = generate_key(instance)
    if not hsh:
        return
    digest = hsh.hexdigest()
    created = False
    try:
        group = Group.all().filter("uid = ", digest)[0]
        # Recount the group's members (plus this error) and persist.
        group.count = Error.all().filter("group = ", group).count() + 1
        group.save()
    except IndexError:
        created = True
        group = Group()
        group.uid = digest
        group.count = 1
        group.save()
    instance.group = group
    instance.save()
    if created:
        signals.group_assigned.send(sender=group.__class__, instance=group)
def edit(id=None):
    """Edit a role: pre-populate the auth checkboxes on GET, persist changes on POST.

    Redirects back to this page on success or on a duplicate-name error;
    otherwise renders the edit form.
    """
    form = RoleForm()
    role = Role.query.get_or_404(id)
    if request.method == "GET":
        log('role.auth', role.auths)
        # auths is stored as a comma-separated string of integer ids.
        form.auths.data = [int(i) for i in role.auths.split(',')]
        log('temp', form.auths.data)
    if form.validate_on_submit():
        data = form.data
        role_count = Role.query.filter_by(name=data['name']).count()
        # Renaming onto an already-existing role name is rejected.
        if role.name != data["name"] and role_count == 1:
            flash("角色已经存在!", "err")
            return redirect(url_for("role.edit", id=id))
        role.name = data["name"]
        role.auths = ','.join(str(i) for i in data["auths"])
        role.save()
        flash("标签修改成功!", "ok")
        # BUG FIX: the redirect was computed but never returned, so a
        # successful save fell through and re-rendered the stale template.
        return redirect(url_for('role.edit', id=id))
    return render_template('admin/role/edit.html', form=form, role=role)
def default_grouping(instance, **kw):
    """Fingerprint an error, attach it to its Group, and fire assignment signals."""
    log("Firing signal: default_grouping")
    hsh = generate_key(instance)
    if not hsh:
        return
    digest = hsh.hexdigest()
    created = False
    try:
        group = Group.objects.filter(uid=digest)[0]
        # DB-side atomic increment; an error may carry an aggregate count.
        group.count = F('count') + getattr(instance, 'count', 1)
        group.save()
    except IndexError:
        created = True
        group = Group()
        group.uid = digest
        group.count = 1
        group.save()
    instance.group = group
    instance.save()
    if created:
        signals.group_assigned.send(sender=group.__class__, instance=group)
    signals.error_assigned.send(sender=instance.__class__, instance=instance)
def edit(id):
    """Edit a preview: update its title and optionally replace the logo file."""
    form = PreviewForm()
    preview = Preview.query.filter_by(id=id).first_or_404()
    log('preview', preview)
    # The logo is optional when editing, so drop its validators.
    form.logo.validators = []
    if form.validate_on_submit():
        data = form.data
        preview_count = Preview.query.filter_by(title=data['title']).count()
        # Renaming onto another preview's title is rejected.
        if preview_count == 1 and preview.title != data['title']:
            flash("预告标题已经存在!", "err")
            return redirect(url_for(".edit", id=preview.id))
        preview.title = data['title']
        if form.logo.data.filename != '':
            path = app.config["UP_DIR"]
            log('form.logo.data', form.logo.data)
            preview.logo = saved_file(path, form.logo.data)
        preview.save()
        flash("预告编辑成功!", "ok")
        # BUG FIX: the redirect was created but never returned, so a
        # successful POST fell through to the render below instead of
        # redirecting.
        return redirect(url_for('.edit', id=preview.id))
    return render_template('admin/preview/edit.html', form=form, preview=preview)
def notifications_send(request):
    """Cron: batch pending Error notifications per user and email each a digest."""
    log("Firing cron: notifications_send")
    notifications = Notification.all().filter("type = ", "Error").filter("tried = ", False)
    # Batch the notifications up, one Holder per recipient.
    holders = {}
    for notif in notifications:
        for user in notif.user_list():
            key = str(user.key())
            if key not in holders:
                holder = Holder()
                holder.user = user
                holders[key] = holder
            holders[key].objs.append(notif.notifier())
            holders[key].notifs.append(notif)
    for user_id, holder in holders.items():
        try:
            send_error_email(holder)
            for notification in holder.notifs:
                notification.tried = True
                notification.completed = True
                notification.save()
        # BUG FIX: was a bare "except:", which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        except Exception:
            info = sys.exc_info()
            data = "%s, %s" % (info[0], info[1])
            for notification in holder.notifs:
                notification.tried = True
                # NOTE(review): completed is set True even on failure —
                # confirm this is intended (error_msg records the cause).
                notification.completed = True
                notification.error_msg = data
                notification.save()
    return render_plain("Cron job completed")
def download_files(msg):
    """Download an attachment from a group message and record it as a GroupMsg."""
    group_dict = Group.name_id_dict()
    group = msg.User.NickName
    # NOTE(review): falls back to the literal string 'group_name' when the
    # group is unknown — confirm this default is intended.
    group_id = group_dict.get(group, 'group_name')
    data = {
        'id': msg.NewMsgId,
        'type': msg.Type,
        'create_time': float(msg.CreateTime),
        'group': group,
        'group_id': group_id,
        'user_actual_name': msg.ActualNickName,
    }
    file_type = msg.type
    file_path = os.path.join(basedir, 'data', group, file_type)
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    fileName = os.path.join(file_path, msg.fileName)
    msg.download(fileName)
    data['content'] = fileName
    group_msg = GroupMsg.create(data)
    msg['Text'] = '下载方法'
    log(zh_str(msg), 'file.txt')
    log(zh_str(group_msg.to_dict()), 'down_load.txt')
    # BUG FIX: was the Python 2 print statement; the call form below is
    # valid in both Python 2 and Python 3.
    print('@%s@%s' % (msg.type, msg.fileName))
def _transfer(img_group_name, neural_args):
    """Launch a single style-transfer run on Spell for one image group.

    Writes a per-run log file recording the command and inputs, then
    returns the Spell run id.
    """
    imgs_base_dir = "{}/{}".format(paths["imagesBaseRemote"], img_group_name)
    style_imgs = spell.ls("{}/{}".format(imgs_base_dir, "styles"))
    content_img = spell.ls("{}/{}".format(imgs_base_dir, "content"))[0]
    run = spell.client.runs.new(
        command=_neural_style_cmd(neural_args, style_imgs, content_img),
        machine_type="K80",
        github_url="https://github.com/ebranda/neural-style-tf",
        attached_resources=_mounts(img_group_name))
    spell.wait_until_running(run)
    run_id = spell.get_id(run)
    spell.label_run(run, "Style Transfer single")
    log_path = localfs.filepath(paths["resultsDirLocal"],
                                "log-run-{}.txt".format(run_id))
    entries = [
        ("Run ID", run_id),
        ("Command", "python {}".format(" ".join(sys.argv))),
        ("Parameters", neural_args),
        ("Image group", img_group_name),
        ("Style images", " ".join(style_imgs)),
        ("Content image", content_img),
    ]
    with open(log_path, "w") as f:
        for label, value in entries:
            f.write("{} = {}\n".format(label, value))
    log(spell.get_run_started_message(run))
    return run_id
def transfer(args):
    """Kick off style-transfer runs for up to three uploaded image groups."""
    log("Running style transfer...")
    MAX_NUM_RUNS = 3
    groups = spell.ls(paths["imagesBaseRemote"])
    if not groups:
        raise ValueError(
            "No image groups found in remote directory. Make sure you uploaded some images first."
        )
    # Cap the number of simultaneous runs.
    groups = groups[:MAX_NUM_RUNS]
    log("Transferring style for image sets {}...".format(groups))
    quality = getstr(args, 0, True) or "med"
    param_presets = {
        "low": params.neural_style_transfer_low,
        "med": params.neural_style_transfer_med,
        "high": params.neural_style_transfer_high,
    }
    if quality not in param_presets:
        raise ValueError("Quality parameter must be one of {}".format(
            list(param_presets.keys())))
    neural_args = param_presets[quality]
    run_ids = [_transfer(g, neural_args) for g in groups]
    if len(run_ids) > 1:
        log("When runs have completed, run the command 'python run.py st_download {}-{}'"
            .format(run_ids[0], run_ids[-1]))
    else:
        log("When run has completed, run the command 'python run.py st_download {}'"
            .format(run_ids[0]))
    log("to download the results to your images/results folder.")
def notifications_send(request):
    """Cron: group untried notifications per user and email each a digest."""
    log("Firing cron: notifications_send")
    notifications = Notification.all().filter("tried = ", False)
    # Batch the notifications up, one Holder per recipient.
    holders = {}
    for notif in notifications:
        for user in notif.user_list():
            key = str(user.key())
            if key not in holders:
                holder = Holder()
                holder.user = user
                holders[key] = holder
            holders[key].objs.append(notif.error)
            holders[key].notifs.append(notif)
    for user_id, holder in holders.items():
        try:
            send_error_email(holder)
            for notification in holder.notifs:
                notification.tried = True
                notification.completed = True
                notification.save()
        # BUG FIX: was a bare "except:", which also traps SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        except Exception:
            info = sys.exc_info()
            data = "%s, %s" % (info[0], info[1])
            for notification in holder.notifs:
                notification.tried = True
                # NOTE(review): completed=True on failure looks suspicious
                # but is preserved; error_msg records the failure cause.
                notification.completed = True
                notification.error_msg = data
                notification.save()
    return render_plain("Cron job completed")
def new_csrf_token():
    """Generate a CSRF token, store it in the session, and return it to the client."""
    token = str(uuid.uuid4())
    session['token'] = token
    log('new_csrf_token:', token)
    return token
def default_add_project_urls(instance, **kw):
    """When an issue belongs to a project, link it to each of the project's URLs."""
    log("Firing signal: default_add_project_urls")
    if not instance.project:
        return
    for project_url in instance.project.projecturl_set:
        link = IssueProjectURL(issue=instance, project_url=project_url,
                               status="not_fixed")
        link.save()
def notifications_cleanup(request):
    """Cron: delete already-tried notifications that are over a week old."""
    log("Firing cron: notifications_cleanup")
    cutoff = datetime.today() - timedelta(days=7)
    # Per-object delete keeps any model delete hooks/signals firing.
    for notification in Notification.objects.filter(tried=True, timestamp__lt=cutoff):
        notification.delete()
    return render_plain("Cron job completed")
def notifications_cleanup(request):
    """Cron: purge tried notifications older than seven days (datastore version)."""
    log("Firing cron: notifications_cleanup")
    cutoff = datetime.today() - timedelta(days=7)
    stale = Notification.all().filter("tried = ", True).filter("timestamp < ", cutoff)
    for notification in stale:
        notification.delete()
    return render_plain("Cron job completed")
def current_user():
    """Return the logged-in User from the session, or None when nobody is logged in."""
    if 'user_id' not in session:
        return None
    user = User.exist(id=int(session['user_id']))
    log('current_user', user)
    return user
def notifications_cleanup(days=0):
    """Cron: delete tried notifications older than ``days`` days.

    NOTE(review): the default of 0 makes the cutoff "now", i.e. it deletes
    every tried notification — confirm callers always pass an explicit value.
    """
    log("Firing cron: notifications_cleanup")
    cutoff = datetime.today() - timedelta(days=days)
    for notification in Notification.objects.filter(tried=True, timestamp__lt=cutoff):
        notification.delete()
    return render_plain("Cron job completed")
def app_before_req():
    """Before each request, resolve the current user into flask.g."""
    g.user = current_user()
    # Debug logging of client address headers (x-real-ip / x-forwarded-for)
    # used to live here; re-enable if needed.
    log('before_request current_user:', g.user)
def edit(id):
    """Render the topic edit form (reuses the add template) with a fresh CSRF token."""
    token = new_csrf_token()
    topic = Topic.find_by(id=id)
    tabs = Tab.find_all()
    log('edit topic:', topic)
    return render_template('topic/add.html',
                           topic=topic,
                           token=token,
                           tabs=tabs)
def saved_file(path, data):
    """Save an uploaded file under ``path`` with a sanitized, renamed filename.

    Returns the new filename (not the full path).
    """
    log('开始保存')
    if not os.path.exists(path):
        os.makedirs(path)
        # BUG FIX: os.chmod requires a numeric mode; the old call passed the
        # string 'rw', which raised TypeError whenever the directory was
        # first created. 0o766 grants owner rwx, group/other rw.
        os.chmod(path, 0o766)
    filename = secure_filename(data.filename)
    newname = change_filename(filename)
    # BUG FIX: join with the separator instead of naive concatenation, which
    # produced a wrong path whenever ``path`` lacked a trailing slash.
    data.save(os.path.join(path, newname))
    return newname
def amo_notification(instance, **kw):
    """Notify the AMO users when an error arrives for one of the AMO domains."""
    # BUG FIX: the log line previously named the wrong signal
    # ("default_notification"), which made log traces misleading.
    log('Firing signal: amo_notification')
    # NOTE(review): get(email__in=...) raises MultipleObjectsReturned if more
    # than one user matches — confirm amo_users is expected to hold one email.
    user = User.objects.get(email__in=amo_users)
    if instance.domain in amo_domains:
        notification = Notification()
        notification.notifier = instance
        notification.save()
        notification.user.add(user)
def send(args, verbose=False):
    """Run a command and return its decoded output.

    Returns None for empty args, the command's stdout normally, and an
    empty list in debug mode (the command is only logged, never executed).
    """
    if not args:
        return None
    if verbose or debug:
        log("SENDING CMD: " + str(args))
    if debug:
        return []
    if isinstance(args, str):
        args = args.split(" ")
    return subprocess.check_output(args).decode("utf-8")
def create_admin():
    """Seed nine admin accounts (admin1..admin9) with zero-padded passwords."""
    log('程序执行创建添加管理员')
    for i in range(1, 10):
        form = dict(name='admin' + str(i), pwd=str(i).zfill(3), role_id=1)
        admin = Admin(form)
        log('a', admin)
        admin.save()
def default_project(instance, **kw):
    """Attach a project URL to a group by looking up its sample error's domain."""
    log("Firing signal: default_project")
    if instance.project_url:
        return
    error = instance.sample()
    if not error:
        return
    domain = lookup_domain(error.domain)
    if not domain:
        return
    instance.project_url = domain
    instance.save()
def default_browser_parsing(instance, **kw):
    """Parse an error's raw user agent into browser name and OS fields."""
    log("Firing signal: default_browser_parsing")
    if not instance.user_agent:
        return
    bc = get()
    if not bc:
        return
    browser = bc(instance.user_agent)
    if browser:
        instance.user_agent_short = browser.name()
        instance.operating_system = browser.platform()
        instance.user_agent_parsed = True
        instance.save()
def pdf(request):
    """Render a bill as a PDF, or re-show the input form when the id is bad."""
    bill_id = request.GET.get('bill_id')
    html_o = BytesIO()
    pdf_o = BytesIO()
    try:
        html_o.write(get_html(request, bill_id))
    except (ValueError, TypeError) as e:
        log('get_html exception')
        log(e)
        return render(request, 'input.html',
                      {'error': 'Could not retrieve bill id#%s' % bill_id})
    HTML(string=html_o.getvalue().decode()).write_pdf(pdf_o)
    return HttpResponse(pdf_o.getvalue(), content_type='application/pdf')
def send_issue_email(holder):
    """Email a user a digest of all issue changes collected in ``holder``.

    Unlike errors, issue batches are assumed to be small, so nothing is
    truncated.
    """
    data = "\n".join(as_text(obj) for obj in holder.objs)
    count = len(holder.objs)
    if count > 1:
        subject = "Reporting %s issue changes" % count
    else:
        subject = "Reporting an issue change"
    log("Sending email to: %s of %s issues(s)" % (holder.user.email, count))
    mail.send_mail(sender=settings.DEFAULT_FROM_EMAIL,
                   to=holder.user.email,
                   subject=subject,
                   body=error_msg % (data, settings.SITE_URL))
def default_notification(instance, **kw):
    """Create a notification for every approved user when an error warrants it."""
    log("Firing signal: default_notification")
    # TODO: this will be changed to look up a user profile, see
    # http://github.com/andymckay/arecibo/issues/issue/4
    # NOTE(review): priorities of 5 and above are skipped — confirm lower
    # numbers mean more urgent on this scale.
    if instance.priority >= 5:
        return
    notification = Notification()
    notification.error = instance
    notification.user = [str(u.key()) for u in approved_users()]
    notification.save()
def send_issue_email(holder):
    """Send one digest email covering every issue change held for a user."""
    # Issue batches are expected to stay small, so no truncation is applied.
    texts = [as_text(obj) for obj in holder.objs]
    count = len(texts)
    subject = ("Reporting %s issue changes" % count) if count > 1 \
        else "Reporting an issue change"
    log("Sending email to: %s of %s issues(s)" % (holder.user.email, count))
    mail.send_mail(sender=settings.DEFAULT_FROM_EMAIL,
                   to=holder.user.email,
                   subject=subject,
                   body=error_msg % ("\n".join(texts), settings.SITE_URL))
def download(args):
    """Downloads one or more image files to local computer.

    If a range of run ids is provided then files are saved to a folder named
    according to the range.

    Args:
        args: A single run id or a range of run ids in the form [start]-[end]

    Raises:
        RuntimeError: If arguments are missing or illegal.
        ValueError: If a run number is not an integer.
    """
    # BUG FIX: was "len(args) is not 1" — identity comparison against an int
    # is an implementation detail of CPython's small-int cache and is not a
    # value comparison; use != instead.
    if len(args) != 1:
        raise RuntimeError(
            "The download command requires a single parameter specifying a run number or a range"
        )
    arg = args[0]
    run_numbers = []
    if "-" in arg:  # a start-end range of run ids
        start, end = arg.split("-")
        if not utils.isinteger(start) or not utils.isinteger(end):
            raise ValueError("Run number must be an integer")
        for run_num in range(int(start), int(end) + 1):
            if run_num not in run_numbers:
                run_numbers.append(run_num)
    else:
        if not utils.isinteger(arg):
            raise ValueError("Run number must be an integer")
        run_number = int(arg)
        if run_number not in run_numbers:
            run_numbers.append(run_number)
    create_folder = len(run_numbers) > 1
    if create_folder:
        # One fresh folder per range; clear any previous results for it.
        output_dir = localfs.filepath(paths["resultsDirLocal"],
                                      "result-{}".format(arg))
        if localfs.exists(output_dir):
            localfs.rm(output_dir)
        localfs.mkdir(output_dir)
    else:
        output_dir = paths["resultsDirLocal"]
    log("Fetching remote files...")
    if not localfs.exists(paths["resultsDirLocal"]):
        localfs.mkdir(paths["resultsDirLocal"])
    for run_number in run_numbers:
        spell.download(
            "runs/{}/image_output/result/result.png".format(run_number))
        target_path = localfs.filepath(output_dir,
                                       "result-{}.png".format(run_number))
        localfs.mv("result.png", target_path)
        log("Saved result file [{}]".format(target_path))
def upload():
    """Runs a pix2pix image upload and processing job on Spell.

    Expects image pair files in images/pix2pix-dataset (built with the
    image_pairs function). Images are pre-processed to strip the alpha
    channel, since pix2pix fails on images that have one.

    Raises:
        RuntimeError: If image files are missing.
        ValueError: If any image file name is illegal.
    """
    log("Preparing to upload images...")
    dataset_dir = paths["datasetlocal"]
    if localfs.isempty(dataset_dir):
        raise RuntimeError("Missing images in {}".format(dataset_dir))
    for filename in localfs.ls(dataset_dir):
        # TODO check for non-alphanumeric characters
        if " " in filename:
            raise ValueError("Image file names cannot contain spaces.")
    log("Removing alpha channel from images...")
    imageutils.strip_alpha_channel(dataset_dir)
    log("Uploading. Please wait...")
    spell.upload(dataset_dir, paths["datasetremote"])
    log("Upload complete.")
def post(request):
    """Receive an inbound email report and parse it into an Error.

    The "to" address encodes an account key (``<name>-<key>@<domain>``)
    unless the message arrived at a publicly allowed address, in which case
    the public account number is assumed.
    """
    log("Processing email message")
    mailobj = mail.InboundEmailMessage(request.raw_post_data)
    to = mailobj.to
    if to in settings.ALLOWED_RECEIVING_ADDRESSES:
        key = settings.ARECIBO_PUBLIC_ACCOUNT_NUMBER
    else:
        # Address is assumed to be of the form "<name>-<key>@<domain>".
        key = to.split("-", 1)[1].split("@")[0]
    if key != settings.ARECIBO_PUBLIC_ACCOUNT_NUMBER:
        log("To address (%s, to %s) does not match account number (%s)" %
            (key, to, settings.ARECIBO_PUBLIC_ACCOUNT_NUMBER))
        # Mail for unknown accounts is silently dropped (returns None).
        return
    for content_type, body in mailobj.bodies("text/plain"):
        # " Broken " in the subject marks a 404-style report; anything else
        # goes through the 500 parser.
        if mailobj.subject.find(" Broken ") > -1:
            log("Trying to parse body using 404 parser")
            result = parse_404(body, mailobj.subject)
        else:
            log("Trying to parse body using 500 parser")
            result = parse_500(body, mailobj.subject)
        # NOTE(review): one Error per text/plain body — confirm multi-body
        # messages are expected to create several errors.
        err = Error()
        result["account"] = key
        populate(err, result)
    return render_plain("message parsed")
def register():
    """Member registration view: create the user on a valid POST."""
    form = UserForm()
    log('进入登录页面')
    if form.validate_on_submit():
        user = User(form.data)
        log('user', user)
        user.save()
        flash("注册成功!", "ok")
        return redirect(url_for(".register"))
    return render_template('home/register.html', form=form)
def add():
    """Create a reply on a topic and @-mention any users named in its content."""
    form = request.form.to_dict()
    sender = current_user()
    reply = Reply.new(user_id=sender.id, **form)
    # Bump the topic's updated-at timestamp.
    Topic.update(reply.topic_id)
    # Deliver in-site messages to everyone @-mentioned in the content.
    receivers = users_from_content(form['content'])
    send_message(sender, receivers,
                 title='你被 {} AT了'.format(sender.username),
                 content=form['content'])
    log('sender At message: sender-{}; receivers={}'.format(sender, receivers))
    return redirect(url_for('topic.detail', id=form['topic_id']))
def move():
    """Move a file within the cloud storage root.

    Query args:
        from: source path, relative to the cloud root.
        to:   destination path, relative to the cloud root.
    """
    _from = request.args.get("from")
    _to = request.args.get("to")
    if not _from:
        log('User %r tried to move, but forgot "from" argument', get_user())
        return '<h1>Missing "from" argument</h1>', 400
    if not _to:
        log('User %r tried to move, but forgot "to" argument', get_user())
        return '<h1>Missing "to" argument</h1>', 400
    real_from = cfg.CLOUD_PATH / _from
    real_to = cfg.CLOUD_PATH / _to
    # SECURITY FIX: "from"/"to" are untrusted user input; reject paths that
    # resolve outside the cloud root (e.g. "../../etc/passwd").
    root = cfg.CLOUD_PATH.resolve()
    for candidate in (real_from, real_to):
        resolved = candidate.resolve()
        if resolved != root and root not in resolved.parents:
            log("User %r sent a path outside the cloud root (%r)",
                get_user(), str(candidate))
            return "File not found", 400
    try:
        shutil.move(real_from, real_to)
        log("User %r moved file %r to %r", get_user(), _from, _to)
        return "<h1>File moved correctly</h1>", 200
    except (FileNotFoundError, FileExistsError) as err:
        log(
            "User %r tried to move file %r to %r, but failed (%r)",
            get_user(), _from, _to, err,
        )
        return "File not found", 400
def post(request):
    """Parse the text/plain and text/html bodies of an inbound email message."""
    log("Processing email message")
    mailobj = mail.InboundEmailMessage(request.raw_post_data)
    found = False
    # BUG FIX: "found" was overwritten by each parse() call, so a successful
    # text/plain parse was forgotten when a later text/html body failed to
    # parse. Accumulate with "or" so any single success sticks.
    for content_type, body in mailobj.bodies("text/plain"):
        found = parse(content_type, body) or found
    for content_type, body in mailobj.bodies("text/html"):
        found = parse(content_type, body) or found
    if not found:
        log("No contents found in the message.")
    return render_plain("message parsed")
def send_error_email(holder):
    """Email a user a digest of held errors, truncated to the first ten."""
    alot = 10
    texts = [as_text(obj) for obj in holder.objs[:alot]]
    data = "\n".join(texts)
    count = len(holder.objs)
    if count > 1:
        subject = "Reporting %s errors" % count
    else:
        subject = "Reporting an error"
    if count > alot:
        # Anything past the cap is only visible on the website.
        data += "\n...truncated. For more see the website.\n"
    log("Sending email to: %s of %s error(s)" % (holder.user.email, count))
    mail.send_mail(sender=settings.DEFAULT_FROM_EMAIL,
                   to=holder.user.email,
                   subject=subject,
                   body=error_msg % (data, settings.SITE_URL))
def default_browser_parsing(instance, **kw):
    """Fill in browser/OS fields from the raw user agent string.

    The parsed flag guards against re-entry: saving below re-fires this
    signal, so already-parsed errors bail out immediately.
    """
    log("Firing signal: default_browser_parsing")
    if instance.user_agent_parsed:
        return
    if not instance.user_agent:
        return
    browser = get()(instance.user_agent)
    if browser:
        instance.user_agent_short = browser.name()
        instance.operating_system = browser.platform()
        instance.user_agent_parsed = True
        instance.save()
def send_error_email(holder):
    """Send one digest email for a user's held errors (at most ten shown)."""
    cap = 10
    body_text = "\n".join(as_text(obj) for obj in holder.objs[:cap])
    count = len(holder.objs)
    subject = ("Reporting %s errors" % count) if count > 1 \
        else "Reporting an error"
    if count > cap:
        body_text += "\n...truncated. For more see the website.\n"
    log("Sending email to: %s of %s error(s)" % (holder.user.email, count))
    mail.send_mail(sender=settings.DEFAULT_FROM_EMAIL,
                   to=holder.user.email,
                   subject=subject,
                   body=error_msg % (body_text, settings.SITE_URL))
def default_notification(instance, **kw):
    """Create a notification for approved users when an error is urgent enough."""
    log("Firing signal: default_notification")
    # Priorities of 5 and above are not notified.
    if instance.priority >= 5:
        return
    users = approved_users()
    if not users.count():
        return
    notification = Notification()
    notification.notifier = instance
    notification.save()
    for user in users:
        notification.user.add(user)
def init(self):
    """Load the wineries database, preferring memcache over the CSV on disk.

    Builds four indexes — by name, by state, by city, and by row hash — and
    caches the whole structure in memcache under ``self.mc_key``.
    """
    if WineriesSearcher.db:
        pass
        #return WineriesSearcher.__single
    else:
        # first time init
        # test if there is db already stored in memcache
        self.db = memcache.get(self.mc_key)
        if not self.db:
            # reload from disk
            # NOTE(review): 'rb' mode plus reader.next() below is Python 2
            # csv usage; this block will not run unchanged on Python 3.
            reader = csv.reader(open(DB_FILE_PATH, 'rb'), delimiter=';', quotechar='|')
            name_db = {}
            state_db = {}
            city_db = {}
            hash_db = {}
            # skipping first row (the header)
            reader.next()
            for row in reader:
                name, city, state = row
                # Rows with an empty name column are skipped entirely.
                if name:
                    name_db[name] = city,state
                    state_db.setdefault(state,[]).append((name,city))
                    city_db.setdefault(city,[]).append((name,state))
                    hash_db[make_hash_id(row)] = name
            log(hash_db)
            #log(state_db["CA"])
            #log(len(state_db["CA"]))
            #log(city_db)
            #log(sorted(state_db.keys()))
            # fill db
            self.db = {
                "city" : city_db,
                "state" : state_db,
                "name" : name_db,
                "hash" : hash_db,
            }
            # store it in memcache
            memcache.set(self.mc_key,self.db)
        else:
            # got from memcache
            #log(self.db["state"].keys())
            #log(self.db["state"][""])
            pass
def priority(instance, **kw):
    """Assign/adjust an error's priority from its type and its project's stage."""
    log("Firing signal: priority")
    # Certain infrastructure failures always get priority 7.
    if instance.type in ['OperationalError', 'SMTPRecipientsRefused']:
        instance.priority = 7
        instance.save()
    if instance.group:
        project_url = instance.group.project_url
        if project_url:
            stage = project_url.stage
            log("Firing signal: priority, %s" % stage)
            # Lower number appears to mean more urgent: production raises
            # urgency, testing lowers it — TODO confirm the scale.
            if stage == 'production':
                instance.priority -= 2
            elif stage == 'testing':
                instance.priority += 2
            # Clamp to the valid 1..10 range before saving.
            instance.priority = min(max(instance.priority, 1), 10)
            instance.save()
def default_issue_notification(instance, **kw):
    """Given an issue, queue a notification to every approved user."""
    log("Firing signal: default_notification")
    users = approved_users()
    if not users.count():
        return
    notification = Notification()
    notification.type = "Issue"
    notification.type_key = str(instance.key())
    notification.user = [str(u.key()) for u in users]
    notification.save()

# turn this on when its all working
#issue_created.connect(default_issue_notification, dispatch_uid="default_issue_notification")
#issue_changed.connect(default_issue_notification, dispatch_uid="default_issue_notification")
def default_notification(instance, **kw):
    """Queue an error notification for each user whose profile wants it.

    A user is notified when their profile's notification threshold is set
    and the error's priority is at or below that threshold.
    """
    log("Firing signal: default_notification")
    recipients = []
    for user in approved_users():
        profile = get_profile(user)
        if profile.notification and instance.priority <= profile.notification:
            recipients.append(user)
    if not recipients:
        return
    notification = Notification()
    notification.type = "Error"
    notification.type_key = str(instance.key())
    notification.user = [str(u.key()) for u in recipients]
    notification.save()
def default_grouping(instance, **kw):
    """Fingerprint an error and attach it to its Group (created on first sight)."""
    log("Firing signal: default_grouping")
    hsh = generate_key(instance)
    if not hsh:
        return
    digest = hsh.hexdigest()
    created = False
    try:
        group = Group.objects.get(uid=digest)
        # DB-side atomic increment; an error may represent several occurrences.
        group.count = F('count') + getattr(instance, 'count', 1)
        group.save()
    except Group.DoesNotExist:
        created = True
        group = Group.objects.create(uid=digest,
                                     count=getattr(instance, 'count', 1))
    instance.group = group
    instance.save()
    if created:
        signals.group_assigned.send(sender=group.__class__, instance=group)
def parse(self):
    """Download (or fetch from cache) browscap.ini and build regex lookup tables.

    Populates:
        self.sections: compiled regexes for browsers that are not parents.
        self.items:    compiled regex -> capability dict.
        self.browsers: section name -> capability dict (parents included).
    """
    key = "browser-capabilities-raw"
    raw = cache.get(key)
    # if the data isn't there, download it
    if raw is None:
        data = None
        log("Fetching from browser capabilities")
        try:
            # NOTE(review): urllib.urlopen is the Python 2 API.
            data = urllib.urlopen("http://www.areciboapp.com/static/browscap.ini")
        except (IOError):
            pass
        if data: # and data.code == 200:
            # that should be one week (1 min > 1 hour > 1 day > 1 week)
            log("...succeeded")
            raw = data.read()
            cache.set(key, raw, 60 * 60 * 24 * 7)
        else:
            log("...failed")
            # try again in 1 hour if there was a problem
            cache.set(key, "", 60 * 60)
            raw = ""
    else:
        log("Using cached browser capabilities")
    string = StringIO(raw)
    cfg = ConfigParser()
    cfg.readfp(string)
    self.sections = []
    self.items = {}
    self.browsers = {}
    parents = set()
    for name in cfg.sections():
        qname = name
        # Escape regex metacharacters by hand, then translate browscap
        # wildcards (? -> ".", * -> ".*?") into a regex anchored at both ends.
        for unsafe in list("^$()[].-"):
            qname = qname.replace(unsafe, "\%s" % unsafe)
        qname = qname.replace("?", ".").replace("*", ".*?")
        qname = "^%s$" % qname
        sec_re = re.compile(qname)
        sec = dict(regex=qname)
        sec.update(cfg.items(name))
        p = sec.get("parent")
        if p:
            parents.add(p)
        self.browsers[name] = sec
        # Only leaf entries (sections no one names as a parent so far)
        # participate in matching. NOTE(review): this relies on parents
        # appearing before their children in the file — confirm.
        if name not in parents:
            self.sections.append(sec_re)
            self.items[sec_re] = sec
def search_by_name(self, name_part, state_part=None, city_part=None, use_cached=True):
    """Search wineries by (partial) name, optionally filtered by state/city.

    Returns a list of (name, city, state) tuples sorted by name. Results are
    memoized in memcache keyed on the full query.
    """
    mc_key = "%s-%s-%s" % (name_part, state_part, city_part)
    #log((name_part,state_part,city_part))
    if use_cached:
        result = memcache.get(mc_key)
        if result is not None:
            # assuming we got something usable from the cache
            return result
    result = []
    # make a new search
    # State-only search: every winery in the state matches.
    if not name_part and state_part:
        log("here we are")
        all = self.db["state"].get(state_part, [])
        for item in all:
            result.append(item + (state_part,))  # item is tuple (name, city)
        log(len(result))
        return result
    for name, item in self.db["name"].items():
        if name_part in name.lower():
            city, state = item
            if state_part and state_part not in state:
                continue  # skip it
            if city_part and city_part not in city:
                continue  # skip it
            result.append((name, city, state))
    # sort it by winery name
    result = sorted(result, key=itemgetter(0))
    try:
        # Cache best-effort; oversized values raise and are simply skipped.
        memcache.set(mc_key, result)  # it will store it for as long as it can
    # BUG FIX: was the Python 2-only "except Exception, e" syntax; the
    # "as" form is valid on both 2.6+ and 3.
    except Exception as e:
        log("Probably too big, exc: %s" % e)
    # BUG FIX: the computed result was never returned on the main path, so
    # every uncached name search silently yielded None.
    return result
def default_add_issue(instance, **kw):
    """Append an "Issue created." entry to a freshly created issue's log."""
    log("Firing signal: default_add_issue")
    message = _("Issue created.")
    instance.add_log(message)
def default_add_comment(instance, **kw):
    """Append a "Comment created." entry to the parent issue's log."""
    log("Firing signal: default_add_comment")
    message = _("Comment created.")
    instance.issue.add_log(message)