def get(self):
    """Serve a replay file for download, looked up by ?id=, ?name=, or ?hash=.

    - id:   numeric datastore id; non-numeric values are treated as not found.
    - name: filename lookup; permanently redirects to the canonical id URL.
    - hash: content-hash lookup; serves the file directly.
    """
    if 'id' in self.request.GET:
        try:
            replay = Replay.get_by_id(int(self.request.get('id')))
        except ValueError:
            # Non-numeric id: fall through to the not-found check below.
            replay = None
    elif 'name' in self.request.GET:
        replay = Replay.all().filter('filename =', self.request.get('name')).get()
        if replay is None:
            return self.not_found()
        # The canonical download URL is id-based; redirect permanently.
        return self.redirect('/down?id=%s' % replay.key().id(), permanent=True)
    elif 'hash' in self.request.GET:
        replay = Replay.all().filter('hash = ', self.request.get('hash')).get()
    else:
        # FIX: previously fell through with `replay` unbound (NameError) when
        # no recognized query parameter was supplied; mirror the /view handler.
        return self.redirect('/')
    if replay is None:
        return self.not_found()
    filename = replay.filename
    content = replay.content.content
    # To inspect details for the below code, see http://greenbytes.de/tech/tc2231/
    if u'WebKit' in self.request.user_agent:
        # Safari 3.0 and Chrome 2.0 accepts UTF-8 encoded string directly.
        filename_header = 'filename=%s' % filename.encode('utf-8')
    elif u'MSIE' in self.request.user_agent:
        # IE does not support internationalized filename at all.
        # It can only recognize internationalized URL, so we do the trick via routing rules.
        filename_header = ''
    else:
        # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers).
        filename_header = 'filename*=UTF-8\'\'%s' % urllib.quote(filename.encode('utf-8'))
    self.response.headers['Content-Type'] = "application/octet-stream"
    self.response.headers['Content-Disposition'] = 'attachment; ' + filename_header
    self.response.out.write(content)
def get(self):
    """Render a page of replays ordered per the 'o' query parameter.

    'o' selects a sort mode from self.SORT ('a' or 'f', defaulting to 'a').
    A 'from' bookmark resumes paging at an exact (sort value, key) position.
    """
    # FIX: locals were named `ord` and `next`, shadowing the builtins.
    order = self.request.get('o', 'a')
    if order not in ('a', 'f'):
        order = 'a'
    next_bookmark = None
    # Paging by http://google-appengine.googlegroups.com/web/efficient_paging_using_key_instead_of_a_dedicated_unique_property.txt
    bookmark = self.request.get('from')
    if bookmark:
        try:
            order, first, key = self.load_bookmark(bookmark)
        except (ValueError, TypeError):
            # Malformed bookmark: restart from the front page.
            return self.redirect("/")
        # First, entities whose sort value equals the bookmark's, from the
        # bookmarked key onward (ties are broken by __key__)...
        replays = Replay.all(keys_only=True).filter(self.SORT[order][1], first).filter('__key__ >=', key).order('__key__').fetch(ON_PAGE + 1)
        if len(replays) < ON_PAGE + 1:
            # ...then entities strictly past the bookmark's sort value.
            replays.extend(Replay.all(keys_only=True).filter(self.SORT[order][2], first).order(self.SORT[order][0]).order('__key__').fetch(ON_PAGE + 1 - len(replays)))
    else:
        replays = Replay.all(keys_only=True).order(self.SORT[order][0]).order('__key__').fetch(ON_PAGE + 1)
    replays = Replay.get(replays)
    if len(replays) == ON_PAGE + 1:
        # The extra entity was fetched only to detect a further page.
        next_bookmark = self.dump_bookmark(order, replays[ON_PAGE])
        replays = replays[:-1]
    self.respond('main', {'title': 'browse replays', 'replays': replays, 'next': next_bookmark})
def get(self):
    """Search replays by player name or filename prefix, with key-based paging.

    'player' searches the 'players' property; 'file' searches 'filename'.
    A 'from' bookmark resumes a previous search at an exact position.
    """
    # FIX: locals were named `type` and `next`, shadowing the builtins.
    next_bookmark = None
    # Paging by http://google-appengine.googlegroups.com/web/efficient_paging_using_key_instead_of_a_dedicated_unique_property.txt
    bookmark = self.request.get('from')
    if bookmark:
        try:
            field, term, value, key = self.load_bookmark(bookmark)
        except (ValueError, TypeError):
            # Malformed bookmark: restart from the front page.
            return self.redirect("/")
        # Entities equal to the bookmarked sort value, from the bookmarked
        # key onward, then entities strictly past it within the prefix range.
        replays = Replay.all(keys_only=True).filter('%s =' % field, value).filter('__key__ >=', key).order('__key__').fetch(ON_PAGE + 1)
        if len(replays) < ON_PAGE + 1:
            replays.extend(Replay.all(keys_only=True).filter('%s >' % field, value).filter('%s <' % field, term + u'\ufffd').order(field).order('__key__').fetch(ON_PAGE + 1 - len(replays)))
    else:
        if self.request.get('player'):
            field = 'players'
            term = self.request.get('player')
        elif self.request.get('file'):
            field = 'filename'
            term = self.request.get('file')
        else:
            return self.redirect('/')
        # [term, term + u'\ufffd') expresses a string-prefix range query.
        replays = Replay.all(keys_only=True).filter('%s >=' % field, term).filter('%s <' % field, term + u'\ufffd').order(field).order('__key__').fetch(ON_PAGE + 1)
    replays = Replay.get(replays)
    if len(replays) == ON_PAGE + 1:
        # The extra entity was fetched only to detect a further page.
        next_bookmark = self.dump_bookmark(field, term, replays[ON_PAGE])
        replays = replays[:-1]
    self.respond('main', {'title': "%s search results for '%s'" % (field, term), 'replays': replays, 'next': next_bookmark})
def add_dl_job(_id):
    """Queue a download job for replay `_id`; admins only.

    Marks the replay as WAITING_DOWNLOAD and commits only if the job was
    successfully enqueued, rolling back otherwise. Always redirects back to
    the referrer (or the index page).
    """
    # Only admins may enqueue download jobs.
    if not current_user.is_admin():
        flash("Only admins can add new DL jobs.", "danger")
        return redirect(request.referrer or url_for("index"))

    # The replay must already exist in the database.
    replay = Replay.query.filter(Replay.id == _id).first()
    if replay is None:
        flash("Replay {} doesn't exist.".format(_id), "danger")
        return redirect(request.referrer or url_for("index"))

    # Flag the replay as pending and try to enqueue the job; the status
    # change is only persisted when enqueueing succeeds.
    replay.status = "WAITING_DOWNLOAD"
    db.session.add(replay)
    if Replay.add_dl_job(replay):
        flash("Added DL job for replay {}.".format(_id), "info")
        db.session.commit()
    else:
        flash("Error adding DL job for replay {}.".format(_id), "danger")
        db.session.rollback()

    return redirect(request.referrer or url_for("index"))
def message_replay(request, id):
    """Admin view: post a reply ("replay") to feedback message `id` and email it.

    GET renders an empty ReplayForm; a valid POST stores the reply, marks the
    message as answered, emails the reply text to the message's author, and
    redirects back to the message's admin page. An invalid POST re-renders the
    bound form so validation errors are shown.
    """
    # NOTE(review): Message.objects.get raises DoesNotExist (a 500) for an
    # unknown id — confirm whether a 404 (get_object_or_404) was intended.
    msg = Message.objects.get(id=id)
    if request.method == 'POST':
        form = ReplayForm(request.POST)
        if form.is_valid():
            # Saving replay for current message
            replay = Replay()
            replay.message = msg
            replay.text = form.cleaned_data['text']
            replay.save()
            # Set status for message
            msg.replayed = True
            msg.save()
            # Send email with text for message owner
            replay_msg = replay.text
            email = EmailMessage('[KaDeBo] Ответ на Ваше сообщение', replay_msg, to=[msg.email])
            email.send()
            messages.info(request, 'Ваше сообщение было отправлено!')
            return HttpResponseRedirect('/admin/feedback/message/%s' % (id))
    else:
        form = ReplayForm()
    data = {
        "form": form,
        "msg": msg,
    }
    return render_to_response(
        "admin/feedback/message/replay-admin-form.html",
        data,
        RequestContext(request, {}),
    )
def get(self):
    """Render the parsed view of a replay, addressed by parse id or filename."""
    args = self.request.GET
    if 'id' in args:
        # Direct lookup of the stored parse result by its key name.
        key = self.request.get('id')
        parsed = ReplayParse.get_by_key_name(key)
        if parsed is None:
            return self.not_found()
        self.respond('view', {'settings': parsed.settings, 'frames': parsed.frames, 'parsed': parsed})
        return
    if 'name' in args:
        # Resolve a filename to its parse key and redirect to the canonical URL.
        replay = Replay.all().filter('filename =', self.request.get('name')).get()
        if replay is None:
            return self.not_found()
        return self.redirect('/view?id=%s' % Replay.parsed.get_value_for_datastore(replay).name(), permanent=True)
    return self.redirect("/")
def post(self):
    """Handle a replay upload: parse it, then either render the parsed view
    ('view' in POST) or redirect to its canonical /view URL.

    Parse failures are logged and answered with the error_parse template.
    """
    if isinstance(self.request.POST.get('replay', None), cgi.FieldStorage):
        try:
            filename = self.request.POST['replay'].filename
            if not filename.lower().endswith('.rpl'):
                raise ValueError('Not a replay file! (%s)' % filename)
            replay, key, parsed = Replay.parse(self.request.get('replay'), filename, 'save' in self.request.POST)
            if 'view' in self.request.POST:
                if parsed is None:
                    parsed = ReplayParse.get(key)
                return self.respond('view', {'settings': parsed.settings, 'frames': parsed.frames, 'parsed': parsed})
        except Exception:
            # FIX: was a bare `except:`, which also swallows SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            traceback.print_exception(*sys.exc_info())
            self.respond('error_parse', {})
        else:
            # Reached only when parsing succeeded and 'view' was not requested.
            self.redirect('/view?id=%s' % key.name())
    else:
        # No uploaded file in the request: back to the front page.
        self.redirect('/')
def search():
    """Resolve a search query to a match id and redirect to its replay page.

    If the match is not yet in the database, validate it via the Steam WebAPI,
    create the Replay row, and enqueue a GC job to fetch it. Falls back to a
    flash message and a redirect to the referrer on error or invalid input.
    """
    form = SearchForm()
    if form.validate_on_submit():
        match_id = form.query.data
        error = False
        search_log = Search(current_user.get_id(), match_id, request.access_route[0])

        # Trim whitespace chars
        match_id = match_id.strip()

        # Normalize input (in case of unicode variants of chars; can break at
        # urllib.urlencode level later on without this)
        match_id = unicodedata.normalize('NFKC', match_id)

        # If not a decimal input, pull out any numbers in the search query and
        # interpret them as a match id.
        if not unicode.isdecimal(match_id):
            # FIX: this local was named `search`, shadowing the function itself.
            id_match = re.search(r'([0-9]+)', match_id)
            if id_match is not None:
                match_id = id_match.group(1)

        if unicode.isdecimal(match_id):
            _replay = Replay.query.filter(Replay.id == match_id).first()

            # If we don't have match_id in database, check if it's a valid match
            # via the WebAPI and if so add it to DB.
            if not _replay:
                try:
                    # Only continue if the WebAPI doesn't throw an error for this
                    # match ID, and if the match ID for the info returned matches
                    # the match_id we sent (fixes edge-case bug that downed
                    # Dotabank once, where a user searched 671752079671752079 and
                    # the WebAPI returned details for 368506255).
                    match_data = steam.api.interface("IDOTA2Match_570").GetMatchDetails(match_id=match_id).get("result")
                    if "error" not in match_data.keys() and int(match_data.get("match_id")) == int(match_id):
                        # Use get_or_create in case of race-hazard where another
                        # request (e.g. double submit) has already processed this
                        # replay while we were waiting for match_data.
                        _replay, created = Replay.get_or_create(id=match_id, skip_webapi=True)
                        if created:
                            _replay._populate_from_webapi(match_data)
                            db.session.add(_replay)

                            queued = Replay.add_gc_job(_replay, skip_commit=True)
                            if queued:
                                flash("Replay {} was not in our database, so we've added it to the job queue to be parsed!".format(match_id), "info")
                                try:
                                    db.session.commit()
                                except IntegrityError:
                                    # Another request won the race; discard ours.
                                    # FIX: removed dead `pass` after rollback.
                                    db.session.rollback()
                            else:
                                db.session.rollback()
                                error = True
                except steam.api.HTTPError:
                    error = True

            if _replay:
                search_log.replay_id = _replay.id
                search_log.success = True
                db.session.add(search_log)
                db.session.commit()
                return redirect(url_for("replays.replay", _id=match_id))

        # We only get this far if there was an error or the matchid is invalid.
        if error:
            flash("Replay {} was not on our database, and we encountered errors trying to add it. Please try again later.".format(match_id), "warning")
        else:
            flash("Invalid match id. If this match id corresponds to a practice match it is also interpreted as invalid - Dotabank is unable to access practice lobby replays.", "danger")
        search_log.success = False
        db.session.add(search_log)
        db.session.commit()

    return redirect(request.referrer or url_for("index"))
def search():
    """Resolve a search query to a match id and redirect to its replay page.

    If the match is not yet in the database, validate it via the Steam WebAPI,
    create the Replay row, and enqueue a GC job to fetch it. Falls back to a
    flash message and a redirect to the referrer on error or invalid input.
    """
    form = SearchForm()
    if form.validate_on_submit():
        match_id = form.query.data
        error = False
        search_log = Search(current_user.get_id(), match_id, request.access_route[0])

        # Trim whitespace chars
        match_id = match_id.strip()

        # Normalize input (in case of unicode variants of chars; can break at
        # urllib.urlencode level later on without this)
        match_id = unicodedata.normalize('NFKC', match_id)

        # If not a decimal input, pull out any numbers in the search query and
        # interpret them as a match id.
        if not unicode.isdecimal(match_id):
            # FIX: this local was named `search`, shadowing the function itself.
            id_match = re.search(r'([0-9]+)', match_id)
            if id_match is not None:
                match_id = id_match.group(1)

        if unicode.isdecimal(match_id):
            _replay = Replay.query.filter(Replay.id == match_id).first()

            # If we don't have match_id in database, check if it's a valid match
            # via the WebAPI and if so add it to DB.
            if not _replay:
                try:
                    # Only continue if the WebAPI doesn't throw an error for this
                    # match ID, and if the match ID for the info returned matches
                    # the match_id we sent (fixes edge-case bug that downed
                    # Dotabank once, where a user searched 671752079671752079 and
                    # the WebAPI returned details for 368506255).
                    match_data = steam.api.interface("IDOTA2Match_570").GetMatchDetails(match_id=match_id).get("result")
                    if "error" not in match_data.keys() and int(match_data.get("match_id")) == int(match_id):
                        # Use get_or_create in case of race-hazard where another
                        # request (e.g. double submit) has already processed this
                        # replay while we were waiting for match_data.
                        _replay, created = Replay.get_or_create(id=match_id, skip_webapi=True)
                        if created:
                            _replay._populate_from_webapi(match_data)
                            db.session.add(_replay)

                            queued = Replay.add_gc_job(_replay, skip_commit=True)
                            if queued:
                                flash("Replay {} was not in our database, so we've added it to the job queue to be parsed!".format(match_id), "info")
                                try:
                                    db.session.commit()
                                except IntegrityError:
                                    # Another request won the race; discard ours.
                                    # FIX: removed dead `pass` after rollback.
                                    db.session.rollback()
                            else:
                                db.session.rollback()
                                error = True
                except steam.api.HTTPError:
                    error = True

            if _replay:
                search_log.replay_id = _replay.id
                search_log.success = True
                db.session.add(search_log)
                db.session.commit()
                return redirect(url_for("replays.replay", _id=match_id))

        # We only get this far if there was an error or the matchid is invalid.
        if error:
            flash("Replay {} was not on our database, and we encountered errors trying to add it. Please try again later.".format(match_id), "warning")
        else:
            flash("Invalid match id. If this match id corresponds to a practice match it is also interpreted as invalid - Dotabank is unable to access practice lobby replays.", "danger")
        search_log.success = False
        db.session.add(search_log)
        db.session.commit()

    return redirect(request.referrer or url_for("index"))
def search():
    """Resolve a search query (match id, Dota 2 protocol URL, or Dotabuff
    link) to a match id and redirect to its replay page.

    If the match is not yet in the database, validate it via the Steam WebAPI,
    create the Replay row, and enqueue a GC job to fetch it. Falls back to a
    flash message and a redirect to the referrer on error or invalid input.
    """
    form = SearchForm()
    if form.validate_on_submit():
        match_id = form.query.data
        error = False
        search_log = Search(current_user.get_id(), match_id, request.access_route[0])

        # Trim whitespace chars
        match_id = match_id.strip()

        # If not a decimal input, let's try pull match id from inputs we
        # recognise: Dota 2 protocol or dotabuff links.
        if not unicode.isdecimal(unicode(match_id)):
            # FIX: this local was named `search`, shadowing the function itself.
            id_match = re.search(r'(?:matchid=|matches\/)([0-9]+)', match_id)
            if id_match is not None:
                match_id = id_match.group(1)

        if unicode.isdecimal(unicode(match_id)):
            _replay = Replay.query.filter(Replay.id == match_id).first()

            # If we don't have match_id in database, check if it's a valid match
            # via the WebAPI and if so add it to DB.
            if not _replay:
                try:
                    match_data = steam.api.interface("IDOTA2Match_570").GetMatchDetails(match_id=match_id).get("result")
                    if "error" not in match_data.keys():
                        # Use get_or_create in case of race-hazard where another
                        # request (e.g. double submit) has already processed this
                        # replay while we were waiting for match_data.
                        _replay, created = Replay.get_or_create(id=match_id, skip_webapi=True)
                        if created:
                            _replay._populate_from_webapi(match_data)
                            db.session.add(_replay)

                            queued = Replay.add_gc_job(_replay, skip_commit=True)
                            if queued:
                                flash("Replay {} was not in our database, so we've added it to the job queue to be parsed!".format(match_id), "info")
                                try:
                                    db.session.commit()
                                except IntegrityError:
                                    # Another request won the race; discard ours.
                                    # FIX: removed dead `pass` after rollback.
                                    db.session.rollback()
                            else:
                                db.session.rollback()
                                error = True
                except steam.api.HTTPError:
                    error = True

            if _replay:
                search_log.replay_id = _replay.id
                search_log.success = True
                db.session.add(search_log)
                db.session.commit()
                return redirect(url_for("replays.replay", _id=match_id))

        # We only get this far if there was an error or the matchid is invalid.
        if error:
            # FIX: this string literal was broken across two physical lines in
            # the source; rejoined into a single literal.
            flash("Replay {} was not on our database, and we encountered errors trying to add it. Please try again later.".format(match_id), "warning")
        else:
            flash("Invalid match id. If this match id corresponds to a practice match it is also interpreted as invalid - Dotabank is unable to access practice lobby replays.", "danger")
        search_log.success = False
        db.session.add(search_log)
        db.session.commit()

    return redirect(request.referrer or url_for("index"))