def _save_news(w, h, titles, links, wordvec):
    """Persist the top keywords and articles of every feature to MongoDB.

    For each of the `pc` features in the factorization (w: article×feature,
    h: feature×word) the 4 heaviest words and 4 heaviest articles are stored,
    then the update timestamp is rewritten and the index cache is purged.
    """
    mongo.db.news.drop()  # TODO: Save news until out of memory
    feature_count, word_count = np.shape(h)
    for feat in range(feature_count):
        # (weight, word) pairs for this feature, heaviest first.
        # sort()+reverse() kept (not sort(reverse=True)) to preserve
        # the original tie ordering exactly.
        keywords = [(h[feat, col], wordvec[col]) for col in range(word_count)]
        keywords.sort()
        keywords.reverse()
        # (weight, title, link) triples for this feature, heaviest first.
        articles = [(w[row, feat], titles[row], links[row])
                    for row in range(len(titles))]
        articles.sort()
        articles.reverse()
        mongo.db.news.insert({'id': feat,
                              'keywords': keywords[:4],
                              'articles': articles[:4]})
    mongo.db.update.drop()
    mongo.db.update.insert({'time': datetime.now().strftime('%d/%m/%Y %Hg:%Mp')})
    cache.delete_memoized(index)
def edit_post(id, ver):
    """Create or edit the current user's post for nomination *id*.

    A *ver* greater than 1 is rejected with 404; a truthy *ver* pre-fills
    the form from the one-level backup fields instead of the current
    keyword/story.
    """
    if ver and ver > 1:
        abort(404)
    nom = Nom.query.get_or_404(id)
    author = current_user._get_current_object()
    post = Post.query.filter_by(nom=nom, author=author).first()
    if post is None:
        # First edit by this author: start from an empty story.
        post = Post(keyword=nom.keyword, story="", nom=nom, author=author)
        db.session.add(post)
    form = PostForm()
    if form.validate_on_submit():
        content_changed = (post.keyword != form.keyword.data
                           or post.story != form.story.data)
        if content_changed:
            # Keep a one-level backup of the previous content before
            # overwriting it.
            post.keyword_backup = post.keyword
            post.keyword = form.keyword.data
            post.story_backup = post.story
            post.story = form.story.data
            post.shared = form.shared.data
            db.session.add(post)
        elif post.shared != form.shared.data:
            # Only the share flag changed; no backup needed.
            post.shared = form.shared.data
            db.session.add(post)
        cache.delete_memoized(user)
        return redirect(url_for('.nom_view', id=nom.id))
    form.keyword.data = post.keyword_backup if ver else post.keyword
    form.story.data = post.story_backup if ver else post.story
    form.shared.data = post.shared
    return render_template('main/nom_edit.html', nom=nom, form=form, ver=ver)
def __setitem__(self, key, value):
    """Store *value* under *key*, creating the Setting row if missing."""
    setting, _created = get_or_create(Setting, key=key)
    setting.value = value
    setting.save()
    # The memoized dict view of all settings is now stale.
    cache.delete_memoized(Setting.as_dict)
def mutate(self, info, title, description, game_id, char_id, categories_id, links):
    """Validate the input and create a new post with its links.

    Returns CreatePost(ok=False, errors=[...]) on validation failure or
    when a link URL is already used (IntegrityError), otherwise
    CreatePost(ok=True, post=<new post>). Invalidates the filtered-posts
    cache on success.
    """
    print(info)
    title = title.strip()
    errors = []
    if title == '':
        errors.append("Title can't be empty.")
    if len(description) > 10000:
        errors.append("The description is too long (max 10000 chars).")
    # all() short-circuits on the first invalid URL; the original loop
    # kept calling validators.url() after a failure for no benefit.
    if not all(validators.url(link) for link in links):
        errors.append("Invalid URL(s).")
    if not links:
        errors.append("No URL(s).")
    if errors:
        return CreatePost(ok=False, errors=errors)
    try:
        newpost = PostModel(title=title, description=description,
                            game_id=game_id, char_id=char_id,
                            associations_ids=categories_id)
        for link in links:
            db.session.add(LinkModel(url=link, post=newpost))
        db.session.commit()
        cache.delete_memoized(get_filtered_posts)  # Invalidating posts cache
        return CreatePost(ok=True, post=newpost)
    except sqlalchemy.exc.IntegrityError as e:
        print(e.orig.args)
        db.session.rollback()
        # Most likely cause: one of the link URLs violates a unique
        # constraint — report each offender.
        wrongLinks = LinkModel.query.filter(LinkModel.url.in_(links)).all()
        if wrongLinks:
            msg = "The following URL is already used : "
            errors += [msg + link.url for link in wrongLinks]
        if not errors:
            errors.append("Error while creating post.")
        return CreatePost(ok=False, errors=errors)
def gen():
    """Generate feed data for the requested url, or redirect to the docs.

    Results are memoized for 24h; a truthy `nocache` parameter busts the
    cache entry for that url before regenerating.
    """
    def get_key(key):
        # Read `key` from the POST form or the GET query string.
        if request.method == "POST":
            return request.form[key]
        if request.method == "GET":
            return request.args.get(key)
        return ""

    url = get_key("url")
    nocache = get_key("nocache")
    if not url:
        return redirect(docs_url, code=301)

    started = time.time()

    @cache.memoize(timeout=86400)
    def gen_data(uri):
        return Gen(url=uri).gen()

    if nocache:
        cache.delete_memoized(gen_data, url)
    data = gen_data(url)
    data["cost"] = time.time() - started
    return jsonify(data)
def cleareCache():
    """Invalidate the cached book list, and one cached book unless id == 'books'.

    NOTE(review): the name is misspelled ("cleare") but is kept — renaming
    would break whatever registers this handler.
    """
    payload = request.get_json()
    print("clear cache for: " + str(payload['id']))
    cache.delete_memoized(getCachedBooks)
    if payload['id'] != 'books':
        cache.delete_memoized(getCachedBook, str(payload['id']))
    return jsonify(), 200
def logout():
    """Log the user out of the application.

    Drops the memoized Facebook group list for this token, removes the
    user from the session, and ends the login session.

    Note: this does not log the user out of Facebook - this is done by
    the JavaScript SDK.
    """
    # Purge the cached group list while g.user is still available.
    cache.delete_memoized(get_fb_groups, g.user['access_token'])
    session.pop('user', None)
    logout_user()
    app.logger.debug('User logout')
    return redirect(url_for('index'))
def create_dealer(dealer):
    """Insert *dealer* unless one with the same dealer_code already exists.

    Rolls back on IntegrityError. Returns the dealer that was passed in,
    whether or not it was inserted.
    """
    existing_dealer = get_dealer(dealer.dealer_code)
    # `is None`, not `== None` (PEP 8); also drop the redundant parens.
    if existing_dealer is None:
        try:
            db.session.add(dealer)
            db.session.commit()
            # The memoized dealer list is stale after an insert.
            cache.delete_memoized(__get_dealers_from_db__)
        except IntegrityError as e:
            print(f"Error creating dealer {e}")
            db.session.rollback()
    return dealer
def create_car(car):
    """Insert *car* unless one with the same VIN already exists.

    Rolls back on IntegrityError. Returns the car that was passed in,
    whether or not it was inserted.
    """
    existing_car = get_car(car.vin)
    # `is None`, not `== None` (PEP 8); also drop the redundant parens.
    if existing_car is None:
        try:
            db.session.add(car)
            db.session.commit()
            # The memoized car list is stale after an insert.
            cache.delete_memoized(__get_cars_from_db__)
        except IntegrityError as e:
            print(f"Error creating car {e}")
            # Use the session proxy directly (db.session.rollback), matching
            # create_dealer — the original called db.session().rollback().
            db.session.rollback()
    return car
def save_uploaded_data(session_id, data, extension):
    """Invalidate the session's cached data and persist the new upload.

    Keyword arguments:
    session_id -- ID of the current session
    data -- Pandas dataframe containing the uploaded data
    extension -- File extension of the uploaded file
    """
    # Drop the memoized copy so the next read sees the fresh upload.
    cache.delete_memoized(earthquake_data.get_earthquake_data, session_id)
    data.to_json(earthquake_data.TEMP_FILE_DF % session_id)
    ext_path = earthquake_data.TEMP_FILE_EXT % session_id
    with open(ext_path, 'w') as file_out:
        file_out.write(extension)
def __init__(self, key):
    """Fetch static file *key* through the cache and adopt its response.

    A ValueError from the cache layer invalidates the entry and retries
    once; a non-OK response also drops the cache entry so the failure is
    not served from cache again.
    """
    super().__init__()
    assert isinstance(key, str) and not key.startswith('/')
    try:
        resp, headers = get_cached_static_file(key)
    except ValueError:
        # Normally happens when the return-value format of
        # get_cached_static_file(key) has changed: purge and retry once.
        cache.delete_memoized(get_cached_static_file, key)
        resp, headers = get_cached_static_file(key)
    if not resp.ok:
        # Do not keep a failed fetch cached.
        cache.delete_memoized(get_cached_static_file, key)
    self.data = resp.content
    self.status_code = resp.status_code
    # These headers are already processed; do not use resp.headers.
    self.headers = headers
def add_entry():
    """Show the group-post form (GET) or post a message to selected groups (POST).

    On POST, the message is published to each selected group's feed; a
    successful publish invalidates the memoized group list for the token.
    """
    if request.method == 'GET':
        fb_groups = get_fb_groups(g.user['access_token'])
        return render_template('add_entry.html', user=g.user, app_id=FB_APP_ID,
                               name=FB_APP_NAME, groups=fb_groups)
    else:
        graph = GraphAPI(g.user['access_token'])
        groups = request.form.getlist('group_select')
        app.logger.debug("group_select: " + ','.join(groups))
        message = request.form.get('group_text', '')
        app.logger.debug("group_text: " + message)
        for group in groups:
            try:
                graph.put_object(group, "feed", message=message)
            except Exception as error:
                # str(error) is portable: Exception.message does not exist
                # on Python 3 and the original raised AttributeError here.
                app.logger.error(str(error))
            else:
                cache.delete_memoized(get_fb_groups, g.user['access_token'])
        return redirect(url_for('add_entry'))
def gen():
    """Generate feed data for the requested url, or redirect to the docs.

    Results are memoized for 24h; a truthy `nocache` parameter busts the
    cache entry for that url before regenerating.
    """
    url = get_key("url")
    nocache = get_key("nocache")
    if not url:
        return redirect(docs_url, code=301)

    started = time.time()

    @cache.memoize(timeout=86400)
    def gen_data(uri):
        return Gen(url=uri).gen()

    if nocache:
        # Explicit cache bust requested by the caller.
        cache.delete_memoized(gen_data, url)
    data = gen_data(url)
    data["cost"] = time.time() - started
    return jsonify(data)
def world_image(img_name):
    """Serve the Chinju-fu world image for the session's world_ip.

    Mirrors main.js (e.prototype._getKeyName / _getPath,
    main.js?version=4.2.1.0:formatted @line:12188): each dotted section
    of the IP is left-padded with zeros to three digits and joined with
    underscores, then suffixed with the size letter taken from the last
    character of *img_name* — e.g. "203.104.209.7" -> "203_104_209_007_t".
    A failed fetch drops the memoized entry so it is retried next time.

    :param img_name: requested image name; only its last character is used
    :return: PNG Response, or BadResponse when world_ip is not in session
    """
    world_ip = session.get('world_ip', None)
    if not world_ip:
        return BadResponse('world_ip not set.')
    # zfill(3) reproduces the manual ("00" + sec)[-3:]-style zero padding.
    padded = [ip_sec.zfill(3) for ip_sec in world_ip.split('.')]
    image_name = '_'.join(padded) + '_' + img_name[-1]
    response = get_chijufu_image(world_ip, image_name)
    if not response.ok:
        # Don't keep a failed fetch memoized.
        cache.delete_memoized(get_chijufu_image, world_ip, image_name)
    body = response.content
    return Response(body, headers={
        'Content-Type': 'image/png',
        'Cache-Control': 'no-cache'
    })
def gen():
    """Generate feed data for a url, or a site/sid pair; redirect to docs otherwise.

    When no `url` parameter is given, a `{'site': ..., 'sid': ...}` dict is
    built from the site/sid parameters and used as the generator input.
    Results are memoized for 24h; `nocache` busts the entry first.
    """
    url = get_key("url")
    if url is None:
        # Fall back to the site/sid addressing scheme.
        site = get_key('site')
        sid = get_key('sid')
        if site is not None and sid is not None:
            url = {'site': site, 'sid': sid}
    if not url:
        return redirect(docs_url, code=301)

    started = time.time()

    @cache.memoize(timeout=86400)
    def gen_data(uri):
        return Gen(url=uri).gen()

    if get_key("nocache"):
        cache.delete_memoized(gen_data, url)
    data = gen_data(url)
    data["cost"] = time.time() - started
    return jsonify(data)
def del_cache():
    """Invalidate the memoized getresponse() result."""
    # Do your work here
    cache.delete_memoized(getresponse)
    # print(...) is valid on both Python 2 and 3; the original bare
    # `print "cache deleted"` statement is a SyntaxError on Python 3.
    print("cache deleted")
def delete_memoized_json(self):
    """Drop the memoized json() result for this instance."""
    message = "delete_memoized_json for %(self)s" % {'self': self}
    print(message)
    cache.delete_memoized(self.json)
def delete_cache():
    """Invalidate the memoized getresponse() result and confirm."""
    cache.delete_memoized(getresponse)
    return "deleted"
def delete_memoized_all_as_json(cls):
    """Drop the class-level memoized all_as_json() result."""
    message = "delete_memoized_all_as_json for %(cls)s" % {'cls': cls}
    print(message)
    cache.delete_memoized(cls.all_as_json)
def delete_memoized_all_as_json(cls):
    """Invalidate the class-level memoized all_as_json() result."""
    message = "delete_memoized_all_as_json for %(cls)s" % {'cls': cls}
    print(message)
    cache.delete_memoized(cls.all_as_json)
def delete_memoized_json(self):
    """Invalidate the memoized json() result for this instance."""
    message = "delete_memoized_json for %(self)s" % {'self': self}
    print(message)
    cache.delete_memoized(self.json)
def logout_user_memoized():
    """Invalidate the memoized load_user entry for the current user, then log out."""
    # Bust the user-loader cache while current_user.id is still valid.
    cache.delete_memoized(load_user, current_user.id)
    logout_user()
def reload_albums():
    """Delete and refresh cached albums from gphotos.

    A "?dates=1" query parameter also refreshes the album dates.
    """
    refresh_dates = request.args.get("dates") == "1"
    gphotos.cache_albums(refresh_dates=refresh_dates)
    # Media listings derive from the album cache, so drop them too.
    cache.delete_memoized(gphotos.get_media)
    return redirect("/")
def add(self, title, body, user):
    """Create a post authored by *user* and invalidate the collection caches."""
    new_post = Post(title=title, body=body, author=user)
    # Both memoized views of the post collection are now stale.
    cache.delete_memoized(self.list)
    cache.delete_memoized(self.count)
    self.session.add(new_post)
def delete(self, post_id):
    """Delete the post with id *post_id* and invalidate the collection caches."""
    self.session.query(Post).filter_by(id=post_id).delete()
    # Both memoized views of the post collection are now stale.
    cache.delete_memoized(self.list)
    cache.delete_memoized(self.count)